├── performance-storage-service
├── pss_project
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── models
│ │ │ ├── __init__.py
│ │ │ ├── rest
│ │ │ │ ├── __init__.py
│ │ │ │ ├── metadata
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── NoisePageMetadata.py
│ │ │ │ │ ├── JenkinsMetadata.py
│ │ │ │ │ ├── EnvironmentMetadata.py
│ │ │ │ │ ├── GithubMetadata.py
│ │ │ │ │ └── Metadata.py
│ │ │ │ ├── metrics
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── MemoryMetrics.py
│ │ │ │ │ ├── LatencyMetrics.py
│ │ │ │ │ ├── BasePerformanceMetrics.py
│ │ │ │ │ ├── IncrementalMetrics.py
│ │ │ │ │ ├── SummaryPerformanceMetrics.py
│ │ │ │ │ ├── OLTPBenchMetrics.py
│ │ │ │ │ └── MicrobenchmarkMetrics.py
│ │ │ │ ├── parameters
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── TransactionWeight.py
│ │ │ │ │ ├── MicrobenchmarkParameters.py
│ │ │ │ │ └── OLTPBenchParameters.py
│ │ │ │ ├── utils.py
│ │ │ │ ├── ArtifactStatsRest.py
│ │ │ │ ├── MicrobenchmarkRest.py
│ │ │ │ ├── BaseRest.py
│ │ │ │ └── OLTPBenchRest.py
│ │ │ └── database
│ │ │ │ ├── __init__.py
│ │ │ │ ├── ArtifactStatsResult.py
│ │ │ │ └── MicrobenchmarkResult.py
│ │ ├── tests
│ │ │ ├── __init__.py
│ │ │ ├── models
│ │ │ │ ├── __init__.py
│ │ │ │ ├── test_ArtifactStatsResults.py
│ │ │ │ └── test_MicrobenchmarkResult.py
│ │ │ ├── utils
│ │ │ │ ├── __init__.py
│ │ │ │ └── utils.py
│ │ │ ├── views
│ │ │ │ ├── __init__.py
│ │ │ │ ├── tests_health.py
│ │ │ │ ├── test_oltpbench.py
│ │ │ │ ├── test_artifact_stats.py
│ │ │ │ └── test_microbenchmark.py
│ │ │ ├── factories
│ │ │ │ ├── __init__.py
│ │ │ │ ├── rest
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── metadata
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── GithubMetadataFactory.py
│ │ │ │ │ │ ├── JenkinsMetadataFactory.py
│ │ │ │ │ │ ├── NoisePageMetadataFactory.py
│ │ │ │ │ │ ├── EnvironmentMetadataFactory.py
│ │ │ │ │ │ └── MetadataFactory.py
│ │ │ │ │ ├── metrics
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── MicrobenchmarkMetricsFactory.py
│ │ │ │ │ │ ├── MemoryMetricsFactory.py
│ │ │ │ │ │ ├── IncrementalMetricsFactory.py
│ │ │ │ │ │ ├── LatencyMetricsFactory.py
│ │ │ │ │ │ └── OLTPBenchMetricsFactory.py
│ │ │ │ │ ├── parameters
│ │ │ │ │ │ ├── __init__.py
│ │ │ │ │ │ ├── TransactionWeightFactory.py
│ │ │ │ │ │ ├── MicrobenchmarkParametersFactory.py
│ │ │ │ │ │ └── OLTPBenchParametersFactory.py
│ │ │ │ │ ├── ArtifactStatsRestFactory.py
│ │ │ │ │ ├── OLTPBenchRestFactory.py
│ │ │ │ │ └── MicrobenchmarkRestFactory.py
│ │ │ │ └── database
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── ArtifactStatsDBFactory.py
│ │ │ │ │ ├── MicrobenchmarkDBFactory.py
│ │ │ │ │ └── OLTPBenchDBFactory.py
│ │ │ ├── serializers
│ │ │ │ ├── __init__.py
│ │ │ │ ├── rest
│ │ │ │ │ └── __init__.py
│ │ │ │ ├── database
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── test_OLTPBenchResultSerializer.py
│ │ │ │ │ ├── test_ArtifactStatsResultSerializer.py
│ │ │ │ │ └── test_MicrobenchmarkResultsSerializer.py
│ │ │ │ └── fields
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── test_UnixEpochDateTimeField.py
│ │ │ ├── github_integration
│ │ │ │ └── __init__.py
│ │ │ └── manual
│ │ │ │ ├── Dockerfile
│ │ │ │ └── local-prometheus.yml
│ │ ├── migrations
│ │ │ ├── __init__.py
│ │ │ ├── 0005_merge_20200915_0112.py
│ │ │ ├── 0007_remove_microbenchmarkresult_query_mode.py
│ │ │ ├── 0009_auto_20201109_0226.py
│ │ │ ├── 0004_auto_20200731_2230.py
│ │ │ ├── 0003_auto_20200724_0144.py
│ │ │ ├── 0013_change_creator_password.py
│ │ │ ├── 0012_hypertables.py
│ │ │ ├── 0004_auto_20200811_1313.py
│ │ │ ├── 0002_add_user.py
│ │ │ ├── 0011_auto_20210105_1158.py
│ │ │ ├── 0008_binarymetricsresult.py
│ │ │ ├── 0001_initial.py
│ │ │ ├── 0006_microbenchmarkresult.py
│ │ │ └── 0010_auto_20201121_2224.py
│ │ ├── serializers
│ │ │ ├── __init__.py
│ │ │ ├── rest
│ │ │ │ ├── __init__.py
│ │ │ │ ├── metadata
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── JenkinsMetadataSerializer.py
│ │ │ │ │ ├── NoisePageMetadataSerializer.py
│ │ │ │ │ ├── GithubMetadataSerializer.py
│ │ │ │ │ ├── EnvironmentMetadataSerializer.py
│ │ │ │ │ └── MetadataSerializer.py
│ │ │ │ ├── metrics
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── IncrementalMetricsSerializer.py
│ │ │ │ │ ├── MicrobenchmarkMetricsSerializer.py
│ │ │ │ │ ├── MemoryMetricsSerializer.py
│ │ │ │ │ ├── LatencyMetricsSerializer.py
│ │ │ │ │ └── OLTPBenchMetricsSerializer.py
│ │ │ │ ├── parameters
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── TransactionWeightSerializer.py
│ │ │ │ │ ├── MicrobenchmarkParametersSerializer.py
│ │ │ │ │ └── OLTPBenchParametersSerializer.py
│ │ │ │ ├── ArtifactStatsSerializer.py
│ │ │ │ ├── OLTPBenchSerializer.py
│ │ │ │ └── MicrobenchmarkSerializer.py
│ │ │ ├── database
│ │ │ │ ├── __init__.py
│ │ │ │ ├── ArtifactStatsResultSerializer.py
│ │ │ │ ├── MicrobenchmarkResultSerializer.py
│ │ │ │ └── OLTPBenchResultSerializer.py
│ │ │ └── fields
│ │ │ │ └── UnixEpochDatetimeField.py
│ │ ├── github_integration
│ │ │ ├── __init__.py
│ │ │ └── how_to_create_github_bot.md
│ │ ├── apps.py
│ │ ├── views
│ │ │ ├── __init__.py
│ │ │ ├── health.py
│ │ │ ├── oltpbench.py
│ │ │ ├── microbenchmark.py
│ │ │ └── artifact_stats.py
│ │ ├── urls.py
│ │ └── constants.py
│ ├── settings
│ │ ├── __init__.py
│ │ ├── utils.py
│ │ ├── local.py
│ │ ├── staging.py
│ │ ├── testing.py
│ │ └── production.py
│ ├── asgi.py
│ ├── wsgi.py
│ └── urls.py
├── .coveragerc
├── openapi
│ └── .gitignore
├── setup.cfg
├── .dockerignore
├── Dockerfile
├── manage.py
├── requirements.txt
├── .gitignore
└── README.md
├── timescaledb
├── requirements.txt
├── test_data
│ ├── noop
│ │ ├── oltpbench.expconfig
│ │ └── oltpbench.summary
│ ├── tatp
│ │ ├── oltpbench.summary
│ │ └── oltpbench.expconfig
│ └── smallbank
│ │ ├── oltpbench.summary
│ │ └── oltpbench.expconfig
├── smudge_timestamps.py
└── update_latency.py
├── README.md
├── docs
├── oltpbench_module_view.png
├── pss_full_module_view.png
├── artifact_stats_module_view.png
├── microbenchmark_module_view.png
├── oltpbench_allocation_view.png
├── sqlancer_allocation_view.png
├── full_feature_allocation_view.png
├── performance_cop_module_view.png
├── performance_cop_process_flow.png
├── artifact_stats_allocation_view.png
├── microbenchmark_allocation_view.png
└── performance_cop_allocation_view.png
├── .gitignore
├── deployments
├── kubernetes
│ ├── namespaces.yml
│ ├── performance
│ │ ├── openapi
│ │ │ ├── config-map.yml
│ │ │ ├── service.yml
│ │ │ └── deployment.yml
│ │ ├── timescaledb
│ │ │ ├── postgres-service.yml
│ │ │ ├── postgres-config.yml
│ │ │ ├── persistent-volume-claim.yml
│ │ │ ├── persistent-volume.yml
│ │ │ ├── service.yml
│ │ │ ├── postgres-stateful.yml
│ │ │ ├── deployment_backup.yml
│ │ │ ├── deployment.yml
│ │ │ └── timescaledb.yml
│ │ ├── grafana
│ │ │ ├── persistent-volume-claim.yml
│ │ │ ├── persistent-volume.yml
│ │ │ ├── service.yml
│ │ │ └── deployment.yml
│ │ └── performance-storage-service
│ │ │ ├── service.yml
│ │ │ ├── deployment.yml
│ │ │ └── migration-job.yml
│ └── monitoring
│ │ ├── grafana
│ │ ├── persistent-volume-claim.yml
│ │ ├── persistent-volume.yml
│ │ ├── service.yml
│ │ └── deployment.yml
│ │ ├── prometheus
│ │ ├── persistent-volume-claim.yml
│ │ ├── persistent-volume.yml
│ │ ├── cluster-role-binding.yml
│ │ ├── service.yml
│ │ ├── cluster-role.yml
│ │ └── deployment.yml
│ │ ├── blackbox_exporter
│ │ ├── service.yml
│ │ ├── config-map.yml
│ │ └── deployment.yml
│ │ └── postgres_exporter
│ │ ├── service.yml
│ │ └── deployment.yml
├── roles
│ ├── config_bash
│ │ ├── tasks
│ │ │ └── main.yml
│ │ └── files
│ │ │ └── profile
│ ├── install_k8s
│ │ ├── files
│ │ │ └── daemon.json
│ │ └── tasks
│ │ │ ├── config_user.yml
│ │ │ ├── init_cluster.yml
│ │ │ ├── snap.yml
│ │ │ ├── install.yml
│ │ │ └── main.yml
│ ├── install_docker
│ │ └── tasks
│ │ │ └── main.yml
│ ├── install_basics
│ │ └── tasks
│ │ │ └── main.yml
│ ├── config_ssl
│ │ └── tasks
│ │ │ ├── main.yml
│ │ │ └── install_ssl.yml
│ ├── config_openresty
│ │ ├── tasks
│ │ │ ├── update_conf.yml
│ │ │ └── main.yml
│ │ └── templates
│ │ │ ├── external.nginx
│ │ │ └── k8s_master.nginx
│ ├── install_openresty
│ │ └── tasks
│ │ │ └── main.yml
│ └── install_python3.7
│ │ └── tasks
│ │ └── main.yml
├── playbooks
│ ├── server-init.yml
│ ├── update-proxy.yml
│ ├── blackbox-exporter-deployment.yml
│ ├── postgres-exporter-deployment.yml
│ ├── prometheus-deployment.yml
│ ├── upload-scripts.yml
│ ├── monitoring-grafana-deployment.yml
│ ├── create-namespaces.yml
│ ├── pss-deployment.yml
│ └── openapi-deployment.yml
├── ansible.cfg
└── scripts
│ ├── make_secrets_performance.sh
│ └── make_secrets_monitoring.sh
├── LICENSE
└── grafana
└── README.md
/performance-storage-service/pss_project/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/timescaledb/requirements.txt:
--------------------------------------------------------------------------------
1 | psycopg2==2.8.4
2 | wget==3.2
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/settings/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/models/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/views/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/serializers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/github_integration/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metadata/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metrics/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/parameters/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/database/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/serializers/rest/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metadata/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metrics/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/parameters/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/database/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/github_integration/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/serializers/database/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/serializers/fields/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # noisepage-test
2 | DBMS Performance & Correctness Testing Framework
3 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metadata/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metrics/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/parameters/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/oltpbench_module_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/oltpbench_module_view.png
--------------------------------------------------------------------------------
/docs/pss_full_module_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/pss_full_module_view.png
--------------------------------------------------------------------------------
/docs/artifact_stats_module_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/artifact_stats_module_view.png
--------------------------------------------------------------------------------
/docs/microbenchmark_module_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/microbenchmark_module_view.png
--------------------------------------------------------------------------------
/docs/oltpbench_allocation_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/oltpbench_allocation_view.png
--------------------------------------------------------------------------------
/docs/sqlancer_allocation_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/sqlancer_allocation_view.png
--------------------------------------------------------------------------------
/performance-storage-service/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = True
3 | omit =
4 |     env/*
5 |     */__init__.py
6 |     */tests/*
--------------------------------------------------------------------------------
/performance-storage-service/openapi/.gitignore:
--------------------------------------------------------------------------------
1 | # ignore the rendered openapi.yml
2 | testing/
3 | staging/
4 | production/
5 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # python3 pycache
2 | **/__pycache__
3 |
4 | # python virtualenv
5 | **/env
6 |
7 | ## database configs
8 | **/db.sqlite3
--------------------------------------------------------------------------------
/docs/full_feature_allocation_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/full_feature_allocation_view.png
--------------------------------------------------------------------------------
/docs/performance_cop_module_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/performance_cop_module_view.png
--------------------------------------------------------------------------------
/docs/performance_cop_process_flow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/performance_cop_process_flow.png
--------------------------------------------------------------------------------
/docs/artifact_stats_allocation_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/artifact_stats_allocation_view.png
--------------------------------------------------------------------------------
/docs/microbenchmark_allocation_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/microbenchmark_allocation_view.png
--------------------------------------------------------------------------------
/docs/performance_cop_allocation_view.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-db/noisepage-stats/HEAD/docs/performance_cop_allocation_view.png
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 |
4 | class ApiConfig(AppConfig):
5 |     name = 'pss_project.api'
6 |
--------------------------------------------------------------------------------
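
For `ApiConfig` to load, the app has to be registered in whichever settings module is active. The settings files themselves are not reproduced in this listing, so the surrounding entries below are assumptions; only the `pss_project.api` registration is implied by the code above:

```python
# Sketch of the relevant INSTALLED_APPS entry (the settings files are not
# shown in this listing; the neighboring apps are assumptions inferred from
# the rest of the repo).
INSTALLED_APPS = [
    'django.contrib.contenttypes',
    'django.contrib.auth',
    'rest_framework',                   # DRF serializers/viewsets under api/
    'django_prometheus',                # /metrics routes in pss_project/urls.py
    'pss_project.api.apps.ApiConfig',   # the AppConfig defined above
]
```
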
/performance-storage-service/pss_project/api/models/database/__init__.py:
--------------------------------------------------------------------------------
1 | from .MicrobenchmarkResult import MicrobenchmarkResult
2 | from .OLTPBenchResult import OLTPBenchResult
3 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/manual/Dockerfile:
--------------------------------------------------------------------------------
1 | # Used for testing prometheus locally
2 | FROM prom/prometheus
3 | COPY ./local-prometheus.yml /etc/prometheus/prometheus.yml
--------------------------------------------------------------------------------
/deployments/kubernetes/namespaces.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: v1
3 | kind: Namespace
4 | metadata:
5 |   name: performance
6 |
7 | ---
8 | apiVersion: v1
9 | kind: Namespace
10 | metadata:
11 |   name: monitoring
--------------------------------------------------------------------------------
/deployments/roles/config_bash/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: copy config files for bash
4 |   copy:
5 |     src: "{{ item }}"
6 |     dest: "./.{{ item }}"
7 |   loop:
8 |     - bashrc
9 |     - profile
10 |
11 |
12 |
--------------------------------------------------------------------------------
/deployments/playbooks/server-init.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: init server
3 |   hosts: "{{ env }}"
4 |   roles:
5 |     - config_bash
6 |     - install_basics
7 |     - install_docker
8 |     - install_python3.7
9 |     - install_openresty
10 |     - install_k8s
11 |
--------------------------------------------------------------------------------
/deployments/roles/install_k8s/files/daemon.json:
--------------------------------------------------------------------------------
1 | {
2 |   "exec-opts": [
3 |     "native.cgroupdriver=systemd"
4 |   ],
5 |   "log-driver": "json-file",
6 |   "log-opts": {
7 |     "max-size": "100m"
8 |   },
9 |   "storage-driver": "overlay2"
10 | }
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/openapi/config-map.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: v1
3 | kind: ConfigMap
4 | metadata:
5 |   name: openapi-conf-{{ env }}
6 |   labels:
7 |     app: openapi-conf
8 |   namespace: performance
9 | data:
10 |   api.yaml: |
11 |     {{ openapi_file_content }}
12 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/views/__init__.py:
--------------------------------------------------------------------------------
1 | from .health import HealthViewSet
2 | from .oltpbench import OLTPBenchViewSet
3 | from .microbenchmark import MicrobenchmarkViewSet
4 | from .git_events import GitEventsViewSet
5 | from .artifact_stats import ArtifactStatsViewSet
6 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metadata/NoisePageMetadata.py:
--------------------------------------------------------------------------------
1 | class NoisePageMetadata(object):
2 |     """ This class is the model of the NoisePage metadata as it is represented by the HTTP API """
3 |
4 |     def __init__(self, db_version):
5 |         self.db_version = db_version
6 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/parameters/TransactionWeight.py:
--------------------------------------------------------------------------------
1 | class TransactionWeight(object):
2 |     """ This class defines a transaction weight specification for OLTPBench parameters """
3 |
4 |     def __init__(self, name, weight):
5 |         self.name = name
6 |         self.weight = weight
7 |
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/timescaledb/postgres-service.yml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 |   name: postgres
5 |   namespace: performance
6 |   labels:
7 |     app: postgres
8 | spec:
9 |   ports:
10 |     - port: 5432
11 |       name: postgres
12 |   clusterIP: None
13 |   selector:
14 |     app: postgres
--------------------------------------------------------------------------------
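
`clusterIP: None` makes this a headless service, so in-cluster clients resolve the pod directly through the service DNS name. Below is a connection sketch using psycopg2 (pinned in `timescaledb/requirements.txt`) with the demo credentials from `postgres-config.yml`; the script itself is illustrative, not part of this repo:

```python
import psycopg2

# Inside the cluster, the headless service resolves as
# <service>.<namespace>.svc.cluster.local
conn = psycopg2.connect(
    host='postgres.performance.svc.cluster.local',
    port=5432,
    dbname='demopostgresdb',
    user='demopostgresadmin',
    password='demopostgrespwd',
)
with conn.cursor() as cur:
    cur.execute('SELECT version()')
    print(cur.fetchone())
conn.close()
```
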
/deployments/kubernetes/performance/grafana/persistent-volume-claim.yml:
--------------------------------------------------------------------------------
1 | kind: PersistentVolumeClaim
2 | apiVersion: v1
3 | metadata:
4 |   namespace: performance
5 |   name: grafana-pv-claim
6 | spec:
7 |   storageClassName: manual
8 |   accessModes:
9 |     - ReadWriteMany
10 |   resources:
11 |     requests:
12 |       storage: 10Gi
--------------------------------------------------------------------------------
/deployments/ansible.cfg:
--------------------------------------------------------------------------------
1 | [defaults]
2 | gathering = explicit
3 | retry_files_enabled = no
4 | host_key_checking = no
5 | deprecation_warnings = no
6 | roles_path = ./roles
7 |
8 | [ssh_connection]
9 | ssh_args = -o ControlMaster=auto -o ControlPersist=yes
10 | control_path = %(directory)s/ssh-%%r@%%n:%%p
11 | control_path_dir = /tmp
12 |
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/timescaledb/postgres-config.yml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: ConfigMap
3 | metadata:
4 |   name: postgres-config-demo
5 |   namespace: performance
6 |   labels:
7 |     app: postgres
8 | data:
9 |   POSTGRES_DB: demopostgresdb
10 |   POSTGRES_USER: demopostgresadmin
11 |   POSTGRES_PASSWORD: demopostgrespwd
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/grafana/persistent-volume.yml:
--------------------------------------------------------------------------------
1 | kind: PersistentVolume
2 | apiVersion: v1
3 | metadata:
4 |   namespace: performance
5 |   name: grafana-pv
6 | spec:
7 |   storageClassName: manual
8 |   capacity:
9 |     storage: 10Gi
10 |   accessModes:
11 |     - ReadWriteMany
12 |   hostPath:
13 |     path: "/mnt/grafanaPV"
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/timescaledb/persistent-volume-claim.yml:
--------------------------------------------------------------------------------
1 | kind: PersistentVolumeClaim
2 | apiVersion: v1
3 | metadata:
4 |   namespace: performance
5 |   name: timescaledb-pv-claim
6 | spec:
7 |   storageClassName: manual
8 |   accessModes:
9 |     - ReadWriteMany
10 |   resources:
11 |     requests:
12 |       storage: 100Gi
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/grafana/persistent-volume-claim.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | kind: PersistentVolumeClaim
4 | apiVersion: v1
5 | metadata:
6 |   namespace: monitoring
7 |   name: monitoring-grafana-pv-claim
8 | spec:
9 |   storageClassName: manual
10 |   accessModes:
11 |     - ReadWriteMany
12 |   resources:
13 |     requests:
14 |       storage: 10Gi
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/timescaledb/persistent-volume.yml:
--------------------------------------------------------------------------------
1 | kind: PersistentVolume
2 | apiVersion: v1
3 | metadata:
4 |   name: timescaledb-pv
5 |   namespace: performance
6 | spec:
7 |   storageClassName: manual
8 |   capacity:
9 |     storage: 100Gi
10 |   accessModes:
11 |     - ReadWriteMany
12 |   hostPath:
13 |     path: "/mnt/timescaledbPV"
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/utils.py:
--------------------------------------------------------------------------------
1 | def to_dict(obj):
2 |     """ Recursively turn an object into a Python dict """
3 |     data = {}
4 |     for key, value in obj.__dict__.items():
5 |         try:
6 |             data[key] = to_dict(value)
7 |         except AttributeError:
8 |             data[key] = value
9 |     return data
10 |
--------------------------------------------------------------------------------
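
A quick illustration of how `to_dict` recurses: any attribute value that has its own `__dict__` is converted in turn, while plain values raise `AttributeError` inside the recursive call and are copied through unchanged. The stand-in classes here are illustrative, not models from this repo:

```python
class Inner:
    def __init__(self):
        self.avg = 9.5


class Outer:
    def __init__(self):
        self.name = 'tpcc'       # plain value: copied as-is
        self.latency = Inner()   # nested object: converted recursively


print(to_dict(Outer()))  # {'name': 'tpcc', 'latency': {'avg': 9.5}}
```
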
/deployments/kubernetes/monitoring/prometheus/persistent-volume-claim.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | kind: PersistentVolumeClaim
4 | apiVersion: v1
5 | metadata:
6 |   namespace: monitoring
7 |   name: monitoring-prometheus-pv-claim
8 | spec:
9 |   storageClassName: manual
10 |   accessModes:
11 |     - ReadWriteMany
12 |   resources:
13 |     requests:
14 |       storage: 100Gi
--------------------------------------------------------------------------------
/deployments/roles/install_docker/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: update apt repo cache and install docker
4 |   become: yes
5 |   apt:
6 |     name: docker.io
7 |     update_cache: yes
8 |
9 | - name: enable and start docker daemon service
10 |   become: yes
11 |   systemd:
12 |     name: docker
13 |     state: started
14 |     enabled: yes
15 |
16 |
17 |
--------------------------------------------------------------------------------
/performance-storage-service/setup.cfg:
--------------------------------------------------------------------------------
1 | [pep8]
2 | max-line-length = 119
3 | exclude =
4 |     env/*,
5 |     */migrations/*
6 |
7 | [flake8]
8 | max-line-length = 119
9 | exclude =
10 |     env/*,
11 |     */migrations/*,
12 |     *__init__.py
13 |
14 | [yapf]
15 | max-line-length = 119
16 | exclude =
17 |     env/*,
18 |     */migrations/*,
19 |     *__init__.py
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/parameters/MicrobenchmarkParameters.py:
--------------------------------------------------------------------------------
1 | class MicrobenchmarkParameters(object):
2 |     """ This class is the model of the microbenchmark parameters as communicated through the HTTP API """
3 |
4 |     def __init__(self, threads, min_runtime):
5 |         self.threads = threads
6 |         self.min_runtime = min_runtime
7 |
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/grafana/persistent-volume.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | kind: PersistentVolume
4 | apiVersion: v1
5 | metadata:
6 |   namespace: monitoring
7 |   name: monitoring-grafana-pv
8 | spec:
9 |   storageClassName: manual
10 |   capacity:
11 |     storage: 10Gi
12 |   accessModes:
13 |     - ReadWriteMany
14 |   hostPath:
15 |     path: "/mnt/monitoring-grafanaPV"
--------------------------------------------------------------------------------
/deployments/roles/install_k8s/tasks/config_user.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: create the k8s user
4 |   user: name="{{ k8s_user }}" append=yes state=present createhome=yes shell=/bin/bash
5 |
6 | - name: allow the k8s user to have passwordless sudo
7 |   lineinfile:
8 |     dest: /etc/sudoers
9 |     line: "{{ k8s_user }} ALL=(ALL) NOPASSWD: ALL"
10 |     validate: "visudo -cf %s"
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/prometheus/persistent-volume.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | kind: PersistentVolume
4 | apiVersion: v1
5 | metadata:
6 |   namespace: monitoring
7 |   name: monitoring-prometheus-pv
8 | spec:
9 |   storageClassName: manual
10 |   capacity:
11 |     storage: 100Gi
12 |   accessModes:
13 |     - ReadWriteMany
14 |   hostPath:
15 |     path: "/mnt/monitoring-prometheusPV"
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/grafana/service.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | kind: Service
4 | apiVersion: v1
5 | metadata:
6 |   name: grafana-service
7 |   namespace: monitoring
8 |   labels:
9 |     app: grafana
10 | spec:
11 |   type: NodePort
12 |   selector:
13 |     app: grafana
14 |   ports:
15 |     - protocol: TCP
16 |       port: 3000
17 |       nodePort: 32004
18 |   externalTrafficPolicy: Local
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/prometheus/cluster-role-binding.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | apiVersion: rbac.authorization.k8s.io/v1beta1
4 | kind: ClusterRoleBinding
5 | metadata:
6 |   name: prometheus
7 | roleRef:
8 |   apiGroup: rbac.authorization.k8s.io
9 |   kind: ClusterRole
10 |   name: prometheus
11 | subjects:
12 |   - kind: ServiceAccount
13 |     name: default
14 |     namespace: monitoring
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metadata/JenkinsMetadata.py:
--------------------------------------------------------------------------------
1 | class JenkinsMetadata(object):
2 |     """ This class is the model of the Jenkins data relating to the build that ran the tests or collected the metrics.
3 |     This class is how the model is represented in the HTTP API """
4 |
5 |     def __init__(self, jenkins_job_id):
6 |         self.jenkins_job_id = jenkins_job_id
7 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0005_merge_20200915_0112.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.6 on 2020-09-15 05:12
2 |
3 | from django.db import migrations
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 |     dependencies = [
9 |         ('api', '0004_auto_20200731_2230'),
10 |         ('api', '0004_auto_20200811_1313'),
11 |     ]
12 |
13 |     operations = [
14 |     ]
15 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metadata/GithubMetadataFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.metadata.GithubMetadata import GithubMetadata
3 |
4 |
5 | class GithubMetadataFactory(Factory):
6 |     class Meta:
7 |         model = GithubMetadata
8 |
9 |     git_commit_id = Faker('sha1')
10 |     git_branch = Faker('word')
11 |
--------------------------------------------------------------------------------
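
Usage sketch: calling the factory class builds a `GithubMetadata` instance populated with fake but well-formed values, and individual fields can still be pinned per test (the printed values are illustrative):

```python
from pss_project.api.tests.factories.rest.metadata.GithubMetadataFactory import GithubMetadataFactory

meta = GithubMetadataFactory()
print(meta.git_commit_id)   # e.g. a 40-character hex sha1
print(meta.git_branch)      # e.g. 'network'

pinned = GithubMetadataFactory(git_branch='main')   # override one field
```
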
/performance-storage-service/pss_project/api/tests/factories/rest/metadata/JenkinsMetadataFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.metadata.JenkinsMetadata import JenkinsMetadata
3 |
4 |
5 | class JenkinsMetadataFactory(Factory):
6 |     class Meta:
7 |         model = JenkinsMetadata
8 |
9 |     jenkins_job_id = Faker('pystr_format', string_format='###')
10 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metadata/NoisePageMetadataFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.metadata.NoisePageMetadata import NoisePageMetadata
3 |
4 |
5 | class NoisePageMetadataFactory(Factory):
6 |     class Meta:
7 |         model = NoisePageMetadata
8 |
9 |     db_version = Faker('pystr_format', string_format='##.#')
10 |
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/prometheus/service.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | apiVersion: v1
4 | kind: Service
5 | metadata:
6 |   name: prometheus-service
7 |   namespace: monitoring
8 |   annotations:
9 |     prometheus.io/scrape: "true"
10 |     prometheus.io/port: "9090"
11 | spec:
12 |   selector:
13 |     app: prometheus-server
14 |   type: NodePort
15 |   ports:
16 |     - port: 8080
17 |       targetPort: 9090
18 |       nodePort: 30000
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/parameters/TransactionWeightFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.parameters.TransactionWeight import TransactionWeight
3 |
4 |
5 | class TransactionWeightFactory(Factory):
6 |     class Meta:
7 |         model = TransactionWeight
8 |
9 |     name = Faker('word')
10 |     weight = Faker('random_int', min=1, max=100)
11 |
--------------------------------------------------------------------------------
/deployments/roles/install_basics/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: update apt repo cache and install basic linux tools
4 |   become: yes
5 |   apt:
6 |     pkg:
7 |       - vim
8 |       - curl
9 |       - htop
10 |       - wget
11 |       - net-tools
12 |       - iptables
13 |       - rsync
14 |     update_cache: yes
15 |
16 | - name: install acme.sh
17 |   become: yes
18 |   shell: |
19 |     curl https://get.acme.sh | sh
20 |
21 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metadata/JenkinsMetadataSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, CharField
2 | from pss_project.api.models.rest.metadata.JenkinsMetadata import JenkinsMetadata
3 |
4 |
5 | class JenkinsMetadataSerializer(Serializer):
6 |     # Fields
7 |     jenkins_job_id = CharField()
8 |
9 |     def create(self, validated_data):
10 |         return JenkinsMetadata(**validated_data)
11 |
--------------------------------------------------------------------------------
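
A round-trip sketch of how these REST serializers are driven, following the standard DRF flow (the payload is made up):

```python
serializer = JenkinsMetadataSerializer(data={'jenkins_job_id': '42'})
if serializer.is_valid():
    jenkins_metadata = serializer.save()    # dispatches to create() above
    print(jenkins_metadata.jenkins_job_id)  # '42'
else:
    print(serializer.errors)
```
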
/performance-storage-service/pss_project/api/serializers/rest/metadata/NoisePageMetadataSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, CharField
2 | from pss_project.api.models.rest.metadata.NoisePageMetadata import NoisePageMetadata
3 |
4 |
5 | class NoisePageMetadataSerializer(Serializer):
6 |     # Fields
7 |     db_version = CharField()
8 |
9 |     def create(self, validated_data):
10 |         return NoisePageMetadata(**validated_data)
11 |
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/blackbox_exporter/service.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | kind: Service
4 | apiVersion: v1
5 | metadata:
6 |   name: prometheus-blackbox-exporter-service
7 |   labels:
8 |     app: prometheus-blackbox-exporter
9 |   namespace: monitoring
10 | spec:
11 |   selector:
12 |     app: prometheus-blackbox-exporter
13 |   type: NodePort
14 |   ports:
15 |     - port: 9115
16 |       nodePort: 32005
17 |       protocol: TCP
18 |   externalTrafficPolicy: Local
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/postgres_exporter/service.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | kind: Service
4 | apiVersion: v1
5 | metadata:
6 |   name: prometheus-postgres-exporter-service
7 |   labels:
8 |     app: prometheus-postgres-exporter
9 |   namespace: monitoring
10 | spec:
11 |   selector:
12 |     app: prometheus-postgres-exporter
13 |   type: NodePort
14 |   ports:
15 |     - port: 9187
16 |       nodePort: 32006
17 |       protocol: TCP
18 |   externalTrafficPolicy: Local
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/grafana/service.yml:
--------------------------------------------------------------------------------
1 | ---
2 | kind: Service
3 | apiVersion: v1
4 | metadata:
5 |   name: "grafana-service-{{ env }}"
6 |   namespace: performance
7 |   labels:
8 |     app: grafana
9 |     env: "{{ env }}"
10 | spec:
11 |   type: NodePort
12 |   selector:
13 |     app: grafana
14 |     env: "{{ env }}"
15 |   ports:
16 |     - protocol: TCP
17 |       port: 3000
18 |       nodePort: {{ service_port | int }}
19 |   externalTrafficPolicy: Local
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/openapi/service.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | kind: Service
4 | apiVersion: v1
5 | metadata:
6 |   name: "openapi-service-{{ env }}"
7 |   labels:
8 |     app: openapi
9 |     env: "{{ env }}"
10 |   namespace: performance
11 | spec:
12 |   selector:
13 |     app: openapi
14 |     env: "{{ env }}"
15 |   type: NodePort
16 |   ports:
17 |     - port: 8080
18 |       nodePort: {{ service_port | int }}
19 |       protocol: TCP
20 |   externalTrafficPolicy: Local
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/timescaledb/service.yml:
--------------------------------------------------------------------------------
1 | ---
2 | kind: Service
3 | apiVersion: v1
4 | metadata:
5 |   name: "timescaledb-service-{{ env }}"
6 |   namespace: performance
7 |   labels:
8 |     app: timescaledb
9 |     env: "{{ env }}"
10 | spec:
11 |   type: NodePort
12 |   selector:
13 |     app: timescaledb
14 |     env: "{{ env }}"
15 |   ports:
16 |     - protocol: TCP
17 |       port: 5432
18 |       nodePort: {{ service_port | int }}
19 |   externalTrafficPolicy: Local
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metadata/GithubMetadataSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, CharField
2 | from pss_project.api.models.rest.metadata.GithubMetadata import GithubMetadata
3 |
4 |
5 | class GithubMetadataSerializer(Serializer):
6 |     # Fields
7 |     git_commit_id = CharField()
8 |     git_branch = CharField()
9 |
10 |     def create(self, validated_data):
11 |         return GithubMetadata(**validated_data)
12 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/parameters/TransactionWeightSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, CharField, IntegerField
2 | from pss_project.api.models.rest.parameters.TransactionWeight import TransactionWeight
3 |
4 |
5 | class TransactionWeightSerializer(Serializer):
6 |     # Fields
7 |     name = CharField()
8 |     weight = IntegerField()
9 |
10 |     def create(self, validated_data):
11 |         return TransactionWeight(**validated_data)
12 |
--------------------------------------------------------------------------------
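
Since an OLTPBench workload specifies one weight per transaction type, this serializer is naturally applied to a list, which DRF handles with `many=True` (the payload below is made up):

```python
payload = [
    {'name': 'NewOrder', 'weight': 45},
    {'name': 'Payment', 'weight': 43},
]
serializer = TransactionWeightSerializer(data=payload, many=True)
assert serializer.is_valid()
weights = serializer.save()   # a list of TransactionWeight instances
```
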
/performance-storage-service/pss_project/api/tests/factories/rest/parameters/MicrobenchmarkParametersFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.parameters.MicrobenchmarkParameters import (
3 | MicrobenchmarkParameters)
4 |
5 |
6 | class MicrobenchmarkParametersFactory(Factory):
7 |     class Meta:
8 |         model = MicrobenchmarkParameters
9 |
10 |     threads = Faker('random_int', min=1, max=16)
11 |     min_runtime = Faker('random_int', min=30, step=30)
12 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/asgi.py:
--------------------------------------------------------------------------------
1 | """
2 | ASGI config for pss_project project.
3 |
4 | It exposes the ASGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.asgi import get_asgi_application
13 |
14 | # Select the per-environment settings module, mirroring wsgi.py
15 | env = os.environ.get("ENV", "local")
16 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pss_project.settings.{}'.format(env))
17 |
18 | application = get_asgi_application()
19 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/settings/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | from django.core.exceptions import ImproperlyConfigured
3 |
4 |
5 | def get_environ_value(env_variable, default_value=None):
6 |     value = os.environ.get(env_variable, default_value)
7 |     if value:
8 |         return value
9 |     else:
10 |         error_msg = 'Set the {} environment variable'.format(env_variable)
11 |         if os.environ.get("ENV", "local") != 'local':
12 |             raise ImproperlyConfigured(error_msg)
13 |
--------------------------------------------------------------------------------
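
Behavior sketch: the helper returns the value (or the default) when one is available; when nothing is set it raises only outside the local environment, so local development silently degrades to `None`. The variable names here are hypothetical:

```python
import os

os.environ['PSS_EXAMPLE_HOST'] = 'localhost'     # hypothetical variable
get_environ_value('PSS_EXAMPLE_HOST')            # -> 'localhost'
get_environ_value('PSS_MISSING', 'fallback')     # -> 'fallback'

# With ENV unset (treated as 'local'), a missing variable returns None;
# with ENV=staging or ENV=production it raises ImproperlyConfigured.
get_environ_value('PSS_MISSING')                 # -> None locally
```
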
/deployments/playbooks/update-proxy.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: update proxy
4 |   hosts: "{{ env }}"
5 |   vars:
6 |     hostname: "{{ inventory_hostname }}"
7 |     dir_openresty: /etc/openresty
8 |     dir_openresty_ssl: "{{ dir_openresty }}/ssl"
9 |     dir_openresty_conf: "{{ dir_openresty }}/conf.d"
10 |     dir_openresty_internal_log: "/var/log/openresty/{{ hostname }}"
11 |     dir_openresty_external_log: "/var/log/openresty/{{ noisepage_hostname }}"
12 |   roles:
13 |     - config_openresty
14 |     - config_ssl
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0007_remove_microbenchmarkresult_query_mode.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.6 on 2020-10-12 00:23
2 |
3 | from django.db import migrations
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 |     dependencies = [
9 |         ('api', '0006_microbenchmarkresult'),
10 |     ]
11 |
12 |     operations = [
13 |         migrations.RemoveField(
14 |             model_name='microbenchmarkresult',
15 |             name='query_mode',
16 |         ),
17 |     ]
18 |
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/performance-storage-service/service.yml:
--------------------------------------------------------------------------------
1 | ---
2 | kind: Service
3 | apiVersion: v1
4 | metadata:
5 |   name: "performance-service-{{ env }}"
6 |   annotations:
7 |     prometheus.io/scrape: 'true'
8 |   namespace: performance
9 |   labels:
10 |     app: pss
11 |     env: "{{ env }}"
12 | spec:
13 |   type: NodePort
14 |   selector:
15 |     app: pss
16 |     env: "{{ env }}"
17 |   ports:
18 |     - protocol: TCP
19 |       port: 8080
20 |       nodePort: {{ service_port | int }}
21 |   externalTrafficPolicy: Local
22 |
--------------------------------------------------------------------------------
/deployments/roles/config_ssl/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: ensure the SSL folder exists
4 |   become: yes
5 |   file:
6 |     path: "{{ dir_openresty_ssl }}"
7 |     state: directory
8 |     owner: root
9 |     mode: 0755
10 |
11 | - name: install the SSL for "{{ hostname }}"
12 |   import_tasks: install_ssl.yml
13 |   vars:
14 |     domain: "{{ hostname }}"
15 |
16 | - name: install the SSL for "{{ noisepage_hostname }}"
17 |   import_tasks: install_ssl.yml
18 |   vars:
19 |     domain: "{{ noisepage_hostname }}"
20 |   when: env == "production"
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/parameters/MicrobenchmarkParametersSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, IntegerField
2 | from pss_project.api.models.rest.parameters.MicrobenchmarkParameters import MicrobenchmarkParameters
3 |
4 |
5 | class MicrobenchmarkParametersSerializer(Serializer):
6 |
7 |     # Fields
8 |     threads = IntegerField()
9 |     min_runtime = IntegerField()
10 |
11 |     def create(self, validated_data):
12 |         return MicrobenchmarkParameters(**validated_data)
13 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for pss_project project.
3 |
4 | It exposes the WSGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.wsgi import get_wsgi_application
13 |
14 | env = os.environ.get("ENV", "local")
15 | os.environ['DJANGO_SETTINGS_MODULE'] = 'pss_project.settings.{}'.format(env)
16 |
17 | application = get_wsgi_application()
18 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/ArtifactStatsRest.py:
--------------------------------------------------------------------------------
1 | from pss_project.api.models.rest.BaseRest import BaseRest
2 |
3 |
4 | class ArtifactStatsRest(BaseRest):
5 |     """ This class is the model of the Artifact Stats data as it is communicated through the HTTP API """
6 |
7 |     def __init__(self, metadata, timestamp, metrics):
8 |         super().__init__(metadata, timestamp)
9 |         self.metrics = metrics
10 |
11 |     def convert_metrics_to_dict(self, metrics):
12 |         """ Override the base class method; artifact stats metrics are already a dict, so pass them through """
13 |         return metrics
14 |
--------------------------------------------------------------------------------
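
`BaseRest.py` appears in the tree above but its contents are not included in this listing. Given the `convert_metrics_to_dict` override and the `to_dict` helper in the same package, a plausible sketch of the base class (an assumption, not the repo's actual code) is:

```python
# Hypothetical sketch of the hook this subclass overrides.
from pss_project.api.models.rest.utils import to_dict


class BaseRest(object):
    def __init__(self, metadata, timestamp):
        self.metadata = metadata
        self.timestamp = timestamp

    def convert_metrics_to_dict(self, metrics):
        # Default behavior: metrics is a nested model object, so flatten it;
        # ArtifactStatsRest skips this because its metrics arrive as a dict.
        return to_dict(metrics)
```
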
/performance-storage-service/pss_project/api/models/rest/metadata/EnvironmentMetadata.py:
--------------------------------------------------------------------------------
1 | class EnvironmentMetadata(object):
2 |     """ This class is the model of the environment data relating to the system that the tests/metrics were collected
3 |     on. This class is how the model is represented in the HTTP API """
4 |
5 |     def __init__(self, os_version=None, cpu_number=None, cpu_socket=None, wal_device=None):
6 |         self.os_version = os_version
7 |         self.cpu_number = cpu_number
8 |         self.cpu_socket = cpu_socket
9 |         self.wal_device = wal_device
10 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metadata/EnvironmentMetadataFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.metadata.EnvironmentMetadata import EnvironmentMetadata
3 |
4 |
5 | class EnvironmentMetadataFactory(Factory):
6 |     class Meta:
7 |         model = EnvironmentMetadata
8 |
9 |     os_version = Faker('word')
10 |     cpu_number = Faker('random_int', min=1, max=16)
11 |     cpu_socket = Faker('word')
12 |     wal_device = Faker('random_element', elements=('RAM disk', 'HDD', 'SATA SSD', 'NVMe SSD'))
13 |
--------------------------------------------------------------------------------
/deployments/scripts/make_secrets_performance.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ENV=$1
3 | DIR_BASE="$(dirname $(realpath $0))"
4 | DIR_ENV="$DIR_BASE/$ENV"
5 | NAMESPACE="performance"
6 | SECRET_NAME="secrets-$ENV"
7 | HELP="Usage: ./make_secrets_performance.sh ENV"
8 |
9 | if [ $# -lt 1 ]; then
10 |     echo "$HELP"
11 |     exit 1
12 | fi
13 |
14 | if [ ! -d "$DIR_ENV" ]; then
15 |     echo "Error: secrets files for '$ENV' were not found."
16 |     exit 1
17 | fi
18 |
19 | kubectl delete secret $SECRET_NAME -n $NAMESPACE
20 | kubectl create secret generic $SECRET_NAME -n $NAMESPACE --from-file=$DIR_ENV
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metadata/GithubMetadata.py:
--------------------------------------------------------------------------------
1 | class GithubMetadata(object):
2 |     """ This class is the model of the github data relating to what version of the code the tests/metrics were
3 |     gathered from. This class is how the model is represented in the HTTP API
4 |     git_commit_id - the sha of the commit
5 |     git_branch - the branch that the metrics come from
6 |     """
7 |
8 |     def __init__(self, git_commit_id, git_branch):
9 |         self.git_commit_id = git_commit_id
10 |         self.git_branch = git_branch
11 |
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/blackbox_exporter/config-map.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | apiVersion: v1
4 | kind: ConfigMap
5 | metadata:
6 |   name: prometheus-blackbox-exporter-conf
7 |   labels:
8 |     app: prometheus-blackbox-exporter-conf
9 |   namespace: monitoring
10 | data:
11 |   blackbox.yaml: |
12 |     modules:
13 |       http_2xx:
14 |         http:
15 |           no_follow_redirects: false
16 |           preferred_ip_protocol: ip4
17 |           valid_http_versions:
18 |             - HTTP/1.1
19 |             - HTTP/2.0
20 |           valid_status_codes: []
21 |         prober: http
22 |         timeout: 5s
--------------------------------------------------------------------------------
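
The `http_2xx` module defined above is exercised through the blackbox exporter's standard `/probe` endpoint. For instance, from inside the cluster (the target URL is illustrative):

```python
import requests

# Ask the exporter to probe a target using the http_2xx module; the response
# is Prometheus-format metrics such as probe_success and probe_duration_seconds.
resp = requests.get(
    'http://prometheus-blackbox-exporter-service.monitoring:9115/probe',
    params={'module': 'http_2xx', 'target': 'https://example.com'},
)
print(resp.text)
```
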
/performance-storage-service/pss_project/api/models/rest/metrics/MemoryMetrics.py:
--------------------------------------------------------------------------------
1 | class MemoryItemSummary:
2 |     """ This class is the model of how summary-level memory info is represented in the HTTP API """
3 |
4 |     def __init__(self, avg=None):
5 |         if avg:
6 |             self.avg = avg
7 |
8 |
9 | class MemoryMetrics:
10 |     """ This class is the model of how memory metrics are represented in the HTTP API. Currently, it captures the
11 |     virtual memory size and the resident set size """
12 |
13 |     def __init__(self, rss, vms):
14 |         self.rss = rss
15 |         self.vms = vms
16 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metrics/LatencyMetrics.py:
--------------------------------------------------------------------------------
1 | class LatencyMetrics(object):
2 |     """ This class is the model of how latency metrics are represented in the HTTP API """
3 |
4 |     def __init__(self, l_25=None, l_75=None, l_90=None, l_95=None, l_99=None,
5 |                  avg=None, median=None, max=None, min=None):
6 |         self.l_25 = l_25
7 |         self.l_75 = l_75
8 |         self.l_90 = l_90
9 |         self.l_95 = l_95
10 |         self.l_99 = l_99
11 |         self.avg = avg
12 |         self.median = median
13 |         self.max = max
14 |         self.min = min
15 |
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/prometheus/cluster-role.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | apiVersion: rbac.authorization.k8s.io/v1beta1
4 | kind: ClusterRole
5 | metadata:
6 |   name: prometheus
7 | rules:
8 |   - apiGroups: [""]
9 |     resources:
10 |       - nodes
11 |       - nodes/proxy
12 |       - services
13 |       - endpoints
14 |       - pods
15 |     verbs:
16 |       - get
17 |       - list
18 |       - watch
19 |   - apiGroups: ["extensions"]
20 |     resources:
21 |       - ingresses
22 |     verbs:
23 |       - get
24 |       - list
25 |       - watch
26 |   - nonResourceURLs:
27 |       - /metrics
28 |     verbs:
29 |       - get
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0009_auto_20201109_0226.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.6 on 2020-11-09 07:26
2 |
3 | from django.db import migrations
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('api', '0008_binarymetricsresult'),
10 | ]
11 |
12 | operations = [
13 | migrations.RenameModel(
14 | old_name='BinaryMetricsResult',
15 | new_name='ArtifactStatsResult',
16 | ),
17 | migrations.AlterModelTable(
18 | name='artifactstatsresult',
19 | table='artifact_stats_results',
20 | ),
21 | ]
--------------------------------------------------------------------------------
/timescaledb/test_data/noop/oltpbench.expconfig:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8" standalone="no"?>
2 | <parameters>
3 |     <uploadCode>1</uploadCode>
4 |     <isolation>TRANSACTION_SERIALIZABLE</isolation>
5 |     <scalefactor>1.0</scalefactor>
6 |     <terminals>1</terminals>
7 |     <works>
8 |         <work>
9 |             <time>unlimited</time>
10 |             <rate>100</rate>
11 |         </work>
12 |     </works>
13 |     <transactiontypes>
14 |         <transactiontype>
15 |             <name>NoOp</name>
16 |         </transactiontype>
17 |     </transactiontypes>
18 | </parameters>
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/ArtifactStatsRestFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.ArtifactStatsRest import ArtifactStatsRest
3 | from pss_project.api.tests.factories.rest.metadata.MetadataFactory \
4 | import MetadataFactory
5 | from pss_project.api.tests.utils.utils import generate_dict_factory
6 |
7 |
8 | class ArtifactStatsRestFactory(Factory):
9 | class Meta:
10 | model = ArtifactStatsRest
11 | metadata = generate_dict_factory(MetadataFactory)()
12 | timestamp = Faker('date_time')
13 | metrics = Faker('pydict', value_types=[int, float, str, [], dict])
14 |
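The generate_dict_factory helper is defined in pss_project/api/tests/utils/utils.py, which is not reproduced in this listing. Judging only from how it is used here (wrapping a factory class and calling the result to obtain a plain dict), a minimal sketch of such a helper might look like the following; the actual implementation in utils.py may differ:

    from functools import partial
    from typing import Any, Dict

    from factory import Factory
    from factory.base import StubObject


    def generate_dict_factory(factory: Factory):
        """Wrap a factory so that calling the result yields plain (nested) dicts."""

        def stub_to_dict(stub: StubObject) -> Dict[str, Any]:
            data = dict(stub.__dict__)
            for key, value in data.items():
                if isinstance(value, StubObject):
                    data[key] = stub_to_dict(value)  # recurse into nested stubs
            return data

        def dict_factory(factory_class, **kwargs):
            return stub_to_dict(factory_class.stub(**kwargs))

        return partial(dict_factory, factory)

Using factory_boy's stub strategy generates attribute values without instantiating the model class, which is consistent with metadata ending up as a plain dict here.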
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metadata/EnvironmentMetadataSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, CharField, ChoiceField
2 | from pss_project.api.models.rest.metadata.EnvironmentMetadata import EnvironmentMetadata
3 | from pss_project.api.constants import WAL_DEVICE_CHOICES, NONE
4 |
5 |
6 | class EnvironmentMetadataSerializer(Serializer):
7 | # Fields
8 | os_version = CharField()
9 | cpu_number = CharField()
10 | cpu_socket = CharField()
11 | wal_device = ChoiceField(choices=WAL_DEVICE_CHOICES, default=NONE)
12 |
13 | def create(self, validated_data):
14 | return EnvironmentMetadata(**validated_data)
15 |
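Like any DRF Serializer, this one is driven through is_valid() before create() runs. A minimal usage sketch with invented field values:

    serializer = EnvironmentMetadataSerializer(data={
        'os_version': 'Ubuntu 20.04',  # hypothetical values for illustration
        'cpu_number': '32',
        'cpu_socket': '2',
        # wal_device falls back to the NONE default when omitted
    })
    if serializer.is_valid():
        metadata = serializer.save()  # invokes create(), returning an EnvironmentMetadata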
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/urls.py:
--------------------------------------------------------------------------------
1 | """pss_project URL Configuration
2 |
3 | The `urlpatterns` list routes URLs to views. For more information please see:
4 | https://docs.djangoproject.com/en/3.0/topics/http/urls/
5 | """
6 | from django.contrib import admin
7 | from django.urls import include, path
8 |
9 | urlpatterns = [
10 | # path('admin/', admin.site.urls),
11 | path('performance-results/', include('pss_project.api.urls')),
12 |
13 | # Prometheus is used for monitoring
14 | # https://incrudibles-k8s.db.pdl.cmu.edu/grafana/d/IVRURedMz/monitoring?orgId=1&refresh=30s
15 | path("", include("django_prometheus.urls"), name="django-prometheus"),
16 | ]
17 |
--------------------------------------------------------------------------------
/performance-storage-service/.dockerignore:
--------------------------------------------------------------------------------
1 | # Ansible
2 | ansible.cfg
3 | inventory
4 | playbooks/
5 |
6 | # Django
7 | src/{{ project_name }}/settings/local.py
8 |
9 | # Docker
10 | Dockerfile
11 |
12 | # General
13 | README.md
14 |
15 | # GitHub
16 | .git/
17 |
18 | # Jenkins
19 | Jenkinsfile
20 |
21 | # Mac
22 | .DS_Store
23 |
24 | # Python
25 | env
26 | __pycache__
27 |
28 | # Tests
29 | reports/
30 |
31 | # Unknown (copied from template)
32 | *.pyc
33 | *.pyo
34 | *.mo
35 | *.db
36 | *.css.map
37 | *.egg-info
38 | *.sql.gz
39 | .cache
40 | .project
41 | .idea
42 | .pydevproject
43 | .idea/workspace.xml
44 | .sass-cache
45 | dist
46 | docs
47 | logs
48 | web/media
49 | web/static/CACHE
50 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0004_auto_20200731_2230.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.6 on 2020-08-01 02:30
2 |
3 | import django.contrib.postgres.fields.jsonb
4 | import django.core.serializers.json
5 | from django.db import migrations
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('api', '0003_auto_20200724_0144'),
12 | ]
13 |
14 | operations = [
15 | migrations.AlterField(
16 | model_name='oltpbenchresult',
17 | name='weights',
18 | field=django.contrib.postgres.fields.jsonb.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder),
19 | ),
20 | ]
21 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0003_auto_20200724_0144.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.6 on 2020-07-24 05:44
2 |
3 | import django.contrib.postgres.fields.jsonb
4 | import django.core.serializers.json
5 | from django.db import migrations
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('api', '0002_add_user'),
12 | ]
13 |
14 | operations = [
15 | migrations.AlterField(
16 | model_name='oltpbenchresult',
17 | name='incremental_metrics',
18 | field=django.contrib.postgres.fields.jsonb.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder),
19 | ),
20 | ]
21 |
--------------------------------------------------------------------------------
/deployments/roles/config_openresty/tasks/update_conf.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: set var for the path to the SSL folder of the "{{ domain }}"
4 | set_fact:
5 | dir_domain_ssl: "{{ dir_openresty_ssl }}/{{ domain }}"
6 |
7 | - name: ensure log dir exists
8 | file:
9 | path: "{{ dir_openresty_internal_log }}"
10 | state: directory
11 | owner: root
12 | mode: "0755"
13 |
14 | - name: ensure the SSL folder exists for "{{ domain }}"
15 | file:
16 | path: "{{ dir_domain_ssl }}"
17 | state: directory
18 | owner: root
19 |     mode: "0755"
20 |
21 | - name: update the openresty conf for "{{ domain }}"
22 | template:
23 | src: "{{ conf_template }}"
24 | dest: "{{ dir_openresty_conf }}/{{ domain }}"
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/urls.py:
--------------------------------------------------------------------------------
1 | from django.urls import path, include
2 | from rest_framework.routers import DefaultRouter
3 | from pss_project.api import views
4 |
5 | # This configures all the HTTP API routing and endpoints
6 |
7 | router = DefaultRouter()
8 | router.register(r'health', views.HealthViewSet, 'health')
9 | router.register(r'oltpbench', views.OLTPBenchViewSet, 'oltpbench')
10 | router.register(r'microbenchmark', views.MicrobenchmarkViewSet, 'microbenchmark')
11 | router.register(r'git-events', views.GitEventsViewSet, 'git-events')
12 | router.register(r'artifact-stats', views.ArtifactStatsViewSet, 'artifact-stats')
13 |
14 | urlpatterns = [
15 | path('', include(router.urls)),
16 | ]
17 |
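Since DefaultRouter derives route names from the basename passed to register(), these endpoints can also be reverse-resolved by name. A small illustration (the resolved paths assume the 'performance-results/' prefix from pss_project/urls.py):

    from django.urls import reverse

    reverse('health-list')     # '/performance-results/health/'
    reverse('oltpbench-list')  # '/performance-results/oltpbench/'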
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/views/tests_health.py:
--------------------------------------------------------------------------------
1 | from rest_framework.test import APISimpleTestCase
2 |
3 | # Create your tests here.
4 |
5 |
6 | class HealthViewTest(APISimpleTestCase):
7 |
8 | url = '/performance-results/health/'
9 |
10 | def test_200_response(self):
11 | """
12 | Ensure the health endpoint sends back a 200
13 | """
14 | response = self.client.get(self.url)
15 | self.assertEqual(response.status_code, 200)
16 |
17 | def test_uptime_exists(self):
18 | """
19 | Ensure the health endpoint sends back uptime
20 | """
21 | response = self.client.get(self.url)
22 | self.assertTrue('uptime' in response.data)
23 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/views/health.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from rest_framework import viewsets
3 | from rest_framework.response import Response
4 | from rest_framework import status
5 | from datetime import datetime
6 | from pss_project.api.constants import service_start_time
7 |
8 | logger = logging.getLogger()
9 |
10 |
11 | class HealthViewSet(viewsets.ViewSet):
12 | """ Check whether the service is up and get how long it has been up """
13 |
14 | def list(self, request):
15 | logger.debug('health request')
16 | uptime = (datetime.now() - service_start_time).total_seconds()
17 | data = {'uptime': '{} seconds'.format(uptime)}
18 | return Response(data, status=status.HTTP_200_OK)
19 |
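Given the routing above, this view answers GET /performance-results/health/. A quick manual check against a locally running instance might look like this (host and port are assumptions based on the Dockerfile defaults):

    import requests

    response = requests.get('http://localhost:8080/performance-results/health/')
    print(response.status_code)  # 200
    print(response.json())       # e.g. {'uptime': '42.0 seconds'}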
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metrics/BasePerformanceMetrics.py:
--------------------------------------------------------------------------------
1 | from pss_project.api.models.rest.metrics.LatencyMetrics import LatencyMetrics
2 | from pss_project.api.models.rest.metrics.MemoryMetrics import MemoryMetrics
3 |
4 |
5 | class BasePerformanceMetrics(object):
6 | """ The base class for performance metrics as communicated through the HTTP API. It includes latency, throughput,
7 | and memory utilization metrics """
8 |
9 | def __init__(self, throughput, latency=None, memory_info=None):
10 | self.throughput = throughput
11 | if latency:
12 | self.latency = LatencyMetrics(**latency)
13 | if memory_info:
14 | self.memory_info = MemoryMetrics(**memory_info)
15 |
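Because the constructor unpacks nested dicts into LatencyMetrics and MemoryMetrics, callers such as the serializers can pass plain dictionaries straight through. A small illustration with made-up numbers:

    metrics = BasePerformanceMetrics(
        throughput=15488.67,
        latency={'avg': 64, 'median': 54, 'min': 29, 'max': 7002},
        memory_info={'rss': 1024, 'vms': 2048},
    )
    # latency and memory_info attributes exist only when those inputs were provided
    print(metrics.latency.avg, metrics.memory_info.rss)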
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metrics/IncrementalMetrics.py:
--------------------------------------------------------------------------------
1 | from pss_project.api.models.rest.metrics.BasePerformanceMetrics import BasePerformanceMetrics
2 |
3 |
4 | class IncrementalMetrics(BasePerformanceMetrics):
5 | """ This class is the model of incremental metrics as they are communicated through the HTTP API. The incremental
6 | metrics are similar to the BasePerformanceMetrics except they have a relative time associated with each entry. The
7 | time is the number of seconds into the test when the metric instance was gathered. """
8 |
9 | def __init__(self, time, throughput=None, latency=None, memory_info=None):
10 | self.time = time
11 | super().__init__(throughput, latency, memory_info)
12 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/database/ArtifactStatsResultSerializer.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 | from rest_framework.serializers import ModelSerializer
3 | from pss_project.api.models.database.ArtifactStatsResult import ArtifactStatsResult
4 |
5 |
6 | class ArtifactStatsResultSerializer(ModelSerializer):
7 | class Meta:
8 | model = ArtifactStatsResult
9 | fields = ('time', 'jenkins_job_id', 'git_branch', 'git_commit_id',
10 | 'db_version', 'environment', 'metrics')
11 |
12 | def smudge_timestamp(self):
13 | while ArtifactStatsResult.objects.filter(time=self.initial_data['time']).count() > 0:
14 | self.initial_data['time'] = self.initial_data['time'] + timedelta(milliseconds=1)
15 |
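smudge_timestamp exists because time acts as the unique key for these hypertable rows: nudging a colliding timestamp forward a millisecond at a time lets two results recorded at the same instant both be stored. A sketch of the intended call order, assuming initial_data['time'] is already a datetime so the timedelta addition is valid:

    serializer = ArtifactStatsResultSerializer(data=payload)  # payload: dict of model fields
    serializer.smudge_timestamp()  # bump 'time' until it no longer collides
    if serializer.is_valid():
        serializer.save()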
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metrics/MicrobenchmarkMetricsFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.metrics.MicrobenchmarkMetrics import MicrobenchmarkMetrics
3 |
4 |
5 | class MicrobenchmarkMetricsFactory(Factory):
6 | class Meta:
7 | model = MicrobenchmarkMetrics
8 |
9 | throughput = Faker('pydecimal', left_digits=9, right_digits=15, positive=True)
10 | tolerance = Faker('random_int')
11 | iterations = Faker('random_int')
12 | status = Faker('random_element', elements=('PASS', 'FAIL'))
13 | ref_throughput = Faker('pydecimal', left_digits=9, right_digits=15, positive=True)
14 | stdev_throughput = Faker('pydecimal', left_digits=9, right_digits=15, positive=True)
15 |
--------------------------------------------------------------------------------
/performance-storage-service/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.8
2 |
3 | # create and set working directory
4 | RUN mkdir "/performance-storage-service"
5 | WORKDIR "/performance-storage-service"
6 |
7 | # set default environment variables
8 | ENV PYTHONUNBUFFERED=1
9 | ENV LANG=C.UTF-8
10 | ENV PORT=8080
11 |
12 | # Install project dependencies
13 | COPY requirements.txt ./
14 | RUN pip install --upgrade pip
15 | RUN pip install -r requirements.txt
16 |
17 | RUN groupadd -r django && useradd -r -g django django
18 | COPY . .
19 | RUN chown -R django .
20 |
21 | EXPOSE 8080
22 | HEALTHCHECK --interval=10m --timeout=5s \
23 | CMD curl -f http://localhost:$PORT/performance-results/health/ || exit 1
24 |
25 | USER django
26 | CMD gunicorn --bind 0.0.0.0:$PORT pss_project.wsgi:application
27 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/database/ArtifactStatsDBFactory.py:
--------------------------------------------------------------------------------
1 | from django.utils import timezone
2 | from factory import Faker
3 | from factory.django import DjangoModelFactory
4 | from pss_project.api.models.database.ArtifactStatsResult import ArtifactStatsResult
5 |
6 |
7 | class ArtifactStatsDBFactory(DjangoModelFactory):
8 | class Meta:
9 | model = ArtifactStatsResult
10 |
11 | time = Faker('iso8601', tzinfo=timezone.utc)
12 | jenkins_job_id = Faker('pystr_format', string_format='###')
13 | git_branch = Faker('word')
14 | git_commit_id = Faker('sha1')
15 | db_version = Faker('word')
16 | environment = Faker('pydict', value_types=[str])
17 | metrics = Faker('pydict', value_types=[int, float, str, [], dict])
18 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metrics/SummaryPerformanceMetrics.py:
--------------------------------------------------------------------------------
1 | from pss_project.api.models.rest.metrics.BasePerformanceMetrics import BasePerformanceMetrics
2 | from pss_project.api.models.rest.metrics.MemoryMetrics import MemoryMetrics, MemoryItemSummary
3 |
4 |
5 | class SummaryPerformanceMetrics(BasePerformanceMetrics):
6 | def __init__(self, throughput, latency=None, memory_info=None):
7 | super().__init__(throughput, latency)
8 | if memory_info:
9 | rss = MemoryItemSummary(
10 | **memory_info["rss"]) if "rss" in memory_info else None
11 | vms = MemoryItemSummary(
12 | **memory_info["vms"]) if "vms" in memory_info else None
13 | self.memory_info = MemoryMetrics(rss, vms)
14 |
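Unlike the base class, memory_info here is expected to arrive as per-metric summaries rather than raw counters: a nested dict keyed by 'rss' and 'vms', each holding summary fields such as avg. Illustrative values:

    summary = SummaryPerformanceMetrics(
        throughput=133.65,
        memory_info={'rss': {'avg': 1024.5}, 'vms': {'avg': 2048.25}},
    )
    print(summary.memory_info.rss.avg)  # 1024.5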
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0013_change_creator_password.py:
--------------------------------------------------------------------------------
1 | from django.db import migrations
2 | from django.contrib.auth.models import User
3 | from pss_project.settings.utils import get_environ_value
4 |
5 | def update_password(apps, schema_editor):
6 | try:
7 | user = User.objects.get(username=get_environ_value('PSS_CREATOR_USER'))
8 | user.set_password(get_environ_value('PSS_CREATOR_PASSWORD'))
9 | user.save()
10 |     except Exception:
11 | msg = 'Migration error: update user password failed'
12 | print(msg)
13 |
14 | class Migration(migrations.Migration):
15 |
16 | dependencies = [
17 | ('api', '0012_hypertables'),
18 | ]
19 |
20 | operations = [
21 | migrations.RunPython(update_password),
22 | ]
23 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/ArtifactStatsSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, JSONField
2 | from django.core.serializers.json import DjangoJSONEncoder
3 | from pss_project.api.serializers.rest.metadata.MetadataSerializer import MetadataSerializer
4 | from pss_project.api.serializers.fields.UnixEpochDatetimeField import UnixEpochDateTimeField
5 | from pss_project.api.models.rest.ArtifactStatsRest import ArtifactStatsRest
6 |
7 |
8 | class ArtifactStatsSerializer(Serializer):
9 | # Fields
10 | metadata = MetadataSerializer()
11 | timestamp = UnixEpochDateTimeField()
12 | metrics = JSONField(encoder=DjangoJSONEncoder)
13 |
14 | def create(self, validated_data):
15 | return ArtifactStatsRest(**validated_data)
16 |
--------------------------------------------------------------------------------
/performance-storage-service/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Django's command-line utility for administrative tasks."""
3 | import os
4 | import sys
5 |
6 |
7 | def main():
8 | env = os.environ.get("ENV", "local")
9 | os.environ['DJANGO_SETTINGS_MODULE'] = 'pss_project.settings.{}'.format(env)
10 | try:
11 | from django.core.management import execute_from_command_line
12 | except ImportError as exc:
13 | raise ImportError(
14 | "Couldn't import Django. Are you sure it's installed and "
15 | "available on your PYTHONPATH environment variable? Did you "
16 | "forget to activate a virtual environment?"
17 | ) from exc
18 | execute_from_command_line(sys.argv)
19 |
20 |
21 | if __name__ == '__main__':
22 | main()
23 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0012_hypertables.py:
--------------------------------------------------------------------------------
1 | from django.db import migrations
2 | from django.db.migrations.operations.special import RunSQL
3 |
4 |
5 | class Migration(migrations.Migration):
6 |
7 | dependencies = [
8 | ("api", "0011_auto_20210105_1158"),
9 | ]
10 |
11 | operations = [
12 | migrations.RunSQL("SELECT create_hypertable('artifact_stats_results', 'time', chunk_time_interval => INTERVAL '30 days', migrate_data => TRUE);"),
13 | migrations.RunSQL("SELECT create_hypertable('oltpbench_results', 'time', chunk_time_interval => INTERVAL '30 days', migrate_data => TRUE);"),
14 | migrations.RunSQL("SELECT create_hypertable('microbenchmark_results', 'time', chunk_time_interval => INTERVAL '30 days', migrate_data => TRUE);"),
15 | ]
16 |
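create_hypertable() is TimescaleDB's partitioning call: each plain table becomes a hypertable chunked on the time column, 30 days per chunk, with existing rows migrated into chunks. As written these operations are irreversible, so the migration cannot be rolled back. If reversibility ever matters, each statement can carry an explicit reverse_sql; a no-op is the usual choice, since a hypertable cannot be converted back in place. A sketch of that variant:

    migrations.RunSQL(
        sql="SELECT create_hypertable('oltpbench_results', 'time', "
            "chunk_time_interval => INTERVAL '30 days', migrate_data => TRUE);",
        reverse_sql=migrations.RunSQL.noop,  # permit reverse migration without undoing the hypertable
    )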
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metrics/OLTPBenchMetrics.py:
--------------------------------------------------------------------------------
1 | from pss_project.api.models.rest.metrics.SummaryPerformanceMetrics import SummaryPerformanceMetrics
2 | from pss_project.api.models.rest.metrics.IncrementalMetrics import IncrementalMetrics
3 |
4 |
5 | class OLTPBenchMetrics(SummaryPerformanceMetrics):
6 | """ This class is the model of the OLTPBench metrics as it is represented in the HTTP API """
7 |
8 | def __init__(self, throughput, latency=None, memory_info=None, incremental_metrics=None):
9 | super().__init__(throughput, latency, memory_info)
10 | if incremental_metrics:
11 | self.incremental_metrics = []
12 | for metric in incremental_metrics:
13 | self.incremental_metrics.append(IncrementalMetrics(**metric))
14 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/fields/UnixEpochDatetimeField.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from time import mktime
3 | from rest_framework.serializers import DateTimeField
4 |
5 |
6 | class UnixEpochDateTimeField(DateTimeField):
7 | """ This is a custom serializer convert between datetime objects and timestamps """
8 |
9 | def to_representation(self, value):
10 | """ Return epoch time for a datetime object or ``None``"""
11 | try:
12 | return int(mktime(value.timetuple())*1e3 + value.microsecond/1e3)
13 | except (AttributeError, TypeError):
14 | return None
15 |
16 | def to_internal_value(self, value):
17 | """ Return a datetime from an epoch time """
18 | return datetime.fromtimestamp(float(value/1000.0))
19 |
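The field speaks milliseconds on the wire and naive local datetimes internally (mktime() interprets the timetuple in the local timezone). A round trip with an arbitrary instant:

    from datetime import datetime

    field = UnixEpochDateTimeField()
    moment = datetime(2020, 6, 16, 12, 0, 0)
    millis = field.to_representation(moment)          # epoch milliseconds, local timezone
    assert field.to_internal_value(millis) == moment  # exact, since microsecond == 0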
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metrics/MemoryMetricsFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.metrics.MemoryMetrics import MemoryMetrics, MemoryItemSummary
3 |
4 |
5 | class MemoryItemSummaryFactory(Factory):
6 | class Meta:
7 | model = MemoryItemSummary
8 |
9 | avg = Faker('pydecimal', left_digits=10, right_digits=4, positive=True)
10 |
11 |
12 | class MemoryMetricsFactory(Factory):
13 | class Meta:
14 | model = MemoryMetrics
15 |
16 | rss = Faker('random_int')
17 | vms = Faker('random_int')
18 |
19 |
20 | class MemorySummaryMetricsFactory(Factory):
21 | class Meta:
22 | model = MemoryMetrics
23 |
24 | rss = MemoryItemSummaryFactory().__dict__
25 | vms = MemoryItemSummaryFactory().__dict__
26 |
--------------------------------------------------------------------------------
/timescaledb/test_data/noop/oltpbench.summary:
--------------------------------------------------------------------------------
1 | {
2 | "Benchmark Type": "noop",
3 | "Current Timestamp (milliseconds)": 1592322443228,
4 | "DBMS Type": "noisepage",
5 | "DBMS Version": "9.5",
6 | "Latency Distribution": {
7 | "Minimum Latency (microseconds)": 29,
8 | "25th Percentile Latency (microseconds)": 53,
9 | "Median Latency (microseconds)": 54,
10 | "Average Latency (microseconds)": 64,
11 | "75th Percentile Latency (microseconds)": 61,
12 | "90th Percentile Latency (microseconds)": 98,
13 | "95th Percentile Latency (microseconds)": 105,
14 | "99th Percentile Latency (microseconds)": 128,
15 | "Maximum Latency (microseconds)": 7002
16 | },
17 | "Throughput (requests/second)": 15488.670879496794,
18 | "isolation": "TRANSACTION_SERIALIZABLE",
19 | "scalefactor": "1.0",
20 | "terminals": "1"
21 | }
22 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/settings/local.py:
--------------------------------------------------------------------------------
1 | from .base import *
2 |
3 | DEBUG = True
4 | ALLOWED_HOSTS = ['0.0.0.0', 'kubernetes.docker.internal', 'localhost', '127.0.0.1', 'host.docker.internal', 'smee.io']
5 | USE_TZ = False
6 |
7 | # Database
8 | # https://docs.djangoproject.com/en/3.0/ref/settings/#databases
9 |
10 | MIDDLEWARE.insert(0, 'django_prometheus.middleware.PrometheusBeforeMiddleware')
11 | MIDDLEWARE.insert(len(MIDDLEWARE), 'django_prometheus.middleware.PrometheusAfterMiddleware')
12 |
13 | DATABASES = {
14 | 'default': {
15 | 'ENGINE': 'django.db.backends.postgresql',
16 | 'NAME': 'postgres',
17 | 'USER': 'postgres',
18 | 'PASSWORD': 'password',
19 | 'HOST': '127.0.0.1', # 'timescaledb-service-local.performance',
20 | 'PORT': '5432',
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/deployments/playbooks/blackbox-exporter-deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - hosts: "{{ host_override | default('k8s_master') }}"
4 |   name: Deploy Blackbox Exporter
5 | vars:
6 | dir_deployment: "{{ inventory_dir }}"
7 | dir_k8s_blackbox_exporter: "{{ dir_deployment }}/kubernetes/monitoring/blackbox_exporter"
8 | pre_tasks:
9 | - name: Ensure k8s module dependencies are installed.
10 | pip:
11 | state: present
12 | name: openshift
13 |
14 | tasks:
15 | - name: Apply blackbox_exporter deployment configs
16 | vars:
17 | config: "{{ dir_k8s_blackbox_exporter }}/{{ item }}"
18 | community.kubernetes.k8s:
19 | state: present
20 | definition: "{{ lookup('template', '{{ config }}') }}"
21 | loop:
22 | - config-map.yml
23 | - deployment.yml
24 | - service.yml
--------------------------------------------------------------------------------
/deployments/playbooks/postgres-exporter-deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - hosts: "{{ host_override | default('k8s_master') }}"
4 |   name: Deploy Postgres Exporter
5 | vars:
6 | dir_deployment: "{{ inventory_dir }}"
7 | dir_k8s_postgres_exporter: "{{ dir_deployment }}/kubernetes/monitoring/postgres_exporter"
8 | pre_tasks:
9 | - name: Ensure k8s module dependencies are installed.
10 | pip:
11 | state: present
12 | name: openshift
13 |
14 | tasks:
15 | - name: Apply postgres_exporter deployment configs
16 | vars:
17 | config: "{{ dir_k8s_postgres_exporter}}/{{ item }}"
18 | community.kubernetes.k8s:
19 | state: present
20 | definition: "{{ lookup('template', '{{ config }}') }}"
21 | loop:
22 | - config-map.yml
23 | - deployment.yml
24 | - service.yml
--------------------------------------------------------------------------------
/deployments/roles/install_k8s/tasks/init_cluster.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: create .kube directory
4 | become: yes
5 | become_user: "{{ k8s_user }}"
6 | file:
7 | path: $HOME/.kube
8 | state: directory
9 | mode: 0755
10 |
11 | - name: initialize the cluster
12 | shell: "kubeadm init --pod-network-cidr={{ k8s_network_cidr }} >> cluster_initialized.txt"
13 | args:
14 | chdir: $HOME/.kube
15 | creates: cluster_initialized.txt
16 |
17 | - name: copy admin.conf to user's kube config
18 | copy:
19 | src: /etc/kubernetes/admin.conf
20 | dest: $HOME/.kube/config
21 | remote_src: yes
22 | owner: "{{ k8s_user }}"
23 |
24 | - name: install Pod network
25 | shell: "kubectl apply -f {{ k8s_flannel_src_url }} >> pod_network_setup.txt"
26 | args:
27 | chdir: $HOME/.kube
28 | creates: pod_network_setup.txt
--------------------------------------------------------------------------------
/timescaledb/test_data/tatp/oltpbench.summary:
--------------------------------------------------------------------------------
1 | {
2 | "Benchmark Type": "tatp",
3 | "Current Timestamp (milliseconds)": 1592474752242,
4 | "DBMS Type": "noisepage",
5 | "DBMS Version": "9.5",
6 | "Latency Distribution": {
7 | "Minimum Latency (microseconds)": 821,
8 | "25th Percentile Latency (microseconds)": 1775,
9 | "Median Latency (microseconds)": 2340,
10 | "Average Latency (microseconds)": 14944,
11 | "75th Percentile Latency (microseconds)": 32536,
12 | "90th Percentile Latency (microseconds)": 38479,
13 | "95th Percentile Latency (microseconds)": 43817,
14 | "99th Percentile Latency (microseconds)": 78607,
15 | "Maximum Latency (microseconds)": 158452
16 | },
17 | "Throughput (requests/second)": 133.64677310920695,
18 | "isolation": "TRANSACTION_SERIALIZABLE",
19 | "scalefactor": "1.0",
20 | "terminals": "2"
21 | }
22 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/settings/staging.py:
--------------------------------------------------------------------------------
1 | from .base import *
2 |
3 | DEBUG = True
4 | ALLOWED_HOSTS = [
5 | 'incrudibles-staging.db.pdl.cmu.edu',
6 | get_environ_value('PSS_POD_IP'),
7 | ]
8 |
9 | MIDDLEWARE.insert(0, 'django_prometheus.middleware.PrometheusBeforeMiddleware')
10 | MIDDLEWARE.insert(len(MIDDLEWARE),
11 | 'django_prometheus.middleware.PrometheusAfterMiddleware')
12 |
13 | DATABASES = {
14 | 'default': {
15 | 'ENGINE': 'django.db.backends.postgresql',
16 | 'NAME': get_environ_value('PSS_DATABASE_NAME'),
17 | 'USER': get_environ_value('PSS_DATABASE_USER'),
18 | 'PASSWORD': get_environ_value('PSS_DATABASE_PASSWORD'),
19 | 'HOST': 'timescaledb-service-staging.performance',
20 | 'PORT': int(get_environ_value('PSS_DATABASE_PORT', 5432)),
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/settings/testing.py:
--------------------------------------------------------------------------------
1 | from .base import *
2 |
3 | DEBUG = True
4 | ALLOWED_HOSTS = [
5 | 'incrudibles-testing.db.pdl.cmu.edu',
6 | get_environ_value('PSS_POD_IP'),
7 | ]
8 |
9 | MIDDLEWARE.insert(0, 'django_prometheus.middleware.PrometheusBeforeMiddleware')
10 | MIDDLEWARE.insert(len(MIDDLEWARE),
11 | 'django_prometheus.middleware.PrometheusAfterMiddleware')
12 |
13 | DATABASES = {
14 | 'default': {
15 | 'ENGINE': 'django.db.backends.postgresql',
16 | 'NAME': get_environ_value('PSS_DATABASE_NAME'),
17 | 'USER': get_environ_value('PSS_DATABASE_USER'),
18 | 'PASSWORD': get_environ_value('PSS_DATABASE_PASSWORD'),
19 | 'HOST': 'timescaledb-service-testing.performance',
20 | 'PORT': int(get_environ_value('PSS_DATABASE_PORT', 5432)),
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/timescaledb/test_data/smallbank/oltpbench.summary:
--------------------------------------------------------------------------------
1 | {
2 | "Benchmark Type": "smallbank",
3 | "Current Timestamp (milliseconds)": 1592478048174,
4 | "DBMS Type": "noisepage",
5 | "DBMS Version": "9.5",
6 | "Latency Distribution": {
7 | "Minimum Latency (microseconds)": 43864,
8 | "25th Percentile Latency (microseconds)": 200404,
9 | "Median Latency (microseconds)": 267164,
10 | "Average Latency (microseconds)": 256545,
11 | "75th Percentile Latency (microseconds)": 308972,
12 | "90th Percentile Latency (microseconds)": 358955,
13 | "95th Percentile Latency (microseconds)": 400800,
14 | "99th Percentile Latency (microseconds)": 467660,
15 | "Maximum Latency (microseconds)": 559380
16 | },
17 | "Throughput (requests/second)": 61.9037569946028,
18 | "isolation": "TRANSACTION_SERIALIZABLE",
19 | "scalefactor": "1.0",
20 | "terminals": "16"
21 | }
22 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/settings/production.py:
--------------------------------------------------------------------------------
1 | from .base import *
2 |
3 | DEBUG = True
4 | ALLOWED_HOSTS = [
5 | 'incrudibles-production.db.pdl.cmu.edu',
6 | get_environ_value('PSS_POD_IP'),
7 | ]
8 |
9 | MIDDLEWARE.insert(0, 'django_prometheus.middleware.PrometheusBeforeMiddleware')
10 | MIDDLEWARE.insert(len(MIDDLEWARE),
11 | 'django_prometheus.middleware.PrometheusAfterMiddleware')
12 |
13 | DATABASES = {
14 | 'default': {
15 | 'ENGINE': 'django.db.backends.postgresql',
16 | 'NAME': get_environ_value('PSS_DATABASE_NAME'),
17 | 'USER': get_environ_value('PSS_DATABASE_USER'),
18 | 'PASSWORD': get_environ_value('PSS_DATABASE_PASSWORD'),
19 | 'HOST': 'timescaledb-service-production.performance',
20 | 'PORT': int(get_environ_value('PSS_DATABASE_PORT', 5432)),
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/database/MicrobenchmarkResultSerializer.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 | from rest_framework.serializers import ModelSerializer
3 | from pss_project.api.models.database.MicrobenchmarkResult import MicrobenchmarkResult
4 |
5 |
6 | class MicrobenchmarkResultSerializer(ModelSerializer):
7 | class Meta:
8 | model = MicrobenchmarkResult
9 | fields = ('time', 'jenkins_job_id', 'git_branch', 'git_commit_id',
10 | 'db_version', 'environment', 'benchmark_suite', 'benchmark_name',
11 | 'threads', 'min_runtime', 'wal_device', 'metrics')
12 |
13 | def smudge_timestamp(self):
14 | while MicrobenchmarkResult.objects.filter(time=self.initial_data['time']).count() > 0:
15 | self.initial_data['time'] = self.initial_data['time'] + timedelta(milliseconds=1)
16 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metrics/IncrementalMetricsFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.metrics.IncrementalMetrics import IncrementalMetrics
3 | from pss_project.api.tests.factories.rest.metrics.LatencyMetricsFactory \
4 | import LatencyMetricsFactory
5 | from pss_project.api.tests.factories.rest.metrics.MemoryMetricsFactory \
6 | import MemoryMetricsFactory
7 |
8 |
9 | class IncrementalMetricsFactory(Factory):
10 | class Meta:
11 | model = IncrementalMetrics
12 |
13 | time = Faker('random_int')
14 | throughput = Faker('pydecimal',
15 | left_digits=9,
16 | right_digits=15,
17 | positive=True)
18 | latency = LatencyMetricsFactory().__dict__
19 | memory_info = MemoryMetricsFactory().__dict__
20 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/parameters/OLTPBenchParameters.py:
--------------------------------------------------------------------------------
1 | from pss_project.api.models.rest.parameters.TransactionWeight import TransactionWeight
2 |
3 |
4 | class OLTPBenchParameters(object):
5 | """ This class is the model of the OLTPBench parameters as communicated through the HTTP API """
6 |
7 | def __init__(self, query_mode, scale_factor, terminals, client_time, transaction_weights, max_connection_threads):
8 | self.query_mode = query_mode
9 | self.scale_factor = scale_factor
10 | self.terminals = terminals
11 | self.client_time = client_time
12 | self.transaction_weights = []
13 | for weight in transaction_weights:
14 | self.transaction_weights.append(
15 | TransactionWeight(**weight)
16 | )
17 | self.max_connection_threads = max_connection_threads
18 |
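transaction_weights arrives as a list of dicts and is unpacked into TransactionWeight objects. An illustrative construction, assuming TransactionWeight takes a name and a weight (its definition is not shown in this listing) and with invented values:

    params = OLTPBenchParameters(
        query_mode='simple',  # assumed member of QUERY_MODE_CHOICES
        scale_factor=1.0,
        terminals=1,
        client_time=60,
        transaction_weights=[{'name': 'NoOp', 'weight': 100}],
        max_connection_threads=4,
    )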
--------------------------------------------------------------------------------
/performance-storage-service/requirements.txt:
--------------------------------------------------------------------------------
1 | asgiref==3.2.10
2 | autopep8==1.4.4
3 | certifi==2020.6.20
4 | cffi==1.14.3
5 | chardet==3.0.4
6 | coverage==4.5.4
7 | cryptography==3.2.1
8 | Django==3.1.2
9 | django-jenkins==0.110.0
10 | django-prometheus==2.1.0
11 | djangorestframework==3.12.1
12 | entrypoints==0.3
13 | factory-boy==3.1.0
14 | Faker==4.14.0
15 | flake8==3.7.9
16 | github3.py==1.3.0
17 | gunicorn==20.0.4
18 | idna==2.10
19 | Jinja2==2.11.2
20 | jwcrypto==0.8
21 | jwt==1.1.0
22 | MarkupSafe==1.1.1
23 | mccabe==0.6.1
24 | pep8==1.7.1
25 | prometheus-client==0.8.0
26 | psycopg2==2.8.6
27 | pycodestyle==2.5.0
28 | pycparser==2.20
29 | pyflakes==2.1.1
30 | python-dateutil==2.8.1
31 | pytz==2020.1
32 | PyYAML==5.3.1
33 | requests==2.24.0
34 | six==1.15.0
35 | sqlparse==0.4.1
36 | tabulate==0.8.7
37 | text-unidecode==1.3
38 | toml==0.10.2
39 | uritemplate==3.0.1
40 | urllib3==1.25.11
41 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/database/OLTPBenchResultSerializer.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 | from rest_framework.serializers import ModelSerializer
3 | from pss_project.api.models.database.OLTPBenchResult import OLTPBenchResult
4 |
5 |
6 | class OLTPBenchResultSerializer(ModelSerializer):
7 | class Meta:
8 | model = OLTPBenchResult
9 | fields = ('time', 'git_branch', 'git_commit_id', 'jenkins_job_id', 'db_version', 'environment',
10 | 'benchmark_type', 'query_mode', 'scale_factor', 'terminals', 'client_time', 'weights',
11 | 'wal_device', 'max_connection_threads', 'metrics', 'incremental_metrics')
12 |
13 | def smudge_timestamp(self):
14 | while OLTPBenchResult.objects.filter(time=self.initial_data['time']).count() > 0:
15 | self.initial_data['time'] = self.initial_data['time'] + timedelta(milliseconds=1)
16 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metadata/MetadataFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory
2 | from pss_project.api.models.rest.metadata.Metadata import Metadata
3 | from pss_project.api.tests.factories.rest.metadata.JenkinsMetadataFactory import JenkinsMetadataFactory
4 | from pss_project.api.tests.factories.rest.metadata.GithubMetadataFactory import GithubMetadataFactory
5 | from pss_project.api.tests.factories.rest.metadata.NoisePageMetadataFactory import NoisePageMetadataFactory
6 | from pss_project.api.tests.factories.rest.metadata.EnvironmentMetadataFactory import EnvironmentMetadataFactory
7 |
8 |
9 | class MetadataFactory(Factory):
10 | class Meta:
11 | model = Metadata
12 |
13 | jenkins = JenkinsMetadataFactory().__dict__
14 | github = GithubMetadataFactory().__dict__
15 | noisepage = NoisePageMetadataFactory().__dict__
16 | environment = EnvironmentMetadataFactory().__dict__
17 |
--------------------------------------------------------------------------------
/deployments/roles/config_bash/files/profile:
--------------------------------------------------------------------------------
1 | # ~/.profile: executed by the command interpreter for login shells.
2 | # This file is not read by bash(1), if ~/.bash_profile or ~/.bash_login
3 | # exists.
4 | # see /usr/share/doc/bash/examples/startup-files for examples.
5 | # the files are located in the bash-doc package.
6 |
7 | # the default umask is set in /etc/profile; for setting the umask
8 | # for ssh logins, install and configure the libpam-umask package.
9 | #umask 022
10 |
11 | # if running bash
12 | if [ -n "$BASH_VERSION" ]; then
13 | # include .bashrc if it exists
14 | if [ -f "$HOME/.bashrc" ]; then
15 | . "$HOME/.bashrc"
16 | fi
17 | fi
18 |
19 | # set PATH so it includes user's private bin if it exists
20 | if [ -d "$HOME/bin" ] ; then
21 | PATH="$HOME/bin:$PATH"
22 | fi
23 |
24 | # set PATH so it includes user's private bin if it exists
25 | if [ -d "$HOME/.local/bin" ] ; then
26 | PATH="$HOME/.local/bin:$PATH"
27 | fi
--------------------------------------------------------------------------------
/deployments/playbooks/prometheus-deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - hosts: "{{ host_override | default('k8s_master') }}"
4 | name: Deploy Prometheus
5 | vars:
6 | dir_deployment: "{{ inventory_dir }}"
7 | dir_k8s_prometheus: "{{ dir_deployment }}/kubernetes/monitoring/prometheus"
8 | pre_tasks:
9 | - name: Ensure k8s module dependencies are installed.
10 | pip:
11 | state: present
12 | name: openshift
13 |
14 | tasks:
15 | - name: Apply Prometheus Deployment Configs
16 | vars:
17 | config: "{{ dir_k8s_prometheus }}/{{ item }}"
18 | community.kubernetes.k8s:
19 | state: present
20 | definition: "{{ lookup('template', '{{ config }}') }}"
21 | loop:
22 | - cluster-role.yml
23 | - cluster-role-binding.yml
24 | - config-map.yml
25 | - persistent-volume.yml
26 | - persistent-volume-claim.yml
27 | - deployment.yml
28 | - service.yml
--------------------------------------------------------------------------------
/deployments/playbooks/upload-scripts.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: "{{ host_override | default('k8s_master') }}"
3 | name: Upload Scripts
4 | vars:
5 | dir_repo: "{{ inventory_dir }}"
6 | dir_scripts: "{{ dir_repo }}/scripts"
7 | remote_dir_secrets: /data/secrets
8 | tasks:
9 | - name: Upload scripts for k8s secrets
10 | block:
11 | - name: Ensure secrets folder exists
12 | become: yes
13 | file:
14 | path: "{{ remote_dir_secrets }}"
15 | state: directory
16 | owner: root
17 | mode: 0700
18 |
19 | - name: Upload scripts for k8s secrets
20 | become: yes
21 | copy:
22 | src: "{{ dir_scripts }}/{{ item | string }}"
23 | dest: "{{ remote_dir_secrets }}"
24 | owner: root
25 | mode: 0700
26 | loop:
27 | - make_secrets_performance.sh
28 | - make_secrets_monitoring.sh
--------------------------------------------------------------------------------
/deployments/playbooks/monitoring-grafana-deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: k8s_master
3 |   name: Deploy Monitoring Grafana
4 | vars:
5 | dir_repo: "{{ inventory_dir | dirname }}"
6 | dir_deploy: "{{ inventory_dir }}"
7 | dir_k8s_grafana: "{{ dir_deploy }}/kubernetes/monitoring/grafana"
8 | service_hostname: incrudibles-k8s.db.pdl.cmu.edu
9 | ansible_python_interpreter: /usr/bin/python3
10 | pre_tasks:
11 | - name: Ensure k8s module dependencies are installed.
12 | pip:
13 | name: openshift
14 | state: present
15 | tasks:
16 | - name: Apply Grafana Deployment Configs
17 | vars:
18 | config: "{{ dir_k8s_grafana }}/{{ item }}"
19 | community.kubernetes.k8s:
20 | state: present
21 | definition: "{{ lookup('template', '{{ config }}') }}"
22 | loop:
23 | - persistent-volume.yml
24 | - persistent-volume-claim.yml
25 | - deployment.yml
26 | - service.yml
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0004_auto_20200811_1313.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.6 on 2020-08-11 17:13
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('api', '0003_auto_20200724_0144'),
10 | ]
11 |
12 | operations = [
13 | migrations.AddField(
14 | model_name='oltpbenchresult',
15 | name='max_connection_threads',
16 | field=models.PositiveSmallIntegerField(default=4),
17 | preserve_default=False,
18 | ),
19 | migrations.AddField(
20 | model_name='oltpbenchresult',
21 | name='wal_device',
22 | field=models.CharField(choices=[('RAM disk', 'RAM disk'), ('HDD', 'HDD'), ('SATA SSD', 'SATA SSD'), ('NVMe SSD', 'NVMe SSD'),('None','None')], default='HDD', max_length=30),
23 | preserve_default=False,
24 | ),
25 | ]
26 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/serializers/fields/test_UnixEpochDateTimeField.py:
--------------------------------------------------------------------------------
1 | from django.test import SimpleTestCase
2 | from datetime import datetime
3 |
4 | from pss_project.api.serializers.fields.UnixEpochDatetimeField import UnixEpochDateTimeField
5 |
6 |
7 | class TestUnixEpochDateTimeField(SimpleTestCase):
8 | def setUp(self):
9 | self.field = UnixEpochDateTimeField()
10 |
11 | def test_to_representation(self):
12 | """ Convert datetime to timestamp"""
13 | input = datetime.now().replace(microsecond=0)
14 | result = self.field.to_representation(input)
15 | self.assertEqual(datetime.fromtimestamp(float(result/1000.0)), input)
16 |
17 | def test_to_internal_value(self):
18 | """ Convert timestamp to datetime """
19 | input = datetime.now().timestamp()
20 | result = self.field.to_internal_value(input)
21 | self.assertEqual(result, datetime.fromtimestamp(float(input/1000.0)))
22 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/parameters/OLTPBenchParametersSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, ChoiceField, DecimalField, IntegerField
2 | from pss_project.api.serializers.rest.parameters.TransactionWeightSerializer import TransactionWeightSerializer
3 | from pss_project.api.models.rest.parameters.OLTPBenchParameters import OLTPBenchParameters
4 | from pss_project.api.constants import QUERY_MODE_CHOICES
5 |
6 |
7 | class OLTPBenchParametersSerializer(Serializer):
8 |
9 | # Fields
10 | query_mode = ChoiceField(choices=QUERY_MODE_CHOICES)
11 | scale_factor = DecimalField(max_digits=10, decimal_places=4, coerce_to_string=False)
12 | terminals = IntegerField()
13 | client_time = IntegerField()
14 | transaction_weights = TransactionWeightSerializer(many=True)
15 | max_connection_threads = IntegerField()
16 |
17 | def create(self, validated_data):
18 | return OLTPBenchParameters(**validated_data)
19 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/OLTPBenchRestFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.OLTPBenchRest import OLTPBenchRest
3 | from pss_project.api.tests.factories.rest.metadata.MetadataFactory \
4 | import MetadataFactory
5 | from pss_project.api.tests.factories.rest.parameters.OLTPBenchParametersFactory \
6 | import OLTPBenchParametersFactory
7 | from pss_project.api.tests.factories.rest.metrics.OLTPBenchMetricsFactory \
8 | import OLTPBenchMetricsFactory
9 | from pss_project.api.tests.utils.utils import generate_dict_factory
10 |
11 |
12 | class OLTPBenchRestFactory(Factory):
13 | class Meta:
14 | model = OLTPBenchRest
15 |
16 | metadata = generate_dict_factory(MetadataFactory)()
17 | timestamp = Faker('date_time')
18 | type = Faker('word')
19 | parameters = generate_dict_factory(OLTPBenchParametersFactory)()
20 | metrics = generate_dict_factory(OLTPBenchMetricsFactory)()
21 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metrics/IncrementalMetricsSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, DecimalField, IntegerField
2 | from pss_project.api.serializers.rest.metrics.LatencyMetricsSerializer \
3 | import LatencyMetricsSerializer
4 | from pss_project.api.models.rest.metrics.IncrementalMetrics \
5 | import IncrementalMetrics
6 | from pss_project.api.serializers.rest.metrics.MemoryMetricsSerializer \
7 | import MemoryMetricsSerializer
8 |
9 |
10 | class IncrementalMetricsSerializer(Serializer):
11 | # Fields
12 | time = IntegerField()
13 | throughput = DecimalField(max_digits=24,
14 | decimal_places=15,
15 | coerce_to_string=False)
16 | latency = LatencyMetricsSerializer(required=False)
17 | memory_info = MemoryMetricsSerializer(required=False)
18 |
19 | def create(self, validated_data):
20 | return IncrementalMetrics(**validated_data)
21 |
--------------------------------------------------------------------------------
/deployments/roles/install_k8s/tasks/snap.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | # the apt package name for 'snap' is 'snapd'
4 | - name: ensure snap is ready
5 | become: yes
6 | apt:
7 | name: snapd
8 | update_cache: yes
9 |
10 | - name: get remote env vars
11 | setup:
12 | gather_subset:
13 | - "env"
14 |
15 | - name: ensure /snap/bin exists in PATH
16 | become: yes
17 | lineinfile:
18 | path: /etc/environment
19 | regexp: "^PATH="
20 | line: "PATH=\"/snap/bin:{{ ansible_env.PATH }}\""
21 | when: ansible_env.PATH is not search("/snap/bin:")
22 |
23 | # kubectl: CLI for k8s on each node (master or worker)
24 | # kubelet: daemon agent on each node (master or worker)
25 | - name: install kubeadm and kubelet on each node
26 | become: yes
27 | snap:
28 | pkg:
29 | - kubeadm
30 | - kubelet
31 | classic: yes
32 |
33 | - name: install kubectl on the master node
34 | become: yes
35 | snap:
36 | name: kubectl
37 | classic: yes
38 | when: env == "k8s_master"
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/OLTPBenchSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, CharField
2 | from pss_project.api.serializers.rest.metadata.MetadataSerializer import MetadataSerializer
3 | from pss_project.api.serializers.fields.UnixEpochDatetimeField import UnixEpochDateTimeField
4 | from pss_project.api.serializers.rest.parameters.OLTPBenchParametersSerializer import OLTPBenchParametersSerializer
5 | from pss_project.api.serializers.rest.metrics.OLTPBenchMetricsSerializer import OLTPBenchMetricsSerializer
6 | from pss_project.api.models.rest.OLTPBenchRest import OLTPBenchRest
7 |
8 |
9 | class OLTPBenchSerializer(Serializer):
10 | # Fields
11 | metadata = MetadataSerializer()
12 | timestamp = UnixEpochDateTimeField()
13 | type = CharField()
14 | parameters = OLTPBenchParametersSerializer()
15 | metrics = OLTPBenchMetricsSerializer()
16 |
17 | def create(self, validated_data):
18 | return OLTPBenchRest(**validated_data)
19 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metadata/MetadataSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer
2 | from pss_project.api.serializers.rest.metadata.JenkinsMetadataSerializer import JenkinsMetadataSerializer
3 | from pss_project.api.serializers.rest.metadata.GithubMetadataSerializer import GithubMetadataSerializer
4 | from pss_project.api.serializers.rest.metadata.NoisePageMetadataSerializer import NoisePageMetadataSerializer
5 | from pss_project.api.serializers.rest.metadata.EnvironmentMetadataSerializer import EnvironmentMetadataSerializer
6 | from pss_project.api.models.rest.metadata.Metadata import Metadata
7 |
8 |
9 | class MetadataSerializer(Serializer):
10 | # Fields
11 | jenkins = JenkinsMetadataSerializer()
12 | github = GithubMetadataSerializer()
13 | noisepage = NoisePageMetadataSerializer()
14 | environment = EnvironmentMetadataSerializer()
15 |
16 | def create(self, validated_data):
17 | return Metadata(**validated_data)
18 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metrics/MicrobenchmarkMetricsSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, ChoiceField, DecimalField, IntegerField
2 | from pss_project.api.models.rest.metrics.MicrobenchmarkMetrics import MicrobenchmarkMetrics
3 | from pss_project.api.constants import MICROBENCHMARK_STATUS_CHOICES
4 |
5 |
6 | class MicrobenchmarkMetricsSerializer(Serializer):
7 | # Fields
8 | throughput = DecimalField(max_digits=34, decimal_places=15, coerce_to_string=False)
9 | tolerance = IntegerField()
10 | iterations = IntegerField()
11 | status = ChoiceField(required=False, choices=MICROBENCHMARK_STATUS_CHOICES)
12 | ref_throughput = DecimalField(required=False, max_digits=34, decimal_places=15, coerce_to_string=False)
13 | stdev_throughput = DecimalField(required=False, max_digits=34, decimal_places=15, coerce_to_string=False)
14 |
15 | def create(self, validated_data):
16 | return MicrobenchmarkMetrics(**validated_data)
17 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metrics/MemoryMetricsSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, DecimalField, IntegerField
2 | from pss_project.api.models.rest.metrics.MemoryMetrics import MemoryMetrics, MemoryItemSummary
3 |
4 |
5 | class MemoryItemSummarySerializer(Serializer):
6 | avg = DecimalField(max_digits=100, decimal_places=4, coerce_to_string=False)
7 |
8 | def create(self, validated_data):
9 |         return MemoryItemSummary(**validated_data)
10 |
11 |
12 | class MemoryMetricsSerializer(Serializer):
13 | rss = IntegerField(min_value=0)
14 | vms = IntegerField(min_value=0)
15 |
16 | def create(self, validated_data):
17 | return MemoryMetrics(**validated_data)
18 |
19 |
20 | class MemorySummaryMetricsSerializer(Serializer):
21 | rss = MemoryItemSummarySerializer(required=False)
22 | vms = MemoryItemSummarySerializer(required=False)
23 |
24 | def create(self, validated_data):
25 | return MemoryMetrics(**validated_data)
26 |
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/timescaledb/postgres-stateful.yml:
--------------------------------------------------------------------------------
1 | apiVersion: apps/v1
2 | kind: StatefulSet
3 | metadata:
4 | name: postgres-demo
5 | namespace: performance
6 | spec:
7 | serviceName: "postgres"
8 | replicas: 2
9 | selector:
10 | matchLabels:
11 | app: postgres
12 | template:
13 | metadata:
14 | labels:
15 | app: postgres
16 | spec:
17 | containers:
18 | - name: postgres
19 | image: postgres:latest
20 | envFrom:
21 | - configMapRef:
22 | name: postgres-config-demo
23 | ports:
24 | - containerPort: 5432
25 | name: postgredb
26 | volumeMounts:
27 | - name: postgredb
28 | mountPath: /var/lib/postgresql/data
29 | subPath: postgres
30 | volumeClaimTemplates:
31 | - metadata:
32 | name: postgredb
33 | spec:
34 | accessModes: [ "ReadWriteOnce" ]
35 | storageClassName: gp2
36 | resources:
37 | requests:
38 | storage: 3Gi
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metrics/LatencyMetricsFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.metrics.LatencyMetrics import LatencyMetrics
3 |
4 |
5 | class LatencyMetricsFactory(Factory):
6 | class Meta:
7 | model = LatencyMetrics
8 |
9 | l_25 = Faker('pydecimal', left_digits=6, right_digits=4, positive=True)
10 | l_75 = Faker('pydecimal', left_digits=6, right_digits=4, positive=True)
11 | l_90 = Faker('pydecimal', left_digits=6, right_digits=4, positive=True)
12 | l_95 = Faker('pydecimal', left_digits=6, right_digits=4, positive=True)
13 | l_99 = Faker('pydecimal', left_digits=6, right_digits=4, positive=True)
14 | avg = Faker('pydecimal', left_digits=6, right_digits=4, positive=True)
15 | median = Faker('pydecimal', left_digits=6, right_digits=4, positive=True)
16 | max = Faker('pydecimal', left_digits=6, right_digits=4, positive=True)
17 | min = Faker('pydecimal', left_digits=6, right_digits=4, positive=True)
18 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/database/MicrobenchmarkDBFactory.py:
--------------------------------------------------------------------------------
1 | from django.utils import timezone
2 | from factory import Faker
3 | from factory.django import DjangoModelFactory
4 | from pss_project.api.models.database.MicrobenchmarkResult import MicrobenchmarkResult
5 |
6 |
7 | class MicrobenchmarkDBFactory(DjangoModelFactory):
8 | class Meta:
9 | model = MicrobenchmarkResult
10 |
11 | time = Faker('iso8601', tzinfo=timezone.utc)
12 | jenkins_job_id = Faker('pystr_format', string_format='###')
13 | git_branch = Faker('word')
14 | git_commit_id = Faker('sha1')
15 | db_version = Faker('word')
16 | environment = Faker('pydict', value_types=[str])
17 | benchmark_suite = Faker('word')
18 | benchmark_name = Faker('word')
19 | threads = Faker('random_int', min=1, max=16)
20 | min_runtime = Faker('random_int', min=30, step=30)
21 | wal_device = Faker('random_element', elements=('RAM disk', 'HDD', 'SATA SSD', 'NVMe SSD', 'None'))
22 | metrics = Faker('pydict', value_types=[int, float])
23 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/models/test_ArtifactStatsResults.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 |
3 | from pss_project.api.models.database.ArtifactStatsResult import ArtifactStatsResult
4 | from pss_project.api.tests.factories.database.ArtifactStatsDBFactory import ArtifactStatsDBFactory
5 |
6 |
7 | class TestArtifactStatsResults(TestCase):
8 |
9 | def test_save(self):
10 | artifact_stats_result = ArtifactStatsDBFactory()
11 | artifact_stats_result.save()
12 | all_db_objects = ArtifactStatsResult.objects.all()
13 | self.assertEqual(all_db_objects.count(), 1)
14 |
15 | def test_smudge_time_save(self):
16 | artifact_stats_result_1 = ArtifactStatsDBFactory()
17 | artifact_stats_result_1.save()
18 | artifact_stats_result_2 = ArtifactStatsDBFactory()
19 | artifact_stats_result_2.time = artifact_stats_result_1.time
20 | artifact_stats_result_2.save()
21 | all_db_objects = ArtifactStatsResult.objects.all()
22 | self.assertEqual(all_db_objects.count(), 2)
23 |
--------------------------------------------------------------------------------
/deployments/playbooks/create-namespaces.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: "{{ host_override | default('k8s_master') }}"
3 | name: Create Namespaces
4 | vars:
5 | dir_k8s: "{{ inventory_dir }}/kubernetes"
6 | pre_tasks:
7 | - name: Ensure k8s module dependencies are installed.
8 | pip:
9 | name: openshift
10 | state: present
11 | tasks:
12 | - name: Create Namespaces
13 | register: create_namespaces
14 | vars:
15 | namespaces_file: "{{ dir_k8s }}/namespaces.yml"
16 | community.kubernetes.k8s:
17 | state: present
18 | definition: "{{ lookup('template', '{{ namespaces_file }}') }}"
19 |
20 | - name: Changes
21 | debug:
22 | msg:
23 | - "Were namespaces changed?"
24 | - "{{ create_namespaces.changed }}"
25 |
26 | - name: Change Details
27 | debug:
28 | msg:
29 | - "The '{{ item.result.metadata.name}}' namespace
30 | was {{ '' if item.changed else 'not' }} changed."
31 | - "It's status is {{ item.result.status}}"
32 | with_items: "{{ create_namespaces.result.results }}"
33 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/MicrobenchmarkRestFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.MicrobenchmarkRest import MicrobenchmarkRest
3 | from pss_project.api.tests.factories.rest.metadata.MetadataFactory \
4 | import MetadataFactory
5 | from pss_project.api.tests.factories.rest.parameters.MicrobenchmarkParametersFactory \
6 | import MicrobenchmarkParametersFactory
7 | from pss_project.api.tests.factories.rest.metrics.MicrobenchmarkMetricsFactory \
8 | import MicrobenchmarkMetricsFactory
9 | from pss_project.api.tests.utils.utils import generate_dict_factory
10 |
11 |
12 | class MicrobenchmarkRestFactory(Factory):
13 | class Meta:
14 | model = MicrobenchmarkRest
15 | metadata = generate_dict_factory(MetadataFactory)()
16 | timestamp = Faker('date_time')
17 | test_suite = Faker('word')
18 | test_name = Faker('word')
19 | parameters = generate_dict_factory(MicrobenchmarkParametersFactory)()
20 | metrics = generate_dict_factory(MicrobenchmarkMetricsFactory)()
21 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/models/test_MicrobenchmarkResult.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 |
3 | from pss_project.api.models.database.MicrobenchmarkResult import MicrobenchmarkResult
4 | from pss_project.api.tests.factories.database.MicrobenchmarkDBFactory import MicrobenchmarkDBFactory
5 |
6 |
7 | class TestMicrobenchmarkResults(TestCase):
8 |
9 | def test_save(self):
10 | microbenchmark_result = MicrobenchmarkDBFactory()
11 | microbenchmark_result.save()
12 | all_db_objects = MicrobenchmarkResult.objects.all()
13 | self.assertEqual(all_db_objects.count(), 1)
14 |
15 | def test_smudge_time_save(self):
16 | microbenchmark_result_1 = MicrobenchmarkDBFactory()
17 | microbenchmark_result_1.save()
18 | microbenchmark_result_2 = MicrobenchmarkDBFactory()
19 | microbenchmark_result_2.time = microbenchmark_result_1.time
20 | microbenchmark_result_2.save()
21 | all_db_objects = MicrobenchmarkResult.objects.all()
22 | self.assertEqual(all_db_objects.count(), 2)
23 |
--------------------------------------------------------------------------------
/deployments/roles/install_k8s/tasks/install.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: install apt transport HTTPS
4 | apt:
5 | name: apt-transport-https
6 | update_cache: yes
7 |
8 | - name: add kubernetes GPG key
9 | apt_key:
10 | url: https://packages.cloud.google.com/apt/doc/apt-key.gpg
11 |
12 | - name: add kubernetes to the apt repository
13 | apt_repository:
14 | repo: deb http://apt.kubernetes.io/ kubernetes-xenial main
15 | state: present
16 | filename: "kubernetes"
17 |
18 | - name: install kubelet agent and kubeadm CLI
19 | apt:
20 | pkg:
21 | - kubelet
22 | - kubeadm
23 | update_cache: true
24 |
25 | - name: change native cgroupdriver to systemd for docker
26 | copy:
27 | src: daemon.json
28 | dest: /etc/docker/daemon.json
29 |
30 | - name: start and enable kubelet
31 | systemd:
32 | name: kubelet
33 | state: started
34 | enabled: yes
35 |
36 | - name: install kubectl CLI
37 | apt:
38 | name: kubectl
39 | update_cache: true
40 | when: k8s_role is defined and k8s_role == "master"
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/MicrobenchmarkSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, CharField
2 | from pss_project.api.serializers.rest.metadata.MetadataSerializer import MetadataSerializer
3 | from pss_project.api.serializers.fields.UnixEpochDatetimeField import UnixEpochDateTimeField
4 | from pss_project.api.serializers.rest.parameters.MicrobenchmarkParametersSerializer import (
5 | MicrobenchmarkParametersSerializer)
6 | from pss_project.api.serializers.rest.metrics.MicrobenchmarkMetricsSerializer import MicrobenchmarkMetricsSerializer
7 | from pss_project.api.models.rest.MicrobenchmarkRest import MicrobenchmarkRest
8 |
9 |
10 | class MicrobenchmarkSerializer(Serializer):
11 | # Fields
12 | metadata = MetadataSerializer()
13 | timestamp = UnixEpochDateTimeField()
14 | test_suite = CharField()
15 | test_name = CharField()
16 | parameters = MicrobenchmarkParametersSerializer()
17 | metrics = MicrobenchmarkMetricsSerializer()
18 |
19 | def create(self, validated_data):
20 | return MicrobenchmarkRest(**validated_data)
21 |
--------------------------------------------------------------------------------
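For orientation, serializers like the one above follow the standard DRF validate-then-save flow; a minimal sketch, where `payload` stands in for the parsed JSON body of a request and is not from the repo:

```python
from pss_project.api.serializers.rest.MicrobenchmarkSerializer import MicrobenchmarkSerializer

payload = {}  # stands in for the parsed JSON body of an incoming request
serializer = MicrobenchmarkSerializer(data=payload)
if serializer.is_valid():
    # save() with no instance dispatches to the create() override above and
    # returns a MicrobenchmarkRest object rather than a Django model.
    rest_model = serializer.save()
else:
    errors = serializer.errors  # {field_name: [validation messages]}
```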
/deployments/kubernetes/performance/openapi/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | apiVersion: apps/v1
4 | kind: Deployment
5 | metadata:
6 | name: "{{ env }}-openapi"
7 | labels:
8 | app: openapi
9 | env: "{{ env }}"
10 | namespace: performance
11 | spec:
12 | replicas: 1
13 | selector:
14 | matchLabels:
15 | app: openapi
16 | env: "{{ env }}"
17 | template:
18 | metadata:
19 | labels:
20 | app: openapi
21 | env: "{{ env }}"
22 | spec:
23 | restartPolicy: Always
24 | nodeSelector:
25 | env: "{{ env }}"
26 | containers:
27 | - name: swagger-ui
28 | image: swaggerapi/swagger-ui:v3.36.2
29 | imagePullPolicy: IfNotPresent
30 | ports:
31 | - containerPort: 8080
32 | name: http
33 | env:
34 | - name: SWAGGER_JSON
35 | value: /config/api.yaml
36 | volumeMounts:
37 | - mountPath: /config
38 | name: config
39 | volumes:
40 | - name: config
41 | configMap:
42 | name: "openapi-conf-{{ env }}"
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 CMU Database Group
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metrics/LatencyMetricsSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, DecimalField
2 | from pss_project.api.models.rest.metrics.LatencyMetrics import LatencyMetrics
3 |
4 |
5 | class LatencyMetricsSerializer(Serializer):
6 | l_25 = DecimalField(max_digits=10, decimal_places=4, coerce_to_string=False)
7 | l_75 = DecimalField(max_digits=10, decimal_places=4, coerce_to_string=False)
8 | l_90 = DecimalField(max_digits=10, decimal_places=4, coerce_to_string=False)
9 | l_95 = DecimalField(max_digits=10, decimal_places=4, coerce_to_string=False)
10 | l_99 = DecimalField(max_digits=10, decimal_places=4, coerce_to_string=False)
11 | avg = DecimalField(max_digits=10, decimal_places=4, coerce_to_string=False)
12 | median = DecimalField(max_digits=10, decimal_places=4, coerce_to_string=False)
13 | max = DecimalField(max_digits=10, decimal_places=4, coerce_to_string=False)
14 | min = DecimalField(max_digits=10, decimal_places=4, coerce_to_string=False)
15 |
16 | def create(self, validated_data):
17 | return LatencyMetrics(**validated_data)
18 |
--------------------------------------------------------------------------------
/deployments/roles/install_k8s/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: config the k8s user on the master node
4 | become: yes
5 | import_tasks: config_user.yml
6 |
7 | - name: install the kubernetes dependencies
8 | become: yes
9 | import_tasks: install.yml
10 |
11 | - name: check current cluster info
12 | become: yes
13 | command: kubectl cluster-info
14 | register: result_cluster
15 | ignore_errors: yes
16 | when:
17 | - k8s_role is defined
18 | - k8s_role == "master"
19 |
20 | - name: config the kubernetes master node
21 | become: yes
22 | # import_tasks: init_cluster.yml
23 | debug:
24 | msg: ready to init a k8s cluster
25 | when:
26 | - k8s_role is defined
27 | - k8s_role == "master"
28 | - result_cluster.rc is defined
29 | - result_cluster.rc != 0
30 |
31 | # # DONT automate joining in the cluster
32 | # # Manually join the nodes
33 | # - name: config the kubernetes worker node
34 | # become: yes
35 | # import_tasks: join_cluster.yml
36 | # when:
37 | # - k8s_role is defined
38 | # - k8s_role == "worker"
39 | # - result_cluster.rc is defined
40 | # - result_cluster.rc != 0
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/serializers/rest/metrics/OLTPBenchMetricsSerializer.py:
--------------------------------------------------------------------------------
1 | from rest_framework.serializers import Serializer, DecimalField
2 | from pss_project.api.serializers.rest.metrics.MemoryMetricsSerializer import MemorySummaryMetricsSerializer
3 | from pss_project.api.serializers.rest.metrics.LatencyMetricsSerializer import LatencyMetricsSerializer
4 | from pss_project.api.serializers.rest.metrics.IncrementalMetricsSerializer import IncrementalMetricsSerializer
5 | from pss_project.api.models.rest.metrics.OLTPBenchMetrics import OLTPBenchMetrics
6 |
7 |
8 | class OLTPBenchMetricsSerializer(Serializer):
9 | # Fields
10 | throughput = DecimalField(max_digits=24,
11 | decimal_places=15,
12 | coerce_to_string=False)
13 | latency = LatencyMetricsSerializer(required=False)
14 | memory_info = MemorySummaryMetricsSerializer(required=False)
15 | incremental_metrics = IncrementalMetricsSerializer(required=False,
16 | many=True)
17 |
18 | def create(self, validated_data):
19 | return OLTPBenchMetrics(**validated_data)
20 |
--------------------------------------------------------------------------------
/deployments/roles/install_openresty/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | # Reference: https://openresty.org/en/linux-packages.html
4 |
5 | - name: install prerequisite packages for openresty
6 | become: yes
7 | apt:
8 | pkg:
9 | - wget
10 | - gnupg
11 | - ca-certificates
12 | update_cache: yes
13 |
14 | - name: import openresty GPG key
15 | become: yes
16 | shell: wget -O - https://openresty.org/package/pubkey.gpg | apt-key add -
17 |
18 | - name: add openresty to the official apt repo
19 | become: yes
20 | shell: |
21 | echo "deb http://openresty.org/package/ubuntu $(lsb_release -sc) main" \
22 | | tee /etc/apt/sources.list.d/openresty.list
23 |
24 | - name: update apt repo cache and install openresty
25 | become: yes
26 | apt:
27 | name: openresty
28 | update_cache: yes
29 |
30 | - name: make symbolic link to the openresty config dir
31 | become: yes
32 | file:
33 | src: /usr/local/openresty/nginx/conf
34 | dest: /etc/openresty
35 | force: yes
36 | state: link
37 |
38 | - name: enable and start the openresty service
39 | become: yes
40 | systemd:
41 | name: openresty
42 | state: started
43 | enabled: yes
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/manual/local-prometheus.yml:
--------------------------------------------------------------------------------
1 |
2 | # my global config
3 | global:
4 | scrape_interval: 15s # Set the scrape interval to every 15 seconds. Default is every 1 minute.
5 | evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute.
6 | # scrape_timeout is set to the global default (10s).
7 |
8 | # Load rules once and periodically evaluate them according to the global 'evaluation_interval'.
9 | rule_files:
10 | # - "first_rules.yml"
11 | # - "second_rules.yml"
12 |
13 | # A scrape configuration containing exactly one endpoint to scrape:
14 | # Here it's Prometheus itself.
15 | scrape_configs:
16 | # The job name is added as a label `job=<job_name>` to any timeseries scraped from this config.
17 | - job_name: 'prometheus'
18 | # metrics_path defaults to '/metrics'
19 | # scheme defaults to 'http'.
20 | static_configs:
21 | - targets: ['localhost:9090']
22 |
23 | - job_name: 'pss'
24 | metrics_path: '/metrics'
25 | scrape_interval: 5s
26 | static_configs:
27 | - targets: ['host.docker.internal:8000']
28 | labels:
29 | alias: "pss"
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/parameters/OLTPBenchParametersFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.parameters.OLTPBenchParameters import (
3 | OLTPBenchParameters)
4 | from pss_project.api.tests.factories.rest.parameters.TransactionWeightFactory \
5 | import (TransactionWeightFactory)
6 |
7 |
8 | class OLTPBenchParametersFactory(Factory):
9 | class Meta:
10 | model = OLTPBenchParameters
11 |
12 | query_mode = Faker('random_element', elements=('simple', 'extended'))
13 | scale_factor = Faker('pydecimal', left_digits=6,
14 | right_digits=4, positive=True)
15 | terminals = Faker('random_int', min=1, max=16)
16 | client_time = Faker('random_int', min=30, step=30)
17 | transaction_weights = Faker('random_elements', elements=(
18 | TransactionWeightFactory().__dict__,
19 | TransactionWeightFactory().__dict__,
20 | TransactionWeightFactory().__dict__,
21 | TransactionWeightFactory().__dict__,
22 | TransactionWeightFactory().__dict__,
23 | ), unique=True)
24 | max_connection_threads = Faker('random_int', min=1, max=32)
25 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/metadata/Metadata.py:
--------------------------------------------------------------------------------
1 |
2 | from pss_project.api.models.rest.metadata.JenkinsMetadata import JenkinsMetadata
3 | from pss_project.api.models.rest.metadata.GithubMetadata import GithubMetadata
4 | from pss_project.api.models.rest.metadata.NoisePageMetadata import NoisePageMetadata
5 | from pss_project.api.models.rest.metadata.EnvironmentMetadata import EnvironmentMetadata
6 |
7 |
8 | class Metadata(object):
9 | """ This class is the model of the all the metadata data as it is represented in the HTTP API
10 | jenkins - all data relating to the job/build that reported the metrics
11 | github - all github related info (i.e. branch, commit sha)
12 | noisepage - all system specific metadata (i.e. DB version)
13 | environment - all environment metadata relating to the conditions under which the metrics were gathered """
14 |
15 | def __init__(self, jenkins, github, noisepage, environment):
16 | self.jenkins = JenkinsMetadata(**jenkins)
17 | self.github = GithubMetadata(**github)
18 | self.noisepage = NoisePageMetadata(**noisepage)
19 | self.environment = EnvironmentMetadata(**environment)
20 |
--------------------------------------------------------------------------------
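An illustrative input for `Metadata(**metadata)`; the nested key names are inferred from the attribute accesses in BaseRest and MicrobenchmarkRest elsewhere in this dump, and all values are made up:

```python
metadata = {
    'jenkins': {'jenkins_job_id': '42'},
    'github': {'git_branch': 'master', 'git_commit_id': '0a1b2c3'},
    'noisepage': {'db_version': '1.0.0'},
    'environment': {'os_version': 'Ubuntu 20.04', 'cpu_number': 8,
                    'cpu_socket': '1', 'wal_device': 'NVMe SSD'},
}
```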
/performance-storage-service/pss_project/api/models/rest/metrics/MicrobenchmarkMetrics.py:
--------------------------------------------------------------------------------
1 | from pss_project.api.models.rest.metrics.BasePerformanceMetrics import BasePerformanceMetrics
2 |
3 |
4 | class MicrobenchmarkMetrics(BasePerformanceMetrics):
5 | """ This class is the model of the microbenchmark metrics as represented in the HTTP API
6 | tolerance - the manually set % decrease in performance that is allowed
7 | iterations - the number of time the microbenchmark was run to get a statistically meaningful result.
8 | status - PASS or FAIL which is determined by whether the manual tolerance is violated
9 | ref_throughput - the 30 day rolling average of the microbenchmark
10 | stdev_throughput - the standard deviation of the last 30 days of results
11 | """
12 |
13 | def __init__(self, throughput, tolerance, iterations, status=None, ref_throughput=None, stdev_throughput=None):
14 | super().__init__(throughput)
15 | self.tolerance = tolerance
16 | self.iterations = iterations
17 | self.status = status
18 | self.ref_throughput = ref_throughput
19 | self.stdev_throughput = stdev_throughput
20 |
--------------------------------------------------------------------------------
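Constructing the model above directly, with illustrative values:

```python
from pss_project.api.models.rest.metrics.MicrobenchmarkMetrics import MicrobenchmarkMetrics

metrics = MicrobenchmarkMetrics(
    throughput=12543.2,      # inherited from BasePerformanceMetrics
    tolerance=10,            # allowed % drop before the run fails
    iterations=3,
    status='PASS',
    ref_throughput=12890.7,  # 30-day rolling average
    stdev_throughput=150.4,
)
```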
/deployments/kubernetes/performance/timescaledb/deployment_backup.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: Deployment
4 | metadata:
5 | name: timescaledb
6 | namespace: performance
7 | labels:
8 | app: timescaledb
9 | env: "{{ env }}"
10 | spec:
11 | replicas: 1
12 | strategy:
13 | type: RollingUpdate
14 | selector:
15 | matchLabels:
16 | app: timescaledb
17 | env: "{{ env }}"
18 | template:
19 | metadata:
20 | labels:
21 | app: timescaledb
22 | env: "{{ env }}"
23 | spec:
24 | nodeSelector:
25 | env: "{{ env }}"
26 | volumes:
27 | - name: timescaledb-volume
28 | containers:
29 | - name: timescaledb
30 | image: "{{ timescaledb_container_image }}"
31 | imagePullPolicy: "{{ image_pull_policy }}"
32 | ports:
33 | - name: http
34 | containerPort: 5432
35 | env:
36 | - name: POSTGRES_USER
37 | value: postgres
38 | - name: POSTGRES_PASSWORD
39 | value: postgres
40 | volumeMounts:
41 | - name: timescaledb-volume
42 | mountPath: /pg-data
43 |
44 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0002_add_user.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.6 on 2020-07-22 06:22
2 |
3 | from django.db import migrations
4 | from django.contrib.auth.models import User
5 | from pss_project.settings.utils import get_environ_value
6 | from django.db import transaction
7 | from django.utils import timezone
8 |
9 | def add_user(apps, schema_editor):
10 | try:
11 | user = User.objects.create_user(username=get_environ_value('PSS_CREATOR_USER'), password=get_environ_value('PSS_CREATOR_PASSWORD'), last_login=timezone.now())
12 | user.save()
13 | except Exception as e:
14 | msg = 'Migration error: create user failed'
15 | print(msg)
16 | print(e)
17 | raise e
18 |
19 | def remove_user(apps, schema_editor):
20 | try:
21 | user = User.objects.get(username=get_environ_value('PSS_CREATOR_USER'))
22 | user.delete()
23 | except Exception:
24 | msg = 'Migration error: delete user failed'
25 | print(msg)
26 |
27 | class Migration(migrations.Migration):
28 |
29 | dependencies = [
30 | ('api', '0001_initial'),
31 | ]
32 |
33 | operations = [
34 | migrations.RunPython(add_user, remove_user),
35 | ]
36 |
--------------------------------------------------------------------------------
/deployments/roles/config_ssl/tasks/install_ssl.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: set var for the path to the SSL folder of "{{ domain }}"
4 | set_fact:
5 | dir_domain_ssl: "{{ dir_openresty_ssl }}/{{ domain }}"
6 |
7 | - name: ensure the SSL folder exists for "{{ domain }}"
8 | become: yes
9 | file:
10 | path: "{{ dir_domain_ssl }}"
11 | state: directory
12 | owner: root
13 | mode: 0755
14 |
15 | - name: check if a certificate is currently still valid, ignoring failures
16 | openssl_certificate:
17 | path: "{{ dir_domain_ssl }}/cert.pem"
18 | provider: assertonly
19 | issuer:
20 | O: Let's Encrypt
21 | has_expired: no
22 | ignore_errors: yes
23 | register: validity_check
24 |
25 | - name: install a valid certificate in case the initial check failed
26 | become: yes
27 | block:
28 | - name: issue the cert for "{{ domain }}"
29 | shell: |
30 | /root/.acme.sh/acme.sh --issue -d {{ domain }} -w {{ dir_domain_ssl }}
31 |
32 | - name: install the cert to openresty
33 | shell: |
34 | /root/.acme.sh/acme.sh --install-cert -d {{ domain }} \
35 | --key-file {{ dir_domain_ssl }}/key.pem \
36 | --fullchain-file {{ dir_domain_ssl }}/cert.pem \
37 | --reloadcmd 'openresty -s reload'
38 | when: validity_check.failed
39 |
40 |
--------------------------------------------------------------------------------
/timescaledb/test_data/smallbank/oltpbench.expconfig:
--------------------------------------------------------------------------------
1 | [XML experiment config; the element markup was lost in extraction. Recoverable settings:
2 | isolation=TRANSACTION_SERIALIZABLE, scalefactor=1.0, terminals=16, rate=unlimited,
3 | transaction weights 15/15/15/25/15/15 for the transaction types Amalgamate, Balance,
4 | DepositChecking, SendPayment, TransactSavings, WriteCheck; plus one numeric setting (4)
5 | whose element name was lost.]
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0011_auto_20210105_1158.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2021-01-05 16:58
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('api', '0010_auto_20201121_2224'),
10 | ]
11 |
12 | operations = [
13 | migrations.RemoveField(
14 | model_name='artifactstatsresult',
15 | name='id',
16 | ),
17 | migrations.RemoveField(
18 | model_name='microbenchmarkresult',
19 | name='id',
20 | ),
21 | migrations.RemoveField(
22 | model_name='oltpbenchresult',
23 | name='id',
24 | ),
25 | migrations.AlterField(
26 | model_name='artifactstatsresult',
27 | name='time',
28 | field=models.DateTimeField(primary_key=True, serialize=False),
29 | ),
30 | migrations.AlterField(
31 | model_name='microbenchmarkresult',
32 | name='time',
33 | field=models.DateTimeField(primary_key=True, serialize=False),
34 | ),
35 | migrations.AlterField(
36 | model_name='oltpbenchresult',
37 | name='time',
38 | field=models.DateTimeField(primary_key=True, serialize=False),
39 | ),
40 | ]
41 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0008_binarymetricsresult.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.6 on 2020-10-30 18:26
2 |
3 | import django.contrib.postgres.fields.jsonb
4 | import django.core.serializers.json
5 | from django.db import migrations, models
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('api', '0007_remove_microbenchmarkresult_query_mode'),
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='BinaryMetricsResult',
17 | fields=[
18 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
19 | ('time', models.DateTimeField()),
20 | ('jenkins_job_id', models.CharField(max_length=15)),
21 | ('git_branch', models.CharField(max_length=255)),
22 | ('git_commit_id', models.CharField(max_length=40)),
23 | ('db_version', models.CharField(max_length=255)),
24 | ('environment', django.contrib.postgres.fields.jsonb.JSONField()),
25 | ('metrics', django.contrib.postgres.fields.jsonb.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder)),
26 | ],
27 | options={
28 | 'db_table': 'binary_metrics_results',
29 | },
30 | ),
31 | ]
32 |
--------------------------------------------------------------------------------
/deployments/roles/config_openresty/templates/external.nginx:
--------------------------------------------------------------------------------
1 | server {
2 | listen 80;
3 | resolver 8.8.8.8;
4 | server_name {{ noisepage_hostname }};
5 | return 301 https://$host$request_uri;
6 | }
7 |
8 | server {
9 | listen 443 ssl;
10 | resolver 8.8.8.8;
11 | server_name {{ noisepage_hostname }};
12 |
13 | # ssl config
14 | ssl_certificate /etc/openresty/ssl/{{ noisepage_hostname }}/cert.pem;
15 | ssl_certificate_key /etc/openresty/ssl/{{ noisepage_hostname }}/key.pem;
16 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
17 | ssl_ciphers HIGH:!aNULL:!MD5;
18 |
19 | # log config
20 | access_log {{ dir_openresty_external_log }}/access.log;
21 | error_log {{ dir_openresty_external_log }}/error.log;
22 |
23 | # verify ssl cert
24 | location ^~ /.well-known/acme-challenge {
25 | root /etc/openresty/ssl/$host;
26 | }
27 |
28 | # landing to the grafana page
29 | location / {
30 | proxy_pass_header Set-Cookie;
31 |
32 | proxy_set_header Host $host;
33 | proxy_set_header X-Real-IP $remote_addr;
34 | proxy_set_header X-Forwarded-Proto $scheme;
35 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
36 |
37 | proxy_pass http://grafana/;
38 | }
39 | }
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/MicrobenchmarkRest.py:
--------------------------------------------------------------------------------
1 | from pss_project.api.models.rest.BaseRest import BaseRest
2 | from pss_project.api.models.rest.parameters.MicrobenchmarkParameters import MicrobenchmarkParameters
3 | from pss_project.api.models.rest.metrics.MicrobenchmarkMetrics import MicrobenchmarkMetrics
4 |
5 |
6 | class MicrobenchmarkRest(BaseRest):
7 | """ This class is the model of the Microbench data as it is communicated through the HTTP API """
8 |
9 | def __init__(self, metadata, timestamp, test_suite, test_name, parameters, metrics):
10 | super().__init__(metadata, timestamp)
11 | self.test_suite = test_suite
12 | self.test_name = test_name
13 | self.parameters = MicrobenchmarkParameters(**parameters)
14 | self.metrics = MicrobenchmarkMetrics(**metrics)
15 |
16 | def convert_to_db_json(self):
17 | """ Convert the API model into a dict that can be used to instantiate a MicrobenchmarkResult object """
18 | data = super().convert_to_db_json()
19 | microbench_data = {
20 | 'benchmark_suite': self.test_suite,
21 | 'benchmark_name': self.test_name,
22 | 'threads': self.parameters.threads,
23 | 'min_runtime': self.parameters.min_runtime,
24 | 'wal_device': self.metadata.environment.wal_device,
25 | }
26 | data.update(microbench_data)
27 | return data
28 |
--------------------------------------------------------------------------------
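For reference, `convert_to_db_json()` merges the base fields assembled in BaseRest (shown later in this dump) with the microbenchmark-specific keys; a sketch of the resulting dict, with illustrative values throughout:

```python
db_json = {
    # from BaseRest.convert_to_db_json()
    'time': 1610000000000,
    'git_branch': 'master',
    'git_commit_id': '0a1b2c3',
    'jenkins_job_id': '42',
    'db_version': '1.0.0',
    'environment': {'os_version': 'Ubuntu 20.04', 'cpu_number': 8, 'cpu_socket': '1'},
    'metrics': {'throughput': 12543.2, 'tolerance': 10, 'iterations': 3,
                'status': 'PASS', 'ref_throughput': 12890.7, 'stdev_throughput': 150.4},
    # added by MicrobenchmarkRest.convert_to_db_json()
    'benchmark_suite': 'data_table',
    'benchmark_name': 'SimpleInsert',
    'threads': 4,
    'min_runtime': 30,
    'wal_device': 'None',
}
```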
/grafana/README.md:
--------------------------------------------------------------------------------
1 | # Grafana
2 |
3 | Grafana will be used by NoisePage to query, visualize, and understand the metrics stored in TimescaleDB.
4 |
5 | ### Related Deployment Files
6 | `/deployments/kubernetes/performance/grafana/*`
7 |
8 | `/deployments/kubernetes/namespaces.yml`
9 |
10 | `/deployments/playbooks/grafana-deployment.yml`
11 |
12 | `/deployments/playbooks/create-namespaces.yml`
13 |
14 |
15 |
16 | ### Running locally
17 | Make sure you have Docker Desktop and Ansible installed.
18 |
19 | #### Prerequisite
20 | Make sure your docker-desktop kubernetes node is labeled with `env=local`.
21 |
22 | To do this, run `kubectl label nodes docker-desktop env=local`.
23 |
24 | #### Execution
25 | ```bash
26 | ansible-playbook -i inventory playbooks/create-namespaces.yml -e "env=local host_override=local"
27 |
28 | ansible-playbook -i inventory playbooks/grafana-deployment.yml -e "env=local host_override=local"
29 | ```
30 | To verify, try opening a browser to `http://localhost:32000/`.
31 |
32 | To delete the local deployment:
33 | ```bash
34 | kubectl delete pods,service,deployment -n performance --all
35 | ```
36 |
37 | ### Create a dashboard
38 | See the documentation here: https://grafana.com/docs/grafana/latest/getting-started/getting-started/#step-3-create-a-dashboard
39 |
40 | Be sure to **store** the dashboard JSON model in the folder: [noisepage-stats/grafana/dashboards/](https://github.com/cmu-db/noisepage-stats/tree/master/grafana/dashboards)
41 |
--------------------------------------------------------------------------------
/deployments/roles/config_openresty/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: update the nginx.conf
4 | become: yes
5 | copy:
6 | src: nginx.conf
7 | dest: "{{ dir_openresty }}"
8 |
9 | - name: ensure conf.d dir exists
10 | become: yes
11 | file:
12 | path: "{{ dir_openresty_conf }}"
13 | state: directory
14 | owner: root
15 | mode: "0755"
16 |
17 | - name: update conf for k8s master
18 | block:
19 | - name: update the k8s master conf
20 | become: yes
21 | import_tasks: update_conf.yml
22 | vars:
23 | domain: "{{ hostname }}"
24 | conf_template: k8s_master.nginx
25 | when: k8s_role == "master"
26 |
27 | - name: update conf for k8s workers
28 | block:
29 | - name: update the internal conf
30 | become: yes
31 | import_tasks: update_conf.yml
32 | vars:
33 | domain: "{{ hostname }}"
34 | conf_template: internal.nginx
35 |
36 | - name: update the external conf
37 | become: yes
38 | import_tasks: update_conf.yml
39 | vars:
40 | domain: "{{ noisepage_hostname }}"
41 | conf_template: external.nginx
42 | when: env == "production"
43 | when: k8s_role == "worker"
44 |
45 | - name: test config syntax
46 | become: yes
47 | command: openresty -t
48 | register: result_test
49 |
50 | - name: reload openresty
51 | become: yes
52 | command: openresty -s reload
53 | when: result_test.rc is defined and result_test.rc == 0
54 |
55 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/factories/rest/metrics/OLTPBenchMetricsFactory.py:
--------------------------------------------------------------------------------
1 | from factory import Factory, Faker
2 | from pss_project.api.models.rest.metrics.OLTPBenchMetrics import OLTPBenchMetrics
3 | from pss_project.api.tests.factories.rest.metrics.LatencyMetricsFactory import LatencyMetricsFactory
4 | from pss_project.api.tests.factories.rest.metrics.MemoryMetricsFactory import MemorySummaryMetricsFactory
5 | from pss_project.api.tests.factories.rest.metrics.IncrementalMetricsFactory import IncrementalMetricsFactory
6 | from pss_project.api.tests.utils.utils import generate_dict_factory
7 |
8 |
9 | class OLTPBenchMetricsFactory(Factory):
10 | class Meta:
11 | model = OLTPBenchMetrics
12 |
13 | throughput = Faker('pydecimal',
14 | left_digits=9,
15 | right_digits=15,
16 | positive=True)
17 | latency = LatencyMetricsFactory().__dict__
18 | memory_info = MemorySummaryMetricsFactory().__dict__
19 | incremental_metrics = Faker(
20 | 'random_elements',
21 | elements=(
22 | generate_dict_factory(IncrementalMetricsFactory)(),
23 | generate_dict_factory(IncrementalMetricsFactory)(),
24 | generate_dict_factory(IncrementalMetricsFactory)(),
25 | generate_dict_factory(IncrementalMetricsFactory)(),
26 | generate_dict_factory(IncrementalMetricsFactory)(),
27 | ),
28 | unique=True)
29 |
--------------------------------------------------------------------------------
/timescaledb/test_data/tatp/oltpbench.expconfig:
--------------------------------------------------------------------------------
1 | [XML experiment config; the element markup was lost in extraction. Recoverable settings:
2 | isolation=TRANSACTION_SERIALIZABLE, scalefactor=1.0, terminals=2, rate=unlimited,
3 | transaction weights 2/35/10/35/2/14/2 for the transaction types DeleteCallForwarding,
4 | GetAccessData, GetNewDestination, GetSubscriberData, InsertCallForwarding,
5 | UpdateLocation, UpdateSubscriberData; plus one numeric setting (4) whose element name was lost.]
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/timescaledb/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: StatefulSet
4 | metadata:
5 | name: "{{ env }}-timescaledb"
6 | namespace: performance
7 | labels:
8 | app: timescaledb
9 | env: "{{ env }}"
10 | spec:
11 | replicas: 1
12 | updateStrategy:
13 | type: RollingUpdate
14 | selector:
15 | matchLabels:
16 | app: timescaledb
17 | env: "{{ env }}"
18 | template:
19 | metadata:
20 | labels:
21 | app: timescaledb
22 | env: "{{ env }}"
23 | spec:
24 | nodeSelector:
25 | env: "{{ env }}"
26 | volumes:
27 | - name: timescaledb-volume
28 | persistentVolumeClaim:
29 | claimName: timescaledb-pv-claim
30 | containers:
31 | - name: timescaledb
32 | image: "{{ timescaledb_container_image }}"
33 | imagePullPolicy: "{{ image_pull_policy }}"
34 | args: ["-c", "max_connections=500"]
35 | ports:
36 | - containerPort: 5432
37 | env:
38 | - name: POSTGRES_USER
39 | valueFrom:
40 | secretKeyRef:
41 | name: "secrets-{{ env }}"
42 | key: pss_db_user
43 | - name: POSTGRES_PASSWORD
44 | valueFrom:
45 | secretKeyRef:
46 | name: "secrets-{{ env }}"
47 | key: pss_db_password
48 | volumeMounts:
49 | - name: timescaledb-volume
50 | mountPath: /var/lib/postgresql/data
51 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/constants.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from pss_project.settings.utils import get_environ_value
3 |
4 | service_start_time = datetime.now()
5 |
6 | # Query mode validation
7 | SIMPLE_MODE = 'simple'
8 | EXTENDED_MODE = 'extended'
9 | QUERY_MODE_CHOICES = [
10 | (SIMPLE_MODE, 'simple'),
11 | (EXTENDED_MODE, 'extended'),
12 | ]
13 |
14 | # WAL device validation
15 | RAM_DISK = 'RAM disk'
16 | HDD = 'HDD'
17 | SATA_SSD = 'SATA SSD'
18 | NVME_SSD = 'NVMe SSD'
19 | NONE = 'None'
20 | WAL_DEVICE_CHOICES = [
21 | (RAM_DISK, 'RAM disk'),
22 | (HDD, 'HDD'),
23 | (SATA_SSD, 'SATA SSD'),
24 | (NVME_SSD, 'NVMe SSD'),
25 | (NONE, 'None')
26 | ]
27 |
28 | # Microbenchmark status validation
29 | PASS = 'PASS'
30 | FAIL = 'FAIL'
31 | MICROBENCHMARK_STATUS_CHOICES = [
32 | (PASS, 'PASS'),
33 | (FAIL, 'FAIL')
34 | ]
35 |
36 | # Github Integration
37 | GITHUB_APP_ID = 86997
38 | ALLOWED_EVENTS = ['pull_request', 'status']
39 | # The status context that is sent when the Jenkins pipeline finishes
40 | CI_STATUS_CONTEXT = 'continuous-integration/jenkins/pr-merge'
41 | GITHUB_APP_WEBHOOK_SECRET = get_environ_value('GITHUB_APP_WEBHOOK_SECRET')
42 | GITHUB_APP_PRIVATE_KEY = get_environ_value('GITHUB_APP_PRIVATE_KEY')
43 |
44 | GITHUB_WEBHOOK_HASH_HEADER = 'HTTP_X_HUB_SIGNATURE_256'
45 | GITHUB_EVENT_HEADER = 'HTTP_X_GITHUB_EVENT'
46 |
47 | # Github NoisePage Client
48 | REPO_OWNER = 'cmu-db'
49 | REPO_NAME = 'noisepage'
50 | GITHUB_BASE_URL = 'https://api.github.com/'
51 |
52 | MASTER_BRANCH_NAME = 'origin/master'
53 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/utils/utils.py:
--------------------------------------------------------------------------------
1 | import base64
2 | from functools import partial
3 | from typing import Any, Dict
4 | from datetime import datetime
5 | from time import mktime
6 | from decimal import Decimal
7 |
8 | from factory import Factory
9 | from factory.base import StubObject
10 |
11 |
12 | def generate_dict_factory(factory: Factory):
13 | def convert_dict_from_stub(stub: StubObject) -> Dict[str, Any]:
14 | stub_dict = stub.__dict__ if not isinstance(stub, dict) else stub
15 | for key, value in stub_dict.items():
16 | if isinstance(value, StubObject):
17 | stub_dict[key] = convert_dict_from_stub(value)
18 | elif isinstance(value, Decimal):
19 | stub_dict[key] = float(value)
20 | elif isinstance(value, datetime):
21 | stub_dict[key] = int(
22 | mktime(value.timetuple())*1e3 +
23 | value.microsecond/1e3
24 | )
25 | return stub_dict
26 |
27 | def dict_factory(factory, **kwargs):
28 | stub = factory.stub(**kwargs)
29 | stub_dict = convert_dict_from_stub(stub)
30 | return stub_dict
31 |
32 | return partial(dict_factory, factory)
33 |
34 |
35 | def get_basic_auth_header(username, password):
36 | """
37 | Used in tests to create the basic authentication string.
38 | :param username: string
39 | :param password: string
40 | :return:
41 | """
42 | return 'Basic {}'.format(base64.b64encode(('{}:{}'.format(username, password)).encode('ascii')).decode())
43 |
--------------------------------------------------------------------------------
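Example usage of the two helpers above; the auth header shown is the real base64 encoding of `admin:secret`, everything else is illustrative:

```python
from pss_project.api.tests.factories.rest.metadata.MetadataFactory import MetadataFactory
from pss_project.api.tests.utils.utils import generate_dict_factory, get_basic_auth_header

MetadataDict = generate_dict_factory(MetadataFactory)
payload = MetadataDict()  # plain dict: nested stubs flattened,
                          # Decimal -> float, datetime -> epoch milliseconds

header = get_basic_auth_header('admin', 'secret')
assert header == 'Basic YWRtaW46c2VjcmV0'
```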
/performance-storage-service/pss_project/api/models/rest/BaseRest.py:
--------------------------------------------------------------------------------
1 | from pss_project.api.models.rest.metadata.Metadata import Metadata
2 |
3 |
4 | class BaseRest(object):
5 | """ The based class for all objects communicated through the HTTP API """
6 |
7 | def __init__(self, metadata, timestamp):
8 | self.metadata = Metadata(**metadata)
9 | self.timestamp = timestamp
10 |
11 | def convert_to_db_json(self):
12 | """ Convert the base class attributes into a dict that can be used to instantiate database object models """
13 | data = {
14 | 'time': self.timestamp,
15 | 'git_branch': self.metadata.github.git_branch,
16 | 'git_commit_id': self.metadata.github.git_commit_id,
17 | 'jenkins_job_id': self.metadata.jenkins.jenkins_job_id,
18 | 'db_version': self.metadata.noisepage.db_version,
19 | 'environment': convert_environment_to_dict(self.metadata.environment),
20 | 'metrics': self.convert_metrics_to_dict(self.metrics),
21 | }
22 | return data
23 |
24 | def convert_metrics_to_dict(self, metrics):
25 | """ Convert the metrics object to a dict. This should be overridden when the metrics JSON is nested or has a
26 | special format """
27 | return metrics.__dict__
28 |
29 |
30 | def convert_environment_to_dict(environment):
31 | db_formatted_environment = {
32 | 'os_version': environment.os_version,
33 | 'cpu_number': environment.cpu_number,
34 | 'cpu_socket': environment.cpu_socket
35 | }
36 | return db_formatted_environment
37 |
--------------------------------------------------------------------------------
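A sketch of the extension pattern BaseRest expects: a subclass attaches a `metrics` object and may override `convert_metrics_to_dict()` when `__dict__` is not the right representation. The class and field below are illustrative, not from the repo:

```python
from pss_project.api.models.rest.BaseRest import BaseRest


class ExampleRest(BaseRest):
    def __init__(self, metadata, timestamp, metrics):
        super().__init__(metadata, timestamp)
        self.metrics = metrics

    def convert_metrics_to_dict(self, metrics):
        # Flatten explicitly instead of relying on metrics.__dict__.
        return {'throughput': float(metrics.throughput)}
```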
/performance-storage-service/pss_project/api/tests/factories/database/OLTPBenchDBFactory.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from django.utils import timezone
3 | from factory import Faker, Dict, LazyAttribute
4 | from factory.django import DjangoModelFactory
5 | from pss_project.api.models.database.OLTPBenchResult import OLTPBenchResult
6 |
7 |
8 | class OLTPBenchDBFactory(DjangoModelFactory):
9 | class Meta:
10 | model = OLTPBenchResult
11 | django_get_or_create = ('git_branch', 'git_commit_id',)
12 |
13 | time = LazyAttribute(lambda _: datetime.now(timezone.utc).isoformat())
14 | query_mode = Faker('random_element', elements=('simple', 'extended'))
15 | jenkins_job_id = Faker('pystr_format', string_format='###')
16 | git_branch = Faker('word')
17 | git_commit_id = Faker('sha1')
18 | db_version = Faker('word')
19 | environment = Faker('pydict', value_types=[str])
20 | benchmark_type = Faker('word')
21 | scale_factor = Faker('pyfloat', left_digits=6,
22 | right_digits=4, positive=True)
23 | terminals = Faker('random_int', min=1, max=32)
24 | client_time = Faker('random_int', min=30, step=30)
25 | weights = Faker('pydict', value_types=[int])
26 | wal_device = Faker('random_element', elements=('RAM disk', 'HDD', 'SATA SSD', 'NVMe SSD', 'None'))
27 | max_connection_threads = Faker('random_int', min=1, max=32)
28 | metrics = Dict({
29 | 'throughput': Faker('pyfloat', positive=True),
30 | 'latency': Faker('pydict', value_types=[float])
31 | })
32 | incremental_metrics = Faker('pydict', value_types=[int, float, str])
33 |
--------------------------------------------------------------------------------
/deployments/playbooks/pss-deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: "{{ host_override | default('k8s_master') }}"
3 | name: Deploy Performance Storage Service
4 | vars:
5 | dir_repo: "{{ inventory_dir }}"
6 | dir_k8s_pss: "{{ dir_repo }}/kubernetes/performance/performance-storage-service"
7 | service_port_dict:
8 | local: 30001
9 | testing: 30001
10 | staging: 31001
11 | production: 32001
12 | service_port: "{{ service_port_dict[env] }}"
13 | pre_tasks:
14 | - name: Ensure k8s module dependencies are installed.
15 | pip:
16 | name: openshift
17 | state: present
18 | tasks:
19 | - name: Remove Old Migration
20 | vars:
21 | job_file: "{{ dir_k8s_pss }}/migration-job.yml"
22 | community.kubernetes.k8s:
23 | state: absent
24 | definition: "{{ lookup('template', '{{ job_file }}') }}"
25 |
26 | - name: Migrate Database
27 | vars:
28 | job_file: "{{ dir_k8s_pss }}/migration-job.yml"
29 | community.kubernetes.k8s:
30 | state: present
31 | definition: "{{ lookup('template', '{{ job_file }}') }}"
32 |
33 | - name: Create Performance Storage Service Deployment
34 | vars:
35 | deployment_file: "{{ dir_k8s_pss }}/deployment.yml"
36 | community.kubernetes.k8s:
37 | state: present
38 | definition: "{{ lookup('template', '{{ deployment_file }}') }}"
39 |
40 | - name: Create Performance Service
41 | vars:
42 | service_file: "{{ dir_k8s_pss }}/service.yml"
43 | community.kubernetes.k8s:
44 | state: present
45 | definition: "{{ lookup('template', '{{ service_file }}') }}"
46 |
--------------------------------------------------------------------------------
/deployments/scripts/make_secrets_monitoring.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ENV="k8s-master"
3 | DIR_BASE="$(dirname $(realpath $0))"
4 | DIR_ENV="$DIR_BASE/$ENV"
5 | NAMESPACE="monitoring"
6 | SECRET_NAME="secrets-$ENV"
7 | HELP="Usage: ./make_secrets_monitoring.sh"
8 |
9 | # testing env
10 | TESTING_DB_USER="$(cat testing/pss_db_user)"
11 | TESTING_DB_PASSWORD="$(cat testing/pss_db_password)"
12 | TESTING_DB_HOST="incrudibles-testing.db.pdl.cmu.edu"
13 | TESTING_DB_PORT="30003"
14 | TESTING_DB_NAME="postgresql://$TESTING_DB_USER:$TESTING_DB_PASSWORD@$TESTING_DB_HOST:$TESTING_DB_PORT/pss_database?sslmode=disable"
15 | # staging env
16 | STAGING_DB_USER="$(cat staging/pss_db_user)"
17 | STAGING_DB_PASSWORD="$(cat staging/pss_db_password)"
18 | STAGING_DB_HOST="incrudibles-staging.db.pdl.cmu.edu"
19 | STAGING_DB_PORT="31003"
20 | STAGING_DB_NAME="postgresql://$STAGING_DB_USER:$STAGING_DB_PASSWORD@$STAGING_DB_HOST:$STAGING_DB_PORT/pss_database?sslmode=disable"
21 | # production env
22 | PRODUCTION_DB_USER="$(cat production/pss_db_user)"
23 | PRODUCTION_DB_PASSWORD="$(cat production/pss_db_password)"
24 | PRODUCTION_DB_HOST="incrudibles-production.db.pdl.cmu.edu"
25 | PRODUCTION_DB_PORT="32003"
26 | PRODUCTION_DB_NAME="postgresql://$PRODUCTION_DB_USER:$PRODUCTION_DB_PASSWORD@$PRODUCTION_DB_HOST:$PRODUCTION_DB_PORT/pss_database?sslmode=disable"
27 | # overall postgres db names
28 | DB_NAMES="$TESTING_DB_NAME,$STAGING_DB_NAME,$PRODUCTION_DB_NAME"
29 |
30 | if [ ! -d "$DIR_ENV" ]; then
31 | echo "Error: secrets files for '$ENV' were not found."
32 | exit 1
33 | fi
34 |
35 | printf '%s' "$DB_NAMES" > "$DIR_ENV/pss_db_data_sources"
36 | kubectl delete secret $SECRET_NAME -n $NAMESPACE
37 | kubectl create secret generic $SECRET_NAME -n $NAMESPACE --from-file=$DIR_ENV
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/prometheus/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: StatefulSet
4 | metadata:
5 | name: prometheus
6 | namespace: monitoring
7 | labels:
8 | app: prometheus-server
9 | spec:
10 | replicas: 1
11 | updateStrategy:
12 | type: RollingUpdate
13 | selector:
14 | matchLabels:
15 | app: prometheus-server
16 | template:
17 | metadata:
18 | labels:
19 | app: prometheus-server
20 | spec:
21 | nodeSelector:
22 | env: master
23 | containers:
24 | - name: prometheus
25 | image: prom/prometheus:v2.22.2
26 | resources:
27 | requests:
28 | memory: "1G"
29 | cpu: "1"
30 | limits:
31 | memory: "2G"
32 | cpu: "2"
33 | args:
34 | - "--config.file=/etc/prometheus/prometheus.yml"
35 | - "--storage.tsdb.path=/prometheus/"
36 | - "--storage.tsdb.retention.time=15d"
37 | - "--storage.tsdb.retention.size=50GB"
38 | # FOR REVERSED PROXY
39 | - "--web.external-url=http://localhost:9090/prometheus"
40 | - "--web.route-prefix=/"
41 | ports:
42 | - containerPort: 9090
43 | volumeMounts:
44 | - name: prometheus-config-volume
45 | mountPath: /etc/prometheus/
46 | - name: prometheus-storage-volume
47 | mountPath: /prometheus/
48 | volumes:
49 | - name: prometheus-config-volume
50 | configMap:
51 | defaultMode: 420
52 | name: prometheus-server-conf
53 | - name: prometheus-storage-volume
54 | persistentVolumeClaim:
55 | claimName: monitoring-prometheus-pv-claim
56 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/serializers/database/test_OLTPBenchResultSerializer.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 | from django.utils.dateparse import parse_datetime
3 |
4 | from pss_project.api.tests.factories.database.OLTPBenchDBFactory import OLTPBenchDBFactory
5 | from pss_project.api.tests.factories.rest.OLTPBenchRestFactory import OLTPBenchRestFactory
6 | from pss_project.api.serializers.database.OLTPBenchResultSerializer import OLTPBenchResultSerializer
7 |
8 |
9 | class TestOLTPBenchResultSerializer(TestCase):
10 |
11 | def test_serialize_model_fields(self):
12 | """ Assert """
13 | input = OLTPBenchDBFactory()
14 | serializer = OLTPBenchResultSerializer(instance=input)
15 | for key in serializer.data.keys():
16 | input_value = getattr(input, key)
17 | if isinstance(input_value, float):
18 | self.assertEqual(float(serializer.data[key]), input_value)
19 | else:
20 | self.assertEqual(serializer.data[key], input_value)
21 |
22 | def test_deserialize_model_fields(self):
23 | factory = OLTPBenchRestFactory()
24 | input = factory.convert_to_db_json()
25 | serializer = OLTPBenchResultSerializer(data=input)
26 | self.assertTrue(serializer.is_valid(), msg=serializer.errors)
27 |
28 | def test_smudge_timestamp(self):
29 | existing_db_entry = OLTPBenchDBFactory()
30 | existing_db_entry.save()
31 | factory = OLTPBenchRestFactory()
32 | factory.timestamp = parse_datetime(existing_db_entry.time)
33 | input = factory.convert_to_db_json()
34 | serializer = OLTPBenchResultSerializer(data=input)
35 | serializer.smudge_timestamp()
36 | self.assertNotEqual(serializer.initial_data['time'], parse_datetime(existing_db_entry.time))
37 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/serializers/database/test_ArtifactStatsResultSerializer.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 | from django.utils.dateparse import parse_datetime
3 |
4 | from pss_project.api.tests.factories.database.ArtifactStatsDBFactory import ArtifactStatsDBFactory
5 | from pss_project.api.tests.factories.rest.ArtifactStatsRestFactory import ArtifactStatsRestFactory
6 | from pss_project.api.serializers.database.ArtifactStatsResultSerializer import ArtifactStatsResultSerializer
7 |
8 |
9 | class TestArtifactStatsResultSerializer(TestCase):
10 | def test_serialize_model_fields(self):
11 | input = ArtifactStatsDBFactory()
12 | serializer = ArtifactStatsResultSerializer(instance=input)
13 | for key in serializer.data.keys():
14 | input_value = getattr(input, key)
15 | if isinstance(input_value, float):
16 | self.assertEqual(float(serializer.data[key]), input_value)
17 | else:
18 | self.assertEqual(serializer.data[key], input_value)
19 |
20 | def test_deserializer_model_fields(self):
21 | factory = ArtifactStatsRestFactory()
22 | input = factory.convert_to_db_json()
23 | serializer = ArtifactStatsResultSerializer(data=input)
24 | self.assertTrue(serializer.is_valid(), msg=serializer.errors)
25 |
26 | def test_smudge_timestamp(self):
27 | existing_db_entry = ArtifactStatsDBFactory()
28 | existing_db_entry.save()
29 | factory = ArtifactStatsRestFactory()
30 | factory.timestamp = parse_datetime(existing_db_entry.time)
31 | input = factory.convert_to_db_json()
32 | serializer = ArtifactStatsResultSerializer(data=input)
33 | serializer.smudge_timestamp()
34 | self.assertNotEqual(serializer.initial_data['time'], parse_datetime(existing_db_entry.time))
35 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/serializers/database/test_MicrobenchmarkResultsSerializer.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 | from django.utils.dateparse import parse_datetime
3 |
4 | from pss_project.api.tests.factories.database.MicrobenchmarkDBFactory import MicrobenchmarkDBFactory
5 | from pss_project.api.tests.factories.rest.MicrobenchmarkRestFactory import MicrobenchmarkRestFactory
6 | from pss_project.api.serializers.database.MicrobenchmarkResultSerializer import MicrobenchmarkResultSerializer
7 |
8 |
9 | class TestMicrobenchmarkResultSerializer(TestCase):
10 | def test_serialize_model_fields(self):
11 | input = MicrobenchmarkDBFactory()
12 | serializer = MicrobenchmarkResultSerializer(instance=input)
13 | for key in serializer.data.keys():
14 | input_value = getattr(input, key)
15 | if isinstance(input_value, float):
16 | self.assertEqual(float(serializer.data[key]), input_value)
17 | else:
18 | self.assertEqual(serializer.data[key], input_value)
19 |
20 | def test_deserializer_model_fields(self):
21 | factory = MicrobenchmarkRestFactory()
22 | input = factory.convert_to_db_json()
23 | serializer = MicrobenchmarkResultSerializer(data=input)
24 | self.assertTrue(serializer.is_valid(), msg=serializer.errors)
25 |
26 | def test_smudge_timestamp(self):
27 | existing_db_entry = MicrobenchmarkDBFactory()
28 | existing_db_entry.save()
29 | factory = MicrobenchmarkRestFactory()
30 | factory.timestamp = parse_datetime(existing_db_entry.time)
31 | input = factory.convert_to_db_json()
32 | serializer = MicrobenchmarkResultSerializer(data=input)
33 | serializer.smudge_timestamp()
34 | self.assertNotEqual(serializer.initial_data['time'], parse_datetime(existing_db_entry.time))
35 |
--------------------------------------------------------------------------------
/deployments/kubernetes/monitoring/postgres_exporter/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | apiVersion: apps/v1
4 | kind: Deployment
5 | metadata:
6 | name: prometheus-postgres-exporter
7 | labels:
8 | app: prometheus-postgres-exporter
9 | namespace: monitoring
10 | spec:
11 | replicas: 1
12 | selector:
13 | matchLabels:
14 | app: prometheus-postgres-exporter
15 | template:
16 | metadata:
17 | labels:
18 | app: prometheus-postgres-exporter
19 | spec:
20 | restartPolicy: Always
21 | nodeSelector:
22 | env: master
23 | containers:
24 | - name: postgres-exporter
25 | image: "wrouesnel/postgres_exporter:v0.8.0"
26 | imagePullPolicy: IfNotPresent
27 | ports:
28 | - containerPort: 9187
29 | name: psql
30 | env:
31 | - name: DATA_SOURCE_NAME
32 | valueFrom:
33 | secretKeyRef:
34 | name: secrets-k8s-master
35 | key: pss_db_data_sources
36 | - name: PG_EXPORTER_EXTEND_QUERY_PATH
37 | value: "/config/queries.yaml"
38 | volumeMounts:
39 | - mountPath: /config
40 | name: config
41 | - name: configmap-reload
42 | image: "jimmidyson/configmap-reload:v0.4.0"
43 | imagePullPolicy: "IfNotPresent"
44 | securityContext:
45 | runAsNonRoot: true
46 | runAsUser: 65534
47 | args:
48 | - --volume-dir=/etc/config
49 | - --webhook-url=http://localhost:9115/-/reload
50 | resources:
51 | {}
52 | volumeMounts:
53 | - mountPath: /etc/config
54 | name: config
55 | readOnly: true
56 | volumes:
57 | - name: config
58 | configMap:
59 | name: prometheus-postgres-exporter-conf
60 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.6 on 2020-07-23 07:55
2 |
3 | import django.contrib.postgres.fields.jsonb
4 | import django.core.serializers.json
5 | from django.db import migrations, models
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | initial = True
11 |
12 | dependencies = [
13 | ]
14 |
15 | operations = [
16 | migrations.CreateModel(
17 | name='OLTPBenchResult',
18 | fields=[
19 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
20 | ('time', models.DateTimeField()),
21 | ('query_mode', models.CharField(choices=[('simple', 'simple'), ('extended', 'extended')], max_length=30)),
22 | ('jenkins_job_id', models.CharField(max_length=15)),
23 | ('git_branch', models.CharField(max_length=255)),
24 | ('git_commit_id', models.CharField(max_length=40)),
25 | ('db_version', models.CharField(max_length=255)),
26 | ('environment', django.contrib.postgres.fields.jsonb.JSONField()),
27 | ('benchmark_type', models.CharField(max_length=20)),
28 | ('scale_factor', models.DecimalField(decimal_places=4, max_digits=10)),
29 | ('terminals', models.PositiveSmallIntegerField()),
30 | ('client_time', models.PositiveSmallIntegerField()),
31 | ('weights', django.contrib.postgres.fields.jsonb.JSONField()),
32 | ('metrics', django.contrib.postgres.fields.jsonb.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder)),
33 | ('incremental_metrics', django.contrib.postgres.fields.jsonb.JSONField()),
34 | ],
35 | options={
36 | 'db_table': 'oltpbench_results',
37 | },
38 | ),
39 | ]
40 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0006_microbenchmarkresult.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.6 on 2020-09-27 18:35
2 |
3 | import django.contrib.postgres.fields.jsonb
4 | import django.core.serializers.json
5 | from django.db import migrations, models
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('api', '0005_merge_20200915_0112'),
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='MicrobenchmarkResult',
17 | fields=[
18 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
19 | ('time', models.DateTimeField()),
20 | ('query_mode', models.CharField(choices=[('simple', 'simple'), ('extended', 'extended')], max_length=30)),
21 | ('jenkins_job_id', models.CharField(max_length=15)),
22 | ('git_branch', models.CharField(max_length=255)),
23 | ('git_commit_id', models.CharField(max_length=40)),
24 | ('db_version', models.CharField(max_length=255)),
25 | ('environment', django.contrib.postgres.fields.jsonb.JSONField()),
26 | ('benchmark_suite', models.CharField(max_length=255)),
27 | ('benchmark_name', models.CharField(max_length=255)),
28 | ('threads', models.PositiveSmallIntegerField()),
29 | ('min_runtime', models.PositiveIntegerField()),
30 | ('wal_device', models.CharField(choices=[('RAM disk', 'RAM disk'), ('HDD', 'HDD'), ('SATA SSD', 'SATA SSD'), ('NVMe SSD', 'NVMe SSD'), ('None', 'None')], max_length=30)),
31 | ('metrics', django.contrib.postgres.fields.jsonb.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder)),
32 | ],
33 | options={
34 | 'db_table': 'microbenchmark_results',
35 | },
36 | ),
37 | ]
38 |
--------------------------------------------------------------------------------
/deployments/roles/install_python3.7/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: update apt repo cache and install software-properties-common
4 | become: yes
5 | apt:
6 | name: software-properties-common
7 | update_cache: yes
8 |
9 | - name: add deadsnakes ppa
10 | become: yes
11 | command: add-apt-repository ppa:deadsnakes/ppa -y
12 |
13 | - name: update apt repo cache and install python 3.7
14 | become: yes
15 | apt:
16 | pkg:
17 | - python3.7
18 | update_cache: yes
19 |
20 | # *****************************************************************************
 21 | # CAUTION: Updating the default python3 version WILL break python3-apt and
 22 | # many other packages that rely on Ubuntu 18.04's default python3 version
 23 | # (python 3.6); therefore, do NOT update the server's default python3
 24 | # version
25 | # *****************************************************************************
26 |
27 | # - name: get python3 absolute path
28 | # command: which python3
29 | # register: path_python3
30 |
31 | # - name: get python3.7 absolute path
32 | # command: which python3.7
33 | # register: path_python3_7
34 |
35 | # - name: get python3 stat
36 | # stat:
37 | # path: "{{ path_python3.stdout }}"
38 | # register: stat_python3
39 |
40 | # - name: update the python3 symbolic link to python3.7 if it is not
41 | # become: yes
42 | # file:
43 | # src: "{{ path_python3_7.stdout }}"
44 | # dest: "{{ path_python3.stdout }}"
45 | # force: yes
46 | # state: link
47 | # when:
48 | # - stat_python3.stat.islnk is defined
49 | # - stat_python3.stat.islnk
50 | # - stat_python3.stat.lnk_source != path_python3_7.stdout
51 |
52 | - name: install pip3 based on python3.7
53 | become: yes
54 | apt:
55 | pkg:
56 | - python3-pip
57 | update_cache: yes
58 |
 59 | - name: globally install pip packages
60 | become: yes
61 | pip:
62 | name: virtualenv
63 |
64 |
65 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/database/ArtifactStatsResult.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 |
3 | from django.db.models import Model, DateTimeField, CharField, JSONField
4 | from django.db import IntegrityError
5 | from django.core.serializers.json import DjangoJSONEncoder
6 | from django.utils.dateparse import parse_datetime
7 |
8 |
9 | class ArtifactStatsResult(Model):
10 | """ This class is the model for storing artifact stats results in the database. For more information about the
11 | schema check out the wiki:
12 | https://github.com/cmu-db/noisepage-stats/wiki/Timescaledb-Schema#artifact_stats_results-table """
13 |
14 | class Meta:
15 | db_table = 'artifact_stats_results'
16 |
17 | time = DateTimeField(primary_key=True, auto_now=False)
18 | jenkins_job_id = CharField(max_length=15)
19 | git_branch = CharField(max_length=255)
20 | git_commit_id = CharField(max_length=40)
21 | db_version = CharField(max_length=255)
22 | environment = JSONField()
23 | metrics = JSONField(encoder=DjangoJSONEncoder)
24 |
25 | def save(self, *args, **kwargs):
26 | self.save_and_smear_timestamp(*args, **kwargs)
27 |
28 | def save_and_smear_timestamp(self, *args, **kwargs):
 29 |         """Recursively try to save, incrementing the timestamp on a duplicate-key error"""
30 | try:
31 | super().save(*args, **kwargs)
32 | except IntegrityError as exception:
33 | # Only handle the error:
34 | # psycopg2.errors.UniqueViolation: duplicate key value violates unique constraint
35 | # "1_1_farms_sensorreading_pkey"
36 | # DETAIL: Key ("time")=(2020-10-01 22:33:52.507782+00) already exists.
 37 |             if not all(k in exception.args[0] for k in ("Key", "time", "already exists")):
 38 |                 raise
 39 |             # Increment the timestamp by 1 ms and try again
 40 |             self.time = str(parse_datetime(self.time) + timedelta(milliseconds=1))
 41 |             self.save_and_smear_timestamp(*args, **kwargs)
--------------------------------------------------------------------------------
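
The timestamp smearing above is the subtle part of this model: `time` is the primary key, so two results posted with the same timestamp would collide, and the model retries with the time nudged forward until the insert succeeds. A minimal sketch of the same idea outside Django, where the in-memory `store` dict stands in for the table:

```python
from datetime import datetime, timedelta

store = {}  # stands in for a table keyed on `time`

def save_and_smear(time, payload):
    """Retry on key collision by nudging the timestamp forward 1 ms."""
    if time in store:  # stands in for catching psycopg2's UniqueViolation
        return save_and_smear(time + timedelta(milliseconds=1), payload)
    store[time] = payload
    return time

t = datetime(2020, 10, 1, 22, 33, 52)
save_and_smear(t, 'first result')
assert save_and_smear(t, 'second result') == t + timedelta(milliseconds=1)
```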
/deployments/kubernetes/monitoring/blackbox_exporter/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | apiVersion: apps/v1
4 | kind: Deployment
5 | metadata:
6 | name: prometheus-blackbox-exporter
7 | labels:
8 | app: prometheus-blackbox-exporter
9 | namespace: monitoring
10 | spec:
11 | replicas: 1
12 | selector:
13 | matchLabels:
14 | app: prometheus-blackbox-exporter
15 | template:
16 | metadata:
17 | labels:
18 | app: prometheus-blackbox-exporter
19 | spec:
20 | restartPolicy: Always
21 | nodeSelector:
22 | env: master
23 | containers:
24 | - name: blackbox-exporter
25 | image: "prom/blackbox-exporter:v0.17.0"
26 | imagePullPolicy: IfNotPresent
27 | securityContext:
28 | readOnlyRootFilesystem: true
29 | runAsNonRoot: true
30 | runAsUser: 1000
31 | args:
32 | - "--config.file=/config/blackbox.yaml"
33 | resources:
34 | {}
35 | ports:
36 | - containerPort: 9115
37 | name: http
38 | livenessProbe:
39 | httpGet:
40 | path: /health
41 | port: http
42 | readinessProbe:
43 | httpGet:
44 | path: /health
45 | port: http
46 | volumeMounts:
47 | - mountPath: /config
48 | name: config
49 | - name: configmap-reload
50 | image: "jimmidyson/configmap-reload:v0.4.0"
51 | imagePullPolicy: "IfNotPresent"
52 | securityContext:
53 | runAsNonRoot: true
54 | runAsUser: 65534
55 | args:
56 | - --volume-dir=/etc/config
57 | - --webhook-url=http://localhost:9115/-/reload
58 | resources:
59 | {}
60 | volumeMounts:
61 | - mountPath: /etc/config
62 | name: config
63 | readOnly: true
64 | volumes:
65 | - name: config
66 | configMap:
67 | name: prometheus-blackbox-exporter-conf
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/migrations/0010_auto_20201121_2224.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1.2 on 2020-11-22 03:24
2 |
3 | import django.core.serializers.json
4 | from django.db import migrations, models
5 |
6 |
7 | class Migration(migrations.Migration):
8 |
9 | dependencies = [
10 | ('api', '0009_auto_20201109_0226'),
11 | ]
12 |
13 | operations = [
14 | migrations.AlterField(
15 | model_name='artifactstatsresult',
16 | name='environment',
17 | field=models.JSONField(),
18 | ),
19 | migrations.AlterField(
20 | model_name='artifactstatsresult',
21 | name='metrics',
22 | field=models.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder),
23 | ),
24 | migrations.AlterField(
25 | model_name='microbenchmarkresult',
26 | name='environment',
27 | field=models.JSONField(),
28 | ),
29 | migrations.AlterField(
30 | model_name='microbenchmarkresult',
31 | name='metrics',
32 | field=models.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder),
33 | ),
34 | migrations.AlterField(
35 | model_name='oltpbenchresult',
36 | name='environment',
37 | field=models.JSONField(),
38 | ),
39 | migrations.AlterField(
40 | model_name='oltpbenchresult',
41 | name='incremental_metrics',
42 | field=models.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder),
43 | ),
44 | migrations.AlterField(
45 | model_name='oltpbenchresult',
46 | name='metrics',
47 | field=models.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder),
48 | ),
49 | migrations.AlterField(
50 | model_name='oltpbenchresult',
51 | name='weights',
52 | field=models.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder),
53 | ),
54 | ]
55 |
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/performance-storage-service/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: Deployment
4 | metadata:
5 | name: "{{ env }}-performance-storage-service"
6 | namespace: performance
7 | labels:
8 | app: pss
9 | env: "{{ env }}"
10 | spec:
11 | replicas: 1
12 | strategy:
13 | type: RollingUpdate
14 | selector:
15 | matchLabels:
16 | app: pss
17 | env: "{{ env }}"
18 | template:
19 | metadata:
20 | labels:
21 | app: pss
22 | env: "{{ env }}"
23 | spec:
24 | nodeSelector:
25 | env: "{{ env }}"
26 | containers:
27 | - name: performance-storage-service
28 | image: "{{ pss_container_image }}"
29 | imagePullPolicy: "{{ image_pull_policy }}"
30 | ports:
31 | - name: http
32 | containerPort: 8080
33 | env:
34 | - name: SECRET_KEY
35 | value: "super-secret"
36 | - name: ENV
37 | value: "{{ env }}"
38 | - name: PSS_DATABASE_NAME
39 | value: "{{ pss_db_name }}"
40 | - name: PSS_DATABASE_PORT
41 | value: "5432"
42 | - name: PSS_DATABASE_USER
43 | valueFrom:
44 | secretKeyRef:
45 | name: "secrets-{{ env }}"
46 | key: pss_db_user
47 | - name: PSS_DATABASE_PASSWORD
48 | valueFrom:
49 | secretKeyRef:
50 | name: "secrets-{{ env }}"
51 | key: pss_db_password
52 | - name: PSS_POD_IP
53 | valueFrom:
54 | fieldRef:
55 | fieldPath: status.podIP
56 | - name: GITHUB_APP_WEBHOOK_SECRET
57 | valueFrom:
58 | secretKeyRef:
59 | name: "secrets-{{ env }}"
60 | key: webhook_secret
61 | - name: GITHUB_APP_PRIVATE_KEY
62 | valueFrom:
63 | secretKeyRef:
64 | name: "secrets-{{ env }}"
65 | key: github_private_key
66 |
67 |
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/performance-storage-service/migration-job.yml:
--------------------------------------------------------------------------------
1 | apiVersion: batch/v1
2 | kind: Job
3 | metadata:
4 | name: django-migrations
5 | namespace: performance
6 | spec:
7 | ttlSecondsAfterFinished: 43200
8 | template:
9 | spec:
10 | containers:
11 | - name: performance-storage-service-migration
12 | image: "{{ pss_container_image }}"
13 | command: ["python", "manage.py", "migrate", "--noinput"]
14 | imagePullPolicy: "{{ image_pull_policy }}"
15 | env:
16 | - name: ENV
17 | value: "{{ env }}"
18 | - name: PSS_DATABASE_NAME
19 | value: "{{ pss_db_name }}"
20 | - name: PSS_DATABASE_PORT
21 | value: "5432"
22 | - name: PSS_DATABASE_USER
23 | valueFrom:
24 | secretKeyRef:
25 | name: "secrets-{{ env }}"
26 | key: pss_db_user
27 | - name: PSS_DATABASE_PASSWORD
28 | valueFrom:
29 | secretKeyRef:
30 | name: "secrets-{{ env }}"
31 | key: pss_db_password
32 | - name: PSS_CREATOR_USER
33 | valueFrom:
34 | secretKeyRef:
35 | name: "secrets-{{ env }}"
36 | key: pss_creator_user
37 | - name: PSS_CREATOR_PASSWORD
38 | valueFrom:
39 | secretKeyRef:
40 | name: "secrets-{{ env }}"
41 | key: pss_creator_password
42 | - name: PSS_POD_IP
43 | valueFrom:
44 | fieldRef:
45 | fieldPath: status.podIP
46 | - name: GITHUB_APP_WEBHOOK_SECRET
47 | valueFrom:
48 | secretKeyRef:
49 | name: "secrets-{{ env }}"
50 | key: webhook_secret
51 | - name: GITHUB_APP_PRIVATE_KEY
52 | valueFrom:
53 | secretKeyRef:
54 | name: "secrets-{{ env }}"
55 | key: github_private_key
56 |
57 | restartPolicy: Never
58 | backoffLimit: 5
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/views/oltpbench.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from rest_framework import viewsets
4 | from rest_framework.response import Response
5 | from rest_framework import status
6 | from rest_framework.parsers import JSONParser
7 | from pss_project.api.serializers.rest.OLTPBenchSerializer import OLTPBenchSerializer
8 | from pss_project.api.serializers.database.OLTPBenchResultSerializer import OLTPBenchResultSerializer
9 | from rest_framework.authentication import BasicAuthentication
10 |
11 | logger = logging.getLogger()
12 |
13 |
14 | class OLTPBenchViewSet(viewsets.ViewSet):
15 |
16 | def create(self, request):
 17 |         """ First check that an authorized user posted the request. Then validate the API request body. Next, convert
18 | the request body into a format suitable for the database. Finally, store the new OLTPBench test result in the
19 | database. """
20 | user = BasicAuthentication().authenticate(request)
21 | if user is None:
22 | logger.debug('Invalid authentication')
 23 |             return Response({'message': 'Forbidden'}, status=status.HTTP_403_FORBIDDEN)
24 |
25 | data = JSONParser().parse(request)
26 | api_serializer = OLTPBenchSerializer(data=data)
27 | if not api_serializer.is_valid():
28 | logger.debug(f'Bad Request: {api_serializer.errors}')
29 | return Response(api_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
30 |
31 | api_serializer.save()
32 | db_serializer = OLTPBenchResultSerializer(data=api_serializer.instance.convert_to_db_json())
33 | db_serializer.smudge_timestamp()
34 | if not db_serializer.is_valid():
35 | return Response(db_serializer.errors, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
36 |
37 | try:
38 | db_serializer.save()
39 | except Exception as err:
40 | logger.error(f'OLTPBenchViewSet create failed: {err}')
41 | return Response({'message': str(err)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
42 |
43 | return Response(api_serializer.validated_data, status=status.HTTP_201_CREATED)
44 |
--------------------------------------------------------------------------------
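
The `create` flow above (the microbenchmark and artifact-stats views follow the same pattern) expects an HTTP Basic-authenticated POST with a JSON body. A client-side sketch of that contract; the host, credentials, and payload file below are placeholders, and the body must match what `OLTPBenchSerializer` validates:

```python
import json

import requests  # any HTTP client works; requests is assumed here

# Hypothetical file containing a payload shaped like the OLTPBench API model
with open('oltpbench_result.json') as f:
    payload = json.load(f)

resp = requests.post(
    'http://localhost:8000/performance-results/oltpbench/',  # placeholder host
    json=payload,
    auth=('jenkins-user', 'password'),  # BasicAuthentication credentials
)
# Expect 201 Created on success, 400 on validation errors, 403 without credentials
print(resp.status_code)
```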
/deployments/roles/config_openresty/templates/k8s_master.nginx:
--------------------------------------------------------------------------------
1 | upstream grafana {
2 | server localhost:{{ port_grafana }};
3 | }
4 |
5 | upstream prometheus {
6 | server localhost:{{ port_prometheus }};
7 | }
8 |
9 | server {
10 | listen 80;
11 | resolver 8.8.8.8;
12 | server_name {{ hostname }};
13 | return 301 https://$host$request_uri;
14 | }
15 |
16 | server {
17 | listen 443 ssl;
18 | resolver 8.8.8.8;
19 | server_name {{ hostname }};
20 |
21 | # ssl config
22 | ssl_certificate /etc/openresty/ssl/{{ hostname }}/cert.pem;
23 | ssl_certificate_key /etc/openresty/ssl/{{ hostname }}/key.pem;
24 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
25 | ssl_ciphers HIGH:!aNULL:!MD5;
26 |
27 | # log config
28 | access_log {{ dir_openresty_internal_log }}/access.log;
29 | error_log {{ dir_openresty_internal_log }}/error.log;
30 |
31 | # default redirection
32 | location / {
33 | return 301 /grafana;
34 | }
35 |
36 | location ^~ /.well-known/acme-challenge {
37 | root /etc/openresty/ssl/$host;
38 | }
39 |
40 | location /static {
41 | alias /data/static;
42 | autoindex on;
43 | }
44 |
45 | # monitoring: prometheus monitoring
46 | location /prometheus/ {
47 | proxy_pass_header Set-Cookie;
48 |
49 | proxy_set_header Host $host;
50 | proxy_set_header X-Real-IP $remote_addr;
51 | proxy_set_header X-Forwarded-Proto $scheme;
52 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
53 |
54 | proxy_pass http://prometheus/;
55 | }
56 |
57 | # monitoring: grafana service
58 | location /grafana/ {
59 | proxy_pass_header Set-Cookie;
60 |
61 | proxy_set_header Host $host;
62 | proxy_set_header X-Real-IP $remote_addr;
63 | proxy_set_header X-Forwarded-Proto $scheme;
64 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
65 |
66 | proxy_pass http://grafana/;
67 | }
68 | }
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/views/microbenchmark.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from rest_framework import viewsets, status
4 | from rest_framework.response import Response
5 | from rest_framework.parsers import JSONParser
6 | from pss_project.api.serializers.rest.MicrobenchmarkSerializer import MicrobenchmarkSerializer
7 | from pss_project.api.serializers.database.MicrobenchmarkResultSerializer import MicrobenchmarkResultSerializer
8 | from rest_framework.authentication import BasicAuthentication
9 |
10 | logger = logging.getLogger()
11 |
12 |
13 | class MicrobenchmarkViewSet(viewsets.ViewSet):
14 |
15 | def create(self, request):
 16 |         """ First check that an authorized user posted the request. Then validate the API request body. Next, convert
17 | the request body into a format suitable for the database. Finally, store the new microbenchmark result in the
18 | database. """
19 | user = BasicAuthentication().authenticate(request)
20 | if user is None:
21 | logger.debug('Invalid authentication')
22 | return Response({'message': 'Forbidden'}, status=status.HTTP_403_FORBIDDEN)
23 |
24 | data = JSONParser().parse(request)
25 | api_serializer = MicrobenchmarkSerializer(data=data)
26 | if not api_serializer.is_valid():
27 | logger.debug(f'Bad Request: {api_serializer.errors}')
28 | return Response(api_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
29 |
30 | api_serializer.save()
31 | db_serializer = MicrobenchmarkResultSerializer(data=api_serializer.instance.convert_to_db_json())
32 | db_serializer.smudge_timestamp()
33 | if not db_serializer.is_valid():
34 | logger.error(f'Invalid db_serializer: {db_serializer.errors}')
35 | return Response(db_serializer.errors, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
36 | try:
37 | db_serializer.save()
38 | except Exception as err:
39 | logger.error(f'MicrobenchmarkViewSet create failed: {err}')
40 | return Response({'message': str(err)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
41 |
42 | return Response(api_serializer.validated_data, status=status.HTTP_201_CREATED)
43 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/views/artifact_stats.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from rest_framework.viewsets import ViewSet
4 | from rest_framework.response import Response
5 | from rest_framework.parsers import JSONParser
6 | from rest_framework.status import (HTTP_403_FORBIDDEN, HTTP_400_BAD_REQUEST,
7 | HTTP_500_INTERNAL_SERVER_ERROR, HTTP_201_CREATED)
8 | from rest_framework.authentication import BasicAuthentication
9 | from pss_project.api.serializers.rest.ArtifactStatsSerializer import ArtifactStatsSerializer
10 | from pss_project.api.serializers.database.ArtifactStatsResultSerializer import ArtifactStatsResultSerializer
11 |
12 | logger = logging.getLogger()
13 |
14 |
15 | class ArtifactStatsViewSet(ViewSet):
16 |
17 | def create(self, request):
 18 |         """ First check that an authorized user posted the request. Then validate the API request body. Next, convert
19 | the request body into a format suitable for the database. Finally, store the new artifact stats result in the
20 | database. """
21 | user = BasicAuthentication().authenticate(request)
22 | if user is None:
23 | logger.debug('Invalid authentication')
24 | return Response({'message': 'Forbidden'}, status=HTTP_403_FORBIDDEN)
25 |
26 | data = JSONParser().parse(request)
27 | api_serializer = ArtifactStatsSerializer(data=data)
28 | if not api_serializer.is_valid():
29 | logger.debug(f'Bad Request: {api_serializer.errors}')
30 | return Response(api_serializer.errors, status=HTTP_400_BAD_REQUEST)
31 |
32 | api_serializer.save()
33 | db_serializer = ArtifactStatsResultSerializer(data=api_serializer.instance.convert_to_db_json())
34 | db_serializer.smudge_timestamp()
35 | if not db_serializer.is_valid():
36 | logger.error(f'Invalid db_serializer: {db_serializer.errors}')
37 | return Response(db_serializer.errors, status=HTTP_500_INTERNAL_SERVER_ERROR)
38 | try:
39 | db_serializer.save()
40 | except Exception as err:
41 | logger.error(f'ArtifactStatsViewSet create failed: {err}')
42 | return Response({'message': str(err)}, status=HTTP_500_INTERNAL_SERVER_ERROR)
43 |
44 | return Response(api_serializer.validated_data, status=HTTP_201_CREATED)
45 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/database/MicrobenchmarkResult.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 | from django.db.models import (Model, DateTimeField, CharField, PositiveSmallIntegerField, PositiveIntegerField,
3 | JSONField)
4 | from django.db import IntegrityError
5 | from django.core.serializers.json import DjangoJSONEncoder
6 | from django.utils.dateparse import parse_datetime
7 | from pss_project.api.constants import WAL_DEVICE_CHOICES
8 |
9 |
10 | class MicrobenchmarkResult(Model):
11 | """ This class is the model for storing microbenchmark test results in the database. For more information about the
12 | schema check out the wiki:
13 | https://github.com/cmu-db/noisepage-stats/wiki/Timescaledb-Schema#microbenchmark_results-table """
14 |
15 | class Meta:
16 | db_table = 'microbenchmark_results'
17 |
18 | time = DateTimeField(primary_key=True, auto_now=False, validators=[])
19 | jenkins_job_id = CharField(max_length=15)
20 | git_branch = CharField(max_length=255)
21 | git_commit_id = CharField(max_length=40)
22 | db_version = CharField(max_length=255)
23 | environment = JSONField()
24 | benchmark_suite = CharField(max_length=255)
25 | benchmark_name = CharField(max_length=255)
26 | threads = PositiveSmallIntegerField()
27 | min_runtime = PositiveIntegerField()
28 | wal_device = CharField(max_length=30, choices=WAL_DEVICE_CHOICES)
29 | metrics = JSONField(encoder=DjangoJSONEncoder)
30 |
31 | def save(self, *args, **kwargs):
32 | self.save_and_smear_timestamp(*args, **kwargs)
33 |
34 | def save_and_smear_timestamp(self, *args, **kwargs):
 35 |         """Recursively try to save, incrementing the timestamp on a duplicate-key error"""
36 | try:
37 | super().save(*args, **kwargs)
38 | except IntegrityError as exception:
39 | # Only handle the error:
40 | # psycopg2.errors.UniqueViolation: duplicate key value violates unique constraint
41 | # "1_1_farms_sensorreading_pkey"
42 | # DETAIL: Key ("time")=(2020-10-01 22:33:52.507782+00) already exists.
 43 |             if not all(k in exception.args[0] for k in ("Key", "time", "already exists")):
 44 |                 raise
 45 |             # Increment the timestamp by 1 ms and try again
 46 |             self.time = str(parse_datetime(self.time) + timedelta(milliseconds=1))
 47 |             self.save_and_smear_timestamp(*args, **kwargs)
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/github_integration/how_to_create_github_bot.md:
--------------------------------------------------------------------------------
1 | # How to Create a Github Checker Bot
2 |
 3 | The main purpose of the Github checks is to integrate CI tasks into the Github flow. For example, the performance guard will prevent merging a PR if the performance of the PR is significantly worse than master.
4 |
5 | ## Github Bot flow
6 | 1. PR opened/updated/reopened
7 | 2. Bot initializes a check in the pending state
8 | 3. Jenkins CI successfully finishes
 9 | 4. Bot updates the check to the complete state with a status (success/neutral/failure)
10 |
 11 | `BasePRBot` is designed to follow that flow. Following a different flow isn't too complicated: you will need to override a couple of methods in your subclass.
12 |
13 | ## Guide
 14 | 1. Create a class that inherits from `BasePRBot`. See `PerformanceGuardBot.py` for an example. Next, override the `conclusion_title_map` and `conclusion_summary_map` attributes. These maps associate different titles and summary lines with whether the result of the check is success, neutral, or failure.
15 |
 16 | You must override three methods (a minimal subclass sketch follows this file):
17 |
 18 | - `get_conclusion_data(self, payload)` - This method takes in the payload from the Github event and should return the data needed to determine whether the check was a success, neutral, or failure. The return value can be of any type.
 19 |
 20 | - `get_conclusion(self, data)` - This function takes in the data that was returned from `get_conclusion_data` and determines the outcome of the check. It should return either `CONCLUSION_SUCCESS`, `CONCLUSION_NEUTRAL`, or `CONCLUSION_FAILURE`.
 21 |
 22 | - `generate_conclusion_markdown(self, data)` - This function takes in the data that was returned from `get_conclusion_data` and generates a string that will be displayed in the detailed view of the check. The string can use markdown.
23 |
 24 | 2. Next, add an instance of your bot to `pss_project/api/views/git_events.py`. Pass the `repo_client` into the bot; it allows the PR bot to make API calls to the NoisePage repository. Pick a name for the bot, then instantiate the object and call `run()` like:
25 | ```python
26 | my_bot = MyBot(repo_client=repo_client, name='my-bot')
27 | my_bot.run(event, payload)
28 | ```
29 | 3. Now you are ready to deploy the updated code. Don't forget to increment the version numbers. For instructions on deploying follow the guide [here](https://github.com/cmu-db/noisepage-stats/wiki/Performance-storage-service).
30 |
--------------------------------------------------------------------------------
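
A minimal subclass sketch of the guide above. The class name, the check logic, and the import path for the `CONCLUSION_*` constants are illustrative assumptions; only the two map attributes and the three overridden methods come from the guide:

```python
# Sketch only: the constants' import path is an assumption, not confirmed source.
from pss_project.api.github_integration.BasePRBot import BasePRBot
from pss_project.api.constants import CONCLUSION_SUCCESS, CONCLUSION_FAILURE


class ExampleBot(BasePRBot):
    conclusion_title_map = {
        CONCLUSION_SUCCESS: 'Example check passed',
        CONCLUSION_FAILURE: 'Example check failed',
    }
    conclusion_summary_map = {
        CONCLUSION_SUCCESS: 'Everything looked fine.',
        CONCLUSION_FAILURE: 'Something needs attention.',
    }

    def get_conclusion_data(self, payload):
        # Pull whatever the check needs out of the Github event payload
        return payload.get('pull_request')

    def get_conclusion(self, data):
        # Decide the outcome from the data gathered above
        return CONCLUSION_SUCCESS if data is not None else CONCLUSION_FAILURE

    def generate_conclusion_markdown(self, data):
        # Markdown shown in the check's detailed view
        return 'Example check ran against this PR.'
```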
/deployments/kubernetes/monitoring/grafana/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | apiVersion: apps/v1
4 | kind: StatefulSet
5 | metadata:
6 | name: grafana
7 | namespace: monitoring
8 | labels:
9 | app: grafana
10 | spec:
11 | replicas: 1
 12 |   updateStrategy:
 13 |     type: RollingUpdate
14 | selector:
15 | matchLabels:
16 | app: grafana
17 | template:
18 | metadata:
19 | labels:
20 | app: grafana
21 | spec:
22 | nodeSelector:
23 | env: master
24 | volumes:
25 | - name: grafana-volume
26 | persistentVolumeClaim:
27 | claimName: monitoring-grafana-pv-claim
28 | containers:
29 | - name: grafana
30 | image: "{{ grafana_container_image }}"
31 | imagePullPolicy: "{{ image_pull_policy }}"
32 | ports:
33 | - name: http
34 | containerPort: 3000
35 | volumeMounts:
36 | - name: grafana-volume
37 | mountPath: /var/lib/grafana
38 | env:
39 | - name: GF_PATHS_LOGS
40 | value: /var/log/grafana/
41 | - name: GF_LOG_MODE
42 | value: "console file"
43 | - name: GF_SERVER_DOMAIN
44 | value: "{{ service_hostname }}"
45 | - name: GF_SERVER_ROOT_URL
46 | value: "https://{{ service_hostname }}/grafana"
47 | - name: GF_SERVER_SERVE_FROM_SUB_PATH
48 | value: "true"
49 | - name: GF_SERVER_HTTP_PORT
50 | value: "3000"
51 | - name: GF_AUTH_GITHUB_ENABLED
52 | value: "true"
53 | - name: GF_AUTH_GITHUB_ALLOW_SIGN_UP
54 | value: "true"
55 | - name: GF_AUTH_GITHUB_ALLOWED_ORGANIZATIONS
56 | value: "cmu-db"
57 | - name: GF_SECURITY_ADMIN_USER
58 | value: "admin"
59 | - name: GF_SECURITY_ADMIN_PASSWORD
60 | valueFrom:
61 | secretKeyRef:
62 | name: secrets-k8s-master
63 | key: gf_admin_password
 64 |         # GitHub Login
65 | - name: GF_AUTH_GITHUB_CLIENT_ID
66 | valueFrom:
67 | secretKeyRef:
68 | name: secrets-k8s-master
69 | key: gf_auth_github_client_id
70 | - name: GF_AUTH_GITHUB_CLIENT_SECRET
71 | valueFrom:
72 | secretKeyRef:
73 | name: secrets-k8s-master
74 | key: gf_auth_github_client_secret
--------------------------------------------------------------------------------
/deployments/playbooks/openapi-deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - hosts: "{{ host_override | default('k8s_master') }}"
4 | name: Deploy OpenAPI
5 | vars:
6 | dir_deployment: "{{ inventory_dir }}"
7 | dir_base: "{{ dir_deployment | dirname }}"
8 | dir_openapi: "{{ dir_base }}/performance-storage-service/openapi"
9 | dir_k8s_openapi: "{{ dir_deployment }}/kubernetes/performance/openapi"
10 | service_hostname: "incrudibles-{{ env }}.db.pdl.cmu.edu"
11 | service_port_dict:
12 | testing: 30007
13 | staging: 31007
14 | production: 32007
15 | service_port: "{{ service_port_dict[env] }}"
16 | openapi_template: "{{ dir_openapi }}/openapi.yml.j2"
17 | openapi_file_dir: "{{ dir_openapi }}/{{ env }}"
18 | openapi_file_name: "{{ openapi_file_dir }}/openapi.yml"
19 |
20 | pre_tasks:
21 | - name: Ensure k8s module dependencies are installed.
22 | pip:
23 | state: present
24 | name: openshift
25 |
26 | - name: Prepare openapi doc
27 | block:
28 | - name: Check if the openapi rendered dir exists
29 | stat:
30 | path: "{{ openapi_file_dir }}"
31 | register: stat_openapi_dir
32 | - name: Check if the openapi template exists
33 | stat:
34 | path: "{{ openapi_template }}"
35 | register: stat_openapi_template
36 | - name: Create the openapi dir for {{ env }}
37 | file:
38 | path: "{{ openapi_file_dir }}"
39 | mode: 0755
40 | state: directory
41 | when:
42 | - not stat_openapi_dir.stat.exists
43 | - name: Render the openapi for {{ env }}
44 | template:
45 | src: "{{ openapi_template }}"
46 | dest: "{{ openapi_file_name }}"
47 | when:
48 | - stat_openapi_template.stat.exists
49 | - stat_openapi_template.stat.isreg is defined
50 | - stat_openapi_template.stat.isreg
51 | - name: Append the openapi file to configs
52 | set_fact:
 53 |           openapi_file_content: "{{ lookup('file', openapi_file_name) | replace('\n', '\n    ') }}"
54 | delegate_to: localhost
55 |
56 | tasks:
57 | - name: Apply Swagger OpenAPI Deployment Configs
58 | vars:
59 | config: "{{ dir_k8s_openapi }}/{{ item }}"
60 | community.kubernetes.k8s:
61 | state: present
 62 |         definition: "{{ lookup('template', config) }}"
63 | loop:
64 | - config-map.yml
65 | - deployment.yml
66 | - service.yml
67 |
--------------------------------------------------------------------------------
/timescaledb/smudge_timestamps.py:
--------------------------------------------------------------------------------
1 | import psycopg2
2 | import argparse
3 | from datetime import timedelta
4 | from random import randrange
5 |
6 | def fetch_all_time(conn, table):
7 | query = f"""
8 | SELECT
9 | time,
10 | id
11 | FROM {table}
12 | ORDER BY time ASC
13 | """
14 | with conn.cursor() as cur:
15 | cur.execute(query)
16 | return cur.fetchall()
17 |
18 | def find_times_with_duplicates(conn, table):
19 | query = f"""
20 | SELECT
21 | time,
22 | COUNT(*)
23 | FROM {table}
24 | GROUP BY time
25 | HAVING COUNT(*) > 1
26 | """
27 | with conn.cursor() as cur:
28 | cur.execute(query)
29 | return cur.fetchall()
30 |
31 | def find_records_with_duplicate_times(conn, table, time):
32 | query = f"""
33 | SELECT
34 | time,
35 | id
36 | FROM {table}
37 | WHERE
38 | time = '{time}'
39 | """
40 | with conn.cursor() as cur:
41 | cur.execute(query)
42 | return cur.fetchall()
43 |
44 | def update_with_smudge(conn, table, record):
45 | old_time = record[0]
 46 |     new_time = old_time + timedelta(milliseconds=randrange(1, 10))
47 | sql_statement = f"""
48 | UPDATE {table}
49 | SET time = '{new_time}'
50 | WHERE
51 | id = '{record[1]}'
52 | """
53 | with conn.cursor() as cur:
54 | cur.execute(sql_statement)
55 |
56 | def main():
57 | parser = argparse.ArgumentParser()
58 | parser.add_argument('--username', type=str, help='Database username')
 59 |     parser.add_argument('--password', type=str, help='Database password')
 60 |     parser.add_argument('--host', type=str, default='incrudibles-production.db.pdl.cmu.edu', help='Hostname of the database (e.g. incrudibles-production.db.pdl.cmu.edu)')
61 | parser.add_argument('--port', type=str, default='32003', help='Port that the DB is running on.')
62 | args = parser.parse_args()
63 | username = args.username
64 | password = args.password
65 | host = args.host
66 | port = args.port
67 |
68 | conn = psycopg2.connect(f'postgres://{username}:{password}@{host}:{port}/pss_database')
69 |
70 | table = 'oltpbench_results'
71 | all_table_records = find_times_with_duplicates(conn, table)
72 | count = 1
73 | for time, _ in all_table_records:
74 | print(count)
 75 |         count += 1
76 | duplicate_time_records = find_records_with_duplicate_times(conn, table, time)
77 | for record in duplicate_time_records:
78 | update_with_smudge(conn, table, record)
79 | conn.commit()
80 |
81 | if __name__ == "__main__":
82 | main()
--------------------------------------------------------------------------------
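
One note on the SQL in the script above: values are interpolated with f-strings, which is workable for a trusted one-off but fragile around quoting. A sketch of the same update using psycopg2's parameter binding instead (table names cannot be bound parameters, so the trusted table name is still interpolated):

```python
from datetime import timedelta
from random import randrange

def update_with_smudge_bound(conn, table, record):
    """Variant of update_with_smudge that binds values instead of formatting them."""
    new_time = record[0] + timedelta(milliseconds=randrange(1, 10))
    # psycopg2 substitutes %s placeholders server-side with proper escaping
    with conn.cursor() as cur:
        cur.execute(f"UPDATE {table} SET time = %s WHERE id = %s",
                    (new_time, record[1]))
```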
/deployments/kubernetes/performance/grafana/deployment.yml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: StatefulSet
4 | metadata:
5 | name: "{{ env }}-grafana"
6 | namespace: "performance"
7 | labels:
8 | app: grafana
9 | env: "{{ env }}"
10 | spec:
11 | replicas: 1
 12 |   updateStrategy:
 13 |     type: RollingUpdate
14 | selector:
15 | matchLabels:
16 | app: grafana
17 | env: "{{ env }}"
18 | template:
19 | metadata:
20 | labels:
21 | app: grafana
22 | env: "{{ env }}"
23 | spec:
24 | nodeSelector:
25 | env: "{{ env }}"
26 | volumes:
27 | - name: grafana-volume
28 | persistentVolumeClaim:
29 | claimName: grafana-pv-claim
30 | containers:
31 | - name: grafana
32 | image: "{{ grafana_container_image }}"
33 | imagePullPolicy: "{{ image_pull_policy }}"
34 | ports:
35 | - name: http
36 | containerPort: 3000
37 | volumeMounts:
38 | - name: grafana-volume
39 | mountPath: /var/lib/grafana
40 | env:
41 | - name: GF_PATHS_LOGS
42 | value: /var/log/grafana/
43 | - name: GF_LOG_MODE
44 | value: "console file"
45 | - name: GF_SERVER_DOMAIN
46 | value: "{{ service_hostname }}"
47 | - name: GF_SERVER_ROOT_URL
48 | value: "http://{{ service_hostname }}/grafana"
49 | - name: GF_SERVER_SERVE_FROM_SUB_PATH
50 | value: "true"
51 | - name: GF_SERVER_HTTP_PORT
52 | value: "3000"
53 | - name: GF_AUTH_GITHUB_ENABLED
54 | value: "true"
55 | - name: GF_AUTH_GITHUB_ALLOW_SIGN_UP
56 | value: "true"
57 | - name: GF_AUTH_GITHUB_ALLOWED_ORGANIZATIONS
58 | value: "cmu-db"
59 | - name: GF_SECURITY_ADMIN_USER
60 | value: "admin"
61 | - name: GF_INSTALL_PLUGINS
62 | value: "grafana-github-datasource"
63 | - name: GF_SECURITY_ADMIN_PASSWORD
64 | valueFrom:
65 | secretKeyRef:
66 | name: "secrets-{{ env }}"
67 | key: gf_admin_password
 68 |         # GitHub Login
69 | - name: GF_AUTH_GITHUB_CLIENT_ID
70 | valueFrom:
71 | secretKeyRef:
72 | name: "secrets-{{ env }}"
73 | key: gf_auth_github_client_id
74 | - name: GF_AUTH_GITHUB_CLIENT_SECRET
75 | valueFrom:
76 | secretKeyRef:
77 | name: "secrets-{{ env }}"
78 | key: gf_auth_github_client_secret
79 |
--------------------------------------------------------------------------------
/deployments/kubernetes/performance/timescaledb/timescaledb.yml:
--------------------------------------------------------------------------------
1 | kind: PersistentVolume
2 | apiVersion: v1
3 | metadata:
4 | name: postgres-pv-volume
5 | namespace: performance
6 | # labels:
7 | # type: local
8 | # app: postgres
9 | spec:
10 | storageClassName: manual
11 | capacity:
12 | storage: 100Gi
13 | accessModes:
14 | - ReadWriteMany
15 | hostPath:
16 | path: "/mnt/postgresPV"
17 | ---
18 | apiVersion: v1
19 | kind: PersistentVolumeClaim
20 | metadata:
21 | namespace: performance
22 | name: timescaledb
23 | spec:
24 | storageClassName: manual
25 | accessModes:
26 | - ReadWriteMany
27 | resources:
28 | requests:
29 | storage: 100Gi
30 | ---
31 | apiVersion: v1
32 | kind: Secret
33 | metadata:
34 | namespace: performance
35 | name: timescaledb
36 | type: Opaque
37 | data:
38 | username: YWRtaW4=
39 | # username: admin
40 | password: YWRtaW5wd2Q=
41 | # password: adminpwd
42 | database: YWRtaW5kYg==
43 | # database: admindb
44 | ---
45 | apiVersion: apps/v1
46 | kind: Deployment
47 | metadata:
48 | namespace: performance
49 | name: timescaledb
50 | labels:
51 | app: timescaledb
52 | spec:
53 | replicas: 1
54 | selector:
55 | matchLabels:
56 | app: timescaledb
57 | template:
58 | metadata:
59 | labels:
60 | app: timescaledb
61 | spec:
62 | containers:
63 | - name: timescaledb
64 | image: timescale/timescaledb:latest-pg12
65 | env:
66 | - name: POSTGRES_USER
67 | valueFrom:
68 | secretKeyRef:
69 | name: timescaledb
70 | key: username
71 | - name: POSTGRES_PASSWORD
72 | valueFrom:
73 | secretKeyRef:
74 | name: timescaledb
75 | key: password
76 | - name: POSTGRES_DB
77 | valueFrom:
78 | secretKeyRef:
79 | name: timescaledb
80 | key: database
81 | - name: TIMESCALEDB_TELEMETRY
82 | value: "off"
83 | - name: TS_TUNE_MEMORY
84 | value: "8GB"
85 | - name: TS_TUNE_NUM_CPUS
86 | value: "4"
87 | ports:
 88 |         - containerPort: 5432
89 | volumeMounts:
90 | - name: timescaledb
91 | mountPath: /var/lib/postgresql/data/
92 | volumes:
93 | - name: timescaledb
94 | persistentVolumeClaim:
95 | claimName: timescaledb
96 | ---
97 | apiVersion: v1
98 | kind: Service
99 | metadata:
100 | namespace: performance
101 | name: timescaledb
102 | labels:
103 | app: timescaledb
104 | spec:
105 | type: NodePort
106 | selector:
107 | app: timescaledb
108 | ports:
109 | - protocol: TCP
 110 |       port: 5436
 111 |       targetPort: 5432
 112 |       nodePort: 32222
--------------------------------------------------------------------------------
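
The `data:` values in the Secret above are simply the base64 encodings of the credentials shown in the adjacent comments, for example:

```python
import base64

for value in ('admin', 'adminpwd', 'admindb'):
    print(base64.b64encode(value.encode()).decode())
# -> YWRtaW4=, YWRtaW5wd2Q=, YWRtaW5kYg==
```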
/performance-storage-service/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | reports/
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 | cover/
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | .pybuilder/
77 | target/
78 |
79 | # Jupyter Notebook
80 | .ipynb_checkpoints
81 |
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 |
86 | # pyenv
87 | # For a library or package, you might want to ignore these files since the code is
88 | # intended to run in multiple environments; otherwise, check them in:
89 | # .python-version
90 |
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 |
98 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
99 | __pypackages__/
100 |
101 | # Celery stuff
102 | celerybeat-schedule
103 | celerybeat.pid
104 |
105 | # SageMath parsed files
106 | *.sage.py
107 |
108 | # Environments
109 | .env
110 | .venv
111 | env/
112 | venv/
113 | ENV/
114 | env.bak/
115 | venv.bak/
116 | i2/
117 |
118 | # Spyder project settings
119 | .spyderproject
120 | .spyproject
121 |
122 | # Rope project settings
123 | .ropeproject
124 |
125 | # mkdocs documentation
126 | /site
127 |
128 | # mypy
129 | .mypy_cache/
130 | .dmypy.json
131 | dmypy.json
132 |
133 | # Pyre type checker
134 | .pyre/
135 |
136 | # pytype static type analyzer
137 | .pytype/
138 |
139 | # Cython debug symbols
140 | cython_debug/
141 |
142 | # VSCode
143 | .vscode/
--------------------------------------------------------------------------------
/timescaledb/update_latency.py:
--------------------------------------------------------------------------------
1 | import json
2 | import psycopg2
3 | import argparse
4 |
5 |
 6 | benchmark_types = ['ycsb', 'tatp', 'tpcc', 'noop', 'smallbank']
 7 | terminal_values = [1, 2, 4, 8, 16, 32]
 8 | wal_devices = ['HDD', 'RAM disk', 'None']
 9 | client_times = [60, 600]
10 | VALID_DATE = '2020-09-22 00:00:00.000+00'
11 |
12 |
13 | def find_last_valid_latency(conn, benchmark, terminals, wal_device, client_time):
14 | query = f"""
15 | SELECT
16 | metrics->'latency' AS "latency",
17 | time
18 | FROM oltpbench_results
19 | WHERE
20 | git_branch = 'origin/master' AND
21 | (metrics->'latency'->>'min')::numeric != 0.0 AND
22 | time > '{VALID_DATE}' AND
23 | benchmark_type = '{benchmark}' AND
24 | terminals = '{terminals}' AND
25 | wal_device = '{wal_device}' AND
26 | client_time = '{client_time}'
27 | ORDER BY time ASC
28 | """
29 | with conn.cursor() as cur:
30 | cur.execute(query)
31 | #print(f'{benchmark}, {terminals}, {wal_device}, {client_time}')
32 | #print(cur.fetchone())
33 | return cur.fetchone()
34 |
35 | def update_latency(conn, benchmark, terminals, wal_device, client_time, latency):
36 | sql_statement = f"""
37 | UPDATE oltpbench_results
38 | SET metrics = jsonb_set(metrics, '{{latency}}', '{json.dumps(latency)}')
39 | WHERE
40 | time < '{VALID_DATE}' AND
41 | benchmark_type = '{benchmark}' AND
42 | terminals = '{terminals}' AND
43 | wal_device = '{wal_device}' AND
44 | client_time = '{client_time}'
45 | """
46 | #print(sql_statement)
47 | with conn.cursor() as cur:
48 | cur.execute(sql_statement)
49 |
50 | def main():
51 | parser = argparse.ArgumentParser()
52 | parser.add_argument('--username', type=str, help='Database username')
 53 |     parser.add_argument('--password', type=str, help='Database password')
 54 |     parser.add_argument('--host', type=str, default='incrudibles-production.db.pdl.cmu.edu', help='Hostname of the database (e.g. incrudibles-production.db.pdl.cmu.edu)')
55 | parser.add_argument('--port', type=str, default='32003', help='Port that the DB is running on.')
56 | args = parser.parse_args()
57 | username = args.username
58 | password = args.password
59 | host = args.host
60 | port = args.port
61 |
62 |
63 | conn = psycopg2.connect(f'postgres://{username}:{password}@{host}:{port}/pss_database')
64 |
65 | for benchmark in benchmark_types:
66 | for wal_device in wal_devices:
67 | for terminals in terminal_values:
68 | for client_time in client_times:
69 | row = find_last_valid_latency(conn, benchmark, terminals, wal_device, client_time)
70 | if row:
71 | update_latency(conn, benchmark, terminals, wal_device, client_time, row[0])
72 |
73 | conn.commit()
74 |
75 | if __name__ == "__main__":
76 | main()
77 |
78 |
--------------------------------------------------------------------------------
/performance-storage-service/README.md:
--------------------------------------------------------------------------------
1 | # Performance Storage Service
2 |
3 | [![Master Build Status][master_build_badge_url]][master_build_url]
4 |
 5 | This service accepts data from the Jenkins pipeline and stores it in TimescaleDB.
6 |
7 | ## API Documentation
 8 | The openapi.yaml file documents all the endpoints of the API.
9 |
10 |
11 | ## Related Kubernetes Files
12 | `/deployments/kubernetes/performance/performance-storage-service/*`
13 |
14 | `/deployments/kubernetes/namespaces.yml`
15 |
16 | `/deployments/playbooks/pss-deployment.yml`
17 |
18 | `/deployments/playbooks/create-namespaces.yml`
19 |
20 |
21 | ## Running Locally
22 |
23 | ### Running Locally - Django runserver
24 |
25 | ```bash
26 | source env/bin/activate
27 |
28 | # install requirements
29 | pip install -r requirements.txt
30 |
 31 | # create and run the timescaledb container if you don't already have it:
32 | # docker run -d --name timescaledb -p 5432:5432 -e POSTGRES_PASSWORD=password timescale/timescaledb:latest-pg12
33 |
34 | # start timescale docker container
35 | docker start timescaledb
36 |
37 | python manage.py runserver
38 |
39 | docker stop timescaledb
40 | ```
41 |
42 | ### Running locally - Kubernetes
 43 | Make sure you have Docker Desktop and Ansible installed.
44 |
45 | #### Prerequisite
46 | Make sure your docker-desktop kubernetes node is labeled with `env=local`.
47 |
48 | To do this run `kubectl label nodes docker-desktop env=local`
49 |
50 | Add kubernetes secrets `kubectl create secret generic secrets-local --from-literal=pss_db_user=postgres --from-literal=pss_db_password=password --from-literal=pss_creator_user=user --from-literal=pss_creator_password=password -n performance`
51 |
52 | #### Execution
53 | ```bash
54 |
55 | cd performance-storage-service
56 |
 57 | docker build -t cmudb/performance-storage-service .
58 |
59 | cd ../deployments
60 |
61 | ansible-playbook -i inventory playbooks/create-namespaces.yml -e "env=local host_override=local"
62 |
63 | ansible-playbook -i inventory playbooks/pss-deployment.yml -e "env=local host_override=local pss_db_user=postgres pss_db_password=password"
64 | ```
 65 | To verify, try hitting `http://localhost:31000/performance-results/health`
66 |
67 | To delete the local deployment
68 | ```
69 | kubectl delete pods,service,deployment -n performance --all
70 | ```
71 |
72 | ## Contributing
73 | ### Testing
 74 | To run tests, generate a coverage report, and generate static analysis reports, run:
75 | ```bash
76 | python manage.py jenkins --enable-coverage --coverage-rcfile=.coveragerc
77 | ```
78 |
79 | ### Code Quality
 80 | Before committing, be sure to review and resolve issues in `/reports/pep8.report`. For simple fixes you can autofix by running:
 81 | `autopep8 pss_project --recursive --in-place`. If you just want to autofix a single file, run: `autopep8 pss_project/<path_to_file>.py --in-place`
82 |
83 |
84 |
85 |
86 | [master_build_badge_url]: http://jenkins.db.cs.cmu.edu:8080/buildStatus/icon?job=testing-team%2Fnoisepage-stats-build%2Fmaster
87 | [master_build_url]: http://jenkins.db.cs.cmu.edu:8080/job/testing-team/job/noisepage-stats-build/job/master/
88 |
89 |
--------------------------------------------------------------------------------
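
As a quick sanity check after the local deployment steps in the README above, the health endpoint can also be hit from Python (a sketch; `requests` is assumed to be installed):

```python
import requests

resp = requests.get('http://localhost:31000/performance-results/health')
print(resp.status_code)  # expect 200 when the service is up
```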
/performance-storage-service/pss_project/api/tests/views/test_oltpbench.py:
--------------------------------------------------------------------------------
1 | from rest_framework.test import APITransactionTestCase
2 | from pss_project.api.tests.factories.rest.OLTPBenchRestFactory import OLTPBenchRestFactory
3 | from pss_project.api.tests.utils.utils import generate_dict_factory
4 | from pss_project.api.tests.utils.utils import get_basic_auth_header
5 | from django.contrib.auth.models import User
6 | from rest_framework.test import APIClient
7 |
8 |
9 | class OLTPBenchViewTest(APITransactionTestCase):
10 |
11 | test_username = 'testuser'
12 | test_password = 'password'
13 |
14 | def setUp(self):
15 | self.url = '/performance-results/oltpbench/'
16 | self.client = APIClient()
17 |
18 | test_user = User.objects.create_user(
19 | username=self.test_username, password=self.test_password)
20 | test_user.save()
21 | self.credentials = get_basic_auth_header(
22 | self.test_username, self.test_password)
23 |
24 | def test_403_forbidden_error(self):
25 | """
 26 |         Ensure that an unauthenticated request sends back a 403
27 | """
28 | # unset any existing credentials
29 | self.client.credentials()
30 | response = self.client.post(
31 | self.url, data={'noneya': 'business'}, format='json')
32 | self.assertEqual(response.status_code, 403)
33 |
34 | def test_201_response(self):
35 | """
36 | Ensure that a valid request sends back a 201
37 | """
38 | ClassDictFactory = generate_dict_factory(OLTPBenchRestFactory)
39 | input = ClassDictFactory()
40 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
41 | response = self.client.post(self.url, data=input, format='json')
42 | self.assertEqual(response.status_code, 201)
43 |
44 | def test_201_response_smudge_time(self):
45 | """
 46 |         Ensure that a second request with the same time is saved appropriately
47 | """
48 | ClassDictFactory = generate_dict_factory(OLTPBenchRestFactory)
49 | input = ClassDictFactory()
50 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
51 | self.client.post(self.url, data=input, format='json')
52 | response = self.client.post(self.url, data=input, format='json')
53 | self.assertEqual(response.status_code, 201)
54 |
55 | def test_400_bad_request(self):
56 | """
57 | Ensure that an invalid request sends back a 400
58 | """
59 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
60 | response = self.client.post(
61 | self.url, data={'noneya': 'business'}, format='json')
62 | self.assertEqual(response.status_code, 400)
63 |
64 | def test_invalid_data_num_validation_rules(self):
65 | """
 66 |         Ensure that an invalid request reports all required-field validation errors
67 | """
68 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
69 | response = self.client.post(
70 | self.url, data={'noneya': 'business'}, format='json')
71 | self.assertContains(response, 'required', count=5, status_code=400)
72 |
--------------------------------------------------------------------------------
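
The tests above build their POST payloads with `generate_dict_factory`, which turns a factory_boy factory into a plain dict the test client can serialize. A plausible sketch of such a helper; this is an assumption about the helper's behavior, not necessarily the implementation in `tests/utils/utils.py`:

```python
def generate_dict_factory(factory_class):
    """Return a callable that produces a plain-dict payload from a factory."""
    def to_dict(obj):
        # Recursively unwrap factory-built objects into JSON-friendly values
        if hasattr(obj, '__dict__'):
            return {key: to_dict(value) for key, value in vars(obj).items()}
        if isinstance(obj, (list, tuple)):
            return [to_dict(value) for value in obj]
        return obj
    return lambda: to_dict(factory_class())
```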
/performance-storage-service/pss_project/api/tests/views/test_artifact_stats.py:
--------------------------------------------------------------------------------
1 | from rest_framework.test import APITransactionTestCase
2 | from pss_project.api.tests.factories.rest.ArtifactStatsRestFactory import ArtifactStatsRestFactory
3 | from pss_project.api.tests.utils.utils import generate_dict_factory
4 | from pss_project.api.tests.utils.utils import get_basic_auth_header
5 | from django.contrib.auth.models import User
6 | from rest_framework.test import APIClient
7 |
8 |
9 | class ArtifactStatsViewTest(APITransactionTestCase):
10 |
11 | test_username = 'testuser'
12 | test_password = 'password'
13 |
14 | def setUp(self):
15 | self.url = '/performance-results/artifact-stats/'
16 | self.client = APIClient()
17 |
18 | test_user = User.objects.create_user(
19 | username=self.test_username, password=self.test_password)
20 | test_user.save()
21 | self.credentials = get_basic_auth_header(
22 | self.test_username, self.test_password)
23 |
24 | def test_403_forbidden_error(self):
25 | """
 26 |         Ensure that an unauthenticated request sends back a 403
27 | """
28 | # unset any existing credentials
29 | self.client.credentials()
30 | response = self.client.post(
31 | self.url, data={'noneya': 'business'}, format='json')
32 | self.assertEqual(response.status_code, 403)
33 |
34 | def test_201_response(self):
35 | """
36 | Ensure that a valid request sends back a 201
37 | """
38 | ClassDictFactory = generate_dict_factory(ArtifactStatsRestFactory)
39 | input = ClassDictFactory()
40 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
41 | response = self.client.post(self.url, data=input, format='json')
42 | self.assertEqual(response.status_code, 201)
43 |
44 | def test_201_response_smudge_time(self):
45 | """
 46 |         Ensure that a second request with the same time is saved appropriately
47 | """
48 | ClassDictFactory = generate_dict_factory(ArtifactStatsRestFactory)
49 | input = ClassDictFactory()
50 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
51 | self.client.post(self.url, data=input, format='json')
52 | response = self.client.post(self.url, data=input, format='json')
53 | self.assertEqual(response.status_code, 201)
54 |
55 | def test_400_bad_request(self):
56 | """
57 | Ensure that an invalid request sends back a 400
58 | """
59 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
60 | response = self.client.post(
61 | self.url, data={'noneya': 'business'}, format='json')
62 | self.assertEqual(response.status_code, 400)
63 |
64 | def test_invalid_data_num_validation_rules(self):
65 | """
 66 |         Ensure that an invalid request reports all required-field validation errors
67 | """
68 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
69 | response = self.client.post(
70 | self.url, data={'noneya': 'business'}, format='json')
71 | self.assertContains(response, 'required', count=3, status_code=400)
72 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/tests/views/test_microbenchmark.py:
--------------------------------------------------------------------------------
1 | from rest_framework.test import APITransactionTestCase
2 | from pss_project.api.tests.factories.rest.MicrobenchmarkRestFactory import MicrobenchmarkRestFactory
3 | from pss_project.api.tests.utils.utils import generate_dict_factory
4 | from pss_project.api.tests.utils.utils import get_basic_auth_header
5 | from django.contrib.auth.models import User
6 | from rest_framework.test import APIClient
7 |
8 |
9 | class MicrobenchmarkViewTest(APITransactionTestCase):
10 |
11 | test_username = 'testuser'
12 | test_password = 'password'
13 |
14 | def setUp(self):
15 | self.url = '/performance-results/microbenchmark/'
16 | self.client = APIClient()
17 |
18 | test_user = User.objects.create_user(
19 | username=self.test_username, password=self.test_password)
20 | test_user.save()
21 | self.credentials = get_basic_auth_header(
22 | self.test_username, self.test_password)
23 |
24 | def test_403_forbidden_error(self):
25 | """
 26 |         Ensure that an unauthenticated request sends back a 403
27 | """
28 | # unset any existing credentials
29 | self.client.credentials()
30 | response = self.client.post(
31 | self.url, data={'noneya': 'business'}, format='json')
32 | self.assertEqual(response.status_code, 403)
33 |
34 | def test_201_response(self):
35 | """
36 | Ensure that a valid request sends back a 201
37 | """
38 | ClassDictFactory = generate_dict_factory(MicrobenchmarkRestFactory)
39 | input = ClassDictFactory()
40 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
41 | response = self.client.post(self.url, data=input, format='json')
42 | self.assertEqual(response.status_code, 201)
43 |
44 | def test_201_response_smudge_time(self):
45 | """
 46 |         Ensure that a second request with the same time is saved appropriately
47 | """
48 | ClassDictFactory = generate_dict_factory(MicrobenchmarkRestFactory)
49 | input = ClassDictFactory()
50 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
51 | self.client.post(self.url, data=input, format='json')
52 | response = self.client.post(self.url, data=input, format='json')
53 | self.assertEqual(response.status_code, 201)
54 |
55 | def test_400_bad_request(self):
56 | """
57 | Ensure that an invalid request sends back a 400
58 | """
59 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
60 | response = self.client.post(
61 | self.url, data={'noneya': 'business'}, format='json')
62 | self.assertEqual(response.status_code, 400)
63 |
64 | def test_invalid_data_num_validation_rules(self):
65 | """
 66 |         Ensure that an invalid request reports all required-field validation errors
67 | """
68 | self.client.credentials(HTTP_AUTHORIZATION=self.credentials)
69 | response = self.client.post(
70 | self.url, data={'noneya': 'business'}, format='json')
71 | self.assertContains(response, 'required', count=6, status_code=400)
72 |
--------------------------------------------------------------------------------
/performance-storage-service/pss_project/api/models/rest/OLTPBenchRest.py:
--------------------------------------------------------------------------------
1 | from pss_project.api.models.rest.BaseRest import BaseRest
2 | from pss_project.api.models.rest.parameters.OLTPBenchParameters import OLTPBenchParameters
3 | from pss_project.api.models.rest.metrics.OLTPBenchMetrics import OLTPBenchMetrics
4 | from pss_project.api.models.rest.utils import to_dict
5 |
6 |
7 | class OLTPBenchRest(BaseRest):
8 | """ This class is the model of the OLTPBench data as it is communicated through the HTTP API """
9 |
10 | def __init__(self, metadata, timestamp, type, parameters, metrics):
11 | super().__init__(metadata, timestamp)
12 | self.type = type
13 | self.parameters = OLTPBenchParameters(**parameters)
14 | self.metrics = OLTPBenchMetrics(**metrics)
15 |
16 | def convert_to_db_json(self):
17 | """ Convert the API model into a dict that can be used to instantiate an OLTPBenchResult object """
18 | data = super().convert_to_db_json()
19 | oltpbench_data = {
20 | 'benchmark_type': self.type,
21 | 'query_mode': self.parameters.query_mode,
22 | 'scale_factor': self.parameters.scale_factor,
23 | 'terminals': self.parameters.terminals,
24 | 'client_time': self.parameters.client_time,
25 | 'weights': convert_weights_to_dict(self.parameters.transaction_weights),
26 | 'wal_device': self.metadata.environment.wal_device,
27 | 'max_connection_threads': self.parameters.max_connection_threads,
28 | 'incremental_metrics': convert_incremental_metrics_to_dict(self.metrics.incremental_metrics)
29 | }
30 | data.update(oltpbench_data)
31 | return data
32 |
33 | def convert_metrics_to_dict(self, metrics):
34 | """ This method is required because of the nested nature of the metrics JSON. This overrides the base class
35 | method """
36 | db_formatted_metrics = {
37 | 'throughput': metrics.throughput,
38 | 'latency': metrics.latency.__dict__,
39 | 'memory_info': to_dict(metrics.memory_info),
40 | }
41 | return db_formatted_metrics
42 |
43 |
44 | def convert_weights_to_dict(weights_list):
45 | """ The weights are passed in as a list and we need to convert them to a dict """
46 | db_formatted_weights = {}
47 | for weight_details in weights_list:
48 | weight_name = weight_details.name
49 | weight_value = weight_details.weight
50 | db_formatted_weights[weight_name] = weight_value
51 | return db_formatted_weights
52 |
53 |
54 | def convert_incremental_metrics_to_dict(incremental_metrics):
 55 |     """ This function is required because of the nested nature of the incremental metrics JSON. """
56 | db_formatted_incremental_metrics = []
57 | for metric in incremental_metrics:
58 | db_formatted_incremental_json = {
59 | 'time': metric.time,
60 | 'throughput': metric.throughput,
61 | 'latency': metric.latency.__dict__,
62 | 'memory_info': metric.memory_info.__dict__,
63 | }
64 | db_formatted_incremental_metrics.append(db_formatted_incremental_json)
65 | return db_formatted_incremental_metrics
66 |
--------------------------------------------------------------------------------
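
For reference, a tiny usage sketch of `convert_weights_to_dict` from the module above; the transaction names are illustrative stand-ins for real OLTPBench weights:

```python
from types import SimpleNamespace

from pss_project.api.models.rest.OLTPBenchRest import convert_weights_to_dict

weights = [SimpleNamespace(name='NewOrder', weight=45),
           SimpleNamespace(name='Payment', weight=43)]
print(convert_weights_to_dict(weights))  # {'NewOrder': 45, 'Payment': 43}
```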