├── HybridCloudEngineer
│ ├── .gitignore
│ ├── resources
│ │ └── frame_scripts
│ │ │ └── scripts
│ │ │ │ ├── Linux_OS_Update.txt
│ │ │ │ ├── Backups
│ │ │ │ │ ├── Linux_OS_Update.txt
│ │ │ │ │ ├── HAProxy_Uninstall.txt
│ │ │ │ │ ├── MySQL_Uninstall.txt
│ │ │ │ │ ├── WebServer_ServiceAction_Stop.txt
│ │ │ │ │ ├── WebServer_ServiceAction_Restart.txt
│ │ │ │ │ ├── CentOS8_URL.txt
│ │ │ │ │ ├── HAProxy_ServiceAction_Stop.txt
│ │ │ │ │ ├── HAProxy_ServiceAction_Restart.txt
│ │ │ │ │ ├── MySQL_Backup.txt
│ │ │ │ │ ├── MySQL_ServiceAction_Stop.txt
│ │ │ │ │ ├── MySQL_ServiceAction_Restart.txt
│ │ │ │ │ ├── WebServer_Uninstall.txt
│ │ │ │ │ ├── cloud-init.txt
│ │ │ │ │ ├── HAProxy_Delete_Webserver.txt
│ │ │ │ │ ├── HAProxy_Add_Webserver.txt
│ │ │ │ │ ├── WebServer_Install.txt
│ │ │ │ │ ├── WebServer_2Tier_Webapp.txt
│ │ │ │ │ ├── aws-cloud-init.txt
│ │ │ │ │ ├── HAProxy_Install-old.txt
│ │ │ │ │ ├── HAProxy_Update.txt
│ │ │ │ │ ├── HAProxy_Install.txt
│ │ │ │ │ ├── WebServer_3Tier_Webapp.txt
│ │ │ │ │ ├── MySQL_Install.txt
│ │ │ │ │ └── MySQL_Install-webuser.txt
│ │ │ │ ├── HAProxy_Uninstall.txt
│ │ │ │ ├── WebServer_ServiceAction_Stop.txt
│ │ │ │ ├── MySQL_Uninstall.txt
│ │ │ │ ├── WebServer_ServiceAction_Restart.txt
│ │ │ │ ├── CentOS8_URL.txt
│ │ │ │ ├── HAProxy_ServiceAction_Stop.txt
│ │ │ │ ├── HAProxy_ServiceAction_Restart.txt
│ │ │ │ ├── MySQL_ServiceAction_Stop.txt
│ │ │ │ ├── MySQL_Backup.txt
│ │ │ │ ├── MySQL_ServiceAction_Restart.txt
│ │ │ │ ├── WebServer_Uninstall.txt
│ │ │ │ ├── API
│ │ │ │ │ ├── POST_PROJECT_LIST_Response_Variables.txt
│ │ │ │ │ ├── HTTP_Task_Response_Variables.txt
│ │ │ │ │ ├── POST_CREATE_VLAN.txt
│ │ │ │ │ └── POST_PROJECT_UPDATE.txt
│ │ │ │ ├── cloud-init.txt
│ │ │ │ ├── HAProxy_Delete_Webserver.txt
│ │ │ │ ├── HAProxy_Add_Webserver.txt
│ │ │ │ ├── WebServer_Install.txt
│ │ │ │ ├── WebServer_2Tier_Webapp.txt
│ │ │ │ ├── aws-cloud-init.txt
│ │ │ │ ├── HAProxy_Install.txt
│ │ │ │ ├── HAProxy_Update.txt
│ │ │ │ ├── WebServer_3Tier_Webapp.txt
│ │ │ │ └── MySQL_Install.txt
│ ├── eval
│ │ ├── images
│ │ │ ├── blueprints.png
│ │ │ ├── eval_test.png
│ │ │ ├── screenshot.png
│ │ │ ├── select_nic.png
│ │ │ ├── create_button.png
│ │ │ ├── download_zip.png
│ │ │ ├── launch_button.png
│ │ │ ├── services_calm.png
│ │ │ ├── get_service_ip.png
│ │ │ ├── select_service.png
│ │ │ ├── upload_blueprint.png
│ │ │ ├── profile_variables.png
│ │ │ ├── configure_credentials.png
│ │ │ ├── deployment_finished.png
│ │ │ ├── service_configuration.png
│ │ │ └── upload_blueprint_button.png
│ │ ├── .gitignore
│ │ ├── setup.py
│ │ ├── script
│ │ │ ├── EvaluationItem
│ │ │ │ └── __init__.py
│ │ │ ├── Messages
│ │ │ │ └── __init__.py
│ │ │ ├── EnvironmentOptions
│ │ │ │ └── __init__.py
│ │ │ ├── eval.bash
│ │ │ └── eval.py
│ │ ├── criteria
│ │ │ ├── eval.json
│ │ │ ├── p3.json
│ │ │ ├── p3_new.json
│ │ │ ├── c2.json
│ │ │ └── c3.json
│ │ ├── README.md
│ │ └── blueprints
│ │ │ ├── basiclinuxvm_test.json
│ │ │ └── basiclinuxvm_test_2.json
│ ├── course
│ │ ├── 2
│ │ │ └── project
│ │ │ │ ├── README.md
│ │ │ │ ├── tecc_dba_test.sql
│ │ │ │ └── tecc_dba_test.php
│ │ └── 3
│ │ │ └── project
│ │ │ │ ├── MySQL_restore.sh.txt
│ │ │ │ ├── MySQL_backup_file.sh.txt
│ │ │ │ ├── README.md
│ │ │ │ ├── tecc_dba_test.sql
│ │ │ │ └── tecc_dba_test.php
│ └── README.md
├── README.md
└── .gitignore
/HybridCloudEngineer/.gitignore:
--------------------------------------------------------------------------------
1 | pc.devmarketing.nutanix.local/
2 | hpoc/
3 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Linux_OS_Update.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo yum -y update
4 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/Linux_OS_Update.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo yum -y update
4 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/HAProxy_Uninstall.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo yum -y erase haproxy
4 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/HAProxy_Uninstall.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo yum -y erase haproxy
4 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/blueprints.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/blueprints.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/eval_test.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/eval_test.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/screenshot.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/select_nic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/select_nic.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/WebServer_ServiceAction_Stop.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl stop httpd
4 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/create_button.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/create_button.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/download_zip.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/download_zip.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/launch_button.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/launch_button.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/services_calm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/services_calm.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/MySQL_Uninstall.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Generic uninstall script, goodbye!"
4 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/WebServer_ServiceAction_Restart.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl restart httpd
4 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/get_service_ip.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/get_service_ip.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/select_service.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/select_service.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/upload_blueprint.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/upload_blueprint.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/MySQL_Uninstall.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Generic uninstall script, goodbye!"
4 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/WebServer_ServiceAction_Stop.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl stop httpd
4 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/profile_variables.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/profile_variables.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/WebServer_ServiceAction_Restart.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl restart httpd
4 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/configure_credentials.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/configure_credentials.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/deployment_finished.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/deployment_finished.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/service_configuration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/service_configuration.png
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # udacity
2 | Udacity and Nutanix [Hybrid Cloud Engineer Nanodegree](https://www.udacity.com/course/hybrid-cloud-engineer-nanodegree--nd321) materials.
3 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/images/upload_blueprint_button.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/eval/images/upload_blueprint_button.png
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/CentOS8_URL.txt:
--------------------------------------------------------------------------------
1 | https://cloud.centos.org/centos/8/x86_64/images/CentOS-8-GenericCloud-8.2.2004-20200611.2.x86_64.qcow2
--------------------------------------------------------------------------------
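This file only stores the CentOS 8 cloud image URL that the blueprints reference. A minimal sketch of fetching and sanity-checking the image locally before uploading it to an image service; wget and qemu-img are assumptions here, not part of the course scripts:

# Sketch only: download the image named in CentOS8_URL.txt and confirm it is a
# valid qcow2 (assumes wget and qemu-img are installed).
url="$(cat CentOS8_URL.txt)"
wget -O centos8.qcow2 "${url}"
qemu-img info centos8.qcow2   # should report "file format: qcow2"
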
/HybridCloudEngineer/course/3/project/MySQL_restore.sh.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | mysql --user=root --password='end-user-provided-password' < /tmp/mysqldump
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/CentOS8_URL.txt:
--------------------------------------------------------------------------------
1 | https://cloud.centos.org/centos/8/x86_64/images/CentOS-8-GenericCloud-8.2.2004-20200611.2.x86_64.qcow2
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/HAProxy_ServiceAction_Stop.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl stop haproxy
4 | sudo systemctl daemon-reload
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/HAProxy_ServiceAction_Stop.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl stop haproxy
4 | sudo systemctl daemon-reload
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/HAProxy_ServiceAction_Restart.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl daemon-reload
4 | sudo systemctl restart haproxy
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/MySQL_ServiceAction_Stop.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl stop mysqld
4 | # sudo systemctl status mysqld.service
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/MySQL_Backup.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | mysqldump --all-databases --user=root --password='@@{MYSQL_PASSWORD}@@'
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/MySQL_ServiceAction_Restart.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl restart mysqld
4 | # sudo systemctl status mysqld.service
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/HAProxy_ServiceAction_Restart.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl daemon-reload
4 | sudo systemctl restart haproxy
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/MySQL_Backup.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | mysqldump --all-databases --user=root --password='@@{MYSQL_PASSWORD}@@'
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/MySQL_ServiceAction_Stop.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl stop mysqld
4 | # sudo systemctl status mysqld.service
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/course/3/project/MySQL_backup_file.sh.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | mysqldump --all-databases --user=root --password='end-user-provided-password' > /tmp/mysqldump
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/MySQL_ServiceAction_Restart.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo systemctl restart mysqld
4 | # sudo systemctl status mysqld.service
5 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/WebServer_Uninstall.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | sudo rm -rf /var/www/html/* || true
5 | sudo yum erase -y httpd php || true
6 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/WebServer_Uninstall.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | sudo rm -rf /var/www/html/* || true
5 | sudo yum erase -y httpd php || true
6 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/API/POST_PROJECT_LIST_Response_Variables.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nutanixdev/udacity/HEAD/HybridCloudEngineer/resources/frame_scripts/scripts/API/POST_PROJECT_LIST_Response_Variables.txt
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/cloud-init.txt:
--------------------------------------------------------------------------------
1 | #cloud-config
2 | users:
3 |   - name: @@{superuser.username}@@
4 |     ssh-authorized-keys:
5 |       - @@{superuser.public_key}@@
6 |     sudo: ['ALL=(ALL) NOPASSWD:ALL']
7 |
--------------------------------------------------------------------------------
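A minimal sketch of confirming on the deployed VM that this user data was applied; "centos" below stands in for whatever superuser.username resolved to at launch:

# Sketch only: post-boot verification, not part of the blueprint tasks.
id centos                                    # user should exist
sudo -l -U centos                            # should list NOPASSWD: ALL
sudo cat /home/centos/.ssh/authorized_keys   # should contain superuser.public_key
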
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/cloud-init.txt:
--------------------------------------------------------------------------------
1 | #cloud-config
2 | users:
3 |   - name: @@{superuser.username}@@
4 |     ssh-authorized-keys:
5 |       - @@{superuser.public_key}@@
6 |     sudo: ['ALL=(ALL) NOPASSWD:ALL']
7 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 |
3 | venv/
4 | .vscode/
5 |
6 | .metrics
7 | *.pdf
8 | *.html
9 |
10 | *.pyc
11 | __pycache__/
12 |
13 | .pytest_cache/
14 | .coverage
15 | htmlcov/
16 |
17 | dist/
18 | build/
19 | *.egg-info/
20 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 |
3 | venv/
4 | .vscode/
5 |
6 | .metrics
7 | *.pdf
8 | *.html
9 |
10 | *.pyc
11 | __pycache__/
12 |
13 | .pytest_cache/
14 | .coverage
15 | htmlcov/
16 |
17 | dist/
18 | build/
19 | *.egg-info/
20 |
21 | #mlavi
22 | pc.devmarketing.nutanix.local/
23 | hpoc/
24 |
25 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/HAProxy_Delete_Webserver.txt:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -ex
3 |
4 | host=$(echo "@@{WebServer.address}@@" | awk -F "," '{print $NF}')
5 | sudo sed -i "/$host/d" /etc/haproxy/haproxy.cfg
6 |
7 | sudo systemctl daemon-reload
8 | sudo systemctl restart haproxy
9 |
10 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/HAProxy_Delete_Webserver.txt:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -ex
3 |
4 | host=$(echo "@@{WebServer.address}@@" | awk -F "," '{print $NF}')
5 | sudo sed -i "/$host/d" /etc/haproxy/haproxy.cfg
6 |
7 | sudo systemctl daemon-reload
8 | sudo systemctl restart haproxy
9 |
10 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/API/HTTP_Task_Response_Variables.txt:
--------------------------------------------------------------------------------
1 | Variable: CLUSTER_UUID
2 | Path: $.entities[0].status.cluster_reference.uuid
3 | Variable: CLUSTER_NAME
4 | Path: $.entities[0].status.cluster_reference.name
5 | Variable: DEF_SUB_UUID
6 | Path: $.entities[0].metadata.uuid
7 | Variable: DEF_SUB_NAME
8 | Path: $.entities[0].status.name
9 |
--------------------------------------------------------------------------------
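The Variable/Path pairs above are the JSONPath expressions a Calm HTTP task uses to pull the cluster and default-subnet details out of a subnets list response. A minimal sketch of the same extraction done by hand with curl and jq; the Prism Central address, credentials, and request body are placeholders, not course values:

# Sketch only: query the Prism Central v3 subnets list endpoint and read the
# same four values the HTTP task exports.
PC="prism-central.example.com:9440"
curl -sk -u admin:'password' -X POST "https://${PC}/api/nutanix/v3/subnets/list" \
  -H 'Content-Type: application/json' -d '{"kind":"subnet"}' |
jq -r '.entities[0].status.cluster_reference.uuid,
       .entities[0].status.cluster_reference.name,
       .entities[0].metadata.uuid,
       .entities[0].status.name'
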
/HybridCloudEngineer/resources/frame_scripts/scripts/HAProxy_Add_Webserver.txt:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -ex
3 |
4 | host=$(echo "@@{WebServer.address}@@" | awk -F "," '{print $NF}')
5 | port=80
6 | echo " server host-${host} ${host}:${port} weight 1 maxconn 100 check" | sudo tee -a /etc/haproxy/haproxy.cfg
7 |
8 | sudo systemctl daemon-reload
9 | sudo systemctl restart haproxy
10 |
11 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/HAProxy_Add_Webserver.txt:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -ex
3 |
4 | host=$(echo "@@{WebServer.address}@@" | awk -F "," '{print $NF}')
5 | port=80
6 | echo " server host-${host} ${host}:${port} weight 1 maxconn 100 check" | sudo tee -a /etc/haproxy/haproxy.cfg
7 |
8 | sudo systemctl daemon-reload
9 | sudo systemctl restart haproxy
10 |
11 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/WebServer_Install.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | echo "__ Install httpd and php..."
5 | sudo yum install -y httpd
6 | sudo yum install -y php php-gd php-mbstring php-pdo php-mysqlnd
7 |
8 | echo "
9 | DirectoryIndex index.php index.html
10 | " | sudo tee /etc/httpd/conf.modules.d/dir.conf
11 |
12 | echo "" | sudo tee /var/www/html/index.php
15 |
16 | sudo systemctl enable httpd
17 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | setup(
4 |     name='evaluate-udacity-student-blueprint',
5 |     version='0.1',
6 |     description='Take an exported Nutanix Calm Blueprint in JSON format and '
7 |                 + 'evaluate various configurations based on a supplied JSON '
8 |                 + 'criteria file.',
9 |     author='Chris Rasmussen',
10 |     author_email='crasmussen@nutanix.com',
11 |     install_requires=[
12 |         'wheel',
13 |         'humanize',
14 |         'colorama',
15 |         'dotty-dict'
16 |     ],
17 |     packages=find_packages('.'),
18 |     package_dir={'': '.'}
19 | )
20 |
--------------------------------------------------------------------------------
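A minimal sketch of installing this evaluation helper for local use; the virtualenv is an assumption, not something the repo requires:

# Sketch only: run from the HybridCloudEngineer/eval directory.
python3 -m venv venv && source venv/bin/activate
pip install -e .        # pulls in wheel, humanize, colorama and dotty-dict
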
/HybridCloudEngineer/resources/frame_scripts/scripts/API/POST_CREATE_VLAN.txt:
--------------------------------------------------------------------------------
1 | {
2 | "spec": {
3 | "description": "VLAN-@@{VLAN_ID}@@",
4 | "resources": {
5 | "subnet_type": "VLAN",
6 | "vswitch_name": "br0",
7 | "vlan_id": @@{calm_int(VLAN_ID)}@@
8 | },
9 | "cluster_reference": {
10 | "kind": "cluster",
11 | "name": "@@{CLUSTER_NAME}@@",
12 | "uuid": "@@{CLUSTER_UUID}@@"
13 | },
14 | "name": "VLAN-@@{VLAN_ID}@@"
15 | },
16 | "api_version": "3.1.0",
17 | "metadata": {
18 | "kind": "subnet",
19 | "spec_version": 0,
20 | "spec_hash": "string",
21 | "should_force_translate": true,
22 | "name": "VLAN-@@{VLAN_ID}@@"
23 | }
24 | }
--------------------------------------------------------------------------------
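With the @@{...}@@ macros resolved, this body creates a VLAN-backed subnet through the Prism Central v3 API. A minimal sketch with curl; the address, credentials, and rendered file name are placeholders:

# Sketch only: POST the rendered payload to the v3 subnets endpoint.
PC="prism-central.example.com:9440"
curl -sk -u admin:'password' -X POST "https://${PC}/api/nutanix/v3/subnets" \
  -H 'Content-Type: application/json' \
  -d @POST_CREATE_VLAN.rendered.json | jq '.status.state'   # expect "PENDING"
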
/HybridCloudEngineer/course/2/project/README.md:
--------------------------------------------------------------------------------
1 | # Udacity Hybrid Cloud Engineer: Course 2: Project
2 | ## Private Cloud SaaS: Three-tier web application
3 |
4 | Our fictitious example company is **The E-Commerce Company,** with the stock symbol **TECC**, hence you will see naming standards that use the tecc prefix.
5 |
6 | The TECC Software Release manager has coordinated with the Database Administrator team to define a test application and test database schema:
7 |
8 | - tecc_webuser@webapp: [tecc_dba_test.sql](tecc_dba_test.sql)
9 | - [tecc_dba_test.php](tecc_dba_test.php)
10 |
11 | The test application and database are used to verify that a new workload deployment has a baseline of connectivity and functionality to ensure developer productivity "out of the box."
12 |
--------------------------------------------------------------------------------
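A minimal sketch of running the DBA test by hand on a deployed instance; the root password placeholder matches these course files, and the Apache docroot is assumed to be /var/www/html:

# Sketch only: load the schema, publish the test page, and check the response.
mysql --user=root --password='end-user-provided-password' < tecc_dba_test.sql
sudo cp tecc_dba_test.php /var/www/html/
curl -s http://localhost/tecc_dba_test.php   # page should report database records
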
/HybridCloudEngineer/resources/frame_scripts/scripts/WebServer_Install.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | echo "__ Install httpd and php..."
5 | sudo yum install -y httpd
6 | sudo yum install -y php php-gd php-mbstring php-pdo php-mysqlnd
7 |
8 | echo "
9 | DirectoryIndex index.php index.html
10 | " | sudo tee /etc/httpd/conf.modules.d/dir.conf
11 |
12 | echo "" | sudo tee /var/www/html/index.php
15 |
16 | sudo systemctl enable httpd
17 |
18 | # temporarily disable SELinux to prevent httpd to MySQL authentication issues
19 | # this is for testing only and should not be done in production environments
20 | echo "__ Temporarily disable SELinux..."
21 | sudo setenforce 0
22 |
--------------------------------------------------------------------------------
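A minimal sketch of checking the result after this task runs; note that the task enables httpd but does not start it. These commands are verification only, not part of the blueprint:

# Sketch only: post-install checks for the web tier.
systemctl is-enabled httpd               # expect "enabled"
sudo systemctl start httpd               # the install task does not start the service
php -v                                   # php and the php-mysqlnd stack were installed above
curl -sI http://localhost/ | head -n 1   # expect an HTTP response from httpd
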
/HybridCloudEngineer/resources/frame_scripts/scripts/WebServer_2Tier_Webapp.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | sudo touch /var/www/html/index.php
5 | sudo chown @@{superuser.username}@@:@@{superuser.username}@@ /var/www/html/index.php
6 |
7 | cat > /var/www/html/index.php <<- "EOF"
8 |
9 |
10 | @@{calm_blueprint_name}@@
11 |
12 |
13 | Hostname: @@{name}@@ at @@{address}@@, replica: @@{calm_array_index}@@
14 | Web docroot: {$_SERVER["DOCUMENT_ROOT"]}, file: {$_SERVER["SCRIPT_NAME"]}\n";
18 | }
19 |
20 | catch (PDOException $e) {
21 | print "Error!: " . $e->getMessage() . "<br />";
22 | print phpinfo();
23 | die();
24 | }
25 | ?>
26 |
27 | EOF
28 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/WebServer_2Tier_Webapp.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | sudo touch /var/www/html/index.php
5 | sudo chown @@{superuser.username}@@:@@{superuser.username}@@ /var/www/html/index.php
6 |
7 | cat > /var/www/html/index.php <<- "EOF"
8 |
9 |
10 | @@{calm_blueprint_name}@@
11 |
12 |
13 | Hostname: @@{name}@@ at @@{address}@@, replica: @@{calm_array_index}@@
14 | Web docroot: $_SERVER['DOCUMENT_ROOT'], file: $_SERVER['SCRIPT_NAME']\n";
18 | }
19 |
20 | catch (PDOException $e) {
21 | print "Error!: " . $e->getMessage() . "<br />";
22 | print phpinfo();
23 | die();
24 | }
25 | ?>
26 |
27 | EOF
28 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/course/3/project/README.md:
--------------------------------------------------------------------------------
1 | # Udacity Hybrid Cloud Engineer: Course 3: Project
2 | ## Hybrid Cloud SaaS: Three-tier web application
3 |
4 | Our fictitious example company is **The E-Commerce Company,** with the stock symbol **TECC**, hence you will see naming standards that use the tecc prefix.
5 |
6 | The TECC Software Release manager has coordinated with the Database Administrator team to define a test application and test database schema:
7 |
8 | - tecc_webuser@webapp: [tecc_dba_test.sql](tecc_dba_test.sql)
9 | - [tecc_dba_test.php](tecc_dba_test.php)
10 |
11 | The test application and database are used to verify that a new workload deployment has a baseline of connectivity and functionality to ensure developer productivity "out of the box," including ad-hoc database operations:
12 |
13 | - [database backup](MySQL_backup_file.sh.txt)
14 | - [database restore](MySQL_restore.sh.txt)
15 |
--------------------------------------------------------------------------------
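A minimal sketch of an ad-hoc backup/restore round trip using the two scripts above, run on the MySQL VM; the password placeholder inside them matches the rest of the course files:

# Sketch only: exercise the backup and restore helpers end to end.
bash MySQL_backup_file.sh.txt   # writes the dump to /tmp/mysqldump
ls -lh /tmp/mysqldump           # confirm the dump exists and is non-empty
bash MySQL_restore.sh.txt       # replays /tmp/mysqldump back into MySQL
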
/HybridCloudEngineer/course/2/project/tecc_dba_test.sql:
--------------------------------------------------------------------------------
1 | CREATE DATABASE IF NOT EXISTS webapp ;
2 | CREATE TABLE IF NOT EXISTS webapp.tecc_dba_test ( \
3 | id INT NOT NULL AUTO_INCREMENT , \
4 | time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP , \
5 | comment VARCHAR(80) NULL DEFAULT NULL , \
6 | PRIMARY KEY (id)) \
7 | ENGINE = InnoDB CHARSET=ascii COLLATE ascii_general_ci COMMENT = 'testing';
8 |
9 | FLUSH PRIVILEGES;
10 | CREATE USER 'tecc_webuser'@'%' IDENTIFIED WITH mysql_native_password BY 'end-user-provided-password' \
11 | REQUIRE NONE WITH MAX_QUERIES_PER_HOUR 0 MAX_CONNECTIONS_PER_HOUR 0 MAX_UPDATES_PER_HOUR 0 MAX_USER_CONNECTIONS 0;
12 | GRANT SELECT, INSERT ON *.* TO 'tecc_webuser'@'%';
13 |
14 | CREATE USER 'tecc_webuser'@'localhost' IDENTIFIED WITH mysql_native_password BY 'end-user-provided-password' \
15 | REQUIRE NONE WITH MAX_QUERIES_PER_HOUR 0 MAX_CONNECTIONS_PER_HOUR 0 MAX_UPDATES_PER_HOUR 0 MAX_USER_CONNECTIONS 0;
16 | GRANT SELECT, INSERT ON *.* TO 'tecc_webuser'@'localhost';
17 |
18 | FLUSH PRIVILEGES;
19 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/course/3/project/tecc_dba_test.sql:
--------------------------------------------------------------------------------
1 | CREATE DATABASE IF NOT EXISTS webapp ;
2 | CREATE TABLE IF NOT EXISTS webapp.tecc_dba_test ( \
3 | id INT NOT NULL AUTO_INCREMENT , \
4 | time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP , \
5 | comment VARCHAR(80) NULL DEFAULT NULL , \
6 | PRIMARY KEY (id)) \
7 | ENGINE = InnoDB CHARSET=ascii COLLATE ascii_general_ci COMMENT = 'testing';
8 |
9 | FLUSH PRIVILEGES;
10 | CREATE USER 'tecc_webuser'@'%' IDENTIFIED WITH mysql_native_password BY 'end-user-provided-password' \
11 | REQUIRE NONE WITH MAX_QUERIES_PER_HOUR 0 MAX_CONNECTIONS_PER_HOUR 0 MAX_UPDATES_PER_HOUR 0 MAX_USER_CONNECTIONS 0;
12 | GRANT SELECT, INSERT ON *.* TO 'tecc_webuser'@'%';
13 |
14 | CREATE USER 'tecc_webuser'@'localhost' IDENTIFIED WITH mysql_native_password BY 'end-user-provided-password' \
15 | REQUIRE NONE WITH MAX_QUERIES_PER_HOUR 0 MAX_CONNECTIONS_PER_HOUR 0 MAX_UPDATES_PER_HOUR 0 MAX_USER_CONNECTIONS 0;
16 | GRANT SELECT, INSERT ON *.* TO 'tecc_webuser'@'localhost';
17 |
18 | FLUSH PRIVILEGES;
19 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/API/POST_PROJECT_UPDATE.txt:
--------------------------------------------------------------------------------
1 |
2 | {
3 | "spec": {
4 | "name": "default",
5 | "resources": {
6 | "account_reference_list": [
7 | {
8 | "kind": "account",
9 | "name": "@@{ACCOUNT_NAME}@@",
10 | "uuid": "@@{ACCOUNT_UUID}@@"
11 | }
12 | ],
13 | "subnet_reference_list": [
14 | {
15 | "kind": "subnet",
16 | "name": "@@{DEF_SUB_NAME}@@",
17 | "uuid": "@@{DEF_SUB_UUID}@@"
18 | },
19 | {
20 | "kind": "subnet",
21 | "name": "@@{VLAN_NAME}@@",
22 | "uuid": "@@{VLAN_UUID}@@"
23 | }
24 | ],
25 | "external_network_list": []
26 | }
27 | },
28 | "api_version": "3.1",
29 | "metadata": {
30 | "kind": "project",
31 | "uuid": "@@{PROJECT_UUID}@@",
32 | "spec_version": 0,
33 | "project_reference": {
34 | "kind": "project",
35 | "name": "@@{PROJECT_NAME}@@",
36 | "uuid": "@@{PROJECT_UUID}@@"
37 | }
38 | }
39 | }
--------------------------------------------------------------------------------
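Despite the POST_ prefix on the file name, updating an existing project in the Prism Central v3 API is done with a PUT against the project UUID, and spec_version must match the value returned by a prior GET. A minimal sketch with curl; the address, credentials, and rendered file name are placeholders:

# Sketch only: send the rendered body to the v3 projects endpoint.
PC="prism-central.example.com:9440"
PROJECT_UUID="00000000-0000-0000-0000-000000000000"   # placeholder
curl -sk -u admin:'password' -X PUT \
  "https://${PC}/api/nutanix/v3/projects/${PROJECT_UUID}" \
  -H 'Content-Type: application/json' \
  -d @POST_PROJECT_UPDATE.rendered.json | jq '.status.state'
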
/HybridCloudEngineer/eval/script/EvaluationItem/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | class EvaluationItem():
4 |     '''
5 |     class to hold information about each part of the blueprint
6 |     as it is evaluated
7 |     '''
8 |
9 |     def __init__(self, eval_criteria: dict):
10 |         '''
11 |         class constructor
12 |         here we are setting up the evaluation criteria object
13 |         '''
14 |         self.eval_type = eval_criteria["eval_type"]
15 |         self.match_type = eval_criteria["match_type"]
16 |         self.description = eval_criteria["description"]
17 |         self.criteria = eval_criteria["criteria"]
18 |         self.expected = eval_criteria["expected"]
19 |
20 |     def __repr__(self):
21 |         '''
22 |         decent __repr__ for debuggability
23 |         this is something recommended by Raymond Hettinger
24 |         '''
25 |         return (f'{self.__class__.__name__}(eval_type={self.eval_type},'
26 |                 f'match_type={self.match_type},'
27 |                 f'description={self.description},'
28 |                 f'criteria={self.criteria},'
29 |                 f'expected={self.expected}')
30 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/aws-cloud-init.txt:
--------------------------------------------------------------------------------
1 | #cloud-config
2 | # https://aws.amazon.com/premiumsupport/knowledge-center/ec2-user-account-cloud-init-user-data/
3 | # omit: security-group of VPC for SSH?
4 | # omit: SSH-keypair in ec2 region, skips creation: ec2-user
5 | cloud_final_modules:
6 | - [users-groups,always]
7 | users:
8 | - name: @@{superuser.username}@@
9 | groups: [ wheel ]
10 | sudo: [ "ALL=(ALL) NOPASSWD:ALL" ]
11 | shell: /bin/bash
12 | ssh-authorized-keys:
13 | - @@{superuser.public_key}@@
14 | - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC8ZzG4f1TfEZ2YRgDcPSQjb+fxbtRS4m1dr/EcqwqIxiV6nU9Ft3PrTqC8qFkhBjOvSJJ3GeEMCZnvEK9Iq8F+R0G3pa29fZeRA8Ur3PM4d2kb2ZhEYLMmnhKJoSTJw4c5PxaZ4ZXKUH+yATQUsLrwoYsHEC6bAvKl0+yFspQYehaiEEO17o3KQUq/izU7Agi4vETAuR7/dlKwzj1sG8fikaf351gmQhIHd+nXxIY9yTA3CpvfeKTJ77aHN0Cg+HwXg9X+c0+P/6zMfo87oru25qnPX5ITa2TfposQWp1xASGrrJX5B6rNYuQQKzVo5OO7VfFEKDsDlzGAkDVlD797xVoq+/HN6G5gsi82jujBb1X9kyCLGFNhXsseGcqYelBMWmHKHTp69/vnWcjT1dCB4OWEQr5ARcxYc5zNXV4N2CVYwH4iF70ztt4Dd7/4gmvXWmzyuc0j7gd+QHz0BwxJTGZu+MPPXu8xAYtcCSAh/SQFzL7xhnbsIrDj2mXBwOi36vxPQ8APRstr0dtEYivwyPyVb7lzR2SGmQNlHxB/F/xdiHszqD+/c4DGZqW4xj+I6ICu/19lh3gi2rLvrGF86EPTQ3hQKtpv6/t2O+UXONnc+Ayc5dKh6baC6IbwGQnT0tLDPTXM5+o3M2a0KefdqmzL6/gekpATS+scqb6tPQ== mark.lavi@nutanix.com-2018-05-21-passphraseless
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/aws-cloud-init.txt:
--------------------------------------------------------------------------------
1 | #cloud-config
2 | # https://aws.amazon.com/premiumsupport/knowledge-center/ec2-user-account-cloud-init-user-data/
3 | # omit: security-group of VPC for SSH?
4 | # omit: SSH-keypair in ec2 region, skips creation: ec2-user
5 | cloud_final_modules:
6 | - [users-groups,always]
7 | users:
8 | - name: @@{superuser.username}@@
9 | groups: [ wheel ]
10 | sudo: [ "ALL=(ALL) NOPASSWD:ALL" ]
11 | shell: /bin/bash
12 | ssh-authorized-keys:
13 | - @@{superuser.public_key}@@
14 | - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC8ZzG4f1TfEZ2YRgDcPSQjb+fxbtRS4m1dr/EcqwqIxiV6nU9Ft3PrTqC8qFkhBjOvSJJ3GeEMCZnvEK9Iq8F+R0G3pa29fZeRA8Ur3PM4d2kb2ZhEYLMmnhKJoSTJw4c5PxaZ4ZXKUH+yATQUsLrwoYsHEC6bAvKl0+yFspQYehaiEEO17o3KQUq/izU7Agi4vETAuR7/dlKwzj1sG8fikaf351gmQhIHd+nXxIY9yTA3CpvfeKTJ77aHN0Cg+HwXg9X+c0+P/6zMfo87oru25qnPX5ITa2TfposQWp1xASGrrJX5B6rNYuQQKzVo5OO7VfFEKDsDlzGAkDVlD797xVoq+/HN6G5gsi82jujBb1X9kyCLGFNhXsseGcqYelBMWmHKHTp69/vnWcjT1dCB4OWEQr5ARcxYc5zNXV4N2CVYwH4iF70ztt4Dd7/4gmvXWmzyuc0j7gd+QHz0BwxJTGZu+MPPXu8xAYtcCSAh/SQFzL7xhnbsIrDj2mXBwOi36vxPQ8APRstr0dtEYivwyPyVb7lzR2SGmQNlHxB/F/xdiHszqD+/c4DGZqW4xj+I6ICu/19lh3gi2rLvrGF86EPTQ3hQKtpv6/t2O+UXONnc+Ayc5dKh6baC6IbwGQnT0tLDPTXM5+o3M2a0KefdqmzL6/gekpATS+scqb6tPQ== mark.lavi@nutanix.com-2018-05-21-passphraseless
--------------------------------------------------------------------------------
/HybridCloudEngineer/course/2/project/tecc_dba_test.php:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | TECC Test Web Application
5 |
6 |
7 | TECC Test Web Application
8 | true,
12 | PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
13 | ));
14 |
15 | // ensure there is at least 1 row of data for the app to read
16 | $data = $dbh->query("INSERT INTO tecc_dba_test (id, time, comment) VALUES (NULL, CURRENT_TIMESTAMP, '{$_SERVER['SCRIPT_NAME']}')");
17 |
18 | $data = $dbh->query('SELECT MAX(id) AS count FROM tecc_dba_test')->fetchAll(PDO::FETCH_COLUMN);
19 | $count = $data[0]; //print_r($data);
20 | $data = $dbh->query("SELECT time, comment FROM tecc_dba_test WHERE id = $count")->fetchAll(PDO::FETCH_ASSOC);
21 | $time = $data[0]['time'];
22 | $comment = $data[0]['comment'];
23 |
24 | echo "Database records: $count<br />\n";
25 | echo "Last comment: $comment, on: $time<br />\n";
26 | echo "Web docroot: {$_SERVER['DOCUMENT_ROOT']}, file: {$_SERVER['SCRIPT_NAME']}<br />\n";
27 |
28 | $dbh = null;
29 | }
30 | catch (PDOException $e) {
31 | print "Error!: " . $e->getMessage() . "<br />";
32 | print phpinfo();
33 | die();
34 | }
35 | ?>
36 |
37 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/course/3/project/tecc_dba_test.php:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | TECC Test Web Application
5 |
6 |
7 | TECC Test Web Application
8 | true,
12 | PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
13 | ));
14 |
15 | // ensure there is at least 1 row of data for the app to read
16 | $data = $dbh->query("INSERT INTO tecc_dba_test (id, time, comment) VALUES (NULL, CURRENT_TIMESTAMP, '{$_SERVER['SCRIPT_NAME']}')");
17 |
18 | $data = $dbh->query('SELECT MAX(id) AS count FROM tecc_dba_test')->fetchAll(PDO::FETCH_COLUMN);
19 | $count = $data[0]; //print_r($data);
20 | $data = $dbh->query("SELECT time, comment FROM tecc_dba_test WHERE id = $count")->fetchAll(PDO::FETCH_ASSOC);
21 | $time = $data[0]['time'];
22 | $comment = $data[0]['comment'];
23 |
24 | echo "Database records: $count<br />\n";
25 | echo "Last comment: $comment, on: $time<br />\n";
26 | echo "Web docroot: {$_SERVER['DOCUMENT_ROOT']}, file: {$_SERVER['SCRIPT_NAME']}<br />\n";
27 |
28 | $dbh = null;
29 | }
30 | catch (PDOException $e) {
31 | print "Error!: " . $e->getMessage() . "<br />";
32 | print phpinfo();
33 | die();
34 | }
35 | ?>
36 |
37 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/script/Messages/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from collections import namedtuple
4 | from colorama import Fore, Style
5 |
6 |
7 | class Messages():
8 |     '''
9 |     various messages that are used throughout the script
10 |     '''
11 |
12 |     def __init__(self):
13 |         '''
14 |         class constructor
15 |         '''
16 |         Prefixes = namedtuple(
17 |             'Prefixes',
18 |             [
19 |                 'error',
20 |                 'ok',
21 |                 'info',
22 |                 'passed',
23 |                 'fail',
24 |                 'warning',
25 |                 'reset',
26 |                 'line'
27 |             ]
28 |         )
29 |         self.prefixes = Prefixes(
30 |             error=Fore.RED + '[ERROR]' + Style.RESET_ALL,
31 |             ok=Fore.BLUE + '[ OK]' + Style.RESET_ALL,
32 |             info=Fore.BLUE + '[ INFO]' + Style.RESET_ALL,
33 |             passed=Fore.GREEN + '[ PASS]' + Style.RESET_ALL,
34 |             fail=Fore.RED + '[ FAIL]' + Style.RESET_ALL,
35 |             warning=Fore.YELLOW + '[ WARN]' + Style.RESET_ALL,
36 |             reset=Style.RESET_ALL,
37 |             line=Style.RESET_ALL + '--------------------'
38 |         )
39 |
40 |     def __repr__(self):
41 |         '''
42 |         decent __repr__ for debuggability
43 |         this is something recommended by Raymond Hettinger
44 |         '''
45 |         return (f'{self.__class__.__name__}(messages={self.prefixes}')
46 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/criteria/eval.json:
--------------------------------------------------------------------------------
1 | {
2 | "criteria": [{
3 | "description": "Calm Service Instances",
4 | "key": ".spec.resources.service_definition_list",
5 | "type": "number",
6 | "match": "exact",
7 | "expected": 1
8 | },
9 | {
10 | "description": "Calm Package Instances",
11 | "key": ".spec.resources.package_definition_list",
12 | "type": "number",
13 | "match": "exact",
14 | "expected": 2
15 | },
16 | {
17 | "description": "Calm Substrate Instances",
18 | "key": ".spec.resources.substrate_definition_list",
19 | "type": "number",
20 | "match": "exact",
21 | "expected": 1
22 | },
23 | {
24 | "description": "Calm Credential Instances",
25 | "key": ".spec.resources.credential_definition_list",
26 | "type": "number",
27 | "match": "exact",
28 | "expected": 1
29 | },
30 | {
31 | "description": "Number of Application Profiles",
32 | "key": ".spec.resources.app_profile_list",
33 | "type": "number",
34 | "match": "exact",
35 | "expected": 1
36 | },
37 | {
38 | "description": "Application name",
39 | "key": ".spec.name",
40 | "type": "string",
41 | "match": "contains",
42 | "expected": "BasicLinuxVM"
43 | }
44 | ]
45 | }
--------------------------------------------------------------------------------
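Each criteria entry counts the entries under a JSON key, or substring-matches a string, in the student's exported blueprint. A minimal sketch of the same checks with jq; blueprint.json is a placeholder file name:

# Sketch only: mirror the eval.json criteria against an exported blueprint.
jq '.spec.resources.service_definition_list | length'    blueprint.json  # expect 1
jq '.spec.resources.package_definition_list | length'    blueprint.json  # expect 2
jq '.spec.resources.substrate_definition_list | length'  blueprint.json  # expect 1
jq '.spec.resources.credential_definition_list | length' blueprint.json  # expect 1
jq '.spec.resources.app_profile_list | length'           blueprint.json  # expect 1
jq -r '.spec.name | contains("BasicLinuxVM")'            blueprint.json  # expect true
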
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/HAProxy_Install-old.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | echo "__ HAProxy installation..."
5 | sudo yum install -y haproxy &&
6 | sudo systemctl enable haproxy
7 | haproxy -v
8 |
9 | echo "__ HAProxy configuration..."
10 | # http://cbonte.github.io/haproxy-dconv/1.8/configuration.html#4
11 | echo "global
12 | log 127.0.0.1 local0
13 | log 127.0.0.1 local1 notice
14 | maxconn 4096
15 | quiet
16 | user haproxy
17 | group haproxy
18 | defaults
19 | log global
20 | mode http
21 | retries 3
22 | timeout client 50s
23 | timeout connect 5s
24 | timeout server 50s
25 | option dontlognull
26 | option httplog
27 | option redispatch
28 | balance roundrobin
29 | # Set up application listeners here.
30 | listen stats
31 | bind *:8080
32 | mode http
33 | log global
34 | stats enable
35 | stats hide-version
36 | stats refresh 30s
37 | stats show-node
38 | stats uri /stats
39 | #listen admin
40 | # bind 127.0.0.1:22002
41 | # mode http
42 | # stats uri /
43 | frontend http
44 | bind *:80
45 | maxconn 2000
46 | default_backend servers-http
47 | backend servers-http" | sudo tee /etc/haproxy/haproxy.cfg
48 |
49 | sudo sed -i 's/server host-/#server host-/g' /etc/haproxy/haproxy.cfg
50 | hosts=$(echo "@@{WebServer.address}@@" | sed 's/^,//' | sed 's/,$//' | tr "," "\n")
51 | port=80
52 |
53 | for host in $hosts; do
54 | echo " server host-${host} ${host}:${port} weight 1 maxconn 100 check" | sudo tee -a /etc/haproxy/haproxy.cfg
55 | done
56 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/HAProxy_Install.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | echo "__ HAProxy installation..."
5 | sudo yum install -y haproxy &&
6 | sudo systemctl enable haproxy || true # idempotent
7 | haproxy -v
8 |
9 | echo "__ HAProxy configuration..."
10 | # http://cbonte.github.io/haproxy-dconv/1.8/configuration.html#4
11 | port=80
12 |
13 | echo "global
14 | log 127.0.0.1 local0
15 | log 127.0.0.1 local1 notice
16 | maxconn 4096
17 | quiet
18 | user haproxy
19 | group haproxy
20 | defaults
21 | log global
22 | mode http
23 | retries 3
24 | timeout client 50s
25 | timeout connect 5s
26 | timeout server 50s
27 | option dontlognull
28 | option httplog
29 | option redispatch
30 | balance roundrobin
31 | # Set up application listeners here.
32 | listen stats
33 | bind *:8080
34 | mode http
35 | log global
36 | stats enable
37 | stats hide-version
38 | stats refresh 30s
39 | stats show-node
40 | stats uri /stats
41 | #listen admin
42 | # bind 127.0.0.1:22002
43 | # mode http
44 | # stats uri /
45 | frontend http
46 | bind *:${port}
47 | maxconn 2000
48 | default_backend servers-http
49 | backend servers-http" | sudo tee /etc/haproxy/haproxy.cfg
50 |
51 | sudo sed -i 's/server host-/#server host-/g' /etc/haproxy/haproxy.cfg
52 |
53 | hosts=$(echo "@@{WebServer.address}@@" | sed 's/^,//' | sed 's/,$//' | tr "," "\n")
54 | for host in $hosts; do
55 | echo " server host-${host} ${host}:${port} weight 1 maxconn 100 check" | sudo tee -a /etc/haproxy/haproxy.cfg
56 | done
57 |
--------------------------------------------------------------------------------
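A minimal sketch of validating the regenerated configuration before relying on it, using haproxy's check mode and the stats listener defined above; these checks are not part of the blueprint tasks:

# Sketch only: verify haproxy.cfg after install or an Add/Delete webserver action.
sudo haproxy -c -f /etc/haproxy/haproxy.cfg   # configuration syntax check
sudo systemctl restart haproxy
curl -s http://localhost:8080/stats | head    # stats page bound on *:8080 above
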
/HybridCloudEngineer/resources/frame_scripts/scripts/HAProxy_Update.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | echo "__ HAProxy installation..."
5 | sudo yum install -y haproxy &&
6 | sudo systemctl enable haproxy || true # idempotent
7 | haproxy -v
8 |
9 | echo "__ HAProxy configuration..."
10 | # http://cbonte.github.io/haproxy-dconv/1.8/configuration.html#4
11 | port=80
12 |
13 | echo "global
14 | log 127.0.0.1 local0
15 | log 127.0.0.1 local1 notice
16 | maxconn 4096
17 | quiet
18 | user haproxy
19 | group haproxy
20 | defaults
21 | log global
22 | mode http
23 | retries 3
24 | timeout client 50s
25 | timeout connect 5s
26 | timeout server 50s
27 | option dontlognull
28 | option httplog
29 | option redispatch
30 | balance roundrobin
31 | # Set up application listeners here.
32 | listen stats
33 | bind *:8080
34 | mode http
35 | log global
36 | stats enable
37 | stats hide-version
38 | stats refresh 30s
39 | stats show-node
40 | stats uri /stats
41 | #listen admin
42 | # bind 127.0.0.1:22002
43 | # mode http
44 | # stats uri /
45 | frontend http
46 | bind *:${port}
47 | maxconn 2000
48 | default_backend servers-http
49 | backend servers-http" | sudo tee /etc/haproxy/haproxy.cfg
50 |
51 | sudo sed -i 's/server host-/#server host-/g' /etc/haproxy/haproxy.cfg
52 |
53 | hosts=$(echo "@@{WebServer.address}@@" | sed 's/^,//' | sed 's/,$//' | tr "," "\n")
54 | for host in $hosts; do
55 | echo " server host-${host} ${host}:${port} weight 1 maxconn 100 check" | sudo tee -a /etc/haproxy/haproxy.cfg
56 | done
57 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/HAProxy_Update.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | echo "__ HAProxy installation..."
5 | sudo yum install -y haproxy &&
6 | sudo systemctl enable haproxy || true # idempotent
7 | haproxy -v
8 |
9 | echo "__ HAProxy configuration..."
10 | # http://cbonte.github.io/haproxy-dconv/1.8/configuration.html#4
11 | port=80
12 |
13 | echo "global
14 | log 127.0.0.1 local0
15 | log 127.0.0.1 local1 notice
16 | maxconn 4096
17 | quiet
18 | user haproxy
19 | group haproxy
20 | defaults
21 | log global
22 | mode http
23 | retries 3
24 | timeout client 50s
25 | timeout connect 5s
26 | timeout server 50s
27 | option dontlognull
28 | option httplog
29 | option redispatch
30 | balance roundrobin
31 | # Set up application listeners here.
32 | listen stats
33 | bind *:8080
34 | mode http
35 | log global
36 | stats enable
37 | stats hide-version
38 | stats refresh 30s
39 | stats show-node
40 | stats uri /stats
41 | #listen admin
42 | # bind 127.0.0.1:22002
43 | # mode http
44 | # stats uri /
45 | frontend http
46 | bind *:${port}
47 | maxconn 2000
48 | default_backend servers-http
49 | backend servers-http" | sudo tee /etc/haproxy/haproxy.cfg
50 |
51 | sudo sed -i 's/server host-/#server host-/g' /etc/haproxy/haproxy.cfg
52 |
53 | hosts=$(echo "@@{WebServer.address}@@" | sed 's/^,//' | sed 's/,$//' | tr "," "\n")
54 | for host in $hosts; do
55 | echo " server host-${host} ${host}:${port} weight 1 maxconn 100 check" | sudo tee -a /etc/haproxy/haproxy.cfg
56 | done
57 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/HAProxy_Install.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | echo "__ HAProxy installation..."
5 | sudo yum install -y haproxy &&
6 | sudo systemctl enable haproxy || true # idempotent
7 | haproxy -v
8 |
9 | echo "__ HAProxy configuration..."
10 | # http://cbonte.github.io/haproxy-dconv/1.8/configuration.html#4
11 | port=80
12 |
13 | echo "global
14 | log 127.0.0.1 local0
15 | log 127.0.0.1 local1 notice
16 | maxconn 4096
17 | quiet
18 | user haproxy
19 | group haproxy
20 | defaults
21 | log global
22 | mode http
23 | retries 3
24 | timeout client 50s
25 | timeout connect 5s
26 | timeout server 50s
27 | option dontlognull
28 | option httplog
29 | option redispatch
30 | balance roundrobin
31 | # Set up application listeners here.
32 | listen stats
33 | bind *:8080
34 | mode http
35 | log global
36 | stats enable
37 | stats hide-version
38 | stats refresh 30s
39 | stats show-node
40 | stats uri /stats
41 | #listen admin
42 | # bind 127.0.0.1:22002
43 | # mode http
44 | # stats uri /
45 | frontend http
46 | bind *:${port}
47 | maxconn 2000
48 | default_backend servers-http
49 | backend servers-http" | sudo tee /etc/haproxy/haproxy.cfg
50 |
51 | sudo sed -i 's/server host-/#server host-/g' /etc/haproxy/haproxy.cfg
52 |
53 | hosts=$(echo "@@{WebServer.address}@@" | sed 's/^,//' | sed 's/,$//' | tr "," "\n")
54 | for host in $hosts; do
55 | echo " server host-${host} ${host}:${port} weight 1 maxconn 100 check" | sudo tee -a /etc/haproxy/haproxy.cfg
56 | done
57 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/WebServer_3Tier_Webapp.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | sudo touch /var/www/html/index.php
5 | sudo chown @@{superuser.username}@@:@@{superuser.username}@@ /var/www/html/index.php
6 |
7 | cat > /var/www/html/index.php <<- "EOF"
8 |
9 |
10 | @@{calm_blueprint_name}@@
11 |
12 |
13 | Hostname: @@{name}@@ at @@{address}@@, replica: @@{calm_array_index}@@
14 | true,
18 | PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
19 | ));
20 |
21 | // ensure there is at least 1 row of data for the app to read
22 | $data = $dbh->query("INSERT INTO @@{DATABASE_TABLE}@@ (id, time, message) VALUES (NULL, CURRENT_TIMESTAMP, '@@{calm_username}@@')");
23 |
24 | $data = $dbh->query('SELECT MAX(id) AS count FROM @@{DATABASE_TABLE}@@')->fetchAll(PDO::FETCH_COLUMN);
25 | $count = $data[0]; //print_r($data);
26 | $data = $dbh->query("SELECT time, message FROM @@{DATABASE_TABLE}@@ WHERE id = $count")->fetchAll(PDO::FETCH_ASSOC);
27 | $time = $data[0]['time'];
28 | $message = $data[0]['message'];
29 |
30 | echo "Database records: $count<br />\n";
31 | echo "Last maintainer: $message, on: $time<br />\n";
32 | echo "Web docroot: {$_SERVER["DOCUMENT_ROOT"]}, file: {$_SERVER["SCRIPT_NAME"]}<br />\n";
33 |
34 | $dbh = null;
35 | }
36 | catch (PDOException $e) {
37 | print "Error!: " . $e->getMessage() . "<br />";
38 | print phpinfo();
39 | die();
40 | }
41 | ?>
42 |
43 | EOF
44 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/WebServer_3Tier_Webapp.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | sudo touch /var/www/html/index.php
5 | sudo chown @@{superuser.username}@@:@@{superuser.username}@@ /var/www/html/index.php
6 |
7 | cat > /var/www/html/index.php <<- "EOF"
8 |
9 |
10 | @@{calm_blueprint_name}@@
11 |
12 |
13 | Hostname: @@{name}@@ at @@{address}@@, replica: @@{calm_array_index}@@
14 | true,
18 | PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
19 | ));
20 |
21 | // ensure there is at least 1 row of data for the app to read
22 | $data = $dbh->query("INSERT INTO @@{DATABASE_TABLE}@@ (id, time, message) VALUES (NULL, CURRENT_TIMESTAMP, '@@{calm_username}@@')");
23 |
24 | $data = $dbh->query('SELECT MAX(id) AS count FROM @@{DATABASE_TABLE}@@')->fetchAll(PDO::FETCH_COLUMN);
25 | $count = $data[0]; //print_r($data);
26 | $data = $dbh->query("SELECT time, message FROM @@{DATABASE_TABLE}@@ WHERE id = $count")->fetchAll(PDO::FETCH_ASSOC);
27 | $time = $data[0]['time'];
28 | $message = $data[0]['message'];
29 |
30 | echo "Database records: $count<br />\n";
31 | echo "Last maintainer: $message, on: $time<br />\n";
32 | echo "Web docroot: {$_SERVER['DOCUMENT_ROOT']}, file: {$_SERVER['SCRIPT_NAME']}<br />\n";
33 |
34 | $dbh = null;
35 | }
36 | catch (PDOException $e) {
37 | print "Error!: " . $e->getMessage() . "<br />";
38 | print phpinfo();
39 | die();
40 | }
41 | ?>
42 |
43 | EOF
44 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/MySQL_Install.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | # Assuming CentOS/RedHat 8.2
5 | echo "__ Mysql installation..."
6 | # https://dev.mysql.com/doc/refman/8.0/en/linux-installation-yum-repo.html
7 |
8 | sudo yum install -y "http://repo.mysql.com/mysql80-community-release-el8.rpm" &&
9 | sudo yum module -y disable mysql &&
10 | sudo yum install -y mysql-community-server
11 |
12 | # database must be started for SQL configuration
13 | sudo systemctl start mysqld
14 | # database should start on OS boot
15 | sudo systemctl enable mysqld
16 | # sudo systemctl status mysqld.service
17 |
18 | echo "__ Altering database root password..."
19 |
20 | mysql --user=root --connect-expired-password \
21 | --password="$(sudo grep -oP 'temporary password(.*): \K(\S+)' /var/log/mysqld.log)" <<- EOF
22 |
23 | SET GLOBAL validate_password.policy=LOW;
24 | ALTER user 'root'@'localhost' IDENTIFIED BY '@@{MYSQL_PASSWORD}@@';
25 |
26 | FLUSH PRIVILEGES;
27 | EOF
28 |
29 | echo "__ Creating database, table, and app user permissions..."
30 |
31 | mysql --user=root --password='@@{MYSQL_PASSWORD}@@' <<- "EOF"
32 |
33 | CREATE DATABASE IF NOT EXISTS @@{DATABASE_NAME}@@ ;
34 | CREATE TABLE IF NOT EXISTS @@{DATABASE_NAME}@@.@@{DATABASE_TABLE}@@ ( \
35 | id INT NOT NULL AUTO_INCREMENT , \
36 | time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP , \
37 | message VARCHAR(80) NULL DEFAULT NULL , \
38 | PRIMARY KEY (id)) \
39 | ENGINE = InnoDB CHARSET=ascii COLLATE ascii_general_ci COMMENT = 'testing';
40 |
41 | FLUSH PRIVILEGES;
42 | CREATE USER '@@{MYSQL_APP_USER}@@'@'%' IDENTIFIED WITH mysql_native_password BY '@@{MYSQL_PASSWORD}@@' \
43 | REQUIRE NONE WITH MAX_QUERIES_PER_HOUR 0 MAX_CONNECTIONS_PER_HOUR 0 MAX_UPDATES_PER_HOUR 0 MAX_USER_CONNECTIONS 0;
44 | GRANT SELECT, INSERT ON *.* TO '@@{MYSQL_APP_USER}@@'@'%';
45 |
46 | CREATE USER '@@{MYSQL_APP_USER}@@'@'localhost' IDENTIFIED WITH mysql_native_password BY '@@{MYSQL_PASSWORD}@@' \
47 | REQUIRE NONE WITH MAX_QUERIES_PER_HOUR 0 MAX_CONNECTIONS_PER_HOUR 0 MAX_UPDATES_PER_HOUR 0 MAX_USER_CONNECTIONS 0;
48 | GRANT SELECT, INSERT ON *.* TO '@@{MYSQL_APP_USER}@@'@'localhost';
49 |
50 | FLUSH PRIVILEGES;
51 | EOF
52 |
--------------------------------------------------------------------------------
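A minimal sketch of verifying what the install created. The @@{...}@@ macros only resolve inside a Calm task, so substitute literal values when running these by hand:

# Sketch only: confirm the database, table, and application user exist.
mysql --user=root --password='@@{MYSQL_PASSWORD}@@' -e "SHOW DATABASES;"
mysql --user=root --password='@@{MYSQL_PASSWORD}@@' -e "DESCRIBE @@{DATABASE_NAME}@@.@@{DATABASE_TABLE}@@;"
mysql --user=root --password='@@{MYSQL_PASSWORD}@@' -e "SELECT user, host FROM mysql.user WHERE user = '@@{MYSQL_APP_USER}@@';"
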
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/MySQL_Install.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | # Assuming CentOS/RedHat 8.2
5 | echo "__ Mysql installation..."
6 | # https://dev.mysql.com/doc/refman/8.0/en/linux-installation-yum-repo.html
7 |
8 | sudo yum install -y "http://repo.mysql.com/mysql80-community-release-el8.rpm" &&
9 | sudo yum module -y disable mysql &&
10 | sudo yum install -y mysql-community-server
11 |
12 | # database must be started for SQL configuration
13 | sudo systemctl start mysqld
14 | # database should start on OS boot
15 | sudo systemctl enable mysqld
16 | # sudo systemctl status mysqld.service
17 |
18 | echo "__ Altering database root password..."
19 |
20 | mysql --user=root --connect-expired-password \
21 | --password="$(sudo grep -oP 'temporary password(.*): \K(\S+)' /var/log/mysqld.log)" <<- EOF
22 |
23 | SET GLOBAL validate_password.policy=LOW;
24 | ALTER user 'root'@'localhost' IDENTIFIED BY '@@{MYSQL_PASSWORD}@@';
25 |
26 | FLUSH PRIVILEGES;
27 | EOF
28 |
29 | echo "__ Creating database, table, and app user permissions..."
30 |
31 | mysql --user=root --password='@@{MYSQL_PASSWORD}@@' <<- "EOF"
32 |
33 | CREATE DATABASE IF NOT EXISTS @@{DATABASE_NAME}@@ ;
34 | CREATE TABLE IF NOT EXISTS @@{DATABASE_NAME}@@.@@{DATABASE_TABLE}@@ ( \
35 | id INT NOT NULL AUTO_INCREMENT , \
36 | time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP , \
37 | message VARCHAR(80) NULL DEFAULT NULL , \
38 | PRIMARY KEY (id)) \
39 | ENGINE = InnoDB CHARSET=ascii COLLATE ascii_general_ci COMMENT = 'testing';
40 |
41 | FLUSH PRIVILEGES;
42 | CREATE USER '@@{MYSQL_APP_USER}@@'@'%' IDENTIFIED WITH mysql_native_password BY '@@{MYSQL_PASSWORD}@@' \
43 | REQUIRE NONE WITH MAX_QUERIES_PER_HOUR 0 MAX_CONNECTIONS_PER_HOUR 0 MAX_UPDATES_PER_HOUR 0 MAX_USER_CONNECTIONS 0;
44 | GRANT SELECT, INSERT ON *.* TO '@@{MYSQL_APP_USER}@@'@'%';
45 |
46 | CREATE USER '@@{MYSQL_APP_USER}@@'@'localhost' IDENTIFIED WITH mysql_native_password BY '@@{MYSQL_PASSWORD}@@' \
47 | REQUIRE NONE WITH MAX_QUERIES_PER_HOUR 0 MAX_CONNECTIONS_PER_HOUR 0 MAX_UPDATES_PER_HOUR 0 MAX_USER_CONNECTIONS 0;
48 | GRANT SELECT, INSERT ON *.* TO '@@{MYSQL_APP_USER}@@'@'localhost';
49 |
50 | FLUSH PRIVILEGES;
51 | EOF
52 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/resources/frame_scripts/scripts/Backups/MySQL_Install-webuser.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -ex
3 |
4 | # Assuming CentOS/RedHat 8.2
5 | echo "__ Mysql installation..."
6 | # https://dev.mysql.com/doc/refman/8.0/en/linux-installation-yum-repo.html
7 |
8 | sudo yum install -y "http://repo.mysql.com/mysql80-community-release-el8.rpm" &&
9 | sudo yum module -y disable mysql &&
10 | sudo yum install -y mysql-community-server
11 |
12 | # database must be started for SQL configuration
13 | sudo systemctl start mysqld
14 | # database should start on OS boot
15 | sudo systemctl enable mysqld
16 | # sudo systemctl status mysqld.service
17 |
18 | echo "__ Altering database root password..."
19 |
20 | mysql --user=root --connect-expired-password \
21 | --password="$(sudo grep -oP 'temporary password(.*): \K(\S+)' /var/log/mysqld.log)" <<- EOF
22 |
23 | SET GLOBAL validate_password.policy=LOW;
24 | ALTER user 'root'@'localhost' IDENTIFIED BY '@@{MYSQL_PASSWORD}@@';
25 |
26 | FLUSH PRIVILEGES;
27 | EOF
28 |
29 | echo "__ Creating database, table, and app user permissions..."
30 |
31 | mysql --user=root --password='@@{MYSQL_PASSWORD}@@' <<- "EOF"
32 |
33 | CREATE DATABASE IF NOT EXISTS @@{DATABASE_NAME}@@ ;
34 | CREATE TABLE IF NOT EXISTS @@{DATABASE_NAME}@@.@@{DATABASE_TABLE}@@ ( \
35 | id INT NOT NULL AUTO_INCREMENT , \
36 | time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP , \
37 | message VARCHAR(80) NULL DEFAULT NULL , \
38 | PRIMARY KEY (id)) \
39 | ENGINE = InnoDB CHARSET=ascii COLLATE ascii_general_ci COMMENT = 'testing';
40 |
41 | FLUSH PRIVILEGES;
42 | CREATE USER '@@{MYSQL_APP_USER}@@'@'%' IDENTIFIED WITH mysql_native_password BY '@@{MYSQL_PASSWORD}@@' \
43 | REQUIRE NONE WITH MAX_QUERIES_PER_HOUR 0 MAX_CONNECTIONS_PER_HOUR 0 MAX_UPDATES_PER_HOUR 0 MAX_USER_CONNECTIONS 0;
44 | GRANT SELECT, INSERT ON *.* TO 'webuser'@'%' WITH GRANT OPTION;
45 |
46 | CREATE USER '@@{MYSQL_APP_USER}@@'@'localhost' IDENTIFIED WITH mysql_native_password BY '@@{MYSQL_PASSWORD}@@' \
47 | REQUIRE NONE WITH MAX_QUERIES_PER_HOUR 0 MAX_CONNECTIONS_PER_HOUR 0 MAX_UPDATES_PER_HOUR 0 MAX_USER_CONNECTIONS 0;
48 | GRANT SELECT, INSERT ON *.* TO 'webuser'@'localhost' WITH GRANT OPTION;
49 |
50 | FLUSH PRIVILEGES;
51 | EOF
52 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/README.md:
--------------------------------------------------------------------------------
1 | # Hybrid Cloud Engineer Nanodegree
2 |
3 | Welcome! Students will most likely want to go directly to the materials for the [course 2 project](course/2/project/) and the [course 3 project](course/3/project/).
4 |
5 | ## Overview
6 |
7 | This program will enhance learning opportunities in the emerging cloud technologies that are essential for organizations looking to uplevel their IT infrastructure. As part of the collaboration, Nutanix will also sponsor 5,000 scholarships to help IT professionals advance their knowledge through this initiative.
8 |
9 | As more services move to the cloud, IT teams require the knowledge and expertise it takes to properly manage these infrastructures that increasingly stretch across on-premises and public cloud in a hybrid model. Although hybrid cloud technology continues to grow and evolve, educational opportunities in this field remain at a standstill. In Nutanix’s recent Enterprise Cloud Index survey, 85% of respondents said that hybrid cloud is the ideal operating model. However, 32% of respondents reported a lack of in-house hybrid cloud skills.
10 |
11 | To fill this educational gap, Udacity and Nutanix have collaborated closely to develop a Nanodegree program that will advance the knowledge of seasoned IT professionals. To strengthen professional development in this space, the Nanodegree program will cover modern private cloud infrastructure and the design of hybrid application deployment. Ideal students are those managing traditional business applications, legacy infrastructure, or cloud-native applications on public cloud infrastructure.
12 |
13 | As more and more services move to the cloud, the need for cloud infrastructure expertise continues to grow. In fact, nearly 90% of companies rely on cloud computing, with total spending on cloud services estimated to have hit $97 billion in 2019.
14 |
15 | Learn how to design, deploy, and maintain Hybrid Cloud infrastructure from one of the leaders in the industry.
16 |
17 | ## History
18 |
19 | 2020-03-31: Udacity and Nutanix jointly announced our partnership and the [Hybrid Cloud Engineer Nanodegree](https://www.nutanix.com/press-releases/2020/nutanix-partners-with-udacity-to-offer-hybrid-cloud-nanodegree-program) and [unveiled a scholarship program](https://www.udacity.com/scholarships/nutanix-hybrid-cloud-scholarship-program)!
20 |
21 | ## FAQ
22 |
23 | [What is a Nanodegree Program?](https://udacity.zendesk.com/hc/en-us/articles/360015664831-What-is-a-Nanodegree-Program-)
24 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/criteria/p3.json:
--------------------------------------------------------------------------------
1 | {
2 | "criteria": [{
3 | "description": "Calm Service Instances",
4 | "key": ".spec.resources.service_definition_list",
5 | "type": "number",
6 | "match": "exact",
7 | "expected": 1
8 | },
9 | {
10 | "description": "Calm Package Instances",
11 | "key": ".spec.resources.package_definition_list",
12 | "type": "number",
13 | "match": "exact",
14 | "expected": 2
15 | },
16 | {
17 | "description": "Calm Substrate Instances",
18 | "key": ".spec.resources.substrate_definition_list",
19 | "type": "number",
20 | "match": "exact",
21 | "expected": 1
22 | },
23 | {
24 | "description": "Calm Credential Instances",
25 | "key": ".spec.resources.credential_definition_list",
26 | "type": "number",
27 | "match": "exact",
28 | "expected": 1
29 | },
30 | {
31 | "description": "Number of Application Profiles",
32 | "key": ".spec.resources.app_profile_list",
33 | "type": "number",
34 | "match": "exact",
35 | "expected": 1
36 | },
37 | {
38 | "description": "CentOS 8 Cloud Image URL",
39 | "key": ".spec.resources.package_definition_list[].options.resources.source_uri",
40 | "type": "string",
41 | "match": "contains",
42 | "expected": "cloud.centos.org"
43 | },
44 | {
45 | "description": "AWS AMI ID",
46 | "key": ".spec.resources.substrate_definition_list[].create_spec.resources.image_id",
47 | "type": "string",
48 | "match": "contains",
49 | "expected": "ami-02f147dfb8be58a10"
50 | },
51 | {
52 | "description": "AWS Instance Type",
53 | "key": ".spec.resources.substrate_definition_list[].create_spec.resources.instance_type",
54 | "type": "string",
55 | "match": "contains",
56 | "expected": "t2.micro"
57 | },
58 | {
59 | "description": "Minimum Instance Replicas",
60 | "key": ".spec.resources.app_profile_list[].deployment_create_list[].min_replicas",
61 | "type": "number",
62 | "match": "contains",
63 | "expected": "1"
64 | },
65 | {
66 | "description": "Maximum Instance Replicas",
67 | "key": ".spec.resources.app_profile_list[].deployment_create_list[].max_replicas",
68 | "type": "number",
69 | "match": "contains",
70 | "expected": "2"
71 | },
72 | {
73 | "description": "Apache Web Server Name",
74 | "key": ".spec.resources.substrate_definition_list[].create_spec.name",
75 | "type": "string",
76 | "match": "contains",
77 | "expected": "www@@{calm_array_index}@@"
78 | },
79 | {
80 | "description": "MySQL Database Server Name",
81 | "key": ".spec.resources.substrate_definition_list[].create_spec.name",
82 | "type": "string",
83 | "match": "contains",
84 | "expected": "MySQL-@@{calm_time}@@"
85 | },
86 | {
87 | "description": "HAProxy Load Balancer Server Name",
88 | "key": ".spec.resources.substrate_definition_list[].create_spec.name",
89 | "type": "string",
90 | "match": "contains",
91 | "expected": "HAProxy-@@{calm_time}@@"
92 | },
93 | {
94 | "description": "Application name",
95 | "key": ".spec.name",
96 | "type": "string",
97 | "match": "contains",
98 | "expected": "BasicLinuxVM"
99 | }
100 | ]
101 | }
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/criteria/p3_new.json:
--------------------------------------------------------------------------------
1 | {
2 | "criteria": [{
3 | "description": "Calm Service Instances",
4 | "key": "spec.resources.service_definition_list",
5 | "type": "instances",
6 | "match": "exact",
7 | "expected": 4
8 | },
9 | {
10 | "description": "Calm Package Instances",
11 | "key": "spec.resources.package_definition_list",
12 | "type": "instances",
13 | "match": "exact",
14 | "expected": 9
15 | },
16 | {
17 | "description": "Calm Substrate Instances",
18 | "key": "spec.resources.substrate_definition_list",
19 | "type": "instances",
20 | "match": "exact",
21 | "expected": 8
22 | },
23 | {
24 | "description": "Calm Credential Instances",
25 | "key": "spec.resources.credential_definition_list",
26 | "type": "instances",
27 | "match": "exact",
28 | "expected": 2
29 | },
30 | {
31 | "description": "Calm Application Profiles Instances",
32 | "key": "spec.resources.app_profile_list",
33 | "type": "instances",
34 | "match": "exact",
35 | "expected": 2
36 | },
37 | {
38 | "description": "Deployment Create Lists",
39 | "key": "deployment_create_list",
40 | "type": "list2",
41 | "sub_type": "instances",
42 | "match": "exact",
43 | "expected": 2
44 | },
45 | {
46 | "description": "CentOS 8 Cloud Image URL",
47 | "key": "source_uri",
48 | "type": "list2",
49 | "sub_type": "string",
50 | "match": "contains",
51 | "expected": "cloud.centos.org"
52 | },
53 | {
54 | "description": "AWS AMI ID",
55 | "key": "image_id",
56 | "type": "list2",
57 | "sub_type": "string",
58 | "match": "contains",
59 | "expected": "ami-02f147dfb8be58a10"
60 | },
61 | {
62 | "description": "AWS Instance Type",
63 | "key": "instance_type",
64 | "type": "list2",
65 | "sub_type": "string",
66 | "match": "contains",
67 | "expected": "t2.micro"
68 | },
69 | {
70 | "description": "Minimum Instance Replicas",
71 | "key": "min_replicas",
72 | "type": "list2",
73 | "sub_type": "number",
74 | "match": "exact",
75 | "expected": 1
76 | },
77 | {
78 | "description": "App Profile Evaluations",
79 | "type": "list3",
80 | "key": "app_profile_list",
81 | "sub_keys": [{
82 | "description": "Minimum Instance Replicas",
83 | "key": "min_replicas",
84 | "expected": 1
85 | },
86 | {
87 | "description": "Maximum Instance Replicas",
88 | "key": "max_replicas",
89 | "expected": 4
90 | }
91 | ],
92 | "match": "exact"
93 | },
94 | {
95 | "description": "Minimum Instance Replicas",
96 | "key": ".spec.resources.app_profile_list[].deployment_create_list[].min_replicas",
97 | "type": "number",
98 | "match": "contains",
99 | "expected": "1"
100 | },
101 | {
102 | "description": "Maximum Instance Replicas",
103 | "key": ".spec.resources.app_profile_list[].deployment_create_list[].max_replicas",
104 | "type": "number",
105 | "match": "contains",
106 | "expected": "2"
107 | },
108 | {
109 | "description": "Apache Web Server Name",
110 | "key": ".spec.resources.substrate_definition_list[].create_spec.name",
111 | "type": "string",
112 | "match": "contains",
113 | "expected": "www@@{calm_array_index}@@"
114 | },
115 | {
116 | "description": "MySQL Database Server Name",
117 | "key": ".spec.resources.substrate_definition_list[].create_spec.name",
118 | "type": "string",
119 | "match": "contains",
120 | "expected": "MySQL-@@{calm_time}@@"
121 | },
122 | {
123 | "description": "HAProxy Load Balancer Server Name",
124 | "key": ".spec.resources.substrate_definition_list[].create_spec.name",
125 | "type": "string",
126 | "match": "contains",
127 | "expected": "HAProxy-@@{calm_time}@@"
128 | },
129 | {
130 | "description": "Application name",
131 | "key": ".spec.name",
132 | "type": "string",
133 | "match": "contains",
134 | "expected": "BasicLinuxVM"
135 | }
136 | ]
137 | }
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/script/EnvironmentOptions/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 | import argparse
5 | import subprocess
6 | from Messages import Messages
7 |
8 |
9 | class EnvironmentOptions():
10 | '''
11 |     class representing the blueprint evaluation environment
12 | '''
13 |
14 |     def __init__(self):
15 |         '''
16 |         class constructor
17 |         '''
18 | self.criteria = ''
19 | self.directory = ''
20 | self.blueprint = ''
21 | self.full_bp = ''
22 | self.valid = False
23 | self.dsl = False
24 | self.debug = False
25 |
26 | def __repr__(self):
27 | '''
28 | decent __repr__ for debuggability
29 | this is something recommended by Raymond Hettinger
30 | '''
31 | return (f'{self.__class__.__name__}(criteria={self.criteria},'
32 | f'directory={self.directory},'
33 | f'blueprint={self.blueprint},'
34 | f'full_bp={self.full_bp},'
35 |                 f'valid={self.valid},'
36 | f'dsl={self.dsl},'
37 | f'debug={self.debug})')
38 |
39 | def get_options(self):
40 | '''
41 | gather script parameters from the command line
42 | '''
43 | parser = argparse.ArgumentParser(description='Evaluate '
44 | + 'a Udacity '
45 |                                          + 'student\'s Nutanix Calm '
46 | + 'Blueprint')
47 |
48 | parser.add_argument('--criteria', '-c',
49 | help='JSON file containing evaluation criteria. '
50 | + 'Defaults to "criteria/p3.json".')
51 | parser.add_argument('--directory', '-d',
52 | help='Directory containing exported Nutanix Calm '
53 | + 'Blueprints. '
54 | + 'Defaults to the current directory.')
55 | parser.add_argument('--blueprint', '-b',
56 | help='Nutanix Calm Blueprint file in JSON format. '
57 | + 'Set to "all" to process all blueprints '
58 | + 'in the specified directory. '
59 | + 'Defaults to "blueprint.py".')
60 | parser.add_argument('--valid', '-v',
61 | choices=["true", "false"],
62 | help='Specify that the script should process '
63 | + 'valid blueprints only. '
64 | + 'Defaults to False.')
65 | parser.add_argument('--dsl',
66 | choices=["true", "false"],
67 | help='Specify that the DSL is not required for '
68 | + 'this run. Used for troubleshooting only.')
69 | parser.add_argument('--debug',
70 | choices=["enable", "disable"],
71 | help='Enable debug mode and show detailed info '
72 | + 'when certain events are triggered. '
73 | + 'Defaults to False.')
74 |
75 | args = parser.parse_args()
76 |
77 | self.criteria = (args.criteria if args.criteria
78 | else "criteria/p3.json")
79 | self.directory = (args.directory if args.directory else ".")
80 | self.blueprint = (args.blueprint if args.blueprint else "blueprint.py")
81 |         self.valid = (args.valid == "true")
82 |         self.dsl = (args.dsl == "true")
83 |         self.debug = (args.debug == 'enable')
84 |
85 | def check_environment(self, messages: Messages):
86 | '''
87 | check the environment and make sure it has all the required commands
88 | '''
89 |
90 | # setup the list of required commands
91 | required_commands = ['calm'] if self.dsl else []
92 |
93 | try:
94 | for command in required_commands:
95 | subprocess.run(
96 | [command],
97 | stdout=subprocess.PIPE,
98 | stderr=subprocess.PIPE
99 | )
100 | location = subprocess.run(
101 | ['which', command],
102 | stdout=subprocess.PIPE,
103 | stderr=subprocess.PIPE
104 | )
105 | print(f'{messages.info} {command} found in '
106 | + f'{location.stdout.decode()}', end="")
107 | except FileNotFoundError:
108 | print(f'{messages.error} Unable to execute the `{command}` '
109 | + f'command. Please ensure the `{command}` command '
110 | + 'is accessible and executable in your PATH.')
111 | print(f'{messages.info} Environment requirements not met. If you '
112 |                   + 'have not yet done so, please run setup for this script: \n'
113 | + f'{messages.info} pip3 install -e .\n')
114 | sys.exit()
115 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/criteria/c2.json:
--------------------------------------------------------------------------------
1 | {
2 | "criteria": [{
3 | "description": "Entity Lists",
4 | "type": "entity_lists",
5 | "lists": [{
6 | "key_desc": "Calm Service Instances",
7 | "key": "spec.resources.service_definition_list",
8 | "expected": 3
9 | },
10 | {
11 | "key_desc": "Calm Package Instances",
12 | "key": "spec.resources.package_definition_list",
13 | "expected": 4
14 | },
15 | {
16 | "key_desc": "Calm Substrate Instances",
17 | "key": "spec.resources.substrate_definition_list",
18 | "expected": 3
19 | },
20 | {
21 | "key_desc": "Calm Credential Instances",
22 | "key": "spec.resources.credential_definition_list",
23 | "expected": 2
24 | },
25 | {
26 | "key_desc": "Calm App Profile Instances",
27 | "key": "spec.resources.app_profile_list",
28 | "expected": 1
29 | }
30 | ]
31 | },
32 | {
33 | "description": "CentOS Image Name",
34 | "key": "spec.resources.package_definition_list",
35 | "type": "source_uri",
36 | "match": "contains",
37 | "expected": "cloud.centos.org"
38 | },
39 | {
40 | "description": "AHV: VM Names",
41 | "key": "spec.resources.substrate_definition_list",
42 | "type": "ahv_server_names",
43 | "match": "equals",
44 | "expected": [
45 | "www@@{calm_array_index}@@",
46 | "mysql-@@{calm_time}@@",
47 | "haproxy-@@{calm_time}@@"
48 | ]
49 | },
50 | {
51 | "description": "Web Server Max Replicas",
52 | "key": "spec.resources.app_definition_list",
53 | "type": "web_max_replicas",
54 | "match": "equals",
55 | "names": [
56 | "webserverahv",
57 | "webserveraws"
58 | ],
59 | "expected": 4
60 | },
61 | {
62 | "description": "Web Server Min Replicas",
63 | "key": "spec.resources.app_definition_list",
64 | "type": "web_min_replicas",
65 | "match": "equals",
66 | "names": [
67 | "webserverahv",
68 | "webserveraws"
69 | ],
70 | "expected": 2
71 | },
72 | {
73 | "description": "VM Cloud-Init Data",
74 | "key": "spec.resources.service_definition_list",
75 | "type": "cloud_init",
76 | "match": "contains",
77 | "expected": {
78 | "ahv_data": "#cloud-config\nusers:",
79 | "aws_data": "omit: security-group of VPC for SSH"
80 | }
81 | },
82 | {
83 | "description": "Application Credentials",
84 | "key": "spec.resources.credential_definition_list",
85 | "type": "credentials",
86 | "expected": {
87 | "types": [
88 | "key"
89 | ],
90 | "usernames": [
91 | "teccadmin",
92 | "teccdba"
93 | ]
94 | }
95 | },
96 | {
97 | "description": "Course 2 VM Specs",
98 | "key": "spec.resources.substrate_definition_list",
99 | "type": "c2_sizing",
100 | "vm_sizes": [{
101 | "specs": {
102 | "spec_desc": "Web Server",
103 | "name": "www@@{calm_array_index}@@",
104 | "vm_spec": {
105 | "num_sockets": 1,
106 | "num_vcpus_per_socket": 1,
107 | "memory_size_mib": 1024
108 | }
109 |
110 | }
111 | },
112 | {
113 | "specs": {
114 | "spec_desc": "MySQL Database Server",
115 | "name": "mysql-@@{calm_time}@@",
116 | "vm_spec": {
117 | "num_sockets": 1,
118 | "num_vcpus_per_socket": 1,
119 | "memory_size_mib": 1024
120 | }
121 | }
122 | },
123 | {
124 | "specs": {
125 | "spec_desc": "HAProxy Server",
126 | "name": "haproxy-@@{calm_time}@@",
127 | "vm_spec": {
128 | "num_sockets": 1,
129 | "num_vcpus_per_socket": 1,
130 | "memory_size_mib": 1024
131 | }
132 | }
133 | }
134 | ]
135 | }
136 | ]
137 | }
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/criteria/c3.json:
--------------------------------------------------------------------------------
1 | {
2 | "criteria": [{
3 | "description": "Entity Lists",
4 | "type": "entity_lists",
5 | "lists": [{
6 | "key_desc": "Calm Service Instances",
7 | "key": "spec.resources.service_definition_list",
8 | "expected": 4
9 | },
10 | {
11 | "key_desc": "Calm Package Instances",
12 | "key": "spec.resources.package_definition_list",
13 | "expected": 9
14 | },
15 | {
16 | "key_desc": "Calm Substrate Instances",
17 | "key": "spec.resources.substrate_definition_list",
18 | "expected": 8
19 | },
20 | {
21 | "key_desc": "Calm Credential Instances",
22 | "key": "spec.resources.credential_definition_list",
23 | "expected": 2
24 | },
25 | {
26 | "key_desc": "Calm App Profile Instances",
27 | "key": "spec.resources.app_profile_list",
28 | "expected": 2
29 | }
30 | ]
31 | },
32 | {
33 | "description": "CentOS Image Name",
34 | "key": "spec.resources.package_definition_list",
35 | "type": "source_uri",
36 | "match": "contains",
37 | "expected": "cloud.centos.org"
38 | },
39 | {
40 | "description": "AWS AMI ID",
41 | "key": "spec.resources.substrate_definition_list",
42 | "type": "image_id",
43 | "match": "in",
44 | "expected": [
45 | "ami-02f147dfb8be58a10",
46 | "ami-0157b1e4eefd91fd7"
47 | ]
48 | },
49 | {
50 | "description": "AWS Instance Type",
51 | "key": "spec.resources.substrate_definition_list",
52 | "type": "instance_type",
53 | "match": "in",
54 | "expected": [
55 | "t2.micro"
56 | ]
57 | },
58 | {
59 | "description": "AHV: VM Names",
60 | "key": "spec.resources.substrate_definition_list",
61 | "type": "ahv_server_names",
62 | "match": "equals",
63 | "expected": [
64 | "www@@{calm_array_index}@@",
65 | "haproxy-@@{calm_time}@@"
66 | ]
67 | },
68 | {
69 | "description": "AWS: VM Names",
70 | "key": "spec.resources.substrate_definition_list",
71 | "type": "aws_server_names",
72 | "match": "equals",
73 | "expected": [
74 | "www@@{calm_array_index}@@",
75 | "aws-mysql-@@{calm_array_index}@@"
76 | ]
77 | },
78 | {
79 | "description": "Web Server Max Replicas",
80 | "key": "spec.resources.app_definition_list",
81 | "type": "web_max_replicas",
82 | "match": "equals",
83 | "names": [
84 | "webserverahv",
85 | "webserveraws"
86 | ],
87 | "expected": 4
88 | },
89 | {
90 | "description": "Web Server Min Replicas",
91 | "key": "spec.resources.app_definition_list",
92 | "type": "web_min_replicas",
93 | "match": "equals",
94 | "names": [
95 | "webserverahv",
96 | "webserveraws"
97 | ],
98 | "expected": 2
99 | },
100 | {
101 | "description": "VM Cloud-Init Data",
102 | "key": "spec.resources.service_definition_list",
103 | "type": "cloud_init",
104 | "match": "contains",
105 | "expected": {
106 | "ahv_data": "#cloud-config\nusers:",
107 | "aws_data": "omit: security-group of VPC for SSH"
108 | }
109 | },
110 | {
111 | "description": "Application Credentials",
112 | "key": "spec.resources.credential_definition_list",
113 | "type": "credentials",
114 | "expected": {
115 | "types": [
116 | "key"
117 | ],
118 | "usernames": [
119 | "teccadmin",
120 | "teccdba"
121 | ]
122 | }
123 | },
124 | {
125 | "description": "VM Specs",
126 | "key": "spec.resources.substrate_definition_list",
127 | "type": "c3_sizing",
128 | "vm_sizes": [{
129 | "size": "small",
130 | "count": 2,
131 | "specs": {
132 | "spec_desc": "Web Server",
133 | "name": "www@@{calm_array_index}@@",
134 | "partial_name": "www",
135 | "num_sockets": 1,
136 | "num_vcpus_per_socket": 1,
137 | "memory_size_mib": 1024,
138 | "credential_name": "teccadmin"
139 | }
140 | },
141 | {
142 | "size": "medium",
143 | "count": 2,
144 | "specs": {
145 | "spec_desc": "Web Server",
146 | "name": "www@@{calm_array_index}@@",
147 | "partial_name": "www",
148 | "num_sockets": 2,
149 | "num_vcpus_per_socket": 2,
150 | "memory_size_mib": 1024,
151 | "credential_name": "teccadmin"
152 | }
153 | },
154 | {
155 | "size": "small",
156 | "count": 2,
157 | "specs": {
158 | "spec_desc": "Load Balancer",
159 | "name": "HAProxy-@@{calm_time}@@",
160 | "partial_name": "haproxy",
161 | "num_sockets": 1,
162 | "num_vcpus_per_socket": 1,
163 | "memory_size_mib": 1024,
164 | "credential_name": "teccadmin"
165 | }
166 | },
167 | {
168 | "size": "medium",
169 | "count": 2,
170 | "specs": {
171 | "spec_desc": "Load Balancer",
172 |                 "name": "HAProxy-@@{calm_time}@@",
173 | "partial_name": "haproxy",
174 | "num_sockets": 2,
175 | "num_vcpus_per_socket": 2,
176 | "memory_size_mib": 1024,
177 | "credential_name": "teccadmin"
178 | }
179 | },
180 | {
181 | "size": "small",
182 | "count": 2,
183 | "specs": {
184 | "spec_desc": "MySQL Database Server",
185 | "name": "aws-mysql-@@{calm_array_index}@@",
186 | "partial_name": "mysql",
187 | "num_sockets": 1,
188 | "num_vcpus_per_socket": 1,
189 | "memory_size_mib": 1024,
190 | "credential_name": "teccdba"
191 | }
192 | }
193 | ]
194 | }
195 | ]
196 | }
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/script/eval.bash:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # colours
4 | RED="\e[31m"
5 | GREEN="\e[32m"
6 | BLUE="\e[34m"
7 | NC="\e[39m"
8 |
9 | # set some script-wide variables
10 | ERROR_PREFIX="$RED[ERROR]$NC"
11 | OK_PREFIX="$BLUE[OK]$NC"
12 | INFO_PREFIX="$BLUE[INFO]$NC"
13 | PASS_PREFIX="$GREEN[PASS]$NC"
14 | FAIL_PREFIX="$RED[FAIL]$NC"
15 | LINE_BREAK="----------------------------------------"
16 |
17 | # set this to 1 if you want to bail out when a blueprint
18 | # can't be decompiled successfully
19 | VALID_BP_ONLY=0
20 |
21 | # set this to 1 to show extended debug info
22 | # for example, if a specific evaluation fails, dump the value that was found
23 | # this can make the output look "messy" but will help diagnose what the
24 | # student did vs what was expected
25 | DEBUG=0
26 |
27 | echo -e ""
28 | echo -e "$INFO_PREFIX Evaluation script started at `date`"
29 | echo -e ""
30 |
31 | echo -e "$INFO_PREFIX Checking environment."
32 |
33 | # checking required binaries are found and are executable
34 | COMMANDS=( "calm" "jq" )
35 | for i in "${COMMANDS[@]}"
36 | do
37 | :
38 | # check that each binary exists
39 | if ! command -v $i &> /dev/null
40 | then
41 | echo -e "$ERROR_PREFIX '$i' command could not be found. Please ensure the $i command exists and is executable before running this script."
42 | exit
43 | else
44 | echo -e "$INFO_PREFIX '$i' command found in `which $i`. Continuing."
45 | fi
46 | done
47 |
48 | echo -e "$INFO_PREFIX Environment OK."
49 |
50 | # function used to display script usage help, when required
51 | function show_help {
52 | echo -e ""
53 | echo -e "Usage: eval.bash [ARGS]"
54 | echo -e ""
55 | echo -e "Args:"
56 | echo -e " -h Show this help and exit."
57 | echo -e " -d Location of user blueprints"
58 | echo -e " -b Blueprint name to evaluate"
59 | echo -e " -c Evaluation criteria file to use for comparison"
60 | echo -e ""
61 | echo -e "Note:"
62 | echo -e " -b value can be \"all\", to batch process all JSON blueprints in the specified directory"
63 | echo -e ""
64 | echo -e "Examples:"
65 | echo -e " eval.bash -c eval.json -d ~/blueprints -b blueprint1"
66 | echo -e " eval.bash -c eval.json -d . -b all"
67 | echo -e ""
68 | exit
69 | }
70 | function process_json() {
71 | echo -e $LINE_BREAK
72 | # with the blueprint directory found and a blueprint specified, concatenate to make future work easier
73 | BP_FULL="$BLUEPRINT_DIRECTORY/$1"
74 | # verify the specified blueprint exists
75 | if [ ! -f "$BP_FULL" ]
76 | then
77 | echo -e "$ERROR_PREFIX $1 not found. Please specify a valid blueprint by using the -d and -b arguments."
78 | show_help
79 | exit
80 | else
81 | echo -e "$INFO_PREFIX $1 found. Continuing."
82 | fi
83 | # verify the blueprint is valid
84 | # at this point the blueprint directory and blueprint itself have been found in the user-specified locations
85 | if [ "$VALID_BP_ONLY" == "1" ];
86 | then
87 |         calm decompile bp --file "$BP_FULL" > /dev/null 2>&1
88 | COMPILE_RESULT=$?
89 | if [ ! "$COMPILE_RESULT" == "0" ]
90 | then
91 | echo -e "$ERROR_PREFIX The specified blueprint cannot be decompiled. Please ensure the blueprint contains valid JSON."
92 | exit
93 | else
94 | echo -e "$INFO_PREFIX Blueprint decompiled successfully. Continuing."
95 | fi
96 | fi
97 |
98 | # read the evaluation criteria from the supplied evaluation file
99 | JSON_CRITERIA="`cat ${CRITERIA_FILE}`"
100 |
101 | echo -e ""
102 | echo -e "$INFO_PREFIX Starting evaluation of $BP_FULL."
103 | echo -e ""
104 |
105 | # go over each of the criteria keys in the evaluation file
106 | # compare each key's 'expected' value to that key's value in the student's JSON blueprint
107 | for row in $(echo -e "${JSON_CRITERIA}" | jq -r '.criteria[] | @base64')
108 | do
109 | TYPE=`echo -e ${row} | base64 -d | jq -r '.type'`
110 | MATCH=`echo -e ${row} | base64 -d | jq -r '.match'`
111 | KEY=`echo -e ${row} | base64 -d | jq -r '.key'`
112 | DESCRIPTION=`echo -e ${row} | base64 -d | jq -r '.description'`
113 | EXPECTED_VALUE=`echo -e ${row} | base64 -d | jq -r '.expected'`
114 | # compare the expected vs evaluated values, based on the expected data type
115 | if [ "$TYPE" == "number" ];
116 | then
117 | KEY_VALUE=`cat "$BP_FULL" | jq -r "$KEY | length"`
118 | else
119 | KEY_VALUE=`cat "$BP_FULL" | jq -r "$KEY"`
120 | fi
121 | # do the comparison but compare based on the "match" setting in the evaluation file
122 | # string values can be "exact" or "contains"
123 | # "match" is ignored for number types
124 |
125 | # if [ "$TYPE" == "string" ];
126 | # then
127 | if [ "$MATCH" == "exact" ];
128 | then
129 | if [ "$EXPECTED_VALUE" == "$KEY_VALUE" ];
130 | then
131 | RESULT=1
132 | else
133 | RESULT=0
134 | fi
135 | else
136 | if [[ "$KEY_VALUE" == *"$EXPECTED_VALUE"* ]];
137 | then
138 | RESULT=1
139 | else
140 | RESULT=0
141 | fi
142 | fi
143 | # else
144 | # if [ "$EXPECTED_VALUE" == "$KEY_VALUE" ]
145 | # then
146 | # RESULT=1
147 | # else
148 | # RESULT=0
149 | # fi
150 | # fi
151 | if [ "$RESULT" == "1" ]
152 | then
153 | if [ "$DEBUG" == "1" ];
154 | then
155 | echo -e "$PASS_PREFIX $TYPE | $MATCH | ${DESCRIPTION} | Expected ${EXPECTED_VALUE} | Found ${KEY_VALUE}"
156 | else
157 | echo -e "$PASS_PREFIX $TYPE | $MATCH | ${DESCRIPTION} | Expected ${EXPECTED_VALUE}"
158 | fi
159 | else
160 | if [ "$DEBUG" == "1" ];
161 | then
162 | echo -e "$FAIL_PREFIX $TYPE | $MATCH | ${DESCRIPTION} | Expected ${EXPECTED_VALUE} | Found ${KEY_VALUE}"
163 | else
164 | echo -e "$FAIL_PREFIX $TYPE | $MATCH | ${DESCRIPTION} | Expected ${EXPECTED_VALUE}"
165 | fi
166 | fi
167 | done
168 |
169 | echo -e ""
170 | echo -e "$INFO_PREFIX Evaluation of $BP_FULL completed. Please see results above."
171 | echo -e ""
172 | }
173 |
174 | # verify the required command-line parameters i.e. the BP directory and the BP we want to work with
175 | echo -e "$INFO_PREFIX Verifying command-line arguments."
176 | while getopts ":b:d:c:h" opt; do
177 | case $opt in
178 | b) export BLUEPRINT="$OPTARG"
179 | ;;
180 | d) export BLUEPRINT_DIRECTORY="$OPTARG"
181 | ;;
182 | c) export CRITERIA_FILE="$OPTARG"
183 | ;;
184 | h) show_help
185 | ;;
186 | \?) echo -e "$ERROR_PREFIX Unrecognised command-line argument specified: -$OPTARG" >&2
187 | ;;
188 | esac
189 | done
190 |
191 | # verify the blueprint directory exists
192 | if [ ! -d "$BLUEPRINT_DIRECTORY" ]
193 | then
194 | echo -e "$ERROR_PREFIX Specified blueprint directory not found or not specified using the -d argument."
195 | show_help
196 | exit
197 | else
198 | echo -e "$INFO_PREFIX Blueprint directory found. Continuing."
199 | fi
200 |
201 | # verify a blueprint has been specified using the -b parameter as a command-line argument
202 | if [ -z "$BLUEPRINT" ]
203 | then
204 | echo -e "$ERROR_PREFIX No blueprint specified. Please specify a blueprint by name by using the -b argument."
205 | show_help
206 | exit
207 | else
208 | echo -e "$INFO_PREFIX Blueprint name specified. Continuing."
209 | fi
210 |
211 | # verify an evaluation criteria file has been specified using the -c parameter as a command-line argument
212 | if [ ! -z "$CRITERIA_FILE" ]
213 | then
214 | if [ ! -f "$CRITERIA_FILE" ]
215 | then
216 | echo -e "$ERROR_PREFIX Evaluation criteria file not found. Please specify a valid evaluation criteria file by using the -c argument."
217 | show_help
218 | exit
219 | else
220 | echo -e "$INFO_PREFIX Evaluation criteria file specified and found. Continuing."
221 | fi
222 | else
223 | echo -e "$ERROR_PREFIX Evaluation criteria file not specified. Please specify an evaluation criteria file by using the -c argument."
224 | show_help
225 | exit
226 | fi
227 |
228 | # check to see if the user has indicated they want to parse all blueprints in the specified blueprint directory
229 | if [ "$BLUEPRINT" == "all" ]
230 | then
231 | echo -e "$INFO_PREFIX All JSON blueprints in $BLUEPRINT_DIRECTORY will be processed."
232 | echo -e ""
233 | # go over all JSON files in the specified blueprint directory
234 |     for BP_JSON_FILE in "$BLUEPRINT_DIRECTORY"/*.json
235 |     do
236 |         # only process the current JSON file if it is not the specified evaluation criteria file
237 |         if [[ ! "$BP_JSON_FILE" == *"$CRITERIA_FILE"* ]]
238 |         then
239 |             process_json "$(basename "$BP_JSON_FILE")"
240 | fi
241 | done
242 | else
243 | echo -e "$INFO_PREFIX Only $BLUEPRINT in $BLUEPRINT_DIRECTORY will be processed."
244 | echo -e ""
245 |     process_json "$BLUEPRINT"
246 | fi
247 |
248 | echo -e $LINE_BREAK
249 |
250 | # cleanup
251 | echo -e "$INFO_PREFIX Cleaning up."
252 |
253 | echo -e "$INFO_PREFIX Evaluation completed."
254 | echo -e ""
255 |
256 | echo -e "$INFO_PREFIX Evaluation script finished at `date`"
257 | echo -e ""
258 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/README.md:
--------------------------------------------------------------------------------
1 | # Evaluation Script for Udacity HCE Student Blueprints
2 |
3 | Please use this script to evaluate student blueprints for the course 2 and course 3 projects. For background, see the Nutanix.DEV blog article [Evaluations the DevOps Way](https://www.nutanix.dev/2020/12/15/evaluations-the-devops-way/).
4 |
5 | ## Evaluation App Deployment Instructions
6 |
7 | - Obtain a copy of this repository from GitHub.
8 |
9 | Users with access to an environment with `git` already installed can do this with the following command.
10 |
11 | ```
12 | git clone https://github.com/nutanixdev/udacity.git
13 | ```
14 |
15 | Users that prefer to download this repository using a web browser may follow these steps.
16 |
17 | - Browse to [https://github.com/nutanixdev/udacity.git](https://github.com/nutanixdev/udacity.git)
18 | - Click the `Code` button and select `Download ZIP`, as shown in the screenshot below.
19 |
20 | 
21 |
22 | - Log in to Prism Central using the provided credentials
23 | - On the left side of the Prism Central UI, open Nutanix Calm by clicking the "bars" icon, selecting `Services` and clicking `Calm`
24 |
25 | 
26 |
27 | - When Nutanix Calm opens, select the **Blueprints** button on the left side of the Prism Central UI
28 |
29 | 
30 |
31 | - Click **Upload Blueprint**
32 |
33 | 
34 |
35 | - Browse to the location of this repository on your local workstation and select `HybridCloudEngineer/eval/blueprints/Udacity-HybridCloudEngineer-Evaluation.json`
36 |
37 | 
38 |
39 | - Because blueprint names must be unique, it is suggested to use **Udacity-HybridCloudEngineer-Evaluation_\<initials\>**, with **\<initials\>** replaced by **your** initials
40 | - The **Project** field may be different for all environments
41 | - Enter `nutanix4/u` for the **Passphrase**
42 | - Click **Upload**
43 |
44 | If the upload process is successful, you will be taken directly to the Nutanix Calm blueprint designer for the uploaded blueprint.
45 |
46 | - Click the **Launch** button
47 |
48 | 
49 |
50 | - Complete all fields that can be modified
51 |
52 | - Enter an Application name
53 | - **EVAL_DIR**: Leave as default, unless you need to change the evaluation directory within the deployed virtual machine
54 | - **CENTOS_REPO**: Leave as default, unless your environment requires troubleshooting of CentOS package repositories
55 | - **DEFAULT_PROJECT**: Set this to the name of your Nutanix Calm project, as used when uploading the blueprint in previous steps
56 | - **PC_PORT**: Leave as default
57 | - **PC_IP**: Enter the IP address for your Prism Central instance - note this is the **IP Address** only, not the URL
58 |
59 | For example only:
60 |
61 | 
62 |
63 | - Click **Service Configuration** and click **UdacityEval**
64 |
65 | 
66 |
67 | - Scroll towards the lower part of the deployment settings and ensure the **NIC 1** field is set to a network available in your environment
68 |
69 | 
70 |
71 | - Click **Credentials** and click **PC_Credentials**
72 |
73 | - Enter the username and password for **your** Prism Central instance
74 |
75 | 
76 |
77 | **Note:** Please edit the **teccadmin** credential if you would like to use your own SSH key pair.
78 |
79 | - Click **Create**
80 |
81 | 
82 |
83 | - Wait for the Udacity Eval application to finish deploying. Nutanix Calm will need to download the CentOS 7 disk image and update Linux packages - this may take some time. A successful deployment will be indicated by all steps showing as **Finished** and an application state of **RUNNING**.
84 |
85 | 
86 |
87 | - Click the **Services** tab and select the single service that has been deployed
88 |
89 | 
90 |
91 | - On the right side of the Nutanix Calm UI, take note of the service's **IP Address**. An example is shown below - your service's IP address will be different
92 |
93 | 
94 |
95 | ## SSH Key Pair (If Required)
96 |
97 | If you did not use your own SSH key in earlier steps, the included credential uses the following SSH private and public keys.
98 |
99 | ### Private key
100 |
101 | ```
102 | -----BEGIN RSA PRIVATE KEY-----
103 | MIIJKQIBAAKCAgEA2Xua4cxPKDn4zBgR4f1wtVwIIeJxO/YCh66GT+daggE7P7r1
104 | TRjg6ZUIp9XhnzEL7Sr9Qf1PhYcEIAOWm5WK4wMgsP3jNEorfl84OFhZSAI8KMSn
105 | qKTVQM4fSZ0oglPROrRWkMN7AdDMBpMGM3V996Cb9yb7KcEMSjaeFr5UXcCIlTHa
106 | GffGZJ50SdkdXHZTb2Jx1goUtv8j8kDQXboi0lMkdvtKLvcvineyKmrETDM3XHD6
107 | Sl8C3yyfKuTab7MygugdCpcxFP79oYfYqJnWXGkUjnhw0OmzFb/1pWFy8boh/6Vy
108 | W74+R2WL6YHLzmpIkhXCFnVqcnW17hJV9FZeTCgwfIZPNBqsBp8BUYoN0YRNOTOR
109 | EgMvHj7TJnLJTxjhZoKgacnUVGPqj5zXGHh43PpVrlrR31/PppdrWo9Ykk4I05bt
110 | IeuWIl1D4fuXTLl+TyDQ6Wdhn5JkAbh59HWqJusqhyHi8tgogh61TdGinYC6tVbc
111 | 0LhXGjQ/uo8W4K53YnDSFHgFX3m2Fu9qgciBY+DRnmrpAw6fWSIpKK0LwXu5wtaa
112 | FID0fpJwkpl3VUioycUR30gLpLgAIsBlYLR1LjAHQCtyYQ7a3dtEmllRYLmKmdow
113 | p+rE4/uiXJwZs8faginXiqpF03CC84rcFMP++VzhotTg9CrvuBSV83gVmHsCAwEA
114 | AQKCAgAZ4KE/OMY6bRtzfXGzSCq/CaJWFdGPZuhKKLQGkeqtCWR5Ys+JSuqIIJmV
115 | Dtf3zygyIilcTr3Fj5koDgU/of2SoW6kYLFGKYUfVkkkOy70aH+sAipI4MQXH++N
116 | HMwTJdIBgC/Erd3XC3cbWSclvFcuvceREuxTvYqu0ezhm+gB98TIq9PdvyO2GTyR
117 | BOp/c/zdhgMxiqWIWLd4PcoNPPajvAzcs23SRMs9WJy0e1u8DWQK9c5yRvfagj62
118 | JeDv5YyfEutRtUe7zdN0Auv+s79T45T0ltQlp4kNj3e1I/E/XfxppNAEppBWgnix
119 | YiZT+0WDTF95uKLf1628FCZyGkzB3rvsLrvzvMDbn6PknotijBsV+jwaxmShl46Z
120 | wTtik7M/gXEX5SdG22z+gPBVjAIYY5oOuq962HmCiyq8OiJCYk8yrhXejZPdahQf
121 | 8SLttMmDR++xN9jEmNoC1y6ZYXPloEnE87GKSp1ulDtkgp4EqN2l44rfikJH0tH0
122 | 0UzDmm5Or5Xxzh2BV2UWJYqXvwDoP5udrHaoDSuBi+a6LlCEwPGPcchQXEkTP8K+
123 | Uka1WDlPvGQ8WZnCucQ04NnPv5j+fegMiwJtbDhdk53xPQ3MlVVenpvdXE5FFkYR
124 | dMsvMb9mSqTPGsT944xL6vPaiBWvidPk2QNZuJQayfaQep4dUQKCAQEA/wmf7dcv
125 | TJO1c2ss3Lvs2aXjKNCgIg0h/I15IXA2f9EmFsT4wFkpcRVQfIxzuj/HBGCx7AUq
126 | Q8JfopV6Dp2gMmIpqnVcIT19tXMwOKF4BytZZCajr5aA2eDo6OZpH6HCxYPum5Fe
127 | H0OAqoYwLCa+do5sjUhzfwrXpNCZm3lmJf1fP0hSMPIOri/+RnIAW00g16cmTcLD
128 | COXzNoKKjdZwYCSS+gkl+mfU2K1oR8z1n4ZCfllz9mO8iQMgvYwS+75b+92qeKZx
129 | Zb31Nx2Julxf50RV0Z9ueBiYebtn2xLVdCJ1lLa/cIARShp5b9CWU48KoOZ4wr+O
130 | Ix4MU+AUbKbJxQKCAQEA2k2zeOIKbbCQtfGHzlh9JBEsgLx443NFMtYUzNAj2v1e
131 | ReMlMbDgJCWdN09a3eG/bwPx+ASIM3rkcoc9HhifXEWnlWMzZDwMqNCxlr50H4np
132 | zinwYLtIKdFON5jHR61rbdQfJBQPXbRyAKCMWGJapPvvdCI3Gud49FuCR+7+/c9l
133 | jXA87K/tbNTE/cbCo1m2HfvgLdDP8zklEeufEpQ5GDlRoZ0CEmtS9tMNQ8ck7pRQ
134 | l88L/Tdb0pO8tRZYgkJoIwILQcSArXQLHLdaUm57bt65j8579CmXD9EJA5U3ENwY
135 | Ejxg36/Om3hRbCPN4yJnKrLrJAxt+jvLdpqb1n49PwKCAQEA7cpajFbzafHa3+eQ
136 | 56psdq4+M8L5W2ZlRuXdh/jpR5mJavdye11mz2IxLj+Cc6iddg52NfrG4nCr3JqE
137 | OHZtdCykbqOIKBH+UJ0628JpeD3J1iBUxBWOxBb48pZKc8zOHn0Zj4Elgf7dS6Fb
138 | 7v6c3UuEGOLxlQVV9PzuC9FQFaG9GfLjd3FpkK185s/KV1Z9hbT6eMxcLsLAVxW1
139 | +/NxyaKwS2MWPvhqszBTp4aJiZiUDNx4y8VId3MsJq8g0Ao4F4khNebymgtYRHBN
140 | By6z2JyVa6K8TEuadv3uIovX67VpbiRKMKmn9TDkAwY382bOYSEifWo/K60Lj3TB
141 | dmC4hQKCAQEAl9BBZ2eK6Q1JFb6KOy2+mT54PG7Vgxe+n8PRYz28MabR0LhOPWhL
142 | 99dQpg5BmtxLTODpfRlpvqNQC3WRQ0TMTlcCXBlIvAQmqZ+4YKTCDMzUE9OGnz6e
143 | yLYONwvASlCHzTEfFu/8X6YFQfpkn3KGpwWtWI4S1Fr0+rpIt8209r0sWlgbhl4y
144 | NuUguXtoEEFrLR7kn/f/JZ4v8W5yiXEa44W5BzMxJa4NXCkZqHDtdh0ejvQl2kqN
145 | SLwFF8oaczvLP81EFtCYzAdPUwEPhxPeMxQg56ko+nuR2iX8psk2KjtlYgkrNWQF
146 | wojB1vpvS+244TDb5Y3fTY4XyLtsdJYF8QKCAQBAsGuZlXMn6T1gGkBkLKNYFxAQ
147 | DUzjnmfV6TFJjlld2bIYOORXWPpDAOmcyImhYa0Rtqx+Z++8lMysAW4y66ISXQ5H
148 | omVY6iJsfTqRj94JWA8/lTAMBobw0d9fK3qoNcsegLMKyC5JVFW9VIaj5WjDLeqO
149 | cQ5zjZQgDyxxRZE7FIHKzvmZLc8eC5131I+WRGSmyqU41T4GE/Bz55HtTBbKSn1h
150 | nFnNehWfT/W1+vKrFuzBnPgP6xfiyc1amfa3OflAAZK5/RuxA92kosBIlQBRm+sy
151 | uzhZFfuvAEpPvBQ3Iq2TZEJjq3gnU7DXdvU8ihkpvGBXl/T6+RiTbCNzIco3
152 | -----END RSA PRIVATE KEY-----
153 | ```
154 |
155 | ### Public Key
156 |
157 | ```
158 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDZe5rhzE8oOfjMGBHh/XC1XAgh4nE79gKHroZP51qCATs/uvVNGODplQin1eGfMQvtKv1B/U+FhwQgA5ablYrjAyCw/eM0Sit+Xzg4WFlIAjwoxKeopNVAzh9JnSiCU9E6tFaQw3sB0MwGkwYzdX33oJv3JvspwQxKNp4WvlRdwIiVMdoZ98ZknnRJ2R1cdlNvYnHWChS2/yPyQNBduiLSUyR2+0ou9y+Kd7IqasRMMzdccPpKXwLfLJ8q5NpvszKC6B0KlzEU/v2hh9iomdZcaRSOeHDQ6bMVv/WlYXLxuiH/pXJbvj5HZYvpgcvOakiSFcIWdWpydbXuElX0Vl5MKDB8hk80GqwGnwFRig3RhE05M5ESAy8ePtMmcslPGOFmgqBpydRUY+qPnNcYeHjc+lWuWtHfX8+ml2taj1iSTgjTlu0h65YiXUPh+5dMuX5PINDpZ2GfkmQBuHn0daom6yqHIeLy2CiCHrVN0aKdgLq1VtzQuFcaND+6jxbgrndicNIUeAVfebYW72qByIFj4NGeaukDDp9ZIikorQvBe7nC1poUgPR+knCSmXdVSKjJxRHfSAukuAAiwGVgtHUuMAdAK3JhDtrd20SaWVFguYqZ2jCn6sTj+6JcnBmzx9qCKdeKqkXTcILzitwUw/75XOGi1OD0Ku+4FJXzeBWYew== no-reply@acme.com
159 | ```
160 |
161 | ## Evaluation VM Credentials
162 |
163 | - The username for the deployed VM is **teccadmin**. This user account supports authentication by SSH key pair *only*.
164 |
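If the bundled key is not already loaded into an SSH agent, you can point `ssh` at the private key explicitly. A minimal sketch, assuming the private key shown above has been saved to `~/.ssh/udacity` (an assumed path) and using `<evaluation_vm_ip>` as a placeholder for the service IP noted earlier:

```
# connect to the evaluation VM as teccadmin with an explicit private key
ssh -i ~/.ssh/udacity teccadmin@<evaluation_vm_ip>
```
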
165 | ## Student Blueprint Evaluation Instructions
166 |
167 | - Obtain exported Nutanix Calm blueprint(s). The blueprints must be **exported** from the Nutanix Calm UI and not decompiled using the Calm DSL (the JSON output differs between the two methods).
168 |
169 | Each blueprint will be a single JSON document, when exported correctly.
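
A quick sanity check on an export (a minimal sketch, assuming `jq` is available and using **blueprint1.json** as a placeholder file name) is to confirm that the file parses as JSON and exposes the top-level `spec` section the evaluation criteria reference:

```
# confirm the export is valid JSON; jq exits non-zero if it is not
jq empty blueprint1.json && echo "valid JSON"

# exported blueprints carry the application name at .spec.name
jq -r '.spec.name' blueprint1.json
```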
170 |
171 | - Transfer the student blueprint(s) to the evaluation VM deployed by Calm in previous steps. Because the Udacity evaluation deploys a VM that uses SSH key pair authentication only, the transfer steps will be different depending on method and/or application used.
172 |
173 | Some suggested methods are:
174 |
175 |   - SCP (built into Linux and Mac systems). The example below transfers a file named **blueprint1.json** to the **~/nutanix/udacity_eval** directory on the deployed VM, using an SSH key named **udacity**.
176 |
177 | ```
178 | scp -i ~/.ssh/udacity ~/blueprint1.json teccadmin@<evaluation_vm_ip>:~/nutanix/udacity_eval/blueprint1.json
179 | ```
180 |
181 | - SFTP using [Cyberduck](https://cyberduck.io/). Cyberduck is recommended as it can make use of PEM-formatted SSH keys as mentioned above, without the need to convert them to PuTTY format. Please refer to the [Cyberduck SFTP documentation](https://trac.cyberduck.io/wiki/help/en/howto/sftp) for instructions.
182 | - Any other method of your choice, provided it can connect using SSH key pair
183 |
184 | ## Script Usage
185 |
186 | **Note:** This section assumes you have not altered the **EVAL_DIR** variable from the default - *~/nutanix/udacity_eval/*
187 |
188 | - Open an SSH session to the IP address obtained in earlier steps. The username for the VM is **teccadmin**.
189 |
190 | ```
191 | ssh teccadmin@<evaluation_vm_ip>
192 | ```
193 |
194 | - Change to the Nutanix Calm DSL directory and activate the required virtual environment
195 |
196 | ```
197 | cd ~/nutanix/calm-dsl
198 | . venv/bin/activate
199 | ```
200 |
201 | - Run a quick test to ensure the evaluation script can be executed and does not return any errors
202 |
203 | ```
204 | cd ~/nutanix/udacity_eval/
205 | ./eval.bash
206 | ```
207 |
208 | 
209 |
210 | The script will make sure the environment is ready to run the evaluations and will exit with usage instructions.
211 |
212 | - Student blueprints can now be evaluated.
213 |
214 | If you would like to evaluate all blueprint JSON files in the **~/nutanix/udacity_eval** directory and make sure they match the requirements for Course 3, run the following command:
215 |
216 | ```
217 | ./eval.bash -c c3.json -d . -b all
218 | ```
219 |
220 | If you would like to evaluate a specific blueprint JSON file, the syntax can be altered as follows:
221 |
222 | ```
223 | ./eval.bash -c c3.json -d . -b blueprint1.json
224 | ```
225 |
226 | The following screenshot shows an example of the script's output, when used to evaluate all blueprints in the current directory.
227 |
228 | 
229 |
230 | As you can see, the entire process for two complete blueprints takes only a few seconds. Please note that the speed of this process may vary slightly depending on individual environments.
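
Under the hood, each simple criterion boils down to a `jq` lookup compared against its `expected` value. As a minimal sketch of the comparison performed for a "number of Calm service instances" check (using **blueprint1.json** as a placeholder file name and an expected count of 1, as in the bundled `p3.json`):

```
# count the service definitions and compare against the expected value
EXPECTED=1
FOUND=$(jq -r '.spec.resources.service_definition_list | length' blueprint1.json)
if [ "$FOUND" == "$EXPECTED" ]
then
    echo "[PASS] Calm Service Instances | Expected $EXPECTED"
else
    echo "[FAIL] Calm Service Instances | Expected $EXPECTED | Found $FOUND"
fi
```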
231 |
232 | ### Script Usage Help (If Required)
233 |
234 | ```
235 | Usage: eval.bash [ARGS]
236 |
237 | Args:
238 | -h Show this help and exit.
239 | -d Location of user blueprints
240 | -b Blueprint name to evaluate
241 | -c Evaluation criteria file to use for comparison
242 |
243 | Note:
244 | -b value can be "all", to batch process all JSON blueprints in the specified directory
245 |
246 | Examples:
247 | eval.bash -c eval.json -d ~/blueprints -b blueprint1
248 | eval.bash -c eval.json -d . -b all
249 | ```
250 |
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/blueprints/basiclinuxvm_test.json:
--------------------------------------------------------------------------------
1 | {
2 | "status": {},
3 | "contains_secrets": false,
4 | "product_version": "3.0.6",
5 | "spec": {
6 | "description": "Simple BP for working with the Nutanix Calm DSL.",
7 | "resources": {
8 | "client_attrs": {
9 | "None": {
10 | "Profile": {
11 | "Default": {
12 | "Action": {},
13 | "dsl_name": "Default"
14 | }
15 | },
16 | "Deployment": {
17 | "basiclinuxvm_deployment": {
18 | "Action": {},
19 | "dsl_name": "basiclinuxvm_deployment"
20 | }
21 | },
22 | "Substrate": {
23 | "BasicLinuxVm": {
24 | "Action": {},
25 | "AhvVm": {
26 | "vm-@@{calm_array_index}@@-@@{calm_time}@@": {
27 | "dsl_name": "vmcalm_array_indexcalm_time"
28 | }
29 | },
30 | "dsl_name": "BasicLinuxVm"
31 | }
32 | },
33 | "Service": {
34 | "BasicLinux": {
35 | "Action": {},
36 | "dsl_name": "BasicLinux"
37 | }
38 | },
39 | "Package": {
40 | "CentosImage": {
41 | "Action": {},
42 | "dsl_name": "CentosImage"
43 | },
44 | "BasicLinuxPackage": {
45 | "Action": {},
46 | "dsl_name": "BasicLinuxPackage"
47 | }
48 | }
49 | }
50 | },
51 | "service_definition_list": [{
52 | "singleton": false,
53 | "action_list": [{
54 | "description": "System action for creating an application",
55 | "type": "system",
56 | "critical": true,
57 | "runbook": {
58 | "task_definition_list": [{
59 | "target_any_local_reference": {
60 | "kind": "app_service",
61 | "name": "BasicLinux"
62 | },
63 | "retries": "0",
64 | "description": "",
65 | "child_tasks_local_reference_list": [],
66 | "name": "DAG_Task_for_Service_BasicLinux_action_create",
67 | "attrs": {
68 | "edges": [],
69 | "type": ""
70 | },
71 | "timeout_secs": "0",
72 | "type": "DAG",
73 | "variable_list": []
74 | }],
75 | "description": "",
76 | "name": "Runbook_for_Service_BasicLinux_action_create",
77 | "main_task_local_reference": {
78 | "kind": "app_task",
79 | "name": "DAG_Task_for_Service_BasicLinux_action_create"
80 | },
81 | "variable_list": []
82 | },
83 | "name": "action_create"
84 | },
85 | {
86 | "description": "System action for deleting an application. Deletes created VMs as well",
87 | "type": "system",
88 | "critical": true,
89 | "runbook": {
90 | "task_definition_list": [{
91 | "target_any_local_reference": {
92 | "kind": "app_service",
93 | "name": "BasicLinux"
94 | },
95 | "retries": "0",
96 | "description": "",
97 | "child_tasks_local_reference_list": [],
98 | "name": "DAG_Task_for_Service_BasicLinux_action_delete",
99 | "attrs": {
100 | "edges": [],
101 | "type": ""
102 | },
103 | "timeout_secs": "0",
104 | "type": "DAG",
105 | "variable_list": []
106 | }],
107 | "description": "",
108 | "name": "Runbook_for_Service_BasicLinux_action_delete",
109 | "main_task_local_reference": {
110 | "kind": "app_task",
111 | "name": "DAG_Task_for_Service_BasicLinux_action_delete"
112 | },
113 | "variable_list": []
114 | },
115 | "name": "action_delete"
116 | },
117 | {
118 | "description": "System action for starting an application",
119 | "type": "system",
120 | "critical": true,
121 | "runbook": {
122 | "task_definition_list": [{
123 | "target_any_local_reference": {
124 | "kind": "app_service",
125 | "name": "BasicLinux"
126 | },
127 | "retries": "0",
128 | "description": "",
129 | "child_tasks_local_reference_list": [],
130 | "name": "DAG_Task_for_Service_BasicLinux_action_start",
131 | "attrs": {
132 | "edges": [],
133 | "type": ""
134 | },
135 | "timeout_secs": "0",
136 | "type": "DAG",
137 | "variable_list": []
138 | }],
139 | "description": "",
140 | "name": "Runbook_for_Service_BasicLinux_action_start",
141 | "main_task_local_reference": {
142 | "kind": "app_task",
143 | "name": "DAG_Task_for_Service_BasicLinux_action_start"
144 | },
145 | "variable_list": []
146 | },
147 | "name": "action_start"
148 | },
149 | {
150 | "description": "System action for stopping an application",
151 | "type": "system",
152 | "critical": true,
153 | "runbook": {
154 | "task_definition_list": [{
155 | "target_any_local_reference": {
156 | "kind": "app_service",
157 | "name": "BasicLinux"
158 | },
159 | "retries": "0",
160 | "description": "",
161 | "child_tasks_local_reference_list": [],
162 | "name": "DAG_Task_for_Service_BasicLinux_action_stop",
163 | "attrs": {
164 | "edges": [],
165 | "type": ""
166 | },
167 | "timeout_secs": "0",
168 | "type": "DAG",
169 | "variable_list": []
170 | }],
171 | "description": "",
172 | "name": "Runbook_for_Service_BasicLinux_action_stop",
173 | "main_task_local_reference": {
174 | "kind": "app_task",
175 | "name": "DAG_Task_for_Service_BasicLinux_action_stop"
176 | },
177 | "variable_list": []
178 | },
179 | "name": "action_stop"
180 | },
181 | {
182 | "description": "System action for restarting an application",
183 | "type": "system",
184 | "critical": true,
185 | "runbook": {
186 | "task_definition_list": [{
187 | "target_any_local_reference": {
188 | "kind": "app_service",
189 | "name": "BasicLinux"
190 | },
191 | "retries": "0",
192 | "description": "",
193 | "child_tasks_local_reference_list": [],
194 | "name": "DAG_Task_for_Service_BasicLinux_action_restart",
195 | "attrs": {
196 | "edges": [],
197 | "type": ""
198 | },
199 | "timeout_secs": "0",
200 | "type": "DAG",
201 | "variable_list": []
202 | }],
203 | "description": "",
204 | "name": "Runbook_for_Service_BasicLinux_action_restart",
205 | "main_task_local_reference": {
206 | "kind": "app_task",
207 | "name": "DAG_Task_for_Service_BasicLinux_action_restart"
208 | },
209 | "variable_list": []
210 | },
211 | "name": "action_restart"
212 | },
213 | {
214 | "description": "System action for deleting an application. Does not delete created VMs",
215 | "type": "system",
216 | "critical": true,
217 | "runbook": {
218 | "task_definition_list": [{
219 | "target_any_local_reference": {
220 | "kind": "app_service",
221 | "name": "BasicLinux"
222 | },
223 | "retries": "0",
224 | "description": "",
225 | "child_tasks_local_reference_list": [],
226 | "name": "DAG_Task_for_Service_BasicLinux_action_soft_delete",
227 | "attrs": {
228 | "edges": [],
229 | "type": ""
230 | },
231 | "timeout_secs": "0",
232 | "type": "DAG",
233 | "variable_list": []
234 | }],
235 | "description": "",
236 | "name": "Runbook_for_Service_BasicLinux_action_soft_delete",
237 | "main_task_local_reference": {
238 | "kind": "app_task",
239 | "name": "DAG_Task_for_Service_BasicLinux_action_soft_delete"
240 | },
241 | "variable_list": []
242 | },
243 | "name": "action_soft_delete"
244 | }
245 | ],
246 | "depends_on_list": [],
247 | "name": "BasicLinux",
248 | "port_list": [],
249 | "tier": "",
250 | "variable_list": [],
251 | "description": ""
252 | }],
253 | "substrate_definition_list": [{
254 | "description": "",
255 | "action_list": [],
256 | "type": "AHV_VM",
257 | "name": "BasicLinuxVm",
258 | "readiness_probe": {
259 | "connection_type": "SSH",
260 | "retries": "5",
261 | "connection_protocol": "",
262 | "connection_port": 22,
263 | "address": "@@{platform.status.resources.nic_list[0].ip_endpoint_list[0].ip}@@",
264 | "delay_secs": "60",
265 | "disable_readiness_probe": false,
266 | "login_credential_local_reference": {
267 | "kind": "app_credential",
268 | "name": "teccadmin"
269 | }
270 | },
271 | "editables": {
272 | "create_spec": {
273 | "resources": {
274 | "nic_list": {},
275 | "serial_port_list": {}
276 | }
277 | }
278 | },
279 | "os_type": "Linux",
280 | "create_spec": {
281 | "name": "vm-@@{calm_array_index}@@-@@{calm_time}@@",
282 | "resources": {
283 | "nic_list": [{
284 | "nic_type": "NORMAL_NIC",
285 | "ip_endpoint_list": [],
286 | "network_function_chain_reference": null,
287 | "network_function_nic_type": "INGRESS",
288 | "mac_address": "",
289 | "subnet_reference": {
290 | "kind": "subnet",
291 | "type": "",
292 | "name": "Primary",
293 | "uuid": "3b6477b0-2cdb-4a3b-9ee8-d93563738959"
294 | },
295 | "type": ""
296 | }],
297 | "serial_port_list": [],
298 | "guest_tools": null,
299 | "num_vcpus_per_socket": 1,
300 | "num_sockets": 1,
301 | "gpu_list": [],
302 | "memory_size_mib": 1024,
303 | "parent_reference": null,
304 | "hardware_clock_timezone": "",
305 | "guest_customization": {
306 | "cloud_init": {
307 | "meta_data": "",
308 | "type": "",
309 | "user_data": "#cloud-config\nusers:\n - name: @@{teccadmin.username}@@\n ssh-authorized-keys:\n - @@{InstancePublicKey}@@\n - @@{teccadmin.public_key}@@\n sudo: ['ALL=(ALL) NOPASSWD:ALL']\n"
310 | },
311 | "type": "",
312 | "sysprep": null
313 | },
314 | "power_state": "ON",
315 | "type": "",
316 | "account_uuid": "24beaac4-5024-4d96-9b93-d9bbf520f645",
317 | "boot_config": {
318 | "boot_device": {
319 | "type": "",
320 | "disk_address": {
321 | "type": "",
322 | "device_index": 0,
323 | "adapter_type": "SCSI"
324 | }
325 | },
326 | "type": "",
327 | "boot_type": "",
328 | "mac_address": ""
329 | },
330 | "disk_list": [{
331 | "data_source_reference": {
332 | "kind": "app_package",
333 | "type": "",
334 | "name": "CentosImage",
335 | "uuid": "288eb91b-989a-44f2-b836-9dcc78deca58"
336 | },
337 | "type": "",
338 | "disk_size_mib": 0,
339 | "volume_group_reference": null,
340 | "device_properties": {
341 | "type": "",
342 | "disk_address": {
343 | "type": "",
344 | "device_index": 0,
345 | "adapter_type": "SCSI"
346 | },
347 | "device_type": "DISK"
348 | }
349 | }]
350 | },
351 | "availability_zone_reference": null,
352 | "backup_policy": null,
353 | "type": "",
354 | "cluster_reference": null,
355 | "categories": ""
356 | },
357 | "variable_list": []
358 | }],
359 | "credential_definition_list": [{
360 | "username": "teccadmin",
361 | "description": "",
362 | "type": "KEY",
363 | "secret": {
364 | "attrs": {
365 | "is_secret_modified": false,
366 | "secret_reference": {}
367 | }
368 | },
369 | "name": "teccadmin"
370 | }],
371 | "package_definition_list": [{
372 | "description": "",
373 | "action_list": [],
374 | "type": "CUSTOM",
375 | "service_local_reference_list": [{
376 | "kind": "app_service",
377 | "name": "BasicLinux"
378 | }],
379 | "name": "BasicLinuxPackage",
380 | "version": "",
381 | "options": {
382 | "install_runbook": {
383 | "task_definition_list": [{
384 | "target_any_local_reference": {
385 | "kind": "app_service",
386 | "name": "BasicLinux"
387 | },
388 | "retries": "0",
389 | "description": "",
390 | "child_tasks_local_reference_list": [{
391 | "kind": "app_task",
392 | "name": "ConfigureBaseVM"
393 | },
394 | {
395 | "kind": "app_task",
396 | "name": "ConfigureFirewall"
397 | }
398 | ],
399 | "name": "BasicLinuxPackage___install___dag",
400 | "attrs": {
401 | "edges": [{
402 | "from_task_reference": {
403 | "kind": "app_task",
404 | "name": "ConfigureBaseVM"
405 | },
406 | "edge_type": "user_defined",
407 | "type": "",
408 | "to_task_reference": {
409 | "kind": "app_task",
410 | "name": "ConfigureFirewall"
411 | }
412 | }],
413 | "type": ""
414 | },
415 | "timeout_secs": "0",
416 | "type": "DAG",
417 | "variable_list": []
418 | },
419 | {
420 | "target_any_local_reference": {
421 | "kind": "app_service",
422 | "name": "BasicLinux"
423 | },
424 | "retries": "0",
425 | "description": "",
426 | "child_tasks_local_reference_list": [],
427 | "name": "ConfigureBaseVM",
428 | "attrs": {
429 | "exit_status": [],
430 | "script": "#!\/bin\/bash\n\n# update base OS packages\nsudo yum -y update\nsudo yum -y upgrade\n\n# install some useful packages\nsudo yum -y install vim git\n",
431 | "script_type": "sh",
432 | "type": "",
433 | "command_line_args": "",
434 | "login_credential_local_reference": {
435 | "kind": "app_credential",
436 | "name": "teccadmin"
437 | }
438 | },
439 | "timeout_secs": "0",
440 | "type": "EXEC",
441 | "variable_list": []
442 | },
443 | {
444 | "target_any_local_reference": {
445 | "kind": "app_service",
446 | "name": "BasicLinux"
447 | },
448 | "retries": "0",
449 | "description": "",
450 | "child_tasks_local_reference_list": [],
451 | "name": "ConfigureFirewall",
452 | "attrs": {
453 | "exit_status": [],
454 | "script": "#!\/bin\/bash\n\n# install and enable firewalld\nsudo yum -y install firewalld\nsudo systemctl enable firewalld\nsudo systemctl start firewalld\n",
455 | "script_type": "sh",
456 | "type": "",
457 | "command_line_args": "",
458 | "login_credential_local_reference": {
459 | "kind": "app_credential",
460 | "name": "teccadmin"
461 | }
462 | },
463 | "timeout_secs": "0",
464 | "type": "EXEC",
465 | "variable_list": []
466 | }
467 | ],
468 | "description": "",
469 | "name": "BasicLinuxPackage___install___runbook",
470 | "main_task_local_reference": {
471 | "kind": "app_task",
472 | "name": "BasicLinuxPackage___install___dag"
473 | },
474 | "variable_list": []
475 | },
476 | "type": "",
477 | "uninstall_runbook": {
478 | "task_definition_list": [{
479 | "target_any_local_reference": {
480 | "kind": "app_service",
481 | "name": "BasicLinux"
482 | },
483 | "retries": "0",
484 | "description": "",
485 | "child_tasks_local_reference_list": [],
486 | "name": "DAG_Task_for_Package_BasicLinuxPackage_action_uninstall",
487 | "attrs": {
488 | "edges": [],
489 | "type": ""
490 | },
491 | "timeout_secs": "0",
492 | "type": "DAG",
493 | "variable_list": []
494 | }],
495 | "description": "",
496 | "name": "Runbook_for_Package_BasicLinuxPackage_action_uninstall",
497 | "main_task_local_reference": {
498 | "kind": "app_task",
499 | "name": "DAG_Task_for_Package_BasicLinuxPackage_action_uninstall"
500 | },
501 | "variable_list": []
502 | }
503 | },
504 | "variable_list": []
505 | },
506 | {
507 | "description": "https:\/\/cloud.centos.org\/centos\/7\/images\/",
508 | "action_list": [],
509 | "type": "SUBSTRATE_IMAGE",
510 | "service_local_reference_list": [],
511 | "name": "CentosImage",
512 | "version": "",
513 | "options": {
514 | "type": "",
515 | "name": "CentOS-7.8-2003",
516 | "resources": {
517 | "image_type": "DISK_IMAGE",
518 | "checksum": {
519 | "checksum_algorithm": "",
520 | "type": "",
521 | "checksum_value": ""
522 | },
523 | "source_uri": "https:\/\/cloud.centos.org\/centos\/7\/images\/CentOS-7-x86_64-GenericCloud-2003.qcow2",
524 | "version": {
525 | "product_version": "7.8",
526 | "type": "",
527 | "product_name": "CentOS"
528 | },
529 | "architecture": "X86_64",
530 | "type": ""
531 | },
532 | "description": ""
533 | },
534 | "variable_list": []
535 | }
536 | ],
537 | "app_profile_list": [{
538 | "deployment_create_list": [{
539 | "type": "GREENFIELD",
540 | "action_list": [],
541 | "name": "basiclinuxvm_deployment",
542 | "min_replicas": "1",
543 | "default_replicas": "1",
544 | "depends_on_list": [],
545 | "published_service_local_reference_list": [],
546 | "max_replicas": "1",
547 | "package_local_reference_list": [{
548 | "kind": "app_package",
549 | "name": "BasicLinuxPackage"
550 | }],
551 | "substrate_local_reference": {
552 | "kind": "app_substrate",
553 | "name": "BasicLinuxVm"
554 | },
555 | "options": {
556 | "type": ""
557 | },
558 | "variable_list": [],
559 | "description": ""
560 | }],
561 | "description": "",
562 | "action_list": [],
563 | "name": "Default",
564 | "variable_list": [{
565 | "val_type": "STRING",
566 | "is_mandatory": false,
567 | "description": "",
568 | "data_type": "BASE",
569 | "type": "LOCAL",
570 | "name": "InstancePublicKey",
571 | "value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCm+7N2tjmJw5jhPmD8MS6urZQJB42ABh73ffGQSJ0XUHgdEDfjUDFkLK0wyJCe0sF5QJnh07UQn0F0BUnBi+VwehPGeODh6S43OP5YS\/14L0fyntFI06B9lckx\/ygRNu82sHxXCX+6VVUFPOPC+sz6j1DQswKY9d4cEYnaMBGSzqRxrqAIf6aWIKTJTYKPFY0zaUZ6ow2iwS0Nlh5EqaXsEBWkqMmr7\/auP9GV\/adUgzFrGLJklYBdfH575SIK6\/PZL6wNT0jE9LmFlEm7dI01ZWPclBuV16FzRyrnzmWr\/ebY62A04vYBtR0vyfEfsW2ZgxgD6aAE6+ytj0v19y0elRtOaeTySN\/HlXh7owKWCHnlXNpTUiSDP8SQ8LRARkhQu3KEDL0ppGCrSF87oFkp1gPzf92U+UK3LaNMMjZXMOy0zLoLEdLtbQo6S8iHggDoX4NI4sWWxcX0mtadvjy\/nIOvskk9IXasQh0u0MT9ARQY5VXPluKDtEVdeow9UbvgJ1xxNkphUgsWjCiy+sjgapsuZvWqKM6TPT1i24XYaau+\/Fa0vhjLb8vCMWrrtkRwGt4re243NDYcYWTzVZUFuUK0w1wqt77KgjCCeyJdsZNwrh15v780Fjqpec3EGVA0xyNbF0jn\/tsnYy9jPh\/6Cv767EratI97JhUxoB4gXw== no-reply@acme.com",
572 | "label": "",
573 | "attrs": {
574 | "type": ""
575 | },
576 | "editables": {
577 | "value": true
578 | },
579 | "is_hidden": false
580 | }]
581 | }],
582 | "published_service_definition_list": [],
583 | "default_credential_local_reference": {
584 | "kind": "app_credential",
585 | "name": "teccadmin"
586 | },
587 | "type": "USER"
588 | },
589 | "name": "BasicLinuxVM-cr"
590 | },
591 | "api_version": "3.0",
592 | "metadata": {
593 | "last_update_time": "1601428495728975",
594 | "kind": "blueprint",
595 | "spec_version": 4,
596 | "creation_time": "1597302209717229",
597 | "name": "BasicLinuxVM-cr"
598 | }
599 | }
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/blueprints/basiclinuxvm_test_2.json:
--------------------------------------------------------------------------------
1 | {
2 | "status": {},
3 | "contains_secrets": false,
4 | "product_version": "3.0.6",
5 | "spec": {
6 | "description": "Simple BP for working with the Nutanix Calm DSL.",
7 | "resources": {
8 | "client_attrs": {
9 | "None": {
10 | "Profile": {
11 | "Default": {
12 | "Action": {},
13 | "dsl_name": "Default"
14 | }
15 | },
16 | "Deployment": {
17 | "basiclinuxvm_deployment": {
18 | "Action": {},
19 | "dsl_name": "basiclinuxvm_deployment"
20 | }
21 | },
22 | "Substrate": {
23 | "BasicLinuxVm": {
24 | "Action": {},
25 | "AhvVm": {
26 | "vm-@@{calm_array_index}@@-@@{calm_time}@@": {
27 | "dsl_name": "vmcalm_array_indexcalm_time"
28 | }
29 | },
30 | "dsl_name": "BasicLinuxVm"
31 | }
32 | },
33 | "Service": {
34 | "BasicLinux": {
35 | "Action": {},
36 | "dsl_name": "BasicLinux"
37 | }
38 | },
39 | "Package": {
40 | "CentosImage": {
41 | "Action": {},
42 | "dsl_name": "CentosImage"
43 | },
44 | "BasicLinuxPackage": {
45 | "Action": {},
46 | "dsl_name": "BasicLinuxPackage"
47 | }
48 | }
49 | }
50 | },
51 | "service_definition_list": [{
52 | "singleton": false,
53 | "action_list": [{
54 | "description": "System action for creating an application",
55 | "type": "system",
56 | "critical": true,
57 | "runbook": {
58 | "task_definition_list": [{
59 | "target_any_local_reference": {
60 | "kind": "app_service",
61 | "name": "BasicLinux"
62 | },
63 | "retries": "0",
64 | "description": "",
65 | "child_tasks_local_reference_list": [],
66 | "name": "DAG_Task_for_Service_BasicLinux_action_create",
67 | "attrs": {
68 | "edges": [],
69 | "type": ""
70 | },
71 | "timeout_secs": "0",
72 | "type": "DAG",
73 | "variable_list": []
74 | }],
75 | "description": "",
76 | "name": "Runbook_for_Service_BasicLinux_action_create",
77 | "main_task_local_reference": {
78 | "kind": "app_task",
79 | "name": "DAG_Task_for_Service_BasicLinux_action_create"
80 | },
81 | "variable_list": []
82 | },
83 | "name": "action_create"
84 | },
85 | {
86 | "description": "System action for deleting an application. Deletes created VMs as well",
87 | "type": "system",
88 | "critical": true,
89 | "runbook": {
90 | "task_definition_list": [{
91 | "target_any_local_reference": {
92 | "kind": "app_service",
93 | "name": "BasicLinux"
94 | },
95 | "retries": "0",
96 | "description": "",
97 | "child_tasks_local_reference_list": [],
98 | "name": "DAG_Task_for_Service_BasicLinux_action_delete",
99 | "attrs": {
100 | "edges": [],
101 | "type": ""
102 | },
103 | "timeout_secs": "0",
104 | "type": "DAG",
105 | "variable_list": []
106 | }],
107 | "description": "",
108 | "name": "Runbook_for_Service_BasicLinux_action_delete",
109 | "main_task_local_reference": {
110 | "kind": "app_task",
111 | "name": "DAG_Task_for_Service_BasicLinux_action_delete"
112 | },
113 | "variable_list": []
114 | },
115 | "name": "action_delete"
116 | },
117 | {
118 | "description": "System action for starting an application",
119 | "type": "system",
120 | "critical": true,
121 | "runbook": {
122 | "task_definition_list": [{
123 | "target_any_local_reference": {
124 | "kind": "app_service",
125 | "name": "BasicLinux"
126 | },
127 | "retries": "0",
128 | "description": "",
129 | "child_tasks_local_reference_list": [],
130 | "name": "DAG_Task_for_Service_BasicLinux_action_start",
131 | "attrs": {
132 | "edges": [],
133 | "type": ""
134 | },
135 | "timeout_secs": "0",
136 | "type": "DAG",
137 | "variable_list": []
138 | }],
139 | "description": "",
140 | "name": "Runbook_for_Service_BasicLinux_action_start",
141 | "main_task_local_reference": {
142 | "kind": "app_task",
143 | "name": "DAG_Task_for_Service_BasicLinux_action_start"
144 | },
145 | "variable_list": []
146 | },
147 | "name": "action_start"
148 | },
149 | {
150 | "description": "System action for stopping an application",
151 | "type": "system",
152 | "critical": true,
153 | "runbook": {
154 | "task_definition_list": [{
155 | "target_any_local_reference": {
156 | "kind": "app_service",
157 | "name": "BasicLinux"
158 | },
159 | "retries": "0",
160 | "description": "",
161 | "child_tasks_local_reference_list": [],
162 | "name": "DAG_Task_for_Service_BasicLinux_action_stop",
163 | "attrs": {
164 | "edges": [],
165 | "type": ""
166 | },
167 | "timeout_secs": "0",
168 | "type": "DAG",
169 | "variable_list": []
170 | }],
171 | "description": "",
172 | "name": "Runbook_for_Service_BasicLinux_action_stop",
173 | "main_task_local_reference": {
174 | "kind": "app_task",
175 | "name": "DAG_Task_for_Service_BasicLinux_action_stop"
176 | },
177 | "variable_list": []
178 | },
179 | "name": "action_stop"
180 | },
181 | {
182 | "description": "System action for restarting an application",
183 | "type": "system",
184 | "critical": true,
185 | "runbook": {
186 | "task_definition_list": [{
187 | "target_any_local_reference": {
188 | "kind": "app_service",
189 | "name": "BasicLinux"
190 | },
191 | "retries": "0",
192 | "description": "",
193 | "child_tasks_local_reference_list": [],
194 | "name": "DAG_Task_for_Service_BasicLinux_action_restart",
195 | "attrs": {
196 | "edges": [],
197 | "type": ""
198 | },
199 | "timeout_secs": "0",
200 | "type": "DAG",
201 | "variable_list": []
202 | }],
203 | "description": "",
204 | "name": "Runbook_for_Service_BasicLinux_action_restart",
205 | "main_task_local_reference": {
206 | "kind": "app_task",
207 | "name": "DAG_Task_for_Service_BasicLinux_action_restart"
208 | },
209 | "variable_list": []
210 | },
211 | "name": "action_restart"
212 | },
213 | {
214 | "description": "System action for deleting an application. Does not delete created VMs",
215 | "type": "system",
216 | "critical": true,
217 | "runbook": {
218 | "task_definition_list": [{
219 | "target_any_local_reference": {
220 | "kind": "app_service",
221 | "name": "BasicLinux"
222 | },
223 | "retries": "0",
224 | "description": "",
225 | "child_tasks_local_reference_list": [],
226 | "name": "DAG_Task_for_Service_BasicLinux_action_soft_delete",
227 | "attrs": {
228 | "edges": [],
229 | "type": ""
230 | },
231 | "timeout_secs": "0",
232 | "type": "DAG",
233 | "variable_list": []
234 | }],
235 | "description": "",
236 | "name": "Runbook_for_Service_BasicLinux_action_soft_delete",
237 | "main_task_local_reference": {
238 | "kind": "app_task",
239 | "name": "DAG_Task_for_Service_BasicLinux_action_soft_delete"
240 | },
241 | "variable_list": []
242 | },
243 | "name": "action_soft_delete"
244 | }
245 | ],
246 | "depends_on_list": [],
247 | "name": "BasicLinux",
248 | "port_list": [],
249 | "tier": "",
250 | "variable_list": [],
251 | "description": ""
252 | }],
253 | "substrate_definition_list": [{
254 | "description": "",
255 | "action_list": [],
256 | "type": "AHV_VM",
257 | "name": "BasicLinuxVm",
258 | "readiness_probe": {
259 | "connection_type": "SSH",
260 | "retries": "5",
261 | "connection_protocol": "",
262 | "connection_port": 22,
263 | "address": "@@{platform.status.resources.nic_list[0].ip_endpoint_list[0].ip}@@",
264 | "delay_secs": "60",
265 | "disable_readiness_probe": false,
266 | "login_credential_local_reference": {
267 | "kind": "app_credential",
268 | "name": "teccadmin"
269 | }
270 | },
271 | "editables": {
272 | "create_spec": {
273 | "resources": {
274 | "nic_list": {},
275 | "serial_port_list": {}
276 | }
277 | }
278 | },
279 | "os_type": "Linux",
280 | "create_spec": {
281 | "name": "vm-@@{calm_array_index}@@-@@{calm_time}@@",
282 | "resources": {
283 | "nic_list": [{
284 | "nic_type": "NORMAL_NIC",
285 | "ip_endpoint_list": [],
286 | "network_function_chain_reference": null,
287 | "network_function_nic_type": "INGRESS",
288 | "mac_address": "",
289 | "subnet_reference": {
290 | "kind": "subnet",
291 | "type": "",
292 | "name": "Primary",
293 | "uuid": "3b6477b0-2cdb-4a3b-9ee8-d93563738959"
294 | },
295 | "type": ""
296 | }],
297 | "serial_port_list": [],
298 | "guest_tools": null,
299 | "num_vcpus_per_socket": 1,
300 | "num_sockets": 1,
301 | "gpu_list": [],
302 | "memory_size_mib": 1024,
303 | "parent_reference": null,
304 | "hardware_clock_timezone": "",
305 | "guest_customization": {
306 | "cloud_init": {
307 | "meta_data": "",
308 | "type": "",
309 | "user_data": "#cloud-config\nusers:\n - name: @@{teccadmin.username}@@\n ssh-authorized-keys:\n - @@{InstancePublicKey}@@\n - @@{teccadmin.public_key}@@\n sudo: ['ALL=(ALL) NOPASSWD:ALL']\n"
310 | },
311 | "type": "",
312 | "sysprep": null
313 | },
314 | "power_state": "ON",
315 | "type": "",
316 | "account_uuid": "24beaac4-5024-4d96-9b93-d9bbf520f645",
317 | "boot_config": {
318 | "boot_device": {
319 | "type": "",
320 | "disk_address": {
321 | "type": "",
322 | "device_index": 0,
323 | "adapter_type": "SCSI"
324 | }
325 | },
326 | "type": "",
327 | "boot_type": "",
328 | "mac_address": ""
329 | },
330 | "disk_list": [{
331 | "data_source_reference": {
332 | "kind": "app_package",
333 | "type": "",
334 | "name": "CentosImage",
335 | "uuid": "288eb91b-989a-44f2-b836-9dcc78deca58"
336 | },
337 | "type": "",
338 | "disk_size_mib": 0,
339 | "volume_group_reference": null,
340 | "device_properties": {
341 | "type": "",
342 | "disk_address": {
343 | "type": "",
344 | "device_index": 0,
345 | "adapter_type": "SCSI"
346 | },
347 | "device_type": "DISK"
348 | }
349 | }]
350 | },
351 | "availability_zone_reference": null,
352 | "backup_policy": null,
353 | "type": "",
354 | "cluster_reference": null,
355 | "categories": ""
356 | },
357 | "variable_list": []
358 | }],
359 | "credential_definition_list": [{
360 | "username": "teccadmin",
361 | "description": "",
362 | "type": "KEY",
363 | "secret": {
364 | "attrs": {
365 | "is_secret_modified": false,
366 | "secret_reference": {}
367 | }
368 | },
369 | "name": "teccadmin"
370 | }],
371 | "package_definition_list": [{
372 | "description": "",
373 | "action_list": [],
374 | "type": "CUSTOM",
375 | "service_local_reference_list": [{
376 | "kind": "app_service",
377 | "name": "BasicLinux"
378 | }],
379 | "name": "BasicLinuxPackage",
380 | "version": "",
381 | "options": {
382 | "install_runbook": {
383 | "task_definition_list": [{
384 | "target_any_local_reference": {
385 | "kind": "app_service",
386 | "name": "BasicLinux"
387 | },
388 | "retries": "0",
389 | "description": "",
390 | "child_tasks_local_reference_list": [{
391 | "kind": "app_task",
392 | "name": "ConfigureBaseVM"
393 | },
394 | {
395 | "kind": "app_task",
396 | "name": "ConfigureFirewall"
397 | }
398 | ],
399 | "name": "BasicLinuxPackage___install___dag",
400 | "attrs": {
401 | "edges": [{
402 | "from_task_reference": {
403 | "kind": "app_task",
404 | "name": "ConfigureBaseVM"
405 | },
406 | "edge_type": "user_defined",
407 | "type": "",
408 | "to_task_reference": {
409 | "kind": "app_task",
410 | "name": "ConfigureFirewall"
411 | }
412 | }],
413 | "type": ""
414 | },
415 | "timeout_secs": "0",
416 | "type": "DAG",
417 | "variable_list": []
418 | },
419 | {
420 | "target_any_local_reference": {
421 | "kind": "app_service",
422 | "name": "BasicLinux"
423 | },
424 | "retries": "0",
425 | "description": "",
426 | "child_tasks_local_reference_list": [],
427 | "name": "ConfigureBaseVM",
428 | "attrs": {
429 | "exit_status": [],
430 | "script": "#!\/bin\/bash\n\n# update base OS packages\nsudo yum -y update\nsudo yum -y upgrade\n\n# install some useful packages\nsudo yum -y install vim git\n",
431 | "script_type": "sh",
432 | "type": "",
433 | "command_line_args": "",
434 | "login_credential_local_reference": {
435 | "kind": "app_credential",
436 | "name": "teccadmin"
437 | }
438 | },
439 | "timeout_secs": "0",
440 | "type": "EXEC",
441 | "variable_list": []
442 | },
443 | {
444 | "target_any_local_reference": {
445 | "kind": "app_service",
446 | "name": "BasicLinux"
447 | },
448 | "retries": "0",
449 | "description": "",
450 | "child_tasks_local_reference_list": [],
451 | "name": "ConfigureFirewall",
452 | "attrs": {
453 | "exit_status": [],
454 | "script": "#!\/bin\/bash\n\n# install and enable firewalld\nsudo yum -y install firewalld\nsudo systemctl enable firewalld\nsudo systemctl start firewalld\n",
455 | "script_type": "sh",
456 | "type": "",
457 | "command_line_args": "",
458 | "login_credential_local_reference": {
459 | "kind": "app_credential",
460 | "name": "teccadmin"
461 | }
462 | },
463 | "timeout_secs": "0",
464 | "type": "EXEC",
465 | "variable_list": []
466 | }
467 | ],
468 | "description": "",
469 | "name": "BasicLinuxPackage___install___runbook",
470 | "main_task_local_reference": {
471 | "kind": "app_task",
472 | "name": "BasicLinuxPackage___install___dag"
473 | },
474 | "variable_list": []
475 | },
476 | "type": "",
477 | "uninstall_runbook": {
478 | "task_definition_list": [{
479 | "target_any_local_reference": {
480 | "kind": "app_service",
481 | "name": "BasicLinux"
482 | },
483 | "retries": "0",
484 | "description": "",
485 | "child_tasks_local_reference_list": [],
486 | "name": "DAG_Task_for_Package_BasicLinuxPackage_action_uninstall",
487 | "attrs": {
488 | "edges": [],
489 | "type": ""
490 | },
491 | "timeout_secs": "0",
492 | "type": "DAG",
493 | "variable_list": []
494 | }],
495 | "description": "",
496 | "name": "Runbook_for_Package_BasicLinuxPackage_action_uninstall",
497 | "main_task_local_reference": {
498 | "kind": "app_task",
499 | "name": "DAG_Task_for_Package_BasicLinuxPackage_action_uninstall"
500 | },
501 | "variable_list": []
502 | }
503 | },
504 | "variable_list": []
505 | },
506 | {
507 | "description": "https:\/\/cloud.centos.org\/centos\/7\/images\/",
508 | "action_list": [],
509 | "type": "SUBSTRATE_IMAGE",
510 | "service_local_reference_list": [],
511 | "name": "CentosImage",
512 | "version": "",
513 | "options": {
514 | "type": "",
515 | "name": "CentOS-7.8-2003",
516 | "resources": {
517 | "image_type": "DISK_IMAGE",
518 | "checksum": {
519 | "checksum_algorithm": "",
520 | "type": "",
521 | "checksum_value": ""
522 | },
523 | "source_uri": "https:\/\/cloud.centos.org\/centos\/7\/images\/CentOS-7-x86_64-GenericCloud-2003.qcow2",
524 | "version": {
525 | "product_version": "7.8",
526 | "type": "",
527 | "product_name": "CentOS"
528 | },
529 | "architecture": "X86_64",
530 | "type": ""
531 | },
532 | "description": ""
533 | },
534 | "variable_list": []
535 | }
536 | ],
537 | "app_profile_list": [{
538 | "deployment_create_list": [{
539 | "type": "GREENFIELD",
540 | "action_list": [],
541 | "name": "basiclinuxvm_deployment",
542 | "min_replicas": "1",
543 | "default_replicas": "1",
544 | "depends_on_list": [],
545 | "published_service_local_reference_list": [],
546 | "max_replicas": "1",
547 | "package_local_reference_list": [{
548 | "kind": "app_package",
549 | "name": "BasicLinuxPackage"
550 | }],
551 | "substrate_local_reference": {
552 | "kind": "app_substrate",
553 | "name": "BasicLinuxVm"
554 | },
555 | "options": {
556 | "type": ""
557 | },
558 | "variable_list": [],
559 | "description": ""
560 | }],
561 | "description": "",
562 | "action_list": [],
563 | "name": "Default",
564 | "variable_list": [{
565 | "val_type": "STRING",
566 | "is_mandatory": false,
567 | "description": "",
568 | "data_type": "BASE",
569 | "type": "LOCAL",
570 | "name": "InstancePublicKey",
571 | "value": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCm+7N2tjmJw5jhPmD8MS6urZQJB42ABh73ffGQSJ0XUHgdEDfjUDFkLK0wyJCe0sF5QJnh07UQn0F0BUnBi+VwehPGeODh6S43OP5YS\/14L0fyntFI06B9lckx\/ygRNu82sHxXCX+6VVUFPOPC+sz6j1DQswKY9d4cEYnaMBGSzqRxrqAIf6aWIKTJTYKPFY0zaUZ6ow2iwS0Nlh5EqaXsEBWkqMmr7\/auP9GV\/adUgzFrGLJklYBdfH575SIK6\/PZL6wNT0jE9LmFlEm7dI01ZWPclBuV16FzRyrnzmWr\/ebY62A04vYBtR0vyfEfsW2ZgxgD6aAE6+ytj0v19y0elRtOaeTySN\/HlXh7owKWCHnlXNpTUiSDP8SQ8LRARkhQu3KEDL0ppGCrSF87oFkp1gPzf92U+UK3LaNMMjZXMOy0zLoLEdLtbQo6S8iHggDoX4NI4sWWxcX0mtadvjy\/nIOvskk9IXasQh0u0MT9ARQY5VXPluKDtEVdeow9UbvgJ1xxNkphUgsWjCiy+sjgapsuZvWqKM6TPT1i24XYaau+\/Fa0vhjLb8vCMWrrtkRwGt4re243NDYcYWTzVZUFuUK0w1wqt77KgjCCeyJdsZNwrh15v780Fjqpec3EGVA0xyNbF0jn\/tsnYy9jPh\/6Cv767EratI97JhUxoB4gXw== no-reply@acme.com",
572 | "label": "",
573 | "attrs": {
574 | "type": ""
575 | },
576 | "editables": {
577 | "value": true
578 | },
579 | "is_hidden": false
580 | }]
581 | }],
582 | "published_service_definition_list": [],
583 | "default_credential_local_reference": {
584 | "kind": "app_credential",
585 | "name": "teccadmin"
586 | },
587 | "type": "USER"
588 | },
589 | "name": "BasicLinuxVM-cr"
590 | },
591 | "api_version": "3.0",
592 | "metadata": {
593 | "last_update_time": "1601428495728975",
594 | "kind": "blueprint",
595 | "spec_version": 4,
596 | "creation_time": "1597302209717229",
597 | "name": "BasicLinuxVM-cr"
598 | }
599 | }
--------------------------------------------------------------------------------
/HybridCloudEngineer/eval/script/eval.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 | import os
5 | from datetime import datetime
6 | import glob
7 | import json
8 | import humanize
9 | from dotty_dict import dotty
10 | from EvaluationItem import EvaluationItem
11 | from EnvironmentOptions import EnvironmentOptions
12 | from Messages import Messages
13 |
14 |
15 | def pretty_time():
16 | '''
17 | return the current time as a formatted HH:MM:SS string
18 | '''
19 | now = datetime.now()
20 | return now.strftime("%H:%M:%S")
21 |
22 |
23 | def search_json(data, target):
24 | '''
25 | recursive function to search for a specific key
26 | with matching value in the BP JSON
27 | '''
28 | if isinstance(data, list):
29 | for elem in data:
30 | yield from search_json(elem, target)
31 | if isinstance(data, dict):
32 | if target in data.keys():
33 | yield data
34 | for key, value in data.items():
35 | yield from search_json(value, target)
36 |
37 |
38 | def eval_criteria(EvalCriteria: EvaluationItem):
39 | '''
40 | function to evaluate a specific set of blueprint criteria
41 | '''
42 |
43 | '''
44 | for each set of criteria passed, we need to check
45 | what type of check we are doing. this dictates how the
46 | found value is compared to the expected value
47 | e.g. 'instances' will expect the criteria to be a list
48 | or dict, and so on
49 | '''
50 | passed = False
51 | found_message = ''
52 | if EvalCriteria.eval_type == 'instances':
53 | if len(EvalCriteria.criteria) == EvalCriteria.expected:
54 | passed = True
55 | found_message = len(EvalCriteria.criteria)
56 | else:
57 | passed = False
58 | found_message = len(EvalCriteria.criteria)
59 | elif EvalCriteria.eval_type == 'number':
60 | pass
61 | elif EvalCriteria.eval_type == 'string':
62 | if EvalCriteria.match_type == 'exact':
63 | if EvalCriteria.criteria == EvalCriteria.expected:
64 | passed = True
65 | found_message = 'matches'
66 | else:
67 | passed = False
68 | found_message = 'no matches'
69 | else:
70 | if EvalCriteria.expected in EvalCriteria.criteria:
71 | passed = True
72 | found_message = 'matches'
73 | else:
74 | passed = False
75 | found_message = 'no matches'
76 |
77 | if passed:
78 | prefix = messages.passed
79 | summary = True
80 | else:
81 | prefix = messages.fail
82 | summary = True
83 |
84 | '''
85 | now that the evaluation item has been processed,
86 | we can display the results
87 | '''
88 | if summary:
89 | if environment_options.debug:
90 | print(f'{prefix} '
91 | + f'{EvalCriteria.eval_type} | '
92 | + f'{EvalCriteria.match_type} | '
93 | + f'{EvalCriteria.description} | '
94 | + 'Expected '
95 | + f'{EvalCriteria.expected} '
96 | + f'| Found {found_message}')
97 | else:
98 | print(f'{prefix} '
99 | + f'{EvalCriteria.eval_type} | '
100 | + f'{EvalCriteria.match_type} | '
101 | + f'{EvalCriteria.description} | '
102 | + 'Expected '
103 | + f'{EvalCriteria.expected} ')
104 |
105 |
106 | def show_result(passed: bool, title: str,
107 | expected: str = None,
108 | found: str = None):
109 | '''
110 | show the pass/fail result of a single evaluation check;
111 | in debug mode the expected and found values are also shown
112 | '''
113 | if environment_options.debug:
114 | print(f'{messages.passed} | '
115 | + f'{title} | '
116 | + f'{expected} | '
117 | + f'{found}' if passed
118 | else f'{messages.fail} | '
119 | + f'{title} | '
120 | + f'{expected} | '
121 | + f'{found}')
122 | else:
123 | print(f'{messages.passed} | '
124 | + f'{title}' if passed
125 | else
126 | f'{messages.fail} | '
127 | + f'{title}')
128 |
129 |
130 | def process_json(bp: str):
131 | '''
132 | process the blueprint
133 | '''
134 | print(f'{messages.line}')
135 | # verify the specified blueprint exists
136 | if os.path.exists(f'{bp}'):
137 | print(f'{messages.info} Blueprint found. Continuing.')
138 | # with the blueprint found, verify that it is valid JSON
139 | print(f'{messages.info} Validating JSON content of {bp}.')
140 | try:
141 | with open(f'{bp}', 'r') as json_file:
142 | bp_json = json.loads(json_file.read())
143 |
144 | '''
145 | test listcomp on the processed JSON file
146 | please leave this line here for now
147 | '''
148 | '''
149 | vcpus = ([[x["create_spec"]["resources"]["num_sockets"]
150 | for x in
151 | bp_json["spec"]["resources"]
152 | ["substrate_definition_list"]
153 | if "num_sockets" in
154 | x["create_spec"]["resources"]]])
155 | '''
156 |
157 | # the specified JSON file has been opened and parsed as JSON
158 | # it can now be processed
159 | print(f'{messages.info} {bp} parsed successfully. '
160 | + 'Processing.')
161 | # cleanup file handles
162 | json_file.close()
163 |
164 | '''
165 | ----------------------
166 | evaluation starts here
167 | ----------------------
168 | '''
169 |
170 | with open(f'{environment_options.criteria}', 'r') as criteria_file:
171 | json_criteria = json.loads(criteria_file.read())
172 | bp_dot = dotty(bp_json)
173 | try:
174 | for eval_key in json_criteria["criteria"]:
175 | # entity lists
176 | if eval_key['type'] == 'entity_lists':
177 | for entity in eval_key['lists']:
178 | correct = len(bp_dot[entity['key']])
179 | if correct == entity['expected']:
180 | show_result(True, entity['key_desc'],
181 | 'Expected '
182 | f'{entity["expected"]}',
183 | f'Found {correct}')
184 | else:
185 | show_result(False, entity['key_desc'],
186 | 'Expected '
187 | f'{entity["expected"]}',
188 | f'Found {correct}')
189 | # disk images
190 | if eval_key['type'] == 'source_uri':
191 | for image in bp_dot[eval_key['key']]:
192 | if 'options' in image:
193 | image_dot = dotty(image)
194 | if 'resources' in image_dot['options']:
195 | if ('source_uri' in image_dot['options.resources']):
196 | if eval_key['match'] == 'contains':
197 | if eval_key['expected'] in image_dot['options.resources.source_uri']:
198 | show_result(True, eval_key['description'],
199 | f'Expected {eval_key["expected"]}',
200 | 'Found match')
201 | else:
202 | show_result(False, eval_key['description'],
203 | f'Expected {eval_key["expected"]}',
204 | 'Found no match')
205 | else:
206 | if eval_key['expected'] == image_dot['options.resources.source_uri']:
207 | show_result(True, eval_key['description'],
208 | f'Expected {eval_key["expected"]}',
209 | 'Found match')
210 | else:
211 | show_result(False, eval_key['description'],
212 | f'Expected {eval_key["expected"]}',
213 | 'Found no match')
214 | # aws ami and aws instance type
215 | elif eval_key['type'] == 'image_id' or eval_key['type'] == 'instance_type':
216 | for package in search_json(bp_json, eval_key['type']):
217 | correct = package[eval_key['type']]
218 | if correct in eval_key['expected']:
219 | show_result(True, eval_key['description'],
220 | 'Expected match in list',
221 | f'Found {correct}')
222 | else:
223 | show_result(False, eval_key['description'],
224 | 'Expected match in list',
225 | 'No matches found')
226 | # ahv vm names
227 | elif eval_key['type'] == 'ahv_server_names':
228 | for vm in search_json(bp_json, 'create_spec'):
229 | if 'name' in vm:
230 | if vm['type'].lower() == 'ahv_vm':
231 | vm_dot = dotty(vm)
232 | correct = vm_dot['create_spec.name'].lower()
233 | if correct in eval_key['expected']:
234 | show_result(True, eval_key['description'],
235 | 'Expected match in list',
236 | f'Found {correct}')
237 | else:
238 | show_result(False, eval_key['description'],
239 | 'Expected match in list',
240 | f'Found {correct}')
241 | # aws vm names
242 | elif eval_key['type'] == 'aws_server_names':
243 | for vm in search_json(bp_json, 'create_spec'):
244 | if 'name' in vm:
245 | if vm['type'].lower() == 'aws_vm':
246 | vm_dot = dotty(vm)
247 | correct = vm_dot['create_spec.name'].lower()
248 | if correct in eval_key['expected']:
249 | show_result(True, eval_key['description'],
250 | 'Expected match in list',
251 | f'Found {correct}')
252 | else:
253 | show_result(False, eval_key['description'],
254 | 'Expected match in list',
255 | f'Found {correct}')
256 | # web max replicas
257 | elif eval_key['type'] == 'web_max_replicas':
258 | for vm in search_json(bp_json, 'max_replicas'):
259 | vm_dot = dotty(vm)
260 | for each in eval_key['names']:
261 | if vm_dot['substrate_local_reference.name'].lower() == each:
262 | if int(vm_dot['max_replicas']) == eval_key['expected']:
263 | show_result(True, eval_key['description'],
264 | f'Expected {eval_key["expected"]}',
265 | f'Found {vm_dot["max_replicas"]}')
266 | else:
267 | show_result(False, eval_key['description'],
268 | f'Expected {eval_key["expected"]}',
269 | f'Found {vm_dot["max_replicas"]}')
270 | # web min replicas
271 | elif eval_key['type'] == 'web_min_replicas':
272 | for vm in search_json(bp_json, 'min_replicas'):
273 | vm_dot = dotty(vm)
274 | for each in eval_key['names']:
275 | if vm_dot['substrate_local_reference.name'].lower() == each:
276 | if int(vm_dot['min_replicas']) == eval_key['expected']:
277 | show_result(True, eval_key['description'],
278 | f'Expected {eval_key["expected"]}',
279 | f'Found {vm_dot["min_replicas"]}')
280 | else:
281 | show_result(False, eval_key['description'],
282 | f'Expected {eval_key["expected"]}',
283 | f'Found {vm_dot["min_replicas"]}')
284 | # cloud-init data status
285 | elif eval_key['type'] == "cloud_init":
286 | for vm in search_json(bp_json, "user_data"):
287 | vm_dot = dotty(vm)
288 | # aws vm
289 | if "instance_type" in vm_dot:
290 | if eval_key['expected']['aws_data'] in vm_dot['user_data']:
291 | show_result(True, f'AWS: {eval_key["description"]}',
292 | f'Expected {eval_key["expected"]["aws_data"].encode(encoding="UTF-8")}',
293 | 'Found match')
294 | else:
295 | show_result(False, f'AWS: {eval_key["description"]}',
296 | f'Expected {eval_key["expected"]["aws_data"].encode(encoding="UTF-8")}',
297 | 'Found no matching data')
298 | # ahv vm
299 | else:
300 | if eval_key['expected']['ahv_data'] in vm_dot['user_data']:
301 | show_result(True, f'AHV: {eval_key["description"]}',
302 | f'Expected {eval_key["expected"]["ahv_data"].encode(encoding="UTF-8")}',
303 | 'Found match')
304 | else:
305 | show_result(False, f'AHV: {eval_key["description"]}',
306 | f'Expected {eval_key["expected"]["ahv_data"].encode(encoding="UTF-8")}',
307 | 'Found no matching data')
308 | # credentials
309 | elif eval_key['type'] == 'credentials':
310 | for credential in bp_dot[eval_key['key']]:
311 | cred_type = credential['type'].lower()
312 | username = credential['username'].lower()
313 | if cred_type in eval_key['expected']['types'] and username in eval_key['expected']['usernames']:
314 | show_result(True, f'{eval_key["description"]}',
315 | 'Expected matches in list',
316 | f'Found credential of type {cred_type} and username as {username}')
317 | else:
318 | show_result(False, f'{eval_key["description"]}',
319 | 'Expected matches in list',
320 | f'Found credential of type {cred_type} and username as {username}')
321 | # course 2 vm sizing
322 | elif eval_key['type'] == 'c2_sizing':
323 | for substrate in bp_dot[eval_key['key']]:
324 | for size in eval_key['vm_sizes']:
325 | if substrate['create_spec']['name'].lower() == size['specs']['name'].lower():
326 | if substrate['create_spec']['resources']['num_vcpus_per_socket'] == size['specs']['vm_spec']['num_vcpus_per_socket']:
327 | show_result(True, f'{eval_key["description"]} - '
328 | + f'{size["specs"]["spec_desc"]} num vCPUS Per Socket',
329 | f'Expected {size["specs"]["vm_spec"]["num_vcpus_per_socket"]}',
330 | f'Found {substrate["create_spec"]["resources"]["num_vcpus_per_socket"]}')
331 | else:
332 | show_result(False, f'{eval_key["description"]} - '
333 | + f'{size["specs"]["spec_desc"]} num vCPUS Per Socket',
334 | f'Expected {size["specs"]["vm_spec"]["num_vcpus_per_socket"]}',
335 | f'Found {substrate["create_spec"]["resources"]["num_vcpus_per_socket"]}')
336 | if substrate['create_spec']['resources']['num_sockets'] == size['specs']['vm_spec']['num_sockets']:
337 | show_result(True, f'{eval_key["description"]} - '
338 | + f'{size["specs"]["spec_desc"]} Num Sockets',
339 | f'Expected {size["specs"]["vm_spec"]["num_sockets"]}',
340 | f'Found {substrate["create_spec"]["resources"]["num_sockets"]}')
341 | else:
342 | show_result(False, f'{eval_key["description"]} - '
343 | + f'{size["specs"]["spec_desc"]} Num Sockets',
344 | f'Expected {size["specs"]["vm_spec"]["num_sockets"]}',
345 | f'Found {substrate["create_spec"]["resources"]["num_sockets"]}')
346 | if substrate['create_spec']['resources']['memory_size_mib'] == size['specs']['vm_spec']['memory_size_mib']:
347 | show_result(True, f'{eval_key["description"]} - '
348 | + f'{size["specs"]["spec_desc"]} Memory MiB',
349 | f'Expected {size["specs"]["vm_spec"]["memory_size_mib"]}',
350 | f'Found {substrate["create_spec"]["resources"]["memory_size_mib"]}')
351 | else:
352 | show_result(False, f'{eval_key["description"]} - '
353 | + f'{size["specs"]["spec_desc"]} Memory MiB',
354 | f'Expected {size["specs"]["vm_spec"]["memory_size_mib"]}',
355 | f'Found {substrate["create_spec"]["resources"]["memory_size_mib"]}')
356 | # course 3 vm sizing and specs
357 | elif eval_key['type'] == 'c3_sizing':
358 | for size in eval_key['vm_sizes']:
359 |
360 | # vm count per profile
361 | vms = ([x for x in
362 | bp_dot[eval_key['key']]
363 | if size['size'] in x["name"].lower()
364 | and "num_sockets" in
365 | x["create_spec"]["resources"]])
366 | if len(vms) == size['count']:
367 | show_result(True, f'{eval_key["description"]} - {size["specs"]["spec_desc"]} ({size["size"]})',
368 | f'Expected {size["count"]}',
369 | f'Found {len(vms)}')
370 | else:
371 | show_result(False, f'{eval_key["description"]} - {size["specs"]["spec_desc"]} ({size["size"]})',
372 | f'Expected {size["count"]}',
373 | f'Found {len(vms)}')
374 |
375 | # credential verification/security standards
376 | cred_checks = ([x for x in
377 | bp_dot[eval_key['key']]
378 | if x["readiness_probe"]['login_credential_local_reference']['name'].lower() == size["specs"]["credential_name"]
379 | and size["specs"]["partial_name"] in x["create_spec"]["name"].lower()])
380 | if len(cred_checks) > 0:
381 | show_result(True, f'{size["specs"]["spec_desc"]} Security Standards',
382 | f'Expected credential named {size["specs"]["credential_name"]}',
383 | 'Found matches')
384 |
385 | '''
386 | for substrate in bp_dot[eval_key['key']]:
387 | if 'mysql' in substrate['name'].lower():
388 | if str(substrate['readiness_probe']['login_credential_local_reference']['name']).lower() == 'teccdba':
389 | show_result(True, f'{eval_key["description"]} (MySQL Credential Name)',
390 | f'Expected "teccdba"',
391 | f'Found "{substrate["readiness_probe"]["login_credential_local_reference"]["name"]}"')
392 | else:
393 | show_result(False, f'{eval_key["description"]} (MySQL Credential Name)',
394 | f'Expected "teccdba"',
395 | f'Found "{substrate["readiness_probe"]["login_credential_local_reference"]["name"]}"')
396 | elif 'web' in substrate['name'].lower():
397 | if str(substrate['readiness_probe']['login_credential_local_reference']['name']).lower() == 'teccadmin':
398 | show_result(True, f'{eval_key["description"]} (Web Server Credential Name)',
399 | f'Expected "teccadmin"',
400 | f'Found "{substrate["readiness_probe"]["login_credential_local_reference"]["name"]}"')
401 | else:
402 | show_result(False, f'{eval_key["description"]} (Web Server Credential Name)',
403 | f'Expected "teccadmin"',
404 | f'Found "{substrate["readiness_probe"]["login_credential_local_reference"]["name"]}"')
405 | elif 'haproxy' in substrate['name'].lower():
406 | if str(substrate['readiness_probe']['login_credential_local_reference']['name']).lower() == 'teccadmin':
407 | show_result(True, f'{eval_key["description"]} (Load Balancer Credential Name)',
408 | f'Expected "teccadmin"',
409 | f'Found "{substrate["readiness_probe"]["login_credential_local_reference"]["name"]}"')
410 | else:
411 | show_result(False, f'{eval_key["description"]} (Load Balancer Credential Name)',
412 | f'Expected "teccadmin"',
413 | f'Found "{substrate["readiness_probe"]["login_credential_local_reference"]["name"]}"')
414 | '''
415 | except Exception as e:
416 | print(f'{messages.error} Exception during evaluation: {e}')
417 |
418 | '''
419 | ----------------------
420 | evaluation ends here
421 | ----------------------
422 | '''
423 |
424 | except KeyError as e:
425 | print(f'{messages.error} The {e} JSON key was not found '
426 | + 'in the specified Blueprint spec. Please check '
427 | + 'the key, then try again.')
428 | if environment_options.debug:
429 | print(f'{messages.error} Exception details: {e}')
430 | except json.decoder.JSONDecodeError as e:
431 | print(f'{messages.error} The {bp} JSON file could not be parsed. '
432 | + 'Is it a valid Nutanix Calm Blueprint?')
433 | if environment_options.debug:
434 | print(f'{messages.error} Exception details: {e}')
435 | else:
436 | print(f'{messages.error} Blueprint not found. Exiting.')
437 | sys.exit()
438 | print(f'{messages.info} Evaluation of {bp} completed. '
439 | + 'Please see results above.')
440 |
441 |
442 | def main():
443 | '''
444 | main entry point into the script
445 | '''
446 |
447 | # store the start time
448 | # used for duration stats later
449 | start_time = datetime.now()
450 |
451 | global environment_options
452 | global messages
453 | environment_options = EnvironmentOptions()
454 | environment_options.get_options()
455 | messages = Messages().prefixes
456 |
457 | if environment_options.debug:
458 | print(f'{environment_options}\n')
459 |
460 | print(f'{messages.info} Evaluation script started at {pretty_time()}.')
461 |
462 | print(f'{messages.info} Checking environment.')
463 | # check the environment, first
464 | environment_options.check_environment(messages)
465 | print(f'{messages.ok} Environment OK.')
466 |
467 | # verify the specified blueprint directory exists
468 | if os.path.exists(environment_options.directory):
469 | print(f'{messages.info} Blueprint directory found. Continuing.')
470 | else:
471 | print(f'{messages.error} Blueprint directory not found. Exiting.')
472 | sys.exit()
473 |
474 | # verify the specified criteria file exists
475 | if os.path.exists(f'{environment_options.criteria}'):
476 | print(f'{messages.info} Evaluation criteria file found. Continuing.')
477 | # validate the specified criteria as valid JSON
478 | try:
479 | with open(f'{environment_options.criteria}', 'r') as criteria_file:
480 | json.loads(criteria_file.read())
481 | print(f'{messages.info} Criteria file '
482 | + f'{environment_options.criteria} successfully parsed '
483 | + 'as JSON. Continuing.')
484 | criteria_file.close()
485 | except json.decoder.JSONDecodeError as e:
486 | print(f'{messages.error} The {environment_options.criteria} JSON '
487 | + 'file could not be parsed. Is it valid JSON?')
488 | if environment_options.debug:
489 | print(f'{messages.error} Exception details: {e}')
490 |
491 | else:
492 | print(f'{messages.error} Evaluation criteria file not found. '
493 | + 'Exiting.')
494 | sys.exit()
495 |
496 | # keep track of how many blueprints have been processed
497 | processed_ok = 0
498 |
499 | '''
500 | check to see if the user has indicated they want to parse all blueprints
501 | in the specified blueprint directory
502 | '''
503 | if environment_options.blueprint.lower() == 'all':
504 | print(f'{messages.info} All blueprints in '
505 | + f'{environment_options.directory} '
506 | + 'will be processed.')
507 | bp_list = glob.iglob(f'{environment_options.directory}/*.json')
508 | for bp in bp_list:
509 | process_json(bp)
510 | processed_ok += 1
511 | else:
512 | print(f'{messages.info} Only {environment_options.blueprint} in '
513 | + f'{environment_options.directory} will be processed.')
514 | process_json(f'{environment_options.directory}/'
515 | + f'{environment_options.blueprint}')
516 | processed_ok += 1
517 |
518 | # store the finish time
519 | finish_time = datetime.now()
520 |
521 | # calculate how long the script took to run
522 | duration = finish_time - start_time
523 |
524 | # clean up
525 | print(f'{messages.line}')
526 | print(f'{messages.info} Cleaning up.')
527 | print(f'{messages.info} Processed {processed_ok} blueprints.')
528 | print(f'{messages.info} Evaluation completed in '
529 | + f'{humanize.precisedelta(duration, minimum_unit="seconds")}.')
530 | print(f'{messages.info} Evaluation script finished at {pretty_time()}.\n')
531 |
532 |
533 | if __name__ == '__main__':
534 | main()
535 |
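536 | # Usage notes (a sketch, not authoritative; the attribute names below are the
537 | # ones referenced in main() above, and how they are supplied on the command
538 | # line is handled by EnvironmentOptions, not shown here):
539 | #
540 | #   environment_options.directory  -> folder containing the blueprint JSON files
541 | #   environment_options.criteria   -> path to the JSON evaluation criteria file
542 | #   environment_options.blueprint  -> a single blueprint filename, or "all" to
543 | #                                     process every *.json file in the directory
544 | #   environment_options.debug      -> include expected/found details in the results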
--------------------------------------------------------------------------------