├── infrastructure ├── modules │ ├── read-instances │ │ ├── variables.tf │ │ ├── output.tf │ │ └── read-instances.tf │ ├── EFS │ │ ├── output.tf │ │ ├── variables.tf │ │ └── efs.tf │ ├── VPC │ │ ├── variables.tf │ │ ├── output.tf │ │ └── vpc.tf │ ├── EC2-group │ │ ├── variables.tf │ │ └── ec2.tf │ └── EC2-spot-group │ │ ├── variables.tf │ │ └── spot.tf ├── internal_migration │ ├── variables.tf │ ├── security-group.tf │ └── main.tf ├── external_migration │ ├── variables.tf │ ├── security-group.tf │ └── main.tf ├── external_migration_on_spot │ ├── variables.tf │ ├── security-group.tf │ └── main.tf ├── CPU Feature Visualization - simplized aws group(all, exclude single-element groups).csv └── CPU Feature Visualization - minimized aws group(all).csv ├── .gitignore ├── ssh_scripts ├── criu_cpu_check │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── pku │ ├── func_tracking.yml │ ├── func-tracking-lazybinding.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── rdseed │ ├── func_tracking.yml │ ├── func-tracking-lazybinding.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── redis │ ├── func_tracking.yml │ ├── func-tracking-lazybinding.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── cpp_xgboost │ ├── func_tracking.yml │ ├── func-tracking-lazybinding.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── c_matrix_multiplication │ ├── func_tracking.yml │ ├── func-tracking-lazybinding.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── sha │ ├── func_tracking.yml │ ├── func-tracking-lazybinding.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── adox_adcx │ ├── func_tracking.yml │ ├── func-tracking-lazybinding.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── py_pku │ ├── func_tracking.yml │ ├── entire_scanning.yml │ ├── bytecode_tracking.yml │ ├── 
external-migration-dump.yml │ └── external-migration-restore.yml ├── py_rdseed │ ├── func_tracking.yml │ ├── bytecode_tracking.yml │ ├── entire_scanning.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── ubuntu_container │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── py_rsa │ ├── func_tracking.yml │ ├── entire_scanning.yml │ ├── bytecode_tracking.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── py_sha │ ├── func_tracking.yml │ ├── entire_scanning.yml │ ├── bytecode_tracking.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── py_matmul │ ├── func_tracking.yml │ ├── entire_scanning.yml │ ├── bytecode_tracking.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── py_xgboost │ ├── func_tracking.yml │ ├── entire_scanning.yml │ ├── bytecode_tracking.yml │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── matrix_multiplication │ ├── external-migration-dump.yml │ ├── external-migration-debug.yml │ ├── external-migration-restore.yml │ └── internal-migration.yml ├── rubin │ ├── external-migration-dump.yml │ └── external-migration-restore.yml └── xgboost │ ├── external-migration-dump.yml │ └── external-migration-restore.yml ├── data_processing_for_lscpu ├── entire │ ├── GroupByAWS.py │ ├── MinimizedAwsGroup(all).py │ └── CreateAllCpuFeature.py └── modules │ ├── GspreadUtils.py │ └── ReadCsv.py ├── README.md ├── InternalMigration.py ├── ExternalMigration(all of cases).py └── ExternalMigration(re-experiment).py /infrastructure/modules/read-instances/variables.tf: -------------------------------------------------------------------------------- 1 | variable "file_path" { 2 | type = string 3 | } -------------------------------------------------------------------------------- /infrastructure/modules/read-instances/output.tf: -------------------------------------------------------------------------------- 1 | output 
"instance_group" { 2 | value = local.group 3 | } -------------------------------------------------------------------------------- /infrastructure/modules/EFS/output.tf: -------------------------------------------------------------------------------- 1 | output "efs_dns_name" { 2 | value = aws_efs_file_system.efs.dns_name 3 | } 4 | -------------------------------------------------------------------------------- /infrastructure/modules/VPC/variables.tf: -------------------------------------------------------------------------------- 1 | variable "resource_prefix" { 2 | type = string 3 | } 4 | 5 | variable "availability_zone" { 6 | type = string 7 | } -------------------------------------------------------------------------------- /infrastructure/modules/VPC/output.tf: -------------------------------------------------------------------------------- 1 | output "vpc_id" { 2 | value = aws_vpc.vpc.id 3 | } 4 | 5 | output "public_subnet_id" { 6 | value = aws_subnet.public_subnet.id 7 | } -------------------------------------------------------------------------------- /infrastructure/modules/EFS/variables.tf: -------------------------------------------------------------------------------- 1 | variable "group_number" { 2 | type = number 3 | } 4 | 5 | variable "vpc_id" { 6 | type = string 7 | } 8 | 9 | variable "public_subnet_id" { 10 | type = string 11 | } 12 | 13 | variable "resource_prefix" { 14 | type = string 15 | } 16 | 17 | variable "security_group_id" { 18 | type = string 19 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | infrastructure/*/*.tfstate* 2 | infrastructure/*/.terraform 3 | infrastructure/*/.terraform.* 4 | terraform.log 5 | 6 | ssh_scripts/inventory* 7 | ansible.log 8 | 9 | *DS_Store* 10 | 11 | group*.log 12 | 13 | secure-outpost* 14 | 15 | .vscode/* 16 | 17 | __pycache__ 18 | 19 | 
data_processing_for_lscpu/experiment_failure_cases/* 20 | data_processing_for_lscpu/experiment_success_cases/* 21 | 22 | venv/* -------------------------------------------------------------------------------- /infrastructure/modules/read-instances/read-instances.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_providers { 3 | random = { 4 | source = "hashicorp/random" 5 | version = "3.4.3" 6 | } 7 | } 8 | } 9 | 10 | locals { 11 | group_list = [ 12 | for row in csvdecode(file(var.file_path)) : row["feature groups"] 13 | ] 14 | 15 | group = [ 16 | for group in local.group_list : 17 | split(", ", group) 18 | ] 19 | } -------------------------------------------------------------------------------- /infrastructure/modules/EFS/efs.tf: -------------------------------------------------------------------------------- 1 | resource "aws_efs_file_system" "efs" { 2 | creation_token = "${var.resource_prefix}_efs_${var.group_number}" 3 | performance_mode = "generalPurpose" 4 | encrypted = true 5 | 6 | tags = { 7 | Name = "${var.resource_prefix}_efs_${var.group_number}" 8 | } 9 | } 10 | 11 | resource "aws_efs_mount_target" "mount_target" { 12 | file_system_id = aws_efs_file_system.efs.id 13 | subnet_id = var.public_subnet_id 14 | security_groups = [var.security_group_id] 15 | } 16 | -------------------------------------------------------------------------------- /infrastructure/internal_migration/variables.tf: -------------------------------------------------------------------------------- 1 | variable "region" { 2 | type = string 3 | default = "us-west-2" 4 | } 5 | 6 | variable "resource_prefix" { 7 | type = string 8 | default = "migration" 9 | } 10 | 11 | variable "availability_zone" { 12 | type = string 13 | default = "us-west-2a" 14 | } 15 | 16 | variable "ami_id" { 17 | type = string 18 | default = "ami-0a5f407c50b1500a5" 19 | } 20 | 21 | variable "key_name" { 22 | type = string 23 | default = "junho_us" 24 | } 
25 | 26 | variable "user" { 27 | type = string 28 | default = "ec2-user" 29 | } -------------------------------------------------------------------------------- /infrastructure/modules/EC2-group/variables.tf: -------------------------------------------------------------------------------- 1 | variable "group_number" { 2 | type = number 3 | } 4 | 5 | variable "ami_id" { 6 | type = string 7 | } 8 | 9 | variable "key_name" { 10 | type = string 11 | } 12 | 13 | variable "availability_zone" { 14 | type = string 15 | } 16 | 17 | variable "instance_group" { 18 | type = list 19 | } 20 | 21 | variable "public_subnet_id" { 22 | type = string 23 | } 24 | 25 | variable "security_group_id" { 26 | type = string 27 | } 28 | 29 | variable "efs_dns_name" { 30 | type = string 31 | } 32 | 33 | variable "user" { 34 | type = string 35 | } -------------------------------------------------------------------------------- /infrastructure/external_migration/variables.tf: -------------------------------------------------------------------------------- 1 | variable "region" { 2 | type = string 3 | default = "us-west-2" 4 | } 5 | 6 | variable "resource_prefix" { 7 | type = string 8 | default = "migration" 9 | } 10 | 11 | variable "availability_zone" { 12 | type = string 13 | default = "us-west-2c" 14 | } 15 | 16 | variable "ami_id" { 17 | type = string 18 | default = "ami-0af183276504147b7" 19 | } 20 | 21 | variable "key_name" { 22 | type = string 23 | default = "junho_us" 24 | } 25 | 26 | variable "group" { 27 | type = list 28 | } 29 | 30 | variable "user" { 31 | type = string 32 | default = "ubuntu" 33 | } -------------------------------------------------------------------------------- /infrastructure/external_migration_on_spot/variables.tf: -------------------------------------------------------------------------------- 1 | variable "region" { 2 | type = string 3 | default = "us-west-2" 4 | } 5 | 6 | variable "resource_prefix" { 7 | type = string 8 | default = "migration" 9 | } 10 | 11 | 
variable "availability_zone" { 12 | type = string 13 | default = "us-west-2c" 14 | } 15 | 16 | variable "ami_id" { 17 | type = string 18 | default = "ami-047ea3593e878f22b" 19 | } 20 | 21 | variable "key_name" { 22 | type = string 23 | default = "junho_us" 24 | } 25 | 26 | variable "group" { 27 | type = list 28 | } 29 | 30 | variable "user" { 31 | type = string 32 | default = "ubuntu" 33 | } -------------------------------------------------------------------------------- /infrastructure/modules/EC2-spot-group/variables.tf: -------------------------------------------------------------------------------- 1 | variable "group_number" { 2 | type = number 3 | } 4 | 5 | variable "ami_id" { 6 | type = string 7 | } 8 | 9 | variable "key_name" { 10 | type = string 11 | } 12 | 13 | variable "availability_zone" { 14 | type = string 15 | } 16 | 17 | variable "instance_group" { 18 | type = list(string) 19 | } 20 | 21 | variable "public_subnet_id" { 22 | type = string 23 | } 24 | 25 | variable "security_group_id" { 26 | type = string 27 | } 28 | 29 | variable "efs_dns_name" { 30 | type = string 31 | } 32 | 33 | variable "user" { 34 | type = string 35 | } 36 | 37 | variable "spot_price" { 38 | type = string 39 | description = "The maximum price to request for the spot instance" 40 | } 41 | -------------------------------------------------------------------------------- /ssh_scripts/criu_cpu_check/external-migration-dump.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: set index variable 7 | set_fact: 8 | index: "{{ groups['all'].index(inventory_hostname) }}" 9 | 10 | - name: Initialize source instance 11 | shell: | 12 | mkdir /home/ubuntu/migration_test/dump/{{ index }} 13 | ignore_errors: yes 14 | 15 | - name: cpuinfo dump 16 | shell: | 17 | sudo criu cpuinfo dump -D /home/ubuntu/migration_test/dump/{{ index }} 18 | 19 | - name: 
logging - write the src instance type 20 | shell: | 21 | echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/cpuinfo.log -------------------------------------------------------------------------------- /infrastructure/external_migration/security-group.tf: -------------------------------------------------------------------------------- 1 | resource "aws_security_group" "efs_security_group" { 2 | name_prefix = "${var.resource_prefix}_efs_sg" 3 | 4 | vpc_id = module.vpc.vpc_id 5 | 6 | ingress { 7 | from_port = 2049 8 | to_port = 2049 9 | protocol = "tcp" 10 | cidr_blocks = ["0.0.0.0/0"] 11 | } 12 | } 13 | 14 | resource "aws_security_group" "ec2_security_group" { 15 | name_prefix = "${var.resource_prefix}_ec2_sg" 16 | 17 | vpc_id = module.vpc.vpc_id 18 | 19 | ingress { 20 | from_port = 22 21 | to_port = 22 22 | protocol = "tcp" 23 | cidr_blocks = ["0.0.0.0/0"] 24 | } 25 | 26 | ingress { 27 | from_port = 8888 28 | to_port = 8888 29 | protocol = "tcp" 30 | cidr_blocks = ["0.0.0.0/0"] 31 | } 32 | 33 | egress { 34 | from_port = 0 35 | to_port = 0 36 | protocol = -1 37 | cidr_blocks = ["0.0.0.0/0"] 38 | } 39 | 40 | tags = { 41 | Name = "${var.resource_prefix}_Security_group" 42 | } 43 | } -------------------------------------------------------------------------------- /infrastructure/internal_migration/security-group.tf: -------------------------------------------------------------------------------- 1 | resource "aws_security_group" "efs_security_group" { 2 | name_prefix = "${var.resource_prefix}_efs_sg" 3 | 4 | vpc_id = module.vpc.vpc_id 5 | 6 | ingress { 7 | from_port = 2049 8 | to_port = 2049 9 | protocol = "tcp" 10 | cidr_blocks = ["0.0.0.0/0"] 11 | } 12 | } 13 | 14 | resource "aws_security_group" "ec2_security_group" { 15 | name_prefix = "${var.resource_prefix}_ec2_sg" 16 | 17 | vpc_id = module.vpc.vpc_id 18 | 19 | ingress { 20 | from_port = 22 21 | to_port = 22 22 | protocol = "tcp" 23 | 
cidr_blocks = ["0.0.0.0/0"] 24 | } 25 | 26 | ingress { 27 | from_port = 8888 28 | to_port = 8888 29 | protocol = "tcp" 30 | cidr_blocks = ["0.0.0.0/0"] 31 | } 32 | 33 | egress { 34 | from_port = 0 35 | to_port = 0 36 | protocol = -1 37 | cidr_blocks = ["0.0.0.0/0"] 38 | } 39 | 40 | tags = { 41 | Name = "${var.resource_prefix}_Security_group" 42 | } 43 | } -------------------------------------------------------------------------------- /infrastructure/external_migration_on_spot/security-group.tf: -------------------------------------------------------------------------------- 1 | resource "aws_security_group" "efs_security_group" { 2 | name_prefix = "${var.resource_prefix}_efs_sg" 3 | 4 | vpc_id = module.vpc.vpc_id 5 | 6 | ingress { 7 | from_port = 2049 8 | to_port = 2049 9 | protocol = "tcp" 10 | cidr_blocks = ["0.0.0.0/0"] 11 | } 12 | } 13 | 14 | resource "aws_security_group" "ec2_security_group" { 15 | name_prefix = "${var.resource_prefix}_ec2_sg" 16 | 17 | vpc_id = module.vpc.vpc_id 18 | 19 | ingress { 20 | from_port = 22 21 | to_port = 22 22 | protocol = "tcp" 23 | cidr_blocks = ["0.0.0.0/0"] 24 | } 25 | 26 | ingress { 27 | from_port = 8888 28 | to_port = 8888 29 | protocol = "tcp" 30 | cidr_blocks = ["0.0.0.0/0"] 31 | } 32 | 33 | egress { 34 | from_port = 0 35 | to_port = 0 36 | protocol = -1 37 | cidr_blocks = ["0.0.0.0/0"] 38 | } 39 | 40 | tags = { 41 | Name = "${var.resource_prefix}_Security_group" 42 | } 43 | } -------------------------------------------------------------------------------- /ssh_scripts/pku/func_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | nohup /home/ubuntu/migration_test/pku_scripts/pku > /dev/null 2>&1 & 10 | 11 | - name: Get workload PID 12 | shell: pgrep -f pku 13 | register: PID 14 | 15 | - name: func tracking 16 
| shell: | 17 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 18 | 19 | - name: renaming log 20 | shell: | 21 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 22 | 23 | - name: Upload to S3 - func tracking result 24 | shell: | 25 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func_tracking/pku/ 26 | -------------------------------------------------------------------------------- /ssh_scripts/rdseed/func_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | nohup /home/ubuntu/migration_test/rdseed_scripts/rand > /dev/null 2>&1 & 10 | 11 | - name: Get workload PID 12 | shell: pgrep -f rand 13 | register: PID 14 | 15 | - name: func tracking 16 | shell: | 17 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 18 | 19 | - name: renaming log 20 | shell: | 21 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 22 | 23 | - name: Upload to S3 - func tracking result 24 | shell: | 25 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func_tracking/rdseed/ 26 | -------------------------------------------------------------------------------- /ssh_scripts/redis/func_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | nohup 
/usr/bin/redis-server --port 7777 > /dev/null 2>&1 & 10 | 11 | - name: Get workload PID 12 | shell: pgrep -u ubuntu redis-server 13 | register: PID 14 | 15 | - name: func tracking 16 | shell: | 17 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 18 | 19 | - name: renaming log 20 | shell: | 21 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 22 | 23 | - name: Upload to S3 - func tracking result 24 | shell: | 25 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func_tracking/redis/ 26 | -------------------------------------------------------------------------------- /ssh_scripts/cpp_xgboost/func_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | nohup /home/ubuntu/migration_test/xgboost_scripts/mnist > /dev/null 2>&1 & 10 | 11 | - name: Get workload PID 12 | shell: pgrep -f mnist 13 | register: PID 14 | 15 | - name: func tracking 16 | shell: | 17 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 18 | 19 | - name: renaming log 20 | shell: | 21 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 22 | 23 | - name: Upload to S3 - func tracking result 24 | shell: | 25 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func_tracking/cpp_xgboost/ 26 | -------------------------------------------------------------------------------- /ssh_scripts/c_matrix_multiplication/func_tracking.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | nohup /home/ubuntu/migration_test/matrix_script/matrix_multiplication > /dev/null 2>&1 & 10 | 11 | - name: Get workload PID 12 | shell: pgrep -f matrix_multiplication 13 | register: PID 14 | 15 | - name: func tracking 16 | shell: | 17 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 18 | 19 | - name: renaming log 20 | shell: | 21 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 22 | 23 | - name: Upload to S3 - func tracking result 24 | shell: | 25 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func_tracking/c_matrix_multiplication/ 26 | -------------------------------------------------------------------------------- /ssh_scripts/sha/func_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \ 10 | nohup /home/ubuntu/migration_test/sha_scripts/sha > /dev/null 2>&1 & 11 | 12 | - name: Get workload PID 13 | shell: pidof sha 14 | register: PID 15 | 16 | - name: func tracking 17 | shell: | 18 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 19 | 20 | - name: renaming log 21 | shell: | 22 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 23 | 24 | 
- name: Upload to S3 - func tracking result 25 | shell: | 26 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func_tracking/sha/ 27 | -------------------------------------------------------------------------------- /ssh_scripts/adox_adcx/func_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \ 10 | nohup /home/ubuntu/migration_test/adx_scripts/rsa > /dev/null 2>&1 & 11 | 12 | - name: Get workload PID 13 | shell: pgrep -f rsa 14 | register: PID 15 | 16 | - name: func tracking 17 | shell: | 18 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 19 | 20 | - name: renaming log 21 | shell: | 22 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 23 | 24 | - name: Upload to S3 - func tracking result 25 | shell: | 26 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func_tracking/adox_adcx/ 27 | -------------------------------------------------------------------------------- /ssh_scripts/pku/func-tracking-lazybinding.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: init 7 | shell: | 8 | sudo rm -rf /home/ubuntu/migration_test/ins_disas/log/*.csv 9 | ignore_errors: yes 10 | 11 | - name: Start workload 12 | shell: | 13 | nohup /home/ubuntu/migration_test/pku_scripts/pku > /dev/null 2>&1 & 14 | 15 | - name: Get workload PID 16 | shell: pgrep -f pku 17 | register: PID 18 | 19 | - name: func 
tracking 20 | shell: | 21 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 22 | 23 | - name: renaming log 24 | shell: | 25 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 26 | 27 | - name: Upload to S3 - func tracking result 28 | shell: | 29 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func-tracking-lazybinding/pku/ 30 | -------------------------------------------------------------------------------- /ssh_scripts/redis/func-tracking-lazybinding.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: init 7 | shell: | 8 | sudo rm -rf /home/ubuntu/migration_test/ins_disas/log/*.csv 9 | ignore_errors: yes 10 | 11 | - name: Start workload 12 | shell: | 13 | nohup /usr/bin/redis-server --port 7777 > /dev/null 2>&1 & 14 | 15 | - name: Get workload PID 16 | shell: pgrep -u ubuntu redis-server 17 | register: PID 18 | 19 | - name: func tracking 20 | shell: | 21 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 22 | 23 | - name: renaming log 24 | shell: | 25 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 26 | 27 | - name: Upload to S3 - func tracking result 28 | shell: | 29 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func-tracking-lazybinding/redis/ 30 | -------------------------------------------------------------------------------- /ssh_scripts/rdseed/func-tracking-lazybinding.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration 
compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: init 7 | shell: | 8 | sudo rm -rf /home/ubuntu/migration_test/ins_disas/log/*.csv 9 | ignore_errors: yes 10 | 11 | - name: Start workload 12 | shell: | 13 | nohup /home/ubuntu/migration_test/rdseed_scripts/rand > /dev/null 2>&1 & 14 | 15 | - name: Get workload PID 16 | shell: pgrep -f rand 17 | register: PID 18 | 19 | - name: func tracking 20 | shell: | 21 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 22 | 23 | - name: renaming log 24 | shell: | 25 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 26 | 27 | - name: Upload to S3 - func tracking result 28 | shell: | 29 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func-tracking-lazybinding/rdseed/ 30 | -------------------------------------------------------------------------------- /ssh_scripts/cpp_xgboost/func-tracking-lazybinding.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: init 7 | shell: | 8 | sudo rm -rf /home/ubuntu/migration_test/ins_disas/log/*.csv 9 | ignore_errors: yes 10 | 11 | - name: Start workload 12 | shell: | 13 | nohup /home/ubuntu/migration_test/xgboost_scripts/mnist > /dev/null 2>&1 & 14 | 15 | - name: Get workload PID 16 | shell: pgrep -f mnist 17 | register: PID 18 | 19 | - name: func tracking 20 | shell: | 21 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 22 | 23 | - name: renaming log 24 | shell: | 25 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 26 | 27 | - name: 
Upload to S3 - func tracking result 28 | shell: | 29 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func-tracking-lazybinding/cpp_xgboost/ 30 | -------------------------------------------------------------------------------- /infrastructure/modules/VPC/vpc.tf: -------------------------------------------------------------------------------- 1 | resource "aws_vpc" "vpc" { 2 | 3 | cidr_block = "172.31.0.0/16" 4 | enable_dns_hostnames = true 5 | 6 | tags = { 7 | Name = "${var.resource_prefix}_VPC" 8 | } 9 | } 10 | 11 | resource "aws_subnet" "public_subnet" { 12 | vpc_id = aws_vpc.vpc.id 13 | 14 | availability_zone = var.availability_zone 15 | cidr_block = "172.31.1.0/24" 16 | enable_resource_name_dns_a_record_on_launch = true 17 | map_public_ip_on_launch = true 18 | 19 | tags = { 20 | Name = "${var.resource_prefix}_Subnet" 21 | } 22 | } 23 | 24 | resource "aws_internet_gateway" "igw" { 25 | vpc_id = aws_vpc.vpc.id 26 | 27 | tags = { 28 | Name = "${var.resource_prefix}_Igw" 29 | } 30 | } 31 | 32 | resource "aws_route_table" "route_table" { 33 | vpc_id = aws_vpc.vpc.id 34 | 35 | route { 36 | cidr_block = "0.0.0.0/0" 37 | gateway_id = aws_internet_gateway.igw.id 38 | } 39 | 40 | tags = { 41 | Name = "${var.resource_prefix}_Route_table" 42 | } 43 | } 44 | 45 | resource "aws_route_table_association" "route_table_association" { 46 | subnet_id = aws_subnet.public_subnet.id 47 | route_table_id = aws_route_table.route_table.id 48 | } -------------------------------------------------------------------------------- /ssh_scripts/c_matrix_multiplication/func-tracking-lazybinding.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: init 7 | shell: | 8 | sudo rm -rf /home/ubuntu/migration_test/ins_disas/log/*.csv 9 | ignore_errors: yes 10 | 11 | - name: Start workload 12 | shell: | 13 | nohup 
/home/ubuntu/migration_test/matrix_script/matrix_multiplication > /dev/null 2>&1 & 14 | 15 | - name: Get workload PID 16 | shell: pgrep -f matrix_multiplication 17 | register: PID 18 | 19 | - name: func tracking 20 | shell: | 21 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 22 | 23 | - name: renaming log 24 | shell: | 25 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 26 | 27 | - name: Upload to S3 - func tracking result 28 | shell: | 29 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func-tracking-lazybinding/c_matrix_multiplication/ 30 | -------------------------------------------------------------------------------- /data_processing_for_lscpu/entire/GroupByAWS.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | import copy 4 | 5 | import sys 6 | from pathlib import Path 7 | 8 | # module 경로 추가 9 | sys.path.append(str(Path(__file__).resolve().parent.joinpath('..', 'modules'))) 10 | 11 | import CPUFeatures_h 12 | import GspreadUtils 13 | 14 | CPU_FEATURES = CPUFeatures_h.all_CPU_features_simplification_by_lscpu() 15 | 16 | df = GspreadUtils.read_CPU_Feature_Visualization('all features') 17 | df = df.loc[df['CloudProvider'] == 'AWS'] 18 | 19 | # Extract instance types with the same CPU features 20 | columns = copy.deepcopy(CPU_FEATURES) 21 | columns.insert(0, 'feature groups') 22 | 23 | groupList = [] 24 | flagList = [] 25 | grouped = df.groupby(CPU_FEATURES) 26 | i = 0 27 | 28 | df_new = pd.DataFrame(columns=columns) 29 | 30 | for features, group in grouped: 31 | i += 1 32 | instanceTypes = ', '.join(group['InstanceType'].tolist()) 33 | 34 | eachFlag = group[CPU_FEATURES] 35 | row = eachFlag.iloc[0] 36 | row = row.to_frame().T 37 | row.insert(0, 'feature groups', instanceTypes) 38 | 
39 | df_new = pd.concat([df_new, row], ignore_index=True) 40 | 41 | GspreadUtils.write_CPU_Feature_Visualization('groupby aws(all)', df_new) -------------------------------------------------------------------------------- /ssh_scripts/sha/func-tracking-lazybinding.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: init 7 | shell: | 8 | sudo rm -rf /home/ubuntu/migration_test/ins_disas/log/*.csv 9 | ignore_errors: yes 10 | 11 | - name: Start workload 12 | shell: | 13 | export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \ 14 | nohup /home/ubuntu/migration_test/sha_scripts/sha > /dev/null 2>&1 & 15 | 16 | - name: Get workload PID 17 | shell: pidof sha 18 | register: PID 19 | 20 | - name: func tracking 21 | shell: | 22 | sudo /home/ubuntu/migration_test/ins_disas/measure_overhead_func_tracking.sh {{ PID.stdout_lines[0] }} 23 | 24 | - name: renaming log 25 | shell: | 26 | mv /home/ubuntu/migration_test/ins_disas/log/isa_set.csv /home/ubuntu/migration_test/ins_disas/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 27 | 28 | - name: Upload to S3 - func tracking result 29 | shell: | 30 | aws s3 cp /home/ubuntu/migration_test/ins_disas/log/*.csv s3://migration-compatibility/func-tracking-lazybinding/sha/ 31 | -------------------------------------------------------------------------------- /ssh_scripts/adox_adcx/func-tracking-lazybinding.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: init 7 | shell: | 8 | sudo rm -rf /home/ubuntu/migration_test/ins_disas/log/*.csv 9 | ignore_errors: yes 10 | 11 | - name: Start workload 12 | shell: | 13 | export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \ 14 | nohup 
import re

import sys
from pathlib import Path

# Add the sibling "modules" directory to the import path.
sys.path.append(str(Path(__file__).resolve().parent.joinpath('..', 'modules')))

import GspreadUtils

# Matches the first decimal number in a cost cell (e.g. "$0.0832 hourly").
_PRICE_RE = re.compile(r'\d+\.\d+')


def _cheapest_instance(group, prices):
    """Return (instance_type, hourly_price) for the cheapest member of *group*.

    group  -- list of instance type names.
    prices -- DataFrame with 'Instance' and 'Linux On Demand cost' columns.

    Raises ValueError with a descriptive message when an instance has no
    price row or its cost cell cannot be parsed, instead of failing with an
    opaque numpy/IndexError.
    """
    best_instance = None
    best_price = None
    for instance in group:
        cells = prices.loc[prices['Instance'] == instance, 'Linux On Demand cost'].values
        if len(cells) == 0:
            raise ValueError(f'no price entry for instance {instance!r}')
        found = _PRICE_RE.findall(str(cells[0]))
        if not found:
            raise ValueError(f'unparsable price for instance {instance!r}: {cells[0]!r}')
        price = float(found[0])
        # Compare against None (not a 0 sentinel) so a legitimate 0.0 price
        # would not be silently replaced by a higher one.
        if best_price is None or price < best_price:
            best_instance = instance
            best_price = price
    return best_instance, best_price


df = GspreadUtils.read_CPU_Feature_Visualization('simplized aws group(all)')

# Each cell of 'feature groups' is a comma-separated list of instance types.
groups = [cell.split(', ') for cell in df['feature groups']]

prices = GspreadUtils.read_CPU_Feature_Visualization('ec2 price(us-west-2, 23.05.24)')
prices = prices[['Instance', 'Linux On Demand cost']]

# Pick the cheapest instance of every group and total their hourly cost.
newGroups = []
totalPrice = 0
for group in groups:
    instance, price = _cheapest_instance(group, prices)
    newGroups.append(instance)
    totalPrice += price

print(f"totalPrice : {totalPrice} USD/hour")

# update gspread
df = GspreadUtils.read_CPU_Feature_Visualization('minimized aws group(all)')
df['feature groups'] = newGroups
GspreadUtils.write_CPU_Feature_Visualization('minimized aws group(all)', df)
# External-migration experiment: one VPC, a single shared EFS volume, and one
# EC2 group per entry of var.group (groups come from the minimized CSV).

provider "aws" {
  profile = "default"
  region  = var.region
}

# Parses the CSV into instance groups (one list of instance types per group).
module "read-instances" {
  source    = "../modules/read-instances"
  file_path = "../CPU Feature Visualization - minimized aws group(all).csv"
}

module "vpc" {
  source            = "../modules/VPC"
  resource_prefix   = var.resource_prefix
  availability_zone = var.availability_zone
}

# Single EFS filesystem (group_number 0) shared by every instance group.
module "efs" {
  count             = 1
  source            = "../modules/EFS"
  resource_prefix   = var.resource_prefix
  group_number      = count.index
  vpc_id            = module.vpc.vpc_id
  public_subnet_id  = module.vpc.public_subnet_id
  security_group_id = aws_security_group.efs_security_group.id
}


# One EC2 instance group per requested group number; every group mounts the
# same EFS volume (module.efs[0]).
module "ec2" {
  count             = length(var.group)
  source            = "../modules/EC2-group"
  group_number      = var.group[count.index]
  instance_group    = module.read-instances.instance_group[var.group[count.index]]
  ami_id            = var.ami_id
  key_name          = var.key_name
  availability_zone = var.availability_zone
  public_subnet_id  = module.vpc.public_subnet_id
  security_group_id = aws_security_group.ec2_security_group.id
  efs_dns_name      = module.efs[0].efs_dns_name
  user              = var.user

  depends_on = [
    module.read-instances,
    module.efs
  ]
}
# Spot-instance variant of the external-migration experiment: same VPC/EFS
# layout, but instances are launched through the EC2-spot-group module.

provider "aws" {
  profile = "default"
  region  = var.region
}

# Parses the CSV into instance groups (one list of instance types per group).
module "read-instances" {
  source    = "../modules/read-instances"
  file_path = "../CPU Feature Visualization - minimized aws group(all).csv"
}

module "vpc" {
  source            = "../modules/VPC"
  resource_prefix   = var.resource_prefix
  availability_zone = var.availability_zone
}

# Single EFS filesystem (group_number 0) shared by every instance group.
module "efs" {
  count             = 1
  source            = "../modules/EFS"
  resource_prefix   = var.resource_prefix
  group_number      = count.index
  vpc_id            = module.vpc.vpc_id
  public_subnet_id  = module.vpc.public_subnet_id
  security_group_id = aws_security_group.efs_security_group.id
}

# One spot-instance group per requested group number.
# NOTE(review): spot_price is passed as an empty string — presumably the
# module forwards it so the provider defaults the bid to the on-demand
# price; confirm against modules/EC2-spot-group/spot.tf.
module "spot_instance" {
  count             = length(var.group)
  source            = "../modules/EC2-spot-group"
  group_number      = var.group[count.index]
  spot_price        = ""
  instance_group    = module.read-instances.instance_group[var.group[count.index]]
  ami_id            = var.ami_id
  key_name          = var.key_name
  availability_zone = var.availability_zone
  public_subnet_id  = module.vpc.public_subnet_id
  security_group_id = aws_security_group.ec2_security_group.id
  efs_dns_name      = module.efs[0].efs_dns_name
  user              = var.user

  depends_on = [
    module.read-instances,
    module.efs
  ]
}
---
# Source-side playbook: checkpoint an Ubuntu container with CRIU and leave
# the dump (plus timing logs) in the per-host EFS dump directory, indexed by
# this host's position in the inventory.
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    - name: Initialize container
      shell: /home/ec2-user/migration_test/podman_init.sh -a
      become: true
      ignore_errors: yes

    - name: Initialize source instance
      # -p makes re-runs idempotent: a plain mkdir fails when the per-host
      # dump directory already exists from a previous experiment.
      shell: |
        mkdir -p /home/ec2-user/migration_test/dump/{{ index }}

    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ec2-user/migration_test/dump/{{ index }}/performance.log

    - name: Create container
      shell: /home/ec2-user/migration_test/create_container.sh -u
      become: true

    - name: wait for 5 sec
      wait_for:
        timeout: 5
      register: result

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date) >> /home/ec2-user/migration_test/dump/{{ index }}/timestamp.log

    - name: Container checkpoint
      shell: /home/ec2-user/migration_test/checkpoint.sh -n {{ index }}
      become: true

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date) >> /home/ec2-user/migration_test/dump/{{ index }}/timestamp.log
/data_processing_for_lscpu/entire/CreateAllCpuFeature.py 8 | * Removes CPU features that are either universally present or absent across all instances. 9 | 2. /data_processing_for_lscpu/entire/GroupByAWS.py 10 | * Groups instances with identical CPU features. 11 | 3. /data_processing_for_lscpu/entire/SimplizedAwsGroup(all).py 12 | * Excludes instances that are excessively large or small. 13 | 4. /data_processing_for_lscpu/entire/MinimizedAwsGroup(all).py 14 | * Selects the most cost-effective instances within each group. 15 | 16 | ### 2. Install LiveMigrate-Detector 17 | 18 | Clone LiveMigrate-Detector into the experiment environment. 19 | 20 | git clone https://github.com/ddps-lab/LiveMigrate-Detector.git 21 | 22 | ### 3. Infrastructure Configuration 23 | 24 | 1. Update /infrastructure/*/variables.tf 25 | * Set region, key, AMI ID, and other parameters. 26 | 27 | ### 4. Experiment Execution 28 | 29 | 1. /ExternalMigration(all of cases).py 30 | * Performs migration experiments across instance groups. 31 | 2. /ExternalMigration(re-experiment).py 32 | * Automatically detects and re-executes missing experiment cases, such as failed instance creation. 33 | 3. /InternalMigration.py 34 | * Conducts migration experiments between instances within the same group with identical CPU features. 
-------------------------------------------------------------------------------- /ssh_scripts/py_rsa/func_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: function tracking 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \ 10 | nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/rsa/ && python3 rsa.py > /dev/null 2>&1 &' 11 | 12 | - name: Get workload PID 13 | shell: ps aux | grep "python3 rsa.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}' 14 | register: PID 15 | 16 | - name: func tracking 17 | shell: | 18 | sudo /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/execution_path_tracking.sh {{ PID.stdout_lines[0] }} c 19 | 20 | - name: renaming log 21 | shell: | 22 | mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 23 | 24 | - name: Upload to S3 - func tracking result 25 | shell: | 26 | aws s3 cp /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/func_tracking/pyrsa/ 27 | 28 | - name: Cleanup 29 | shell: | 30 | sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \ 31 | sudo kill -9 {{ PID.stdout_lines[0] }} 32 | ignore_errors: yes 33 | -------------------------------------------------------------------------------- /ssh_scripts/py_sha/func_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: function tracking 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | export 
---
# Destination-side playbook: restore context copied from the source's dump
# directory, run the CRIU cpuinfo compatibility check, and upload the
# per-migration-pair CSV to S3.
- name : Migration compatibility check - dst
  hosts: all
  gather_facts: no
  tasks:
    - name: Initialize destination instance
      # Best effort: clear artifacts left over from previous runs.
      shell: |
        sudo rm -rf /home/ubuntu/migration_test/*.csv; \
        sudo rm -rf /home/ubuntu/migration_test/*.log; \
        sudo rm -rf /home/ubuntu/migration_test/*.img
      ignore_errors: yes

    - name: Copy the log from the dump directory
      # {{ src }} is supplied as an extra var by the driver script — it
      # selects which source host's dump subdirectory to restore from.
      shell: |
        cp /home/ubuntu/migration_test/dump/{{ src }}/* /home/ubuntu/migration_test

    - name: logging - write the dst instance type
      shell: |
        INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \
        echo "dst : $INSTANCE_TYPE" >> /home/ubuntu/migration_test/cpuinfo.log

    - name: cpuinfo check
      shell: |
        python3 /home/ubuntu/migration_test/criu_cpuinfo_check/cpuinfo_check.py

    - name: Log to CSV
      # Renames the CSV to "<src-type>_to_<dst-type>.csv" using the src/dst
      # lines recorded in cpuinfo.log above.
      shell: |
        python3 /home/ubuntu/migration_test/criu_cpuinfo_check/log_to_csv.py
        mv /home/ubuntu/migration_test/cpuinfo.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/cpuinfo.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/cpuinfo.log).csv"

    - name: Upload to S3 - cpuinfo check result
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*.csv s3://migration-compatibility/criu-cpuinfo-check/
      ignore_errors: yes
# Launches one EC2 instance per instance type in the group. user_data mounts
# the group's EFS volume at the dump directory and names the host after its
# instance type; a local Ansible inventory file is rebuilt from the
# instances' public IPs.
resource "aws_instance" "ec2" {
  count             = length(var.instance_group)
  instance_type     = var.instance_group[count.index]
  ami               = var.ami_id
  key_name          = var.key_name
  availability_zone = var.availability_zone
  subnet_id         = var.public_subnet_id

  vpc_security_group_ids = [
    var.security_group_id
  ]

  tags = {
    "Name" = "migration-test_${var.instance_group[count.index]}"
  }

  # NOTE(review): the fixed sleep presumably waits for networking/EFS DNS to
  # become resolvable before mounting — confirm; a retrying mount would be
  # more robust than a hard-coded 60 s.
  user_data = <<-EOF
    #!/bin/bash
    sleep 60
    mount -t nfs -o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport ${var.efs_dns_name}:/ /home/${var.user}/migration_test/dump
    sudo chown ${var.user}:${var.user} /home/${var.user}/migration_test/dump
    sudo timedatectl set-timezone 'Asia/Seoul'
    sudo hostnamectl set-hostname ${var.instance_group[count.index]}
  EOF
}


# Remove any stale inventory file before (re)writing it; "|| true" keeps the
# apply going when the file does not exist yet.
resource "null_resource" "init_inventory" {
  depends_on = [
    aws_instance.ec2
  ]

  provisioner "local-exec" {
    command = "rm ../../ssh_scripts/inventory_${var.group_number}.txt || true"
  }
}

# Append each instance's public IP to the group's inventory file.
resource "null_resource" "write_inventory" {
  count = length(var.instance_group)
  depends_on = [
    null_resource.init_inventory
  ]

  provisioner "local-exec" {
    command = "echo '${aws_instance.ec2[count.index].public_ip}' >> ../../ssh_scripts/inventory_${var.group_number}.txt"
  }
}
---
# Trace the Python pku workload at bytecode level, collect the ISA sets it
# touches, and upload the per-instance-type CSV to S3.
- name: bytecode tracking
  hosts: all
  gather_facts: no
  tasks:
    - name: Start workload
      # NOTE(review): LD_BIND_NOW forces eager symbol resolution — presumably
      # so lazily-bound library code is visible to the tracker; confirm
      # against execution_path_tracking.sh.
      shell: |
        export LD_BIND_NOW=1; \
        nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/pku/ && python3 pku.py > /dev/null 2>&1 &'

    - name: Get workload PID
      # Filter out the wrapping "bash -c" process and the grep itself, keep
      # only the python3 interpreter's PID.
      shell: ps aux | grep "python3 pku.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}'
      register: PID

    - name: bytecode tracking
      shell: |
        sudo /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/execution_path_tracking.sh {{ PID.stdout_lines[0] }} python /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/pku/pku.py

    - name: renaming log
      # Rename the result after this host's EC2 instance type (from IMDS).
      shell: |
        mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv"

    - name: Upload to S3 - func tracking result
      shell: |
        aws s3 cp /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/bytecode_tracking/pypku/

    - name: Cleanup
      shell: |
        sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \
        sudo kill -9 {{ PID.stdout_lines[0] }}
      ignore_errors: yes
name: renaming log 20 | shell: | 21 | mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 22 | 23 | - name: Upload to S3 - func tracking result 24 | shell: | 25 | aws s3 cp /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/bytecode_tracking/pyrdseed/ 26 | 27 | - name: Cleanup 28 | shell: | 29 | sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \ 30 | sudo kill -9 {{ PID.stdout_lines[0] }} 31 | ignore_errors: yes -------------------------------------------------------------------------------- /ssh_scripts/py_xgboost/func_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: function tracking 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \ 10 | nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/xgboost/ && python3 xgb_example.py > /dev/null 2>&1 &' 11 | 12 | - name: Get workload PID 13 | shell: ps aux | grep "python3 xgb_example.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}' 14 | register: PID 15 | 16 | - name: func tracking 17 | shell: | 18 | sudo /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/execution_path_tracking.sh {{ PID.stdout_lines[0] }} c 19 | 20 | - name: renaming log 21 | shell: | 22 | mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 23 | 24 | - name: Upload to S3 - func tracking result 25 | shell: | 26 | aws s3 cp 
/home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/func_tracking/pyxgboost/ 27 | 28 | - name: Cleanup 29 | shell: | 30 | sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \ 31 | sudo kill -9 {{ PID.stdout_lines[0] }} 32 | ignore_errors: yes 33 | -------------------------------------------------------------------------------- /ssh_scripts/py_xgboost/entire_scanning.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: entire scanning 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/xgboost/ && python3 xgb_example.py > /dev/null 2>&1 &' 10 | 11 | - name: Get workload PID 12 | shell: ps aux | grep "python3 xgb_example.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}' 13 | register: PID 14 | 15 | - name: entire scanning 16 | shell: | 17 | sudo /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/text_segment_full_scan.sh {{ PID.stdout_lines[0] }} 18 | 19 | - name: renaming log 20 | shell: | 21 | mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 22 | 23 | - name: Upload to S3 - entire scanning result 24 | shell: | 25 | aws s3 cp /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/entire-scanning/pyxgboost/ 26 | 27 | - name: clean-up process 28 | shell: | 29 | sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \ 30 | sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.txt; \ 31 | kill -9 {{ PID.stdout_lines[0] }} 32 | ignore_errors: yes 
-------------------------------------------------------------------------------- /ssh_scripts/py_matmul/bytecode_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: bytecode tracking 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 | export LD_BIND_NOW=1; \ 9 | nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/matmul/ && python3 matmul.py > /dev/null 2>&1 &' 10 | 11 | - name: Get workload PID 12 | shell: ps aux | grep "python3 matmul.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}' 13 | register: PID 14 | 15 | - name: bytecode tracking 16 | shell: | 17 | sudo /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/execution_path_tracking.sh {{ PID.stdout_lines[0] }} python /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/matmul/matmul.py 18 | 19 | - name: renaming log 20 | shell: | 21 | mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv" 22 | 23 | - name: Upload to S3 - func tracking result 24 | shell: | 25 | aws s3 cp /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/bytecode_tracking/pymatmul/ 26 | 27 | - name: Cleanup 28 | shell: | 29 | sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \ 30 | sudo kill -9 {{ PID.stdout_lines[0] }} 31 | ignore_errors: yes -------------------------------------------------------------------------------- /ssh_scripts/py_xgboost/bytecode_tracking.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: bytecode tracking 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Start workload 7 | shell: | 8 
# file: ssh_scripts/py_rsa/entire_scanning.yml
# Run the rsa Python workload and scan its entire text segment with
# text_segment_full_scan.sh (writes log/isa_set.csv); upload the
# per-instance-type result to S3.
---
- name: entire scanning
  hosts: all
  gather_facts: no
  tasks:
    # LD_LIBRARY_PATH points at a locally built OpenSSL 3.1.3 the workload links against.
    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \
        nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/rsa/ && python3 rsa.py > /dev/null 2>&1 &'

    # Select the python3 process itself, excluding the wrapping "bash -c" and grep.
    - name: Get workload PID
      shell: ps aux | grep "python3 rsa.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}'
      register: PID

    - name: entire scanning
      shell: |
        sudo /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/text_segment_full_scan.sh {{ PID.stdout_lines[0] }}

    # Name the log after this host's EC2 instance type (instance-metadata service).
    - name: renaming log
      shell: |
        mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv"

    - name: Upload to S3 - entire scanning result
      shell: |
        aws s3 cp /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/entire-scanning/pyrsa/

    - name: clean-up process
      shell: |
        sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \
        sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.txt; \
        kill -9 {{ PID.stdout_lines[0] }}
      ignore_errors: yes
# file: ssh_scripts/py_sha/entire_scanning.yml
# Run the sha Python workload and scan its entire text segment with
# text_segment_full_scan.sh; upload the per-instance-type result to S3.
---
- name: entire scanning
  hosts: all
  gather_facts: no
  tasks:
    # LD_LIBRARY_PATH points at a locally built OpenSSL 3.1.3 the workload links against.
    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \
        nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/sha/ && python3 sha.py > /dev/null 2>&1 &'

    # Select the python3 process itself, excluding the wrapping "bash -c" and grep.
    - name: Get workload PID
      shell: ps aux | grep "python3 sha.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}'
      register: PID

    - name: entire scanning
      shell: |
        sudo /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/text_segment_full_scan.sh {{ PID.stdout_lines[0] }}

    # Name the log after this host's EC2 instance type (instance-metadata service).
    - name: renaming log
      shell: |
        mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv"

    - name: Upload to S3 - entire scanning result
      shell: |
        aws s3 cp /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/entire-scanning/pysha/

    - name: clean-up process
      shell: |
        sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \
        sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.txt; \
        kill -9 {{ PID.stdout_lines[0] }}
      ignore_errors: yes
# file: ssh_scripts/py_rdseed/entire_scanning.yml
# Run the rand (rdseed) Python workload and scan its entire text segment with
# text_segment_full_scan.sh; upload the per-instance-type result to S3.
---
- name: entire scanning
  hosts: all
  gather_facts: no
  tasks:
    # LD_LIBRARY_PATH points at a locally built OpenSSL 3.1.3 the workload links against.
    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \
        nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/rand/ && python3 rand.py > /dev/null 2>&1 &'

    # Select the python3 process itself, excluding the wrapping "bash -c" and grep.
    - name: Get workload PID
      shell: ps aux | grep "python3 rand.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}'
      register: PID

    - name: entire scanning
      shell: |
        sudo /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/text_segment_full_scan.sh {{ PID.stdout_lines[0] }}

    # Name the log after this host's EC2 instance type (instance-metadata service).
    - name: renaming log
      shell: |
        mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv"

    - name: Upload to S3 - entire scanning result
      shell: |
        aws s3 cp /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/entire-scanning/pyrdseed/

    - name: clean-up process
      shell: |
        sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \
        sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.txt; \
        kill -9 {{ PID.stdout_lines[0] }}
      ignore_errors: yes
# file: ssh_scripts/py_rsa/bytecode_tracking.yml
# Run the rsa Python workload, trace its execution path with
# execution_path_tracking.sh (writes log/isa_set.csv), and upload the
# per-instance-type result to S3.
---
- name: bytecode tracking
  hosts: all
  gather_facts: no
  tasks:
    # LD_LIBRARY_PATH points at a locally built OpenSSL 3.1.3 the workload links against.
    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \
        nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/rsa/ && python3 rsa.py > /dev/null 2>&1 &'

    # Select the python3 process itself, excluding the wrapping "bash -c" and grep.
    - name: Get workload PID
      shell: ps aux | grep "python3 rsa.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}'
      register: PID

    - name: bytecode tracking
      shell: |
        sudo /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/execution_path_tracking.sh {{ PID.stdout_lines[0] }} python /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/rsa/rsa.py

    # Name the log after this host's EC2 instance type (instance-metadata service).
    - name: renaming log
      shell: |
        mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv"

    # Fixed: task name said "func tracking result" (copy-paste from
    # func_tracking.yml); this playbook uploads the bytecode-tracking result.
    - name: Upload to S3 - bytecode tracking result
      shell: |
        aws s3 cp /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/bytecode_tracking/pyrsa/

    - name: Cleanup
      shell: |
        sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \
        sudo kill -9 {{ PID.stdout_lines[0] }}
      ignore_errors: yes
# file: ssh_scripts/py_sha/bytecode_tracking.yml
# Run the sha Python workload, trace its execution path with
# execution_path_tracking.sh (writes log/isa_set.csv), and upload the
# per-instance-type result to S3.
---
- name: bytecode tracking
  hosts: all
  gather_facts: no
  tasks:
    # LD_LIBRARY_PATH points at a locally built OpenSSL 3.1.3 the workload links against.
    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \
        nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/sha/ && python3 sha.py > /dev/null 2>&1 &'

    # Select the python3 process itself, excluding the wrapping "bash -c" and grep.
    - name: Get workload PID
      shell: ps aux | grep "python3 sha.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}'
      register: PID

    - name: bytecode tracking
      shell: |
        sudo /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/execution_path_tracking.sh {{ PID.stdout_lines[0] }} python /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/sha/sha.py

    # Name the log after this host's EC2 instance type (instance-metadata service).
    - name: renaming log
      shell: |
        mv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/isa_set.csv /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/"$(curl http://169.254.169.254/latest/meta-data/instance-type).csv"

    # Fixed: was "func tracking result" (copy-paste slip).
    - name: Upload to S3 - bytecode tracking result
      shell: |
        aws s3 cp /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv s3://migration-compatibility/bytecode_tracking/pysha/

    - name: Cleanup
      shell: |
        sudo rm -rf /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/log/*.csv; \
        sudo kill -9 {{ PID.stdout_lines[0] }}
      ignore_errors: yes
# file: data_processing_for_lscpu/entire/CreateAllCpuFeature.py
"""Build a 0/1 matrix of lscpu CPU-feature flags per instance and publish it.

Reads the instance/flags CSV, marks for every instance which features from
CPUFeatures_h appear in its lscpu "Flags" string, writes the result to the
"all features" worksheet, and prints summary statistics.
"""
import sys
from pathlib import Path

# Make the sibling "modules" directory importable.
sys.path.append(str(Path(__file__).resolve().parent.joinpath('..', 'modules')))

import ReadCsv
import CPUFeatures_h
import GspreadUtils

CPU_FEATURES = CPUFeatures_h.all_CPU_features_simplification_by_lscpu()

df = ReadCsv.read_csv(CPU_FEATURES)

# For each instance row, set the per-feature columns (laid out directly after
# the "Flags" column, in CPU_FEATURES order) to 1/0 depending on whether the
# feature name appears in the space-separated lscpu "Flags" string.
flags_col = df.columns.get_loc('Flags')
for row in range(len(df)):
    present = set(df.iloc[row, flags_col].split(' '))
    for offset, feature in enumerate(CPU_FEATURES, start=1):
        df.iat[row, flags_col + offset] = 1 if feature in present else 0

df.reset_index(drop=True, inplace=True)

# Move the raw "Flags" column to the last position.
flags_series = df.pop('Flags')
df.insert(len(df.columns), 'Flags', flags_series)

GspreadUtils.write_CPU_Feature_Visualization('all features', df)

# Features absent from every instance.
zero_cols = df.columns[df.eq(0).all(axis=0)].tolist()
print(zero_cols)
print(f"The number of features not exists in all instances : {len(zero_cols)}\n")

# Features present on every instance.
one_cols = df.columns[df.eq(1).all(axis=0)].tolist()

print(one_cols)
print(f"The number of features exists in all instances : {len(one_cols)}\n")

# Total columns minus InstanceType, CloudProvider, Model Name and Flags
# = number of CPU features used by at least one instance.
print(f"Number of CPU features used at least one : {len(df.columns) - 4}")
# file: ssh_scripts/pku/external-migration-dump.yml
# Source-side half of the external-migration experiment: start the pku
# workload, checkpoint it with CRIU into a per-host dump directory on shared
# storage, and upload the detailed CRIU log to S3.
---
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    # Give each inventory host a stable numeric index used as its dump dir name.
    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    # Fixed: the mkdir line ended with a stray "; \" (leftover line
    # continuation with nothing after it, inconsistent with the sibling
    # playbooks).
    - name: Initialize source instance
      shell: |
        mkdir /home/ubuntu/migration_test/dump/{{ index }}
      ignore_errors: yes

    - name: Increase PID
      shell: |
        python3 /home/ubuntu/migration_test/increase_pid.py > /dev/null

    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        nohup /home/ubuntu/migration_test/pku_scripts/pku > /dev/null 2>&1 &

    # 169.254.169.254 is the EC2 instance-metadata service.
    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log

    - name: Get workload PID
      shell: pgrep -f pku
      register: PID

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    # Stats go to performance.log; verbose stderr to "<src-instance-type>-dump.log".
    - name: Checkpoint workload
      shell: |
        sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \
        2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log"

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Upload to S3 - dump detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail
# file: ssh_scripts/rdseed/external-migration-dump.yml
# Source-side half of the external-migration experiment: start the rdseed
# workload, checkpoint it with CRIU into a per-host dump directory, and
# upload the detailed CRIU log to S3.
---
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    # Give each inventory host a stable numeric index used as its dump dir name.
    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    - name: Initialize source instance
      shell: |
        mkdir /home/ubuntu/migration_test/dump/{{ index }}
      ignore_errors: yes

    - name: Increase PID
      shell: |
        python3 /home/ubuntu/migration_test/increase_pid.py > /dev/null

    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        nohup /home/ubuntu/migration_test/rdseed_scripts/rand > /dev/null 2>&1 &

    # 169.254.169.254 is the EC2 instance-metadata service.
    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log

    - name: Get workload PID
      shell: pgrep -f rand
      register: PID

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    # Stats go to performance.log; verbose stderr to "<src-instance-type>-dump.log".
    - name: Checkpoint workload
      shell: |
        sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \
        2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log"

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Upload to S3 - dump detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail
# file: ssh_scripts/cpp_xgboost/external-migration-dump.yml
# Source-side half of the external-migration experiment: start the C++
# xgboost (mnist) workload, checkpoint it with CRIU into a per-host dump
# directory, and upload the detailed CRIU log to S3.
---
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    # Give each inventory host a stable numeric index used as its dump dir name.
    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    - name: Initialize source instance
      shell: |
        mkdir /home/ubuntu/migration_test/dump/{{ index }}
      ignore_errors: yes

    - name: Increase PID
      shell: |
        python3 /home/ubuntu/migration_test/increase_pid.py > /dev/null

    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        nohup /home/ubuntu/migration_test/xgboost_scripts/mnist > /dev/null 2>&1 &

    # 169.254.169.254 is the EC2 instance-metadata service.
    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log

    - name: Get workload PID
      shell: pgrep -f mnist
      register: PID

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    # Stats go to performance.log; verbose stderr to "<src-instance-type>-dump.log".
    - name: Checkpoint workload
      shell: |
        sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \
        2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log"

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Upload to S3 - dump detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail
# file: ssh_scripts/ubuntu_container/external-migration-restore.yml
# Destination-side half of the container-migration experiment: restore the
# dumped container ({{ src }} selects which source's dump to use), verify its
# health, convert the logs to CSV, and upload the result to S3.
---
- name: Migration compatibility check - dst
  hosts: all
  gather_facts: no
  tasks:
    # Fixed typo in task name: "contaier" -> "container".
    - name: Initialize container
      shell: /home/ec2-user/migration_test/podman_init.sh -l
      become: true
      ignore_errors: yes

    - name: copy dump/log to log dir
      shell: |
        cp /home/ec2-user/migration_test/dump/{{ src }}/*.log /home/ec2-user/migration_test/log

    - name: logging - write the restore start time
      shell: |
        echo start restore : $(date) >> /home/ec2-user/migration_test/log/timestamp.log

    # Insert the destination instance type as line 2 of performance.log,
    # right after the "src : ..." line written on the dump side.
    - name: logging - write the dst instance type
      shell: |
        INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type)
        sed -i "2i dst : $INSTANCE_TYPE" /home/ec2-user/migration_test/log/performance.log

    - name: Container restore
      shell: |
        /home/ec2-user/migration_test/restore.sh -n {{ src }}
      become: true

    # Give the restored container a moment before health-checking it.
    - name: wait for 5 sec
      wait_for:
        timeout: 5
      register: result

    - name: logging - write the restore end time
      shell: |
        echo end restore : $(date) >> /home/ec2-user/migration_test/log/timestamp.log

    - name: Container health check
      shell: |
        /home/ec2-user/migration_test/container_health_check.sh
      become: true

    # CSV file name encodes "<src-type>_to_<dst-type>".
    - name: Log to CSV
      shell: |
        python3 /home/ec2-user/migration_test/log_to_csv.py
        mv /home/ec2-user/migration_test/migration_data.csv /home/ec2-user/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ec2-user/migration_test/log/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ec2-user/migration_test/log/performance.log).csv"

    - name: Upload to S3
      shell: |
        aws s3 cp /home/ec2-user/migration_test/*.csv s3://migration-compatibility/Migration-between-groups/ubuntu_container/
# file: data_processing_for_lscpu/modules/GspreadUtils.py
"""Read/write helpers for the project's Google Spreadsheets via gspread."""
import pandas as pd
from pathlib import Path

import gspread as gs
from gspread_formatting import *

data_processing_for_lscpu_path = str(Path(__file__).resolve().parent.parent)

# Service-account credentials are expected in the package root.
gc = gs.service_account(filename=f'{data_processing_for_lscpu_path}/secure-outpost-380004-8d45b1504f3e.json')


def _read_worksheet(spreadsheet, worksheet):
    """Return all records of *worksheet* in *spreadsheet* as a DataFrame."""
    sheet = gc.open(spreadsheet).worksheet(worksheet)
    return pd.DataFrame(sheet.get_all_records())


def _write_worksheet(spreadsheet, worksheet, df):
    """Replace the worksheet contents with *df* and apply default cell formatting."""
    sheet = gc.open(spreadsheet).worksheet(worksheet)
    sheet.clear()  # drop previous data
    sheet.update([df.columns.values.tolist()] + df.values.tolist())

    format_cell = cellFormat(
        verticalAlignment='MIDDLE',
        wrapStrategy='OVERFLOW_CELL',
        textFormat=textFormat(fontSize=10)
    )

    format_cell_range(sheet, '1:500', format_cell)


def read_CPU_Feature_Visualization(worksheet):
    """Read *worksheet* of 'CPU Feature Visualization' into a DataFrame."""
    return _read_worksheet('CPU Feature Visualization', worksheet)


def write_CPU_Feature_Visualization(worksheet, df):
    '''
    The function writes the contents of the dataframe to a Google Spreadsheet.
    '''
    _write_worksheet('CPU Feature Visualization', worksheet, df)


def read_AWS_migration_compatibility(worksheet):
    """Read *worksheet* of 'AWS migration compatibility' into a DataFrame."""
    return _read_worksheet('AWS migration compatibility', worksheet)


def write_AWS_migration_compatibility(worksheet, df):
    '''
    The function writes the contents of the dataframe to a Google Spreadsheet.
    '''
    _write_worksheet('AWS migration compatibility', worksheet, df)
# file: ssh_scripts/c_matrix_multiplication/external-migration-dump.yml
# Source-side half of the external-migration experiment: start the C
# matrix-multiplication workload, checkpoint it with CRIU into a per-host
# dump directory, and upload the detailed CRIU log to S3.
---
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    # Give each inventory host a stable numeric index used as its dump dir name.
    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    - name: Initialize source instance
      shell: |
        mkdir /home/ubuntu/migration_test/dump/{{ index }}
      ignore_errors: yes

    - name: Increase PID
      shell: |
        python3 /home/ubuntu/migration_test/increase_pid.py > /dev/null

    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        nohup /home/ubuntu/migration_test/matrix_script/matrix_multiplication > /dev/null 2>&1 &

    # 169.254.169.254 is the EC2 instance-metadata service.
    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log

    - name: Get workload PID
      shell: pgrep -f matrix_multiplication
      register: PID

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    # Stats go to performance.log; verbose stderr to "<src-instance-type>-dump.log".
    - name: Checkpoint workload
      shell: |
        sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \
        2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log"

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Upload to S3 - dump detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail
# file: infrastructure/modules/EC2-spot-group/spot.tf
# Request one spot instance per entry in var.instance_group, tag them, and
# write their public IPs into an Ansible inventory file for the ssh_scripts.

resource "aws_spot_instance_request" "spot_ec2" {
  count = length(var.instance_group)
  spot_price = var.spot_price
  instance_type = var.instance_group[count.index]
  ami = var.ami_id
  key_name = var.key_name
  availability_zone = var.availability_zone
  subnet_id = var.public_subnet_id

  vpc_security_group_ids = [
    var.security_group_id
  ]

  # Boot script: wait, mount the shared EFS dump directory over NFS, then set
  # timezone and a per-instance-type hostname for the experiment.
  user_data = <<-EOF
  #!/bin/bash
  sleep 60
  mount -t nfs -o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport ${var.efs_dns_name}:/ /home/${var.user}/migration_test/dump
  sudo chown ${var.user}:${var.user} /home/${var.user}/migration_test/dump
  sudo timedatectl set-timezone 'Asia/Seoul'
  sudo hostnamectl set-hostname migration-test-${var.instance_group[count.index]}
  EOF

  # Block until AWS fulfils the spot request (instance attributes such as the
  # public IP are only available after fulfilment).
  wait_for_fulfillment = true

  lifecycle {
    create_before_destroy = true
  }
}

# Attach a Name tag to each launched spot instance.
resource "aws_ec2_tag" "spot_instance_tag" {
  count = length(aws_spot_instance_request.spot_ec2)

  resource_id = aws_spot_instance_request.spot_ec2[count.index].spot_instance_id
  key = "Name"
  value = "migration-test(spot)_${var.instance_group[count.index]}"
}


# Remove any stale inventory file before re-writing it ("|| true" tolerates a
# missing file).
resource "null_resource" "spot_init_inventory" {
  depends_on = [
    aws_spot_instance_request.spot_ec2
  ]

  provisioner "local-exec" {
    command = "rm ../../ssh_scripts/inventory_${var.group_number}.txt || true"
  }
}

# Append each spot instance's public IP to the group's inventory file.
resource "null_resource" "spot_write_inventory" {
  count = length(var.instance_group)
  depends_on = [
    null_resource.spot_init_inventory
  ]

  provisioner "local-exec" {
    when = create
    command = "echo '${element(aws_spot_instance_request.spot_ec2.*.public_ip, count.index)}' >> ../../ssh_scripts/inventory_${var.group_number}.txt"
  }
}
# file: ssh_scripts/sha/external-migration-dump.yml
# Source-side half of the external-migration experiment: start the sha
# workload, checkpoint it with CRIU into a per-host dump directory, and
# upload the detailed CRIU log to S3.
---
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    # Give each inventory host a stable numeric index used as its dump dir name.
    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    - name: Initialize source instance
      shell: |
        mkdir /home/ubuntu/migration_test/dump/{{ index }}
      ignore_errors: yes

    - name: Increase PID
      shell: |
        python3 /home/ubuntu/migration_test/increase_pid.py > /dev/null

    # LD_LIBRARY_PATH points at a locally built OpenSSL 3.1.3 the workload links against.
    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \
        nohup /home/ubuntu/migration_test/sha_scripts/sha > /dev/null 2>&1 &

    # 169.254.169.254 is the EC2 instance-metadata service.
    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log

    # NOTE(review): this playbook uses pidof (exact name match) where siblings
    # use pgrep -f.
    - name: Get workload PID
      shell: pidof sha
      register: PID

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    # Stats go to performance.log; verbose stderr to "<src-instance-type>-dump.log".
    - name: Checkpoint workload
      shell: |
        sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \
        2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log"

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Upload to S3 - dump detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail
# file: ssh_scripts/adox_adcx/external-migration-dump.yml
# Source-side half of the external-migration experiment: start the ADX (rsa)
# workload, checkpoint it with CRIU into a per-host dump directory, and
# upload the detailed CRIU log to S3.
---
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    # Give each inventory host a stable numeric index used as its dump dir name.
    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    - name: Initialize source instance
      shell: |
        mkdir /home/ubuntu/migration_test/dump/{{ index }}
      ignore_errors: yes

    - name: Increase PID
      shell: |
        python3 /home/ubuntu/migration_test/increase_pid.py > /dev/null

    # LD_LIBRARY_PATH points at a locally built OpenSSL 3.1.3 the workload links against.
    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \
        nohup /home/ubuntu/migration_test/adx_scripts/rsa > /dev/null 2>&1 &

    # 169.254.169.254 is the EC2 instance-metadata service.
    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log

    - name: Get workload PID
      shell: pgrep -f rsa
      register: PID

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    # Stats go to performance.log; verbose stderr to "<src-instance-type>-dump.log".
    - name: Checkpoint workload
      shell: |
        sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \
        2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log"

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Upload to S3 - dump detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail
# file: ssh_scripts/py_pku/external-migration-dump.yml
# Source-side half of the external-migration experiment for the Python pku
# workload: start it, checkpoint it with CRIU into a per-host dump directory,
# and upload the detailed CRIU log to S3.
---
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    # Give each inventory host a stable numeric index used as its dump dir name.
    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    - name: Initialize source instance
      shell: |
        mkdir /home/ubuntu/migration_test/dump/{{ index }}
      ignore_errors: yes

    - name: Increase PID
      shell: |
        python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/increase_pid.py > /dev/null

    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/pku/ && python3 pku.py > /dev/null 2>&1 &'

    # 169.254.169.254 is the EC2 instance-metadata service.
    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log

    # Select the python3 process itself, excluding the wrapping "bash -c" and grep.
    - name: Get workload PID
      shell: ps aux | grep "python3 pku.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}'
      register: PID

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    # Stats go to performance.log; verbose stderr to "<src-instance-type>-dump.log".
    - name: Checkpoint workload
      shell: |
        sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \
        2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log"

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Upload to S3 - dump detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail
# file: ssh_scripts/py_rdseed/external-migration-dump.yml
# Source-side half of the external-migration experiment for the Python rand
# (rdseed) workload: start it, checkpoint it with CRIU into a per-host dump
# directory, and upload the detailed CRIU log to S3.
---
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    # Give each inventory host a stable numeric index used as its dump dir name.
    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    - name: Initialize source instance
      shell: |
        mkdir /home/ubuntu/migration_test/dump/{{ index }}
      ignore_errors: yes

    - name: Increase PID
      shell: |
        python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/increase_pid.py > /dev/null

    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/rand/ && python3 rand.py > /dev/null 2>&1 &'

    # 169.254.169.254 is the EC2 instance-metadata service.
    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log

    # Select the python3 process itself, excluding the wrapping "bash -c" and grep.
    - name: Get workload PID
      shell: ps aux | grep "python3 rand.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}'
      register: PID

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    # Stats go to performance.log; verbose stderr to "<src-instance-type>-dump.log".
    - name: Checkpoint workload
      shell: |
        sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \
        2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log"

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Upload to S3 - dump detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail
23 | 24 | - name: logging - write the src instance type 25 | shell: | 26 | echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log 27 | 28 | - name: Get workload PID 29 | shell: ps aux | grep "python3 rand.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}' 30 | register: PID 31 | 32 | - name: logging - write the checkpoint start time 33 | shell: | 34 | echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log 35 | 36 | - name: Checkpoint workload 37 | shell: | 38 | sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \ 39 | 2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log" 40 | 41 | - name: logging - write the checkpoint end time 42 | shell: | 43 | echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log 44 | 45 | - name: Upload to S3 - dump detail log 46 | shell: | 47 | aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail -------------------------------------------------------------------------------- /ssh_scripts/py_rsa/external-migration-dump.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: set index variable 7 | set_fact: 8 | index: "{{ groups['all'].index(inventory_hostname) }}" 9 | 10 | - name: Initialize source instance 11 | shell: | 12 | mkdir /home/ubuntu/migration_test/dump/{{ index }} 13 | ignore_errors: yes 14 | 15 | - name: Increase PID 16 | shell: | 17 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/increase_pid.py > /dev/null 18 | 19 | - name: Start 
workload 20 | shell: | 21 | export LD_BIND_NOW=1; \ 22 | export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \ 23 | nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/rsa/ && python3 rsa.py > /dev/null 2>&1 &' 24 | 25 | - name: logging - write the src instance type 26 | shell: | 27 | echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log 28 | 29 | - name: Get workload PID 30 | shell: ps aux | grep "python3 rsa.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}' 31 | register: PID 32 | 33 | - name: logging - write the checkpoint start time 34 | shell: | 35 | echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log 36 | 37 | - name: Checkpoint workload 38 | shell: | 39 | sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \ 40 | 2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log" 41 | 42 | - name: logging - write the checkpoint end time 43 | shell: | 44 | echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log 45 | 46 | - name: Upload to S3 - dump detail log 47 | shell: | 48 | aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail -------------------------------------------------------------------------------- /ssh_scripts/py_sha/external-migration-dump.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: set index variable 7 | set_fact: 8 | index: "{{ groups['all'].index(inventory_hostname) }}" 9 | 
10 | - name: Initialize source instance 11 | shell: | 12 | mkdir /home/ubuntu/migration_test/dump/{{ index }} 13 | ignore_errors: yes 14 | 15 | - name: Increase PID 16 | shell: | 17 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/increase_pid.py > /dev/null 18 | 19 | - name: Start workload 20 | shell: | 21 | export LD_BIND_NOW=1; \ 22 | export LD_LIBRARY_PATH=/home/ubuntu/openssl-openssl-3.1.3:$LD_LIBRARY_PATH; \ 23 | nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/sha/ && python3 sha.py > /dev/null 2>&1 &' 24 | 25 | - name: logging - write the src instance type 26 | shell: | 27 | echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log 28 | 29 | - name: Get workload PID 30 | shell: ps aux | grep "python3 sha.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}' 31 | register: PID 32 | 33 | - name: logging - write the checkpoint start time 34 | shell: | 35 | echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log 36 | 37 | - name: Checkpoint workload 38 | shell: | 39 | sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \ 40 | 2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log" 41 | 42 | - name: logging - write the checkpoint end time 43 | shell: | 44 | echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log 45 | 46 | - name: Upload to S3 - dump detail log 47 | shell: | 48 | aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail -------------------------------------------------------------------------------- /ssh_scripts/redis/external-migration-dump.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: set index variable 7 | set_fact: 8 | index: "{{ groups['all'].index(inventory_hostname) }}" 9 | 10 | - name: Initialize source instance 11 | shell: | 12 | mkdir /home/ubuntu/migration_test/dump/{{ index }} 13 | 14 | - name: Increase PID 15 | shell: | 16 | python3 /home/ubuntu/migration_test/increase_pid.py > /dev/null 17 | 18 | - name: Start workload 19 | shell: nohup /usr/bin/redis-server --port 7777 > /dev/null 2>&1 & 20 | 21 | - name: Create data to redis 22 | shell: python3 /home/ubuntu/migration_test/redis_scripts/write.py 23 | 24 | - name: logging - write the src instance type 25 | shell: | 26 | echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log 27 | 28 | - name: wait for 5 sec 29 | wait_for: 30 | timeout: 5 31 | register: result 32 | 33 | - name: logging - write the checkpoint start time 34 | shell: | 35 | echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log 36 | 37 | - name: Get workload PID 38 | shell: pgrep -u ubuntu redis-server 39 | register: PID 40 | 41 | - name: logging - process ids 42 | shell: | 43 | ps -eLf | awk '$2 == {{ PID.stdout_lines[0] }} { print $4 }' > /home/ubuntu/migration_test/dump/{{ index }}/src-pids.log 44 | 45 | - name: Checkpoint workload 46 | shell: | 47 | sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \ 48 | 2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log" 49 | 50 | - name: logging - write the checkpoint end time 51 | shell: | 52 | echo end checkpoint : $(date 
+'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log 53 | 54 | - name: Upload to S3 - dump detail log 55 | shell: | 56 | aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail -------------------------------------------------------------------------------- /ssh_scripts/matrix_multiplication/external-migration-dump.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: set index variable 7 | set_fact: 8 | index: "{{ groups['all'].index(inventory_hostname) }}" 9 | 10 | - name: Initialize source instance 11 | shell: | 12 | mkdir /home/ubuntu/migration_test/dump/{{ index }} 13 | 14 | - name: Increase PID 15 | shell: | 16 | python3 /home/ubuntu/migration_test/increase_pid.py > /dev/null 17 | 18 | - name: Start workload 19 | shell: nohup python3 /home/ubuntu/migration_test/matrix_script/matrix_mul.py 1>/dev/null 2>&1 & 20 | 21 | - name: logging - write the src instance type 22 | shell: | 23 | echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log 24 | 25 | - name: wait for 5 sec 26 | wait_for: 27 | timeout: 5 28 | register: result 29 | 30 | - name: logging - write the checkpoint start time 31 | shell: | 32 | echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log 33 | 34 | - name: Get workload PID 35 | shell: pgrep -f matrix_mul.py 36 | register: PID 37 | 38 | - name: logging - process ids 39 | shell: | 40 | ps -eLf | awk '$2 == {{ PID.stdout_lines[0] }} { print $4 }' > /home/ubuntu/migration_test/dump/{{ index }}/src-pids.log 41 | 42 | - name: Checkpoint workload 43 | shell: | 44 | sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> 
import pandas as pd
from pathlib import Path

# Root of the data_processing_for_lscpu directory (parent of this modules/ package).
data_processing_for_lscpu_path = str(Path(__file__).resolve().parent.parent)


def read_csv(CPU_FEATURES, csv_path=None):
    """
    Build a dataframe of x86_64 instance types and their CPU flags.

    Reads the lscpu data collected from the cloud vendors, keeps the
    InstanceType / CloudProvider / Model name / Flags columns, and appends
    one (initially empty) column per entry of CPU_FEATURES so callers can
    later split the Flags string into per-feature columns.

    Parameters
    ----------
    CPU_FEATURES : list of str
        CPU feature names; one empty column is appended per feature.
    csv_path : str, optional
        Alternate CSV file to read. Defaults to the bundled
        lscpu/lscpu_all_vendors.csv (backward compatible).

    Returns
    -------
    pandas.DataFrame
    """
    if csv_path is None:
        csv_path = f'{data_processing_for_lscpu_path}/lscpu/lscpu_all_vendors.csv'

    # csv read & add header
    df = pd.read_csv(csv_path,
                     usecols=['CloudProvider', 'Architecture', 'InstanceType', 'Model name', 'Flags'],
                     index_col='InstanceType')
    df = df.reset_index()

    # Only x86_64 machines are comparable for flag analysis.
    df = df.loc[df['Architecture'] == 'x86_64']
    df.drop('Architecture', axis=1, inplace=True)
    df = df[['InstanceType', 'CloudProvider', 'Model name', 'Flags']]

    # One empty column per CPU feature, to be filled by the callers.
    df = pd.concat([df, pd.DataFrame(columns=CPU_FEATURES)], axis=1)

    # remove the unsupported instance type by AWS
    unsupported = ['m2.2xlarge', 'm2.4xlarge', 'm2.xlarge', 't1.micro', 'c1.xlarge', 'm1.large', 'm1.medium', 'm1.small', 'm1.xlarge', 'c3.2xlarge', 'c3.4xlarge', 'c3.8xlarge', 'c3.large', 'c3.xlarge', 'g2.2xlarge', 'i2.2xlarge', 'i2.4xlarge', 'i2.8xlarge', 'i2.xlarge', 'm3.2xlarge', 'm3.large', 'm3.medium', 'm3.xlarge', 'r3.2xlarge', 'r3.4xlarge', 'r3.8xlarge', 'r3.large', 'r3.xlarge', 'u-18tb1.112xlarge']

    # Remove high-cost instances
    highCostInstances = ['u-3tb1.56xlarge', 'u-6tb1.56xlarge', 'u-6tb1.112xlarge', 'u-9tb1.112xlarge', 'u-12tb1.112xlarge']

    # Single vectorized drop instead of one full scan per excluded type.
    df = df[~df['InstanceType'].isin(unsupported + highCostInstances)]

    return df


def read_exp_success_cases(filename):
    '''
    Read the (source, destination) pairs of successful experiments.

    The CSV named *filename* under experiment_success_cases/ was collected
    through AWS Athena.
    '''
    # BUG FIX: the filename argument was previously ignored when building the path.
    df = pd.read_csv(f'{data_processing_for_lscpu_path}/experiment_success_cases/{filename}',
                     usecols=['source', 'destination'])

    return df


def read_exp_failure_cases(filename):
    '''
    Read the (source, destination) pairs of failed experiments.

    The CSV named *filename* under experiment_failure_cases/ was collected
    through AWS Athena.
    '''
    # BUG FIX: the filename argument was previously ignored when building the path.
    df = pd.read_csv(f'{data_processing_for_lscpu_path}/experiment_failure_cases/{filename}',
                     usecols=['source', 'destination'])

    return df
# ==== ssh_scripts/py_matmul/external-migration-dump.yml ====
# Reset a previously used instance, start the matmul workload, checkpoint it
# with criu, and upload the detailed dump log to S3.
---
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    # FIX: a duplicate "Get workload PID" task used to run here; its
    # registered result was never read and was overwritten below, so it is
    # removed as dead code.
    - name: Initialize destination instance
      shell: |
        sudo dmesg -C; \
        sudo rm -rf /home/ubuntu/migration_test/*.csv; \
        sudo rm -rf /home/ubuntu/migration_test/*.log; \
        sudo kill -9 $(ps aux | grep "python3 matmul.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}')
      ignore_errors: yes  # nothing to kill/remove on a fresh instance

    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    - name: Initialize source instance
      shell: |
        mkdir /home/ubuntu/migration_test/dump/{{ index }}
      ignore_errors: yes

    - name: Increase PID
      shell: |
        python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/increase_pid.py > /dev/null

    - name: Start workload
      shell: |
        export LD_BIND_NOW=1; \
        nohup bash -c 'cd /home/ubuntu/LiveMigrate-Detector/workload_instruction_analyzer/bytecode_tracking/exp_workloads/matmul/ && python3 matmul.py > /dev/null 2>&1 &'

    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log

    - name: Get workload PID
      shell: ps aux | grep "python3 matmul.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}'
      register: PID

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Checkpoint workload
      shell: |
        sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \
        2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log"

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Upload to S3 - dump detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail

# ==== ssh_scripts/xgboost/external-migration-dump.yml ====
# Start the xgboost mnist workload, wait until it has produced output, then
# checkpoint it with criu and upload the logs.
---
- name: Migration compatibility check - src
  hosts: all
  gather_facts: no
  tasks:
    - name: set index variable
      set_fact:
        index: "{{ groups['all'].index(inventory_hostname) }}"

    - name: Initialize source instance
      shell: |
        mkdir /home/ubuntu/migration_test/dump/{{ index }}; \
        sudo rm /home/ubuntu/migration_test/xgboost.log
      # FIX: the stale log removed here used to be dump/xgboost.log, but the
      # file waited on and copied below is migration_test/xgboost.log — a
      # leftover log from a prior run made the wait pass immediately.
      ignore_errors: yes
    - name: Increase PID
      shell: |
        python3 /home/ubuntu/migration_test/increase_pid.py > /dev/null

    - name: Start workload
      shell: nohup python3 /home/ubuntu/migration_test/xgboost_scripts/mnist.py > /dev/null 2>&1 &

    - name: logging - write the src instance type
      shell: |
        echo "src : $(curl http://169.254.169.254/latest/meta-data/instance-type)" > /home/ubuntu/migration_test/dump/{{ index }}/performance.log

    - name: Get workload PID
      shell: pgrep -f mnist
      register: PID

    - name: Wait until xgboost.log has at least one line
      shell: wc -l /home/ubuntu/migration_test/xgboost.log | awk '{print $1}'
      register: linecount
      retries: 12
      delay: 5 # wait for 5 seconds between retries
      until: linecount.stdout | int >= 1

    - name: Fail if the log file doesn't have at least one line after waiting
      fail:
        msg: "/home/ubuntu/migration_test/xgboost.log did not have a line after waiting for 60 seconds"
      when: linecount.stdout | int < 1

    - name: logging - write the checkpoint start time
      shell: |
        echo start checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') > /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: Checkpoint workload
      shell: |
        sudo criu dump -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ index }}/ -t {{ PID.stdout_lines[0] }} >> /home/ubuntu/migration_test/dump/{{ index }}/performance.log \
        2> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/dump/{{ index }}/performance.log)-dump.log"

    - name: logging - write the checkpoint end time
      shell: |
        echo end checkpoint : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/dump/{{ index }}/timestamp.log

    - name: logging - xgboost log copy to dump dir
      shell: |
        cp /home/ubuntu/migration_test/xgboost.log /home/ubuntu/migration_test/dump/{{ index }}

    - name: Upload to S3 - dump detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-dump.log s3://migration-compatibility-detail
"""Drive an internal-migration compatibility experiment end to end.

Creates the AWS infrastructure with terraform, waits for every tagged EC2
instance to pass its status checks, runs one ansible worker per instance
group, then destroys the infrastructure and reports the total runtime.
"""
import threading
import subprocess
import time
import datetime
import glob
import os
import boto3

import ssh_scripts.playbook as playbook

ec2_client = boto3.client('ec2', region_name='us-west-2')
ec2_resource = boto3.resource('ec2', region_name='us-west-2')

# Number of instance groups, and hence ansible worker threads.
GROUP_NUMBER = 18
TF_DIR = 'infrastructure/internal_migration'


def _terraform(args, log_mode):
    """Run one terraform command in TF_DIR, logging stdout/stderr to terraform.log."""
    with open('terraform.log', log_mode) as f:
        p = subprocess.Popen(['terraform', *args], cwd=TF_DIR, stdout=f, stderr=f, encoding='utf-8')
        p.wait()


def _create_infrastructure():
    """Create infrastructure by group.

    The read-instances module is applied first, separately, so that some
    resources (such as VPCs) can be reused.
    """
    _terraform(['apply', '-auto-approve', '-target', 'module.read-instances'], 'w')
    _terraform(['apply', '-auto-approve'], 'a')


def _all_instances_ready():
    """Return True when every tagged, non-terminated instance reports status 'ok'."""
    instances = ec2_client.describe_instances(Filters=[
        {
            'Name': 'tag:Name',
            'Values': ['container-migration-test_*']
        }
    ])

    for reservation in instances['Reservations']:
        for instance in reservation['Instances']:
            instance_id = instance['InstanceId']
            state = ec2_resource.Instance(instance_id).state['Name']

            if state == 'terminated':
                # Leftovers from an earlier run never become healthy; skip
                # them without abandoning the rest of the reservation.
                # (BUG FIX: the old `break` skipped live siblings and still
                # left the all-running flag set.)
                print(f"Instance {instance_id} is terminated")
                continue

            status = ec2_client.describe_instance_status(InstanceIds=[instance_id])
            statuses = status.get('InstanceStatuses')
            # BUG FIX: guard the empty-list case too — a freshly launched
            # instance can report no status entry yet, which used to raise
            # IndexError.
            if not statuses or statuses[0]['InstanceStatus']['Status'] != 'ok':
                print(f"Instance {instance_id} is not yet ready. Waiting 5 seconds...")
                return False
    return True


def _wait_for_instances():
    """Poll every 10 seconds until all tagged instances pass their health checks."""
    while True:
        print('checking instance status...')
        if _all_instances_ready():
            print('All instances are running')
            return
        time.sleep(10)


def _run_migration_tests():
    """Start one ansible worker per group and wait for all of them to finish."""
    # Remove stale per-group logs from earlier runs (no shell needed).
    for stale in glob.glob('group*.log'):
        os.remove(stale)

    # Execute an Ansible command to start the container migration test.
    threads = []
    for group in range(GROUP_NUMBER):
        thread = threading.Thread(target=playbook.internalMigration, args=(str(group),))
        thread.start()
        threads.append(thread)
        time.sleep(3)  # stagger the workers instead of launching all at once

    # wait for end of test
    for thread in threads:
        thread.join()


def main():
    start_time = datetime.datetime.now()

    _create_infrastructure()
    print('\nComplete infrastructure creation')

    _wait_for_instances()
    print('Pass all instance health checks')

    _run_migration_tests()

    # destroy infrastructure by group
    _terraform(['destroy', '-auto-approve'], 'a')

    elapsed_time = datetime.datetime.now() - start_time
    print(f'total time : {elapsed_time.total_seconds()}')


if __name__ == '__main__':
    main()
//p' /home/ubuntu/migration_test/performance.log) 29 | echo "src" > "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 30 | cat /home/ubuntu/migration_test/src-pids.log >> "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 31 | echo "dst" >> "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 32 | cat /home/ubuntu/migration_test/dst-pids.log >> "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 33 | aws s3 cp "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" s3://migration-compatibility-detail 34 | 35 | - name: Restore workload 36 | shell: | 37 | sudo dmesg -C 38 | nohup sudo criu restore -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 39 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 40 | 41 | - name: wait for 5 sec 42 | wait_for: 43 | timeout: 5 44 | register: result 45 | 46 | - name: Restore workload to stop state 47 | shell: | 48 | nohup sudo criu restore -j -s -D /home/ubuntu/migration_test/dump/{{ src }} & 49 | 50 | - name: process debug 51 | shell: | 52 | nohup sudo gdb python -ex "attach {{ PID.stdout_lines[0] }}" -ex "source /home/ubuntu/migration_test/gdb_script.py" & 53 | 54 | - name: wait for 5 sec 55 | wait_for: 56 | timeout: 5 57 | register: result 58 | 59 | - name: renaming csv 60 | shell: | 61 | mv /home/ubuntu/migration_test/output.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-debug.csv" 62 | 63 | - name: Upload to S3 - debug result 64 | shell: | 65 | aws s3 cp /home/ubuntu/migration_test/*.csv s3://migration-compatibility/Migration-failure-groups/ 66 | 67 | - name: Upload to S3 - restore detail log 68 | shell: 
import subprocess
import time
import datetime
import boto3
from tqdm import tqdm

import ssh_scripts.playbook as playbook

ec2_client = boto3.client('ec2', region_name='us-west-2')
ec2_resource = boto3.resource('ec2', region_name='us-west-2')

# Indices of the instance groups under test.  NOTE: the historical "GRPUP"
# spelling (sic) is kept so anything referencing this module keeps working.
CREATE_GRPUP = list(range(27))


def _all_instances_ready():
    """Return True when every non-terminated 'migration-test_*' instance
    passes its EC2 health check.

    Terminated instances are skipped (they can never become 'ok').  An empty
    ``InstanceStatuses`` list means the status is not available yet, which
    counts as not ready.
    """
    instances = ec2_client.describe_instances(Filters=[
        {
            'Name': 'tag:Name',
            'Values': ['migration-test_*']
        }
    ])

    for reservation in instances['Reservations']:
        for instance in reservation['Instances']:
            instance_id = instance['InstanceId']
            instance_obj = ec2_resource.Instance(instance_id)

            # Bug fix: the original `break` on a terminated instance skipped
            # the health check for every remaining instance in the same
            # reservation; only the terminated instance should be skipped.
            if instance_obj.state['Name'] == 'terminated':
                continue

            status = ec2_client.describe_instance_status(
                InstanceIds=[instance_id])
            statuses = status.get('InstanceStatuses', [])
            # Bug fix: 'InstanceStatuses' is always present in the response
            # (possibly as an empty list), so the original
            # `'InstanceStatuses' not in status` guard never fired and
            # indexing [0] could raise IndexError while an instance was
            # still initializing.
            if not statuses or statuses[0]['InstanceStatus']['Status'] != 'ok':
                return False

    return True


def createInfrastructure(cwd):
    """Create the test infrastructure with terraform under ``cwd``, then
    block until all instances pass their health checks.

    :param cwd: terraform working directory for the chosen experiment option.
    """
    # create infrastructure by group; terraform output goes to terraform.log
    with open('terraform.log', 'w') as f:
        subprocess.run(['terraform', 'apply', '-auto-approve', '-target', 'module.read-instances', '-var',
                        f'group={CREATE_GRPUP}'], cwd=cwd, stdout=f, stderr=f, encoding='utf-8')
        subprocess.run(['terraform', 'apply', '-auto-approve', '-parallelism=30', '-var', f'group={CREATE_GRPUP}'],
                       cwd=cwd, stdout=f, stderr=f, encoding='utf-8')

    print('\nComplete infrastructure creation')
    print('waiting 2.5 minutes..')

    # Give the instances time to boot before polling their status.
    time.sleep(150)

    # checking instance status
    print('checking instance status...')
    while not _all_instances_ready():
        time.sleep(10)

    print('Pass all instance health checks')


def performTask():
    """Extract the workload ISA set, checkpoint every group, then restore
    each group's checkpoint on every group (all-to-all migration matrix).
    """
    # Extract ISA set from workload
    playbook.funcTracking(CREATE_GRPUP)
    playbook.bytecodeTracking(CREATE_GRPUP)
    playbook.entire_scanning(CREATE_GRPUP)

    # Execute an Ansible command to start the checkpoint.
    playbook.externalMigrationDump(CREATE_GRPUP)

    # Execute an Ansible command to start the restore.
    with tqdm(total=len(CREATE_GRPUP), unit='Processing') as pbar:
        for group in CREATE_GRPUP:
            playbook.externalMigrationRestore(CREATE_GRPUP, group)
            pbar.update(1)


def destroyInfrastructure(cwd):
    """Destroy the per-group test infrastructure with terraform.

    :param cwd: terraform working directory used at creation time.
    """
    with open('terraform.log', 'a') as f:
        subprocess.run(['terraform', 'destroy', '-auto-approve', '-parallelism=30', '-var',
                        f'group={CREATE_GRPUP}'], cwd=cwd, stdout=f, stderr=f)


if __name__ == '__main__':
    playbook.setWorkload()

    print('Select experiment option')
    print('1. On-Demand\n2. Spot-Instance')
    try:
        option = int(input()) - 1
    except ValueError:
        # Bug fix: a non-numeric choice crashed with a ValueError traceback
        # instead of printing the intended message.
        print('invalid option')
        exit()

    if option == 0:
        cwd = 'infrastructure/external_migration'
    elif option == 1:
        cwd = 'infrastructure/external_migration_on_spot'
    else:
        print('invalid option')
        exit()

    start_time = datetime.datetime.now()

    createInfrastructure(cwd)
    performTask()
    destroyInfrastructure(cwd)

    end_time = datetime.datetime.now()

    elapsed_time = end_time - start_time
    total_seconds = elapsed_time.total_seconds()
    print(f'total time : {total_seconds}')
/home/ubuntu/migration_test/performance.log) 36 | dst_file=$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log) 37 | echo "src" > "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 38 | cat /home/ubuntu/migration_test/src-pids.log >> "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 39 | echo "dst" >> "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 40 | cat /home/ubuntu/migration_test/dst-pids.log >> "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 41 | aws s3 cp "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" s3://migration-compatibility-detail 42 | 43 | - name: logging - write the restore start time 44 | shell: | 45 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 46 | 47 | - name: Restore workload 48 | shell: | 49 | nohup sudo criu restore -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 50 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 51 | 52 | - name: logging - write the restore end time 53 | shell: | 54 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 55 | 56 | - name: logging - write the workload state 57 | shell: | 58 | python3 /home/ubuntu/migration_test/redis_scripts/logging_workload_state.py 59 | 60 | - name: Log to CSV 61 | shell: | 62 | python3 /home/ubuntu/migration_test/log_to_csv.py 63 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 64 | 65 | - name: Debugging for Migration Failures 66 | shell: | 67 | python3 
/home/ubuntu/migration_test/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 68 | 69 | - name: wait for 5 sec 70 | wait_for: 71 | timeout: 10 72 | register: result 73 | 74 | - name: Upload to S3 - migration result 75 | shell: | 76 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/redis/ --recursive --exclude "*" --include "*.csv" --exclude "*-debug.csv" 77 | 78 | - name: Upload to S3 - restore detail log 79 | shell: | 80 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 81 | 82 | - name: Upload to S3 - debug result 83 | shell: | 84 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/redis/ 85 | ignore_errors: yes -------------------------------------------------------------------------------- /ssh_scripts/matrix_multiplication/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(pgrep -f matrix_mul.py) 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:~/criu/crit/ 27 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 28 | 29 | - name: logging - process ids 30 | shell: | 
31 | ps -eLf | awk '{ if ($4 != "LWP") print $4 }' > /home/ubuntu/migration_test/dst-pids.log 32 | 33 | - name: Upload to S3 - PID logs 34 | shell: | 35 | src_file=$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log) 36 | dst_file=$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log) 37 | echo "src" > "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 38 | cat /home/ubuntu/migration_test/src-pids.log >> "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 39 | echo "dst" >> "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 40 | cat /home/ubuntu/migration_test/dst-pids.log >> "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" 41 | aws s3 cp "/home/ubuntu/migration_test/${src_file}_to_${dst_file}-pids.log" s3://migration-compatibility-detail 42 | 43 | - name: logging - write the restore start time 44 | shell: | 45 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 46 | 47 | - name: Restore workload 48 | shell: | 49 | nohup sudo criu restore -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 50 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 51 | 52 | - name: logging - write the restore end time 53 | shell: | 54 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 55 | 56 | - name: logging - write the workload state 57 | shell: | 58 | python3 /home/ubuntu/migration_test/logging_workload_state.py 59 | 60 | - name: Log to CSV 61 | shell: | 62 | python3 /home/ubuntu/migration_test/log_to_csv.py 63 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n 
'/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 64 | 65 | - name: Debugging for Migration Failures 66 | shell: | 67 | python3 /home/ubuntu/migration_test/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 68 | 69 | - name: wait for 5 sec 70 | wait_for: 71 | timeout: 10 72 | register: result 73 | 74 | - name: Upload to S3 - migration result 75 | shell: | 76 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/matrix-multiplication/ --recursive --exclude "*" --include "*.csv" --exclude "*-debug.csv" 77 | 78 | - name: Upload to S3 - restore detail log 79 | shell: | 80 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 81 | 82 | - name: Upload to S3 - debug result 83 | shell: | 84 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/matrix-multiplication/ 85 | ignore_errors: yes -------------------------------------------------------------------------------- /ssh_scripts/py_pku/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(ps aux | grep "python3 pku.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}') 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get 
workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | paths: "/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' 
in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: process resume 68 | shell: | 69 | kill -CONT {{ PID.stdout_lines[0] }} 70 | 71 | - name: logging - write the workload state 72 | shell: | 73 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/logging_workload_state.py "pku.py" 74 | 75 | - name: Log to CSV 76 | shell: | 77 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/log_to_csv.py 78 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 79 | 80 | - name: Debugging for Migration Failures 81 | shell: | 82 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 83 | 84 | - name: Upload to S3 - migration result 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/pypku/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv" 87 | 88 | - name: Upload to S3 - restore detail log 89 | shell: | 90 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 91 | 92 | - name: Upload to S3 - debug result 93 | shell: | 94 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/pypku/ 95 | ignore_errors: yes -------------------------------------------------------------------------------- /ssh_scripts/py_rsa/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 
| sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(ps aux | grep "python3 rsa.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}') 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | paths: "/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." 
/home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: process resume 68 | shell: | 69 | kill -CONT {{ PID.stdout_lines[0] }} 70 | 71 | - name: logging - write the workload state 72 | shell: | 73 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/logging_workload_state.py "rsa.py" 74 | 75 | - name: Log to CSV 76 | shell: | 77 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/log_to_csv.py 78 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 79 | 80 | - name: Debugging for Migration Failures 81 | shell: | 82 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 83 | 84 | - name: Upload to S3 - migration result 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/pyrsa/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv" 87 | 88 | - name: Upload to S3 - restore detail log 89 | shell: | 90 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 91 | 92 | - name: Upload to S3 - debug result 93 | shell: | 94 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/pyrsa/ 95 | ignore_errors: yes 
-------------------------------------------------------------------------------- /ssh_scripts/py_sha/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(ps aux | grep "python3 sha.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}') 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | paths: 
"/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: process resume 68 | shell: | 69 | kill -CONT {{ PID.stdout_lines[0] }} 70 | 71 | - name: logging - write the workload state 72 | shell: | 73 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/logging_workload_state.py "sha.py" 74 | 75 | - name: Log to CSV 76 | shell: | 77 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/log_to_csv.py 78 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 79 | 80 | - name: Debugging for Migration Failures 81 | shell: | 82 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 83 | 84 | - name: Upload to S3 - migration result 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/pysha/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv" 87 | 88 | - name: Upload to S3 - restore detail log 89 | shell: | 90 | aws s3 cp /home/ubuntu/migration_test/*-restore.log 
s3://migration-compatibility-detail 91 | 92 | - name: Upload to S3 - debug result 93 | shell: | 94 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/pysha/ 95 | ignore_errors: yes -------------------------------------------------------------------------------- /ssh_scripts/py_rdseed/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(ps aux | grep "python3 rand.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}') 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' 
/home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | paths: "/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: process resume 68 | shell: | 69 | kill -CONT {{ PID.stdout_lines[0] }} 70 | 71 | - name: logging - write the workload state 72 | shell: | 73 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/logging_workload_state.py "rand.py" 74 | 75 | - name: Log to CSV 76 | shell: | 77 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/log_to_csv.py 78 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 79 | 80 | - name: Debugging for Migration Failures 81 | shell: | 82 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 83 | 84 | - name: Upload to S3 - migration result 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/ 
s3://migration-compatibility/Migration-between-groups/pyrdseed/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv" 87 | 88 | - name: Upload to S3 - restore detail log 89 | shell: | 90 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 91 | 92 | - name: Upload to S3 - debug result 93 | shell: | 94 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/pyrdseed/ 95 | ignore_errors: yes -------------------------------------------------------------------------------- /ssh_scripts/py_matmul/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(ps aux | grep "python3 matmul.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}') 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 
33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | paths: "/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' 
in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: process resume 68 | shell: | 69 | kill -CONT {{ PID.stdout_lines[0] }} 70 | 71 | - name: logging - write the workload state 72 | shell: | 73 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/logging_workload_state.py "matmul.py" 74 | 75 | - name: Log to CSV 76 | shell: | 77 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/log_to_csv.py 78 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 79 | 80 | - name: Debugging for Migration Failures 81 | shell: | 82 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 83 | 84 | - name: Upload to S3 - migration result 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/pymatmul/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv" 87 | 88 | - name: Upload to S3 - restore detail log 89 | shell: | 90 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 91 | 92 | - name: Upload to S3 - debug result 93 | shell: | 94 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/pymatmul/ 95 | ignore_errors: yes -------------------------------------------------------------------------------- /ssh_scripts/py_xgboost/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 
| shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(ps aux | grep "python3 xgb_example.py" | grep -v "bash -c" | grep -v grep | awk '{print $2}') 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | paths: "/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." 
/home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: process resume 68 | shell: | 69 | kill -CONT {{ PID.stdout_lines[0] }} 70 | 71 | - name: logging - write the workload state 72 | shell: | 73 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/logging_workload_state.py "xgb_example.py" 74 | 75 | - name: Log to CSV 76 | shell: | 77 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/log_to_csv.py 78 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 79 | 80 | - name: Debugging for Migration Failures 81 | shell: | 82 | python3 /home/ubuntu/LiveMigrate-Detector/experiment_utils/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 83 | 84 | - name: Upload to S3 - migration result 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/pyxgboost/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv" 87 | 88 | - name: Upload to S3 - restore detail log 89 | shell: | 90 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 91 | 92 | - name: Upload to S3 - debug result 93 | shell: | 94 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/pyxgboost/ 95 | ignore_errors: yes 
-------------------------------------------------------------------------------- /ssh_scripts/rubin/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(pgrep -f AGN_N_qsos) 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test; \ 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | paths: "/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | 
until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: logging - write the workload state 68 | shell: | 69 | python3 /home/ubuntu/migration_test/logging_workload_state.py 70 | 71 | - name: Log to CSV 72 | shell: | 73 | python3 /home/ubuntu/migration_test/log_to_csv.py 74 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 75 | 76 | - name: Debugging for Migration Failures 77 | shell: | 78 | python3 /home/ubuntu/migration_test/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 79 | 80 | - name: Upload to S3 - migration result 81 | shell: | 82 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/rubin/ --recursive --exclude "*" --include "*.csv" --exclude "*-debug.csv" 83 | 84 | - name: Upload to S3 - restore detail log 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 87 | 88 | - name: wait for 10 sec 89 | wait_for: 90 | timeout: 10 91 | register: result 92 | 93 | - name: Upload to S3 - debug result 94 | shell: | 95 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/rubin/ 96 
| ignore_errors: yes 97 | 98 | # re-exp 99 | # - name: shutdown 100 | # shell: | 101 | # sudo shutdown now -------------------------------------------------------------------------------- /ssh_scripts/sha/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(pidof sha) 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | 
paths: "/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: process resume 68 | shell: | 69 | kill -CONT {{ PID.stdout_lines[0] }} 70 | 71 | - name: logging - write the workload state 72 | shell: | 73 | python3 /home/ubuntu/migration_test/logging_workload_state.py sha 74 | 75 | - name: Log to CSV 76 | shell: | 77 | python3 /home/ubuntu/migration_test/log_to_csv.py 78 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 79 | 80 | - name: Debugging for Migration Failures 81 | shell: | 82 | python3 /home/ubuntu/migration_test/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 83 | 84 | - name: Upload to S3 - migration result 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/sha/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv" 87 | 88 | - name: Upload to S3 - restore detail log 89 | shell: | 90 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 91 | 92 | - name: Upload to S3 - debug result 
93 | shell: | 94 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/sha/ 95 | ignore_errors: yes 96 | 97 | # re-exp 98 | # - name: shutdown 99 | # shell: | 100 | # sudo shutdown now -------------------------------------------------------------------------------- /ssh_scripts/pku/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(pgrep -f pku) 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' 
/home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | paths: "/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: process resume 68 | shell: | 69 | kill -CONT {{ PID.stdout_lines[0] }} 70 | 71 | - name: logging - write the workload state 72 | shell: | 73 | python3 /home/ubuntu/migration_test/logging_workload_state.py pku 74 | 75 | - name: Log to CSV 76 | shell: | 77 | python3 /home/ubuntu/migration_test/log_to_csv.py 78 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 79 | 80 | - name: Debugging for Migration Failures 81 | shell: | 82 | python3 /home/ubuntu/migration_test/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 83 | 84 | - name: Upload to S3 - migration result 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/pku/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv" 87 | 88 | - name: Upload to S3 - restore detail log 89 | 
shell: | 90 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 91 | 92 | - name: Upload to S3 - debug result 93 | shell: | 94 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/pku/ 95 | ignore_errors: yes 96 | 97 | # re-exp 98 | # - name: shutdown 99 | # shell: | 100 | # sudo shutdown now -------------------------------------------------------------------------------- /ssh_scripts/rdseed/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(pgrep -f rand) 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> 
/home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | paths: "/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: process resume 68 | shell: | 69 | kill -CONT {{ PID.stdout_lines[0] }} 70 | 71 | - name: logging - write the workload state 72 | shell: | 73 | python3 /home/ubuntu/migration_test/logging_workload_state.py rand 74 | 75 | - name: Log to CSV 76 | shell: | 77 | python3 /home/ubuntu/migration_test/log_to_csv.py 78 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 79 | 80 | - name: Debugging for Migration Failures 81 | shell: | 82 | python3 /home/ubuntu/migration_test/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 83 | 84 | - name: Upload to S3 - migration result 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/rdseed/ --recursive 
--exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv" 87 | 88 | - name: Upload to S3 - restore detail log 89 | shell: | 90 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 91 | 92 | - name: Upload to S3 - debug result 93 | shell: | 94 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/rdseed/ 95 | ignore_errors: yes 96 | 97 | # re-exp 98 | # - name: shutdown 99 | # shell: | 100 | # sudo shutdown now -------------------------------------------------------------------------------- /ssh_scripts/matrix_multiplication/internal-migration.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Migration compatibility check - src 3 | hosts: src 4 | tasks: 5 | - name: Initialize contaier 6 | shell: /home/ec2-user/podman/podman_init.sh -a 7 | ignore_errors: yes 8 | 9 | - name: logging - write the src instance type 10 | shell: | 11 | echo $(curl http://169.254.169.254/latest/meta-data/instance-type) >> /home/ec2-user/podman/dump/performance.log 12 | echo create container : $(date) >> /home/ec2-user/podman/dump/timestamp.log 13 | 14 | - name: Create container 15 | shell: /home/ec2-user/podman/create_container.sh -t 16 | 17 | - name: wait for 5 sec 18 | wait_for: 19 | timeout: 5 20 | register: result 21 | 22 | - name: Execute workload 23 | shell: /home/ec2-user/podman/execute_workload_in_container.sh 24 | 25 | - name: wait for 5 sec 26 | wait_for: 27 | timeout: 5 28 | register: result 29 | 30 | - name: logging - write the checkpoint start time 31 | shell: | 32 | echo start checkpoint : $(date) >> /home/ec2-user/podman/dump/timestamp.log 33 | 34 | - name: Container checkpoint 35 | shell: /home/ec2-user/podman/checkpoint.sh -n 36 | when: result.elapsed >= 5 37 | 38 | - name: logging - write the checkpoint end time 39 | shell: | 40 | echo end checkpoint : $(date) >> /home/ec2-user/podman/dump/timestamp.log 
41 | 42 | - name : Migration compatibility check - dst 43 | hosts: dst 44 | tasks: 45 | - name: Initialize contaier 46 | shell: /home/ec2-user/podman/podman_init.sh -l 47 | ignore_errors: yes 48 | 49 | - name: moved dump/log to log dir 50 | shell: | 51 | cat /home/ec2-user/podman/dump/performance.log > /home/ec2-user/podman/log/performance.log 52 | cat /home/ec2-user/podman/dump/timestamp.log > /home/ec2-user/podman/log/timestamp.log 53 | 54 | - name: logging - write the restore start time 55 | shell: | 56 | echo start restore : $(date) >> /home/ec2-user/podman/log/timestamp.log 57 | 58 | - name: logging - write the dst instance type 59 | shell: | 60 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) 61 | sed -i "2i $INSTANCE_TYPE" /home/ec2-user/podman/log/performance.log 62 | 63 | - name: Container restore 64 | shell: | 65 | /home/ec2-user/podman/restore.sh -n 66 | 67 | - name: wait for 5 sec 68 | wait_for: 69 | timeout: 5 70 | register: result 71 | 72 | - name: Container knocking 73 | shell: | 74 | PUBLIC_IP=$(curl http://169.254.169.254/latest/meta-data/public-ipv4) 75 | curl http://$PUBLIC_IP:8888 76 | ignore_errors: yes 77 | 78 | - name: logging - write the container state 79 | shell: | 80 | sudo podman ps -a >> /home/ec2-user/podman/log/timestamp.log 81 | 82 | - name: Crash check 83 | shell: sudo podman ps -a | grep Exited 84 | register: container_output 85 | ignore_errors: yes 86 | 87 | - name: logging - write the restore end time 88 | shell: | 89 | echo end restore : $(date) >> /home/ec2-user/podman/log/timestamp.log 90 | 91 | - name: Combine performance and workload excute log 92 | shell: | 93 | sudo podman cp jupynb:/home/jovyan/workload.log /home/ec2-user/podman 94 | cat /home/ec2-user/podman/workload.log >> /home/ec2-user/podman/log/performance.log 95 | register: result 96 | until: "'No such file or directory' not in result.stderr" 97 | retries: 30 98 | delay: 5 99 | when: container_output.stdout_lines | length == 0 # 
Container not crashed 100 | 101 | - name: Combine performance and timestamp log 102 | shell: | 103 | cat /home/ec2-user/podman/log/timestamp.log >> /home/ec2-user/podman/log/performance.log 104 | 105 | - name: Renaming log 106 | shell: | 107 | cat /home/ec2-user/podman/log/performance.log > /home/ec2-user/podman/log/"$(sed -n '1p' /home/ec2-user/podman/log/performance.log)_to_$(sed -n '2p' /home/ec2-user/podman/log/performance.log).log" 108 | rm /home/ec2-user/podman/log/performance.log 109 | rm /home/ec2-user/podman/log/timestamp.log 110 | 111 | - name: Upload to S3 112 | shell: | 113 | aws s3 cp /home/ec2-user/podman/log/* s3://container-migration-log -------------------------------------------------------------------------------- /ssh_scripts/xgboost/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(pgrep -f mnist) 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test; \ 17 | cp /home/ubuntu/migration_test/dump/{{ src}}/xgboost.log /home/ubuntu/migration_test 18 | 19 | - name: logging - write the dst instance type 20 | shell: | 21 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 22 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 23 | 24 | - name: Get workload PID 25 | register: PID 26 | shell: | 27 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 28 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 29 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 30 | 31 | - 
name: logging - write the restore start time 32 | shell: | 33 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 34 | 35 | - name: Restore workload 36 | shell: | 37 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 38 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" & 39 | 40 | - name: Wait for *-restore.log to be created 41 | ansible.builtin.find: 42 | paths: "/home/ubuntu/migration_test" 43 | patterns: '*-restore.log' 44 | register: found_files 45 | until: found_files.matched > 0 46 | retries: 3 47 | delay: 10 48 | 49 | - name: Check for success message in restore.log 50 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log 51 | register: grep_result 52 | ignore_errors: yes 53 | until: grep_result.rc == 0 54 | retries: 18 55 | delay: 10 56 | 57 | - name: Restoring failed 58 | fail: 59 | msg: "Restoring FAILED." 60 | when: 61 | - grep_result.rc != 0 62 | - "'Restoring FAILED.' 
in grep_result.stdout" 63 | 64 | - name: logging - write the restore end time 65 | shell: | 66 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 67 | 68 | - name: process resume 69 | shell: | 70 | kill -CONT {{ PID.stdout_lines[0] }} 71 | 72 | - name: logging - write the workload state 73 | shell: | 74 | python3 /home/ubuntu/migration_test/logging_workload_state.py 75 | 76 | - name: Log to CSV 77 | shell: | 78 | python3 /home/ubuntu/migration_test/log_to_csv.py 79 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 80 | 81 | - name: Debugging for Migration Failures 82 | shell: | 83 | python3 /home/ubuntu/migration_test/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 84 | 85 | - name: Upload to S3 - migration result 86 | shell: | 87 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/xgboost/ --recursive --exclude "*" --include "*.csv" --exclude "*-debug.csv" 88 | 89 | - name: Upload to S3 - restore detail log 90 | shell: | 91 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 92 | 93 | - name: Upload to S3 - debug result 94 | shell: | 95 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/xgboost/ 96 | ignore_errors: yes 97 | 98 | # re-exp 99 | # - name: shutdown 100 | # shell: | 101 | # sudo shutdown now -------------------------------------------------------------------------------- /ssh_scripts/c_matrix_multiplication/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 
sudo dmesg -C; \
        sudo rm -rf /home/ubuntu/migration_test/*.csv; \
        sudo rm -rf /home/ubuntu/migration_test/*.log; \
        sudo kill -9 $(pgrep -f matrix_multiplication)
      ignore_errors: yes

    # Bring the dump-side logs over so restore timings append to the same files.
    - name: Copy the log from the dump directory
      shell: |
        cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test

    # Record this (destination) instance type on line 2 of performance.log;
    # the dump side already wrote "src : <type>" on line 1.
    - name: logging - write the dst instance type
      shell: |
        INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \
        sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log

    # Read the checkpointed root PID out of the CRIU image so the process can
    # be resumed (SIGCONT) after a successful restore.
    - name: Get workload PID
      register: PID
      shell: |
        export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \
        export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \
        crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid'

    - name: logging - write the restore start time
      shell: |
        echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log

    # Restore runs in the background: stdout (--display-stats) appends to
    # performance.log, stderr (-v4 verbose log) goes to a per-migration
    # "<src>_to_<dst>-restore.log" named from the src/dst lines written above.
    - name: Restore workload
      shell: |
        nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \
        2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" &

    - name: Wait for *-restore.log to be created
      ansible.builtin.find:
        paths: "/home/ubuntu/migration_test"
        patterns: '*-restore.log'
      register: found_files
      until: found_files.matched > 0
      retries: 3
      delay: 10

    # Poll up to 3 minutes (18 retries x 10 s) for CRIU to report a final verdict.
    - name: Check for success message in restore.log
      shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log
      register: grep_result
      ignore_errors: yes
      until: grep_result.rc == 0
      retries: 18
      delay: 10

    - name: Restoring failed
      fail:
        msg: "Restoring FAILED."
      when:
        - grep_result.rc != 0
        - "'Restoring FAILED.' in grep_result.stdout"

    - name: logging - write the restore end time
      shell: |
        echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log

    # The workload was dumped in a stopped state; resume it now.
    - name: process resume
      shell: |
        kill -CONT {{ PID.stdout_lines[0] }}

    - name: logging - write the workload state
      shell: |
        python3 /home/ubuntu/migration_test/logging_workload_state.py matrix_multiplication

    # Consolidate the logs into one CSV named "<src>_to_<dst>.csv".
    - name: Log to CSV
      shell: |
        python3 /home/ubuntu/migration_test/log_to_csv.py
        mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv"

    - name: Debugging for Migration Failures
      shell: |
        python3 /home/ubuntu/migration_test/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }}

    # Only the per-migration result CSVs are uploaded; debug and workload CSVs
    # are filtered out here and uploaded to their own buckets below.
    - name: Upload to S3 - migration result
      shell: |
        aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/c_matrix_multiplication/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv"

    - name: Upload to S3 - restore detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail

    - name: Upload to S3 - debug result
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/c_matrix_multiplication/
      ignore_errors: yes

# re-exp
#    - name: shutdown
#      shell: |
#        sudo shutdown now
-------------------------------------------------------------------------------- /ssh_scripts/adox_adcx/external-migration-restore.yml: --------------------------------------------------------------------------------
---
# Restore-side playbook for the adox_adcx experiment (workload binary: "rsa").
# Restores the CRIU checkpoint taken on the source instance {{ src }}, verifies
# the restore, and uploads the per-migration results to S3.
- name : Migration compatibility check - dst
  hosts: all
  gather_facts: no
  tasks:
    # Clean out results from any previous run and kill a leftover workload.
    - name: Initialize destination instance
      shell: |
        sudo dmesg -C; \
        sudo rm -rf /home/ubuntu/migration_test/*.csv; \
        sudo rm -rf /home/ubuntu/migration_test/*.log; \
        sudo kill -9 $(pgrep -f rsa)
      ignore_errors: yes

    - name: Copy the log from the dump directory
      shell: |
        cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test

    # Record this (destination) instance type on line 2 of performance.log.
    - name: logging - write the dst instance type
      shell: |
        INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \
        sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log

    # Read the checkpointed root PID from the CRIU image for the later SIGCONT.
    - name: Get workload PID
      register: PID
      shell: |
        export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \
        export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \
        crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid'

    - name: logging - write the restore start time
      shell: |
        echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log

    # Background restore: stdout (timing stats) appends to performance.log,
    # stderr (verbose CRIU log) goes to "<src>_to_<dst>-restore.log".
    - name: Restore workload
      shell: |
        nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \
        2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log)-restore.log" &

    - name: Wait for *-restore.log to be created
      ansible.builtin.find:
        paths: "/home/ubuntu/migration_test"
        patterns: '*-restore.log'
      register: found_files
      until: found_files.matched > 0
      retries: 3
      delay: 10

    # Poll up to 3 minutes (18 retries x 10 s) for CRIU's final verdict.
    - name: Check for success message in restore.log
      shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log
      register: grep_result
      ignore_errors: yes
      until: grep_result.rc == 0
      retries: 18
      delay: 10

    - name: Restoring failed
      fail:
        msg: "Restoring FAILED."
      when:
        - grep_result.rc != 0
        - "'Restoring FAILED.' in grep_result.stdout"

    - name: logging - write the restore end time
      shell: |
        echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log

    # The workload was dumped stopped; resume it now.
    - name: process resume
      shell: |
        kill -CONT {{ PID.stdout_lines[0] }}

    - name: logging - write the workload state
      shell: |
        python3 /home/ubuntu/migration_test/logging_workload_state.py rsa

    # Consolidate the logs into "<src>_to_<dst>.csv".
    - name: Log to CSV
      shell: |
        python3 /home/ubuntu/migration_test/log_to_csv.py
        mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv"

    - name: Debugging for Migration Failures
      shell: |
        python3 /home/ubuntu/migration_test/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }}

    - name: Upload to S3 - migration result
      shell: |
        aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/adox_adcx/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv"

    - name: Upload to S3 - restore detail log
      shell: |
        aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail

    # Brief pause before uploading the debug CSV (if any was produced).
    - name: wait for 10 sec
      wait_for:
        timeout: 10
      register: result

    - name: Upload to S3 - debug result
shell: | 99 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/adox_adcx/ 100 | ignore_errors: yes 101 | 102 | # re-exp 103 | # - name: shutdown 104 | # shell: | 105 | # sudo shutdown now -------------------------------------------------------------------------------- /ssh_scripts/cpp_xgboost/external-migration-restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name : Migration compatibility check - dst 3 | hosts: all 4 | gather_facts: no 5 | tasks: 6 | - name: Initialize destination instance 7 | shell: | 8 | sudo dmesg -C; \ 9 | sudo rm -rf /home/ubuntu/migration_test/*.csv; \ 10 | sudo rm -rf /home/ubuntu/migration_test/*.log; \ 11 | sudo kill -9 $(pgrep -f mnist) 12 | ignore_errors: yes 13 | 14 | - name: Copy the log from the dump directory 15 | shell: | 16 | cp /home/ubuntu/migration_test/dump/{{ src }}/*.log /home/ubuntu/migration_test 17 | 18 | - name: logging - write the dst instance type 19 | shell: | 20 | INSTANCE_TYPE=$(curl http://169.254.169.254/latest/meta-data/instance-type) && \ 21 | sed -i "2i dst : $INSTANCE_TYPE" /home/ubuntu/migration_test/performance.log 22 | 23 | - name: Get workload PID 24 | register: PID 25 | shell: | 26 | export PATH=$PATH:/home/ubuntu/criu-3.18/crit/; \ 27 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python; \ 28 | crit show /home/ubuntu/migration_test/dump/{{ src }}/pstree.img | jq '.entries[0].pid' 29 | 30 | - name: logging - write the restore start time 31 | shell: | 32 | echo start restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 33 | 34 | - name: Restore workload 35 | shell: | 36 | nohup sudo criu restore -j -s -v4 --display-stats -D /home/ubuntu/migration_test/dump/{{ src }} >> /home/ubuntu/migration_test/performance.log \ 37 | 2>> /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' 
/home/ubuntu/migration_test/performance.log)-restore.log" & 38 | 39 | - name: Wait for *-restore.log to be created 40 | ansible.builtin.find: 41 | paths: "/home/ubuntu/migration_test" 42 | patterns: '*-restore.log' 43 | register: found_files 44 | until: found_files.matched > 0 45 | retries: 3 46 | delay: 10 47 | 48 | - name: Check for success message in restore.log 49 | shell: grep -E "Restore finished successfully. Tasks resumed.|Restoring FAILED." /home/ubuntu/migration_test/*-restore.log 50 | register: grep_result 51 | ignore_errors: yes 52 | until: grep_result.rc == 0 53 | retries: 18 54 | delay: 10 55 | 56 | - name: Restoring failed 57 | fail: 58 | msg: "Restoring FAILED." 59 | when: 60 | - grep_result.rc != 0 61 | - "'Restoring FAILED.' in grep_result.stdout" 62 | 63 | - name: logging - write the restore end time 64 | shell: | 65 | echo end restore : $(date +'%Y-%m-%dT%H:%M:%S') >> /home/ubuntu/migration_test/timestamp.log 66 | 67 | - name: process resume 68 | shell: | 69 | kill -CONT {{ PID.stdout_lines[0] }} 70 | 71 | - name: logging - write the workload state 72 | shell: | 73 | python3 /home/ubuntu/migration_test/logging_workload_state.py mnist 74 | 75 | - name: Log to CSV 76 | shell: | 77 | python3 /home/ubuntu/migration_test/log_to_csv.py 78 | mv /home/ubuntu/migration_test/migration_data.csv /home/ubuntu/migration_test/"$(sed -n '/src :/s/^src : //p' /home/ubuntu/migration_test/performance.log)_to_$(sed -n '/dst :/s/^dst : //p' /home/ubuntu/migration_test/performance.log).csv" 79 | 80 | - name: Debugging for Migration Failures 81 | shell: | 82 | python3 /home/ubuntu/migration_test/debug_scripts/debug.py {{ PID.stdout_lines[0] }} {{ src }} 83 | 84 | - name: Upload to S3 - migration result 85 | shell: | 86 | aws s3 cp /home/ubuntu/migration_test/ s3://migration-compatibility/Migration-between-groups/cpp_xgboost/ --recursive --exclude "*" --include "*_to_*.csv" --exclude "*-debug.csv" --exclude "mnist*.csv" 87 | 88 | - name: Upload to S3 - restore detail 
log 89 | shell: | 90 | aws s3 cp /home/ubuntu/migration_test/*-restore.log s3://migration-compatibility-detail 91 | 92 | - name: wait for 10 sec 93 | wait_for: 94 | timeout: 10 95 | register: result 96 | 97 | - name: Upload to S3 - debug result 98 | shell: | 99 | aws s3 cp /home/ubuntu/migration_test/*-debug.csv s3://migration-compatibility/Migration-failure-groups/cpp_xgboost/ 100 | ignore_errors: yes 101 | 102 | # re-exp 103 | # - name: shutdown 104 | # shell: | 105 | # sudo shutdown now -------------------------------------------------------------------------------- /infrastructure/CPU Feature Visualization - simplized aws group(all, exclude single-element groups).csv: -------------------------------------------------------------------------------- 1 | feature groups,ss,monitor,vmx,est,pcid,x2apic,tsc_deadline_timer,tsc_adjust,hle,erms,invpcid,rtm,mpx,avx512f,avx512dq,rdseed,adx,smap,avx512ifma,clflushopt,clwb,avx512cd,sha_ni,avx512bw,avx512vl,avx512vbmi,umip,pku,ospke,avx512_vbmi2,gfni,vaes,vpclmulqdq,avx512_vnni,avx512_bitalg,tme,avx512_vpopcntdq,rdpid,md_clear,flush_l1d,arch_capabilities,mmxext,fxsr_opt,pdpe1gb,cmp_legacy,svm,cr8_legacy,sse4a,misalignsse,3dnowprefetch,topoext,perfctr_core,clzero,xsaveerptr,rdpru,wbnoinvd,npt,nrip_save,tsc_scale,vmcb_clean,flushbyasid,decodeassists,pausefilter,pfthreshold,v_vmsave_vmload,constant_tsc,arch_perfmon,xtopology,tsc_reliable,nonstop_tsc,amd_dcm,aperfmperf,tsc_known_freq,cpuid_fault,invpcid_single,pti,ssbd,ibrs,ibpb,stibp,ibrs_enhanced,tpr_shadow,vnmi,ept,vpid,vmmcall,ept_ad,xsavec,xgetbv1,xsaves 2 | "r5a.large, m5a.large",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,1,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0 3 | "r5a.2xlarge, m5a.2xlarge",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0 4 | 
"r5a.8xlarge, m5a.8xlarge",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0 5 | "c6a.large, r6a.large, m6a.large",0,0,0,0,1,1,0,0,0,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,1,0,1,1,1,1,0,0,0,0,0,1,0,1,1,0 6 | "t2.xlarge, c4.large, d2.xlarge, m4.large",0,0,0,0,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 7 | "c6a.24xlarge, r6a.24xlarge, m6a.24xlarge",0,1,0,0,1,1,0,0,0,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,1,0,1,1,1,1,0,0,0,0,0,1,0,1,1,0 8 | "c4.8xlarge, m4.10xlarge",0,1,0,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 9 | "x1.32xlarge, m4.16xlarge",0,1,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 10 | "r4.8xlarge, i3.8xlarge",0,1,0,1,1,1,1,0,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 11 | "r4.16xlarge, i3.16xlarge",0,1,0,1,1,1,1,0,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 12 | "i4i.large, m6i.large, c6i.large, r6i.large, x2iedn.xlarge",1,0,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,1,1,1 13 | "c5.large, m5.xlarge, 
r5.large, d3.xlarge, i3en.large",1,0,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 14 | "r5dn.large, m5dn.large, x2iezn.2xlarge",1,0,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 15 | "m5.large, z1d.large, c5n.large, r5d.4xlarge, i3en.2xlarge",1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 16 | "i4i.16xlarge, m6i.16xlarge, c6i.16xlarge, r6i.16xlarge, x2idn.16xlarge",1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,1,1,1 17 | "m5.12xlarge, r5.12xlarge, c5d.9xlarge, d3en.12xlarge, i3en.24xlarge",1,1,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 18 | "c5.12xlarge, r5dn.12xlarge, m5dn.12xlarge, x2iezn.6xlarge",1,1,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 19 | "c5.9xlarge, m5.24xlarge, z1d.6xlarge, i3en.12xlarge",1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 -------------------------------------------------------------------------------- /infrastructure/CPU Feature Visualization - minimized aws group(all).csv: -------------------------------------------------------------------------------- 1 | feature 
groups,ss,monitor,vmx,est,pcid,x2apic,tsc_deadline_timer,tsc_adjust,hle,erms,invpcid,rtm,mpx,avx512f,avx512dq,rdseed,adx,smap,avx512ifma,clflushopt,clwb,avx512cd,sha_ni,avx512bw,avx512vl,avx512vbmi,umip,pku,ospke,avx512_vbmi2,gfni,vaes,vpclmulqdq,avx512_vnni,avx512_bitalg,tme,avx512_vpopcntdq,rdpid,md_clear,flush_l1d,arch_capabilities,mmxext,fxsr_opt,pdpe1gb,cmp_legacy,svm,cr8_legacy,sse4a,misalignsse,3dnowprefetch,topoext,perfctr_core,clzero,xsaveerptr,rdpru,wbnoinvd,npt,nrip_save,tsc_scale,vmcb_clean,flushbyasid,decodeassists,pausefilter,pfthreshold,v_vmsave_vmload,constant_tsc,arch_perfmon,xtopology,tsc_reliable,nonstop_tsc,amd_dcm,aperfmperf,tsc_known_freq,cpuid_fault,invpcid_single,pti,ssbd,ibrs,ibpb,stibp,ibrs_enhanced,tpr_shadow,vnmi,ept,vpid,vmmcall,ept_ad,xsavec,xgetbv1,xsaves 2 | m5a.large,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,1,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0 3 | m5a.2xlarge,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0 4 | m5a.8xlarge,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0 5 | c5a.large,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,0,0,1,1,1,1,0,0,0,0,0,1,0,1,1,0 6 | c6a.large,0,0,0,0,1,1,0,0,0,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,1,0,1,1,1,1,0,0,0,0,0,1,0,1,1,0 7 | m4.large,0,0,0,0,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 8 | 
h1.2xlarge,0,0,0,0,1,1,1,0,0,1,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 9 | x1e.xlarge,0,0,0,0,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 10 | r4.large,0,0,0,0,1,1,1,0,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 11 | i3.large,0,0,0,0,1,1,1,0,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 12 | c5a.24xlarge,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,0,0,1,1,1,1,0,0,0,0,0,1,0,1,1,0 13 | c6a.24xlarge,0,1,0,0,1,1,0,0,0,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,0,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,1,0,1,1,1,1,0,0,0,0,0,1,0,1,1,0 14 | c4.8xlarge,0,1,0,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 15 | h1.8xlarge,0,1,0,1,1,1,1,0,0,1,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 16 | h1.16xlarge,0,1,0,1,1,1,1,0,0,1,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 17 | x1e.8xlarge,0,1,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 18 | 
m4.16xlarge,0,1,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 19 | r4.8xlarge,0,1,0,1,1,1,1,0,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 20 | r4.16xlarge,0,1,0,1,1,1,1,0,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 21 | c6i.large,1,0,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,1,1,1 22 | c5.large,1,0,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 23 | m5n.large,1,0,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 24 | m5.large,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 25 | c6i.16xlarge,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,1,1,1 26 | c5d.9xlarge,1,1,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 27 | m5zn.6xlarge,1,1,0,0,1,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 28 | 
c5.9xlarge,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1 -------------------------------------------------------------------------------- /ExternalMigration(re-experiment).py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import time 3 | import datetime 4 | import boto3 5 | from tqdm import tqdm 6 | 7 | import csv 8 | from pathlib import Path 9 | csv_path = str(Path(__file__).resolve().parent) + '/infrastructure/CPU Feature Visualization - minimized aws group(all).csv' 10 | 11 | import ssh_scripts.playbook as playbook 12 | 13 | from pprint import pprint 14 | 15 | ec2_client = boto3.client('ec2', region_name='us-west-2') 16 | ec2_resource = boto3.resource('ec2', region_name='us-west-2') 17 | s3_client = boto3.client('s3') 18 | 19 | bucket_name = 'migration-compatibility' 20 | prefix = 'Migration-between-groups/pyxgboost/' 21 | 22 | def createInfrastructure(CREATE_GROUP, cwd): 23 | # create infrastructure by group 24 | with open(f'terraform.log', 'w') as f: 25 | subprocess.run(['terraform', 'apply', '-auto-approve', '-target', 'module.read-instances', '-var', 26 | f'group={CREATE_GROUP}'], cwd=cwd, stdout=f, stderr=f, encoding='utf-8') 27 | subprocess.run(['terraform', 'apply', '-auto-approve', '-var', f'group={CREATE_GROUP}'], 28 | cwd=cwd, stdout=f, stderr=f, encoding='utf-8') 29 | 30 | print('\nComplete infrastructure creation') 31 | print('wating 2.5 minute..') 32 | 33 | time.sleep(150) 34 | 35 | # checking instance status 36 | print('checking instance status...') 37 | while True: 38 | instances = ec2_client.describe_instances(Filters=[ 39 | { 40 | 'Name': 'tag:Name', 41 | 'Values': ['migration-test_*'] 42 | } 43 | ]) 44 | 45 | all_running = True 46 | 47 | for reservation in instances['Reservations']: 48 | for instance in reservation['Instances']: 49 | instance_id = 
instance['InstanceId'] 50 | instance_obj = ec2_resource.Instance(instance_id) 51 | 52 | instance_state = instance_obj.state['Name'] 53 | 54 | if instance_state == 'terminated': 55 | break 56 | 57 | status = ec2_client.describe_instance_status( 58 | InstanceIds=[instance_id]) 59 | if 'InstanceStatuses' not in status or status['InstanceStatuses'][0]['InstanceStatus']['Status'] != 'ok': 60 | all_running = False 61 | break 62 | 63 | if not all_running: 64 | break 65 | 66 | if all_running: 67 | break 68 | time.sleep(10) 69 | 70 | print('Pass all instance health checks') 71 | 72 | 73 | def performTask(CREATE_GROUP): 74 | # Execute an Ansible command to start the checkpoint. 75 | playbook.externalMigrationDump(CREATE_GROUP, re_exp=True) 76 | 77 | # Execute an Ansible command to start the restore. 78 | playbook.externalMigrationRestore(CREATE_GROUP, 0, re_exp=True) 79 | 80 | 81 | def destroyInfrastructure(CREATE_GROUP, cwd): 82 | # destroy infrastructure by groups 83 | with open(f'terraform.log', 'a') as f: 84 | p = subprocess.Popen(['terraform', 'destroy', '-auto-approve', '-var', 85 | f'group={CREATE_GROUP}'], cwd=cwd, stdout=f, stderr=f) 86 | p.wait() 87 | 88 | 89 | def getReExp(): 90 | instances = ["m5a.large", "m5a.2xlarge", "m5a.8xlarge", "c5a.large", "c6a.large", "m4.large", "h1.2xlarge", "x1e.xlarge", "r4.large", "i3.large", "c5a.24xlarge", "c6a.24xlarge", "c4.8xlarge", "h1.8xlarge", "h1.16xlarge", "x1e.8xlarge", "m4.16xlarge", "r4.8xlarge", "r4.16xlarge", "c6i.large", "c5.large", "m5n.large", "m5.large", "c6i.16xlarge", "c5d.9xlarge", "m5zn.6xlarge", "c5.9xlarge"] 91 | isExists = [] 92 | 93 | for src in instances: 94 | for dst in instances: 95 | if(src == dst): 96 | continue 97 | 98 | isExists.append(src + '_to_' + dst + '.csv') 99 | 100 | # 버킷 내의 모든 객체 조회 101 | response = s3_client.list_objects_v2(Bucket=bucket_name, Prefix=prefix) 102 | objects = response.get('Contents', []) 103 | 104 | # 객체 이름만 리스트로 저장 105 | file_names = [obj['Key'].split('/')[-1] for obj in 
objects] 106 | file_names = set(file_names) 107 | file_names.discard('') 108 | 109 | notExists = [] 110 | for case in isExists: 111 | if case not in file_names: 112 | notExists.append(case) 113 | 114 | src = notExists[0].split('_')[0] 115 | dsts = [] 116 | reExpCases = [] 117 | for missingCase in notExists: 118 | if src != missingCase.split('_')[0]: 119 | reExpCases.append({src: dsts}) 120 | 121 | dsts = [] 122 | src = missingCase.split('_')[0] 123 | dsts.append(missingCase.split('_')[-1].split('.csv')[0]) 124 | continue 125 | 126 | dsts.append(missingCase.split('_')[-1].split('.csv')[0]) 127 | 128 | reExpCases.append({src: dsts}) 129 | 130 | pprint(reExpCases, width=80) 131 | 132 | return reExpCases 133 | 134 | def setCsv(cases): 135 | data = [] 136 | column = ['feature groups'] 137 | 138 | data.append(column) 139 | data.append(cases.keys()) 140 | for values in cases.values(): 141 | for value in values: 142 | data.append([value]) 143 | 144 | # CSV 파일로 저장 145 | with open(csv_path, 'w', newline='') as file: 146 | writer = csv.writer(file) 147 | writer.writerows(data) 148 | 149 | if __name__ == '__main__': 150 | playbook.setWorkload() 151 | 152 | print('Select experiment option') 153 | print('1. On-Demand\n2. 
Spot-Instance') 154 | option = int(input()) - 1 155 | 156 | if option == 0: 157 | cwd = 'infrastructure/external_migration' 158 | elif option == 1: 159 | cwd = 'infrastructure/external_migration_on_spot' 160 | else: 161 | print('invalid option') 162 | exit() 163 | 164 | start_time = datetime.datetime.now() 165 | 166 | reExpCases = getReExp() 167 | with tqdm(total=len(reExpCases), unit='Processing') as pbar: 168 | for reExpCase in reExpCases: 169 | setCsv(reExpCase) 170 | 171 | # values(dst instances) count + src instance count 172 | length = len(list(reExpCase.values())[0]) + 1 173 | CREATE_GROUP = [i for i in range(length)] 174 | 175 | createInfrastructure(CREATE_GROUP, cwd) 176 | performTask(CREATE_GROUP) 177 | destroyInfrastructure(CREATE_GROUP, cwd) 178 | pbar.update(1) 179 | 180 | time.sleep(5) 181 | 182 | end_time = datetime.datetime.now() 183 | 184 | elapsed_time = end_time - start_time 185 | total_seconds = elapsed_time.total_seconds() 186 | print(f'total time : {total_seconds}') --------------------------------------------------------------------------------