├── .gitignore ├── LICENSE ├── README.md ├── libraries │   ├── aws.bash │   └── util.bash └── sign_s3_url.bash /.gitignore: -------------------------------------------------------------------------------- 1 | # File 2 | .DS_Store -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 Nam Nguyen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # aws-tools 2 | 3 | ***Sign S3 URL*** 4 | 5 | ``` 6 | SYNOPSIS : 7 | sign_s3_url.bash 8 | --help 9 | --aws-access-key-id 10 | --aws-secret-access-key 11 | --region 12 | --bucket 13 | --file-path 14 | --method 15 | --minute-expire 16 | 17 | USE CASES : 18 | If you have a private or public S3 bucket and would like to share downloadable links with anyone, 19 | this tool generates signed S3 URLs 20 | 21 | DESCRIPTION : 22 | --help Help page 23 | --aws-access-key-id AWS Access Key ID (optional, defaults to ${AWS_ACCESS_KEY_ID}) 24 | --aws-secret-access-key AWS Secret Access Key (optional, defaults to ${AWS_SECRET_ACCESS_KEY}) 25 | --region Region (optional, defaults to ${AWS_DEFAULT_REGION}) 26 | Valid regions: ap-northeast-1 ap-northeast-2 ap-south-1 ap-southeast-1 ap-southeast-2 ca-central-1 eu-central-1 eu-west-1 eu-west-2 sa-east-1 us-east-1 us-east-2 us-west-1 us-west-2 27 | --bucket Bucket name (required) 28 | --file-path File path (required) 29 | --method HTTP request method (optional, defaults to 'GET') 30 | --minute-expire Minutes until the signed URL expires (optional, defaults to '15' minutes) 31 | 32 | EXAMPLES : 33 | ./sign_s3_url.bash --help 34 | ./sign_s3_url.bash --bucket 'my_bucket_name' --file-path 'my_path/my_file.txt' 35 | ./sign_s3_url.bash --aws-access-key-id '5KI6IA4AXMA39FV7O4E0' --aws-secret-access-key '5N2j9gJlw9azyLEVpbIOn/tZ2u3sVjjHM03qJfIA' --region 'us-west-1' --bucket 'my_bucket_name' --file-path 'my_path/my_file.txt' --method 'PUT' --minute-expire '30' 36 | ``` -------------------------------------------------------------------------------- /libraries/aws.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | source "$(dirname "${BASH_SOURCE[0]}")/util.bash" 4 | 5 |
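# ------------------------------------------------------------------------------
# Editor's note: illustrative usage sketch, not part of the original library.
# A caller script would source this file and compose the helpers defined below;
# the stack name 'my-stack' and the tag value 'my-stack-web' are hypothetical
# and used only for illustration.
#
#     source './libraries/aws.bash'
#
#     # Resolve the auto scaling group that a CloudFormation stack created.
#     asgName="$(getAutoScaleGroupNameByStackName 'my-stack')"
#     checkNonEmptyString "${asgName}" 'auto scaling group not found'
#
#     # Tag the current EC2 instance using the EC2 helpers further below.
#     updateInstanceName 'my-stack-web'
# ------------------------------------------------------------------------------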
############################## 6 | # AUTO SCALE GROUP UTILITIES # 7 | ############################## 8 | 9 | function getAutoScaleGroupNameByStackName() 10 | { 11 | local -r stackName="${1}" 12 | 13 | checkNonEmptyString "${stackName}" 'undefined stack name' 14 | 15 | aws autoscaling describe-auto-scaling-groups \ 16 | --no-cli-pager \ 17 | --output 'json' | 18 | jq \ 19 | --arg jqStackName "${stackName}" \ 20 | --compact-output \ 21 | --raw-output \ 22 | --sort-keys \ 23 | '.["AutoScalingGroups"] | 24 | .[] | 25 | .["Tags"] | 26 | .[] | 27 | select(.["ResourceType"] == "auto-scaling-group") | 28 | select(.["Key"] == "aws:cloudformation:stack-name") | 29 | select(.["Value"] == $jqStackName) | 30 | .["ResourceId"] // empty' 31 | } 32 | 33 | function getInstanceOrderIndexInAutoScaleInstancesByEIPs() 34 | { 35 | local -r stackName="${1}" 36 | local instanceID="${2}" 37 | local -r elasticPublicIPs=("${@:3}") 38 | 39 | # Set Default Value 40 | 41 | if [[ "$(isEmptyString "${instanceID}")" = 'true' ]] 42 | then 43 | instanceID="$(getInstanceID 'false')" 44 | fi 45 | 46 | # Validate Values 47 | 48 | checkNonEmptyString "${stackName}" 'undefined stack name' 49 | checkNonEmptyString "${instanceID}" 'undefined instance id' 50 | checkNonEmptyArray 'undefined elastic public ips' "${elasticPublicIPs[@]}" 51 | 52 | # Find Order Index 53 | 54 | local -r autoScaleGroupName="$(getAutoScaleGroupNameByStackName "${stackName}")" 55 | 56 | checkNonEmptyString "${autoScaleGroupName}" 'undefined auto scale group name' 57 | 58 | local -r autoScaleInstanceIDs=($( 59 | aws ec2 describe-instances \ 60 | --filters \ 61 | 'Name=instance-state-name,Values=pending,running' \ 62 | "Name=tag:aws:autoscaling:groupName,Values=${autoScaleGroupName}" \ 63 | "Name=tag:aws:cloudformation:stack-name,Values=${stackName}" \ 64 | --no-cli-pager \ 65 | --output 'json' \ 66 | --query 'sort_by(Reservations[*].Instances[], &LaunchTime)[*].{ 67 | "InstanceId": InstanceId, 68 | "PublicIpAddress": PublicIpAddress 69 | }' | 70 | jq \ 71 | --argjson jqElasticPublicIPs "$(printf '%s\n' "${elasticPublicIPs[@]}" | jq -R | jq -s)" \ 72 | --compact-output \ 73 | --raw-output \ 74 | '.[] | select(.["PublicIpAddress"] | IN($jqElasticPublicIPs[]) | not) | .["InstanceId"] // empty' 75 | )) 76 | 77 | local i=0 78 | 79 | for ((i = 0; i < ${#autoScaleInstanceIDs[@]}; i = i + 1)) 80 | do 81 | if [[ "${autoScaleInstanceIDs[i]}" = "${instanceID}" ]] 82 | then 83 | echo "${i}" 84 | i="$((${#autoScaleInstanceIDs[@]}))" 85 | fi 86 | done 87 | } 88 | 89 | function getInstanceOrderIndexInAutoScaleInstancesByENIs() 90 | { 91 | local -r stackName="${1}" 92 | local instanceID="${2}" 93 | local -r elasticNetworkInterfaceIDs=("${@:3}") 94 | 95 | # Set Default Value 96 | 97 | if [[ "$(isEmptyString "${instanceID}")" = 'true' ]] 98 | then 99 | instanceID="$(getInstanceID 'false')" 100 | fi 101 | 102 | # Validate Values 103 | 104 | checkNonEmptyString "${stackName}" 'undefined stack name' 105 | checkNonEmptyString "${instanceID}" 'undefined instance id' 106 | checkNonEmptyArray 'undefined elastic network interface ids' "${elasticNetworkInterfaceIDs[@]}" 107 | 108 | # Filter Network Interface IDs By : 109 | # Status Available 110 | # Instance Subnet ID 111 | # Within Network Interface ID List From Configurations 112 | 113 | local -r instanceSubnetID="$(getInstanceSubnetID)" 114 | 115 | checkNonEmptyString "${instanceSubnetID}" 'undefined instance subnet id' 116 | 117 | local -r filterElasticNetworkInterfaceIDs=($( 118 | aws ec2 describe-network-interfaces \ 119 | 
--filters \ 120 | 'Name=status,Values=available' \ 121 | "Name=subnet-id,Values=${instanceSubnetID}" \ 122 | --no-cli-pager \ 123 | --output 'json' \ 124 | --query 'sort_by(NetworkInterfaces[*], &NetworkInterfaceId)[*]' | 125 | jq \ 126 | --argjson jqElasticNetworkInterfaceIDs "$(printf '%s\n' "${elasticNetworkInterfaceIDs[@]}" | jq -R | jq -s)" \ 127 | --compact-output \ 128 | --raw-output \ 129 | '.[] | select(.["NetworkInterfaceId"] | IN($jqElasticNetworkInterfaceIDs[])) | .["NetworkInterfaceId"] // empty' 130 | )) 131 | 132 | # Get Instance ID List Has : 133 | # Instance Subnet ID 134 | # Auto Scale Group Name 135 | # Stack Name 136 | # NOT IN Filter Elastic Network Interface IDs 137 | 138 | local -r autoScaleGroupName="$(getAutoScaleGroupNameByStackName "${stackName}")" 139 | 140 | checkNonEmptyString "${autoScaleGroupName}" 'undefined auto scale group name' 141 | 142 | local -r autoScaleInstanceIDs=($( 143 | aws ec2 describe-instances \ 144 | --filters \ 145 | 'Name=instance-state-name,Values=pending,running' \ 146 | "Name=network-interface.subnet-id,Values=${instanceSubnetID}" \ 147 | "Name=tag:aws:autoscaling:groupName,Values=${autoScaleGroupName}" \ 148 | "Name=tag:aws:cloudformation:stack-name,Values=${stackName}" \ 149 | --no-cli-pager \ 150 | --output 'json' \ 151 | --query 'sort_by(Reservations[*].Instances[], &LaunchTime)[*]' | 152 | jq \ 153 | --argjson jqFilterElasticNetworkInterfaceIDs "$(printf '%s\n' "${filterElasticNetworkInterfaceIDs[@]}" | jq -R | jq -s)" \ 154 | --compact-output \ 155 | --raw-output \ 156 | '.[] | select(.["NetworkInterfaces"] | all(.["NetworkInterfaceId"] != ($jqFilterElasticNetworkInterfaceIDs[]))) | .["InstanceId"] // empty' 157 | )) 158 | 159 | # Find Instance Order Index 160 | 161 | local i=0 162 | 163 | for ((i = 0; i < ${#autoScaleInstanceIDs[@]}; i = i + 1)) 164 | do 165 | if [[ "${autoScaleInstanceIDs[i]}" = "${instanceID}" ]] 166 | then 167 | echo "${i}:$(arrayToStringWithDelimiter ' ' "${filterElasticNetworkInterfaceIDs[@]}")" 168 | i="$((${#autoScaleInstanceIDs[@]}))" 169 | fi 170 | done 171 | } 172 | 173 | ############################# 174 | # CLOUD-FORMATION UTILITIES # 175 | ############################# 176 | 177 | function getStackIDByName() 178 | { 179 | local -r stackName="${1}" 180 | 181 | checkNonEmptyString "${stackName}" 'undefined stack name' 182 | 183 | aws cloudformation describe-stacks \ 184 | --no-cli-pager \ 185 | --output 'text' \ 186 | --query 'Stacks[*].[StackId]' \ 187 | --stack-name "${stackName}" \ 188 | 2> '/dev/null' || true 189 | } 190 | 191 | ################# 192 | # EC2 UTILITIES # 193 | ################# 194 | 195 | function associateAvailableElasticPublicIPToInstanceID() 196 | { 197 | local -r region="${1}" 198 | local -r instanceID="${2}" 199 | local -r elasticPublicIPs=("${@:3}") 200 | 201 | local -r availableElasticPublicIP="$(getAvailableElasticPublicIP "${region}" "${elasticPublicIPs[@]}")" 202 | 203 | if [[ "$(isEmptyString "${availableElasticPublicIP}")" = 'false' ]] 204 | then 205 | associateElasticPublicIPToInstanceID "${region}" "${instanceID}" "${availableElasticPublicIP}" 206 | fi 207 | } 208 | 209 | function associateElasticPublicIPToInstanceID() 210 | { 211 | local region="${1}" 212 | local instanceID="${2}" 213 | local -r elasticPublicIP="${3}" 214 | 215 | # Set Default Value 216 | 217 | if [[ "$(isEmptyString "${region}")" = 'true' ]] 218 | then 219 | region="$(getInstanceRegion 'false')" 220 | fi 221 | 222 | if [[ "$(isEmptyString "${instanceID}")" = 'true' ]] 223 | then 224 | 
instanceID="$(getInstanceID 'false')" 225 | fi 226 | 227 | # Validate Values 228 | 229 | checkValidRegion "${region}" 230 | checkNonEmptyString "${instanceID}" 'undefined instance id' 231 | checkNonEmptyString "${elasticPublicIP}" 'undefined elastic public ip' 232 | 233 | # Associate Elastic Public IP 234 | 235 | local -r allocationID="$(getEC2ElasticAllocationIDByElasticPublicIP "${region}" "${elasticPublicIP}")" 236 | 237 | checkNonEmptyString "${allocationID}" 'undefined allocation id' 238 | 239 | aws ec2 associate-address \ 240 | --allocation-id "${allocationID}" \ 241 | --allow-reassociation \ 242 | --instance-id "${instanceID}" \ 243 | --no-cli-pager \ 244 | --region "${region}" 245 | } 246 | 247 | function attachNetworkInterfaceIDToInstanceID() 248 | { 249 | local instanceID="${1}" 250 | local -r elasticNetworkInterfaceIDs=("${@:2}") 251 | 252 | # Set Default Value 253 | 254 | if [[ "$(isEmptyString "${instanceID}")" = 'true' ]] 255 | then 256 | instanceID="$(getInstanceID 'false')" 257 | fi 258 | 259 | # Validate Values 260 | 261 | checkNonEmptyString "${instanceID}" 'undefined instance id' 262 | checkNonEmptyArray 'undefined elastic network interface ids' "${elasticNetworkInterfaceIDs[@]}" 263 | 264 | # Attach Network Interface 265 | 266 | local -r elasticNetworkInterfaceID="$( 267 | aws ec2 describe-network-interfaces \ 268 | --filters 'Name=status,Values=available' \ 269 | --network-interface-ids "${elasticNetworkInterfaceIDs[@]}" \ 270 | --no-cli-pager \ 271 | --output 'json' | 272 | jq \ 273 | --compact-output \ 274 | --raw-output \ 275 | '.["NetworkInterfaces"] | first | .["NetworkInterfaceId"] // empty' 276 | )" 277 | 278 | checkNonEmptyString "${elasticNetworkInterfaceID}" 'undefined elastic network interface id' 279 | 280 | aws ec2 attach-network-interface \ 281 | --device-index '1' \ 282 | --instance-id "${instanceID}" \ 283 | --network-interface-id "${elasticNetworkInterfaceID}" \ 284 | --no-cli-pager 285 | } 286 | 287 | function getAvailableElasticPublicIP() 288 | { 289 | local -r region="${1}" 290 | local -r elasticPublicIPs=("${@:2}") 291 | 292 | local i=0 293 | 294 | for ((i = 0; i < ${#elasticPublicIPs[@]}; i = i + 1)) 295 | do 296 | if [[ "$(getEC2ElasticAllocationIDByElasticPublicIP "${region}" "${elasticPublicIPs[i]}")" != '' && 297 | "$(getEC2ElasticAssociationIDByElasticPublicIP "${region}" "${elasticPublicIPs[i]}")" = '' ]] 298 | then 299 | echo "${elasticPublicIPs[i]}" 300 | i="$((${#elasticPublicIPs[@]}))" 301 | fi 302 | done 303 | } 304 | 305 | function getEC2ElasticAllocationIDByElasticPublicIP() 306 | { 307 | local region="${1}" 308 | local -r elasticPublicIP="${2}" 309 | 310 | # Set Default Value 311 | 312 | if [[ "$(isEmptyString "${region}")" = 'true' ]] 313 | then 314 | region="$(getInstanceRegion 'false')" 315 | fi 316 | 317 | # Validate Values 318 | 319 | checkValidRegion "${region}" 320 | checkNonEmptyString "${elasticPublicIP}" 'undefined elastic public ip' 321 | 322 | # Get EC2 Elastic Allocation ID 323 | 324 | aws ec2 describe-addresses \ 325 | --no-cli-pager \ 326 | --output 'text' \ 327 | --public-ips "${elasticPublicIP}" \ 328 | --query 'Addresses[0].[AllocationId]' \ 329 | --region "${region}" \ 330 | 2> '/dev/null' 331 | } 332 | 333 | function getEC2ElasticAssociationIDByElasticPublicIP() 334 | { 335 | local region="${1}" 336 | local -r elasticPublicIP="${2}" 337 | 338 | # Set Default Value 339 | 340 | if [[ "$(isEmptyString "${region}")" = 'true' ]] 341 | then 342 | region="$(getInstanceRegion 'false')" 343 | fi 344 | 345 | # Validate 
Values 346 | 347 | checkValidRegion "${region}" 348 | checkNonEmptyString "${elasticPublicIP}" 'undefined elastic public ip' 349 | 350 | # Get EC2 Elastic Association ID 351 | 352 | aws ec2 describe-addresses \ 353 | --no-cli-pager \ 354 | --output 'text' \ 355 | --public-ips "${elasticPublicIP}" \ 356 | --query 'Addresses[0].[AssociationId]' \ 357 | --region "${region}" \ 358 | 2> '/dev/null' | 359 | grep -i -v '^None$' 360 | } 361 | 362 | function getEC2PrivateIpAddressByInstanceID() 363 | { 364 | local region="${1}" 365 | local -r instanceID="${2}" 366 | 367 | # Set Default Value 368 | 369 | if [[ "$(isEmptyString "${region}")" = 'true' ]] 370 | then 371 | region="$(getInstanceRegion 'false')" 372 | fi 373 | 374 | # Get Private IP 375 | 376 | if [[ "$(isEmptyString "${instanceID}")" = 'true' ]] 377 | then 378 | curl -s --retry 12 --retry-delay 5 'http://instance-data/latest/meta-data/local-ipv4' 379 | else 380 | aws ec2 describe-instances \ 381 | --instance-id "${instanceID}" \ 382 | --no-cli-pager \ 383 | --output 'text' \ 384 | --query 'Reservations[*].Instances[*].PrivateIpAddress' \ 385 | --region "${region}" 386 | fi 387 | } 388 | 389 | function getEC2PrivateIpAddresses() 390 | { 391 | local namePattern="${1}" 392 | local excludeCurrentInstance="${2}" 393 | local vpcID="${3}" 394 | local region="${4}" 395 | 396 | # Set Default Values 397 | 398 | if [[ "$(isEmptyString "${namePattern}")" = 'true' ]] 399 | then 400 | namePattern='*' 401 | fi 402 | 403 | if [[ "${excludeCurrentInstance}" != 'true' ]] 404 | then 405 | excludeCurrentInstance='false' 406 | fi 407 | 408 | if [[ "$(isEmptyString "${vpcID}")" = 'true' ]] 409 | then 410 | vpcID="$(getInstanceVPCID)" 411 | fi 412 | 413 | if [[ "$(isEmptyString "${region}")" = 'true' ]] 414 | then 415 | region="$(getInstanceRegion 'false')" 416 | fi 417 | 418 | # Get Instances 419 | 420 | local -r instances=($( 421 | aws ec2 describe-instances \ 422 | --filters \ 423 | 'Name=instance-state-name,Values=pending,running' \ 424 | "Name=tag:Name,Values=${namePattern}" \ 425 | "Name=vpc-id,Values=${vpcID}" \ 426 | --no-cli-pager \ 427 | --output 'text' \ 428 | --query 'Reservations[*].Instances[*].PrivateIpAddress' \ 429 | --region "${region}" 430 | )) 431 | 432 | if [[ "${excludeCurrentInstance}" = 'true' ]] 433 | then 434 | excludeElementFromArray "$(getEC2PrivateIpAddressByInstanceID '' '')" "${instances[@]}" 435 | else 436 | echo "${instances[@]}" 437 | fi 438 | } 439 | 440 | function getKeyPairFingerPrintByName() 441 | { 442 | local -r keyPairName="${1}" 443 | 444 | checkNonEmptyString "${keyPairName}" 'undefined key pair name' 445 | 446 | aws ec2 describe-key-pairs \ 447 | --key-name "${keyPairName}" \ 448 | --no-cli-pager \ 449 | --output 'text' \ 450 | --query 'KeyPairs[0].[KeyFingerprint]' \ 451 | 2> '/dev/null' | 452 | grep -E -v '^None$' 453 | } 454 | 455 | function getLatestAMIIDByAMINamePattern() 456 | { 457 | local -r amiIsPublic="${1}" 458 | local -r amiNamePattern="${2}" 459 | 460 | checkNonEmptyString "${amiIsPublic}" 'undefined ami is public' 461 | checkNonEmptyString "${amiNamePattern}" 'undefined ami name pattern' 462 | 463 | aws ec2 describe-images \ 464 | --filters \ 465 | 'Name=architecture,Values=x86_64' \ 466 | 'Name=image-type,Values=machine' \ 467 | "Name=is-public,Values=${amiIsPublic}" \ 468 | "Name=name,Values=${amiNamePattern}" \ 469 | 'Name=state,Values=available' \ 470 | --no-cli-pager \ 471 | --output 'text' \ 472 | --query 'sort_by(Images, &CreationDate)[-1].ImageId' | 473 | grep -E -v '^None$' 474 | } 475 | 476 | 
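# ------------------------------------------------------------------------------
# Editor's note: illustrative usage sketch, not part of the original library.
# The Elastic IP helpers above are meant to be composed at instance boot time,
# e.g. to claim one address from a fixed pool. The pool values and AMI name
# pattern below are hypothetical; empty region/instance-id arguments fall back
# to the instance metadata service, so this only works on an EC2 instance.
#
#     eipPool=('52.0.0.10' '52.0.0.11' '52.0.0.12')
#     associateAvailableElasticPublicIPToInstanceID '' '' "${eipPool[@]}"
#
#     # Or pick the newest private AMI matching a name pattern for a launch:
#     amiID="$(getLatestAMIIDByAMINamePattern 'false' 'my-base-image-*')"
# ------------------------------------------------------------------------------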
function getSecurityGroupIDByName() 477 | { 478 | local -r securityGroupName="${1}" 479 | 480 | checkNonEmptyString "${securityGroupName}" 'undefined security group name' 481 | 482 | aws ec2 describe-security-groups \ 483 | --filters "Name=group-name,Values=${securityGroupName}" \ 484 | --no-cli-pager \ 485 | --output 'text' \ 486 | --query 'SecurityGroups[0].[GroupId]' | 487 | grep -E -v '^None$' 488 | } 489 | 490 | function getSecurityGroupIDsByNames() 491 | { 492 | local -r securityGroupNames=("${@}") 493 | 494 | local securityGroupIDs='' 495 | local securityGroupName='' 496 | 497 | for securityGroupName in "${securityGroupNames[@]}" 498 | do 499 | local securityGroupID='' 500 | securityGroupID="$(getSecurityGroupIDByName "${securityGroupName}")" 501 | 502 | checkNonEmptyString "${securityGroupID}" "security group name '${securityGroupName}' not found" 503 | 504 | securityGroupIDs="$(printf '%s\n%s' "${securityGroupIDs}" "${securityGroupID}")" 505 | done 506 | 507 | echo "${securityGroupIDs}" 508 | } 509 | 510 | function revokeSecurityGroupEgress() 511 | { 512 | local -r securityGroupID="${1}" 513 | local -r securityGroupName="${2}" 514 | 515 | checkNonEmptyString "${securityGroupID}" 'undefined security group ID' 516 | checkNonEmptyString "${securityGroupName}" 'undefined security group name' 517 | 518 | local -r ipPermissionsEgress="$( 519 | aws ec2 describe-security-groups \ 520 | --filters "Name=group-name,Values=${securityGroupName}" \ 521 | --no-cli-pager \ 522 | --output 'json' \ 523 | --query 'SecurityGroups[0].[IpPermissionsEgress][0]' 524 | )" 525 | 526 | if [[ "$(isEmptyString "${ipPermissionsEgress}")" = 'false' && "${ipPermissionsEgress}" != '[]' ]] 527 | then 528 | aws ec2 revoke-security-group-egress \ 529 | --group-id "${securityGroupID}" \ 530 | --ip-permissions "${ipPermissionsEgress}" \ 531 | --no-cli-pager 532 | fi 533 | } 534 | 535 | function revokeSecurityGroupIngress() 536 | { 537 | local -r securityGroupID="${1}" 538 | local -r securityGroupName="${2}" 539 | 540 | checkNonEmptyString "${securityGroupID}" 'undefined security group ID' 541 | checkNonEmptyString "${securityGroupName}" 'undefined security group name' 542 | 543 | local -r ipPermissions="$( 544 | aws ec2 describe-security-groups \ 545 | --filters "Name=group-name,Values=${securityGroupName}" \ 546 | --no-cli-pager \ 547 | --output 'json' \ 548 | --query 'SecurityGroups[0].[IpPermissions][0]' 549 | )" 550 | 551 | if [[ "$(isEmptyString "${ipPermissions}")" = 'false' && "${ipPermissions}" != '[]' ]] 552 | then 553 | aws ec2 revoke-security-group-ingress \ 554 | --group-id "${securityGroupID}" \ 555 | --no-cli-pager \ 556 | --ip-permissions "${ipPermissions}" \ 557 | --no-cli-pager 558 | fi 559 | } 560 | 561 | function updateInstanceName() 562 | { 563 | local -r instanceName="${1}" 564 | 565 | header 'UPDATING INSTANCE NAME' 566 | 567 | checkNonEmptyString "${instanceName}" 'undefined instance name' 568 | 569 | info "${instanceName}" 570 | 571 | aws ec2 create-tags \ 572 | --no-cli-pager \ 573 | --region "$(getInstanceRegion 'false')" \ 574 | --resources "$(getInstanceID 'false')" \ 575 | --tags "Key='Name',Value='${instanceName}'" 576 | } 577 | 578 | ##################### 579 | # GENERAL UTILITIES # 580 | ##################### 581 | 582 | function checkValidRegion() 583 | { 584 | local -r region="${1}" 585 | 586 | if [[ "$(isValidRegion "${region}")" = 'false' ]] 587 | then 588 | fatal "\nFATAL : invalid region '${region}'" 589 | fi 590 | } 591 | 592 | function getAllowedRegions() 593 | { 594 | echo 
'af-south-1 ap-east-1 ap-northeast-1 ap-northeast-2 ap-northeast-3 ap-south-1 ap-south-2 ap-southeast-1 ap-southeast-2 ap-southeast-3 ap-southeast-4 ap-southeast-5 ca-central-1 ca-west-1 cn-north-1 cn-northwest-1 eu-central-1 eu-central-2 eu-north-1 eu-south-1 eu-south-2 eu-west-1 eu-west-2 eu-west-3 il-central-1 me-central-1 me-south-1 sa-east-1 us-east-1 us-east-2 us-gov-east-1 us-gov-west-1 us-west-1 us-west-2' 595 | } 596 | 597 | function getRegionFromRecordSetAliasTargetDNSName() 598 | { 599 | local -r recordSetAliasTargetDNSName="${1}" 600 | 601 | # Regions 602 | 603 | local -r allowedRegions=($(getAllowedRegions)) 604 | local region='' 605 | 606 | for region in "${allowedRegions[@]}" 607 | do 608 | if [[ "$(grep -F -i -o "${region}" <<< "${recordSetAliasTargetDNSName}")" != '' ]] 609 | then 610 | echo "${region}" && return 0 611 | fi 612 | done 613 | } 614 | 615 | function getShortRegionName() 616 | { 617 | local -r region="${1}" 618 | 619 | checkValidRegion "${region}" 620 | 621 | if [[ "${region}" = 'af-south-1' ]] 622 | then 623 | echo 'afs1' 624 | elif [[ "${region}" = 'ap-east-1' ]] 625 | then 626 | echo 'ape1' 627 | elif [[ "${region}" = 'ap-northeast-1' ]] 628 | then 629 | echo 'apne1' 630 | elif [[ "${region}" = 'ap-northeast-2' ]] 631 | then 632 | echo 'apne2' 633 | elif [[ "${region}" = 'ap-northeast-3' ]] 634 | then 635 | echo 'apne3' 636 | elif [[ "${region}" = 'ap-south-1' ]] 637 | then 638 | echo 'aps1' 639 | elif [[ "${region}" = 'ap-south-2' ]] 640 | then 641 | echo 'aps2' 642 | elif [[ "${region}" = 'ap-southeast-1' ]] 643 | then 644 | echo 'apse1' 645 | elif [[ "${region}" = 'ap-southeast-2' ]] 646 | then 647 | echo 'apse2' 648 | elif [[ "${region}" = 'ap-southeast-3' ]] 649 | then 650 | echo 'apse3' 651 | elif [[ "${region}" = 'ap-southeast-4' ]] 652 | then 653 | echo 'apse4' 654 | elif [[ "${region}" = 'ap-southeast-5' ]] 655 | then 656 | echo 'apse5' 657 | elif [[ "${region}" = 'ca-central-1' ]] 658 | then 659 | echo 'cac1' 660 | elif [[ "${region}" = 'ca-west-1' ]] 661 | then 662 | echo 'caw1' 663 | elif [[ "${region}" = 'cn-north-1' ]] 664 | then 665 | echo 'cnn1' 666 | elif [[ "${region}" = 'cn-northwest-1' ]] 667 | then 668 | echo 'cnnw1' 669 | elif [[ "${region}" = 'eu-central-1' ]] 670 | then 671 | echo 'euc1' 672 | elif [[ "${region}" = 'eu-central-2' ]] 673 | then 674 | echo 'euc2' 675 | elif [[ "${region}" = 'eu-north-1' ]] 676 | then 677 | echo 'eun1' 678 | elif [[ "${region}" = 'eu-south-1' ]] 679 | then 680 | echo 'eus1' 681 | elif [[ "${region}" = 'eu-south-2' ]] 682 | then 683 | echo 'eus2' 684 | elif [[ "${region}" = 'eu-west-1' ]] 685 | then 686 | echo 'euw1' 687 | elif [[ "${region}" = 'eu-west-2' ]] 688 | then 689 | echo 'euw2' 690 | elif [[ "${region}" = 'eu-west-3' ]] 691 | then 692 | echo 'euw3' 693 | elif [[ "${region}" = 'il-central-1' ]] 694 | then 695 | echo 'ilc1' 696 | elif [[ "${region}" = 'me-central-1' ]] 697 | then 698 | echo 'mec1' 699 | elif [[ "${region}" = 'me-south-1' ]] 700 | then 701 | echo 'mes1' 702 | elif [[ "${region}" = 'sa-east-1' ]] 703 | then 704 | echo 'sae1' 705 | elif [[ "${region}" = 'us-east-1' ]] 706 | then 707 | echo 'use1' 708 | elif [[ "${region}" = 'us-east-2' ]] 709 | then 710 | echo 'use2' 711 | elif [[ "${region}" = 'us-gov-east-1' ]] 712 | then 713 | echo 'usgove1' 714 | elif [[ "${region}" = 'us-gov-west-1' ]] 715 | then 716 | echo 'usgovw1' 717 | elif [[ "${region}" = 'us-west-1' ]] 718 | then 719 | echo 'usw1' 720 | elif [[ "${region}" = 'us-west-2' ]] 721 | then 722 | echo 'usw2' 723 | fi 724 | } 
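# ------------------------------------------------------------------------------
# Editor's note: illustrative usage sketch, not part of the original library.
# The region helpers above validate a region and abbreviate it, which is handy
# when deriving resource names; the 'my-app' prefix below is hypothetical.
#
#     region="$(getInstanceRegion 'false')"            # e.g. 'us-west-2'
#     checkValidRegion "${region}"                     # calls fatal() if invalid
#     shortRegion="$(getShortRegionName "${region}")"  # e.g. 'usw2'
#     bucketName="my-app-${shortRegion}"
# ------------------------------------------------------------------------------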
725 | 726 | function isValidRegion() 727 | { 728 | local -r region="${1}" 729 | 730 | local -r allowedRegions=($(getAllowedRegions)) 731 | 732 | isElementInArray "${region}" "${allowedRegions[@]}" 733 | } 734 | 735 | function unzipAWSS3RemoteFile() 736 | { 737 | local -r downloadURL="${1}" 738 | local -r installFolder="${2}" 739 | local extension="${3}" 740 | 741 | # Find Extension 742 | 743 | local exExtension='' 744 | 745 | if [[ "$(isEmptyString "${extension}")" = 'true' ]] 746 | then 747 | extension="$(getFileExtension "${downloadURL}")" 748 | exExtension="$(rev <<< "${downloadURL}" | cut -d '.' -f 1-2 | rev)" 749 | fi 750 | 751 | # Unzip 752 | 753 | if [[ "$(grep -i '^tgz$' <<< "${extension}")" != '' || "$(grep -i '^tar\.gz$' <<< "${extension}")" != '' || "$(grep -i '^tar\.gz$' <<< "${exExtension}")" != '' ]] 754 | then 755 | debug "Downloading '${downloadURL}'\n" 756 | 757 | aws s3 cp "${downloadURL}" - | tar -C "${installFolder}" -x -z --strip 1 || 758 | fatal "\nFATAL : '${downloadURL}' does not exist or authentication failed" 759 | else 760 | fatal "\nFATAL : file extension '${extension}' not supported" 761 | fi 762 | } 763 | 764 | ################# 765 | # IAM UTILITIES # 766 | ################# 767 | 768 | function cloneIAMRole() 769 | { 770 | local -r existIAMRoleName="${1}" 771 | local -r newIAMRoleName="${2}" 772 | 773 | if [[ "$(existIAMRole "${existIAMRoleName}")" = 'false' ]] 774 | then 775 | fatal "\nFATAL : existing iam role '${existIAMRoleName}' not found" 776 | fi 777 | 778 | if [[ "$(existIAMRole "${newIAMRoleName}")" = 'true' ]] 779 | then 780 | fatal "\nFATAL : new iam role '${newIAMRoleName}' found" 781 | fi 782 | 783 | # Temp File Path 784 | 785 | local -r policyTempFilePath="$(getTemporaryFile)" 786 | 787 | # Get Exist IAM Role Trust Relationships 788 | 789 | aws iam get-role \ 790 | --no-cli-pager \ 791 | --output 'json' \ 792 | --role-name "${existIAMRoleName}" | 793 | jq \ 794 | --compact-output \ 795 | --raw-output \ 796 | '.["Role"] | .["AssumeRolePolicyDocument"] // empty' > "${policyTempFilePath}" || 797 | rm -f "${policyTempFilePath}" 798 | 799 | # Create New IAM Role Using Exist IAM Role Trust Relationships 800 | 801 | local -r newIAMRole="$( 802 | aws iam create-role \ 803 | --assume-role-policy-document "file://${policyTempFilePath}" \ 804 | --no-cli-pager \ 805 | --output 'json' \ 806 | --role-name "${newIAMRoleName}" 807 | )" 808 | 809 | # Get Exist Inline Policies And Put Inline Policies 810 | 811 | local -r existInlinePolicyNames="$( 812 | aws iam list-role-policies \ 813 | --no-cli-pager \ 814 | --output 'json' \ 815 | --role-name "${existIAMRoleName}" | 816 | jq \ 817 | --compact-output \ 818 | --raw-output \ 819 | '.["PolicyNames"] | .[] // empty' 820 | )" 821 | 822 | local existInlinePolicyName='' 823 | 824 | for existInlinePolicyName in ${existInlinePolicyNames[@]} 825 | do 826 | local existInlineRolePolicy="$( 827 | aws iam get-role-policy \ 828 | --no-cli-pager \ 829 | --output 'json' \ 830 | --policy-name "${existInlinePolicyName}" \ 831 | --role-name "${existIAMRoleName}" 832 | )" 833 | 834 | jq --compact-output --raw-output '.["PolicyDocument"] // empty' <<< "${existInlineRolePolicy}" > "${policyTempFilePath}" || 835 | rm -f "${policyTempFilePath}" 836 | 837 | aws iam put-role-policy \ 838 | --no-cli-pager \ 839 | --output 'json' \ 840 | --policy-document "file://${policyTempFilePath}" \ 841 | --policy-name "$(jq --compact-output --raw-output '.["PolicyName"] // empty' <<< "${existInlineRolePolicy}")" \ 842 | --role-name 
"${newIAMRoleName}" || 843 | rm -f "${policyTempFilePath}" 844 | done 845 | 846 | rm -f "${policyTempFilePath}" 847 | 848 | # Get Exist Managed Policies And Attach Managed Policies 849 | 850 | local -r managedPolicyArns="$( 851 | aws iam list-attached-role-policies \ 852 | --no-cli-pager \ 853 | --output 'json' \ 854 | --role-name "${existIAMRoleName}" | 855 | jq \ 856 | --compact-output \ 857 | --raw-output \ 858 | '.["AttachedPolicies"] | .[] | .["PolicyArn"] // empty' \ 859 | )" 860 | 861 | local managedPolicyArn='' 862 | 863 | for managedPolicyArn in ${managedPolicyArns[@]} 864 | do 865 | aws iam attach-role-policy \ 866 | --no-cli-pager \ 867 | --policy-arn "${managedPolicyArn}" \ 868 | --role-name "${newIAMRoleName}" 869 | done 870 | 871 | # Display New IAM Role 872 | 873 | jq --compact-output --raw-output --sort-keys '. // empty' <<< "${newIAMRole}" 874 | } 875 | 876 | function createIAMRole() 877 | { 878 | local -r iamRoleName="${1}" 879 | 880 | if [[ "$(existIAMRole "${iamRoleName}")" = 'true' ]] 881 | then 882 | fatal "\nFATAL : iam role '${iamRoleName}' found" 883 | else 884 | header "CREATING IAM ROLE ${iamRoleName}" 885 | 886 | local -r policyTempFilePath="$(getTemporaryFile)" 887 | 888 | echo '{"Statement":[{"Action":"sts:AssumeRole","Effect":"Allow","Principal":{"Service":"ec2.amazonaws.com"}}],"Version":"2012-10-17"}' > "${policyTempFilePath}" 889 | 890 | aws iam create-role \ 891 | --assume-role-policy-document "file://${policyTempFilePath}" \ 892 | --no-cli-pager \ 893 | --output 'json' \ 894 | --role-name "${iamRoleName}" | 895 | jq --raw-output --sort-keys '. // empty' || rm -f "${policyTempFilePath}" 896 | 897 | rm -f "${policyTempFilePath}" 898 | fi 899 | } 900 | 901 | function deleteIAMRole() 902 | { 903 | local -r iamRoleName="${1}" 904 | 905 | if [[ "$(existIAMRole "${iamRoleName}")" = 'true' ]] 906 | then 907 | header "DELETING IAM ROLE ${iamRoleName}" 908 | 909 | deleteIAMRoleInlinePolicies "${iamRoleName}" 910 | detachIAMRolePolicies "${iamRoleName}" 911 | removeIAMRoleFromInstanceProfile "${iamRoleName}" 912 | 913 | aws iam delete-role \ 914 | --no-cli-pager \ 915 | --output 'json' \ 916 | --role-name "${iamRoleName}" 917 | 918 | echo -e "\n\033[1;32mdeleted iam role\033[0m '\033[1;34m${iamRoleName}\033[0m'" 919 | fi 920 | } 921 | 922 | function deleteIAMRoleInlinePolicies() 923 | { 924 | local -r iamRoleName="${1}" 925 | 926 | local -r policies=($( 927 | aws iam list-role-policies \ 928 | --no-cli-pager \ 929 | --output 'json' \ 930 | --role-name "${iamRoleName}" | 931 | jq \ 932 | --compact-output \ 933 | --raw-output \ 934 | '.["PolicyNames"] | .[] // empty') 935 | ) 936 | 937 | if [[ "${#policies[@]}" -gt '0' ]] 938 | then 939 | info 'deleting inline policies' 940 | 941 | local policy='' 942 | 943 | for policy in "${policies[@]}" 944 | do 945 | aws iam delete-role-policy \ 946 | --no-cli-pager \ 947 | --output 'json' \ 948 | --role-name "${iamRoleName}" \ 949 | --policy-name "${policy}" 950 | 951 | echo -e " deleted '\033[1;35m${policy}\033[0m'" 952 | done 953 | fi 954 | } 955 | 956 | function detachIAMRolePolicies() 957 | { 958 | local -r iamRoleName="${1}" 959 | 960 | local -r policyARNs=($( 961 | aws iam list-attached-role-policies \ 962 | --no-cli-pager \ 963 | --output 'json' \ 964 | --role-name "${iamRoleName}" | 965 | jq \ 966 | --compact-output \ 967 | --raw-output \ 968 | '.["AttachedPolicies"] | .[] | .["PolicyArn"] // empty') 969 | ) 970 | 971 | if [[ "${#policyARNs[@]}" -gt '0' ]] 972 | then 973 | info '\ndetaching policies' 974 | 975 | local 
policyARN='' 976 | 977 | for policyARN in "${policyARNs[@]}" 978 | do 979 | aws iam detach-role-policy \ 980 | --no-cli-pager \ 981 | --output 'json' \ 982 | --role-name "${iamRoleName}" \ 983 | --policy-arn "${policyARN}" 984 | 985 | echo -e " detached '\033[1;35m${policyARN}\033[0m'" 986 | done 987 | fi 988 | } 989 | 990 | function removeIAMRoleFromInstanceProfile() 991 | { 992 | local -r iamRoleName="${1}" 993 | 994 | local -r instanceProfiles=($( 995 | aws iam list-instance-profiles-for-role \ 996 | --no-cli-pager \ 997 | --output 'json' \ 998 | --role-name "${iamRoleName}" | 999 | jq \ 1000 | --compact-output \ 1001 | --raw-output \ 1002 | '.["InstanceProfiles"] | map(.["InstanceProfileName"])[]') 1003 | ) 1004 | 1005 | if [[ "${#instanceProfiles[@]}" -gt '0' ]] 1006 | then 1007 | info '\nremoving role from instance profiles' 1008 | 1009 | local instanceProfile='' 1010 | 1011 | for instanceProfile in "${instanceProfiles[@]}" 1012 | do 1013 | aws iam remove-role-from-instance-profile \ 1014 | --instance-profile-name "${instanceProfile}" \ 1015 | --no-cli-pager \ 1016 | --role-name "${iamRoleName}" 1017 | 1018 | echo -e " removed role '\033[1;34m${iamRoleName}\033[0m' from instance profile '\033[1;35m${instanceProfile}\033[0m'" 1019 | done 1020 | fi 1021 | } 1022 | 1023 | function existIAMRole() 1024 | { 1025 | local -r iamRoleName="${1}" 1026 | 1027 | invertTrueFalseString "$(isEmptyString "$(aws iam get-role --no-cli-pager --role-name "${iamRoleName}" 2> '/dev/null')")" 1028 | } 1029 | 1030 | ########################### 1031 | # INSTANCE DATA UTILITIES # 1032 | ########################### 1033 | 1034 | function getInstanceAvailabilityZone() 1035 | { 1036 | curl -s --retry 12 --retry-delay 5 'http://instance-data/latest/meta-data/placement/availability-zone' 1037 | } 1038 | 1039 | function getInstanceIAMRole() 1040 | { 1041 | curl -s --retry 12 --retry-delay 5 'http://instance-data/latest/meta-data/iam/info' | 1042 | jq \ 1043 | --compact-output \ 1044 | --raw-output \ 1045 | --sort-keys \ 1046 | '.["InstanceProfileArn"] // empty' | 1047 | cut -d '/' -f 2 1048 | } 1049 | 1050 | function getInstanceID() 1051 | { 1052 | local -r idOnly="${1}" 1053 | 1054 | local -r fullInstanceID="$(curl -s --retry 12 --retry-delay 5 'http://instance-data/latest/meta-data/instance-id')" 1055 | 1056 | if [[ "${idOnly}" = 'true' ]] 1057 | then 1058 | cut -d '-' -f 2 <<< "${fullInstanceID}" 1059 | else 1060 | echo "${fullInstanceID}" 1061 | fi 1062 | } 1063 | 1064 | function getInstanceMACAddress() 1065 | { 1066 | curl -s --retry 12 --retry-delay 5 'http://instance-data/latest/meta-data/mac' 1067 | } 1068 | 1069 | function getInstancePublicIPV4() 1070 | { 1071 | curl -s --retry 12 --retry-delay 5 'http://instance-data/latest/meta-data/public-ipv4' 1072 | } 1073 | 1074 | function getInstanceRegion() 1075 | { 1076 | local -r shortVersion="${1}" 1077 | 1078 | local -r availabilityZone="$(getInstanceAvailabilityZone)" 1079 | 1080 | checkNonEmptyString "${availabilityZone}" 'undefined availabilityZone' 1081 | 1082 | local -r fullRegionName="${availabilityZone:0:${#availabilityZone} - 1}" 1083 | 1084 | if [[ "${shortVersion}" = 'true' ]] 1085 | then 1086 | getShortRegionName "${fullRegionName}" 1087 | else 1088 | echo "${fullRegionName}" 1089 | fi 1090 | } 1091 | 1092 | function getInstanceSubnetID() 1093 | { 1094 | curl -s --retry 12 --retry-delay 5 "http://instance-data/latest/meta-data/network/interfaces/macs/$(getInstanceMACAddress)/subnet-id" 1095 | } 1096 | 1097 | function getInstanceUserDataValue() 1098 | 
{ 1099 | local -r key="$(escapeGrepSearchPattern "${1}")" 1100 | 1101 | trimString "$( 1102 | curl -s --retry 12 --retry-delay 5 'http://instance-data/latest/user-data' | 1103 | grep -E -o "^\s*${key}\s*=\s*.*$" | 1104 | tail -1 | 1105 | awk -F '=' '{ print $2 }' 1106 | )" 1107 | } 1108 | 1109 | function getInstanceVPCID() 1110 | { 1111 | curl -s --retry 12 --retry-delay 5 "http://instance-data/latest/meta-data/network/interfaces/macs/$(getInstanceMACAddress)/vpc-id" 1112 | } 1113 | 1114 | ########################### 1115 | # LOAD BALANCER UTILITIES # 1116 | ########################### 1117 | 1118 | function getAWSELBAccountID() 1119 | { 1120 | local -r region="${1}" 1121 | 1122 | checkValidRegion "${region}" 1123 | 1124 | if [[ "${region}" = 'af-south-1' ]] 1125 | then 1126 | echo '098369216593' 1127 | elif [[ "${region}" = 'ap-east-1' ]] 1128 | then 1129 | echo '754344448648' 1130 | elif [[ "${region}" = 'ap-northeast-1' ]] 1131 | then 1132 | echo '582318560864' 1133 | elif [[ "${region}" = 'ap-northeast-2' ]] 1134 | then 1135 | echo '600734575887' 1136 | elif [[ "${region}" = 'ap-northeast-3' ]] 1137 | then 1138 | echo '383597477331' 1139 | elif [[ "${region}" = 'ap-south-1' ]] 1140 | then 1141 | echo '718504428378' 1142 | elif [[ "${region}" = 'ap-southeast-1' ]] 1143 | then 1144 | echo '114774131450' 1145 | elif [[ "${region}" = 'ap-southeast-2' ]] 1146 | then 1147 | echo '783225319266' 1148 | elif [[ "${region}" = 'ap-southeast-3' ]] 1149 | then 1150 | echo '589379963580' 1151 | elif [[ "${region}" = 'ca-central-1' ]] 1152 | then 1153 | echo '985666609251' 1154 | elif [[ "${region}" = 'cn-north-1' ]] 1155 | then 1156 | echo '638102146993' 1157 | elif [[ "${region}" = 'cn-northwest-1' ]] 1158 | then 1159 | echo '037604701340' 1160 | elif [[ "${region}" = 'eu-central-1' ]] 1161 | then 1162 | echo '054676820928' 1163 | elif [[ "${region}" = 'eu-north-1' ]] 1164 | then 1165 | echo '897822967062' 1166 | elif [[ "${region}" = 'eu-south-1' ]] 1167 | then 1168 | echo '635631232127' 1169 | elif [[ "${region}" = 'eu-west-1' ]] 1170 | then 1171 | echo '156460612806' 1172 | elif [[ "${region}" = 'eu-west-2' ]] 1173 | then 1174 | echo '652711504416' 1175 | elif [[ "${region}" = 'eu-west-3' ]] 1176 | then 1177 | echo '009996457667' 1178 | elif [[ "${region}" = 'me-south-1' ]] 1179 | then 1180 | echo '076674570225' 1181 | elif [[ "${region}" = 'sa-east-1' ]] 1182 | then 1183 | echo '507241528517' 1184 | elif [[ "${region}" = 'us-east-1' ]] 1185 | then 1186 | echo '127311923021' 1187 | elif [[ "${region}" = 'us-east-2' ]] 1188 | then 1189 | echo '033677994240' 1190 | elif [[ "${region}" = 'us-gov-east-1' ]] 1191 | then 1192 | echo '190560391635' 1193 | elif [[ "${region}" = 'us-gov-west-1' ]] 1194 | then 1195 | echo '048591011584' 1196 | elif [[ "${region}" = 'us-west-1' ]] 1197 | then 1198 | echo '027434742980' 1199 | elif [[ "${region}" = 'us-west-2' ]] 1200 | then 1201 | echo '797873946194' 1202 | fi 1203 | } 1204 | 1205 | function getLoadBalancerDNSNameByName() 1206 | { 1207 | local -r loadBalancerName="${1}" 1208 | 1209 | checkNonEmptyString "${loadBalancerName}" 'undefined load balancer name' 1210 | 1211 | aws elb describe-load-balancers \ 1212 | --load-balancer-name "${loadBalancerName}" \ 1213 | --no-cli-pager \ 1214 | --output 'text' \ 1215 | --query 'LoadBalancerDescriptions[*].DNSName' 1216 | } 1217 | 1218 | function isLoadBalancerFromStackName() 1219 | { 1220 | local -r loadBalancerName="${1}" 1221 | local -r stackName="${2}" 1222 | 1223 | checkNonEmptyString "${loadBalancerName}" 
'undefined load balancer name' 1224 | checkNonEmptyString "${stackName}" 'undefined stack name' 1225 | 1226 | local -r loadBalancerStackName="$( 1227 | aws elb describe-tags \ 1228 | --load-balancer-name "${loadBalancerName}" \ 1229 | --no-cli-pager \ 1230 | --output 'json' | 1231 | jq \ 1232 | --arg jqStackName "${stackName}" \ 1233 | --compact-output \ 1234 | --raw-output \ 1235 | --sort-keys \ 1236 | '.["TagDescriptions"] | 1237 | .[] | 1238 | .["Tags"] | 1239 | .[] | 1240 | select(.["Key"] == "aws:cloudformation:stack-name") | 1241 | select(.["Value"] == $jqStackName) // empty' 1242 | )" 1243 | 1244 | if [[ "$(isEmptyString "${loadBalancerStackName}")" = 'false' ]] 1245 | then 1246 | echo 'true' && return 0 1247 | fi 1248 | 1249 | echo 'false' && return 1 1250 | } 1251 | 1252 | function getLoadBalancerTag() 1253 | { 1254 | local -r tags="${1}" 1255 | local -r key="${2}" 1256 | 1257 | jq \ 1258 | --arg jqKey "${key}" \ 1259 | --compact-output \ 1260 | --raw-output \ 1261 | --sort-keys \ 1262 | '.["TagDescriptions"] | 1263 | .[] | 1264 | .["Tags"] | 1265 | map(select(.["Key"] == $jqKey))[] | 1266 | .["Value"] // empty' \ 1267 | <<< "${tags}" 1268 | } 1269 | 1270 | function getLoadBalancerTags() 1271 | { 1272 | local -r loadBalancerName="${1}" 1273 | 1274 | checkNonEmptyString "${loadBalancerName}" 'undefined load balancer name' 1275 | 1276 | aws elb describe-tags \ 1277 | --no-cli-pager \ 1278 | --output 'json' \ 1279 | --load-balancer-name "${loadBalancerName}" 1280 | } 1281 | 1282 | ###################### 1283 | # ROUTE-53 UTILITIES # 1284 | ###################### 1285 | 1286 | function getHostedZoneIDByDomainName() 1287 | { 1288 | local -r hostedZoneDomainName="${1}" 1289 | 1290 | checkNonEmptyString "${hostedZoneDomainName}" 'undefined hosted zone domain name' 1291 | 1292 | aws route53 list-hosted-zones-by-name \ 1293 | --dns-name "${hostedZoneDomainName}" \ 1294 | --no-cli-pager \ 1295 | --output 'text' \ 1296 | --query 'HostedZones[0].[Id]' | 1297 | grep -E -v '^None$' | 1298 | awk -F '/' '{ print $3 }' 1299 | } 1300 | 1301 | ################ 1302 | # S3 UTILITIES # 1303 | ################ 1304 | 1305 | function existS3Bucket() 1306 | { 1307 | local -r bucketName="${1}" 1308 | 1309 | isEmptyString "$(aws s3api head-bucket --bucket "${bucketName}" 2>&1)" 1310 | } 1311 | 1312 | ################# 1313 | # STS UTILITIES # 1314 | ################# 1315 | 1316 | function getAWSAccountID() 1317 | { 1318 | aws sts get-caller-identity \ 1319 | --no-cli-pager \ 1320 | --output 'text' \ 1321 | --query 'Account' 1322 | } 1323 | 1324 | ################# 1325 | # VPC UTILITIES # 1326 | ################# 1327 | 1328 | function acceptVPCPeeringConnection() 1329 | { 1330 | local -r vpcPeeringConnectionID="${1}" 1331 | local -r vpcPeeringConnectionName="${2}" 1332 | 1333 | checkNonEmptyString "${vpcPeeringConnectionID}" 'undefined vpc peering connection id' 1334 | 1335 | if [[ "$(isEmptyString "${vpcPeeringConnectionName}")" = 'true' ]] 1336 | then 1337 | header "${vpcPeeringConnectionID}" 1338 | else 1339 | header "${vpcPeeringConnectionID} :: ${vpcPeeringConnectionName}" 1340 | fi 1341 | 1342 | # Accept Connection Request 1343 | 1344 | local -r vpcPeeringConnection="$( 1345 | aws ec2 accept-vpc-peering-connection \ 1346 | --no-cli-pager \ 1347 | --output 'json' \ 1348 | --vpc-peering-connection-id "${vpcPeeringConnectionID}" | 1349 | jq \ 1350 | --compact-output \ 1351 | --raw-output \ 1352 | --sort-keys \ 1353 | '. 
// empty' 1354 | )" 1355 | 1356 | # Update Connection Name 1357 | 1358 | if [[ "$(isEmptyString "${vpcPeeringConnectionName}")" = 'false' ]] 1359 | then 1360 | aws ec2 create-tags \ 1361 | --no-cli-pager \ 1362 | --resources "${vpcPeeringConnectionID}" \ 1363 | --tags "Key=Name,Value=${vpcPeeringConnectionName}" 1364 | fi 1365 | 1366 | # Update Accepter Route Tables 1367 | 1368 | local -r requesterVPCCIDRBlocks="$(jq --compact-output --raw-output '.["VpcPeeringConnection"] | .["RequesterVpcInfo"] | .["CidrBlockSet"] | .[] | .["CidrBlock"] // empty' <<< "${vpcPeeringConnection}")" 1369 | 1370 | local -r accepterVPCID="$(jq --compact-output --raw-output '.["VpcPeeringConnection"] | .["AccepterVpcInfo"] | .["VpcId"] // empty' <<< "${vpcPeeringConnection}")" 1371 | 1372 | local -r accepterRouteTables="$( 1373 | aws ec2 describe-route-tables \ 1374 | --filter "Name=vpc-id,Values=${accepterVPCID}" \ 1375 | --no-cli-pager \ 1376 | --output 'json' 1377 | )" 1378 | 1379 | local -r accepterRouteTableIDs="$(jq --compact-output --raw-output '.["RouteTables"] | .[] | .["RouteTableId"] // empty' <<< "${accepterRouteTables}")" 1380 | 1381 | local accepterRouteTableID='' 1382 | 1383 | for accepterRouteTableID in ${accepterRouteTableIDs[@]} 1384 | do 1385 | local requesterVPCCIDRBlock='' 1386 | 1387 | for requesterVPCCIDRBlock in ${requesterVPCCIDRBlocks[@]} 1388 | do 1389 | echo -e "creating route with requester cidr \033[1;36m${requesterVPCCIDRBlock}\033[0m to route table \033[1;34m${accepterRouteTableID}\033[0m of \033[1;34m${accepterVPCID}\033[0m" 1390 | 1391 | local createRouteResult="$( 1392 | aws ec2 create-route \ 1393 | --destination-cidr-block "${requesterVPCCIDRBlock}" \ 1394 | --no-cli-pager \ 1395 | --output 'text' \ 1396 | --route-table-id "${accepterRouteTableID}" \ 1397 | --vpc-peering-connection-id "${vpcPeeringConnectionID}" 2>&1 | 1398 | tr -d '\n' 1399 | )" 1400 | 1401 | if [[ "${createRouteResult}" = 'True' ]] 1402 | then 1403 | echo -e " \033[1;32mcreated route successfully\033[0m" 1404 | else 1405 | local existVPCPeeringConnectionID="$( 1406 | jq \ 1407 | --arg jqAccepterRouteTableID "${accepterRouteTableID}" \ 1408 | --arg jqRequesterVPCCIDRBlock "${requesterVPCCIDRBlock}" \ 1409 | --compact-output \ 1410 | --raw-output \ 1411 | '.["RouteTables"] | 1412 | .[] | 1413 | select(.["RouteTableId"] == $jqAccepterRouteTableID) | 1414 | .["Routes"] | 1415 | .[] | 1416 | select(.["DestinationCidrBlock"] == $jqRequesterVPCCIDRBlock) | 1417 | .["VpcPeeringConnectionId"] // empty' \ 1418 | <<< "${accepterRouteTables}" 1419 | )" 1420 | 1421 | if [[ "${vpcPeeringConnectionID}" = "${existVPCPeeringConnectionID}" ]] 1422 | then 1423 | warn " WARN : ${createRouteResult}" 1424 | else 1425 | error " ERROR : ${createRouteResult} (${existVPCPeeringConnectionID})" 1426 | fi 1427 | fi 1428 | 1429 | echo 1430 | done 1431 | done 1432 | } 1433 | 1434 | function getAccepterVPCIDByVPCPeeringConnectionID() 1435 | { 1436 | local -r vpcPeeringConnectionID="${1}" 1437 | 1438 | checkNonEmptyString "${vpcPeeringConnectionID}" 'undefined vpc peering connection id' 1439 | 1440 | aws ec2 describe-vpc-peering-connections \ 1441 | --filters "Name=vpc-peering-connection-id,Values=${vpcPeeringConnectionID}" \ 1442 | --no-cli-pager \ 1443 | --output 'json' | 1444 | jq \ 1445 | --compact-output \ 1446 | --raw-output \ 1447 | '.["VpcPeeringConnections"] | .[] | .["AccepterVpcInfo"] | .["VpcId"] // empty' 1448 | } 1449 | 1450 | function getAvailabilityZonesByVPCName() 1451 | { 1452 | local -r vpcName="${1}" 1453 | 1454 | 
checkNonEmptyString "${vpcName}" 'undefined VPC name' 1455 | 1456 | local -r vpcID="$(getVPCIDByName "${vpcName}")" 1457 | 1458 | checkNonEmptyString "${vpcID}" 'undefined VPC ID' 1459 | 1460 | aws ec2 describe-subnets \ 1461 | --filters \ 1462 | 'Name=state,Values=available' \ 1463 | "Name=vpc-id,Values=${vpcID}" \ 1464 | --no-cli-pager \ 1465 | --output 'json' \ 1466 | --query 'Subnets[*].AvailabilityZone' | 1467 | jq \ 1468 | --compact-output \ 1469 | --raw-output \ 1470 | 'unique | 1471 | .[] // empty' 1472 | } 1473 | 1474 | function getCurrentVPCCIDRBlock() 1475 | { 1476 | curl -s --retry 12 --retry-delay 5 "http://instance-data/latest/meta-data/network/interfaces/macs/$(getInstanceMACAddress)/vpc-ipv4-cidr-block" 1477 | } 1478 | 1479 | function getIPV4CIDRByVPCName() 1480 | { 1481 | local -r vpcName="${1}" 1482 | 1483 | checkNonEmptyString "${vpcName}" 'undefined VPC name' 1484 | 1485 | aws ec2 describe-vpcs \ 1486 | --filter "Name=tag:Name,Values=${vpcName}" \ 1487 | --no-cli-pager \ 1488 | --output 'text' \ 1489 | --query 'Vpcs[0].CidrBlock' | 1490 | grep -E -v '^None$' 1491 | } 1492 | 1493 | function getPublicElasticIPs() 1494 | { 1495 | aws ec2 describe-addresses \ 1496 | --no-cli-pager \ 1497 | --output 'text' \ 1498 | --query 'sort_by(Addresses, &PublicIp)[*].[PublicIp]' 1499 | } 1500 | 1501 | function getRequesterCIDRByVPCPeeringConnectionID() 1502 | { 1503 | local -r vpcPeeringConnectionID="${1}" 1504 | 1505 | checkNonEmptyString "${vpcPeeringConnectionID}" 'undefined vpc peering connection id' 1506 | 1507 | aws ec2 describe-vpc-peering-connections \ 1508 | --filters "Name=vpc-peering-connection-id,Values=${vpcPeeringConnectionID}" \ 1509 | --no-cli-pager \ 1510 | --output 'json' | 1511 | jq \ 1512 | --compact-output \ 1513 | --raw-output \ 1514 | '.["VpcPeeringConnections"] | .[] | .["RequesterVpcInfo"] | .["CidrBlock"] // empty' 1515 | } 1516 | 1517 | function getRequesterCIDRSetByVPCPeeringConnectionID() 1518 | { 1519 | local -r vpcPeeringConnectionID="${1}" 1520 | 1521 | checkNonEmptyString "${vpcPeeringConnectionID}" 'undefined vpc peering connection id' 1522 | 1523 | aws ec2 describe-vpc-peering-connections \ 1524 | --filters "Name=vpc-peering-connection-id,Values=${vpcPeeringConnectionID}" \ 1525 | --no-cli-pager \ 1526 | --output 'json' | 1527 | jq \ 1528 | --compact-output \ 1529 | --raw-output \ 1530 | '.["VpcPeeringConnections"] | .[] | .["RequesterVpcInfo"] | .["CidrBlockSet"] | .[] | .["CidrBlock"] // empty' 1531 | } 1532 | 1533 | function getRequesterVPCIDByVPCPeeringConnectionID() 1534 | { 1535 | local -r vpcPeeringConnectionID="${1}" 1536 | 1537 | checkNonEmptyString "${vpcPeeringConnectionID}" 'undefined vpc peering connection id' 1538 | 1539 | aws ec2 describe-vpc-peering-connections \ 1540 | --filters "Name=vpc-peering-connection-id,Values=${vpcPeeringConnectionID}" \ 1541 | --no-cli-pager \ 1542 | --output 'json' | 1543 | jq \ 1544 | --compact-output \ 1545 | --raw-output \ 1546 | '.["VpcPeeringConnections"] | .[] | .["RequesterVpcInfo"] | .["VpcId"] // empty' 1547 | } 1548 | 1549 | function getSubnetIDByName() 1550 | { 1551 | local -r vpcName="${1}" 1552 | local -r subnetName="${2}" 1553 | 1554 | local -r vpcID="$(getVPCIDByName "${vpcName}")" 1555 | 1556 | checkNonEmptyString "${vpcID}" 'undefined VPC ID' 1557 | 1558 | aws ec2 describe-subnets \ 1559 | --filter \ 1560 | "Name=tag:Name,Values=${subnetName}" \ 1561 | "Name=vpc-id,Values=${vpcID}" \ 1562 | --no-cli-pager \ 1563 | --output 'text' \ 1564 | --query 'Subnets[0].[SubnetId]' | 1565 | grep -E -v 
'^None$' 1566 | } 1567 | 1568 | function getSubnetIDsByNames() 1569 | { 1570 | local -r vpcName="${1}" 1571 | local -r subnetNames=("${@:2}") 1572 | 1573 | local subnetIDs='' 1574 | local subnetName='' 1575 | 1576 | for subnetName in "${subnetNames[@]}" 1577 | do 1578 | local subnetID='' 1579 | subnetID="$(getSubnetIDByName "${vpcName}" "${subnetName}")" 1580 | 1581 | checkNonEmptyString "${subnetID}" "subnet name '${subnetName}' not found" 1582 | 1583 | subnetIDs="$(printf '%s\n%s' "${subnetIDs}" "${subnetID}")" 1584 | done 1585 | 1586 | echo "${subnetIDs}" 1587 | } 1588 | 1589 | function getVPCIDByName() 1590 | { 1591 | local -r vpcName="${1}" 1592 | 1593 | checkNonEmptyString "${vpcName}" 'undefined VPC name' 1594 | 1595 | aws ec2 describe-vpcs \ 1596 | --filter "Name=tag:Name,Values=${vpcName}" \ 1597 | --no-cli-pager \ 1598 | --output 'text' \ 1599 | --query 'Vpcs[0].[VpcId]' | 1600 | grep -E -v '^None$' 1601 | } -------------------------------------------------------------------------------- /libraries/util.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | ################### 4 | # ARRAY UTILITIES # 5 | ################### 6 | 7 | function arrayToParameters() 8 | { 9 | local -r array=("${@}") 10 | 11 | local -r string="$(printf "'%s' " "${array[@]}")" 12 | 13 | echo "${string:0:${#string} - 1}" 14 | } 15 | 16 | function arrayToString() 17 | { 18 | local -r array=("${@}") 19 | 20 | arrayToStringWithDelimiter ',' "${array[@]}" 21 | } 22 | 23 | function arrayToStringWithDelimiter() 24 | { 25 | local -r delimiter="${1}" 26 | local -r list=("${@:2}") 27 | 28 | local -r string="$(printf "%s${delimiter}" "${list[@]}")" 29 | 30 | echo "${string:0:${#string} - ${#delimiter}}" 31 | } 32 | 33 | function checkNonEmptyArray() 34 | { 35 | local -r errorMessage="${1}" 36 | local -r array=("${@:2}") 37 | 38 | if [[ "${#array[@]}" -lt '1' ]] 39 | then 40 | if [[ "$(isEmptyString "${errorMessage}")" = 'true' ]] 41 | then 42 | fatal '\nFATAL : empty array detected' 43 | fi 44 | 45 | fatal "\nFATAL : ${errorMessage}" 46 | fi 47 | } 48 | 49 | function excludeElementFromArray() 50 | { 51 | local -r element="${1}" 52 | local array=("${@:2}") 53 | 54 | local i=0 55 | 56 | for ((i = 0; i < ${#array[@]}; i = i + 1)) 57 | do 58 | if [[ "${array[i]}" = "${element}" ]] 59 | then 60 | unset array['${i}'] 61 | fi 62 | done 63 | 64 | echo "${array[@]}" 65 | } 66 | 67 | function isElementInArray() 68 | { 69 | local -r element="${1}" 70 | local -r array=("${@:2}") 71 | 72 | local walker='' 73 | 74 | for walker in "${array[@]}" 75 | do 76 | [[ "${walker}" = "${element}" ]] && echo 'true' && return 0 77 | done 78 | 79 | echo 'false' && return 1 80 | } 81 | 82 | function sortUniqArray() 83 | { 84 | local -r array=("${@}") 85 | 86 | trimString "$(tr ' ' '\n' <<< "${array[@]}" | sort -u | tr '\n' ' ')" 87 | } 88 | 89 | ##################### 90 | # COMPILE UTILITIES # 91 | ##################### 92 | 93 | function compileAndInstallFromSource() 94 | { 95 | local -r downloadURL="${1}" 96 | local -r installFolderPath="${2}" 97 | local -r installFileOrFolderBinPath="${3}" 98 | local -r user="${4}" 99 | 100 | initializeFolder "${installFolderPath}" 101 | 102 | local -r currentWorkingDirectory="$(pwd)" 103 | local -r tempFolder="$(getTemporaryFolder)" 104 | 105 | unzipRemoteFile "${downloadURL}" "${tempFolder}" 106 | cd "${tempFolder}" 107 | "${tempFolder}/configure" --prefix="${installFolderPath}" 108 | make 109 | make install 110 | chown -R 
"${user}:$(getUserGroupName "${user}")" "${installFolderPath}" 111 | symlinkUsrBin "${installFileOrFolderBinPath}" 112 | cd "${currentWorkingDirectory}" 113 | rm -f -r "${tempFolder}" 114 | } 115 | 116 | ####################### 117 | # DATE TIME UTILITIES # 118 | ####################### 119 | 120 | function convertISO8601ToSeconds() 121 | { 122 | local -r time="${1}" 123 | 124 | if [[ "$(isMacOperatingSystem)" = 'true' ]] 125 | then 126 | date -j -u -f '%FT%T' "$(awk -F '.' '{ print $1 }' <<< "${time}" | tr -d 'Z')" +'%s' 127 | elif [[ "$(isAmazonLinuxDistributor)" = 'true' || "$(isCentOSDistributor)" = 'true' || "$(isRedHatDistributor)" = 'true' || "$(isRockyLinuxDistributor)" = 'true' || "$(isUbuntuDistributor)" = 'true' ]] 128 | then 129 | date -d "${time}" +'%s' 130 | else 131 | fatal '\nFATAL : only support Amazon-Linux, CentOS, Mac, RedHat, or Ubuntu OS' 132 | fi 133 | } 134 | 135 | function getISO8601DateTimeNow() 136 | { 137 | date -u +'%Y-%m-%dT%H:%M:%SZ' 138 | } 139 | 140 | function getUTCNowInSeconds() 141 | { 142 | date -u +'%s' 143 | } 144 | 145 | function secondsToReadableTime() 146 | { 147 | local -r time="${1}" 148 | 149 | local -r day="$((time / 60 / 60 / 24))" 150 | local -r hour="$((time / 60 / 60 % 24))" 151 | local -r minute="$((time / 60 % 60))" 152 | local -r second="$((time % 60))" 153 | 154 | if [[ "${day}" = '0' ]] 155 | then 156 | printf '%02d:%02d:%02d' "${hour}" "${minute}" "${second}" 157 | elif [[ "${day}" = '1' ]] 158 | then 159 | printf '%d day and %02d:%02d:%02d' "${day}" "${hour}" "${minute}" "${second}" 160 | else 161 | printf '%d days and %02d:%02d:%02d' "${day}" "${hour}" "${minute}" "${second}" 162 | fi 163 | } 164 | 165 | ######################## 166 | # FILE LOCAL UTILITIES # 167 | ######################## 168 | 169 | function appendToFileIfNotFound() 170 | { 171 | local -r file="${1}" 172 | local -r pattern="${2}" 173 | local -r string="${3}" 174 | local -r patternAsRegex="${4}" 175 | local -r stringAsRegex="${5}" 176 | local -r addNewLine="${6}" 177 | 178 | # Validate Inputs 179 | 180 | checkExistFile "${file}" 181 | checkNonEmptyString "${pattern}" 'undefined pattern' 182 | checkNonEmptyString "${string}" 'undefined string' 183 | checkTrueFalseString "${patternAsRegex}" 184 | checkTrueFalseString "${stringAsRegex}" 185 | 186 | if [[ "${stringAsRegex}" = 'false' ]] 187 | then 188 | checkTrueFalseString "${addNewLine}" 189 | fi 190 | 191 | # Append String 192 | 193 | if [[ "${patternAsRegex}" = 'true' ]] 194 | then 195 | local -r found="$(grep -E -o "${pattern}" "${file}")" 196 | else 197 | local -r found="$(grep -F -o "${pattern}" "${file}")" 198 | fi 199 | 200 | if [[ "$(isEmptyString "${found}")" = 'true' ]] 201 | then 202 | if [[ "${stringAsRegex}" = 'true' ]] 203 | then 204 | echo -e "${string}" >> "${file}" 205 | else 206 | if [[ "${addNewLine}" = 'true' ]] 207 | then 208 | echo >> "${file}" 209 | fi 210 | 211 | echo "${string}" >> "${file}" 212 | fi 213 | fi 214 | } 215 | 216 | function checkExistFile() 217 | { 218 | local -r file="${1}" 219 | local -r errorMessage="${2}" 220 | 221 | if [[ "${file}" = '' || ! -f "${file}" ]] 222 | then 223 | if [[ "$(isEmptyString "${errorMessage}")" = 'true' ]] 224 | then 225 | fatal "\nFATAL : file '${file}' not found" 226 | fi 227 | 228 | fatal "\nFATAL : ${errorMessage}" 229 | fi 230 | } 231 | 232 | function checkExistFolder() 233 | { 234 | local -r folder="${1}" 235 | local -r errorMessage="${2}" 236 | 237 | if [[ "${folder}" = '' || ! 
-d "${folder}" ]] 238 | then 239 | if [[ "$(isEmptyString "${errorMessage}")" = 'true' ]] 240 | then 241 | fatal "\nFATAL : folder '${folder}' not found" 242 | fi 243 | 244 | fatal "\nFATAL : ${errorMessage}" 245 | fi 246 | } 247 | 248 | function checkValidJSONContent() 249 | { 250 | local -r content="${1}" 251 | 252 | if [[ "$(isValidJSONContent "${content}")" = 'false' ]] 253 | then 254 | fatal '\nFATAL : invalid JSON' 255 | fi 256 | } 257 | 258 | function checkValidJSONFile() 259 | { 260 | local -r file="${1}" 261 | 262 | if [[ "$(isValidJSONFile "${file}")" = 'false' ]] 263 | then 264 | fatal "\nFATAL : invalid JSON file '${file}'" 265 | fi 266 | } 267 | 268 | function cleanUpSystemFolders() 269 | { 270 | header 'CLEANING UP SYSTEM FOLDERS' 271 | 272 | local -r folders=( 273 | '/tmp' 274 | '/var/tmp' 275 | ) 276 | 277 | local folder='' 278 | 279 | for folder in "${folders[@]}" 280 | do 281 | echo "Cleaning up folder '${folder}'" 282 | emptyFolder "${folder}" 283 | done 284 | } 285 | 286 | function clearFolder() 287 | { 288 | local -r folderPath="${1}" 289 | 290 | checkExistFolder "${folderPath}" 291 | 292 | rsync --archive --delete "$(getTemporaryFolder)/" "${folderPath}/" 293 | } 294 | 295 | function copyFolderContent() 296 | { 297 | local -r sourceFolder="${1}" 298 | local -r destinationFolder="${2}" 299 | 300 | checkExistFolder "${sourceFolder}" 301 | checkExistFolder "${destinationFolder}" 302 | 303 | find "${sourceFolder}" \ 304 | -mindepth 1 \ 305 | -maxdepth 1 \ 306 | -exec cp -p -r '{}' "${destinationFolder}" \; 307 | } 308 | 309 | function createAbsoluteUsrBin() 310 | { 311 | local -r binFileName="${1}" 312 | local -r sourceFilePath="${2}" 313 | 314 | checkExistFile "${sourceFilePath}" 315 | 316 | mkdir -p '/usr/bin' 317 | printf "#!/bin/bash -e\n\n'%s' \"\${@}\"" "${sourceFilePath}" > "/usr/bin/${binFileName}" 318 | chmod 755 "/usr/bin/${binFileName}" 319 | } 320 | 321 | function createFileFromTemplate() 322 | { 323 | local -r sourceFile="${1}" 324 | local -r destinationFile="${2}" 325 | local -r oldNewData=("${@:3}") 326 | 327 | checkExistFile "${sourceFile}" 328 | checkExistFolder "$(dirname "${destinationFile}")" 329 | 330 | local content='' 331 | content="$(cat "${sourceFile}")" 332 | 333 | local i=0 334 | 335 | for ((i = 0; i < ${#oldNewData[@]}; i = i + 2)) 336 | do 337 | content="$(replaceString "${content}" "${oldNewData[${i}]}" "${oldNewData[${i} + 1]}")" 338 | done 339 | 340 | echo "${content}" > "${destinationFile}" 341 | } 342 | 343 | function createInitFileFromTemplate() 344 | { 345 | local -r serviceName="${1}" 346 | local -r templateFolderPath="${2}" 347 | local -r initConfigDataFromTemplate=("${@:3}") 348 | 349 | createFileFromTemplate \ 350 | "${templateFolderPath}/${serviceName}.service.systemd" \ 351 | "/etc/systemd/system/${serviceName}.service" \ 352 | "${initConfigDataFromTemplate[@]}" 353 | } 354 | 355 | function deleteOldLogs() 356 | { 357 | local logFolderPaths=("${@}") 358 | 359 | header 'DELETING OLD LOGS' 360 | 361 | # Default Log Folder Path 362 | 363 | if [[ "${#logFolderPaths[@]}" -lt '1' ]] 364 | then 365 | logFolderPaths+=('/var/log') 366 | fi 367 | 368 | # Walk Each Log Folder Path 369 | 370 | local i=0 371 | 372 | for ((i = 0; i < ${#logFolderPaths[@]}; i = i + 1)) 373 | do 374 | checkExistFolder "${logFolderPaths[i]}" 375 | 376 | find \ 377 | -L \ 378 | "${logFolderPaths[i]}" \ 379 | -type f \ 380 | \( \ 381 | -regex '.*-[0-9]+' -o \ 382 | -regex '.*\.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]\.log' -o \ 383 | -regex 
'.*\.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]\.txt' -o \ 384 | -regex '.*\.[0-9]+' -o \ 385 | -regex '.*\.[0-9]+\.log' -o \ 386 | -regex '.*\.gz' -o \ 387 | -regex '.*\.log\.[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]T[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]' -o \ 388 | -regex '.*\.old' -o \ 389 | -regex '.*\.xz' \ 390 | \) \ 391 | -delete \ 392 | -print 393 | done 394 | } 395 | 396 | function emptyFolder() 397 | { 398 | local -r folder="${1}" 399 | 400 | checkExistFolder "${folder}" 401 | 402 | find "${folder}" \ 403 | -mindepth 1 \ 404 | -delete 405 | } 406 | 407 | function getFileExtension() 408 | { 409 | local -r string="${1}" 410 | 411 | local -r fullFileName="$(basename "${string}")" 412 | 413 | echo "${fullFileName##*.}" 414 | } 415 | 416 | function getFileName() 417 | { 418 | local -r string="${1}" 419 | 420 | local -r fullFileName="$(basename "${string}")" 421 | 422 | echo "${fullFileName%.*}" 423 | } 424 | 425 | function getTemporaryFile() 426 | { 427 | local extension="${1}" 428 | 429 | if [[ "$(isEmptyString "${extension}")" = 'false' && "$(grep -i -o "^." <<< "${extension}")" != '.' ]] 430 | then 431 | extension=".${extension}" 432 | fi 433 | 434 | mktemp "$(getTemporaryFolderRoot)/$(date +'%Y%m%d-%H%M%S')-XXXXXXXXXX${extension}" 435 | } 436 | 437 | function getTemporaryFolder() 438 | { 439 | mktemp -d "$(getTemporaryFolderRoot)/$(date +'%Y%m%d-%H%M%S')-XXXXXXXXXX" 440 | } 441 | 442 | function getTemporaryFolderRoot() 443 | { 444 | local temporaryFolder='/tmp' 445 | 446 | if [[ "$(isEmptyString "${TMPDIR}")" = 'false' ]] 447 | then 448 | temporaryFolder="$(formatPath "${TMPDIR}")" 449 | fi 450 | 451 | echo "${temporaryFolder}" 452 | } 453 | 454 | function initializeFolder() 455 | { 456 | local -r folder="${1}" 457 | 458 | if [[ -d "${folder}" ]] 459 | then 460 | emptyFolder "${folder}" 461 | else 462 | mkdir -p "${folder}" 463 | fi 464 | } 465 | 466 | function isEmptyFolder() 467 | { 468 | local -r folderPath="${1}" 469 | 470 | checkExistFolder "${folderPath}" 471 | 472 | if [[ "$(isEmptyString "$(find "${folderPath}" -maxdepth 1 -mindepth 1)")" = 'true' ]] 473 | then 474 | echo 'true' && return 0 475 | fi 476 | 477 | echo 'false' && return 1 478 | } 479 | 480 | function isValidJSONContent() 481 | { 482 | local -r content="${1}" 483 | 484 | if ( python -m 'json.tool' <<< "${content}" &> '/dev/null' || python3 -m 'json.tool' <<< "${content}" &> '/dev/null' ) 485 | then 486 | echo 'true' && return 0 487 | fi 488 | 489 | echo 'false' && return 1 490 | } 491 | 492 | function isValidJSONFile() 493 | { 494 | local -r file="${1}" 495 | 496 | checkExistFile "${file}" 497 | 498 | isValidJSONContent "$(cat "${file}")" 499 | } 500 | 501 | function moveFolderContent() 502 | { 503 | local -r sourceFolder="${1}" 504 | local -r destinationFolder="${2}" 505 | 506 | checkExistFolder "${sourceFolder}" 507 | checkExistFolder "${destinationFolder}" 508 | 509 | find "${sourceFolder}" \ 510 | -mindepth 1 \ 511 | -maxdepth 1 \ 512 | -exec mv '{}' "${destinationFolder}" \; 513 | } 514 | 515 | function redirectOutputToLogFile() 516 | { 517 | local -r logFile="${1}" 518 | 519 | mkdir -p "$(dirname "${logFile}")" 520 | exec > >(tee -a "${logFile}") 2>&1 521 | } 522 | 523 | function resetFolderPermission() 524 | { 525 | local -r folderPath="${1}" 526 | local -r userLogin="${2}" 527 | local -r groupName="${3}" 528 | 529 | checkExistFolder "${folderPath}" 530 | checkExistUserLogin "${userLogin}" 531 | checkExistGroupName "${groupName}" 532 | 533 | header "RESETTING FOLDER PERMISSION ${folderPath}" 534 
| 535 | chown -R "${userLogin}:${groupName}" "${folderPath}" 536 | 537 | find "${folderPath}" \ 538 | -type d \ 539 | \( \ 540 | -not -path "*/.git" -a \ 541 | -not -path "*/.git/*" \ 542 | \) \ 543 | -exec chmod 700 '{}' \; \ 544 | -print 545 | 546 | find "${folderPath}" \ 547 | -type f \ 548 | \( \ 549 | -not -path "*/.git" -a \ 550 | -not -path "*/.git/*" \ 551 | \) \ 552 | -exec chmod 600 '{}' \; \ 553 | -print 554 | } 555 | 556 | function resetLogs() 557 | { 558 | local logFolderPaths=("${@}") 559 | 560 | # Default Log Folder Path 561 | 562 | if [[ "${#logFolderPaths[@]}" -lt '1' ]] 563 | then 564 | logFolderPaths+=('/var/log') 565 | fi 566 | 567 | # Delete Old Logs 568 | 569 | deleteOldLogs "${logFolderPaths[@]}" 570 | 571 | # Reset Logs 572 | 573 | header 'RESETTING LOGS' 574 | 575 | local i=0 576 | 577 | for ((i = 0; i < ${#logFolderPaths[@]}; i = i + 1)) 578 | do 579 | checkExistFolder "${logFolderPaths[i]}" 580 | 581 | find "${logFolderPaths[i]}" \ 582 | -type f \ 583 | -exec cp -f '/dev/null' '{}' \; \ 584 | -print 585 | done 586 | } 587 | 588 | function sortUniqueFile() 589 | { 590 | local -r filePath="${1}" 591 | 592 | checkExistFile "${filePath}" 593 | 594 | printf '%s' "$(awk 'NF' "${filePath}" | sort -u)" > "${filePath}" 595 | } 596 | 597 | function sortUniqueTrimFile() 598 | { 599 | local -r filePath="${1}" 600 | 601 | checkExistFile "${filePath}" 602 | 603 | printf '%s' "$(awk 'NF' "${filePath}" | awk '{$1=$1};1' | sort -u)" > "${filePath}" 604 | } 605 | 606 | function symlinkListUsrBin() 607 | { 608 | local -r sourceFilePaths=("${@}") 609 | 610 | local sourceFilePath='' 611 | 612 | for sourceFilePath in "${sourceFilePaths[@]}" 613 | do 614 | chmod 755 "${sourceFilePath}" 615 | rm -f -r "/usr/bin/$(basename "${sourceFilePath}")" 616 | ln -f -s "${sourceFilePath}" "/usr/bin/$(basename "${sourceFilePath}")" 617 | done 618 | } 619 | 620 | function symlinkUsrBin() 621 | { 622 | local -r sourceBinFileOrFolder="${1}" 623 | 624 | if [[ "$(isMacOperatingSystem)" = 'true' ]] 625 | then 626 | mkdir -p '/usr/bin' 627 | 628 | if [[ -d "${sourceBinFileOrFolder}" ]] 629 | then 630 | find "${sourceBinFileOrFolder}" -maxdepth 1 \( -type f -o -type l \) -perm -u+x -exec bash -c -e ' 631 | for file 632 | do 633 | fileType="$(stat -f "%HT" "${file}")" 634 | 635 | if [[ "${fileType}" = "Regular File" ]] 636 | then 637 | ln -f -s "${file}" "/usr/bin/$(basename "${file}")" 638 | elif [[ "${fileType}" = "Symbolic Link" ]] 639 | then 640 | cd "$(dirname "${file}")" 641 | 642 | if [[ -f "$(readlink "${file}")" ]] 643 | then 644 | ln -f -s "${file}" "/usr/bin/$(basename "${file}")" 645 | fi 646 | fi 647 | done' bash '{}' \; 648 | elif [[ -f "${sourceBinFileOrFolder}" ]] 649 | then 650 | ln -f -s "${sourceBinFileOrFolder}" "/usr/bin/$(basename "${sourceBinFileOrFolder}")" 651 | else 652 | fatal "\nFATAL : '${sourceBinFileOrFolder}' is not directory or file" 653 | fi 654 | elif [[ "$(isAmazonLinuxDistributor)" = 'true' || "$(isCentOSDistributor)" = 'true' || "$(isRedHatDistributor)" = 'true' || "$(isRockyLinuxDistributor)" = 'true' || "$(isUbuntuDistributor)" = 'true' ]] 655 | then 656 | mkdir -p '/usr/bin' 657 | 658 | if [[ -d "${sourceBinFileOrFolder}" ]] 659 | then 660 | find "${sourceBinFileOrFolder}" -maxdepth 1 -xtype f -perm -u+x -exec bash -c -e ' 661 | for file 662 | do 663 | ln -f -s "${file}" "/usr/bin/$(basename "${file}")" 664 | done' bash '{}' \; 665 | elif [[ -f "${sourceBinFileOrFolder}" ]] 666 | then 667 | ln -f -s "${sourceBinFileOrFolder}" "/usr/bin/$(basename 
"${sourceBinFileOrFolder}")" 668 | else 669 | fatal "\nFATAL : '${sourceBinFileOrFolder}' is not directory or file" 670 | fi 671 | else 672 | fatal '\nFATAL : only support Amazon-Linux, CentOS, Mac, RedHat, or Ubuntu OS' 673 | fi 674 | } 675 | 676 | function trimFile() 677 | { 678 | local -r filePath="${1}" 679 | 680 | checkExistFile "${filePath}" 681 | 682 | printf '%s' "$(< "${filePath}")" > "${filePath}" 683 | } 684 | 685 | ######################### 686 | # FILE REMOTE UTILITIES # 687 | ######################### 688 | 689 | function checkExistURL() 690 | { 691 | local -r url="${1}" 692 | 693 | if [[ "$(existURL "${url}")" = 'false' ]] 694 | then 695 | fatal "\nFATAL : url '${url}' not found" 696 | fi 697 | } 698 | 699 | function downloadFile() 700 | { 701 | local -r url="${1}" 702 | local -r destinationFile="${2}" 703 | local overwrite="${3}" 704 | 705 | checkExistURL "${url}" 706 | 707 | # Check Overwrite 708 | 709 | if [[ "$(isEmptyString "${overwrite}")" = 'true' ]] 710 | then 711 | overwrite='false' 712 | fi 713 | 714 | checkTrueFalseString "${overwrite}" 715 | 716 | # Validate 717 | 718 | if [[ -f "${destinationFile}" ]] 719 | then 720 | if [[ "${overwrite}" = 'false' ]] 721 | then 722 | fatal "\nFATAL : file '${destinationFile}' found" 723 | fi 724 | 725 | rm -f "${destinationFile}" 726 | elif [[ -e "${destinationFile}" ]] 727 | then 728 | fatal "\nFATAL : file '${destinationFile}' already exists" 729 | fi 730 | 731 | # Download 732 | 733 | debug "\nDownloading '${url}' to '${destinationFile}'\n" 734 | curl -L "${url}" -o "${destinationFile}" --retry 12 --retry-delay 5 735 | } 736 | 737 | function existURL() 738 | { 739 | local -r url="${1}" 740 | 741 | # Install Curl 742 | 743 | installCURLCommand > '/dev/null' 744 | 745 | # Check URL 746 | 747 | if ( curl -f --head -k -L "${url}" -o '/dev/null' -s --retry 12 --retry-delay 5 || 748 | curl -f -k -L "${url}" -o '/dev/null' -r 0-0 -s --retry 12 --retry-delay 5 ) 749 | then 750 | echo 'true' && return 0 751 | fi 752 | 753 | echo 'false' && return 1 754 | } 755 | 756 | function getRemoteFileContent() 757 | { 758 | local -r url="${1}" 759 | 760 | checkExistURL "${url}" 761 | curl -s -X 'GET' -L "${url}" --retry 12 --retry-delay 5 762 | } 763 | 764 | function unzipRemoteFile() 765 | { 766 | local -r downloadURL="${1}" 767 | local -r installFolder="${2}" 768 | local extension="${3}" 769 | 770 | # Install Curl 771 | 772 | installCURLCommand 773 | 774 | # Validate URL 775 | 776 | checkExistURL "${downloadURL}" 777 | 778 | # Find Extension 779 | 780 | local exExtension='' 781 | 782 | if [[ "$(isEmptyString "${extension}")" = 'true' ]] 783 | then 784 | extension="$(getFileExtension "${downloadURL}")" 785 | exExtension="$(rev <<< "${downloadURL}" | cut -d '.' 
-f 1-2 | rev)" 786 | fi 787 | 788 | # Unzip 789 | 790 | if [[ "$(grep -i '^tgz$' <<< "${extension}")" != '' || "$(grep -i '^tar\.gz$' <<< "${extension}")" != '' || "$(grep -i '^tar\.gz$' <<< "${exExtension}")" != '' ]] 791 | then 792 | debug "\nDownloading '${downloadURL}'\n" 793 | curl -L "${downloadURL}" --retry 12 --retry-delay 5 | tar -C "${installFolder}" -x -z --strip 1 794 | echo 795 | elif [[ "$(grep -i '^tar\.bz2$' <<< "${exExtension}")" != '' ]] 796 | then 797 | # Install BZip2 798 | 799 | installBZip2Command 800 | 801 | # Unzip 802 | 803 | debug "\nDownloading '${downloadURL}'\n" 804 | curl -L "${downloadURL}" --retry 12 --retry-delay 5 | tar -C "${installFolder}" -j -x --strip 1 805 | echo 806 | elif [[ "$(grep -i '^zip$' <<< "${extension}")" != '' ]] 807 | then 808 | # Install Unzip 809 | 810 | installUnzipCommand 811 | 812 | # Unzip 813 | 814 | if [[ "$(existCommand 'unzip')" = 'false' ]] 815 | then 816 | fatal 'FATAL : command unzip not found' 817 | fi 818 | 819 | local -r zipFile="${installFolder}/$(basename "${downloadURL}")" 820 | 821 | downloadFile "${downloadURL}" "${zipFile}" 'true' 822 | unzip -q "${zipFile}" -d "${installFolder}" 823 | rm -f "${zipFile}" 824 | echo 825 | else 826 | fatal "\nFATAL : file extension '${extension}' not supported" 827 | fi 828 | } 829 | 830 | ##################### 831 | # INSTALL UTILITIES # 832 | ##################### 833 | 834 | function installPortableBinary() 835 | { 836 | local -r appTitleName="${1}" 837 | local -r downloadURL="${2}" 838 | local -r installFolderPath="${3}" 839 | local -r binarySubPaths=($(sortUniqArray "$(replaceString "${4}" ',' ' ')")) 840 | local -r versionOption="${5}" 841 | local -r remoteUnzip="${6}" 842 | 843 | checkNonEmptyString "${appTitleName}" 'undefined app title name' 844 | checkNonEmptyString "${versionOption}" 'undefined version option' 845 | checkTrueFalseString "${remoteUnzip}" 846 | 847 | if [[ "${#binarySubPaths[@]}" -lt '1' ]] 848 | then 849 | fatal '\nFATAL : undefined binary sub paths' 850 | fi 851 | 852 | header "INSTALLING ${appTitleName}" 853 | 854 | checkRequireLinuxSystem 855 | checkRequireRootUser 856 | 857 | umask '0022' 858 | 859 | initializeFolder "${installFolderPath}" 860 | 861 | if [[ "${remoteUnzip}" = 'true' ]] 862 | then 863 | if [[ "$(getFileExtension "${downloadURL}")" = 'sh' ]] 864 | then 865 | curl -s -L "${downloadURL}" --retry 12 --retry-delay 5 | bash -e 866 | else 867 | unzipRemoteFile "${downloadURL}" "${installFolderPath}" 868 | fi 869 | 870 | printf '%s\n\nexport PATH="%s/%s:${PATH}"' \ 871 | '#!/bin/sh -e' \ 872 | "${installFolderPath}" \ 873 | "$(dirname "${binarySubPaths[0]}")" \ 874 | > "/etc/profile.d/$(basename "${installFolderPath}").sh" 875 | 876 | chmod 644 "/etc/profile.d/$(basename "${installFolderPath}").sh" 877 | else 878 | downloadFile "${downloadURL}" "${installFolderPath}/${binarySubPaths[0]}" 'true' 879 | fi 880 | 881 | chown -R "$(whoami):$(whoami)" "${installFolderPath}" 882 | 883 | local binarySubPath='' 884 | 885 | for binarySubPath in "${binarySubPaths[@]}" 886 | do 887 | symlinkListUsrBin "${installFolderPath}/${binarySubPath}" 888 | done 889 | 890 | displayVersion "$("/usr/bin/$(basename "${binarySubPaths[0]}")" "${versionOption}")" 891 | 892 | umask '0077' 893 | 894 | installCleanUp 895 | } 896 | 897 | ################# 898 | # MAC UTILITIES # 899 | ################# 900 | 901 | function closeMacApplications() 902 | { 903 | local -r headerMessage="${1}" 904 | local -r applicationNames=("${@:2}") 905 | 906 | checkRequireMacSystem 907 | 908 | if 
[[ "${#applicationNames[@]}" -gt '0' ]] 909 | then 910 | header "${headerMessage}" 911 | fi 912 | 913 | local applicationName='' 914 | 915 | for applicationName in "${applicationNames[@]}" 916 | do 917 | applicationName="$(getFileName "${applicationName}")" 918 | 919 | if [[ "${applicationName}" != 'Terminal' ]] 920 | then 921 | local errorMessage='' 922 | errorMessage="$(osascript -e "tell application \"${applicationName}\" to quit" 2>&1 || true)" 923 | 924 | if [[ "$(isEmptyString "${errorMessage}")" = 'true' || "$(grep -E -o '\(-128)$' <<< "${errorMessage}")" != '' ]] 925 | then 926 | info "closing '${applicationName}'" 927 | else 928 | error "${errorMessage}" 929 | fi 930 | fi 931 | done 932 | } 933 | 934 | function getMacCurrentUserICloudDriveFolderPath() 935 | { 936 | local -r iCloudFolderPath="$(getCurrentUserHomeFolder)/Library/Mobile Documents/com~apple~CloudDocs" 937 | 938 | if [[ -d "${iCloudFolderPath}" ]] 939 | then 940 | echo "${iCloudFolderPath}" 941 | else 942 | echo 943 | fi 944 | } 945 | 946 | function openMacApplications() 947 | { 948 | local -r headerMessage="${1}" 949 | local -r applicationNames=("${@:2}") 950 | 951 | checkRequireMacSystem 952 | 953 | if [[ "${#applicationNames[@]}" -gt '0' ]] 954 | then 955 | header "${headerMessage}" 956 | fi 957 | 958 | local applicationName='' 959 | 960 | for applicationName in "${applicationNames[@]}" 961 | do 962 | info "openning '${applicationName}'" 963 | osascript -e "tell application \"${applicationName}\" to activate" 964 | done 965 | } 966 | 967 | #################### 968 | # NUMBER UTILITIES # 969 | #################### 970 | 971 | function checkNaturalNumber() 972 | { 973 | local -r string="${1}" 974 | local -r errorMessage="${2}" 975 | 976 | if [[ "$(isNaturalNumber "${string}")" = 'false' ]] 977 | then 978 | if [[ "$(isEmptyString "${errorMessage}")" = 'true' ]] 979 | then 980 | fatal '\nFATAL : not natural number detected' 981 | fi 982 | 983 | fatal "\nFATAL : ${errorMessage}" 984 | fi 985 | } 986 | 987 | function checkPositiveInteger() 988 | { 989 | local -r string="${1}" 990 | local -r errorMessage="${2}" 991 | 992 | if [[ "$(isPositiveInteger "${string}")" = 'false' ]] 993 | then 994 | if [[ "$(isEmptyString "${errorMessage}")" = 'true' ]] 995 | then 996 | fatal '\nFATAL : not positive number detected' 997 | fi 998 | 999 | fatal "\nFATAL : ${errorMessage}" 1000 | fi 1001 | } 1002 | 1003 | function isNaturalNumber() 1004 | { 1005 | local -r string="${1}" 1006 | 1007 | if [[ "${string}" =~ ^[0-9]+$ ]] 1008 | then 1009 | echo 'true' && return 0 1010 | fi 1011 | 1012 | echo 'false' && return 1 1013 | } 1014 | 1015 | function isPositiveInteger() 1016 | { 1017 | local -r string="${1}" 1018 | 1019 | if [[ "${string}" =~ ^[1-9][0-9]*$ ]] 1020 | then 1021 | echo 'true' && return 0 1022 | fi 1023 | 1024 | echo 'false' && return 1 1025 | } 1026 | 1027 | ################ 1028 | # OS UTILITIES # 1029 | ################ 1030 | 1031 | function checkRequireLinuxSystem() 1032 | { 1033 | if [[ "$(isAmazonLinuxDistributor)" = 'false' && "$(isCentOSDistributor)" = 'false' && "$(isRedHatDistributor)" = 'false' && "$(isRockyLinuxDistributor)" = 'false' && "$(isUbuntuDistributor)" = 'false' ]] 1034 | then 1035 | fatal '\nFATAL : only support Amazon-Linux, CentOS, RedHat, or Ubuntu OS' 1036 | fi 1037 | 1038 | if [[ "$(is64BitSystem)" = 'false' ]] 1039 | then 1040 | fatal '\nFATAL : non x86_64 OS found' 1041 | fi 1042 | } 1043 | 1044 | function checkRequireMacSystem() 1045 | { 1046 | if [[ "$(isMacOperatingSystem)" = 'false' ]] 1047 | then 
1048 | fatal '\nFATAL : only support Mac OS' 1049 | fi 1050 | 1051 | if [[ "$(is64BitSystem)" = 'false' ]] 1052 | then 1053 | fatal '\nFATAL : non x86_64 OS found' 1054 | fi 1055 | } 1056 | 1057 | function getMachineDescription() 1058 | { 1059 | lsb_release -d -s 1060 | } 1061 | 1062 | function getMachineRelease() 1063 | { 1064 | lsb_release -r -s 1065 | } 1066 | 1067 | function is64BitSystem() 1068 | { 1069 | if [[ "$(isMachineHardware 'x86_64')" = 'true' || "$(isMachineHardware 'arm64')" = 'true' ]] 1070 | then 1071 | echo 'true' && return 0 1072 | fi 1073 | 1074 | echo 'false' && return 1 1075 | } 1076 | 1077 | function isAmazonLinuxDistributor() 1078 | { 1079 | isDistributor 'amzn' 1080 | } 1081 | 1082 | function isCentOSDistributor() 1083 | { 1084 | isDistributor 'centos' 1085 | } 1086 | 1087 | function isDistributor() 1088 | { 1089 | local -r distributor="${1}" 1090 | 1091 | local -r found="$(grep -F -i -o -s "${distributor}" '/proc/version')" 1092 | 1093 | if [[ "$(isEmptyString "${found}")" = 'true' ]] 1094 | then 1095 | echo 'false' && return 1 1096 | fi 1097 | 1098 | echo 'true' && return 0 1099 | } 1100 | 1101 | function isLinuxOperatingSystem() 1102 | { 1103 | isOperatingSystem 'Linux' 1104 | } 1105 | 1106 | function isMachineHardware() 1107 | { 1108 | local -r machineHardware="$(escapeGrepSearchPattern "${1}")" 1109 | 1110 | local -r found="$(uname -m | grep -E -i -o "^${machineHardware}$")" 1111 | 1112 | if [[ "$(isEmptyString "${found}")" = 'true' ]] 1113 | then 1114 | echo 'false' && return 1 1115 | fi 1116 | 1117 | echo 'true' && return 0 1118 | } 1119 | 1120 | function isMacOperatingSystem() 1121 | { 1122 | isOperatingSystem 'Darwin' 1123 | } 1124 | 1125 | function isOperatingSystem() 1126 | { 1127 | local -r operatingSystem="$(escapeGrepSearchPattern "${1}")" 1128 | 1129 | local -r found="$(uname -s | grep -E -i -o "^${operatingSystem}$")" 1130 | 1131 | if [[ "$(isEmptyString "${found}")" = 'true' ]] 1132 | then 1133 | echo 'false' && return 1 1134 | fi 1135 | 1136 | echo 'true' && return 0 1137 | } 1138 | 1139 | function isRedHatDistributor() 1140 | { 1141 | isDistributor 'redhat' 1142 | } 1143 | 1144 | function isRockyLinuxDistributor() 1145 | { 1146 | isDistributor 'rockylinux' 1147 | } 1148 | 1149 | function isUbuntuDistributor() 1150 | { 1151 | isDistributor 'ubuntu' 1152 | } 1153 | 1154 | ##################### 1155 | # PACKAGE UTILITIES # 1156 | ##################### 1157 | 1158 | function getLastAptGetUpdate() 1159 | { 1160 | if [[ "$(isUbuntuDistributor)" = 'true' ]] 1161 | then 1162 | local -r aptDate="$(stat -c %Y '/var/cache/apt')" 1163 | local -r nowDate="$(date +'%s')" 1164 | 1165 | echo $((nowDate - aptDate)) 1166 | fi 1167 | } 1168 | 1169 | function installBuildEssential() 1170 | { 1171 | if [[ "$(isUbuntuDistributor)" = 'true' ]] 1172 | then 1173 | installPackages 'g++' 'build-essential' 1174 | elif [[ "$(isAmazonLinuxDistributor)" = 'true' || "$(isCentOSDistributor)" = 'true' || "$(isRedHatDistributor)" = 'true' || "$(isRockyLinuxDistributor)" = 'true' ]] 1175 | then 1176 | installPackages 'gcc' 'gcc-c++' 'kernel-devel' 'make' 'openssl-devel' 1177 | else 1178 | fatal '\nFATAL : only support Amazon-Linux, CentOS, RedHat, or Ubuntu OS' 1179 | fi 1180 | } 1181 | 1182 | function installBZip2Command() 1183 | { 1184 | local -r commandPackage=('bzip2' 'bzip2') 1185 | 1186 | installCommands "${commandPackage[@]}" 1187 | } 1188 | 1189 | function installCleanUp() 1190 | { 1191 | header 'CLEANING UP INSTALLATION' 1192 | 1193 | if [[ "$(isUbuntuDistributor)" = 
'true' ]] 1194 | then 1195 | DEBIAN_FRONTEND='noninteractive' apt-get --fix-missing -y -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' autoremove 1196 | DEBIAN_FRONTEND='noninteractive' apt-get --fix-missing -y -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' clean 1197 | DEBIAN_FRONTEND='noninteractive' apt-get --fix-missing -y -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' autoclean 1198 | elif [[ "$(isAmazonLinuxDistributor)" = 'true' || "$(isCentOSDistributor)" = 'true' || "$(isRedHatDistributor)" = 'true' || "$(isRockyLinuxDistributor)" = 'true' ]] 1199 | then 1200 | yum clean all 1201 | else 1202 | fatal '\nFATAL : only support Amazon-Linux, CentOS, RedHat, or Ubuntu OS' 1203 | fi 1204 | } 1205 | 1206 | function installCommands() 1207 | { 1208 | local -r commandPackageData=("${@}") 1209 | 1210 | if [[ "$(isUbuntuDistributor)" = 'true' ]] 1211 | then 1212 | runAptGetUpdate '' 1213 | fi 1214 | 1215 | local i=0 1216 | 1217 | for ((i = 0; i < ${#commandPackageData[@]}; i = i + 2)) 1218 | do 1219 | local command="${commandPackageData[${i}]}" 1220 | local package="${commandPackageData[${i} + 1]}" 1221 | 1222 | checkNonEmptyString "${command}" 'undefined command' 1223 | checkNonEmptyString "${package}" 'undefined package' 1224 | 1225 | if [[ "$(existCommand "${command}")" = 'false' ]] 1226 | then 1227 | installPackages "${package}" 1228 | fi 1229 | done 1230 | } 1231 | 1232 | function installCURLCommand() 1233 | { 1234 | local -r commandPackage=('curl' 'curl') 1235 | 1236 | installCommands "${commandPackage[@]}" 1237 | } 1238 | 1239 | function installPackage() 1240 | { 1241 | local -r aptPackage="${1}" 1242 | local -r rpmPackage="${2}" 1243 | 1244 | if [[ "$(isUbuntuDistributor)" = 'true' ]] 1245 | then 1246 | if [[ "$(isEmptyString "${aptPackage}")" = 'false' ]] 1247 | then 1248 | if [[ "$(isAptGetPackageInstall "${aptPackage}")" = 'true' ]] 1249 | then 1250 | debug "\nApt-Get Package '${aptPackage}' has already been installed" 1251 | else 1252 | echo -e "\033[1;35m\nInstalling Apt-Get Package '${aptPackage}'\033[0m" 1253 | DEBIAN_FRONTEND='noninteractive' apt-get install "${aptPackage}" --fix-missing -y || 1254 | (DEBIAN_FRONTEND='noninteractive' apt-get install --fix-missing --yes -f -y && DEBIAN_FRONTEND='noninteractive' apt-get install "${aptPackage}" --fix-missing -y) 1255 | fi 1256 | fi 1257 | elif [[ "$(isAmazonLinuxDistributor)" = 'true' || "$(isCentOSDistributor)" = 'true' || "$(isRedHatDistributor)" = 'true' || "$(isRockyLinuxDistributor)" = 'true' ]] 1258 | then 1259 | if [[ "$(isEmptyString "${rpmPackage}")" = 'false' ]] 1260 | then 1261 | yum install -y "${rpmPackage}" 1262 | fi 1263 | else 1264 | fatal '\nFATAL : only support Amazon-Linux, CentOS, RedHat, or Ubuntu OS' 1265 | fi 1266 | } 1267 | 1268 | function installPackages() 1269 | { 1270 | local -r packages=("${@}") 1271 | 1272 | if [[ "$(isUbuntuDistributor)" = 'true' ]] 1273 | then 1274 | runAptGetUpdate '' 1275 | fi 1276 | 1277 | local package='' 1278 | 1279 | for package in "${packages[@]}" 1280 | do 1281 | if [[ "$(isUbuntuDistributor)" = 'true' ]] 1282 | then 1283 | installPackage "${package}" 1284 | elif [[ "$(isAmazonLinuxDistributor)" = 'true' || "$(isCentOSDistributor)" = 'true' || "$(isRedHatDistributor)" = 'true' || "$(isRockyLinuxDistributor)" = 'true' ]] 1285 | then 1286 | installPackage '' "${package}" 1287 | else 1288 | fatal '\nFATAL : only support Amazon-Linux, CentOS, RedHat, or Ubuntu OS' 1289 | fi 1290 | done 1291 | } 1292 | 
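# Usage sketch (illustration only, not part of this library): assumes the file
# is sourced by a root user on a supported distribution; 'jq' and 'lsof' are
# hypothetical package choices.
#
#   installPackages 'curl' 'unzip'             # apt-get on Ubuntu, yum on Amazon-Linux/CentOS/RedHat/Rocky
#   installCommands 'jq' 'jq' 'lsof' 'lsof'    # <command> <package> pairs; installs a package only when its command is missing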
1293 | function installPIPCommand() 1294 | { 1295 | local -r commandPackage=('pip' 'python-pip') 1296 | 1297 | installCommands "${commandPackage[@]}" 1298 | } 1299 | 1300 | function installPIPPackage() 1301 | { 1302 | local -r package="${1}" 1303 | 1304 | if [[ "$(isPIPPackageInstall "${package}")" = 'true' ]] 1305 | then 1306 | debug "PIP Package '${package}' found" 1307 | else 1308 | echo -e "\033[1;35m\nInstalling PIP package '${package}'\033[0m" 1309 | pip install "${package}" 1310 | fi 1311 | } 1312 | 1313 | function installUnzipCommand() 1314 | { 1315 | local -r commandPackage=('unzip' 'unzip') 1316 | 1317 | installCommands "${commandPackage[@]}" 1318 | } 1319 | 1320 | function isAptGetPackageInstall() 1321 | { 1322 | local -r package="$(escapeGrepSearchPattern "${1}")" 1323 | 1324 | local -r found="$(dpkg --get-selections | grep -E -o "^${package}(:amd64)*\s+install$")" 1325 | 1326 | if [[ "$(isEmptyString "${found}")" = 'true' ]] 1327 | then 1328 | echo 'false' && return 1 1329 | fi 1330 | 1331 | echo 'true' && return 0 1332 | } 1333 | 1334 | function isPIPPackageInstall() 1335 | { 1336 | local -r package="$(escapeGrepSearchPattern "${1}")" 1337 | 1338 | # Install PIP 1339 | 1340 | installPIPCommand > '/dev/null' 1341 | 1342 | # Check Command 1343 | 1344 | if [[ "$(existCommand 'pip')" = 'false' ]] 1345 | then 1346 | fatal 'FATAL : command python-pip not found' 1347 | fi 1348 | 1349 | local -r found="$(pip list | grep -E -o "^${package}\s+\(.*\)$")" 1350 | 1351 | if [[ "$(isEmptyString "${found}")" = 'true' ]] 1352 | then 1353 | echo 'false' && return 1 1354 | fi 1355 | 1356 | echo 'true' && return 0 1357 | } 1358 | 1359 | function runAptGetUpdate() 1360 | { 1361 | local updateInterval="${1}" 1362 | 1363 | if [[ "$(isUbuntuDistributor)" = 'true' ]] 1364 | then 1365 | local -r lastAptGetUpdate="$(getLastAptGetUpdate)" 1366 | 1367 | if [[ "$(isEmptyString "${updateInterval}")" = 'true' ]] 1368 | then 1369 | # Default To 24 hours 1370 | updateInterval="$((24 * 60 * 60))" 1371 | fi 1372 | 1373 | if [[ "${lastAptGetUpdate}" -gt "${updateInterval}" ]] 1374 | then 1375 | info 'apt-get update' 1376 | apt-get update -m 1377 | else 1378 | local -r lastUpdate="$(date -u -d @"${lastAptGetUpdate}" +'%-Hh %-Mm %-Ss')" 1379 | 1380 | info "\nSkip apt-get update because its last run was '${lastUpdate}' ago" 1381 | fi 1382 | fi 1383 | } 1384 | 1385 | function runUpgrade() 1386 | { 1387 | header 'UPGRADING SYSTEM' 1388 | 1389 | if [[ "$(isUbuntuDistributor)" = 'true' ]] 1390 | then 1391 | runAptGetUpdate '' 1392 | 1393 | info '\napt-get upgrade' 1394 | DEBIAN_FRONTEND='noninteractive' apt-get --fix-missing -y -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' upgrade 1395 | 1396 | info '\napt-get dist-upgrade' 1397 | DEBIAN_FRONTEND='noninteractive' apt-get --fix-missing -y -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' dist-upgrade 1398 | 1399 | info '\napt-get autoremove' 1400 | DEBIAN_FRONTEND='noninteractive' apt-get --fix-missing -y -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' autoremove 1401 | 1402 | info '\napt-get clean' 1403 | DEBIAN_FRONTEND='noninteractive' apt-get --fix-missing -y -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' clean 1404 | 1405 | info '\napt-get autoclean' 1406 | DEBIAN_FRONTEND='noninteractive' apt-get --fix-missing -y -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold' autoclean 1407 | elif [[ "$(isAmazonLinuxDistributor)" = 'true' || 
"$(isCentOSDistributor)" = 'true' || "$(isRedHatDistributor)" = 'true' || "$(isRockyLinuxDistributor)" = 'true' ]] 1408 | then 1409 | yum -y --security update 1410 | yum -y update --nogpgcheck --skip-broken 1411 | fi 1412 | } 1413 | 1414 | function upgradePIPPackage() 1415 | { 1416 | local -r package="${1}" 1417 | 1418 | if [[ "$(isPIPPackageInstall "${package}")" = 'true' ]] 1419 | then 1420 | echo -e "\033[1;35mUpgrading PIP package '${package}'\033[0m" 1421 | pip install --upgrade "${package}" 1422 | else 1423 | debug "PIP Package '${package}' not found" 1424 | fi 1425 | } 1426 | 1427 | ##################### 1428 | # SERVICE UTILITIES # 1429 | ##################### 1430 | 1431 | function disableService() 1432 | { 1433 | local -r serviceName="${1}" 1434 | 1435 | checkNonEmptyString "${serviceName}" 'undefined service name' 1436 | 1437 | if [[ "$(existCommand 'systemctl')" = 'true' ]] 1438 | then 1439 | header "DISABLE SYSTEMD ${serviceName}" 1440 | 1441 | systemctl daemon-reload 1442 | systemctl disable "${serviceName}" 1443 | systemctl stop "${serviceName}" || true 1444 | else 1445 | header "DISABLE SERVICE ${serviceName}" 1446 | 1447 | chkconfig "${serviceName}" off 1448 | service "${serviceName}" stop || true 1449 | fi 1450 | 1451 | statusService "${serviceName}" 1452 | } 1453 | 1454 | function enableService() 1455 | { 1456 | local -r serviceName="${1}" 1457 | 1458 | checkNonEmptyString "${serviceName}" 'undefined service name' 1459 | 1460 | if [[ "$(existCommand 'systemctl')" = 'true' ]] 1461 | then 1462 | header "ENABLE SYSTEMD ${serviceName}" 1463 | 1464 | systemctl daemon-reload 1465 | systemctl enable "${serviceName}" || true 1466 | else 1467 | header "ENABLE SERVICE ${serviceName}" 1468 | 1469 | chkconfig "${serviceName}" on 1470 | fi 1471 | 1472 | statusService "${serviceName}" 1473 | } 1474 | 1475 | function restartService() 1476 | { 1477 | local -r serviceName="${1}" 1478 | 1479 | checkNonEmptyString "${serviceName}" 'undefined service name' 1480 | 1481 | stopService "${serviceName}" 1482 | startService "${serviceName}" 1483 | } 1484 | 1485 | function startService() 1486 | { 1487 | local -r serviceName="${1}" 1488 | 1489 | checkNonEmptyString "${serviceName}" 'undefined service name' 1490 | 1491 | if [[ "$(existCommand 'systemctl')" = 'true' ]] 1492 | then 1493 | header "STARTING SYSTEMD ${serviceName}" 1494 | 1495 | systemctl daemon-reload 1496 | systemctl enable "${serviceName}" || true 1497 | systemctl start "${serviceName}" 1498 | else 1499 | header "STARTING SERVICE ${serviceName}" 1500 | 1501 | chkconfig "${serviceName}" on 1502 | service "${serviceName}" start 1503 | fi 1504 | 1505 | statusService "${serviceName}" 1506 | } 1507 | 1508 | function statusService() 1509 | { 1510 | local -r serviceName="${1}" 1511 | 1512 | checkNonEmptyString "${serviceName}" 'undefined service name' 1513 | 1514 | if [[ "$(existCommand 'systemctl')" = 'true' ]] 1515 | then 1516 | header "STATUS SYSTEMD ${serviceName}" 1517 | 1518 | systemctl status "${serviceName}" --full --no-pager || true 1519 | else 1520 | header "STATUS SERVICE ${serviceName}" 1521 | 1522 | service "${serviceName}" status || true 1523 | fi 1524 | } 1525 | 1526 | function stopService() 1527 | { 1528 | local -r serviceName="${1}" 1529 | 1530 | checkNonEmptyString "${serviceName}" 'undefined service name' 1531 | 1532 | if [[ "$(existCommand 'systemctl')" = 'true' ]] 1533 | then 1534 | header "STOPPING SYSTEMD ${serviceName}" 1535 | 1536 | systemctl daemon-reload 1537 | systemctl stop "${serviceName}" || true 1538 | else 
1539 | header "STOPPING SERVICE ${serviceName}" 1540 | 1541 | service "${serviceName}" stop || true 1542 | fi 1543 | 1544 | statusService "${serviceName}" 1545 | } 1546 | 1547 | #################### 1548 | # STRING UTILITIES # 1549 | #################### 1550 | 1551 | function checkNonEmptyString() 1552 | { 1553 | local -r string="${1}" 1554 | local -r errorMessage="${2}" 1555 | 1556 | if [[ "$(isEmptyString "${string}")" = 'true' ]] 1557 | then 1558 | if [[ "$(isEmptyString "${errorMessage}")" = 'true' ]] 1559 | then 1560 | fatal '\nFATAL : empty value detected' 1561 | fi 1562 | 1563 | fatal "\nFATAL : ${errorMessage}" 1564 | fi 1565 | } 1566 | 1567 | function checkTrueFalseString() 1568 | { 1569 | local -r string="${1}" 1570 | local -r errorMessage="${2}" 1571 | 1572 | if [[ "${string}" != 'true' && "${string}" != 'false' ]] 1573 | then 1574 | if [[ "$(isEmptyString "${errorMessage}")" = 'true' ]] 1575 | then 1576 | fatal "\nFATAL : '${string}' is not 'true' or 'false'" 1577 | fi 1578 | 1579 | fatal "\nFATAL : ${errorMessage}" 1580 | fi 1581 | } 1582 | 1583 | function debug() 1584 | { 1585 | local -r message="${1}" 1586 | 1587 | if [[ "$(isEmptyString "${message}")" = 'false' ]] 1588 | then 1589 | echo -e "\033[1;34m${message}\033[0m" 2>&1 1590 | fi 1591 | } 1592 | 1593 | function deleteSpaces() 1594 | { 1595 | local -r content="${1}" 1596 | 1597 | replaceString "${content}" ' ' '' 1598 | } 1599 | 1600 | function displayVersion() 1601 | { 1602 | local -r message="${1}" 1603 | local -r applicationName="${2}" 1604 | 1605 | if [[ "$(isEmptyString "${applicationName}")" = 'true' ]] 1606 | then 1607 | header 'DISPLAYING VERSION' 1608 | else 1609 | header "DISPLAYING ${applicationName} VERSION" 1610 | fi 1611 | 1612 | info "${message}" 1613 | } 1614 | 1615 | function encodeURL() 1616 | { 1617 | local -r url="${1}" 1618 | 1619 | local i=0 1620 | 1621 | for ((i = 0; i < ${#url}; i++)) 1622 | do 1623 | local walker='' 1624 | walker="${url:i:1}" 1625 | 1626 | case "${walker}" in 1627 | [a-zA-Z0-9.~_-]) 1628 | printf '%s' "${walker}" 1629 | ;; 1630 | ' ') 1631 | printf + 1632 | ;; 1633 | *) 1634 | printf '%%%X' "'${walker}" 1635 | ;; 1636 | esac 1637 | done 1638 | } 1639 | 1640 | function error() 1641 | { 1642 | local -r message="${1}" 1643 | 1644 | if [[ "$(isEmptyString "${message}")" = 'false' ]] 1645 | then 1646 | echo -e "\033[1;31m${message}\033[0m" 1>&2 1647 | fi 1648 | } 1649 | 1650 | function escapeGrepSearchPattern() 1651 | { 1652 | local -r searchPattern="${1}" 1653 | 1654 | sed 's/[]\.|$(){}?+*^]/\\&/g' <<< "${searchPattern}" 1655 | } 1656 | 1657 | function escapeSearchPattern() 1658 | { 1659 | local -r searchPattern="${1}" 1660 | 1661 | sed -e "s@\@@\\\\\\@@g" -e "s@\[@\\\\[@g" -e "s@\*@\\\\*@g" -e "s@\%@\\\\%@g" <<< "${searchPattern}" 1662 | } 1663 | 1664 | function fatal() 1665 | { 1666 | local -r message="${1}" 1667 | 1668 | error "${message}" 1669 | exit 1 1670 | } 1671 | 1672 | function formatPath() 1673 | { 1674 | local path="${1}" 1675 | 1676 | while [[ "$(grep -F '//' <<< "${path}")" != '' ]] 1677 | do 1678 | path="$(sed -e 's/\/\/*/\//g' <<< "${path}")" 1679 | done 1680 | 1681 | sed -e 's/\/$//g' <<< "${path}" 1682 | } 1683 | 1684 | function header() 1685 | { 1686 | local -r title="${1}" 1687 | 1688 | if [[ "$(isEmptyString "${title}")" = 'false' ]] 1689 | then 1690 | echo -e "\n\033[1;33m>>>>>>>>>> \033[1;4;35m${title}\033[0m \033[1;33m<<<<<<<<<<\033[0m\n" 1691 | fi 1692 | } 1693 | 1694 | function indentString() 1695 | { 1696 | local -r indentString="$(escapeSearchPattern 
"${1}")" 1697 | local -r string="$(escapeSearchPattern "${2}")" 1698 | 1699 | sed "s@^@${indentString}@g" <<< "${string}" 1700 | } 1701 | 1702 | function info() 1703 | { 1704 | local -r message="${1}" 1705 | 1706 | if [[ "$(isEmptyString "${message}")" = 'false' ]] 1707 | then 1708 | echo -e "\033[1;36m${message}\033[0m" 2>&1 1709 | fi 1710 | } 1711 | 1712 | function invertTrueFalseString() 1713 | { 1714 | local -r string="${1}" 1715 | 1716 | checkTrueFalseString "${string}" 1717 | 1718 | if [[ "${string}" = 'true' ]] 1719 | then 1720 | echo 'false' && return 1 1721 | fi 1722 | 1723 | echo 'true' && return 0 1724 | } 1725 | 1726 | function isEmptyString() 1727 | { 1728 | local -r string="${1}" 1729 | 1730 | if [[ "$(trimString "${string}")" = '' ]] 1731 | then 1732 | echo 'true' && return 0 1733 | fi 1734 | 1735 | echo 'false' && return 1 1736 | } 1737 | 1738 | function postUpMessage() 1739 | { 1740 | echo -e "\n\033[1;32m¯\_(ツ)_/¯\033[0m" 1741 | } 1742 | 1743 | function printTable() 1744 | { 1745 | local -r delimiter="${1}" 1746 | local -r tableData="$(removeEmptyLines "${2}")" 1747 | local -r colorHeader="${3}" 1748 | local -r displayTotalCount="${4}" 1749 | 1750 | if [[ "${delimiter}" != '' && "$(isEmptyString "${tableData}")" = 'false' ]] 1751 | then 1752 | local -r numberOfLines="$(trimString "$(wc -l <<< "${tableData}")")" 1753 | 1754 | if [[ "${numberOfLines}" -gt '0' ]] 1755 | then 1756 | local table='' 1757 | local i=1 1758 | 1759 | for ((i = 1; i <= "${numberOfLines}"; i = i + 1)) 1760 | do 1761 | local line='' 1762 | line="$(sed "${i}q;d" <<< "${tableData}")" 1763 | 1764 | local numberOfColumns=0 1765 | numberOfColumns="$(awk -F "${delimiter}" '{print NF}' <<< "${line}")" 1766 | 1767 | # Add Line Delimiter 1768 | 1769 | if [[ "${i}" -eq '1' ]] 1770 | then 1771 | table="${table}$(printf '%s#+' "$(repeatString '#+' "${numberOfColumns}")")" 1772 | fi 1773 | 1774 | # Add Header Or Body 1775 | 1776 | table="${table}\n" 1777 | 1778 | local j=1 1779 | 1780 | for ((j = 1; j <= "${numberOfColumns}"; j = j + 1)) 1781 | do 1782 | table="${table}$(printf '#| %s' "$(cut -d "${delimiter}" -f "${j}" <<< "${line}")")" 1783 | done 1784 | 1785 | table="${table}#|\n" 1786 | 1787 | # Add Line Delimiter 1788 | 1789 | if [[ "${i}" -eq '1' ]] || [[ "${numberOfLines}" -gt '1' && "${i}" -eq "${numberOfLines}" ]] 1790 | then 1791 | table="${table}$(printf '%s#+' "$(repeatString '#+' "${numberOfColumns}")")" 1792 | fi 1793 | done 1794 | 1795 | if [[ "$(isEmptyString "${table}")" = 'false' ]] 1796 | then 1797 | local output='' 1798 | output="$(echo -e "${table}" | column -s '#' -t | awk '/^ *\+/{gsub(" ", "-", $0)}1' | sed 's/^--\|^ / /g')" 1799 | 1800 | if [[ "${colorHeader}" = 'true' ]] 1801 | then 1802 | echo -e "\033[1;32m$(head -n 3 <<< "${output}")\033[0m" 1803 | tail -n +4 <<< "${output}" 1804 | else 1805 | echo "${output}" 1806 | fi 1807 | fi 1808 | fi 1809 | 1810 | if [[ "${displayTotalCount}" = 'true' && "${numberOfLines}" -ge '0' ]] 1811 | then 1812 | if [[ "${colorHeader}" = 'true' ]] 1813 | then 1814 | echo -e "\n\033[1;36mTOTAL ROWS : $((numberOfLines - 1))\033[0m" 1815 | else 1816 | echo -e "\nTOTAL ROWS : $((numberOfLines - 1))" 1817 | fi 1818 | fi 1819 | fi 1820 | } 1821 | 1822 | function removeEmptyLines() 1823 | { 1824 | local -r content="${1}" 1825 | 1826 | echo -e "${content}" | sed '/^\s*$/d' 1827 | } 1828 | 1829 | function repeatString() 1830 | { 1831 | local -r string="${1}" 1832 | local -r numberToRepeat="${2}" 1833 | 1834 | if [[ "${string}" != '' && "$(isPositiveInteger 
"${numberToRepeat}")" = 'true' ]] 1835 | then 1836 | local -r result="$(printf "%${numberToRepeat}s")" 1837 | echo -e "${result// /${string}}" 1838 | fi 1839 | } 1840 | 1841 | function replaceString() 1842 | { 1843 | local -r content="${1}" 1844 | local -r oldValue="$(escapeSearchPattern "${2}")" 1845 | local -r newValue="$(escapeSearchPattern "${3}")" 1846 | 1847 | sed "s@${oldValue}@${newValue}@g" <<< "${content}" 1848 | } 1849 | 1850 | function stringToNumber() 1851 | { 1852 | local -r string="${1}" 1853 | 1854 | checkNonEmptyString "${string}" 'undefined string' 1855 | 1856 | if [[ "$(existCommand 'md5')" = 'true' ]] 1857 | then 1858 | md5 <<< "${string}" | tr -cd '0-9' 1859 | elif [[ "$(existCommand 'md5sum')" = 'true' ]] 1860 | then 1861 | md5sum <<< "${string}" | tr -cd '0-9' 1862 | else 1863 | fatal '\nFATAL : md5 or md5sum command not found' 1864 | fi 1865 | } 1866 | 1867 | function stringToSearchPattern() 1868 | { 1869 | local -r string="$(trimString "${1}")" 1870 | 1871 | if [[ "$(isEmptyString "${string}")" = 'true' ]] 1872 | then 1873 | echo "${string}" 1874 | else 1875 | echo "^\s*$(sed -e 's/\s\+/\\s+/g' <<< "$(escapeSearchPattern "${string}")")\s*$" 1876 | fi 1877 | } 1878 | 1879 | function trimString() 1880 | { 1881 | local -r string="${1}" 1882 | 1883 | sed 's,^[[:blank:]]*,,' <<< "${string}" | sed 's,[[:blank:]]*$,,' 1884 | } 1885 | 1886 | function warn() 1887 | { 1888 | local -r message="${1}" 1889 | 1890 | if [[ "$(isEmptyString "${message}")" = 'false' ]] 1891 | then 1892 | echo -e "\033[1;33m${message}\033[0m" 1>&2 1893 | fi 1894 | } 1895 | 1896 | #################### 1897 | # SYSTEM UTILITIES # 1898 | #################### 1899 | 1900 | function addSwapSpace() 1901 | { 1902 | local swapSize="${1}" 1903 | local swapFile="${2}" 1904 | 1905 | header 'ADDING SWAP SPACE' 1906 | 1907 | # Set Default Values 1908 | 1909 | if [[ "$(isEmptyString "${swapSize}")" = 'true' ]] 1910 | then 1911 | swapSize='1024000' 1912 | fi 1913 | 1914 | if [[ "$(isEmptyString "${swapFile}")" = 'true' ]] 1915 | then 1916 | swapFile='/mnt/swapfile' 1917 | fi 1918 | 1919 | if [[ -f "${swapFile}" ]] 1920 | then 1921 | swapoff "${swapFile}" 1922 | fi 1923 | 1924 | rm -f "${swapFile}" 1925 | touch "${swapFile}" 1926 | 1927 | # Create Swap File 1928 | 1929 | dd if=/dev/zero of="${swapFile}" bs=1024 count="${swapSize}" 1930 | mkswap "${swapFile}" 1931 | chmod 600 "${swapFile}" 1932 | swapon "${swapFile}" 1933 | 1934 | # Config Swap File System 1935 | 1936 | local -r fstabConfig="${swapFile} swap swap defaults 0 0" 1937 | 1938 | appendToFileIfNotFound '/etc/fstab' "$(stringToSearchPattern "${fstabConfig}")" "${fstabConfig}" 'true' 'false' 'true' 1939 | 1940 | # Display Swap Status 1941 | 1942 | free -m 1943 | } 1944 | 1945 | function checkExistCommand() 1946 | { 1947 | local -r command="${1}" 1948 | local -r errorMessage="${2}" 1949 | 1950 | if [[ "$(existCommand "${command}")" = 'false' ]] 1951 | then 1952 | if [[ "$(isEmptyString "${errorMessage}")" = 'true' ]] 1953 | then 1954 | fatal "\nFATAL : command '${command}' not found" 1955 | fi 1956 | 1957 | fatal "\nFATAL : ${errorMessage}" 1958 | fi 1959 | } 1960 | 1961 | function checkRequirePorts() 1962 | { 1963 | local -r ports=("${@}") 1964 | 1965 | installPackages 'lsof' 1966 | 1967 | local -r headerRegex='^COMMAND\s\+PID\s\+USER\s\+FD\s\+TYPE\s\+DEVICE\s\+SIZE\/OFF\s\+NODE\s\+NAME$' 1968 | local -r status="$(lsof -i -n -P | grep "\( (LISTEN)$\)\|\(${headerRegex}\)")" 1969 | 1970 | local open='' 1971 | local port='' 1972 | 1973 | for port in 
"${ports[@]}" 1974 | do 1975 | local found='' 1976 | found="$(grep -i ":${port} (LISTEN)$" <<< "${status}" || echo)" 1977 | 1978 | if [[ "$(isEmptyString "${found}")" = 'false' ]] 1979 | then 1980 | open="${open}\n${found}" 1981 | fi 1982 | done 1983 | 1984 | if [[ "$(isEmptyString "${open}")" = 'false' ]] 1985 | then 1986 | echo -e "\033[1;31mFollowing ports are still opened. Make sure you uninstall or stop them before a new installation!\033[0m" 1987 | echo -e -n "\033[1;34m\n$(grep "${headerRegex}" <<< "${status}")\033[0m" 1988 | echo -e "\033[1;36m${open}\033[0m\n" 1989 | 1990 | exit 1 1991 | fi 1992 | } 1993 | 1994 | function displayOpenPorts() 1995 | { 1996 | local -r sleepTimeInSecond="${1}" 1997 | 1998 | installPackages 'lsof' 1999 | 2000 | header 'DISPLAYING OPEN PORTS' 2001 | 2002 | if [[ "$(isEmptyString "${sleepTimeInSecond}")" = 'false' ]] 2003 | then 2004 | sleep "${sleepTimeInSecond}" 2005 | fi 2006 | 2007 | lsof -i -n -P | grep -i ' (LISTEN)$' | sort -f 2008 | } 2009 | 2010 | function existCommand() 2011 | { 2012 | local -r command="${1}" 2013 | 2014 | if [[ "$(which "${command}" 2> '/dev/null')" = '' ]] 2015 | then 2016 | echo 'false' && return 1 2017 | fi 2018 | 2019 | echo 'true' && return 0 2020 | } 2021 | 2022 | function existDisk() 2023 | { 2024 | local -r disk="${1}" 2025 | 2026 | local -r foundDisk="$(fdisk -l "${disk}" 2> '/dev/null' | grep -E -i -o "^Disk\s+$(escapeGrepSearchPattern "${disk}"): ")" 2027 | 2028 | if [[ "$(isEmptyString "${disk}")" = 'false' && "$(isEmptyString "${foundDisk}")" = 'false' ]] 2029 | then 2030 | echo 'true' && return 0 2031 | fi 2032 | 2033 | echo 'false' && return 1 2034 | } 2035 | 2036 | function existDiskMount() 2037 | { 2038 | local -r disk="$(escapeGrepSearchPattern "${1}")" 2039 | local -r mountOn="$(escapeGrepSearchPattern "${2}")" 2040 | 2041 | local -r foundMount="$(df | grep -E "^${disk}\s+.*\s+${mountOn}$")" 2042 | 2043 | if [[ "$(isEmptyString "${foundMount}")" = 'true' ]] 2044 | then 2045 | echo 'false' && return 1 2046 | fi 2047 | 2048 | echo 'true' && return 0 2049 | } 2050 | 2051 | function existModule() 2052 | { 2053 | local -r module="${1}" 2054 | 2055 | checkNonEmptyString "${module}" 'undefined module' 2056 | 2057 | if [[ "$(lsmod | awk '{ print $1 }' | grep -F -o "${module}")" = '' ]] 2058 | then 2059 | echo 'false' && return 1 2060 | fi 2061 | 2062 | echo 'true' && return 0 2063 | } 2064 | 2065 | function existMount() 2066 | { 2067 | local -r mountOn="$(escapeGrepSearchPattern "${1}")" 2068 | 2069 | local -r foundMount="$(df | grep -E ".*\s+${mountOn}$")" 2070 | 2071 | if [[ "$(isEmptyString "${foundMount}")" = 'true' ]] 2072 | then 2073 | echo 'false' && return 1 2074 | fi 2075 | 2076 | echo 'true' && return 0 2077 | } 2078 | 2079 | function flushFirewall() 2080 | { 2081 | header 'FLUSHING FIREWALL' 2082 | 2083 | iptables -P INPUT ACCEPT 2084 | iptables -P FORWARD ACCEPT 2085 | iptables -P OUTPUT ACCEPT 2086 | 2087 | iptables -t nat -F 2088 | iptables -t mangle -F 2089 | iptables -F 2090 | iptables -X 2091 | 2092 | iptables --list 2093 | 2094 | saveFirewall 2095 | } 2096 | 2097 | function isPortOpen() 2098 | { 2099 | local -r port="$(escapeGrepSearchPattern "${1}")" 2100 | 2101 | checkNonEmptyString "${port}" 'undefined port' 2102 | 2103 | if [[ "$(isAmazonLinuxDistributor)" = 'true' || "$(isRedHatDistributor)" = 'true' || "$(isRockyLinuxDistributor)" = 'true' || "$(isUbuntuDistributor)" = 'true' ]] 2104 | then 2105 | local -r process="$(netstat -l -n -t -u | grep -E ":${port}\s+" | head -1)" 2106 | elif [[ 
"$(isCentOSDistributor)" = 'true' || "$(isMacOperatingSystem)" = 'true' ]] 2107 | then 2108 | if [[ "$(isCentOSDistributor)" = 'true' ]] 2109 | then 2110 | installPackages 'lsof' 2111 | fi 2112 | 2113 | local -r process="$(lsof -i -n -P | grep -E -i ":${port}\s+\(LISTEN\)$" | head -1)" 2114 | else 2115 | fatal '\nFATAL : only support Amazon-Linux, CentOS, Mac, RedHat, or Ubuntu OS' 2116 | fi 2117 | 2118 | if [[ "$(isEmptyString "${process}")" = 'true' ]] 2119 | then 2120 | echo 'false' && return 1 2121 | fi 2122 | 2123 | echo 'true' && return 0 2124 | } 2125 | 2126 | function redirectJDKTMPDir() 2127 | { 2128 | local -r option="_JAVA_OPTIONS='-Djava.io.tmpdir=/var/tmp'" 2129 | 2130 | appendToFileIfNotFound '/etc/environment' "${option}" "${option}" 'false' 'false' 'true' 2131 | appendToFileIfNotFound '/etc/profile' "${option}" "${option}" 'false' 'false' 'true' 2132 | } 2133 | 2134 | function remountTMP() 2135 | { 2136 | header 'RE-MOUNTING TMP' 2137 | 2138 | if [[ "$(existMount '/tmp')" = 'true' ]] 2139 | then 2140 | mount -o 'remount,rw,exec,nosuid' -v '/tmp' 2141 | else 2142 | warn 'WARN : mount /tmp not found' 2143 | fi 2144 | } 2145 | 2146 | function saveFirewall() 2147 | { 2148 | header 'SAVING FIREWALL' 2149 | 2150 | local ruleFile='' 2151 | 2152 | for ruleFile in '/etc/iptables/rules.v4' '/etc/iptables/rules.v6' '/etc/sysconfig/iptables' '/etc/sysconfig/ip6tables' 2153 | do 2154 | if [[ -f "${ruleFile}" ]] 2155 | then 2156 | if [[ "$(grep -F '6' <<< "${ruleFile}")" = '' ]] 2157 | then 2158 | iptables-save > "${ruleFile}" 2159 | else 2160 | ip6tables-save > "${ruleFile}" 2161 | fi 2162 | 2163 | info "${ruleFile}" 2164 | cat "${ruleFile}" 2165 | echo 2166 | fi 2167 | done 2168 | } 2169 | 2170 | ############################ 2171 | # USER AND GROUP UTILITIES # 2172 | ############################ 2173 | 2174 | function addUser() 2175 | { 2176 | local -r userLogin="${1}" 2177 | local -r groupName="${2}" 2178 | local -r createHome="${3}" 2179 | local -r systemAccount="${4}" 2180 | local -r allowLogin="${5}" 2181 | 2182 | checkNonEmptyString "${userLogin}" 'undefined user login' 2183 | checkNonEmptyString "${groupName}" 'undefined group name' 2184 | 2185 | # Options 2186 | 2187 | if [[ "${createHome}" = 'true' ]] 2188 | then 2189 | local -r createHomeOption=('-m') 2190 | else 2191 | local -r createHomeOption=('-M') 2192 | fi 2193 | 2194 | if [[ "${allowLogin}" = 'true' ]] 2195 | then 2196 | local -r allowLoginOption=('-s' '/bin/bash') 2197 | else 2198 | local -r allowLoginOption=('-s' '/bin/false') 2199 | fi 2200 | 2201 | # Add Group 2202 | 2203 | groupadd -f -r "${groupName}" 2204 | 2205 | # Add User 2206 | 2207 | if [[ "$(existUserLogin "${userLogin}")" = 'true' ]] 2208 | then 2209 | if [[ "$(isUserLoginInGroupName "${userLogin}" "${groupName}")" = 'false' ]] 2210 | then 2211 | usermod -a -G "${groupName}" "${userLogin}" 2212 | fi 2213 | 2214 | # Not Exist Home 2215 | 2216 | if [[ "${createHome}" = 'true' ]] 2217 | then 2218 | local -r userHome="$(getUserHomeFolder "${userLogin}")" 2219 | 2220 | if [[ "$(isEmptyString "${userHome}")" = 'true' || ! 
-d "${userHome}" ]] 2221 | then 2222 | mkdir -m 700 -p "/home/${userLogin}" 2223 | chown -R "${userLogin}:${groupName}" "/home/${userLogin}" 2224 | fi 2225 | fi 2226 | else 2227 | if [[ "${systemAccount}" = 'true' ]] 2228 | then 2229 | useradd "${createHomeOption[@]}" -r "${allowLoginOption[@]}" -g "${groupName}" "${userLogin}" 2230 | else 2231 | useradd "${createHomeOption[@]}" "${allowLoginOption[@]}" -g "${groupName}" "${userLogin}" 2232 | fi 2233 | fi 2234 | } 2235 | 2236 | function addUserAuthorizedKey() 2237 | { 2238 | local -r userLogin="${1}" 2239 | local -r groupName="${2}" 2240 | local -r sshRSA="${3}" 2241 | 2242 | configUserSSH "${userLogin}" "${groupName}" "${sshRSA}" 'authorized_keys' 2243 | } 2244 | 2245 | function addUserSSHKnownHost() 2246 | { 2247 | local -r userLogin="${1}" 2248 | local -r groupName="${2}" 2249 | local -r sshRSA="${3}" 2250 | 2251 | configUserSSH "${userLogin}" "${groupName}" "${sshRSA}" 'known_hosts' 2252 | } 2253 | 2254 | function addUserToSudoWithoutPassword() 2255 | { 2256 | local -r userLogin="${1}" 2257 | 2258 | echo "${userLogin} ALL=(ALL) NOPASSWD:ALL" > "/etc/sudoers.d/${userLogin}" 2259 | chmod 440 "/etc/sudoers.d/${userLogin}" 2260 | } 2261 | 2262 | function checkExistGroupName() 2263 | { 2264 | local -r groupName="${1}" 2265 | 2266 | if [[ "$(existGroupName "${groupName}")" = 'false' ]] 2267 | then 2268 | fatal "\nFATAL : group name '${groupName}' not found" 2269 | fi 2270 | } 2271 | 2272 | function checkExistUserLogin() 2273 | { 2274 | local -r userLogin="${1}" 2275 | 2276 | if [[ "$(existUserLogin "${userLogin}")" = 'false' ]] 2277 | then 2278 | fatal "\nFATAL : user login '${userLogin}' not found" 2279 | fi 2280 | } 2281 | 2282 | function checkRequireNonRootUser() 2283 | { 2284 | if [[ "$(whoami)" = 'root' ]] 2285 | then 2286 | fatal '\nFATAL : non root login required' 2287 | fi 2288 | } 2289 | 2290 | function checkRequireRootUser() 2291 | { 2292 | checkRequireUserLogin 'root' 2293 | } 2294 | 2295 | function checkRequireUserLogin() 2296 | { 2297 | local -r userLogin="${1}" 2298 | 2299 | if [[ "$(whoami)" != "${userLogin}" ]] 2300 | then 2301 | fatal "\nFATAL : user login '${userLogin}' required" 2302 | fi 2303 | } 2304 | 2305 | function configUserGIT() 2306 | { 2307 | local -r userLogin="${1}" 2308 | local -r gitUserName="${2}" 2309 | local -r gitUserEmail="${3}" 2310 | 2311 | header "CONFIGURING GIT FOR USER ${userLogin}" 2312 | 2313 | checkExistUserLogin "${userLogin}" 2314 | checkNonEmptyString "${gitUserName}" 'undefined git user name' 2315 | checkNonEmptyString "${gitUserEmail}" 'undefined git user email' 2316 | 2317 | su -l "${userLogin}" -c 'git config --global pull.rebase false' 2318 | su -l "${userLogin}" -c 'git config --global push.default simple' 2319 | su -l "${userLogin}" -c "git config --global user.email '${gitUserEmail}'" 2320 | su -l "${userLogin}" -c "git config --global user.name '${gitUserName}'" 2321 | 2322 | info "$(su -l "${userLogin}" -c 'git config --list')" 2323 | } 2324 | 2325 | function configUserSSH() 2326 | { 2327 | local -r userLogin="${1}" 2328 | local -r groupName="${2}" 2329 | local -r sshRSA="${3}" 2330 | local -r configFileName="${4}" 2331 | 2332 | header "CONFIGURING ${configFileName} FOR USER ${userLogin}" 2333 | 2334 | checkExistUserLogin "${userLogin}" 2335 | checkExistGroupName "${groupName}" 2336 | checkNonEmptyString "${sshRSA}" 'undefined SSH-RSA' 2337 | checkNonEmptyString "${configFileName}" 'undefined config file' 2338 | 2339 | local -r userHome="$(getUserHomeFolder "${userLogin}")" 2340 | 
2341 | checkExistFolder "${userHome}" 2342 | 2343 | mkdir -m 700 -p "${userHome}/.ssh" 2344 | touch "${userHome}/.ssh/${configFileName}" 2345 | appendToFileIfNotFound "${userHome}/.ssh/${configFileName}" "${sshRSA}" "${sshRSA}" 'false' 'false' 'false' 2346 | chmod 600 "${userHome}/.ssh/${configFileName}" 2347 | 2348 | chown -R "${userLogin}:${groupName}" "${userHome}/.ssh" 2349 | 2350 | cat "${userHome}/.ssh/${configFileName}" 2351 | } 2352 | 2353 | function deleteUser() 2354 | { 2355 | local -r userLogin="${1}" 2356 | 2357 | if [[ "$(existUserLogin "${userLogin}")" = 'true' ]] 2358 | then 2359 | userdel -f -r "${userLogin}" 2> '/dev/null' || true 2360 | fi 2361 | } 2362 | 2363 | function existGroupName() 2364 | { 2365 | local -r group="${1}" 2366 | 2367 | if [[ "$(grep -E -o "^${group}:" '/etc/group')" = '' ]] 2368 | then 2369 | echo 'false' && return 1 2370 | fi 2371 | 2372 | echo 'true' && return 0 2373 | } 2374 | 2375 | function existUserLogin() 2376 | { 2377 | local -r user="${1}" 2378 | 2379 | if ( id -u "${user}" > '/dev/null' 2>&1 ) 2380 | then 2381 | echo 'true' && return 0 2382 | fi 2383 | 2384 | echo 'false' && return 1 2385 | } 2386 | 2387 | function generateSSHPublicKeyFromPrivateKey() 2388 | { 2389 | local -r userLogin="${1}" 2390 | local groupName="${2}" 2391 | 2392 | # Set Default 2393 | 2394 | if [[ "$(isEmptyString "${groupName}")" = 'true' ]] 2395 | then 2396 | groupName="${userLogin}" 2397 | fi 2398 | 2399 | # Validate Input 2400 | 2401 | checkExistUserLogin "${userLogin}" 2402 | checkExistGroupName "${groupName}" 2403 | 2404 | local -r userHome="$(getUserHomeFolder "${userLogin}")" 2405 | 2406 | checkExistFile "${userHome}/.ssh/id_rsa" 2407 | 2408 | # Generate SSH Public Key 2409 | 2410 | header "GENERATING SSH PUBLIC KEY FOR USER '${userLogin}' FROM PRIVATE KEY" 2411 | 2412 | rm -f "${userHome}/.ssh/id_rsa.pub" 2413 | su -l "${userLogin}" -c "ssh-keygen -f '${userHome}/.ssh/id_rsa' -y > '${userHome}/.ssh/id_rsa.pub'" 2414 | chmod 600 "${userHome}/.ssh/id_rsa.pub" 2415 | chown "${userLogin}:${groupName}" "${userHome}/.ssh/id_rsa.pub" 2416 | 2417 | cat "${userHome}/.ssh/id_rsa.pub" 2418 | } 2419 | 2420 | function generateUserSSHKey() 2421 | { 2422 | local -r userLogin="${1}" 2423 | local groupName="${2}" 2424 | 2425 | # Set Default 2426 | 2427 | if [[ "$(isEmptyString "${groupName}")" = 'true' ]] 2428 | then 2429 | groupName="${userLogin}" 2430 | fi 2431 | 2432 | # Validate Input 2433 | 2434 | checkExistUserLogin "${userLogin}" 2435 | checkExistGroupName "${groupName}" 2436 | 2437 | local -r userHome="$(getUserHomeFolder "${userLogin}")" 2438 | 2439 | checkExistFolder "${userHome}" 2440 | 2441 | # Generate SSH Key 2442 | 2443 | header "GENERATING SSH KEY FOR USER '${userLogin}'" 2444 | 2445 | rm -f "${userHome}/.ssh/id_rsa" "${userHome}/.ssh/id_rsa.pub" 2446 | mkdir -m 700 -p "${userHome}/.ssh" 2447 | chown "${userLogin}:${groupName}" "${userHome}/.ssh" 2448 | 2449 | su -l "${userLogin}" -c "ssh-keygen -q -t rsa -N '' -f '${userHome}/.ssh/id_rsa'" 2450 | chmod 600 "${userHome}/.ssh/id_rsa" "${userHome}/.ssh/id_rsa.pub" 2451 | chown "${userLogin}:${groupName}" "${userHome}/.ssh/id_rsa" "${userHome}/.ssh/id_rsa.pub" 2452 | 2453 | cat "${userHome}/.ssh/id_rsa.pub" 2454 | } 2455 | 2456 | function getCurrentUserHomeFolder() 2457 | { 2458 | getUserHomeFolder "$(whoami)" 2459 | } 2460 | 2461 | function getProfileFilePath() 2462 | { 2463 | local -r user="${1}" 2464 | 2465 | local -r userHome="$(getUserHomeFolder "${user}")" 2466 | 2467 | if [[ "$(isEmptyString "${userHome}")" = 
'false' && -d "${userHome}" ]]
2468 |     then
2469 |         local -r bashProfileFilePath="${userHome}/.bash_profile"
2470 |         local -r profileFilePath="${userHome}/.profile"
2471 | 
2472 |         if [[ ! -f "${bashProfileFilePath}" && -f "${profileFilePath}" ]]
2473 |         then
2474 |             echo "${profileFilePath}"
2475 |         else
2476 |             echo "${bashProfileFilePath}"
2477 |         fi
2478 |     fi
2479 | }
2480 | 
2481 | function getUserGroupName()
2482 | {
2483 |     local -r userLogin="${1}"
2484 | 
2485 |     checkExistUserLogin "${userLogin}"
2486 | 
2487 |     id -g -n "${userLogin}"
2488 | }
2489 | 
2490 | function getUserHomeFolder()
2491 | {
2492 |     local -r user="${1}"
2493 | 
2494 |     if [[ "$(isEmptyString "${user}")" = 'false' ]]
2495 |     then
2496 |         local -r homeFolder="$(eval "echo ~${user}")"
2497 | 
2498 |         if [[ "${homeFolder}" = "~${user}" ]]
2499 |         then
2500 |             echo
2501 |         else
2502 |             echo "${homeFolder}"
2503 |         fi
2504 |     else
2505 |         echo
2506 |     fi
2507 | }
2508 | 
2509 | function isUserLoginInGroupName()
2510 | {
2511 |     local -r userLogin="${1}"
2512 |     local -r groupName="${2}"
2513 | 
2514 |     checkNonEmptyString "${userLogin}" 'undefined user login'
2515 |     checkNonEmptyString "${groupName}" 'undefined group name'
2516 | 
2517 |     if [[ "$(existUserLogin "${userLogin}")" = 'true' ]] && [[ "$(groups "${userLogin}" | grep "\b${groupName}\b")" != '' ]]
2518 |     then
2519 |         echo 'true' && return 0
2520 |     fi
2521 | 
2522 |     echo 'false' && return 1
2523 | }
--------------------------------------------------------------------------------
/sign_s3_url.bash:
--------------------------------------------------------------------------------
1 | #!/bin/bash -e
2 | 
3 | function displayUsage()
4 | {
5 |     local -r scriptName="$(basename "${BASH_SOURCE[0]}")"
6 | 
7 |     echo -e "\033[1;33m"
8 |     echo "SYNOPSIS :"
9 |     echo "    ${scriptName}"
10 |     echo "        --help"
11 |     echo "        --aws-access-key-id <aws_access_key_id>"
12 |     echo "        --aws-secret-access-key <aws_secret_access_key>"
13 |     echo "        --region <region>"
14 |     echo "        --bucket <bucket>"
15 |     echo "        --file-path <file_path>"
16 |     echo "        --method <method>"
17 |     echo "        --minute-expire <minute_expire>"
18 |     echo -e "\033[1;32m"
19 |     echo "USE CASES :"
20 |     echo "    If you have a private or public S3 bucket and would like to share downloadable links with anyone,"
21 |     echo "    this tool helps generate signed S3 URLs"
22 |     echo -e "\033[1;35m"
23 |     echo "DESCRIPTION :"
24 |     echo "    --help                     Help page"
25 |     echo "    --aws-access-key-id        AWS Access Key ID (optional, defaults to \${AWS_ACCESS_KEY_ID})"
26 |     echo "    --aws-secret-access-key    AWS Secret Access Key (optional, defaults to \${AWS_SECRET_ACCESS_KEY})"
27 |     echo "    --region                   Region (optional, defaults to \${AWS_DEFAULT_REGION})"
28 |     echo "                               Valid regions: $(getAllowedRegions)"
29 |     echo "    --bucket                   Bucket name (required)"
30 |     echo "    --file-path                File path (required)"
31 |     echo "    --method                   HTTP request method (optional, defaults to '${METHOD}')"
32 |     echo "    --minute-expire            Minutes until the signed URL expires (optional, defaults to '${MINUTE_EXPIRE}' minutes)"
33 |     echo -e "\033[1;36m"
34 |     echo "EXAMPLES :"
35 |     echo "    ./${scriptName} --help"
36 |     echo "    ./${scriptName} --bucket 'my_bucket_name' --file-path 'my_path/my_file.txt'"
37 |     echo "    ./${scriptName} --aws-access-key-id '5KI6IA4AXMA39FV7O4E0' --aws-secret-access-key '5N2j9gJlw9azyLEVpbIOn/tZ2u3sVjjHM03qJfIA' --region 'us-west-1' --bucket 'my_bucket_name' --file-path 'my_path/my_file.txt' --method 'PUT' --minute-expire '30'"
38 |     echo -e "\033[0m"
39 | 
40 |     exit "${1}"
41 | }
42 | 
43 | function generateSignURL()
44 | {
45 |     local -r awsAccessKeyID="${1}"
46 |     local -r awsSecretAccessKey="${2}"
47 |     local region="${3}"
48 |     local -r bucket="${4}"
49 |     local -r filePath="${5}"
50 |     local -r method="${6}"
51 |     local -r minuteExpire="${7}"
52 | 
53 |     if [[ "${region}" = 'us-east-1' ]]
54 |     then
55 |         region=''
56 |     fi
57 | 
58 |     local -r endPoint="$([[ "$(isEmptyString "${region}")" = 'true' ]] && echo 's3.amazonaws.com' || echo "s3-${region}.amazonaws.com")"
59 |     local -r expire="$(($(date +'%s') + minuteExpire * 60))"
60 |     local -r signature="$(
61 |         echo -en "${method}\n\n\n${expire}\n/${bucket}/${filePath}" |
62 |         openssl dgst -sha1 -binary -hmac "${awsSecretAccessKey}" |
63 |         openssl base64
64 |     )"
65 |     local -r query="AWSAccessKeyId=$(encodeURL "${awsAccessKeyID}")&Expires=${expire}&Signature=$(encodeURL "${signature}")"
66 | 
67 |     echo "https://${endPoint}/${bucket}/${filePath}?${query}"
68 | }
69 | 
70 | function main()
71 | {
72 |     source "$(dirname "${BASH_SOURCE[0]}")/libraries/aws.bash"
73 |     source "$(dirname "${BASH_SOURCE[0]}")/libraries/util.bash"
74 | 
75 |     # Set Default Values
76 | 
77 |     local awsAccessKeyID="${AWS_ACCESS_KEY_ID}"
78 |     local awsSecretAccessKey="${AWS_SECRET_ACCESS_KEY}"
79 | 
80 |     REGION="${AWS_DEFAULT_REGION}"
81 |     METHOD='GET'
82 |     MINUTE_EXPIRE='15'
83 | 
84 |     # Parse Inputs
85 | 
86 |     local -r optCount="${#}"
87 | 
88 |     while [[ "${#}" -gt '0' ]]
89 |     do
90 |         case "${1}" in
91 |             --help)
92 |                 displayUsage 0
93 |                 ;;
94 | 
95 |             --aws-access-key-id)
96 |                 shift
97 | 
98 |                 if [[ "${#}" -gt '0' ]]
99 |                 then
100 |                     awsAccessKeyID="$(trimString "${1}")"
101 | 
102 |                 fi
103 | 
104 |                 ;;
105 | 
106 |             --aws-secret-access-key)
107 |                 shift
108 | 
109 |                 if [[ "${#}" -gt '0' ]]
110 |                 then
111 |                     awsSecretAccessKey="$(trimString "${1}")"
112 |                 fi
113 | 
114 |                 ;;
115 | 
116 |             --region)
117 |                 shift
118 | 
119 |                 if [[ "${#}" -gt '0' ]]
120 |                 then
121 |                     REGION="$(trimString "${1}")"
122 |                 fi
123 | 
124 |                 ;;
125 | 
126 |             --bucket)
127 |                 shift
128 | 
129 |                 if [[ "${#}" -gt '0' ]]
130 |                 then
131 |                     local bucket=''
132 |                     bucket="$(trimString "${1}")"
133 |                 fi
134 | 
135 |                 ;;
136 | 
137 |             --file-path)
138 |                 shift
139 | 
140 |                 if [[ "${#}" -gt '0' ]]
141 |                 then
142 |                     local filePath=''
143 |                     filePath="$(formatPath "$(trimString "${1}")" | sed -e 's/^\///g')"
144 |                 fi
145 | 
146 |                 ;;
147 | 
148 |             --method)
149 |                 shift
150 | 
151 |                 if [[ "${#}" -gt '0' ]]
152 |                 then
153 |                     METHOD="$(trimString "${1}")"
154 |                 fi
155 | 
156 |                 ;;
157 | 
158 |             --minute-expire)
159 |                 shift
160 | 
161 |                 if [[ "${#}" -gt '0' ]]
162 |                 then
163 |                     MINUTE_EXPIRE="$(trimString "${1}")"
164 |                 fi
165 | 
166 |                 ;;
167 | 
168 |             *)
169 |                 shift
170 |                 ;;
171 |         esac
172 |     done
173 | 
174 |     # Validate Inputs
175 | 
176 |     if [[ "$(isEmptyString "${awsAccessKeyID}")" = 'true' || "$(isEmptyString "${awsSecretAccessKey}")" = 'true' || "$(isEmptyString "${bucket}")" = 'true' || "$(isEmptyString "${filePath}")" = 'true' ]]
177 |     then
178 |         if [[ "${optCount}" -lt '1' ]]
179 |         then
180 |             displayUsage 0
181 |         fi
182 | 
183 |         error '\nERROR: awsAccessKeyID, awsSecretAccessKey, bucket, or filePath not found\n'
184 |         displayUsage 1
185 |     fi
186 | 
187 |     if [[ "$(isEmptyString "${REGION}")" = 'true' || "$(isValidRegion "${REGION}")" = 'false' ]]
188 |     then
189 |         fatal "\nFATAL: region must be valid string of $(getAllowedRegions)\n"
190 |     fi
191 | 
192 |     if [[ "${MINUTE_EXPIRE}" -lt '1' ]]
193 |     then
194 |         fatal '\nFATAL: invalid MINUTE_EXPIRE\n'
195 |     fi
196 | 
197 |     generateSignURL "${awsAccessKeyID}" "${awsSecretAccessKey}" "${REGION}" "${bucket}" "${filePath}" "${METHOD}" "${MINUTE_EXPIRE}"
198 | }
199 | 
200 | main "$@"
--------------------------------------------------------------------------------
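
The query-string signing performed by `generateSignURL` in `sign_s3_url.bash` is the legacy AWS Signature Version 2 scheme: the string to sign is `METHOD\n\n\n<expires>\n/<bucket>/<key>` (the two empty lines stand for the omitted Content-MD5 and Content-Type headers), signed with HMAC-SHA1 under the secret access key and base64-encoded. The sketch below reproduces that calculation in isolation. All credentials, bucket, and key values are placeholders, and `jq`'s `@uri` filter is used as a URL encoder in place of the repository's `encodeURL` helper; treat it as an illustration of the string-to-sign construction, not a replacement for the script. Note that regions launched after 2014 generally accept only Signature Version 4, so URLs signed this way only work against regions that still support V2.

```
#!/bin/bash -e

# Minimal sketch of Signature Version 2 query-string signing.
# Every value below is a placeholder, not a real credential.

awsAccessKeyID='AKIAIOSFODNN7EXAMPLE'                            # placeholder key id
awsSecretAccessKey='wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'    # placeholder secret
bucket='my_bucket_name'
filePath='my_path/my_file.txt'
method='GET'
expire="$(($(date +'%s') + 15 * 60))"    # expiry as epoch seconds, 15 minutes from now

# String to sign: method, empty Content-MD5, empty Content-Type, expiry, resource path
stringToSign="$(printf '%s\n\n\n%s\n/%s/%s' "${method}" "${expire}" "${bucket}" "${filePath}")"

# HMAC-SHA1 over the string to sign, keyed by the secret, then base64
signature="$(printf '%s' "${stringToSign}" |
    openssl dgst -sha1 -binary -hmac "${awsSecretAccessKey}" |
    openssl base64)"

# URL-encode the signature; jq's @uri filter percent-encodes '+', '/', and '='
encodedSignature="$(printf '%s' "${signature}" | jq --raw-input --raw-output --slurp '@uri')"

echo "https://s3.amazonaws.com/${bucket}/${filePath}?AWSAccessKeyId=${awsAccessKeyID}&Expires=${expire}&Signature=${encodedSignature}"
```

Running the sketch prints a URL of the same shape as the script's output; `sign_s3_url.bash` itself additionally validates the region, normalizes the file path, and selects the regional endpoint.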