├── .DS_Store
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   ├── custom.md
│   │   └── feature_request.md
│   └── dco.yml
├── .travis.yml
├── CONTRIBUTING.md
├── LICENSE
├── MAINTAINERS.md
├── README.md
├── ansible
│   ├── Jenkinsfile-example
│   ├── LICENCE.txt
│   ├── README.md
│   ├── ansible.cfg
│   ├── aws-route53-play
│   │   ├── aws-route53-play.yml
│   │   ├── examples
│   │   │   ├── aws-route53-vars.yml
│   │   │   └── inventory
│   │   ├── readme.md
│   │   ├── requirements.yml
│   │   └── roles
│   ├── common-service-cat-src-inst-play
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── common-services-cat-src.yml
│   │   ├── examples
│   │   │   ├── cs_cat_src_vars.yml
│   │   │   └── inventory
│   │   └── roles
│   ├── common-service-fyre-play
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── common-services-fyre.yml
│   │   ├── examples
│   │   │   ├── cs_vars_fyre.yml
│   │   │   └── inventory
│   │   └── roles
│   ├── common-service-play
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── common-services.yml
│   │   ├── examples
│   │   │   ├── cs_vars.yml
│   │   │   └── inventory
│   │   └── roles
│   ├── csi-cephfs-fyre-play
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── csi-cephfs.yml
│   │   ├── examples
│   │   │   └── inventory
│   │   └── roles
│   ├── db2-openshift-play
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── db2-openshift-play.yml
│   │   ├── examples
│   │   │   └── db2_vars_example.yml
│   │   └── galaxy.yml
│   ├── deply-ova-vmware-play
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── deploy-ova-vmware.yml
│   │   ├── examples
│   │   │   ├── deploy_vmware_vars.yml
│   │   │   └── inventory
│   │   └── roles
│   ├── fix-fyre-hosts-play
│   │   ├── .gitignore
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── fix-fyre-hosts-play.yml
│   │   └── roles
│   ├── galaxy.yml
│   ├── hive-ocp-cluster-play
│   │   ├── Jenkinsfile
│   │   ├── ansible.cfg
│   │   ├── examples
│   │   │   ├── aws-vars.yml
│   │   │   ├── azure-vars.yml
│   │   │   ├── common-vars.yml
│   │   │   ├── google-vars.yml
│   │   │   ├── inventory
│   │   │   ├── roks-vars.yml
│   │   │   └── vsphere-vars.yml
│   │   ├── hive-ocp-cluster-delete.yml
│   │   ├── hive-ocp-cluster-provision.yml
│   │   ├── readme.md
│   │   └── requirements.yml
│   ├── ibm_installation_manager_play
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── ibm_installation_manager_play.yml
│   │   └── roles
│   ├── install_instana_agent
│   │   ├── .gitignore
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── examples
│   │   │   ├── inventory.switch.yml
│   │   │   ├── inventory.unix.yml
│   │   │   └── inventory.windows.yml
│   │   ├── install_instana_agent.yml
│   │   ├── instana_agent_switch.yml
│   │   ├── instana_agent_zone.yml
│   │   └── roles
│   ├── ipi-ocp-aws-play
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── delete-cluster.yml
│   │   ├── examples
│   │   │   ├── aws-vars.yml
│   │   │   ├── common-vars.yml
│   │   │   └── inventory
│   │   ├── provision-cluster.yml
│   │   └── roles
│   ├── ipi-ocp-vmware-play
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── delete-cluster.yml
│   │   ├── examples
│   │   │   ├── aws-vars.yml
│   │   │   ├── common-vars.yml
│   │   │   ├── inventory
│   │   │   └── vsphere-vars.yml
│   │   └── provision-cluster.yml
│   ├── nest-user-play
│   │   ├── .gitignore
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── nest-user-play.yml
│   │   └── roles
│   ├── nfs-storageclass-openshift-fyre-play
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── examples
│   │   │   └── nfs_sc_vars_example.yml
│   │   ├── galaxy.yml
│   │   └── nfs-storageclass-openshift-fyre-play.yml
│   ├── ocp-cluster-admin-play
│   │   ├── Jenkinsfile
│   │   ├── examples
│   │   │   ├── admin-vars.yml
│   │   │   └── inventory
│   │   ├── ocp-cluster-admin-play.yml
│   │   ├── readme.md
│   │   └── roles
│   ├── ocp-cluster-tag-play
│   │   ├── Jenkinsfile-aws
│   │   ├── examples
│   │   │   ├── inventory
│   │   │   └── tag-vars.yml
│   │   ├── ocp-cluster-tag-play.yml
│   │   └── roles
│   ├── ocp-pool-claim-play
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── examples
│   │   │   ├── content_devops_post_install_vars.yml
│   │   │   ├── example-team-vars.yml
│   │   │   ├── inventory
│   │   │   └── pool-vars.yml
│   │   ├── ocp-pool-claim-play.yml
│   │   └── roles
│   ├── osprereq-play
│   │   ├── .gitignore
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── osprereq-play.yml
│   │   └── roles
│   ├── prereq-play
│   │   ├── inventory
│   │   ├── prereq-play.yml
│   │   ├── requirements.yml
│   │   └── roles
│   ├── provision-ocp-cluster-play
│   │   └── readme.md
│   ├── provision-ocp-vmware-ceph-play
│   │   ├── Jenkinsfile
│   │   ├── ansible.cfg
│   │   ├── examples
│   │   │   ├── common-vars.yml
│   │   │   ├── inventory
│   │   │   └── vsphere-vars.yml
│   │   ├── provision-ocp-vmware-ceph-play.yml
│   │   ├── readme.md
│   │   └── requirements.yml
│   ├── provision-pool-play
│   │   ├── Jenkinsfile-aws
│   │   ├── ansible.cfg
│   │   ├── examples
│   │   │   ├── aws-pool-vars.yml
│   │   │   ├── azure-pool-vars.yml
│   │   │   ├── common-pool-vars.yml
│   │   │   ├── google-pool-vars.yml
│   │   │   ├── inventory
│   │   │   ├── roks-pool-vars.yml
│   │   │   └── vsphere-pool-vars.yml
│   │   ├── provision-pool-play.yml
│   │   └── readme.md
│   ├── request-crc-fyre-play
│   │   ├── .gitignore
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── request-crc-fyre-play.yml
│   │   └── roles
│   ├── request-instana-host-fyre-play
│   │   ├── .gitignore
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── request-instana-host-fyre-play.yml
│   │   └── roles
│   ├── request-ocp-ceph-fyre-play
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── request-ocp-ceph.yml
│   │   └── roles
│   ├── request-ocp-cs-install-fyre-play
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── examples
│   │   │   ├── cs_vars_fyre.yml
│   │   │   └── inventory
│   │   ├── request-ocp-cs-install.yml
│   │   └── roles
│   ├── request-ocp-fyre-play
│   │   ├── .gitignore
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── examples
│   │   │   ├── inventory
│   │   │   └── ocp_vars_example.yml
│   │   ├── request-ocp-fyre-play.yml
│   │   └── roles
│   ├── request-ocp-roks-play
│   │   ├── README.md
│   │   ├── examples
│   │   │   ├── remove-roks-vars.yml
│   │   │   └── request-roks-vars.yml
│   │   ├── remove-roks.yml
│   │   ├── request-roks.yml
│   │   ├── requirements.yml
│   │   └── roles
│   ├── request-ocp4-logging-fyre-play
│   │   ├── README.md
│   │   ├── examples
│   │   │   ├── examplefyreAPIjson
│   │   │   ├── inventory
│   │   │   └── ocp_logging_fyre_vars.yml
│   │   ├── request-ocp4-logging-fyre.yml
│   │   └── roles
│   ├── request-ocp4-logging-play
│   │   ├── README.md
│   │   ├── examples
│   │   │   ├── inventory
│   │   │   ├── inventory_remote_template
│   │   │   ├── ocp_logging_vars.yml
│   │   │   └── ocp_logging_vars_template.yml
│   │   ├── request-ocp4-logging.yml
│   │   └── roles
│   ├── request-ocpplus-cluster-transfer-fyre-play
│   │   ├── README.md
│   │   ├── examples
│   │   │   ├── inventory
│   │   │   └── ocp_transfer_vars.yml
│   │   ├── request-ocpplus-transfer.yml
│   │   └── roles
│   ├── request-ocs-fyre-play
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── examples
│   │   │   ├── example_fyre_api
│   │   │   ├── inventory
│   │   │   ├── inventory_remote_inf_node
│   │   │   ├── ocs_install_vars.yml
│   │   │   └── sample-output.txt
│   │   ├── request-ocs-fyre.yml
│   │   └── roles
│   ├── request-ocs-local-storage-vmware
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── ansible.cfg
│   │   ├── examples
│   │   │   ├── inventory_local
│   │   │   ├── inventory_remote_bastion_host
│   │   │   ├── inventory_remote_keyfile_template
│   │   │   └── ocs_install_vars.yml
│   │   ├── request-ocs-local-storage-vmware.yml
│   │   └── roles
│   ├── request-ocs-play
│   │   ├── README.md
│   │   ├── examples
│   │   │   ├── inventory
│   │   │   ├── inventory_remote_template
│   │   │   ├── ocs_vars.yml
│   │   │   └── ocs_vars_template.yml
│   │   ├── request-ocs.yml
│   │   └── roles
│   ├── request-rhel-db2-fyre-play
│   │   ├── .gitignore
│   │   ├── Jenkinsfile
│   │   ├── Jenkinsfile.db2only
│   │   ├── README.md
│   │   ├── db2.yml
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── request-rhel-db2-fyre-play.yml
│   │   └── roles
│   ├── request-rhel-jmeter-fyre-play
│   │   ├── .gitignore
│   │   ├── Jenkinsfile
│   │   ├── Jenkinsfile.jmeter
│   │   ├── README.md
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── forceReplace.sh
│   │   ├── jmeter-play.yml
│   │   ├── request-rhel-jmeter-fyre-play.yml
│   │   ├── roles
│   │   └── scripts
│   │       ├── isRunning.sh
│   │       ├── reportJmeter.sh
│   │       ├── startJmeter.sh
│   │       └── stopJmeter.sh
│   ├── roles
│   │   ├── authorized_keys
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── aws_cli_install
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── aws_cli_install.yml
│   │   │       └── main.yml
│   │   ├── aws_route53
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── aws_route53.yml
│   │   │       └── main.yml
│   │   ├── azure_cli_install
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── install-azure-client.yml
│   │   │       └── main.yml
│   │   ├── clean_vmware_kubevols
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── clean_vmware_kubevols.yaml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── ocp_clean_kubevols.sh.j2
│   │   ├── cleanup_dhcp_leases
│   │   │   ├── files
│   │   │   │   └── remove-leases.sh
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── cleanup-leases.yml
│   │   │       └── main.yml
│   │   ├── clone_repo
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── clone-repo.yml
│   │   │       └── main.yml
│   │   ├── collect_ips
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── collect_ips.yml
│   │   │       └── main.yml
│   │   ├── common_services
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── install.yml
│   │   │   │   ├── main.yml
│   │   │   │   └── uninstall.yml
│   │   │   └── templates
│   │   │       ├── catalog-source.yaml.j2
│   │   │       ├── cs-group.yaml.j2
│   │   │       ├── cs-request.bash.j2
│   │   │       ├── cs-request.yaml.j2
│   │   │       ├── cs-sub.yaml.j2
│   │   │       └── cs-validation.bash.j2
│   │   ├── common_services_cat_src_inst
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── common_services_cat_src.yml
│   │   │       └── main.yml
│   │   ├── content_devops_post_install
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── post-install.yml
│   │   ├── crc_enable_telemetry
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   ├── oc_enable_monitoring.sh
│   │   │   │   └── oc_get_clusterversion.sh
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── crc_enable_telemetry.yml
│   │   │       └── main.yml
│   │   ├── crc_fyrevm
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── crc_fyrevm.yml
│   │   │       └── main.yml
│   │   ├── crc_install
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── crc_install.yml
│   │   │       └── main.yml
│   │   ├── crc_oc_cli
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── crc_oc_cli.yml
│   │   │       └── main.yml
│   │   ├── crc_start
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   ├── crc_pull_pass
│   │   │   │   └── crc_pull_secret.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── crc_start.yml
│   │   │       └── main.yml
│   │   ├── crc_user
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── crc_user.yml
│   │   │       └── main.yml
│   │   ├── csi_cephfs_fyre
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   ├── csi-ceph.sh
│   │   │   │   └── wait-for-csi-ceph.sh
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── csi_cephfs_fyre.yaml
│   │   │       └── main.yml
│   │   ├── csi_cephfs_vmware
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   ├── csi-ceph.sh
│   │   │   │   ├── vsphere-block-storage.yaml
│   │   │   │   └── wait-for-csi-ceph.sh
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── csi_cephfs_vmware.yaml
│   │   │       └── main.yml
│   │   ├── db2_fyre
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── db2_fyrevm
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── db2_fyrevm.yml
│   │   │       └── main.yml
│   │   ├── db2_openshift
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── db2-deploy-openshift.yml
│   │   │   │   └── main.yml
│   │   │   ├── templates
│   │   │   │   ├── db2-cluster.yml.j2
│   │   │   │   ├── db2-operator-group.yml.j2
│   │   │   │   ├── db2-subscription.yml.j2
│   │   │   │   ├── ibm-catalog-resource.yml.j2
│   │   │   │   └── ibm-pull-secret.yml.j2
│   │   │   └── vars
│   │   │       └── main.yml
│   │   ├── db2jcc_jars
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── db2jcc_jars.yml
│   │   │       └── main.yml
│   │   ├── deploy_ingress_router_vmware
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── deploy_ingress_router_vmware.yaml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── ingress-router-patch-template.j2
│   │   ├── deploy_ova_vmware
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── deploy_ova_vmware.yaml
│   │   │       └── main.yml
│   │   ├── deploy_vmdisk_vmware
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── deploy_vmdisk_vmware.yaml
│   │   │       └── main.yml
│   │   ├── docker_ce_install
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── RedHat.yml
│   │   │       ├── Ubuntu.yml
│   │   │       └── main.yml
│   │   ├── fix_fyre_hosts_file
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── fix-fyre-host-file.yml
│   │   │       └── main.yml
│   │   ├── fyrevm_delete
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── fyrevm_delete.yml
│   │   │       └── main.yml
│   │   ├── fyrevm_provision
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── fyrevm_provision.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── vm_request.json.j2
│   │   ├── get_liberty_archive
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── get_liberty_archive.yml
│   │   │       └── main.yml
│   │   ├── get_ocp_installer
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── get_ocp_installer.yml
│   │   │       └── main.yml
│   │   ├── git_install_fyre
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── git_install_fyre.yaml
│   │   │       └── main.yml
│   │   ├── google_cli_install
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── install-google-client.yml
│   │   │       └── main.yml
│   │   ├── hive_ocp_cluster_delete
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── delete-cluster.yml
│   │   │       └── main.yml
│   │   ├── hive_ocp_cluster_provision
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   └── logmon.sh
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── provision-cluster.yml
│   │   │   └── templates
│   │   │       ├── aws-template.j2
│   │   │       ├── azure-osServicePrincipal.json.j2
│   │   │       ├── azure-template.j2
│   │   │       ├── google-osServiceAccount.json.j2
│   │   │       ├── google-template.j2
│   │   │       ├── ibmcloud-template.j2
│   │   │       └── vsphere-template.j2
│   │   ├── http_defaults
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   └── readme.md
│   │   ├── http_start
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── http_start.yml
│   │   │       └── main.yml
│   │   ├── http_stop
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── http_stop.yml
│   │   │       └── main.yml
│   │   ├── ibm_installation_manager
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── ibm_installation_manager.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── ibm_im_responsefile.xml.j2
│   │   ├── ibm_installation_manager_cic_selector
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   └── readme.md
│   │   ├── ibm_rhsm
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── RedHat.yml
│   │   │       └── main.yml
│   │   ├── ibm_semeru_jdks
│   │   │   ├── readme.md
│   │   │   ├── scripts
│   │   │   │   └── getSemeruJDK.py
│   │   │   └── tasks
│   │   │       ├── ibm_semeru_jdks.yml
│   │   │       └── main.yml
│   │   ├── im_cleanup_http
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── im_cleanup_http.yml
│   │   │       └── main.yml
│   │   ├── im_cleanup_liberty
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── im_cleanup_liberty.yml
│   │   │       └── main.yml
│   │   ├── im_install_http_plugin
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── im_install_http_plugin.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       ├── https_conf_WAS855.j2
│   │   │       ├── https_conf_WAS90.j2
│   │   │       ├── ihsserverkey.sh.j2
│   │   │       ├── im_http_plugin_WAS855.xml.j2
│   │   │       ├── im_http_plugin_WAS90.xml.j2
│   │   │       └── wct_definition.xml.j2
│   │   ├── im_install_liberty
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── im_install_liberty.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       ├── im_liberty.OS400.xml.j2
│   │   │       ├── im_liberty.xml.j2
│   │   │       └── server.env.j2
│   │   ├── im_install_twas
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── im_install_twas.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       ├── im_twas_WAS855.xml.j2
│   │   │       └── im_twas_WAS90.xml.j2
│   │   ├── im_rollback_twas
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── im_rollback_twas.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       ├── im_twas_WAS855.xml.j2
│   │   │       └── im_twas_WAS90.xml.j2
│   │   ├── im_update_twas
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── im_update_twas.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       ├── im_twas_WAS855.xml.j2
│   │   │       └── im_twas_WAS90.xml.j2
│   │   ├── instana_agent_install_config
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── Windows.yml
│   │   │       ├── custom_package_cron.yml
│   │   │       ├── install_agent_custom.yml
│   │   │       ├── install_agent_custom_chrp.yml
│   │   │       ├── install_agent_normal.yml
│   │   │       ├── main.yml
│   │   │       ├── remove_directory.yml
│   │   │       ├── remove_package.yml
│   │   │       ├── start.yml
│   │   │       ├── stop.yml
│   │   │       ├── unix.yml
│   │   │       └── zone.yml
│   │   ├── instana_agent_switch
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── com.instana.agent.main.sender.Backend.cfg.j2
│   │   ├── instana_agent_zone
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── configuration-zone.yaml.j2
│   │   ├── instana_fyrevm
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── instana_fyrevm.yml
│   │   │       └── main.yml
│   │   ├── instana_instance_on_prem
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── RedHat.yml
│   │   │   │   ├── Ubuntu.yml
│   │   │   │   ├── instana_instance_on_prem.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       ├── fdisk.create.j2
│   │   │       ├── fdisk.delete.j2
│   │   │       ├── instana.rhel.repo.j2
│   │   │       └── settings.hcl.j2
│   │   ├── ipi_ocp_cluster_delete
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── delete_cluster.yml
│   │   │       └── main.yml
│   │   ├── ipi_ocp_cluster_provision
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── provision-cluster.yml
│   │   │   └── templates
│   │   │       ├── aws-install-config.template.j2
│   │   │       ├── azure-install-config.template.j2
│   │   │       ├── google-install-config.template.j2
│   │   │       └── vsphere-install-config.template.j2
│   │   ├── jmeter
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── jmeter_fyrevm
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── jmeter_fyrevm.yml
│   │   │       └── main.yml
│   │   ├── jmeter_get_logs
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── jmeter_get_logs.yml
│   │   │       └── main.yml
│   │   ├── jmeter_java
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── jmeter_prereqs
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── liberty_collective
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── liberty_collective.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── jvm.options.j2
│   │   ├── liberty_daytrader8
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   └── io.openliberty.sample.daytrader8.war
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── liberty_daytrader8.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       ├── jvm.options.j2
│   │   │       └── server.xml.j2
│   │   ├── liberty_defaults
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── liberty_dynamicRouting
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── liberty_dynamicRouting.yml
│   │   │       └── main.yml
│   │   ├── liberty_dynamicRouting_plugin
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── liberty_dynamicRouting_plugin.yml
│   │   │       └── main.yml
│   │   ├── liberty_fetch_plugins
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── liberty_fetch_plugins.yml
│   │   │       └── main.yml
│   │   ├── liberty_get_logs
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── liberty_get_logs.yml
│   │   │       └── main.yml
│   │   ├── liberty_ibmi_grantauth
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── liberty_ibmi_granthauth.yml
│   │   │       └── main.yml
│   │   ├── liberty_join_collective
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── liberty_join_collective.yml
│   │   │       └── main.yml
│   │   ├── liberty_scaling
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── liberty_scaling.yml
│   │   │   │   └── main.yml
│   │   │   └── templates
│   │   │       └── scaling.xml.j2
│   │   ├── liberty_servers
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── liberty_servers.yml
│   │   │       └── main.yml
│   │   ├── linux_kill_process
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── linux_kill_process.yml
│   │   │       └── main.yml
│   │   ├── load_secrets
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── load_secrets.yml
│   │   │       └── main.yml
│   │   ├── nestuser
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── scripts
│   │   │   │   └── password_hasher.py
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── nfs_client_provisioner_fyre
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── nfs-client-provisioner-ocp-private.yml
│   │   │   ├── templates
│   │   │   │   ├── class.yml
│   │   │   │   ├── deployment.yml.j2
│   │   │   │   ├── nfs-cluster-rb.yml
│   │   │   │   ├── nfs-cluster-role.yml
│   │   │   │   ├── nfs-role-binding.yml
│   │   │   │   ├── nfs-role.yml
│   │   │   │   ├── nfs-sa.yml
│   │   │   │   ├── rbac.yml
│   │   │   │   └── test-claim.yml
│   │   │   └── vars
│   │   │       └── main.yml
│   │   ├── oc_client_install
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── oc-client-install.yml
│   │   ├── ocp_add_users_to_scc
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── ocp-add-users-to-scc.yml
│   │   │   └── vars
│   │   │       └── main.yml
│   │   ├── ocp_cluster_tag
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── add-tags.yml
│   │   │       └── main.yml
│   │   ├── ocp_login
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── ocp_login.yml
│   │   ├── ocp_pool_claim
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── ocp-pool-claim.yml
│   │   │   └── templates
│   │   │       ├── claim-template.j2
│   │   │       └── vsphere-install-config-template.j2
│   │   ├── ocp_request_token
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── ocp-request-token.yml
│   │   │   └── vars
│   │   │       └── main.yml
│   │   ├── ops_svt_jmeter_container
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── ops_svt_jmeter_container.yml
│   │   ├── osprereqs
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── AIX.7.yml
│   │   │       ├── RedHat.7.yml
│   │   │       ├── RedHat.8.yml
│   │   │       ├── RedHat.9.yml
│   │   │       ├── SLES.12.yml
│   │   │       ├── SLES.15.yml
│   │   │       ├── Ubuntu.18.yml
│   │   │       ├── Ubuntu.20.yml
│   │   │       ├── Ubuntu.22.yml
│   │   │       ├── anylinux.yml
│   │   │       └── main.yml
│   │   ├── pentest_fyrevm
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── pentest_fyrevm.yml
│   │   ├── podman
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── RedHat.8.yml
│   │   │       ├── RedHat.9.yml
│   │   │       └── main.yml
│   │   ├── post_install
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── post-install.yml
│   │   ├── provision_pool
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── provision_pool.yml
│   │   │   └── templates
│   │   │       ├── aws-install-config-template.j2
│   │   │       ├── aws-pool-template.j2
│   │   │       ├── azure-pool-template.j2
│   │   │       ├── google-pool-template.j2
│   │   │       ├── ibmcloud-pool-template.j2
│   │   │       ├── image-set-template.j2
│   │   │       └── vsphere-pool-template.j2
│   │   ├── python
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── AIX.yml
│   │   │       ├── default.yml
│   │   │       └── main.yml
│   │   ├── python_install_fyre
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── python_install_fyre.yaml
│   │   ├── quickcheck
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   ├── QuickCheck6.jar
│   │   │   │   └── daytrader.txt
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── quickcheck.yml
│   │   ├── remove_ocp_roks
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── remove-cluster.yml
│   │   ├── request_ocp4_logging
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── request_ocp_logging.yml
│   │   │   └── templates
│   │   │       ├── elasticsearch-operator.yaml.j2
│   │   │       ├── logging-operand.yaml.j2
│   │   │       ├── logging-operand0.yaml.j2
│   │   │       ├── logging-operator.yaml.j2
│   │   │       ├── logging-rbac.yaml.j2
│   │   │       ├── ocp4-logging-operand.sh.j2
│   │   │       └── ocp4-logging-operator.sh.j2
│   │   ├── request_ocp_fyre
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── create-ocpplus-custom-fyre.yml
│   │   │       ├── create-ocpplus-normal-fyre.yml
│   │   │       ├── create-ocpplus-quickburn-fyre.yml
│   │   │       ├── main.yml
│   │   │       ├── request-ocp-fyre.yml
│   │   │       ├── request-ocpplus-fyre.yml
│   │   │       └── request-quickburn-fyre.yml
│   │   ├── request_ocp_roks
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── create-cluster.yml
│   │   │       └── main.yml
│   │   ├── request_ocpplus_cluster_transfer_fyre
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── request-ocpplus-cluster-transfer.yml
│   │   │   └── templates
│   │   │       └── ocp_transfer.sh.j2
│   │   ├── request_ocs
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   └── ocs_install.sh
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── request_ocs.yml
│   │   ├── request_ocs_local_storage
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── request_ocs_local_storage.yml
│   │   │   └── templates
│   │   │       ├── local-storage-operator.yaml.j2
│   │   │       ├── local-volume-set.yaml.j2
│   │   │       ├── local-volumes-discovery.yaml.j2
│   │   │       ├── ocs-operator.yaml.j2
│   │   │       └── storage-cluster.yaml.j2
│   │   ├── setup_autowas
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── setup_svt_db2_container
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── setup_svt_db2_container.yml
│   │   ├── setup_svt_jmeter_container
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── setup_svt_jmeter_container.yml
│   │   ├── start_aws_cluster
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── start_aws_cluster.yml
│   │   ├── stop_aws_cluster
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── stop_aws_cluster.yml
│   │   ├── timezone
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       └── main.yml
│   │   ├── twas855x_ospreqs
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── AIX.7.chrp.yml
│   │   │       ├── RedHat.7.x86_64.yml
│   │   │       ├── RedHat.8.x86_64.yml
│   │   │       ├── RedHat.9.ppc64le.yml
│   │   │       ├── RedHat.9.s390x.yml
│   │   │       ├── RedHat.9.x86_64.yml
│   │   │       ├── SLES.12.x86_64.yml
│   │   │       ├── SLES.15.x86_64.yml
│   │   │       ├── Ubuntu.18.x86_64.yml
│   │   │       ├── Ubuntu.20.x86_64.yml
│   │   │       ├── Ubuntu.22.x86_64.yml
│   │   │       └── main.yml
│   │   ├── twas_cell_clear_logs
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_clear_logs.yml
│   │   ├── twas_cell_cluster_servers
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── twas_cell_cluster_servers.yml
│   │   │   └── templates
│   │   │       └── twas_cell_cluster_servers.py.j2
│   │   ├── twas_cell_common_daytrader
│   │   │   ├── readme.md
│   │   │   └── templates
│   │   │       ├── daytrader3.py.j2
│   │   │       ├── daytrader7.py.j2
│   │   │       ├── daytrader_config.py.j2
│   │   │       ├── daytrader_vars.py.j2
│   │   │       └── resource_scripts.py.j2
│   │   ├── twas_cell_daytrader3
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   └── DayTrader3.0.9-ee6-src.ear
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_daytrader3.yml
│   │   ├── twas_cell_daytrader7
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── files
│   │   │   │   └── DayTrader-ee7.0.11.ear
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_daytrader7.yml
│   │   ├── twas_cell_db_drop_sib_tables
│   │   │   ├── files
│   │   │   │   └── db2_drop_sib.py
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_db_drop_sib_tables.yml
│   │   ├── twas_cell_defaults
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   └── readme.md
│   │   ├── twas_cell_enable_ldap_security
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── twas_cell_enable_ldap_security.yml
│   │   │   └── templates
│   │   │       └── 99_twas_enable_ldap_security.py.j2
│   │   ├── twas_cell_federate
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_federate.yml
│   │   ├── twas_cell_get_logs
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_get_logs.yml
│   │   ├── twas_cell_ihsplugin_gen_propagate
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── twas_cell_ihsplugin_gen_propagate.yml
│   │   │   └── templates
│   │   │       └── ihsplugin_gen_propagate.py.j2
│   │   ├── twas_cell_kill
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_kill.yml
│   │   ├── twas_cell_profiles
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_profiles.yml
│   │   ├── twas_cell_put_db2jars
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_put_db2jars.yml
│   │   ├── twas_cell_start_cluster
│   │   │   ├── files
│   │   │   │   └── twas_cell_start_cluster.py
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_start_cluster.yml
│   │   ├── twas_cell_start_dmgr
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_start_dmgr.yml
│   │   ├── twas_cell_start_node
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_start_node.yml
│   │   ├── twas_cell_status
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_status.yml
│   │   ├── twas_cell_stop_cluster
│   │   │   ├── files
│   │   │   │   └── twas_cell_stop_cluster.py
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_stop_cluster.yml
│   │   ├── twas_cell_stop_dmgr
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_stop_dmgr.yml
│   │   ├── twas_cell_stop_node
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_stop_node.yml
│   │   ├── twas_cell_test_ldap
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_test_ldap.yml
│   │   ├── twas_cell_tuning
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── twas_cell_tuning.yml
│   │   │   └── templates
│   │   │       ├── resource_vars.py.j2
│   │   │       ├── set_instana_jvm.py.j2
│   │   │       └── tuneDayTrader.py.j2
│   │   ├── twas_cell_unamaged_web_node
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── twas_cell_unamaged_web_node.yml
│   │   │   └── templates
│   │   │       └── twas_cell_unamaged_web_node.py.j2
│   │   ├── twas_cell_wsadminlib
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_cell_wsadminlib.yml
│   │   ├── twas_config
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── scripts
│   │   │   │   ├── enableCustomProps.py
│   │   │   │   ├── enableHPEL.py
│   │   │   │   ├── enableTLS.py
│   │   │   │   ├── jvm.py
│   │   │   │   ├── updateCipherList.py
│   │   │   │   └── updateWebContainerThreadPools.py
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_config.yml
│   │   ├── twas_default_swaggerui
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_default_swaggerui.yml
│   │   ├── twas_enable_wim_registry
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   ├── tasks
│   │   │   │   ├── main.yml
│   │   │   │   └── twas_enable_wim_registry.yml
│   │   │   └── templates
│   │   │       └── wim_wizard.py.j2
│   │   ├── twas_ssl_client_props
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_ssl_client_props.yml
│   │   ├── twas_start_default
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_start_default.yml
│   │   ├── twas_stop_default
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── twas_stop_default.yml
│   │   ├── validate_ocp_install
│   │   │   ├── meta
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── validate_cluster.yml
│   │   ├── vnc
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── vnc.RedHat.yml
│   │   ├── wait_for_cluster_ready
│   │   │   ├── defaults
│   │   │   │   └── main.yml
│   │   │   ├── readme.md
│   │   │   └── tasks
│   │   │       ├── main.yml
│   │   │       └── wait_for_cluster_ready.yaml
│   │   └── was_automation_register
│   │       ├── defaults
│   │       │   └── main.yml
│   │       ├── readme.md
│   │       ├── scripts
│   │       │   └── configuretWasUsageMetering.py
│   │       ├── tasks
│   │       │   ├── liberty.yml
│   │       │   ├── main.yml
│   │       │   └── twas.yml
│   │       └── templates
│   │           ├── wa.pem.j2
│   │           └── was-usage-metering.xml.j2
│   ├── setup-autowas-play
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── roles
│   │   └── setup-autowas-play
│   ├── setup-new-fyre-host-play
│   │   ├── .gitignore
│   │   ├── Jenkinsfile
│   │   ├── README.md
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── roles
│   │   └── setup-new-fyre-host-play.yml
│   ├── vnc-play
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── examples
│   │   │   └── inventory
│   │   ├── roles
│   │   └── vnc-play
│   └── was-automation-register
│       ├── .gitignore
│       ├── Jenkinsfile.Liberty
│       ├── Jenkinsfile.tWAS
│       ├── Jenkinsfile.tWAS.remove
│       ├── Jenkinsfile.test
│       ├── README.md
│       ├── examples
│       │   └── inventory.yml
│       ├── roles
│       └── was-automation-register.yml
├── docs
│   ├── collection.md
│   ├── docker-image.md
│   ├── folder-structure.md
│   ├── jenkinsfile.md
│   ├── other-repositories.md
│   ├── playbooks.md
│   ├── roks.md
│   ├── role.readme.template.md
│   ├── roles.md
│   └── scripts.md
└── scripts
    ├── README.md
    ├── common
    │   ├── README.md
    │   ├── builddoc.sh
    │   ├── community-docker-build.sh
    │   ├── dockerfile
    │   ├── install-prereqs.sh
    │   └── rhel8-functions.sh
    └── content-tools
        └── README.md

/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/.DS_Store
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/custom.md:
--------------------------------------------------------------------------------
 1 | ---
 2 | name: Custom issue template
 3 | about: Describe this issue template's purpose here.
 4 | title: ''
 5 | labels: ''
 6 | assignees: ''
 7 | 
 8 | ---
 9 | 
10 | 
11 | 
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
 1 | ---
 2 | name: Feature request
 3 | about: Suggest an idea for this project
 4 | title: ''
 5 | labels: ''
 6 | assignees: ''
 7 | 
 8 | ---
 9 | 
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 | 
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 | 
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 | 
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 | 
--------------------------------------------------------------------------------
/.github/dco.yml:
--------------------------------------------------------------------------------
1 | # This enables the DCO bot for you; please take a look at https://github.com/probot/dco
2 | # for more details.
3 | require:
4 |   members: false
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
 1 | ---
 2 | sudo: true
 3 | dist: bionic
 4 | os: linux
 5 | 
 6 | before_install:
 7 |   - sudo apt -y update
 8 |   - sudo apt -y install python3 python3-pip
 9 |   - sudo pip3 install setuptools
10 |   - sudo pip3 install --upgrade pip
11 |   - sudo pip3 install --upgrade setuptools
12 | 
13 | install:
14 |   # Install Ansible.
15 |   - pip3 install ansible
16 |   - pip3 install ansible-lint
17 | 
18 | script:
19 |   # Check the role/playbook's syntax.
20 |   #- cd ansible/roles
21 |   #- ansible-lint
22 | 
--------------------------------------------------------------------------------
/MAINTAINERS.md:
--------------------------------------------------------------------------------
1 | ## MAINTAINERS
2 | Ray Ashworth - ashworth@us.ibm.com
3 | Walter Krapohl - krapohl@us.ibm.com
--------------------------------------------------------------------------------
/ansible/ansible.cfg:
--------------------------------------------------------------------------------
1 | [defaults]
2 | inventory = inventory
3 | command_warnings = False
4 | filter_plugins = filter_plugins
5 | host_key_checking = False
6 | deprecation_warnings = False
7 | retry_files_enabled = False
8 | pipelining = True
9 | stdout_callback = debug
--------------------------------------------------------------------------------
/ansible/aws-route53-play/aws-route53-play.yml:
--------------------------------------------------------------------------------
 1 | ---
 2 | - hosts: localhost
 3 |   vars_files:
 4 |     - aws-route53-vars.yml
 5 |   collections:
 6 |     - community.aws
 7 | 
 8 |   tasks:
 9 |     - import_role:
10 |         name: aws_route53
--------------------------------------------------------------------------------
/ansible/aws-route53-play/examples/aws-route53-vars.yml:
--------------------------------------------------------------------------------
 1 | ---
 2 | aws_access_key: ""
 3 | aws_secret_key: ""
 4 | base_domain: "purple-chesterfield.com"
 5 | clustername: "your cluster name"
 6 | api_ip: ""
 7 | api_dns: "api.{{ clustername }}.{{ base_domain }}"
 8 | apps_ip: "<your static ip for apps url>"
 9 | apps_dns: "*.apps.{{ clustername }}.{{ base_domain }}"
10 | route_task: "add" # add|delete
--------------------------------------------------------------------------------
/ansible/aws-route53-play/examples/inventory:
--------------------------------------------------------------------------------
1 | [bastion]
2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3
--------------------------------------------------------------------------------
/ansible/aws-route53-play/requirements.yml:
--------------------------------------------------------------------------------
1 | ---
2 | collections:
3 |   - name: community.aws
4 |     source: https://galaxy.ansible.com
--------------------------------------------------------------------------------
/ansible/aws-route53-play/roles:
--------------------------------------------------------------------------------
1 | ../roles
--------------------------------------------------------------------------------
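Taken together, the aws-route53-play files above imply a particular layout: the playbook loads `aws-route53-vars.yml` via `vars_files` with no path, so the filled-in vars file must sit next to the playbook rather than under `examples/`. A minimal run sketch (the copy steps and working directory are assumptions; the filenames come from the blocks above):

```bash
# Sketch: stage and edit the example files, then run the play.
# Assumes the current directory is ansible/aws-route53-play/.
cp examples/aws-route53-vars.yml .   # fill in keys, cluster name, and IPs first
cp examples/inventory .
ansible-playbook -i inventory aws-route53-play.yml
```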
/ansible/common-service-cat-src-inst-play/Jenkinsfile:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/common-service-cat-src-inst-play/Jenkinsfile
--------------------------------------------------------------------------------
/ansible/common-service-cat-src-inst-play/common-services-cat-src.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Install the common services catalog source
3 |   hosts: bastion
4 |   roles:
5 |     - common_services_cat_src_inst
--------------------------------------------------------------------------------
/ansible/common-service-cat-src-inst-play/examples/cs_cat_src_vars.yml:
--------------------------------------------------------------------------------
1 | ---
2 | catalog_source_version: latest # This is the version of the catalog source you want from quay.io. Options are `latest` or `dev-latest`.
--------------------------------------------------------------------------------
/ansible/common-service-cat-src-inst-play/examples/inventory:
--------------------------------------------------------------------------------
1 | [bastion]
2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3
--------------------------------------------------------------------------------
/ansible/common-service-cat-src-inst-play/roles:
--------------------------------------------------------------------------------
1 | ../roles
--------------------------------------------------------------------------------
/ansible/common-service-fyre-play/Jenkinsfile:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/common-service-fyre-play/Jenkinsfile
--------------------------------------------------------------------------------
/ansible/common-service-fyre-play/common-services-fyre.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Install common services onto OCP+Beta Fyre clusters
3 |   hosts: bastion
4 |   roles:
5 |     - csi_cephfs_fyre
6 |     - common_services
--------------------------------------------------------------------------------
/ansible/common-service-fyre-play/examples/inventory:
--------------------------------------------------------------------------------
1 | [bastion]
2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3
--------------------------------------------------------------------------------
/ansible/common-service-fyre-play/roles:
--------------------------------------------------------------------------------
1 | ../roles
--------------------------------------------------------------------------------
/ansible/common-service-play/Jenkinsfile:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/common-service-play/Jenkinsfile
--------------------------------------------------------------------------------
/ansible/common-service-play/common-services.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Install common services
3 |   hosts: bastion
4 |   roles:
5 |     - common_services
6 | 
--------------------------------------------------------------------------------
/ansible/common-service-play/examples/inventory:
--------------------------------------------------------------------------------
1 | [bastion]
2 | localhost ansible_connection=local
3 | 
4 | 
--------------------------------------------------------------------------------
/ansible/common-service-play/roles:
--------------------------------------------------------------------------------
1 | ../roles
--------------------------------------------------------------------------------
/ansible/csi-cephfs-fyre-play/Jenkinsfile:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/csi-cephfs-fyre-play/Jenkinsfile
--------------------------------------------------------------------------------
/ansible/csi-cephfs-fyre-play/ansible.cfg:
--------------------------------------------------------------------------------
1 | [defaults]
2 | inventory = inventory
3 | command_warnings = False
4 | filter_plugins = filter_plugins
5 | host_key_checking = False
6 | deprecation_warnings = False
7 | retry_files_enabled = False
8 | pipelining = True
--------------------------------------------------------------------------------
/ansible/csi-cephfs-fyre-play/csi-cephfs.yml:
--------------------------------------------------------------------------------
 1 | ---
 2 | - name: Install csi-cephfs on fyre OCP+Beta env
 3 |   hosts: bastion
 4 |   gather_facts: no
 5 |   tasks:
 6 |     - name: python_install_fyre
 7 |       include_role:
 8 |         name: python_install_fyre
 9 |     - name: Gathering Facts
10 |       setup:
11 |     - name: git_install_fyre
12 |       include_role:
13 |         name: git_install_fyre
14 |     - name: csi_cephfs_fyre
15 |       include_role:
16 |         name: csi_cephfs_fyre
--------------------------------------------------------------------------------
/ansible/csi-cephfs-fyre-play/examples/inventory:
--------------------------------------------------------------------------------
1 | [bastion]
2 | fyre.inf.node.9dot.ip ansible_connection=ssh ansible_ssh_user=root ansible_ssh_pass="fyre.root.pw" ansible_ssh_common_args='-o StrictHostKeyChecking=no'
--------------------------------------------------------------------------------
/ansible/csi-cephfs-fyre-play/roles:
--------------------------------------------------------------------------------
1 | ../roles
--------------------------------------------------------------------------------
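A note on `csi-cephfs.yml` above: the play sets `gather_facts: no` and only runs the bare `setup:` task after the `python_install_fyre` role. That ordering reads as deliberate, since Ansible fact gathering needs Python on the target, so Python is installed first and facts are collected afterwards for the remaining roles (an inference from the task order, not something the play documents). A hedged run sketch using the example inventory:

```bash
# Sketch: copy the example inventory and substitute a real Fyre inf node
# address and root password before running (the example values are placeholders).
cp examples/inventory .
ansible-playbook -i inventory csi-cephfs.yml
```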
/ansible/db2-openshift-play/ansible.cfg:
--------------------------------------------------------------------------------
 1 | [defaults]
 2 | inventory = inventory
 3 | command_warnings = False
 4 | filter_plugins = filter_plugins
 5 | host_key_checking = False
 6 | deprecation_warnings = False
 7 | retry_files_enabled = False
 8 | pipelining = True
 9 | stdout_callback = debug
10 | roles_path = ../roles
11 | nocows = 1
--------------------------------------------------------------------------------
/ansible/db2-openshift-play/db2-openshift-play.yml:
--------------------------------------------------------------------------------
 1 | #
 2 | # Copyright 2020- IBM Inc. All rights reserved
 3 | # SPDX-License-Identifier: Apache-2.0
 4 | #
 5 | ---
 6 | - name: Install DB2 Community Edition on fyre
 7 |   hosts: localhost
 8 |   gather_facts: no
 9 |   roles:
10 |     - role: db2_openshift
--------------------------------------------------------------------------------
/ansible/db2-openshift-play/examples/db2_vars_example.yml:
--------------------------------------------------------------------------------
 1 | #
 2 | # Copyright 2020- IBM Inc. All rights reserved
 3 | # SPDX-License-Identifier: Apache-2.0
 4 | #
 5 | ---
 6 | kubeadmin_user: "my_kubeadmin_user_name"
 7 | kubeadmin_password: "my_kubeadmin_password"
 8 | ocp_api_url: "https://my_ocp_api_url:port"
 9 | db2_namespace: "any_namespace"
10 | storageClassName: "storage_class_name"
11 | entitled_key: "my_IBM_entitled_key"
--------------------------------------------------------------------------------
/ansible/db2-openshift-play/galaxy.yml:
--------------------------------------------------------------------------------
 1 | #
 2 | # Copyright 2020- IBM Inc. All rights reserved
 3 | # SPDX-License-Identifier: Apache-2.0
 4 | #
 5 | ---
 6 | collections:
 7 |   - name: community.okd
 8 |     version: 1.0.1
 9 |   - name: community.general
10 |   - name: community.kubernetes
11 |     version: 1.1.1
12 | 
13 | 
--------------------------------------------------------------------------------
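The db2-openshift-play blocks above pin collection versions and list the cluster credentials the play expects. A hedged usage sketch (the copied filename `db2_vars.yml` is an assumption; `ansible-galaxy collection install` and the `-e @file` flag are standard tooling):

```bash
# Sketch: install the collections pinned above, then run the play with the
# example vars. Assumes the current directory is ansible/db2-openshift-play/.
ansible-galaxy collection install community.okd:1.0.1 community.kubernetes:1.1.1 community.general
cp examples/db2_vars_example.yml db2_vars.yml   # fill in real credentials and URLs first
ansible-playbook db2-openshift-play.yml -e @db2_vars.yml
```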
/ansible/deply-ova-vmware-play/Jenkinsfile:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/deply-ova-vmware-play/Jenkinsfile
--------------------------------------------------------------------------------
/ansible/deply-ova-vmware-play/ansible.cfg:
--------------------------------------------------------------------------------
1 | [defaults]
2 | inventory = inventory
3 | command_warnings = False
4 | filter_plugins = filter_plugins
5 | host_key_checking = False
6 | deprecation_warnings = False
7 | retry_files_enabled = False
8 | pipelining = True
--------------------------------------------------------------------------------
/ansible/deply-ova-vmware-play/deploy-ova-vmware.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Download ova and then deploy to vcenter
3 |   hosts: bastion
4 |   roles:
5 |     - deploy_ova_vmware
6 | 
--------------------------------------------------------------------------------
/ansible/deply-ova-vmware-play/examples/inventory:
--------------------------------------------------------------------------------
1 | [bastion]
2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3
--------------------------------------------------------------------------------
/ansible/deply-ova-vmware-play/roles:
--------------------------------------------------------------------------------
1 | ../roles
--------------------------------------------------------------------------------
/ansible/fix-fyre-hosts-play/.gitignore:
--------------------------------------------------------------------------------
1 | /inventory
--------------------------------------------------------------------------------
/ansible/fix-fyre-hosts-play/README.md:
--------------------------------------------------------------------------------
 1 | # Ansible Playbook for configuring target Fyre hosts for tWAS operations
 2 | 
 3 | ## Assumptions:
 4 | 
 5 | 
 6 | ## Setting up inventory
 7 | 
 8 | - From the `fix-fyre-hosts-play` directory, copy the sample inventory file at `examples/inventory` to the current directory.
 9 | - Modify `hosts` to match your target hosts.
10 | 
11 | ```
12 | cp examples/inventory .
13 | ```
14 | 
15 | ## Run playbook
16 | 
17 | The playbook/role supports Fyre hosts.
18 | 
19 | 
20 | Once you have configured the `inventory` file, run the playbook using:
21 | 
22 | ```
23 | ansible-playbook -i inventory fix-fyre-hosts-play.yml
24 | 
25 | ```
26 | 
--------------------------------------------------------------------------------
/ansible/fix-fyre-hosts-play/examples/inventory:
--------------------------------------------------------------------------------
1 | [all]
2 | rhel1.fyre.ibm.com
3 | sles1.fyre.ibm.com
4 | ub1.fyre.ibm.com
5 | 
6 | [all:vars]
7 | ansible_user=root
--------------------------------------------------------------------------------
/ansible/fix-fyre-hosts-play/fix-fyre-hosts-play.yml:
--------------------------------------------------------------------------------
1 | ---
2 | 
3 | - hosts: all
4 |   gather_facts: true
5 |   become: yes
6 |   roles:
7 |     - role: fix_fyre_hosts_file
--------------------------------------------------------------------------------
/ansible/fix-fyre-hosts-play/roles:
--------------------------------------------------------------------------------
1 | ../roles
--------------------------------------------------------------------------------
/ansible/hive-ocp-cluster-play/Jenkinsfile:
--------------------------------------------------------------------------------
1 | # jenkins file for hive ocp provisioning
--------------------------------------------------------------------------------
/ansible/hive-ocp-cluster-play/ansible.cfg:
--------------------------------------------------------------------------------
 1 | [defaults]
 2 | inventory = inventory
 3 | command_warnings = False
 4 | filter_plugins = filter_plugins
 5 | host_key_checking = False
 6 | deprecation_warnings = False
 7 | retry_files_enabled = False
 8 | pipelining = True
 9 | stdout_callback = debug
10 | roles_path = ../roles
--------------------------------------------------------------------------------
/ansible/hive-ocp-cluster-play/examples/aws-vars.yml:
--------------------------------------------------------------------------------
 1 | ---
 2 | aws_access_key: " " # your AWS_ACCESS_KEY_ID
 3 | aws_secret_key: " " # your AWS_SECRET_ACCESS_KEY
 4 | AWS_ACCESS_KEY_ID: "{{ aws_access_key }}"
 5 | AWS_SECRET_ACCESS_KEY: "{{ aws_secret_key }}"
 6 | AWS_REGION: "us-east-1"
 7 | WORKER_VM_SIZE: m5.2xlarge # size of worker VM instances
 8 | MASTER_VM_SIZE: m5.2xlarge # size of master VM instances
 9 | WORKER_VOLUME_SIZE: 256 # GB
10 | MASTER_VOLUME_SIZE: 256 # GB
--------------------------------------------------------------------------------
/ansible/hive-ocp-cluster-play/examples/inventory:
--------------------------------------------------------------------------------
1 | [bastion]
2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3
--------------------------------------------------------------------------------
/ansible/hive-ocp-cluster-play/examples/roks-vars.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # PLACE HOLDER ONLY
3 | "roks_api_token": "3-rKI5k7QG10Dq0ycSGiPHxb6hqfXmbeybahqshO-fI"
4 | "roks_ocp_api_url": "https://c104-e.us-east.containers.cloud.ibm.com:30431"
--------------------------------------------------------------------------------
/ansible/hive-ocp-cluster-play/requirements.yml:
--------------------------------------------------------------------------------
 1 | ---
 2 | collections:
 3 |   - name: community.general
 4 |     source: https://galaxy.ansible.com
 5 |   - name: community.aws
 6 |     source: https://galaxy.ansible.com
 7 |   - name: amazon.aws
 8 |     source: https://galaxy.ansible.com
 9 |   - name: google.cloud
10 |     source: https://galaxy.ansible.com
11 | 
--------------------------------------------------------------------------------
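`requirements.yml` files like the two above (aws-route53-play and hive-ocp-cluster-play) are consumed by `ansible-galaxy` rather than by the plays themselves, so the listed collections have to be installed on the control node before the first run:

```bash
# Install every collection listed in requirements.yml from galaxy.ansible.com.
ansible-galaxy collection install -r requirements.yml
```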
/ansible/ibm_installation_manager_play/.gitignore:
--------------------------------------------------------------------------------
1 | /inventory
--------------------------------------------------------------------------------
/ansible/ibm_installation_manager_play/README.md:
--------------------------------------------------------------------------------
 1 | # Ansible Playbook for installing IBM Installation Manager
 2 | 
 3 | ## Limitations:
 4 | 
 5 | - Support for user install
 6 | 
 7 | ## Setting up inventory
 8 | 
 9 | - From the `ibm_installation_manager_play` directory, copy the sample inventory file at `examples/inventory` to the current directory.
10 | 
11 | ```
12 | cp examples/inventory .
13 | ```
14 | - Change the ansible_user in the inventory to match the target user you wish to install IBM Installation Manager as.
15 | 
16 | ## Run playbook
17 | 
18 | 
19 | Once you have configured the `inventory` file, run the playbook using:
20 | 
21 | ```
22 | ansible-playbook -i inventory ibm_installation_manager_play.yml
23 | ```
24 | 
--------------------------------------------------------------------------------
/ansible/ibm_installation_manager_play/examples/inventory:
--------------------------------------------------------------------------------
1 | [all]
2 | myhost
3 | 
4 | [all:vars]
5 | ansible_user=root
6 | im_driver_version=latest
7 | gsa_user='yourgsauser'
8 | gsa_pass='yourgsapass'
--------------------------------------------------------------------------------
/ansible/ibm_installation_manager_play/ibm_installation_manager_play.yml:
--------------------------------------------------------------------------------
1 | ---
2 | 
3 | - hosts: all
4 |   roles:
5 |     - role: ibm_installation_manager
6 | 
7 | 
--------------------------------------------------------------------------------
/ansible/ibm_installation_manager_play/roles:
--------------------------------------------------------------------------------
1 | ../roles
--------------------------------------------------------------------------------
/ansible/install_instana_agent/.gitignore:
--------------------------------------------------------------------------------
1 | /inventory*
--------------------------------------------------------------------------------
/ansible/install_instana_agent/examples/inventory.switch.yml:
--------------------------------------------------------------------------------
 1 | all:
 2 |   vars:
 3 |     agent_key: ''
 4 |     instana_host: ''
 5 |     instana_port: '443'
 6 | 
 7 |   # adjust the contents below to match your configuration
 8 |   hosts:
 9 |   children:
10 |     Unix:
11 |       vars:
12 |         ansible_user: root # user on the target envs
13 |       hosts:
14 |         rhel8.fyre.ibm.com
15 |     # windows only required variables
16 |     Windows:
17 |       vars:
18 |         ansible_user: Administrator # user on the target envs
19 |         ansible_shell_type: cmd
20 |       hosts:
21 |         win1.fyre.ibm.com
--------------------------------------------------------------------------------
/ansible/install_instana_agent/examples/inventory.unix.yml:
--------------------------------------------------------------------------------
 1 | all:
 2 |   vars:
 3 |     agent_key: ''
 4 |     instana_host: ''
 5 |     instana_port: '1444'
 6 |     instana_zone: ''
 7 |     custom_agent_url: ''
 8 |     instana_mode: 'dynamic' # dynamic or static
 9 |     instana_runtime: '-j' # blank is Azul, -j is Eclipse OpenJ9 11
10 | 
11 |   # adjust the contents below to match your configuration
12 |   hosts:
13 |   children:
14 |     Unix:
15 |       vars:
16 |         ansible_user: nest # user on the target envs
17 |       hosts:
18 |         rhel8.fyre.ibm.com:
19 |         aix7.fyre.ibm.com:
20 |         sles15.fyre.ibm.com:
21 | 
--------------------------------------------------------------------------------
/ansible/install_instana_agent/install_instana_agent.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: all
3 |   roles:
4 |     - role: instana_agent_install_config
--------------------------------------------------------------------------------
/ansible/install_instana_agent/instana_agent_switch.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: all
3 |   roles:
4 |     - role: instana_agent_switch
--------------------------------------------------------------------------------
/ansible/install_instana_agent/instana_agent_zone.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: all
3 |   roles:
4 |     - role: instana_agent_zone
--------------------------------------------------------------------------------
/ansible/install_instana_agent/roles:
--------------------------------------------------------------------------------
1 | ../roles
--------------------------------------------------------------------------------
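All three install_instana_agent playbooks above target `hosts: all` and differ only in the role they apply, so which machines are touched is decided entirely by the inventory passed on the command line. A hedged run sketch (the inventory filenames come from the examples above; `agent_key` and `instana_host` must be filled in first):

```bash
# Sketch: agent install against the unix example inventory.
ansible-playbook -i examples/inventory.unix.yml install_instana_agent.yml

# Sketch: backend switch, which pairs with its own inventory layout.
ansible-playbook -i examples/inventory.switch.yml instana_agent_switch.yml
```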
21 characters on AWS, limit has not been researched on other platforms. 3 | 4 | # Public ssh key. Typically from the VM you're creating the cluster on 5 | "SSH_PUBLIC_KEY": '' 6 | 7 | # Set to purple-chesterfield.com or brown-chesterfield.com only. 8 | "BASE_DOMAIN": "purple-chesterfield.com" 9 | 10 | # What OCP version you want to install 11 | "OCP_RELEASE_IMAGE": "quay.io/openshift-release-dev/ocp-release:4.8.13-x86_64" 12 | 13 | # Redhat pull secret. You can get one here https://cloud.redhat.com/openshift/install/aws/installer-provisioned 14 | "PULL_SECRET": '' # be sure to leave the quotes in place 15 | -------------------------------------------------------------------------------- /ansible/ipi-ocp-aws-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/ipi-ocp-aws-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/ipi-ocp-vmware-play/README.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | This playbook will deploy a cluster using the IPI installer on VMware infrastructure. 4 | 5 | ## Requirements 6 | 7 | - Assumes ansible 2.9 or higher 8 | 9 | ## Variable files 10 | 11 | Sample variable files can be found in the examples folder and should be copied to the main play folder. 12 | 13 | - vsphere-vars.yml - holds vsphere specific variables 14 | - common-vars.yml - holds common variables 15 | - aws-vars.yml - holds AWS Route53 DNS information 16 | 17 | ## Provision cluster 18 | 19 | ```bash 20 | # ansible-playbook -i inventory provision-cluster.yml 21 | ``` 22 | 23 | ## Delete cluster 24 | 25 | ```bash 26 | # ansible-playbook -i inventory delete-cluster.yml 27 | ``` 28 | -------------------------------------------------------------------------------- /ansible/ipi-ocp-vmware-play/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = inventory 3 | command_warnings = False 4 | filter_plugins = filter_plugins 5 | host_key_checking = False 6 | deprecation_warnings = False 7 | retry_files_enabled = False 8 | pipelining = True 9 | stdout_callback = debug 10 | roles_path = ../roles 11 | nocows = 1 -------------------------------------------------------------------------------- /ansible/ipi-ocp-vmware-play/examples/aws-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | aws_access_key: " " # your AWS_ACCESS_KEY_ID 3 | aws_secret_key: " " # your AWS_SECRET_ACCESS_KEY 4 | AWS_ACCESS_KEY_ID: "{{ aws_access_key }}" 5 | AWS_SECRET_ACCESS_KEY: "{{ aws_secret_key }}" 6 | AWS_REGION: "us-east-1" 7 | 8 | WORKER_VM_SIZE: m5.2xlarge # size of worker VM instances 9 | MASTER_VM_SIZE: m5.2xlarge # size of master VM instances 10 | WORKER_VOLUME_SIZE: 256 # GB 11 | MASTER_VOLUME_SIZE: 256 # GB -------------------------------------------------------------------------------- /ansible/ipi-ocp-vmware-play/examples/common-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "CLUSTER_NAME": "" # limit to 21 characters on AWS, limit has not been researched on other platforms. 3 | 4 | # Public ssh key.
Typically from the VM you're creating the cluster on 5 | "SSH_PUBLIC_KEY": '' 6 | 7 | # Set to purple-chesterfield.com or brown-chesterfield.com only. 8 | "BASE_DOMAIN": "purple-chesterfield.com" 9 | 10 | # What OCP version you want to install 11 | "OCP_RELEASE_IMAGE": "quay.io/openshift-release-dev/ocp-release:4.7.0-x86_64" 12 | 13 | # Redhat pull secret. You can get one here https://cloud.redhat.com/openshift/install/aws/installer-provisioned 14 | "PULL_SECRET": '' # be sure to leave the quotes in place -------------------------------------------------------------------------------- /ansible/ipi-ocp-vmware-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 -------------------------------------------------------------------------------- /ansible/ipi-ocp-vmware-play/examples/vsphere-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Worker details (Defaults) uncomment if you have changes 3 | # WORKER_CPUS: 16 4 | # WORKER_MEMORY: 5 | # WORKER_DISK_SIZE: 300 6 | # WORKER_COUNT: 3 7 | 8 | # MASTER details (Defaults) uncomment if you have changes 9 | # MASTER_CPUS: 10 10 | # MASTER_MEMORY: 32xxx 11 | # MASTER_DISK_SIZE: 200 12 | # MASTER_COUNT: 3 13 | 14 | # vCenter details 15 | API_PUBLIC_IP: "9.x.x.x" 16 | VCENTER_CLUSTERNAME: "" 17 | VCENTER_DATACENTER: "" 18 | VCENTER_DATASOURCE: "" 19 | APPS_PUBLIC_IP: "9.x.x.x" 20 | VCENTER_NETWORK: "" 21 | VCENTER_PASSWORD: "" 22 | VCENTER_ADMIN_USER: "" 23 | VCENTER_SERVER: "" -------------------------------------------------------------------------------- /ansible/nest-user-play/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory 2 | password.hash 3 | -------------------------------------------------------------------------------- /ansible/nest-user-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [all] 2 | rhel1.fyre.ibm.com 3 | sles1.fyre.ibm.com 4 | ub1.fyre.ibm.com 5 | 6 | [all:vars] 7 | ansible_user=root 8 | -------------------------------------------------------------------------------- /ansible/nest-user-play/nest-user-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - hosts: all 4 | gather_facts: true 5 | vars_files: 6 | - "{{ password_file }}" 7 | vars: 8 | - ansible_user: root 9 | roles: 10 | - role: nestuser 11 | -------------------------------------------------------------------------------- /ansible/nest-user-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/nfs-storageclass-openshift-fyre-play/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = inventory 3 | command_warnings = False 4 | filter_plugins = filter_plugins 5 | host_key_checking = False 6 | deprecation_warnings = False 7 | retry_files_enabled = False 8 | pipelining = True 9 | stdout_callback = debug 10 | roles_path = ../roles 11 | nocows = 1 -------------------------------------------------------------------------------- /ansible/nfs-storageclass-openshift-fyre-play/galaxy.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020- IBM
Inc. All rights reserved 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | --- 6 | collections: 7 | - name: community.okd 8 | version: 1.0.1 9 | - name: community.general 10 | - name: community.kubernetes 11 | version: 1.1.1 12 | 13 | -------------------------------------------------------------------------------- /ansible/nfs-storageclass-openshift-fyre-play/nfs-storageclass-openshift-fyre-play.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020- IBM Inc. All rights reserved 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | --- 6 | - name: Install NFS storage using NFS client provisioner on the Fyre master node 7 | hosts: localhost 8 | gather_facts: no 9 | roles: 10 | - role: nfs_client_provisioner_fyre -------------------------------------------------------------------------------- /ansible/ocp-cluster-admin-play/examples/admin-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "aws_access_key_id": "AWS_ACCESS_KEY_ID" 3 | "aws_secret_access_key": "AWS_SECRET_ACCESS_KEY" 4 | "aws_region": " " # e.g. us-east-1 5 | "admin_task": " " # stop|start 6 | "cloud": " " # aws|azure|google 7 | -------------------------------------------------------------------------------- /ansible/ocp-cluster-admin-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/ocp-cluster-admin-play/ocp-cluster-admin-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "{{ admin_task }} an {{ cloud }} cluster" 3 | hosts: bastion 4 | vars_files: 5 | - admin-vars.yml 6 | environment: 7 | AWS_SECRET_ACCESS_KEY: "{{ aws_secret_access_key }}" 8 | AWS_ACCESS_KEY_ID: "{{ aws_access_key_id }}" 9 | roles: 10 | - "{{ cloud }}-cli-install" 11 | - "{{ admin_task }}-{{ cloud }}-cluster" 12 | -------------------------------------------------------------------------------- /ansible/ocp-cluster-admin-play/readme.md: -------------------------------------------------------------------------------- 1 | # Cluster administration play 2 | 3 | ## Admin Tasks 4 | - Stop an AWS Cluster 5 | - Start an AWS Cluster 6 | 7 | ## Prereqs 8 | - an available AWS cluster 9 | - cluster tag 10 | - owner tag 11 | - AWS credentials 12 | 13 | ## Usage 14 | Jenkins job 15 | https://hyc-ibm-automation-guild-team-jenkins.swg-devops.com/job/cluster-ops/job/ocp-cluster-admin/job/master/build?delay=0sec 16 | 17 | From the ocp-cluster-admin-play folder: 18 | ``` 19 | cp examples/inventory . 20 | cp examples/admin-vars.yml .
21 | ``` 22 | 23 | Edit and update admin-vars.yml, then run: 24 | ``` 25 | # ansible-playbook -i inventory ocp-cluster-admin-play.yml 26 | ``` 27 | -------------------------------------------------------------------------------- /ansible/ocp-cluster-admin-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/ocp-cluster-tag-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/ocp-cluster-tag-play/ocp-cluster-tag-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "{{ admin_task }} an {{ cloud }} cluster" 3 | hosts: bastion 4 | gather_facts: no 5 | vars_files: 6 | - tag-vars.yml 7 | 8 | environment: 9 | AWS_SECRET_ACCESS_KEY: "{{ aws_secret_key }}" 10 | AWS_ACCESS_KEY_ID: "{{ aws_access_key }}" 11 | 12 | roles: 13 | - ocp_cluster_tag -------------------------------------------------------------------------------- /ansible/ocp-cluster-tag-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/ocp-pool-claim-play/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = inventory 3 | command_warnings = False 4 | filter_plugins = filter_plugins 5 | host_key_checking = False 6 | deprecation_warnings = False 7 | retry_files_enabled = False 8 | pipelining = True 9 | stdout_callback = debug -------------------------------------------------------------------------------- /ansible/ocp-pool-claim-play/examples/example-team-vars.yml: -------------------------------------------------------------------------------- 1 | # make a copy of this example and add your own values 2 | 3 | "ocs_storage": "false" # true|false - add OCS storage 4 | "ocp_logging": "false" # true|false - add OCP logging 5 | 6 | "post_install": "false" # true|false - call your post install role listed below 7 | "post_install_role": "" # a role containing tasks specific to your team.
8 | -------------------------------------------------------------------------------- /ansible/ocp-pool-claim-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [hive_instance] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/ocp-pool-claim-play/examples/pool-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # any or all of these can be passed on the ansible-playbook command line using -e "var=value" 3 | # ACM/HIVE Cluster Information 4 | "kubeadmin_user": "kubeadmin" 5 | "hive_hub_password": "" # RHACM/Hive Instance acquired from content devops team 6 | "ocp_api_url": "api.hive2.purple-chesterfield.com" # RHACM/Hive api URL 7 | 8 | # pool options 9 | "admin_task": "" # claim|release; can be easier to pass as a param to the playbook, -e "admin_task=claim" 10 | "pool_name": "" # the name of the cluster pool, predefined in RHACM/hive cluster 11 | "pool_namespace": "" # the cluster pool namespace, predefined in RHACM/hive cluster 12 | "claim_name": "" # a unique name of your choice -------------------------------------------------------------------------------- /ansible/ocp-pool-claim-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/osprereq-play/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory 2 | -------------------------------------------------------------------------------- /ansible/osprereq-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [all] 2 | rhel1.fyre.ibm.com 3 | sles1.fyre.ibm.com 4 | ub1.fyre.ibm.com 5 | 6 | [all:vars] 7 | ansible_user=root 8 | -------------------------------------------------------------------------------- /ansible/osprereq-play/osprereq-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - hosts: all 4 | gather_facts: true 5 | vars: 6 | - ansible_user: root 7 | roles: 8 | - role: osprereqs 9 | -------------------------------------------------------------------------------- /ansible/osprereq-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/prereq-play/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/prereq-play/prereq-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install prereqs 3 | hosts: bastion 4 | roles: 5 | - oc_client_install 6 | tasks: 7 | - name: install required python libraries 8 | pip: 9 | name: 10 | - boto 11 | - boto3 12 | - botocore 13 | - pyVim 14 | - pyvmomi 15 | - six 16 | - google-auth 17 | - google-api-python-client 18 | - google-auth-httplib2 19 | executable: pip3 20 | ignore_errors: yes 21 | -------------------------------------------------------------------------------- /ansible/prereq-play/requirements.yml:
-------------------------------------------------------------------------------- 1 | --- 2 | collections: 3 | - name: community.general 4 | source: https://galaxy.ansible.com 5 | - name: community.aws 6 | source: https://galaxy.ansible.com 7 | - name: amazon.aws 8 | source: https://galaxy.ansible.com 9 | - name: google.cloud 10 | source: https://galaxy.ansible.com 11 | -------------------------------------------------------------------------------- /ansible/prereq-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/provision-ocp-cluster-play/readme.md: -------------------------------------------------------------------------------- 1 | # This playbook moved 2 | 3 | See [hive-ocp-cluster-play](https://github.com/IBM/community-automation/blob/master/ansible/hive-ocp-cluster-play/readme.md) -------------------------------------------------------------------------------- /ansible/provision-ocp-vmware-ceph-play/Jenkinsfile: -------------------------------------------------------------------------------- 1 | # Jenkins file for provisioning 2 | -------------------------------------------------------------------------------- /ansible/provision-ocp-vmware-ceph-play/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = inventory 3 | command_warnings = False 4 | filter_plugins = filter_plugins 5 | host_key_checking = False 6 | deprecation_warnings = False 7 | retry_files_enabled = False 8 | pipelining = True 9 | stdout_callback = debug 10 | roles_path = ../roles 11 | nocows = 1 -------------------------------------------------------------------------------- /ansible/provision-ocp-vmware-ceph-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [hive_instance] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/provision-ocp-vmware-ceph-play/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | collections: 3 | - name: community.general 4 | source: https://galaxy.ansible.com 5 | - name: community.aws 6 | source: https://galaxy.ansible.com 7 | - name: amazon.aws 8 | source: https://galaxy.ansible.com 9 | - name: google.cloud 10 | source: https://galaxy.ansible.com 11 | - name: community.vmware 12 | source: https://galaxy.ansible.com 13 | -------------------------------------------------------------------------------- /ansible/provision-pool-play/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = inventory 3 | command_warnings = False 4 | filter_plugins = filter_plugins 5 | host_key_checking = False 6 | deprecation_warnings = False 7 | retry_files_enabled = False 8 | pipelining = True 9 | stdout_callback = debug 10 | roles_path = ../roles 11 | nocows = 1 -------------------------------------------------------------------------------- /ansible/provision-pool-play/examples/aws-pool-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | aws_access_key: " " # your AWS_ACCESS_KEY_ID 3 | aws_secret_key: " " # your AWS_SECRET_ACCESS_KEY 4 | AWS_ACCESS_KEY_ID: "{{ aws_access_key }}" 5 | AWS_SECRET_ACCESS_KEY: "{{ aws_secret_key }}" 6 | AWS_REGION: "us-east-1"
7 | WORKER_COUNT: 3 8 | WORKER_VM_SIZE: m5.2xlarge # size of worker VM instances 9 | MASTER_VM_SIZE: m5.2xlarge # size of master VM instances 10 | WORKER_VOLUME_SIZE: 256 # GB 11 | MASTER_VOLUME_SIZE: 256 # GB 12 | -------------------------------------------------------------------------------- /ansible/provision-pool-play/examples/azure-pool-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # will contain azure creds and/or keys 3 | -------------------------------------------------------------------------------- /ansible/provision-pool-play/examples/google-pool-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "google_project": "YOUR_GOOGLE_PROJECT" # example: oceanic-guard-191815 3 | "google_zone": "YOUR_GOOGLE_ZONE" 4 | "google_auth_kind": "serviceaccount" 5 | "google_instance_filter": "filter_specific_instances" 6 | "google_account_file": "YOUR_ACCOUNT_JSON_FILE_REFERENCE" # example: "/workspace/creds.gcp.json" 7 | -------------------------------------------------------------------------------- /ansible/provision-pool-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/provision-pool-play/examples/roks-pool-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # PLACE HOLDER ONLY 3 | "roks_api_token": "" 4 | "roks_ocp_api_url": "https://c104-e.us-east.containers.cloud.ibm.com:30431" 5 | -------------------------------------------------------------------------------- /ansible/provision-pool-play/provision-pool-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Provision Hive Pool 3 | hosts: all 4 | vars_files: 5 | - common-pool-vars.yml 6 | - "{{ cloud }}-pool-vars.yml" 7 | 8 | environment: 9 | AWS_ACCESS_KEY_ID: "{{ aws_access_key }}" 10 | AWS_SECRET_ACCESS_KEY: "{{ aws_secret_key }}" 11 | PATH: ".:{{ ansible_env.PATH }}" 12 | 13 | roles: 14 | # used to log in to the RHACM/HIVE cluster 15 | - role: ocp_login 16 | - role: provision_pool 17 | -------------------------------------------------------------------------------- /ansible/request-crc-fyre-play/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory 2 | -------------------------------------------------------------------------------- /ansible/request-crc-fyre-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [fyreApi] 2 | localhost ansible_connection=local crc_username=kevin fyreuser=myfyreuser fyreapikey=myfyreapikey vnc=True site=rtp 3 | -------------------------------------------------------------------------------- /ansible/request-crc-fyre-play/request-crc-fyre-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: fyreApi 3 | roles: 4 | - role: crc_fyrevm 5 | 6 | - hosts: crc 7 | vars: 8 | - ansible_user: root 9 | roles: 10 | - role: crc_user 11 | 12 | - hosts: crc 13 | roles: 14 | - { role: vnc, when: vnc|bool } 15 | - role: crc_install 16 | - role: crc_start 17 | - role: crc_oc_cli 18 | - role: crc_enable_telemetry 19 | 20 | --------------------------------------------------------------------------------
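As a quick usage sketch (assumed, not taken from a README in this play): request-crc-fyre-play.yml above is run the same way as the other Fyre plays in this repo, from its play directory with the copied sample inventory:

```bash
# hypothetical invocation; the sample inventory's fyreuser/fyreapikey
# values must first be replaced with real Fyre API credentials
cp examples/inventory .
ansible-playbook -i inventory request-crc-fyre-play.yml
```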
/ansible/request-crc-fyre-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-instana-host-fyre-play/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory 2 | /*.json 3 | -------------------------------------------------------------------------------- /ansible/request-instana-host-fyre-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [fyreApi] 2 | localhost ansible_connection=local fyreuser=myfyreuser fyreapikey=myfyreapikey site=rtp 3 | 4 | [instana] 5 | download_key=yourdkey agent_key=yourakey sales_key=yourskey instana_tenet=yourTenet instana_unit=yourUnit 6 | -------------------------------------------------------------------------------- /ansible/request-instana-host-fyre-play/request-instana-host-fyre-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: fyreApi 3 | roles: 4 | - role: instana_fyrevm 5 | 6 | - hosts: instana 7 | vars: 8 | - ansible_user: root 9 | roles: 10 | - role: fix_fyre_hosts_file 11 | - role: timezone 12 | - role: docker_ce_install 13 | - role: instana_instance_on_prem 14 | -------------------------------------------------------------------------------- /ansible/request-instana-host-fyre-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-ocp-ceph-fyre-play/Jenkinsfile: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/request-ocp-ceph-fyre-play/Jenkinsfile -------------------------------------------------------------------------------- /ansible/request-ocp-ceph-fyre-play/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = inventory 3 | command_warnings = False 4 | filter_plugins = filter_plugins 5 | host_key_checking = False 6 | deprecation_warnings = False 7 | retry_files_enabled = False 8 | pipelining = True 9 | -------------------------------------------------------------------------------- /ansible/request-ocp-ceph-fyre-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [fyreApi] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 fyreuser=myfyreuser fyreapikey=myfyreapikey fyre_ocptype=ocpplus 3 | -------------------------------------------------------------------------------- /ansible/request-ocp-ceph-fyre-play/request-ocp-ceph.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install OCP 4.x onto OCP+Beta Fyre cluster 3 | hosts: fyreApi 4 | roles: 5 | - role: request_ocp_fyre 6 | 7 | - name: Install csi-cephfs onto OCP+Beta Fyre cluster 8 | hosts: ocpClusters 9 | gather_facts: true 10 | roles: 11 | - role: git_install_fyre 12 | - role: csi_cephfs_fyre 13 | -------------------------------------------------------------------------------- /ansible/request-ocp-ceph-fyre-play/roles: -------------------------------------------------------------------------------- 1 | ../roles 
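Because the Fyre credentials in the sample inventories above are plain Ansible host variables, a minimal alternative sketch is to leave examples/inventory untouched and supply them as extra-vars, which take precedence over inventory values (the credential values here are placeholders):

```bash
# hypothetical: override the placeholder inventory values from the CLI
ansible-playbook -i examples/inventory request-ocp-ceph.yml \
  -e fyreuser=myfyreuser -e fyreapikey=myfyreapikey
```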
-------------------------------------------------------------------------------- /ansible/request-ocp-cs-install-fyre-play/Jenkinsfile: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/request-ocp-cs-install-fyre-play/Jenkinsfile -------------------------------------------------------------------------------- /ansible/request-ocp-cs-install-fyre-play/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = inventory 3 | command_warnings = False 4 | filter_plugins = filter_plugins 5 | host_key_checking = False 6 | deprecation_warnings = False 7 | retry_files_enabled = False 8 | pipelining = True 9 | -------------------------------------------------------------------------------- /ansible/request-ocp-cs-install-fyre-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [fyreApi] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 fyreuser=myfyreuser fyreapikey=myfyreapikey fyre_ocptype=ocpplus 3 | -------------------------------------------------------------------------------- /ansible/request-ocp-cs-install-fyre-play/request-ocp-cs-install.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install OCP+Beta Fyre cluster 3 | hosts: fyreApi 4 | roles: 5 | - role: request_ocp_fyre 6 | 7 | - name: Install csi-cephfs and then common services onto OCP+Beta Fyre cluster 8 | hosts: ocpClusters 9 | gather_facts: false 10 | roles: 11 | - role: git_install_fyre 12 | - role: csi_cephfs_fyre 13 | - role: common_services 14 | -------------------------------------------------------------------------------- /ansible/request-ocp-cs-install-fyre-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-ocp-fyre-play/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory -------------------------------------------------------------------------------- /ansible/request-ocp-fyre-play/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = inventory 3 | command_warnings = False 4 | filter_plugins = filter_plugins 5 | host_key_checking = False 6 | deprecation_warnings = False 7 | retry_files_enabled = False 8 | pipelining = True 9 | stdout_callback = debug 10 | roles_path = ../roles 11 | nocows = 1 -------------------------------------------------------------------------------- /ansible/request-ocp-fyre-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [fyreApi] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 fyreuser=myfyreuser fyreapikey=myfyreapikey -------------------------------------------------------------------------------- /ansible/request-ocp-fyre-play/examples/ocp_vars_example.yml: -------------------------------------------------------------------------------- 1 | ocpPlatform: "x" 2 | fyre_site: "svl" 3 | fyre_master_quantity: 3 4 | fyre_master_cpu: 8 5 | fyre_master_memory: 16 6 | fyre_worker_quantity: 3 7 | fyre_worker_cpu: 16 8 | fyre_worker_memory: 32 9 | fyre_worker_additional_disk: 300 10 | 
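The ocp_vars_example.yml above holds the Fyre sizing variables consumed by the request_ocp_fyre role. A minimal sketch of feeding it to the play that follows, assuming the file has been copied out of examples/ first:

```bash
# hypothetical: load the whole vars file with ansible's -e @file syntax
cp examples/ocp_vars_example.yml ocp_vars.yml
ansible-playbook -i inventory request-ocp-fyre-play.yml -e @ocp_vars.yml
```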
-------------------------------------------------------------------------------- /ansible/request-ocp-fyre-play/request-ocp-fyre-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: fyreApi 3 | roles: 4 | - role: request_ocp_fyre 5 | 6 | - hosts: ocpClusters 7 | gather_facts: false 8 | tasks: 9 | - name: Check machine alive 10 | raw: echo "hello I'm running on the inf node" 11 | changed_when: false 12 | -------------------------------------------------------------------------------- /ansible/request-ocp-fyre-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-ocp-roks-play/examples/remove-roks-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ansible_python_interpreter: /usr/bin/python3 3 | apikey: 1234567890123456 # Set the api key here to use for IBM Cloud authentication 4 | clusterName: newclustername # Provide a unique cluster name 5 | dataCenter: wdc04 # Provide the data center (zone) to deploy to 6 | hardware: shared # shared for virtual workers 7 | resourceGroup: default # Provide the Resource Group name where the cluster is deployed -- default is "default" resource group 8 | -------------------------------------------------------------------------------- /ansible/request-ocp-roks-play/remove-roks.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: localhost 3 | vars_files: 4 | - remove-roks-vars.yml 5 | collections: 6 | - ibm.cloudcollection 7 | 8 | tasks: 9 | - name: delete roks cluster 10 | import_role: 11 | name: remove_ocp_roks 12 | -------------------------------------------------------------------------------- /ansible/request-ocp-roks-play/request-roks.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: localhost 3 | vars_files: 4 | - request-roks-vars.yml 5 | collections: 6 | - ibm.cloudcollection 7 | 8 | tasks: 9 | - name: create roks cluster 10 | import_role: 11 | name: request_ocp_roks 12 | -------------------------------------------------------------------------------- /ansible/request-ocp-roks-play/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | collections: 3 | - name: ibm.cloudcollection 4 | version: '1.28.0' 5 | source: https://galaxy.ansible.com -------------------------------------------------------------------------------- /ansible/request-ocp-roks-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-ocp4-logging-fyre-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/request-ocp4-logging-fyre-play/examples/ocp_logging_fyre_vars.yml: -------------------------------------------------------------------------------- 1 | rook_cephfs_release: v1.3.8 #Rook-ceph release from https://github.com/rook/rook/releases. Default of v1.3.8 should work for most. 2 | rendered_sc: "csi-cephfs" #Storageclass to use for ocp logging PVCs.
If left empty ("") will use the designated default storageclass. 3 | -------------------------------------------------------------------------------- /ansible/request-ocp4-logging-fyre-play/request-ocp4-logging-fyre.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install OCP logging onto a fyre OCP+Beta 4.x cluster 3 | hosts: bastion 4 | roles: 5 | - request_ocs_local_storage 6 | - request_ocp4_logging 7 | -------------------------------------------------------------------------------- /ansible/request-ocp4-logging-fyre-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-ocp4-logging-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/request-ocp4-logging-play/examples/inventory_remote_template: -------------------------------------------------------------------------------- 1 | [bastion] 2 | @@@HOST@@@ ansible_connection=ssh ansible_ssh_user=@@@USERNAME@@@ ansible_ssh_private_key_file=@@@SSHKEY@@@ ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ConnectTimeout=30' ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/request-ocp4-logging-play/examples/ocp_logging_vars.yml: -------------------------------------------------------------------------------- 1 | rendered_sc: "" #Storageclass to use for ocp logging PVCs. If left empty will use the designated default storageclass. 2 | -------------------------------------------------------------------------------- /ansible/request-ocp4-logging-play/examples/ocp_logging_vars_template.yml: -------------------------------------------------------------------------------- 1 | rendered_sc: "@@@SCLASS@@@" #Storageclass to use for ocp logging PVCs. If left empty will use the designated default storageclass. 2 | -------------------------------------------------------------------------------- /ansible/request-ocp4-logging-play/request-ocp4-logging.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install OCP logging onto an OCP 4.x cluster 3 | hosts: bastion 4 | roles: 5 | - request_ocp4_logging 6 | -------------------------------------------------------------------------------- /ansible/request-ocp4-logging-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-ocpplus-cluster-transfer-fyre-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local 3 | -------------------------------------------------------------------------------- /ansible/request-ocpplus-cluster-transfer-fyre-play/examples/ocp_transfer_vars.yml: -------------------------------------------------------------------------------- 1 | transfer_to_email: #IBM Intranet email of the person the fyre OCP cluster is being transferred to.
2 | cluster_name: #Name of OCP+Beta cluster to transfer 3 | fyre_user: #Fyre API User name 4 | fyre_api_key: #Fyre API key 5 | -------------------------------------------------------------------------------- /ansible/request-ocpplus-cluster-transfer-fyre-play/request-ocpplus-transfer.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Transfer fyre OCP Plus Beta cluster to new user 3 | hosts: bastion 4 | roles: 5 | - request_ocpplus_cluster_transfer_fyre 6 | -------------------------------------------------------------------------------- /ansible/request-ocpplus-cluster-transfer-fyre-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-ocs-fyre-play/Jenkinsfile: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/request-ocs-fyre-play/Jenkinsfile -------------------------------------------------------------------------------- /ansible/request-ocs-fyre-play/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = inventory 3 | command_warnings = False 4 | filter_plugins = filter_plugins 5 | host_key_checking = False 6 | deprecation_warnings = False 7 | retry_files_enabled = False 8 | pipelining = True 9 | -------------------------------------------------------------------------------- /ansible/request-ocs-fyre-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ConnectTimeout=30' 3 | -------------------------------------------------------------------------------- /ansible/request-ocs-fyre-play/examples/inventory_remote_inf_node: -------------------------------------------------------------------------------- 1 | [bastion] 2 | fyre.inf.node.9dot.ip ansible_connection=ssh ansible_ssh_user=root ansible_ssh_pass="fyre.root.pw" ansible_ssh_common_args='-o StrictHostKeyChecking=no' 3 | -------------------------------------------------------------------------------- /ansible/request-ocs-fyre-play/examples/ocs_install_vars.yml: -------------------------------------------------------------------------------- 1 | setdefault: true #Set the default storageclass to the value in default_sc 2 | default_sc: ocs-storagecluster-cephfs # OCS storageclass to set as the default.
3 | 4 | fyre_ui_build: true # false when you use the fyre API to create the cluster 5 | ocp_client_version: "4.9.15" 6 | client_os: "linux" # mac|windows|linux 7 | 8 | login_retries: 10 9 | kubeadmin_password: "" # your kubeadmin password 10 | ocp_api_url: "api.myocp.cp.fyre.ibm.com" # note: do not add the https:// 11 | ocs_type: "ocs" # ocs|odf (default is ocs) 12 | ocs_channel_override: "" # e.g. 4.9 for OCP 4.10; update if you need to use a different channel (stable- will be prepended) 13 | -------------------------------------------------------------------------------- /ansible/request-ocs-fyre-play/request-ocs-fyre.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install ocs on fyre OCP+ env 3 | hosts: bastion 4 | vars_files: 5 | - ocs_install_vars.yml 6 | gather_facts: yes 7 | roles: 8 | - request_ocs_local_storage 9 | -------------------------------------------------------------------------------- /ansible/request-ocs-fyre-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-ocs-local-storage-vmware/Jenkinsfile: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/request-ocs-local-storage-vmware/Jenkinsfile -------------------------------------------------------------------------------- /ansible/request-ocs-local-storage-vmware/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = inventory 3 | command_warnings = False 4 | filter_plugins = filter_plugins 5 | host_key_checking = False 6 | deprecation_warnings = False 7 | retry_files_enabled = False 8 | pipelining = True 9 | -------------------------------------------------------------------------------- /ansible/request-ocs-local-storage-vmware/examples/inventory_local: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/request-ocs-local-storage-vmware/examples/inventory_remote_bastion_host: -------------------------------------------------------------------------------- 1 | [bastion] 2 | fyre.inf.node.9dot.ip ansible_connection=ssh ansible_ssh_user=root ansible_ssh_pass="fyre.root.pw" ansible_ssh_common_args='-o StrictHostKeyChecking=no' 3 | -------------------------------------------------------------------------------- /ansible/request-ocs-local-storage-vmware/examples/inventory_remote_keyfile_template: -------------------------------------------------------------------------------- 1 | [bastion] 2 | @@@HOST@@@ ansible_connection=ssh ansible_ssh_user=@@@USERNAME@@@ ansible_ssh_private_key_file=@@@SSHKEY@@@ ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ConnectTimeout=30' ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/request-ocs-local-storage-vmware/examples/ocs_install_vars.yml: -------------------------------------------------------------------------------- 1 | setdefault: true #Set the default storageclass to the value in default_sc 2 | default_sc: ocs-storagecluster-cephfs #
OCS storageclass to set as the default. 3 | 4 | # Change to the additional disk definition for your cluster type. 5 | # Only used for OCP 4.5 or 4.4 clusters when OCS 4.5 is installed. 6 | # Not used for OCP 4.6 clusters or newer when OCS 4.6 is installed. 7 | ocs_device: /dev/sdb # First additional disk drive definition. 8 | -------------------------------------------------------------------------------- /ansible/request-ocs-local-storage-vmware/request-ocs-local-storage-vmware.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install OCS on VMware clusters using local storageclass 3 | hosts: bastion 4 | gather_facts: no 5 | roles: 6 | - request_ocs_local_storage 7 | -------------------------------------------------------------------------------- /ansible/request-ocs-local-storage-vmware/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-ocs-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [bastion] 2 | localhost ansible_connection=local 3 | -------------------------------------------------------------------------------- /ansible/request-ocs-play/examples/inventory_remote_template: -------------------------------------------------------------------------------- 1 | [bastion] 2 | @@@HOST@@@ ansible_connection=ssh ansible_ssh_user=@@@USERNAME@@@ ansible_ssh_private_key_file=@@@SSHKEY@@@ ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ConnectTimeout=30' ansible_python_interpreter=/usr/bin/python3 3 | -------------------------------------------------------------------------------- /ansible/request-ocs-play/examples/ocs_vars.yml: -------------------------------------------------------------------------------- 1 | ocs_bastion_setup_dir: ~/setup-files/ocs-setup # Default dir to copy scripts into 2 | setdefault: true # Set ocs cephfs storageclass as default 3 | -------------------------------------------------------------------------------- /ansible/request-ocs-play/examples/ocs_vars_template.yml: -------------------------------------------------------------------------------- 1 | setdefault: "@@@SCDEDAULT@@@" # Set ocs cephfs storageclass as default 2 | -------------------------------------------------------------------------------- /ansible/request-ocs-play/request-ocs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install ocs on AWS or VMware 3 | hosts: bastion 4 | roles: 5 | - request_ocs 6 | -------------------------------------------------------------------------------- /ansible/request-ocs-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-rhel-db2-fyre-play/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory 2 | -------------------------------------------------------------------------------- /ansible/request-rhel-db2-fyre-play/db2.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - hosts: all 4 | vars: 5 | - ansible_user: root 6 | roles: 7 | - role: db2_fyre 8 | - { role: vnc, when: vnc|bool } 9 | 10 | 
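db2.yml above installs DB2 (and optionally VNC) without the Fyre provisioning step that request-rhel-db2-fyre-play.yml performs below. Since its vnc role is gated on a `vnc` variable, a minimal sketch of a standalone run against an existing host (hostname is a placeholder; the trailing comma makes ansible-playbook treat the value as an inline inventory list, the same idiom forceReplace.sh uses later in this section):

```bash
# hypothetical standalone DB2 install on an existing RHEL host
ansible-playbook -i mydb2host.fyre.ibm.com, db2.yml -e vnc=false
```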
-------------------------------------------------------------------------------- /ansible/request-rhel-db2-fyre-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [fyreApi] 2 | localhost ansible_connection=local fyreuser=myfyreuser fyreapikey=myfyreapikey vnc=True site=rtp 3 | -------------------------------------------------------------------------------- /ansible/request-rhel-db2-fyre-play/request-rhel-db2-fyre-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: fyreApi 3 | roles: 4 | - role: db2_fyrevm 5 | 6 | - hosts: db2 7 | vars: 8 | - ansible_user: root 9 | roles: 10 | - role: timezone 11 | 12 | - hosts: db2 13 | roles: 14 | - role: db2_fyre 15 | - { role: vnc, when: vnc|bool } 16 | 17 | -------------------------------------------------------------------------------- /ansible/request-rhel-db2-fyre-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-rhel-jmeter-fyre-play/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory 2 | -------------------------------------------------------------------------------- /ansible/request-rhel-jmeter-fyre-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [fyreApi] 2 | localhost ansible_connection=local jmeterUser=nest fyreuser=myfyreuser fyreapikey=myfyreapikey vnc=True site=rtp 3 | -------------------------------------------------------------------------------- /ansible/request-rhel-jmeter-fyre-play/forceReplace.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ansible-playbook -i somehost.fqdn, jmeter-play.yml \ 3 | -e jmeterUser=nest \ 4 | -e noLog=false \ 5 | -e javaArchive='https://github.com/ibmruntimes/semeru17-binaries/releases/download/jdk-17.0.2+8_openj9-0.30.0/ibm-semeru-open-jdk_x64_linux_17.0.2_8_openj9-0.30.0.tar.gz' \ 6 | -e jmeterArchive='http://somehost.fqdn/binaries/jmeter/apache-jmeter-5.4.3.zip' \ 7 | -e jmeterManagerPluginUrl='https://jmeter-plugins.org/get/' \ 8 | -e forceReplace=true \ 9 | -------------------------------------------------------------------------------- /ansible/request-rhel-jmeter-fyre-play/jmeter-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - hosts: all 4 | vars: 5 | - ansible_user: root 6 | roles: 7 | - role: podman 8 | 9 | - hosts: all 10 | vars: 11 | - ansible_user: "{{ jmeterUser }}" 12 | - javaDir: "/home/{{ jmeterUser }}/java" 13 | - jmeterDir: "/home/{{ jmeterUser }}/jmeter" 14 | roles: 15 | - role: jmeter_java 16 | - role: jmeter 17 | - { role: vnc, when: vnc|bool } 18 | -------------------------------------------------------------------------------- /ansible/request-rhel-jmeter-fyre-play/request-rhel-jmeter-fyre-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: fyreApi 3 | roles: 4 | - role: jmeter_fyrevm 5 | 6 | - hosts: jmeter 7 | vars_files: 8 | - "{{ password_file }}" 9 | vars: 10 | - ansible_user: root 11 | - crc_username: "{{ jmeterUser }}" 12 | roles: 13 | - role: jmeter_prereqs 14 | - role: nestuser 15 | - role: timezone 16 | - role: podman 17 | 18 | - hosts: jmeter 19 | vars: 20 | - javaDir: "/home/{{ jmeterUser }}/java" 21 | - 
jmeterDir: "/home/{{ jmeterUser }}/jmeter" 22 | roles: 23 | - role: jmeter_java 24 | - role: jmeter 25 | - { role: vnc, when: vnc|bool } 26 | 27 | -------------------------------------------------------------------------------- /ansible/request-rhel-jmeter-fyre-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/request-rhel-jmeter-fyre-play/scripts/isRunning.sh: -------------------------------------------------------------------------------- 1 | pids=$(ps -fe|grep jmeter|grep -v grep|awk '{print $2}') 2 | if [ "$1" == "gui" ] ; then 3 | if [ -z "$pids" ] ; then 4 | printf "Jmeter is not running\n" 5 | else 6 | printf "Jmeter is running. Process(s)\n$pids\n" 7 | fi 8 | else 9 | printf "$pids\n" 10 | fi 11 | -------------------------------------------------------------------------------- /ansible/request-rhel-jmeter-fyre-play/scripts/reportJmeter.sh: -------------------------------------------------------------------------------- 1 | # presume 1 report jmeter.jtl input file 2 | jreportCount=$(ls -1 *.jmeter.jtl|wc -l) 3 | [ "$jreportCount" != "1" ] && echo "ERROR: multiple input .jmeter.jtl files" && exit 2||true 4 | IFS='.' read inputjreport jreportext <<< $(ls -1 *.jmeter.jtl) 5 | [ -z $inputjreport ] && exit 1 6 | ### run as a sep process on a large host 7 | ### jmeter -g $inputjmx.jmeter.file -o $inputjmx.jmeter.report 8 | jmeter -g $inputjreport.$jreportext -o $inputjreport.jmeter.report 9 | -------------------------------------------------------------------------------- /ansible/request-rhel-jmeter-fyre-play/scripts/stopJmeter.sh: -------------------------------------------------------------------------------- 1 | #./isRunning.sh | xargs kill 2>/dev/null 2 | shutdown.sh 4445 3 | #shutdown.sh 4446 4 | #shutdown.sh 4447 5 | -------------------------------------------------------------------------------- /ansible/roles/authorized_keys/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Add authorized keys 4 | when: authorized_keys_url is defined 5 | block: 6 | - name: authorized_keys root 7 | authorized_key: 8 | user: root 9 | state: present 10 | manage_dir: yes 11 | key: "{{ authorized_keys_url }}" 12 | become: yes 13 | - name: authorized_keys user 14 | authorized_key: 15 | user: "{{ user_username }}" 16 | state: present 17 | manage_dir: yes 18 | key: "{{ authorized_keys_url }}" 19 | -------------------------------------------------------------------------------- /ansible/roles/aws_cli_install/tasks/aws_cli_install.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: check for aws client 4 | command: aws 5 | environment: 6 | PATH: ".:{{ ansible_env.PATH }}" 7 | register: aws_installed 8 | ignore_errors: yes 9 | 10 | - name: install AWS CLI 11 | when: '"usage" not in aws_installed.stderr' 12 | block: 13 | - name: download 14 | get_url: 15 | url: "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" 16 | dest: "."
17 | mode: 0755 18 | validate_certs: false 19 | 20 | - name: Unpack AWS CLI 21 | shell: | 22 | unzip -o "awscli-exe-linux-x86_64.zip" >/dev/null 23 | sudo ./aws/install 24 | -------------------------------------------------------------------------------- /ansible/roles/aws_cli_install/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: aws_cli_install.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/aws_route53/defaults/main.yml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/roles/aws_route53/defaults/main.yml -------------------------------------------------------------------------------- /ansible/roles/aws_route53/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: aws_route53.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/azure_cli_install/tasks/install-azure-client.yml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/roles/azure_cli_install/tasks/install-azure-client.yml -------------------------------------------------------------------------------- /ansible/roles/azure_cli_install/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: install-azure-client.yml -------------------------------------------------------------------------------- /ansible/roles/clean_vmware_kubevols/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "CLUSTER_NAME": "" 3 | "VCENTER_SERVER": "" 4 | "VCENTER_ADMIN_USER": "" 5 | "VCENTER_PASSWORD": '' 6 | "VCENTER_DATASOURCE": "" 7 | "VCENTER_DATACENTER": "" 8 | # GOVC info 9 | "GOVC_VERS": "0.21.0" 10 | "GOVC_DWLD": "govc_linux_amd64.gz" 11 | "GOVC_PRQ": "vmware" 12 | -------------------------------------------------------------------------------- /ansible/roles/clean_vmware_kubevols/readme.md: -------------------------------------------------------------------------------- 1 | clean_vmware_kubevols 2 | ========= 3 | 4 | After the VMware cluster is uninstalled, call this role to clean up any orphaned kubevol files. 5 | 6 | ------------ 7 | 8 | Requirements 9 | ------------ 10 | 11 | - Need to be on a Linux box or docker image that has run `community-automation/scripts/common/install-prereqs.sh`.
12 | 13 | Example Playbook 14 | ---------------- 15 | 16 | - name: clean orphaned kubevols on vCenter 17 | hosts: bastion 18 | roles: 19 | - clean_vmware_kubevols 20 | 21 | License 22 | ------- 23 | 24 | See [LICENSE](https://github.com/IBM/community-automation/blob/master/LICENSE) 25 | -------------------------------------------------------------------------------- /ansible/roles/clean_vmware_kubevols/tasks/clean_vmware_kubevols.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # Populate the kubevols cleanup script from its template, then run it against vCenter 3 | - name: Populate clean kubevols template 4 | template: 5 | src: "ocp_clean_kubevols.sh.j2" 6 | dest: "ocp_clean_kubevols.sh" 7 | backup: false 8 | mode: 0755 9 | 10 | - name: Clean kubevols for cluster {{ CLUSTER_NAME }} 11 | shell: bash -lc "ocp_clean_kubevols.sh" 12 | register: kubevols 13 | 14 | - name: Viewing kubevols cleanup for cluster {{ CLUSTER_NAME }} 15 | debug: 16 | msg: "{{ kubevols.stdout_lines }}" 17 | -------------------------------------------------------------------------------- /ansible/roles/clean_vmware_kubevols/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: clean_vmware_kubevols.yaml 3 | -------------------------------------------------------------------------------- /ansible/roles/cleanup_dhcp_leases/tasks/cleanup-leases.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Create bin dir 3 | file: 4 | path: /root/bin 5 | state: directory 6 | 7 | - name: copy removal script to dhcp server 8 | copy: 9 | src: files/remove-lease.sh 10 | dest: /root/bin 11 | mode: 0755 12 | 13 | - name: cleanup DHCP leases 14 | shell: ./remove-lease.sh "{{ CLUSTER_NAME }}" 15 | args: 16 | chdir: /root/bin 17 | register: command_results 18 | -------------------------------------------------------------------------------- /ansible/roles/cleanup_dhcp_leases/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: cleanup-leases.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/clone_repo/defaults/main.yml: -------------------------------------------------------------------------------- 1 | "ssh_private_key_file": "~/.ssh/id_rsa" 2 | "local_repo_location": "/tmp" 3 | "repo_branch": "master" 4 | "temp_branch": "test_branch" 5 | -------------------------------------------------------------------------------- /ansible/roles/clone_repo/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: clone-repo.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/collect_ips/tasks/collect_ips.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Collect cluster IPs 3 | shell: oc --no-headers=true get nodes -o wide | awk '{ print $6 }' >> "{{ cluster_name }}"_details.log 4 | environment: 5 | KUBECONFIG: "{{ kubeconfig_location }}/config" 6 | args: 7 | executable: /bin/bash 8 | register: ip_results 9 | -------------------------------------------------------------------------------- /ansible/roles/collect_ips/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: collect_ips.yml 3 |
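The collect_ips role above wraps a single shell pipeline. For reference, this is roughly the command its task runs, written out by hand (the KUBECONFIG path and log name are placeholders; column $6 of `oc get nodes -o wide` is the node INTERNAL-IP):

```bash
# roughly what collect_ips executes; appends one node IP per line
export KUBECONFIG=/path/to/kubeconfig/config
oc --no-headers=true get nodes -o wide | awk '{ print $6 }' >> mycluster_details.log
```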
-------------------------------------------------------------------------------- /ansible/roles/common_services/templates/catalog-source.yaml.j2: -------------------------------------------------------------------------------- 1 | apiVersion: operators.coreos.com/v1alpha1 2 | kind: CatalogSource 3 | metadata: 4 | name: opencloud-operators 5 | namespace: openshift-marketplace 6 | spec: 7 | displayName: IBMCS Operators 8 | publisher: IBM 9 | sourceType: grpc 10 | image: {{ cs_operator_catalog_image }} 11 | updateStrategy: 12 | registryPoll: 13 | interval: 45m 14 | -------------------------------------------------------------------------------- /ansible/roles/common_services/templates/cs-group.yaml.j2: -------------------------------------------------------------------------------- 1 | apiVersion: operators.coreos.com/v1 2 | kind: OperatorGroup 3 | metadata: 4 | name: {{ cs_operator_project_name }} 5 | namespace: {{ cs_operator_project_name }} 6 | spec: 7 | targetNamespaces: 8 | - {{ cs_operator_project_name }} 9 | 10 | -------------------------------------------------------------------------------- /ansible/roles/common_services/templates/cs-request.yaml.j2: -------------------------------------------------------------------------------- 1 | apiVersion: operator.ibm.com/v1alpha1 2 | kind: OperandRequest 3 | metadata: 4 | name: common-service 5 | namespace: ibm-common-services 6 | spec: 7 | requests: 8 | - operands: 9 | {% for o in cs_operand_list %} 10 | - name: {{ o }} 11 | {% endfor %} 12 | registry: common-service 13 | 14 | -------------------------------------------------------------------------------- /ansible/roles/common_services/templates/cs-sub.yaml.j2: -------------------------------------------------------------------------------- 1 | apiVersion: operators.coreos.com/v1alpha1 2 | kind: Subscription 3 | metadata: 4 | name: {{ cs_operator_name }} 5 | namespace: {{ cs_operator_project_name }} 6 | spec: 7 | channel: {{ cs_subscription_channel }} 8 | installPlanApproval: {{ cs_subscription_strategy }} 9 | name: {{ cs_operator_name }} 10 | source: opencloud-operators 11 | sourceNamespace: openshift-marketplace 12 | 13 | -------------------------------------------------------------------------------- /ansible/roles/common_services_cat_src_inst/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | catalog_source_version: latest # This is the version of the catalog source you want from quay.io. Options are `latest` or `dev-latest`.
3 | -------------------------------------------------------------------------------- /ansible/roles/common_services_cat_src_inst/tasks/common_services_cat_src.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks: install the Common Services CatalogSource 3 | - name: Install CatalogSource for the Common Services operator {{ catalog_source_version }} 4 | shell: | 5 | cat << EOF | oc apply -f - 6 | apiVersion: operators.coreos.com/v1alpha1 7 | kind: CatalogSource 8 | metadata: 9 | name: opencloud-operators 10 | namespace: openshift-marketplace 11 | spec: 12 | displayName: IBMCS Operators 13 | publisher: IBM 14 | sourceType: grpc 15 | image: quay.io/opencloudio/ibm-common-service-catalog:{{ catalog_source_version }} 16 | updateStrategy: 17 | registryPoll: 18 | interval: 45m 19 | EOF 20 | -------------------------------------------------------------------------------- /ansible/roles/common_services_cat_src_inst/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: import tasks 2 | import_tasks: common_services_cat_src.yml 3 | run_once: true 4 | -------------------------------------------------------------------------------- /ansible/roles/content_devops_post_install/readme.md: -------------------------------------------------------------------------------- 1 | # Post installation tasks 2 | 3 | ## Expects the following variables 4 | 5 | Details about the variables can be found in ../../ocp-pool-claim-play/examples/content_devops_post_install_vars.yml 6 | -------------------------------------------------------------------------------- /ansible/roles/content_devops_post_install/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: post-install.yml -------------------------------------------------------------------------------- /ansible/roles/crc_enable_telemetry/defaults/main.yml: -------------------------------------------------------------------------------- 1 | crcEnableTelemetryFlag: False 2 | -------------------------------------------------------------------------------- /ansible/roles/crc_enable_telemetry/files/oc_enable_monitoring.sh: -------------------------------------------------------------------------------- 1 | # https://code-ready.github.io/crc/#starting-monitoring-alerting-telemetry_gsg 2 | #$1 is the numeric index 3 | if [ !
-z $1 ] ; then 4 | s1="oc patch clusterversion/version --type='json' -p '[{\"op\":\"remove\", \"path\":\"/spec/overrides/monidx\"}]' -oyaml" 5 | eval ${s1/monidx/$1} 6 | else 7 | echo "Requires 1 argument" 8 | fi 9 | -------------------------------------------------------------------------------- /ansible/roles/crc_enable_telemetry/files/oc_get_clusterversion.sh: -------------------------------------------------------------------------------- 1 | # https://code-ready.github.io/crc/#starting-monitoring-alerting-telemetry_gsg 2 | # using grep, exactly one line should be returned; cut keeps only the index 3 | oc get clusterversion version -ojsonpath='{range .spec.overrides[*]}{.name}{"\n"}{end}' | nl -v 0|grep cluster-monitoring-operator|tr -d '[[:space:]]'|cut -c1 4 | -------------------------------------------------------------------------------- /ansible/roles/crc_enable_telemetry/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ role_main_file }}" 3 | with_first_found: 4 | - files: 5 | - crc_enable_telemetry.yml 6 | loop_control: 7 | loop_var: role_main_file 8 | -------------------------------------------------------------------------------- /ansible/roles/crc_fyrevm/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include_tasks: crc_fyrevm.yml 4 | -------------------------------------------------------------------------------- /ansible/roles/crc_install/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | crc_download_src: "http://svt-auto01.fyre.ibm.com/opt/crc-linux-amd64.tar.xz" 3 | #crc_download_src: "http://9.46.68.100/crc-linux-amd64.tar.xz" 4 | -------------------------------------------------------------------------------- /ansible/roles/crc_install/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ role_main_file }}" 3 | with_first_found: 4 | - files: 5 | - "{{ ansible_os_family }}.yml" 6 | - crc_install.yml 7 | loop_control: 8 | loop_var: role_main_file -------------------------------------------------------------------------------- /ansible/roles/crc_oc_cli/defaults/main.yml: -------------------------------------------------------------------------------- 1 | oc_cluster: 'https://api.crc.testing:6443' -------------------------------------------------------------------------------- /ansible/roles/crc_oc_cli/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ role_main_file }}" 3 | with_first_found: 4 | - files: 5 | - crc_oc_cli.yml 6 | loop_control: 7 | loop_var: role_main_file 8 | -------------------------------------------------------------------------------- /ansible/roles/crc_start/defaults/main.yml: -------------------------------------------------------------------------------- 1 | crc_pull_password: 'Just4T3st!ng' 2 | -------------------------------------------------------------------------------- /ansible/roles/crc_start/files/crc_pull_pass: -------------------------------------------------------------------------------- 1 | Just4T3st!ng 2 | -------------------------------------------------------------------------------- /ansible/roles/crc_start/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ role_main_file }}" 3 | with_first_found: 4 | -
files: 5 | - "crc_start_{{ ansible_os_family }}.yml" 6 | - crc_start.yml 7 | loop_control: 8 | loop_var: role_main_file -------------------------------------------------------------------------------- /ansible/roles/crc_user/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ role_main_file }}" 3 | with_first_found: 4 | - files: 5 | - crc_user.yml 6 | loop_control: 7 | loop_var: role_main_file 8 | -------------------------------------------------------------------------------- /ansible/roles/csi_cephfs_fyre/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | cephfs_bastion_setup_dir: ~/setup-files/ceph-setup 3 | rook_cephfs_release: v1.8.6 # Rook-ceph release from https://github.com/rook/rook/releases 4 | device_name: vdb 5 | default_sc: csi-cephfs 6 | registry: 7 | registry_user: 8 | registry_pwd: 9 | -------------------------------------------------------------------------------- /ansible/roles/csi_cephfs_fyre/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: ocp_login 4 | -------------------------------------------------------------------------------- /ansible/roles/csi_cephfs_fyre/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: csi_cephfs_fyre.yaml 3 | -------------------------------------------------------------------------------- /ansible/roles/csi_cephfs_vmware/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | rook_cephfs_release: v1.5.9 # Rook-ceph release from https://github.com/rook/rook/releases 3 | device_name: sdb 4 | default_sc: file-storage 5 | -------------------------------------------------------------------------------- /ansible/roles/csi_cephfs_vmware/files/vsphere-block-storage.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: storage.k8s.io/v1 2 | kind: StorageClass 3 | metadata: 4 | name: vsphere-block-storage 5 | parameters: 6 | diskformat: thin 7 | provisioner: kubernetes.io/vsphere-volume 8 | reclaimPolicy: Delete 9 | volumeBindingMode: Immediate 10 | -------------------------------------------------------------------------------- /ansible/roles/csi_cephfs_vmware/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: csi_cephfs_vmware.yaml 3 | -------------------------------------------------------------------------------- /ansible/roles/db2_fyrevm/tasks/db2_fyrevm.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Create FyreVM with appropriate spec 3 | include_role: 4 | name: fyrevm_provision 5 | vars: 6 | fyre_platform: x 7 | fyre_cpu: 2 8 | fyre_memory: 4 9 | fyre_os: 'Redhat 9.0' 10 | fyre_site: "{{ site }}" 11 | fyre_comment: 'RH DB2' 12 | clusterName_prefix: db2 13 | target_group: db2 14 | vnc: "{{ vnc }}" 15 | -------------------------------------------------------------------------------- /ansible/roles/db2_fyrevm/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include_tasks: db2_fyrevm.yml 4 | -------------------------------------------------------------------------------- /ansible/roles/db2_openshift/meta/main.yml:
-------------------------------------------------------------------------------- 1 | galaxy_info: 2 | author: Rahul Tripathi 3 | company: IBM 4 | license: Apache-2.0 5 | min_ansible_version: 2.10 6 | galaxy_tags: [] 7 | dependencies: 8 | - role: ocp_request_token -------------------------------------------------------------------------------- /ansible/roles/db2_openshift/tasks/main.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020- IBM Inc. All rights reserved 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | --- 6 | - include_tasks: db2-deploy-openshift.yml -------------------------------------------------------------------------------- /ansible/roles/db2_openshift/templates/db2-operator-group.yml.j2: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020- IBM Inc. All rights reserved 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | apiVersion: operators.coreos.com/v1 6 | kind: OperatorGroup 7 | metadata: 8 | annotations: 9 | olm.providedAPIs: Db2uCluster.v1.db2u.databases.ibm.com,Db2uHadr.v1alpha1.db2u.databases.ibm.com,Db2uHelmMigration.v1alpha1.db2u.databases.ibm.com,Formation.v1.db2u.databases.ibm.com,FormationLock.v1.db2u.databases.ibm.com 10 | generateName: db2- 11 | generation: 1 12 | name: db2-qpx28 13 | namespace: "{{ db2_namespace }}" 14 | spec: 15 | targetNamespaces: 16 | - "{{ db2_namespace }}" -------------------------------------------------------------------------------- /ansible/roles/db2_openshift/templates/db2-subscription.yml.j2: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020- IBM Inc. All rights reserved 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | apiVersion: operators.coreos.com/v1alpha1 6 | kind: Subscription 7 | metadata: 8 | name: db2u-operator 9 | namespace: {{ db2_namespace }} 10 | labels: 11 | operators.coreos.com/db2u-operator.db2: '' 12 | spec: 13 | channel: v1.0 14 | installPlanApproval: Automatic 15 | name: db2u-operator 16 | source: ibm-operator-catalog 17 | sourceNamespace: openshift-marketplace 18 | startingCSV: db2u-operator.v1.0.2 19 | -------------------------------------------------------------------------------- /ansible/roles/db2_openshift/templates/ibm-catalog-resource.yml.j2: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020- IBM Inc. All rights reserved 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | apiVersion: operators.coreos.com/v1alpha1 6 | kind: CatalogSource 7 | metadata: 8 | name: ibm-operator-catalog 9 | namespace: openshift-marketplace 10 | spec: 11 | displayName: "IBM Operator Catalog" 12 | publisher: IBM 13 | sourceType: grpc 14 | image: docker.io/ibmcom/ibm-operator-catalog 15 | updateStrategy: 16 | registryPoll: 17 | interval: 45m -------------------------------------------------------------------------------- /ansible/roles/db2_openshift/templates/ibm-pull-secret.yml.j2: -------------------------------------------------------------------------------- 1 | {# 2 | Copyright 2020- IBM Inc.
All rights reserved 3 | SPDX-License-Identifier: Apache-2.0 4 | #} 5 | { 6 | "auths": { 7 | "cp.icr.io": { 8 | "auth": "{{ ('cp:' + entitled_key) | b64encode }}" 9 | } 10 | } 11 | } -------------------------------------------------------------------------------- /ansible/roles/db2_openshift/vars/main.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020- IBM Inc. All rights reserved 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | --- 6 | kubeadmin_user: "" 7 | kubeadmin_password: "" 8 | ocp_api_url: "https://:6443" 9 | ldap_namespace: "" 10 | storageClassName: "Add your storage class name" 11 | entitled_key: "IBM Entitled Key" -------------------------------------------------------------------------------- /ansible/roles/db2jcc_jars/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | db_user: 'db2inst1' 3 | db_host: 'dbhost' 4 | -------------------------------------------------------------------------------- /ansible/roles/db2jcc_jars/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: db2jcc_jars.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/deploy_ingress_router_vmware/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /ansible/roles/deploy_ingress_router_vmware/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: deploy_ingress_router_vmware.yaml 3 | -------------------------------------------------------------------------------- /ansible/roles/deploy_ingress_router_vmware/templates/ingress-router-patch-template.j2: -------------------------------------------------------------------------------- 1 | {"spec":{"replicas": {{ num_worker_nodes.stdout }} }} 2 | -------------------------------------------------------------------------------- /ansible/roles/deploy_ova_vmware/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ova_url: https://mirror.openshift.com/pub/openshift-v4/dependencies/rhcos/4.4/4.4.3/rhcos-4.4.3-x86_64-vmware.x86_64.ova 3 | vcenter_ip: 9.37.222.18 4 | vcenter_uid: '@icovcpc65.rtp' 5 | vcenter_pw: 6 | vcenter_datacenter: IOCDCPC1 7 | vcenter_datastore: ICOVCPC-RSX6-102 8 | vcenter_cluster: ICO01 9 | vcenter_network_label: VIS241 10 | vcenter_folder: ova 11 | local_target_folder: ~/ova_cp_folder 12 | govc_prq: vmware 13 | govc_vers: 0.21.0 14 | govc_dwld: govc_linux_amd64.gz 15 | -------------------------------------------------------------------------------- /ansible/roles/deploy_ova_vmware/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: deploy_ova_vmware.yaml 3 | -------------------------------------------------------------------------------- /ansible/roles/deploy_vmdisk_vmware/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /ansible/roles/deploy_vmdisk_vmware/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: deploy_vmdisk_vmware.yaml 3 |
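As a worked example of the b64encode expression in the ibm-pull-secret.yml.j2 template above (the key value here is purely hypothetical): with entitled_key set to myEntitledKey, the filter encodes the string cp:myEntitledKey and the template renders to valid dockerconfigjson content:

```json
{
  "auths": {
    "cp.icr.io": {
      "auth": "Y3A6bXlFbnRpdGxlZEtleQ=="
    }
  }
}
```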
-------------------------------------------------------------------------------- /ansible/roles/docker_ce_install/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ ansible_distribution }}.yml" 3 | when: 4 | - ansible_system == 'Linux' 5 | -------------------------------------------------------------------------------- /ansible/roles/fix_fyre_hosts_file/tasks/fix-fyre-host-file.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: use public IP instead of private as the default 4 | lineinfile: 5 | path: /etc/hosts 6 | state: present 7 | regexp: "{{ansible_facts['fqdn'] }}" 8 | line: "{{ ansible_facts['default_ipv4']['address'] }} {{ ansible_facts['fqdn'] }} {{ ansible_facts['hostname'] }}" 9 | backup: yes 10 | become: yes 11 | -------------------------------------------------------------------------------- /ansible/roles/fix_fyre_hosts_file/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include_tasks: fix-fyre-host-file.yml 4 | -------------------------------------------------------------------------------- /ansible/roles/fyrevm_delete/defaults/main.yml: -------------------------------------------------------------------------------- 1 | stackName: "{{ clusterName|default(omit) }}" 2 | 3 | fyre_modifyKnownHosts: true 4 | fyre_requestRetries: 999 5 | -------------------------------------------------------------------------------- /ansible/roles/fyrevm_delete/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ role_main_file_fd }}" 3 | with_first_found: 4 | - files: 5 | - "fyrevm_delete{{ ansible_os_family }}.yml" 6 | - fyrevm_delete.yml 7 | loop_control: 8 | loop_var: role_main_file_fd 9 | -------------------------------------------------------------------------------- /ansible/roles/fyrevm_provision/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include_tasks: fyrevm_provision.yml 4 | -------------------------------------------------------------------------------- /ansible/roles/get_liberty_archive/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | liberty_daily_or_test: 'daily' 3 | -------------------------------------------------------------------------------- /ansible/roles/get_liberty_archive/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: load_secrets -------------------------------------------------------------------------------- /ansible/roles/get_liberty_archive/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: get_liberty_archive.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/get_ocp_installer/defaults/main.yml: -------------------------------------------------------------------------------- 1 | installer_url: "https://mirror.openshift.com/pub/openshift-v4/clients/ocp" 2 | installer_version: "4.7.33" 3 | installer_file: "openshift-install-linux.tar.gz" -------------------------------------------------------------------------------- /ansible/roles/get_ocp_installer/tasks/get_ocp_installer.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - name : Download and unpack IPI installer 3 | unarchive: 4 | src: "{{ installer_url }}/{{ installer_version }}/{{ installer_file }}" 5 | dest: "/usr/local/bin" 6 | remote_src: yes 7 | mode: 0755 8 | -------------------------------------------------------------------------------- /ansible/roles/get_ocp_installer/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: get_ocp_installer.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/git_install_fyre/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /ansible/roles/git_install_fyre/tasks/git_install_fyre.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks git install 3 | 4 | - name: Install git on fyre inf node 5 | shell: "sudo dnf install git-all -y" 6 | register: gitinstall 7 | 8 | - name: Viewing git-install log 9 | debug: 10 | msg: "{{ gitinstall.stdout_lines }}" 11 | -------------------------------------------------------------------------------- /ansible/roles/git_install_fyre/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: git_install_fyre.yaml 3 | -------------------------------------------------------------------------------- /ansible/roles/google_cli_install/tasks/install-google-client.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Installing google auth and requests 3 | shell: "pip3 install requests google-auth" 4 | register: pip_install_results 5 | -------------------------------------------------------------------------------- /ansible/roles/google_cli_install/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: install-google-client.yml -------------------------------------------------------------------------------- /ansible/roles/hive_ocp_cluster_delete/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "pool_size": 1 3 | "ocp_api_url": "api.awsacm.purple-chesterfield.com" 4 | "kubeadmin_user": "kubeadmin" 5 | -------------------------------------------------------------------------------- /ansible/roles/hive_ocp_cluster_delete/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: ocp_login 4 | -------------------------------------------------------------------------------- /ansible/roles/hive_ocp_cluster_delete/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: import tasks 2 | import_tasks: delete-cluster.yml 3 | run_once: true 4 | -------------------------------------------------------------------------------- /ansible/roles/hive_ocp_cluster_provision/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "pool_size": 1 3 | "ocp_api_url": "api.awsacm.purple-chesterfield.com" 4 | "kubeadmin_user": "kubeadmin" 5 | -------------------------------------------------------------------------------- /ansible/roles/hive_ocp_cluster_provision/meta/main.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: ocp_login 4 | -------------------------------------------------------------------------------- /ansible/roles/hive_ocp_cluster_provision/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: import tasks 2 | import_tasks: provision-cluster.yml 3 | run_once: true 4 | -------------------------------------------------------------------------------- /ansible/roles/hive_ocp_cluster_provision/templates/azure-osServicePrincipal.json.j2: -------------------------------------------------------------------------------- 1 | { 2 | "subscriptionId": "{{ AZURE_SUBSCRIPTIONID }}", 3 | "clientId": "{{ AZURE_CLIENTID }}", 4 | "clientSecret": "{{ AZURE_CLIENTSECRET }}", 5 | "tenantId": "{{ AZURE_TENANTID }}" 6 | } 7 | -------------------------------------------------------------------------------- /ansible/roles/hive_ocp_cluster_provision/templates/google-osServiceAccount.json.j2: -------------------------------------------------------------------------------- 1 | { 2 | "type": "service_account", 3 | "project_id": "{{ GOOGLE_PROJECT_ID }}", 4 | "private_key_id": "{{ GOOGLE_PRIVATE_KEY_ID }}", 5 | "private_key": "{{ GOOGLE_PRIVATE_KEY }}", 6 | "client_email": "{{ GOOGLE_CLIENT_EMAIL }}", 7 | "client_id": "{{ GOOGLE_CLIENT_ID }}", 8 | "auth_uri": "{{ GOOGLE_AUTH_URI }}", 9 | "token_uri": "{{ GOOGLE_TOKEN_URI}}", 10 | "auth_provider_x509_cert_url": "{{ GOOGLE_AUTH_PROVIDER_X509_CERT_URL }}", 11 | "client_x509_cert_url": "{{ GOOGLE_CLIENT_X509_CERT_URL }}" 12 | } 13 | -------------------------------------------------------------------------------- /ansible/roles/hive_ocp_cluster_provision/templates/ibmcloud-template.j2: -------------------------------------------------------------------------------- 1 | # not supported at this time. 
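The azure-osServicePrincipal.json.j2 and google-osServiceAccount.json.j2 templates above render the standard cloud-credential payloads that Hive provisioning consumes. As a rough sketch only, this is how a rendered file might be loaded into a credentials secret; the secret and namespace names are placeholders, and the role's real wiring lives in provision-cluster.yml, which is not shown here:

```yaml
# Hypothetical follow-on task: load the rendered Azure credentials file
# into a secret that a Hive ClusterDeployment could reference.
- name: Create Azure credentials secret for Hive
  shell: |
    oc create secret generic azure-creds \
      --from-file=osServicePrincipal.json=azure-osServicePrincipal.json \
      -n hive --dry-run=client -o yaml | oc apply -f -
```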
-------------------------------------------------------------------------------- /ansible/roles/http_start/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: http_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/http_start/tasks/http_start.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: start httpd 4 | shell: "{{install_ihs_base }}/{{ http_dir }}/bin/apachectl start" 5 | register: starthttpdOutput 6 | 7 | - name: starthttpdOutput 8 | debug: 9 | msg: "{{ starthttpdOutput.stdout_lines }}" 10 | 11 | - name: start adminctl 12 | shell: "{{install_ihs_base }}/{{ http_dir }}/bin/adminctl start " 13 | register: startadminctlOutput 14 | 15 | - name: startadminctlOutput 16 | debug: 17 | msg: "{{ startadminctlOutput.stdout_lines }}" 18 | 19 | - name: Check server-status 20 | uri: 21 | url: "https://localhost:{{ ihs_https_port }}" 22 | return_content: no 23 | validate_certs: false 24 | 25 | -------------------------------------------------------------------------------- /ansible/roles/http_start/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: http_start.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/http_stop/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: http_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/http_stop/tasks/http_stop.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: stop httpd 4 | shell: "{{install_ihs_base }}/{{ http_dir }}/bin/apachectl stop" 5 | register: stophttpdOutput 6 | 7 | - name: stophttpdOutput 8 | debug: 9 | msg: "{{ stophttpdOutput.stdout_lines }}" 10 | 11 | - name: stop adminctl 12 | shell: "{{install_ihs_base }}/{{ http_dir }}/bin/adminctl stop " 13 | register: stopadminctlOutput 14 | 15 | - name: stopadminctlOutput 16 | debug: 17 | msg: "{{ stopadminctlOutput.stdout_lines }}" 18 | -------------------------------------------------------------------------------- /ansible/roles/http_stop/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: http_stop.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/ibm_installation_manager/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | im_archive_driver_version: '1922' 3 | im_archive_driver: '1.9.2002.20220323_1321' 4 | im_archive: 'https://rtpgsa.ibm.com/gsa/rtpgsa/home/w/a/wasngi/web/public/NGI/IM/recommended/drivers/{{ im_archive_driver_version }}/ga' 5 | install_base: '/home/nest' 6 | # trick the template to install onto windoze via cygwin 7 | install_base_win: '' 8 | shared_dir: 'IM-shared' 9 | -------------------------------------------------------------------------------- /ansible/roles/ibm_installation_manager/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | dependencies: 4 | - role: load_secrets 5 | -------------------------------------------------------------------------------- /ansible/roles/ibm_installation_manager/tasks/main.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: ibm_installation_manager.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/ibm_rhsm/meta/main.yml: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - role: load_secrets 3 | -------------------------------------------------------------------------------- /ansible/roles/ibm_rhsm/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ ansible_distribution }}.yml" 3 | when: ansible_distribution == "RedHat" 4 | -------------------------------------------------------------------------------- /ansible/roles/ibm_semeru_jdks/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: ibm_semeru_jdks.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/im_cleanup_http/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | shared_dir: 'IM-shared' 3 | -------------------------------------------------------------------------------- /ansible/roles/im_cleanup_http/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: http_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/im_cleanup_http/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: im_cleanup_http.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/im_cleanup_liberty/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | shared_dir: 'IM-shared' 3 | -------------------------------------------------------------------------------- /ansible/roles/im_cleanup_liberty/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: liberty_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/im_cleanup_liberty/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: im_cleanup_liberty.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/im_install_http_plugin/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # trick the template to install onto windoze via cygwin 3 | install_ihs_base_win: '' 4 | -------------------------------------------------------------------------------- /ansible/roles/im_install_http_plugin/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: ibm_installation_manager_cic_selector 4 | - role: twas_cell_defaults 5 | - role: http_defaults 6 | -------------------------------------------------------------------------------- /ansible/roles/im_install_http_plugin/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: im_install_http_plugin.yml 3 | 
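The ibm_rhsm role above relies on the include_tasks: "{{ ansible_distribution }}.yml" dispatch pattern (docker_ce_install uses the same trick), so each supported OS gets its own task file. The per-distribution file itself is not included in this dump; the following is only a hypothetical sketch of what its RedHat.yml could look like, using Ansible's standard redhat_subscription module:

```yaml
# Hypothetical RedHat.yml picked up by the ibm_rhsm dispatcher above.
# rhsm_user / rhsm_pass are assumed names, presumably supplied by the
# load_secrets role this role depends on.
- name: Register host with Red Hat Subscription Management
  become: yes
  redhat_subscription:
    state: present
    username: "{{ rhsm_user }}"
    password: "{{ rhsm_pass }}"
    auto_attach: true
```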
-------------------------------------------------------------------------------- /ansible/roles/im_install_http_plugin/templates/https_conf_WAS855.j2: -------------------------------------------------------------------------------- 1 | LoadModule ibm_ssl_module modules/mod_ibm_ssl.so 2 | Listen {{ ihs_https_port }} 3 | 4 | ServerName {{ ansible_hostname }} 5 | SSLEnable 6 | 7 | KeyFile {{ install_ihs_base }}/{{ http_dir }}/conf/ihsserverkey.kdb 8 | -------------------------------------------------------------------------------- /ansible/roles/im_install_http_plugin/templates/https_conf_WAS90.j2: -------------------------------------------------------------------------------- 1 | LoadModule ibm_ssl_module modules/mod_ibm_ssl.so 2 | Listen {{ ihs_https_port }} 3 | SSLCheckCertificateExpiration 30 4 | 5 | ServerName {{ ansible_hostname }} 6 | SSLEnable 7 | Header always set Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" 8 | 9 | KeyFile {{ install_ihs_base }}/{{ http_dir }}/conf/ihsserverkey.kdb 10 | -------------------------------------------------------------------------------- /ansible/roles/im_install_liberty/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | liberty_im_driver: 'cl211120211019-1900.52.linux' 3 | liberty_daily_or_test: 'test' 4 | install_base: '/home/nest' 5 | liberty_dir: 'wlp' 6 | shared_dir: 'IM-shared' 7 | wl_server: 'server' 8 | cc_server: 'cc' 9 | gsa_user: 'gsauser' 10 | gsa_pass: 'gsapass' 11 | ibm_user: 'external_ibmuser' 12 | ibm_pass: 'external_ibmpass' 13 | im_liberty_package: 'com.ibm.websphere.liberty.NDTRIAL.v85' 14 | im_java_package: 'com.ibm.java.jdk.v11' 15 | -------------------------------------------------------------------------------- /ansible/roles/im_install_liberty/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: load_secrets 4 | - role: ibm_installation_manager_cic_selector 5 | -------------------------------------------------------------------------------- /ansible/roles/im_install_liberty/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: im_install_liberty.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/im_install_liberty/templates/server.env.j2: -------------------------------------------------------------------------------- 1 | WLP_OUTPUT_DIR={{ wlp_output }} 2 | WLP_USER_DIR={{ wlp_usr_dir }} 3 | -------------------------------------------------------------------------------- /ansible/roles/im_install_twas/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: ibm_installation_manager_cic_selector 4 | - role: load_secrets 5 | - role: twas_cell_defaults 6 | -------------------------------------------------------------------------------- /ansible/roles/im_install_twas/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: im_install_twas.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/im_rollback_twas/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: load_secrets 4 | - role: twas_cell_defaults 5 | 
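For orientation, the server.env.j2 template above renders the wlp_output and wlp_usr_dir facts that the liberty_defaults role (shown later in this dump) derives from install_base and liberty_dir. With the defaults install_base=/home/nest and liberty_dir=wlp on a non-OS400 host, the rendered file would be:

```
WLP_OUTPUT_DIR=/home/nest/wlp/usr/servers
WLP_USER_DIR=/home/nest/wlp/usr
```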
-------------------------------------------------------------------------------- /ansible/roles/im_rollback_twas/tasks/im_rollback_twas.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: rollback tWAS 3 | tags: rollback 4 | block: 5 | - name: check if imcl installed as a prereq 6 | stat: 7 | path: "{{ install_base }}/IM/eclipse/tools/imcl" 8 | register: isimcl 9 | failed_when: isimcl.stat.exists == False 10 | 11 | - name: rollback twas 12 | shell: "{{ install_base }}/IM/eclipse/tools/imcl rollback {{ twas_rollback_version }}" 13 | register: rollbackOutput 14 | failed_when: ( rollbackOutput.rc not in [ 0, 1 ] ) 15 | 16 | - name: rollback twas output 17 | debug: 18 | msg: "{{ rollbackOutput.stdout_lines }}" 19 | -------------------------------------------------------------------------------- /ansible/roles/im_rollback_twas/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: im_rollback_twas.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/im_update_twas/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: ibm_installation_manager_cic_selector 4 | - role: load_secrets 5 | - role: twas_cell_defaults 6 | -------------------------------------------------------------------------------- /ansible/roles/im_update_twas/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: im_update_twas.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_install_config/tasks/Windows.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: get the install agent script 3 | win_get_url: 4 | url: "{{ get_unattended_windows_exe }}" 5 | dest: ./AgentBootstrap.exe 6 | validate_certs: no 7 | timeout: 1500 # just in case connections are slow 8 | - name: install the windows agent 9 | win_command: "{{ AgentBootstrap }}" 10 | register: isInstana 11 | - debug: 12 | msg: "{{ isInstana.stdout_lines }}" 13 | - name: Set service startup mode to auto and ensure it is started 14 | win_service: 15 | name: instana-agent-service 16 | start_mode: auto 17 | state: started 18 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_install_config/tasks/custom_package_cron.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Creates instana crontab manual reboot start 3 | ansible.builtin.cron: 4 | name: "start custom instana agent at reboot" 5 | special_time: reboot 6 | job: "{{ instana_dir_custom }}/bin/start >/dev/null 2>&1" 7 | become: yes 8 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_install_config/tasks/install_agent_custom.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install custom Instana archive 3 | unarchive: 4 | src: "{{ custom_agent_url }}/instana-agent-{{ ansible_architecture }}.tar.gz" 5 | dest: /opt 6 | remote_src: yes 7 | become: yes 8 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_install_config/tasks/install_agent_custom_chrp.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - name: install custom AIX Instana archive 3 | unarchive: 4 | src: "{{ custom_agent_url }}/instana-agent-{{ ansible_architecture }}.zip" 5 | dest: /opt 6 | remote_src: yes 7 | become: yes 8 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_install_config/tasks/install_agent_normal.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: get the install agent script 3 | become: yes 4 | get_url: 5 | url: "{{ get_normal_linux_agent }}" 6 | dest: ~/setup_agent.sh 7 | mode: 0755 8 | - name: install the linux agent 9 | become: yes 10 | command: "{{ install_normal_linux_agent }}" 11 | register: isInstana 12 | - debug: 13 | msg: "{{ isInstana.stdout_lines }}" 14 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_install_config/tasks/main.yml: -------------------------------------------------------------------------------- 1 | # "ansible_distribution": "Microsoft Windows Server 2019 Datacenter" 2 | # "ansible_distribution": "CYGWIN_NT-10.0-17763" 3 | --- 4 | - include_tasks: Windows.yml 5 | when: 6 | - ansible_shell_type is defined 7 | - ansible_shell_type == 'cmd' or ansible_shell_type == 'powershell' 8 | - include_tasks: unix.yml 9 | when: ansible_shell_type is undefined 10 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_install_config/tasks/remove_directory.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: remove custom package tree 3 | become: yes 4 | file: 5 | path: "{{ instana_dir_custom }}" 6 | state: absent 7 | - name: remove normal package tree 8 | become: yes 9 | file: 10 | path: "{{ instana_dir_normal }}" 11 | state: absent 12 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_install_config/tasks/remove_package.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Remove the instana package 3 | become: yes 4 | ansible.builtin.package: 5 | name: 6 | - "instana-agent-dynamic" 7 | - "instana-agent-dynamic-j9" 8 | - "instana-agent-static" 9 | - "instana-agent-static-j9" 10 | state: absent 11 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_install_config/tasks/unix.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ### "ansible_architecture": "s390x", 3 | ### "ansible_architecture": "ppc64le", 4 | ### "ansible_architecture": "x86_64", 5 | ### "ansible_architecture": "chrp", --- AIX 6 | - include_tasks: stop.yml 7 | - include_tasks: remove_package.yml 8 | - include_tasks: remove_directory.yml 9 | - include_tasks: install_agent_normal.yml 10 | when: 11 | - ansible_architecture == 'x86_64' or ansible_architecture == 's390x' or ansible_architecture == 'ppc64le' 12 | - include_tasks: install_agent_custom_chrp.yml 13 | when: 14 | - ansible_architecture == 'chrp' 15 | - include_tasks: start.yml 16 | - include_tasks: zone.yml 17 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_install_config/tasks/zone.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: set the instana agent zone if 
present 3 | include_role: 4 | name: instana_agent_zone 5 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_switch/defaults/main.yml: -------------------------------------------------------------------------------- 1 | ### Instana dirs are hard-coded 2 | instana_dir_custom: '/opt/instana-agent' 3 | instana_dir_normal: '/opt/instana/agent' 4 | agent_key: 'thisIsAlwaysOverridden' 5 | instana_host: 'thisIsAlwaysOverridden' 6 | instana_port: '1444' 7 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_switch/templates/com.instana.agent.main.sender.Backend.cfg.j2: -------------------------------------------------------------------------------- 1 | # this overwrites the existing config 2 | host={{ instana_host }} 3 | port={{ instana_port }} 4 | protocol=HTTP/2 5 | key={{ agent_key }} 6 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_zone/defaults/main.yml: -------------------------------------------------------------------------------- 1 | ### Instana dirs are hard-coded 2 | instana_dir_custom: '/opt/instana-agent' 3 | instana_dir_normal: '/opt/instana/agent' 4 | instana_zone: '' # default is undefined 5 | -------------------------------------------------------------------------------- /ansible/roles/instana_agent_zone/templates/configuration-zone.yaml.j2: -------------------------------------------------------------------------------- 1 | # Hardware & Zone 2 | com.instana.plugin.generic.hardware: 3 | enabled: true # disabled by default 4 | availability-zone: '{{ instana_zone }}' 5 | -------------------------------------------------------------------------------- /ansible/roles/instana_fyrevm/tasks/instana_fyrevm.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Create FyreVM with appropriate spec 4 | include_role: 5 | name: fyrevm_provision 6 | vars: 7 | fyre_platform: x 8 | fyre_cpu: 16 9 | fyre_memory: 64 10 | fyre_additional_disk: 2048 11 | fyre_os: 'RedHat 9.0' 12 | fyre_site: "{{ site }}" 13 | fyre_comment: 'Instana Host' 14 | clusterName_prefix: instana 15 | target_group: instana 16 | -------------------------------------------------------------------------------- /ansible/roles/instana_fyrevm/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include_tasks: instana_fyrevm.yml 4 | -------------------------------------------------------------------------------- /ansible/roles/instana_instance_on_prem/defaults/main.yml: -------------------------------------------------------------------------------- 1 | ### Instana instance artifacts 2 | InstanaAdminUserPassword: 'password' 3 | InstanaServiceUserPassword: 'password' 4 | -------------------------------------------------------------------------------- /ansible/roles/instana_instance_on_prem/tasks/RedHat.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install jq 4 | become: yes 5 | ansible.builtin.dnf: 6 | state: latest 7 | name: 8 | - jq 9 | - postfix 10 | 11 | - name: configure the Instana repo 12 | become: yes 13 | template: 14 | src: instana.rhel.repo.j2 15 | dest: /etc/yum.repos.d/Instana-Product.repo 16 | 17 | - name: install instana-console 18 | become: yes 19 | dnf: 20 | name: "instana-console" 21 | state: present 22 |
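A minimal sketch of driving the instana_agent_switch role and its Backend.cfg template shown above; the inventory group name and the vaulted key are assumptions, and the role's defaults (thisIsAlwaysOverridden) make clear that agent_key and instana_host are expected to be supplied by the caller:

```yaml
# Hypothetical play: repoint already-installed agents at a different
# Instana backend via the instana_agent_switch role shown above.
- name: Switch Instana agents to a new backend
  hosts: instana_agents                 # assumption: inventory group name
  vars:
    instana_host: instana.example.com   # placeholder backend host
    instana_port: '1444'
    agent_key: "{{ vault_agent_key }}"  # assumption: vaulted secret
  roles:
    - instana_agent_switch
```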
-------------------------------------------------------------------------------- /ansible/roles/instana_instance_on_prem/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ ansible_distribution }}.yml" 3 | - include_tasks: instana_instance_on_prem.yml 4 | -------------------------------------------------------------------------------- /ansible/roles/instana_instance_on_prem/templates/fdisk.create.j2: -------------------------------------------------------------------------------- 1 | n 2 | p 3 | 1 4 | 5 | 6 | i 7 | w 8 | q 9 | -------------------------------------------------------------------------------- /ansible/roles/instana_instance_on_prem/templates/fdisk.delete.j2: -------------------------------------------------------------------------------- 1 | i 2 | d 3 | w 4 | q 5 | -------------------------------------------------------------------------------- /ansible/roles/instana_instance_on_prem/templates/instana.rhel.repo.j2: -------------------------------------------------------------------------------- 1 | [instana-product] 2 | name=Instana-Product 3 | baseurl=https://self-hosted.instana.io/rpm/release/product/rpm/generic/x86_64/Packages 4 | enabled=1 5 | gpgcheck=1 6 | repo_gpgcheck=1 7 | gpgkey=https://self-hosted.instana.io/signing_key.gpg 8 | priority=5 9 | sslverify=1 10 | -------------------------------------------------------------------------------- /ansible/roles/ipi_ocp_cluster_delete/tasks/delete_cluster.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Destroy cluster 3 | shell: "openshift-install destroy cluster --dir=install/{{ CLUSTER_NAME }} 2>&1 | tee install/delete.log" 4 | args: 5 | executable: /bin/bash 6 | async: 1800 7 | poll: 0 8 | register: destroy_results 9 | 10 | - name: Check destroy, wait up to 30 min 11 | async_status: 12 | jid: "{{ destroy_results.ansible_job_id }}" 13 | register: job_result 14 | until: job_result.finished 15 | retries: 30 16 | delay: 60 17 | failed_when: job_result.finished != 1 18 | -------------------------------------------------------------------------------- /ansible/roles/ipi_ocp_cluster_delete/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: delete_cluster.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/ipi_ocp_cluster_provision/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | cloud: aws 3 | logfile: "/tmp/deploy.log" 4 | 5 | # Master nodes 6 | "MASTER_CPUS": "10" 7 | "MASTER_MEMORY": "32768" 8 | "MASTER_DISK_SIZE": "300" 9 | "MASTER_COUNT": 3 10 | 11 | # Worker nodes 12 | "WORKER_COUNT": 3 13 | "WORKER_CPUS": "16" 14 | "WORKER_MEMORY": "73728" 15 | "WORKER_DISK_SIZE": "200" 16 | 17 | "WORKER_VM_SIZE": "m5.2xlarge" # size of worker VM instances 18 | "MASTER_VM_SIZE": "m5.2xlarge" # size of master VM instances -------------------------------------------------------------------------------- /ansible/roles/ipi_ocp_cluster_provision/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: get_ocp_installer 4 | -------------------------------------------------------------------------------- /ansible/roles/ipi_ocp_cluster_provision/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2
| - include_tasks: provision-cluster.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/ipi_ocp_cluster_provision/templates/azure-install-config.template.j2: -------------------------------------------------------------------------------- 1 | --- 2 | # to be built 3 | -------------------------------------------------------------------------------- /ansible/roles/ipi_ocp_cluster_provision/templates/google-install-config.template.j2: -------------------------------------------------------------------------------- 1 | --- 2 | # to be built 3 | -------------------------------------------------------------------------------- /ansible/roles/jmeter/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | forceReplace: False 3 | -------------------------------------------------------------------------------- /ansible/roles/jmeter_fyrevm/tasks/jmeter_fyrevm.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Create FyreVM with appropriate spec 4 | include_role: 5 | name: fyrevm_provision 6 | vars: 7 | fyre_platform: x 8 | fyre_cpu: 2 9 | fyre_memory: 2 10 | fyre_os: 'Redhat 9.0' 11 | fyre_site: "{{ site }}" 12 | fyre_comment: 'RH Jmeter' 13 | clusterName_prefix: crc 14 | target_group: jmeter 15 | target_user: "{{ jmeterUser }}" 16 | jmeterUser: "{{ jmeterUser }}" 17 | vnc: "{{ vnc }}" 18 | -------------------------------------------------------------------------------- /ansible/roles/jmeter_fyrevm/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include_tasks: jmeter_fyrevm.yml 4 | -------------------------------------------------------------------------------- /ansible/roles/jmeter_get_logs/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | env_name: myLibertyEnv 4 | -------------------------------------------------------------------------------- /ansible/roles/jmeter_get_logs/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: jmeter_get_logs.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/jmeter_java/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | forceReplace: False 3 | -------------------------------------------------------------------------------- /ansible/roles/jmeter_prereqs/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Install RHEL jmeter pre-reqs 4 | become: yes 5 | dnf: 6 | name: "{{ packages }}" 7 | state: latest 8 | vars: 9 | packages: 10 | - rsync 11 | - zip 12 | - unzip 13 | -------------------------------------------------------------------------------- /ansible/roles/liberty_collective/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | min_heap: '256m' 3 | max_heap: '1024m' 4 | -------------------------------------------------------------------------------- /ansible/roles/liberty_collective/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: liberty_defaults 4 | -------------------------------------------------------------------------------- 
/ansible/roles/liberty_collective/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: liberty_collective.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/liberty_collective/templates/jvm.options.j2: -------------------------------------------------------------------------------- 1 | -Dhttps.protocols=TLSv1.2 2 | -Xms{{ min_heap }} 3 | -Xmx{{ max_heap }} 4 | #openJ9 jvm.options 5 | -verbose:gc 6 | -Xdump:heap 7 | -Xaggressive 8 | -Xverbosegclog:logs/verbosegc.log,200,10000 9 | -------------------------------------------------------------------------------- /ansible/roles/liberty_daytrader8/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | install_base: '/home/nest' 3 | liberty_dir: 'wlp' 4 | wl_server: 'server' 5 | min_heap: '1G' 6 | max_heap: '3G' 7 | cluster_name: 'DayTrader8' 8 | db_user: 'db2inst1' 9 | db_user1: 'db2inst1' 10 | db_pass: 'guessme' 11 | db_pass1: 'guessme' 12 | db_host: 'dbhost' 13 | db_host1: 'dbhost' 14 | db_port: '50000' 15 | db_port1: '50000' 16 | db_name: 'tradedb' 17 | # could be sessions if separate db 18 | db_name1: 'tradedb' 19 | -------------------------------------------------------------------------------- /ansible/roles/liberty_daytrader8/files/io.openliberty.sample.daytrader8.war: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/roles/liberty_daytrader8/files/io.openliberty.sample.daytrader8.war -------------------------------------------------------------------------------- /ansible/roles/liberty_daytrader8/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: liberty_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/liberty_daytrader8/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: liberty_daytrader8.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/liberty_daytrader8/templates/jvm.options.j2: -------------------------------------------------------------------------------- 1 | -Dhttps.protocols=TLSv1.2 2 | -Xms{{ min_heap }} 3 | -Xmx{{ max_heap }} 4 | #openJ9 jvm.options 5 | -verbose:gc 6 | -Xdump:heap 7 | -Xaggressive 8 | -Xverbosegclog:logs/verbosegc.log,200,10000 9 | -------------------------------------------------------------------------------- /ansible/roles/liberty_defaults/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | install_base: '/home/nest' 3 | liberty_dir: 'wlp' 4 | liberty_admin_user: 'admin' 5 | liberty_admin_pass: 'adminpwd' 6 | liberty_admin_port: '9080' 7 | liberty_admin_ports: '9443' 8 | liberty_http_port: '9080' 9 | liberty_http_ports: '9443' 10 | cc_server: 'cc' 11 | wl_server: 'server' 12 | -------------------------------------------------------------------------------- /ansible/roles/liberty_defaults/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: set wlp_cmd, wlp_nohup, wlp_usr_dir, wlp_output 3 | set_fact: 4 | wlp_cmd: " " 5 | wlp_nohup: "nohup " 6 | wlp_usr_dir: "{{ install_base }}/{{ liberty_dir }}/usr" 7 | 
wlp_output: "{{ install_base }}/{{ liberty_dir }}/usr/servers" 8 | when: ansible_system != 'OS400' 9 | - name: set OS400 wlp_cmd, wlp_nohup, wlp_usr_dir, wlp_output 10 | set_fact: 11 | wlp_cmd: "qsh " 12 | wlp_nohup: "" 13 | wlp_owner: qejbsvr 14 | wlp_usr_dir: "/QIBM/UserData/WebSphere/Liberty/V85/ND/wlp/usr" 15 | wlp_output: "/QIBM/UserData/WebSphere/Liberty/V85/ND/wlp/output/servers" 16 | when: ansible_system == 'OS400' 17 | 18 | -------------------------------------------------------------------------------- /ansible/roles/liberty_dynamicRouting/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | plugin_dir: 'Plugins' 3 | -------------------------------------------------------------------------------- /ansible/roles/liberty_dynamicRouting/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: liberty_defaults 4 | - role: http_defaults 5 | -------------------------------------------------------------------------------- /ansible/roles/liberty_dynamicRouting/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: liberty_dynamicRouting.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/liberty_dynamicRouting_plugin/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: liberty_defaults 4 | - role: http_defaults 5 | -------------------------------------------------------------------------------- /ansible/roles/liberty_dynamicRouting_plugin/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: liberty_dynamicRouting_plugin.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/liberty_fetch_plugins/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: liberty_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/liberty_fetch_plugins/tasks/liberty_fetch_plugins.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Remove plugin-cfg.xml and plugin-key.p12 local fetched files 3 | delegate_to: localhost 4 | file: 5 | path: "{{ item }}" 6 | state: absent 7 | with_items: 8 | - plugin-cfg.xml 9 | - plugin-key.p12 10 | 11 | - name: fetch plugin-cfg.xml and plugin-key.p12 generated files 12 | fetch: 13 | src: "{{ wlp_usr_dir }}/servers/{{ cc_server }}/{{ item }}" 14 | dest: "{{ item }}" 15 | flat: yes 16 | with_items: 17 | - plugin-cfg.xml 18 | - plugin-key.p12 19 | -------------------------------------------------------------------------------- /ansible/roles/liberty_fetch_plugins/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: liberty_fetch_plugins.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/liberty_get_logs/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | history_days: '-3' 4 | env_name: myLibertyEnv 5 | -------------------------------------------------------------------------------- /ansible/roles/liberty_get_logs/meta/main.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: liberty_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/liberty_get_logs/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: liberty_get_logs.yml -------------------------------------------------------------------------------- /ansible/roles/liberty_ibmi_grantauth/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: liberty_ibmi_grantauth.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/liberty_join_collective/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: liberty_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/liberty_join_collective/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: liberty_join_collective.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/liberty_scaling/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | install_base: '/home/nest' 3 | liberty_dir: 'wlp' 4 | cc_server: 'cc' 5 | cluster_name: 'DayTrader8' 6 | -------------------------------------------------------------------------------- /ansible/roles/liberty_scaling/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: liberty_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/liberty_scaling/tasks/liberty_scaling.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Scaling 3 | tags: enablescaling 4 | block: 5 | - name: create configDropins/overrides directory 6 | file: 7 | path: "{{ item }}" 8 | state: directory 9 | with_items: 10 | - "{{ wlp_usr_dir }}/servers/{{ cc_server }}/configDropins/overrides" 11 | 12 | - name: copy scaling.xml template to configDropins/overrides 13 | template: 14 | src: scaling.xml.j2 15 | dest: "{{ wlp_usr_dir }}/servers/{{ cc_server }}/configDropins/overrides/scaling.xml" 16 | -------------------------------------------------------------------------------- /ansible/roles/liberty_scaling/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: liberty_scaling.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/liberty_servers/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | install_base: '/home/nest' 3 | liberty_dir: 'wlp' 4 | wl_server: 'server' 5 | -------------------------------------------------------------------------------- /ansible/roles/liberty_servers/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: liberty_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/liberty_servers/tasks/main.yml: --------------------------------------------------------------------------------
1 | --- 2 | - include_tasks: liberty_servers.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/linux_kill_process/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: linux_kill_process.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/load_secrets/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | load_secrets_dir: "{{ lookup('env','ansible_secrets_dir') }}" 4 | -------------------------------------------------------------------------------- /ansible/roles/load_secrets/tasks/load_secrets.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Load all secrets 4 | include_vars: 5 | dir: "{{ load_secrets_dir }}" 6 | files_matching: ".*\\.yml" 7 | -------------------------------------------------------------------------------- /ansible/roles/load_secrets/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include_tasks: load_secrets.yml 4 | -------------------------------------------------------------------------------- /ansible/roles/nestuser/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | user_passfile: password.hash 3 | user_shell: '/bin/bash' 4 | -------------------------------------------------------------------------------- /ansible/roles/nestuser/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | dependencies: 4 | - role: load_secrets 5 | -------------------------------------------------------------------------------- /ansible/roles/nestuser/scripts/password_hasher.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Import the SHA256 Hash Algorithm 4 | import sys 5 | from passlib.hash import sha256_crypt 6 | 7 | password = sys.argv[1] 8 | # Generate a new salt and hash the provided password 9 | password_hash = sha256_crypt.hash(password) 10 | # Output 11 | print(password_hash) 12 | 13 | -------------------------------------------------------------------------------- /ansible/roles/nfs_client_provisioner_fyre/meta/main.yml: -------------------------------------------------------------------------------- 1 | galaxy_info: 2 | author: Rahul Tripathi 3 | company: IBM 4 | license: license (Apache License 2.0) 5 | min_ansible_version: 2.10 6 | galaxy_tags: [] 7 | dependencies: 8 | - role: ocp_request_token -------------------------------------------------------------------------------- /ansible/roles/nfs_client_provisioner_fyre/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks file for nfs-client-provisioner-fyre 3 | - include_tasks: nfs-client-provisioner-ocp-private.yml -------------------------------------------------------------------------------- /ansible/roles/nfs_client_provisioner_fyre/templates/class.yml: -------------------------------------------------------------------------------- 1 | apiVersion: storage.k8s.io/v1 2 | kind: StorageClass 3 | metadata: 4 | name: "{{ storageClassName }}" 5 | namespace: "{{ nfs_provisioner_namespace }}" 6 | provisioner: fuseim.pri/ifs # or choose another name; must match the deployment's env PROVISIONER_NAME 7 | parameters: 8 | archiveOnDelete: "false" 9
| -------------------------------------------------------------------------------- /ansible/roles/nfs_client_provisioner_fyre/templates/nfs-cluster-rb.yml: -------------------------------------------------------------------------------- 1 | kind: ClusterRoleBinding 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | metadata: 4 | name: run-nfs-client-provisioner 5 | subjects: 6 | - kind: ServiceAccount 7 | name: nfs-client-provisioner 8 | namespace: "{{ nfs_provisioner_namespace }}" 9 | roleRef: 10 | kind: ClusterRole 11 | name: nfs-client-provisioner-runner 12 | apiGroup: rbac.authorization.k8s.io -------------------------------------------------------------------------------- /ansible/roles/nfs_client_provisioner_fyre/templates/nfs-cluster-role.yml: -------------------------------------------------------------------------------- 1 | kind: ClusterRole 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | metadata: 4 | name: nfs-client-provisioner-runner 5 | rules: 6 | - apiGroups: [""] 7 | resources: ["persistentvolumes"] 8 | verbs: ["get", "list", "watch", "create", "delete"] 9 | - apiGroups: [""] 10 | resources: ["persistentvolumeclaims"] 11 | verbs: ["get", "list", "watch", "update"] 12 | - apiGroups: ["storage.k8s.io"] 13 | resources: ["storageclasses"] 14 | verbs: ["get", "list", "watch"] 15 | - apiGroups: [""] 16 | resources: ["events"] 17 | verbs: ["create", "update", "patch"] -------------------------------------------------------------------------------- /ansible/roles/nfs_client_provisioner_fyre/templates/nfs-role-binding.yml: -------------------------------------------------------------------------------- 1 | kind: RoleBinding 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | metadata: 4 | name: leader-locking-nfs-client-provisioner 5 | namespace: "{{ nfs_provisioner_namespace }}" 6 | subjects: 7 | - kind: ServiceAccount 8 | name: nfs-client-provisioner 9 | # replace with namespace where provisioner is deployed 10 | namespace: "{{ nfs_provisioner_namespace }}" 11 | roleRef: 12 | kind: Role 13 | name: leader-locking-nfs-client-provisioner 14 | apiGroup: rbac.authorization.k8s.io -------------------------------------------------------------------------------- /ansible/roles/nfs_client_provisioner_fyre/templates/nfs-role.yml: -------------------------------------------------------------------------------- 1 | kind: Role 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | metadata: 4 | name: leader-locking-nfs-client-provisioner 5 | namespace: "{{ nfs_provisioner_namespace }}" 6 | rules: 7 | - apiGroups: [""] 8 | resources: ["endpoints"] 9 | verbs: ["get", "list", "watch", "create", "update", "patch"] -------------------------------------------------------------------------------- /ansible/roles/nfs_client_provisioner_fyre/templates/nfs-sa.yml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: nfs-client-provisioner 5 | namespace: "{{ nfs_provisioner_namespace }}" -------------------------------------------------------------------------------- /ansible/roles/nfs_client_provisioner_fyre/templates/test-claim.yml: -------------------------------------------------------------------------------- 1 | kind: PersistentVolumeClaim 2 | apiVersion: v1 3 | metadata: 4 | name: test-claim 5 | namespace: "{{ nfs_provisioner_namespace }}" 6 | annotations: 7 | volume.beta.kubernetes.io/storage-class: "{{ storageClassName }}" 8 | spec: 9 | accessModes: 10 | - ReadWriteMany 11 | resources: 12 | requests: 13 | 
storage: 1Mi -------------------------------------------------------------------------------- /ansible/roles/nfs_client_provisioner_fyre/vars/main.yml: -------------------------------------------------------------------------------- 1 | # vars file for nfs-client-provisioner-fyre 2 | # 3 | # Copyright 2020- IBM Inc. All rights reserved 4 | # SPDX-License-Identifier: Apache-2.0 5 | # 6 | --- 7 | kubeadmin_user: "my_kubeadmin_user_name" 8 | kubeadmin_password: "my_kubeadmin_password" 9 | ocp_api_url: "https://my_ocp_api_url:port" 10 | nfs_provisioner_namespace: "new_namespace_to_deploy_nfs_provisioner" 11 | storageClassName: "storage_class_name_example_rook-cephfs" 12 | infra_node_private_ip: "private_ip_address_of_infra_node" -------------------------------------------------------------------------------- /ansible/roles/oc_client_install/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "ocp_client_version": "4.5.10" 3 | "client_os": "linux" # linux|mac|windows 4 | -------------------------------------------------------------------------------- /ansible/roles/oc_client_install/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: oc-client-install.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/ocp_add_users_to_scc/meta/main.yml: -------------------------------------------------------------------------------- 1 | galaxy_info: 2 | author: Rahul Tripathi 3 | company: IBM 4 | license: license (Apache License 2.0) 5 | min_ansible_version: 2.10 6 | galaxy_tags: [] 7 | dependencies: 8 | - role: ocp_request_token -------------------------------------------------------------------------------- /ansible/roles/ocp_add_users_to_scc/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks file for ocp_add_users_to_scc 3 | - include_tasks: ocp-add-users-to-scc.yml 4 | -------------------------------------------------------------------------------- /ansible/roles/ocp_add_users_to_scc/vars/main.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020- IBM Inc.
All rights reserved 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | #--- 6 | scc_to_be_modified: "target-scc" 7 | user_string_for_scc: 'system:serviceaccount:my-test-namespace:my-test-service-account' 8 | 9 | 10 | ################ EXAMPLE ################ 11 | #scc_to_be_modified: "hostmount-anyuid" 12 | #user_string_for_scc: 'system:serviceaccount:{{ nfs_provisioner_namespace }}:nfs-client-provisioner' 13 | -------------------------------------------------------------------------------- /ansible/roles/ocp_cluster_tag/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "tag_task": add 3 | -------------------------------------------------------------------------------- /ansible/roles/ocp_cluster_tag/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: add-tags.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/ocp_login/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "login_retries": 10 3 | "ocp_api_port": "6443" 4 | kubeadmin_user: "kubeadmin" 5 | kubeconfig_location: "{{ ansible_user_dir }}/.kube" 6 | -------------------------------------------------------------------------------- /ansible/roles/ocp_login/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: oc_client_install 4 | -------------------------------------------------------------------------------- /ansible/roles/ocp_login/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: ocp_login.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/ocp_pool_claim/meta/main.yml: -------------------------------------------------------------------------------- 1 | allow_duplicates: true -------------------------------------------------------------------------------- /ansible/roles/ocp_pool_claim/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: ocp-pool-claim.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/ocp_pool_claim/templates/claim-template.j2: -------------------------------------------------------------------------------- 1 | apiVersion: hive.openshift.io/v1 2 | kind: ClusterClaim 3 | metadata: 4 | name: "{{ claim_name }}" 5 | namespace: "{{ pool_namespace }}" 6 | spec: 7 | clusterPoolName: "{{ pool_name }}" 8 | subjects: 9 | - kind: Group 10 | apiGroup: rbac.authorization.k8s.io 11 | name: 'system:masters' 12 | -------------------------------------------------------------------------------- /ansible/roles/ocp_pool_claim/templates/vsphere-install-config-template.j2: -------------------------------------------------------------------------------- 1 | # TODO: add vsphere specific template details 2 | -------------------------------------------------------------------------------- /ansible/roles/ocp_request_token/meta/main.yml: -------------------------------------------------------------------------------- 1 | galaxy_info: 2 | author: Rahul Tripathi 3 | company: IBM 4 | license: license (Apache License 2.0) 5 | min_ansible_version: 2.10 6 | galaxy_tags: [] 7 | dependencies: [] 8 | 9 |
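A usage note on the ocp_* roles above: they compose naturally in a play. Below is a minimal sketch of logging in to a cluster with the ocp_login role (which pulls in oc_client_install through its meta dependency). The host group, API URL, and password variable are placeholders, and it is assumed the role consumes kubeadmin_password and ocp_api_url as those names are used elsewhere in this collection; this is an editorial illustration, not repo code.

---
# Hypothetical playbook; all values are illustrative only.
- hosts: bastion                                           # placeholder inventory group
  roles:
    - role: ocp_login
      vars:
        kubeadmin_user: kubeadmin
        kubeadmin_password: "{{ vault_kubeadmin_password }}"   # assumed secret variable
        ocp_api_url: "https://api.mycluster.example.com"       # placeholder URL
        login_retries: 5                                       # override of the default 10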
-------------------------------------------------------------------------------- /ansible/roles/ocp_request_token/tasks/main.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2020- IBM Inc. All rights reserved 3 | # SPDX-License-Identifier: Apache-2.0 4 | # 5 | --- 6 | # tasks file for ocp-request-token 7 | - include_tasks: ocp-request-token.yml 8 | -------------------------------------------------------------------------------- /ansible/roles/ocp_request_token/vars/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | kubeadmin_user: "" 3 | kubeadmin_password: "" 4 | ocp_api_url: "https://:6443" 5 | -------------------------------------------------------------------------------- /ansible/roles/ops_svt_jmeter_container/defaults/main.yml: -------------------------------------------------------------------------------- 1 | jmeter_tz: 'America/New_York' 2 | -------------------------------------------------------------------------------- /ansible/roles/ops_svt_jmeter_container/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | dependencies: 4 | - role: load_secrets 5 | -------------------------------------------------------------------------------- /ansible/roles/ops_svt_jmeter_container/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: ops_svt_jmeter_container.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/osprereqs/tasks/SLES.12.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # specific OS supports wildcards 3 | - name: remove extraneous packages 4 | become: yes 5 | ignore_errors: yes 6 | zypper: 7 | name: "{{ packages }}" 8 | state: absent 9 | vars: 10 | packages: 11 | - "adobe*" 12 | - "java*" 13 | - "samba*" 14 | - "valgrind*" 15 | - "tigervnc" 16 | 17 | - name: update all packages 18 | become: yes 19 | zypper: 20 | name: '*' 21 | state: latest 22 | 23 | - name: install packages 24 | become: yes 25 | zypper: 26 | name: "{{ packages }}" 27 | state: present 28 | vars: 29 | packages: 30 | - 'xorg-x11-xauth' 31 | - 'less' 32 | - 'zip' 33 | - 'unzip' 34 | 35 | -------------------------------------------------------------------------------- /ansible/roles/osprereqs/tasks/Ubuntu.18.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # specific OS supports wildcards 3 | - name: remove extraneous packages 4 | become: yes 5 | ignore_errors: yes 6 | apt: 7 | name: "{{ packages }}" 8 | state: absent 9 | autoremove: yes 10 | vars: 11 | packages: 12 | - "java*" 13 | - "samba*" 14 | - "valgrind*" 15 | - "git*" 16 | - "tigervnc" 17 | 18 | - name: update all packages 19 | become: yes 20 | apt: 21 | name: '*' 22 | state: latest 23 | 24 | - name: install packages 25 | become: yes 26 | apt: 27 | name: "{{ packages }}" 28 | state: present 29 | vars: 30 | packages: 31 | - 'libxtst6' 32 | - 'libgtk2.0-0' 33 | - 'libxft2' 34 | - 'zip' 35 | - 'unzip' 36 | -------------------------------------------------------------------------------- /ansible/roles/osprereqs/tasks/Ubuntu.20.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # specific OS supports wildcards 3 | - name: remove extraneous packages 4 | become: yes 5 | ignore_errors: yes 6 | apt: 7 | name: "{{ packages }}" 8 |
state: absent 9 | autoremove: yes 10 | vars: 11 | packages: 12 | - "java*" 13 | - "samba*" 14 | - "valgrind*" 15 | - "git*" 16 | - "tigervnc" 17 | 18 | - name: update all packages 19 | become: yes 20 | apt: 21 | name: '*' 22 | state: latest 23 | 24 | - name: install packages 25 | become: yes 26 | apt: 27 | name: "{{ packages }}" 28 | state: present 29 | vars: 30 | packages: 31 | - 'libxtst6' 32 | - 'libgtk2.0-0' 33 | - 'libxft2' 34 | - 'zip' 35 | - 'unzip' 36 | -------------------------------------------------------------------------------- /ansible/roles/osprereqs/tasks/anylinux.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: enable /etc/ssh/sshd_config X11Forwarding yes 4 | lineinfile: 5 | path: '/etc/ssh/sshd_config' 6 | state: present 7 | regexp: 'X11Forwarding no' 8 | line: 'X11Forwarding yes' 9 | become: yes 10 | 11 | - name: enable /etc/ssh/sshd_config X11UseLocalhost no 12 | lineinfile: 13 | path: '/etc/ssh/sshd_config' 14 | state: present 15 | regexp: 'X11UseLocalhost yes' 16 | line: 'X11UseLocalhost no' 17 | become: yes 18 | 19 | - name: restart sshd 20 | command: "systemctl restart sshd" 21 | become: yes 22 | -------------------------------------------------------------------------------- /ansible/roles/osprereqs/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: anylinux.yml 3 | when: 4 | - ansible_system == 'Linux' 5 | - include_tasks: "{{ ansible_distribution }}.{{ ansible_distribution_major_version }}.yml" 6 | -------------------------------------------------------------------------------- /ansible/roles/pentest_fyrevm/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | dependencies: 4 | - role: load_secrets 5 | -------------------------------------------------------------------------------- /ansible/roles/pentest_fyrevm/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include_tasks: pentest_fyrevm.yml 4 | -------------------------------------------------------------------------------- /ansible/roles/pentest_fyrevm/tasks/pentest_fyrevm.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Create FyreVM with appropriate spec 4 | include_role: 5 | name: fyrevm_provision 6 | vars: 7 | fyre_platform: x 8 | fyre_cpu: 2 9 | fyre_memory: 4 10 | fyre_os: 'RedHat 9.0' 11 | fyre_site: "{{ site }}" 12 | fyre_comment: 'Pentest Host' 13 | clusterName_prefix: pentest 14 | target_group: pentest 15 | -------------------------------------------------------------------------------- /ansible/roles/podman/tasks/RedHat.8.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # specific OS supports wildcards; if preceded with @ it's a group 3 | # if the db is corrupt, this can fix it: rpm --rebuilddb 4 | - name: update all packages 5 | become: yes 6 | dnf: 7 | name: '*' 8 | state: latest 9 | 10 | - name: install packages 11 | become: yes 12 | dnf: 13 | name: "{{ packages }}" 14 | state: present 15 | vars: 16 | packages: 17 | - 'podman' 18 | -------------------------------------------------------------------------------- /ansible/roles/podman/tasks/RedHat.9.yml: -------------------------------------------------------------------------------- 1 | RedHat.8.yml --------------------------------------------------------------------------------
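The podman role's tasks/main.yml, next, uses the dispatch pattern common in this repo: include a task file named after Ansible facts. On a distribution without a matching file the include fails; one way to add a fallback, mirroring the with_first_found pattern the python role uses later in this tree, is sketched below (an editorial illustration under that assumption, not repo code).

---
# Hypothetical hardened dispatch: try the distro-specific task file first,
# then fall back to a default.yml if no distro-specific file exists.
- include_tasks: "{{ item }}"
  with_first_found:
    - files:
        - "{{ ansible_distribution }}.{{ ansible_distribution_major_version }}.yml"
        - default.yml
  when: ansible_system == 'Linux'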
/ansible/roles/podman/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ ansible_distribution }}.{{ ansible_distribution_major_version }}.yml" 3 | when: 4 | - ansible_system == 'Linux' 5 | -------------------------------------------------------------------------------- /ansible/roles/post_install/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: post-install.yml -------------------------------------------------------------------------------- /ansible/roles/provision_pool/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | LABEL: "content: build" -------------------------------------------------------------------------------- /ansible/roles/provision_pool/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: import tasks 2 | import_tasks: provision_pool.yml 3 | run_once: true 4 | -------------------------------------------------------------------------------- /ansible/roles/provision_pool/templates/aws-pool-template.j2: -------------------------------------------------------------------------------- 1 | apiVersion: hive.openshift.io/v1 2 | kind: ClusterPool 3 | metadata: 4 | name: {{ pool_name }} 5 | namespace: {{ pool_namespace }} 6 | spec: 7 | baseDomain: {{ BASE_DOMAIN }} 8 | imageSetRef: 9 | name: openshift-{{ imageset.stdout }} 10 | installConfigSecretTemplateRef: 11 | name: install-config-{{ cloud }}-{{ WORKER_COUNT }}-{{ WORKER_VM_SIZE }}-{{ WORKER_VOLUME_SIZE }}-template 12 | labels: 13 | {{ LABEL }} 14 | skipMachinePools: true 15 | pullSecretRef: 16 | name: global-pull-secret 17 | platform: 18 | aws: 19 | credentialsSecretRef: 20 | name: aws-creds 21 | region: {{ AWS_REGION }} 22 | size: {{ pool_size }} 23 | -------------------------------------------------------------------------------- /ansible/roles/provision_pool/templates/azure-pool-template.j2: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: {{ CLUSTER_NAME }} -------------------------------------------------------------------------------- /ansible/roles/provision_pool/templates/google-pool-template.j2: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: {{ CLUSTER_NAME }} -------------------------------------------------------------------------------- /ansible/roles/provision_pool/templates/ibmcloud-pool-template.j2: -------------------------------------------------------------------------------- 1 | # not supported at this time. 
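For illustration, this is roughly how the aws-pool-template.j2 above could render, assuming hypothetical values (pool_name=dev-pool, pool_namespace=hive-pools, BASE_DOMAIN=example.com, imageset.stdout=4.6.8, cloud=aws, WORKER_COUNT=3, WORKER_VM_SIZE=m5.xlarge, WORKER_VOLUME_SIZE=100, AWS_REGION=us-east-1, pool_size=2) and the role's default LABEL of "content: build"; none of these values come from the repo.

apiVersion: hive.openshift.io/v1
kind: ClusterPool
metadata:
  name: dev-pool
  namespace: hive-pools
spec:
  baseDomain: example.com
  imageSetRef:
    name: openshift-4.6.8
  installConfigSecretTemplateRef:
    name: install-config-aws-3-m5.xlarge-100-template
  labels:
    content: build
  skipMachinePools: true
  pullSecretRef:
    name: global-pull-secret
  platform:
    aws:
      credentialsSecretRef:
        name: aws-creds
      region: us-east-1
  size: 2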
-------------------------------------------------------------------------------- /ansible/roles/provision_pool/templates/image-set-template.j2: -------------------------------------------------------------------------------- 1 | apiVersion: hive.openshift.io/v1 2 | kind: ClusterImageSet 3 | metadata: 4 | name: openshift-{{ imageset.stdout }} 5 | spec: 6 | releaseImage: "{{ OCP_RELEASE_IMAGE }}" 7 | -------------------------------------------------------------------------------- /ansible/roles/python/tasks/default.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # There is no common way to install Python, so just check that Python is installed sufficiently for Ansible 3 | - name: Test that Python is installed sufficiently for Ansible 4 | setup: -------------------------------------------------------------------------------- /ansible/roles/python/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ item }}" 3 | with_first_found: 4 | - files: 5 | - "{{ python_install_type }}.yml" 6 | - default.yml -------------------------------------------------------------------------------- /ansible/roles/python_install_fyre/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /ansible/roles/python_install_fyre/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: python_install_fyre.yaml 3 | -------------------------------------------------------------------------------- /ansible/roles/python_install_fyre/tasks/python_install_fyre.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks python install 3 | 4 | - name: Install python on fyre inf node 5 | # using raw here lets Ansible connect without the shell module, which requires Python 6 | raw: "sudo yum update --nobest -y && sudo yum install -y python3 && sudo alternatives --set python /usr/bin/python3" 7 | register: pythoninstall 8 | 9 | - name: Viewing python install log 10 | debug: 11 | msg: "{{ pythoninstall.stdout_lines }}" 12 | -------------------------------------------------------------------------------- /ansible/roles/quickcheck/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | env_name: myenv 4 | -------------------------------------------------------------------------------- /ansible/roles/quickcheck/files/QuickCheck6.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/roles/quickcheck/files/QuickCheck6.jar -------------------------------------------------------------------------------- /ansible/roles/quickcheck/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: quickcheck.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/quickcheck/tasks/quickcheck.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install QuickCheck6.jar 4 | copy: 5 | src: files/QuickCheck6.jar 6 | dest: ~/QuickCheck6.jar 7 | - name: install daytrader.txt 8 | copy: 9 | src: files/daytrader.txt 10 | dest:
~/daytrader.txt 11 | 12 | - name: QuickCheck all logs 13 | delegate_to: localhost 14 | shell: 'java -jar ~/QuickCheck6.jar ~/daytrader.txt . ' 15 | args: 16 | chdir: "logs/{{ env_name }}/" 17 | register: qcoutput 18 | 19 | - name: quickcheck output 20 | debug: 21 | msg: "{{ qcoutput.stdout_lines }}" 22 | 23 | - name: quickcheck dir location 24 | debug: 25 | msg: "{{ playbook_dir }}/logs/{{ env_name }}/QuickCheckOutput/" 26 | -------------------------------------------------------------------------------- /ansible/roles/remove_ocp_roks/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | resourceGroup: default 3 | -------------------------------------------------------------------------------- /ansible/roles/remove_ocp_roks/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: remove-cluster.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/request_ocp4_logging/defaults/main.yml: -------------------------------------------------------------------------------- 1 | logging_bastion_setup_dir: ~/setup-files/ocp-logging-setup 2 | rendered_sc: "" # StorageClass to use for OCP logging PVCs; by default the cluster's designated default StorageClass is used. 3 | log_version: 4.6 # Set to the current newest logging version 4 | -------------------------------------------------------------------------------- /ansible/roles/request_ocp4_logging/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: import tasks 2 | import_tasks: request_ocp_logging.yml 3 | run_once: true 4 | -------------------------------------------------------------------------------- /ansible/roles/request_ocp4_logging/templates/logging-operator.yaml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: operators.coreos.com/v1 3 | kind: OperatorGroup 4 | metadata: 5 | name: cluster-logging 6 | namespace: openshift-logging 7 | spec: 8 | targetNamespaces: 9 | - openshift-logging 10 | --- 11 | apiVersion: operators.coreos.com/v1alpha1 12 | kind: Subscription 13 | metadata: 14 | name: "cluster-logging" 15 | namespace: "openshift-logging" 16 | spec: 17 | channel: "{{ log_version }}" 18 | source: "redhat-operators" 19 | sourceNamespace: "openshift-marketplace" 20 | name: "cluster-logging" 21 | -------------------------------------------------------------------------------- /ansible/roles/request_ocp_fyre/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: import tasks 2 | include_tasks: "request-{{ fyre_ocptype }}-fyre.yml" 3 | run_once: true -------------------------------------------------------------------------------- /ansible/roles/request_ocp_roks/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | hardware: shared 3 | kubeVersion: 4.6_openshift 4 | defaultPoolSize: 2 5 | resourceGroup: default 6 | cloudregion: us-south 7 | entitlement: cloud_pak 8 | force_deletet_storage: true -------------------------------------------------------------------------------- /ansible/roles/request_ocp_roks/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: create-cluster.yml 3 | --------------------------------------------------------------------------------
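A minimal sketch of invoking the request_ocp_roks role with the defaults shown above; the inventory target and the clusterName variable are assumptions for illustration, not names taken from the role.

---
# Hypothetical playbook; clusterName is an assumed variable, since the
# defaults above do not show how the cluster name is supplied.
- hosts: localhost
  connection: local
  roles:
    - role: request_ocp_roks
      vars:
        clusterName: my-roks-cluster    # assumed variable name
        kubeVersion: 4.6_openshift
        defaultPoolSize: 3              # override of the default 2
        cloudregion: us-south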
/ansible/roles/request_ocpplus_cluster_transfer_fyre/defaults/main.yml: -------------------------------------------------------------------------------- 1 | transfer_bastion_setup_dir: ~/setup-files/transfer-setup 2 | transfer_to_email: #IBM Intranet email of the person the Fyre OCP cluster is being transferred to. 3 | cluster_name: #Name of OCP cluster to transfer 4 | fyre_user: #Fyre API user name 5 | fyre_api_key: #Fyre API key 6 | -------------------------------------------------------------------------------- /ansible/roles/request_ocpplus_cluster_transfer_fyre/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: import tasks 2 | import_tasks: request-ocpplus-cluster-transfer.yml 3 | run_once: true 4 | -------------------------------------------------------------------------------- /ansible/roles/request_ocs/defaults/main.yml: -------------------------------------------------------------------------------- 1 | ocs_channel: "stable-4.6" 2 | ocs_bastion_setup_dir: ~/setup-files/ocs-setup 3 | setdefault: false 4 | -------------------------------------------------------------------------------- /ansible/roles/request_ocs/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: import tasks 2 | import_tasks: request_ocs.yml 3 | run_once: true 4 | -------------------------------------------------------------------------------- /ansible/roles/request_ocs_local_storage/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: ocp_login 4 | -------------------------------------------------------------------------------- /ansible/roles/request_ocs_local_storage/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: import tasks 2 | import_tasks: request_ocs_local_storage.yml 3 | run_once: true 4 | -------------------------------------------------------------------------------- /ansible/roles/request_ocs_local_storage/templates/local-volumes-discovery.yaml.j2: -------------------------------------------------------------------------------- 1 | apiVersion: local.storage.openshift.io/v1alpha1 2 | kind: LocalVolumeDiscovery 3 | metadata: 4 | name: auto-discover-devices 5 | namespace: {{ local_storage_namespace }} 6 | spec: 7 | nodeSelector: 8 | nodeSelectorTerms: 9 | - matchExpressions: 10 | - key: cluster.ocs.openshift.io/openshift-storage 11 | operator: In 12 | values: 13 | - "" -------------------------------------------------------------------------------- /ansible/roles/request_ocs_local_storage/templates/ocs-operator.yaml.j2: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: openshift-storage 5 | --- 6 | apiVersion: operators.coreos.com/v1 7 | kind: OperatorGroup 8 | metadata: 9 | namespace: openshift-storage 10 | name: openshift-storage 11 | spec: 12 | targetNamespaces: 13 | - openshift-storage 14 | --- 15 | apiVersion: operators.coreos.com/v1alpha1 16 | kind: Subscription 17 | metadata: 18 | name: "{{ ocs_type }}-operator" 19 | namespace: openshift-storage 20 | spec: 21 | channel: "{{ ocs_channel }}" 22 | installPlanApproval: Automatic 23 | name: "{{ ocs_type }}-operator" 24 | source: redhat-operators 25 | sourceNamespace: openshift-marketplace 26 | -------------------------------------------------------------------------------- /ansible/roles/setup_autowas/defaults/main.yml:
-------------------------------------------------------------------------------- 1 | autowas_url: http://rtpgsa.ibm.com/projects/m/moonstone/drivers/releases/latest.tar.gz 2 | autowas_user: nest 3 | autowas_dir: /home/nest/autowas 4 | gsa_user: 'yourgsauser' 5 | gsa_pass: 'yourgsapass' 6 | -------------------------------------------------------------------------------- /ansible/roles/setup_svt_db2_container/defaults/main.yml: -------------------------------------------------------------------------------- 1 | svt_db2_image: someimage 2 | hyc_user: user 3 | hyc_token: token 4 | -------------------------------------------------------------------------------- /ansible/roles/setup_svt_db2_container/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | dependencies: 4 | - role: load_secrets 5 | -------------------------------------------------------------------------------- /ansible/roles/setup_svt_db2_container/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: setup_svt_db2_container.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/setup_svt_jmeter_container/defaults/main.yml: -------------------------------------------------------------------------------- 1 | svt_jmeter_image: someimage 2 | hyc_user: user 3 | hyc_token: token 4 | -------------------------------------------------------------------------------- /ansible/roles/setup_svt_jmeter_container/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | dependencies: 4 | - role: load_secrets 5 | -------------------------------------------------------------------------------- /ansible/roles/setup_svt_jmeter_container/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: setup_svt_jmeter_container.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/start_aws_cluster/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: import tasks 3 | import_tasks: start_aws_cluster.yml 4 | run_once: true -------------------------------------------------------------------------------- /ansible/roles/stop_aws_cluster/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: import tasks 3 | import_tasks: stop_aws_cluster.yml 4 | run_once: true 5 | -------------------------------------------------------------------------------- /ansible/roles/timezone/defaults/main.yml: -------------------------------------------------------------------------------- 1 | regionTimeZone: 'America/New_York' 2 | -------------------------------------------------------------------------------- /ansible/roles/timezone/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "Set timezone to {{ regionTimeZone }}" 4 | timezone: 5 | name: "{{ regionTimeZone }}" 6 | 7 | 8 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/AIX.7.chrp.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ### https://www.ibm.com/support/pages/understanding-aix-virtual-rpm-package-rpmrte 3 | - name: updtvpkg 4 | command: 'updtvpkg' 5 | become: yes 6 | 
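The twas855x_ospreqs task files around this point are selected per platform: the role's tasks/main.yml (shown after the platform files) includes "{{ ansible_distribution }}.{{ ansible_distribution_major_version }}.{{ ansible_architecture }}.yml", so a RHEL 8 x86_64 host resolves to RedHat.8.x86_64.yml and an AIX 7 host to AIX.7.chrp.yml. A minimal sketch of applying the role, with a placeholder inventory group (editorial illustration, not repo code):

---
# Hypothetical playbook; twas_hosts is a placeholder group name.
- hosts: twas_hosts
  become: yes
  roles:
    - twas855x_ospreqs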
-------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/RedHat.7.x86_64.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install packages 4 | become: yes 5 | dnf: 6 | name: "{{ packages }}" 7 | state: present 8 | skip_broken: yes 9 | vars: 10 | packages: 11 | - 'glibc.i686' 12 | - 'gtk2.i686' 13 | - 'libXtst.i686' 14 | - 'libXft.i686' 15 | - 'libnsl.i686' 16 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/RedHat.8.x86_64.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install packages 4 | become: yes 5 | dnf: 6 | name: "{{ packages }}" 7 | state: present 8 | skip_broken: yes 9 | vars: 10 | packages: 11 | - 'glibc.i686' 12 | - 'gtk2.i686' 13 | - 'libXtst.i686' 14 | - 'libXft.i686' 15 | - 'libnsl.i686' 16 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/RedHat.9.ppc64le.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install packages 4 | become: yes 5 | dnf: 6 | name: "{{ packages }}" 7 | state: present 8 | skip_broken: yes 9 | vars: 10 | packages: 11 | - 'glibc' 12 | - 'gtk2' 13 | - 'libXtst' 14 | - 'libXft' 15 | - 'libnsl' 16 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/RedHat.9.s390x.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install packages 4 | become: yes 5 | dnf: 6 | name: "{{ packages }}" 7 | state: present 8 | skip_broken: yes 9 | vars: 10 | packages: 11 | - 'glibc' 12 | - 'gtk2' 13 | - 'libXtst' 14 | - 'libXft' 15 | - 'libnsl' 16 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/RedHat.9.x86_64.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install packages 4 | become: yes 5 | dnf: 6 | name: "{{ packages }}" 7 | state: present 8 | skip_broken: yes 9 | vars: 10 | packages: 11 | - 'glibc.i686' 12 | - 'gtk2.i686' 13 | - 'libXtst.i686' 14 | - 'libXft.i686' 15 | - 'libnsl.i686' 16 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/SLES.12.x86_64.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install packages 4 | become: yes 5 | zypper: 6 | name: "{{ packages }}" 7 | state: present 8 | vars: 9 | packages: 10 | - 'libgtk-3-0-32bit' 11 | - 'libgthread-2_0-0-32bit' 12 | - 'libXtst6-32bit' 13 | 14 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/SLES.15.x86_64.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install packages 4 | become: yes 5 | zypper: 6 | name: "{{ packages }}" 7 | state: present 8 | vars: 9 | packages: 10 | - 'libgtk-3-0-32bit' 11 | - 'libgthread-2_0-0-32bit' 12 | - 'libXtst6-32bit' 13 | 14 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/Ubuntu.18.x86_64.yml:
-------------------------------------------------------------------------------- 1 | --- 2 | # specific OS supports wildcards 3 | - name: remove extraneous packages 4 | become: yes 5 | ignore_errors: yes 6 | apt: 7 | name: "{{ packages }}" 8 | state: absent 9 | autoremove: yes 10 | vars: 11 | packages: 12 | - "java*" 13 | - "samba*" 14 | - "valgrind*" 15 | - "tigervnc" 16 | 17 | - name: update all packages 18 | become: yes 19 | apt: 20 | name: '*' 21 | state: latest 22 | 23 | - name: install packages 24 | become: yes 25 | apt: 26 | name: "{{ packages }}" 27 | state: present 28 | vars: 29 | packages: 30 | - 'libxtst6' 31 | - 'libgtk2.0-0' 32 | - 'libxft2' 33 | - 'zip' 34 | - 'unzip' 35 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/Ubuntu.20.x86_64.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install packages 4 | become: yes 5 | apt: 6 | name: "{{ packages }}" 7 | state: present 8 | vars: 9 | packages: 10 | - 'libxtst6' 11 | - 'libgtk2.0-0' 12 | - 'libc6:i386' 13 | - 'libgcc1:i386' 14 | - 'libgtk2.0-0:i386' 15 | - 'libstdc++6:i386' 16 | - 'libxft2' 17 | - 'libxft2:i386' 18 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/Ubuntu.22.x86_64.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install packages 4 | become: yes 5 | apt: 6 | name: "{{ packages }}" 7 | state: present 8 | vars: 9 | packages: 10 | - 'libxtst6' 11 | - 'libgtk2.0-0' 12 | - 'libc6:i386' 13 | - 'libgcc1:i386' 14 | - 'libgtk2.0-0:i386' 15 | - 'libstdc++6:i386' 16 | - 'libxft2' 17 | - 'libxft2:i386' 18 | -------------------------------------------------------------------------------- /ansible/roles/twas855x_ospreqs/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "{{ ansible_distribution }}.{{ ansible_distribution_major_version }}.{{ ansible_architecture }}.yml" 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_clear_logs/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_clear_logs/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_clear_logs.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_clear_logs/tasks/twas_cell_clear_logs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: remove logs directory 4 | file: 5 | path: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/logs" 6 | state: absent 7 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_cluster_servers/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_cluster_servers/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -
include_tasks: twas_cell_cluster_servers.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_cluster_servers/tasks/twas_cell_cluster_servers.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: copy twas_cell_cluster_servers.py.j2 template 4 | template: 5 | src: "twas_cell_cluster_servers.py.j2" 6 | dest: "twas_cell_cluster_servers.py" 7 | 8 | - name: run the twas_cell_cluster_servers.py jython script 9 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/wsadmin.sh -lang jython -username {{ twas_username }} -password {{ twas_password }} -f twas_cell_cluster_servers.py" 10 | register: createServerOutput 11 | failed_when: ( createServerOutput.rc not in [ 0, 255 ] ) 12 | - name: createServerOutput 13 | debug: 14 | msg: "{{ createServerOutput.stdout_lines }}" 15 | 16 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_daytrader3/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | DefaultAppName: 'DayTrader3.0.9-ee6-src' 4 | DefaultEarFile: '{{ DefaultAppName }}.ear' 5 | 6 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_daytrader3/files/DayTrader3.0.9-ee6-src.ear: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/roles/twas_cell_daytrader3/files/DayTrader3.0.9-ee6-src.ear -------------------------------------------------------------------------------- /ansible/roles/twas_cell_daytrader3/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: http_defaults 5 | - role: twas_cell_common_daytrader 6 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_daytrader3/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_daytrader3.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_daytrader7/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | DefaultAppName: 'DayTrader-ee7.0.11' 4 | DefaultEarFile: '{{ DefaultAppName }}.ear' 5 | 6 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_daytrader7/files/DayTrader-ee7.0.11.ear: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/community-automation/d4a42e2424aafd1de9127eee264e5124d711a7d3/ansible/roles/twas_cell_daytrader7/files/DayTrader-ee7.0.11.ear -------------------------------------------------------------------------------- /ansible/roles/twas_cell_daytrader7/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: http_defaults 5 | - role: twas_cell_common_daytrader 6 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_daytrader7/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - 
include_tasks: twas_cell_daytrader7.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_db_drop_sib_tables/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_db_drop_sib_tables.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_db_drop_sib_tables/tasks/twas_cell_db_drop_sib_tables.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: copy db2_drop_sib.py script 4 | copy: 5 | src: db2_drop_sib.py 6 | dest: "~/db2_drop_sib.py" 7 | mode: 0755 8 | 9 | - name: run the db2_drop_sib.py script 10 | shell: "~/db2_drop_sib.py {{ db_name }}" 11 | register: dropSibOutput 12 | failed_when: ( dropSibOutput.rc not in [ 0, 255 ] ) 13 | - name: dropSibOutput 14 | debug: 15 | msg: "{{ dropSibOutput.stdout_lines }}" 16 | 17 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_enable_ldap_security/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: twas_cell_wsadminlib 5 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_enable_ldap_security/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_enable_ldap_security.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_enable_ldap_security/tasks/twas_cell_enable_ldap_security.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: copy 99_twas_enable_ldap_security.py.j2 template 4 | template: 5 | src: "99_twas_enable_ldap_security.py.j2" 6 | dest: "99_twas_enable_ldap_security.py" 7 | 8 | - name: run the enable ldap jython script 9 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/wsadmin.sh -lang jython -username {{ twas_username }} -password {{ twas_password }} -f 99_twas_enable_ldap_security.py" 10 | register: enableLdapOutput 11 | failed_when: ( enableLdapOutput.rc not in [ 0, 255 ] ) 12 | - name: enableLdapOutput 13 | debug: 14 | msg: "{{ enableLdapOutput.stdout_lines }}" 15 | 16 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_federate/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_federate/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_federate.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_federate/tasks/twas_cell_federate.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: addnode 4 | when: profile_type == 'managed' 5 | block: 6 | - name: addNode 7 | shell: "{{ install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/addNode.sh {{ groups.dmgr[0] }} -conntype SOAP -username {{ twas_username }} -password {{ twas_password }} -profileName {{ profile_name }}" 8 | register: 
addNodeOutput 9 | failed_when: ( addNodeOutput.rc not in [ 0, 1, 2 ] ) 10 | - name: addNodeOutput 11 | debug: 12 | msg: "{{ addNodeOutput.stdout_lines }}" 13 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_get_logs/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | history_days: '-3' 4 | env_name: mytWASEnv 5 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_get_logs/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_get_logs/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_get_logs.yml -------------------------------------------------------------------------------- /ansible/roles/twas_cell_ihsplugin_gen_propagate/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: http_defaults 5 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_ihsplugin_gen_propagate/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_ihsplugin_gen_propagate.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_kill/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_kill/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_kill.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_profiles/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_profiles/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_profiles.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_put_db2jars/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_put_db2jars/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_put_db2jars.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_put_db2jars/tasks/twas_cell_put_db2jars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: db2jars dir ${WAS_INSTALL_ROOT}/db2jars 4 | file: 5 | path: '{{ item }}' 6 | state: directory 7 
| recurse: yes 8 | with_items: 9 | - "{{install_base }}/{{ twas_dir }}/db2jars" 10 | 11 | - name: copy db2jcc4.jar ${WAS_INSTALL_ROOT}/db2jars 12 | copy: 13 | src: /tmp/db2jcc4.jar 14 | dest: "{{install_base }}/{{ twas_dir }}/db2jars/db2jcc4.jar" 15 | mode: '0755' 16 | - name: copy db2jcc_license_cu.jar ${WAS_INSTALL_ROOT}/db2jars 17 | copy: 18 | src: /tmp/db2jcc_license_cu.jar 19 | dest: "{{install_base }}/{{ twas_dir }}/db2jars/db2jcc_license_cu.jar" 20 | mode: '0755' 21 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_start_cluster/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: twas_cell_wsadminlib 5 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_start_cluster/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_start_cluster.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_start_cluster/tasks/twas_cell_start_cluster.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: copy start script 4 | copy: 5 | src: "twas_cell_start_cluster.py" 6 | dest: "~/twas_cell_start_cluster.py" 7 | 8 | - name: run the twas_cell_start_cluster.py script 9 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/wsadmin.sh -lang jython -username {{ twas_username }} -password {{ twas_password }} -f twas_cell_start_cluster.py cluster {{ clusterName }}" 10 | register: startClusterOutput 11 | failed_when: ( startClusterOutput.rc not in [ 0, 255 ] ) 12 | - name: startClusterOutput 13 | debug: 14 | msg: "{{ startClusterOutput.stdout_lines }}" 15 | 16 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_start_dmgr/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_start_dmgr/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_start_dmgr.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_start_dmgr/tasks/twas_cell_start_dmgr.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: dmgr 4 | when: profile_type == 'dmgr' 5 | block: 6 | - name: start the dmgr 7 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/startManager.sh" 8 | register: startManagerOutput 9 | failed_when: ( startManagerOutput.rc not in [ 0, 255 ] ) 10 | - name: startManagerOutput 11 | debug: 12 | msg: "{{ startManagerOutput.stdout_lines }}" 13 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_start_node/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_start_node/tasks/main.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_start_node.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_start_node/tasks/twas_cell_start_node.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: start node 4 | when: profile_type == 'managed' 5 | block: 6 | - name: start the node 7 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/startNode.sh" 8 | register: startNodeOutput 9 | failed_when: ( startNodeOutput.rc not in [ 0, 255 ] ) 10 | - name: startNodeOutput 11 | debug: 12 | msg: "{{ startNodeOutput.stdout_lines }}" 13 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_status/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: http_defaults 5 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_status/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_status.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_stop_cluster/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: twas_cell_wsadminlib 5 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_stop_cluster/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_stop_cluster.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_stop_cluster/tasks/twas_cell_stop_cluster.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: copy stop script 4 | copy: 5 | src: "twas_cell_stop_cluster.py" 6 | dest: "~/twas_cell_stop_cluster.py" 7 | 8 | - name: run the twas_cell_stop_cluster.py script 9 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/wsadmin.sh -lang jython -username {{ twas_username }} -password {{ twas_password }} -f twas_cell_stop_cluster.py cluster {{ clusterName }}" 10 | register: stopClusterOutput 11 | failed_when: ( stopClusterOutput.rc not in [ 0, 255 ] ) 12 | - name: stopClusterOutput 13 | debug: 14 | msg: "{{ stopClusterOutput.stdout_lines }}" 15 | 16 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_stop_dmgr/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_stop_dmgr/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_stop_dmgr.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_stop_dmgr/tasks/twas_cell_stop_dmgr.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: dmgr 4 | when: profile_type == 
'dmgr' 5 | block: 6 | - name: stop the dmgr 7 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/stopManager.sh -username {{ twas_username }} -password {{ twas_password }}" 8 | register: stopManagerOutput 9 | failed_when: ( stopManagerOutput.rc not in [ 0, 255 ] ) 10 | - name: stopManagerOutput 11 | debug: 12 | msg: "{{ stopManagerOutput.stdout_lines }}" 13 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_stop_node/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_stop_node/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_stop_node.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_stop_node/tasks/twas_cell_stop_node.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: stop node 4 | when: profile_type == 'managed' 5 | block: 6 | - name: stop the node 7 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/stopNode.sh -username {{ twas_username }} -password {{ twas_password }}" 8 | register: stopNodeOutput 9 | failed_when: ( stopNodeOutput.rc not in [ 0, 255 ] ) 10 | - name: stopNodeOutput 11 | debug: 12 | msg: "{{ stopNodeOutput.stdout_lines }}" 13 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_test_ldap/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_test_ldap.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_test_ldap/tasks/twas_cell_test_ldap.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: test ldap connectivity 4 | shell: 'timeout 2 openssl s_client -connect {{ ldapServer }}:{{ ldapPort }} -tls1_2' 5 | register: ldapOutput 6 | failed_when: ( ldapOutput.rc not in [ 1 ] ) 7 | 8 | - name: ping ldapOutput 9 | debug: 10 | msg: "{{ ldapOutput.stdout_lines }}" 11 | failed_when: '"CONNECTED" not in ldapOutput.stdout' 12 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_tuning/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | applicationServerminHeap: 128 4 | applicationServermaxHeap: 1024 5 | 6 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_tuning/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: twas_cell_wsadminlib 5 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_tuning/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_tuning.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_tuning/tasks/twas_cell_tuning.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - 
name: copy tuneDayTrader.py.j2 template 4 | template: 5 | src: "tuneDayTrader.py.j2" 6 | dest: "tuneDayTrader.py" 7 | 8 | - name: run the daytrader tuning jython script 9 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/wsadmin.sh -lang jython -username {{ twas_username }} -password {{ twas_password }} -f tuneDayTrader.py cluster {{ clusterName }}" 10 | register: tuningOutput 11 | failed_when: ( tuningOutput.rc not in [ 0, 255 ] ) 12 | - name: tuningOutput 13 | debug: 14 | msg: "{{ tuningOutput.stdout_lines }}" 15 | 16 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_tuning/templates/resource_vars.py.j2: -------------------------------------------------------------------------------- 1 | #----------------------------------------------------------------- 2 | # WARNING: Jython/Python is extremely sensitive to indentation 3 | # errors. Please ensure that tabs are configured appropriately 4 | # for your editor of choice. 5 | #----------------------------------------------------------------- 6 | saveMe="true" 7 | syncAll="false" 8 | genericArgs = "-Xdisableexplicitgc -Xgcpolicy:gencon -Xmn1024m -Xshareclasses:none -javaagent:/opt/instana/instana-javaagent-1.0.0.jar -Dcom.ibm.tools.attach.enable=yes" 9 | # END 10 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_unamaged_web_node/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: http_defaults 5 | - role: ibm_installation_manager_cic_selector 6 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_unamaged_web_node/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_unamaged_web_node.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_wsadminlib/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_cell_wsadminlib.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_cell_wsadminlib/tasks/twas_cell_wsadminlib.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # IBM zLinux hosts in POK have problems fetching this URL -> https://oneticket.devit.ibm.com/tickets/138131 4 | # so fetch the file on the script engine (localhost) first, then copy it to the target 5 | 6 | - name: get wsadminlib.py 7 | delegate_to: localhost 8 | get_url: 9 | url: "https://raw.githubusercontent.com/wsadminlib/wsadminlib/master/bin/wsadminlib.py" 10 | dest: "~/wsadminlib.py" 11 | validate_certs: false 12 | 13 | - name: copy wsadminlib.py 14 | copy: 15 | src: "~/wsadminlib.py" 16 | dest: "~/wsadminlib.py" 17 | -------------------------------------------------------------------------------- /ansible/roles/twas_config/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: twas_cell_wsadminlib 5 | -------------------------------------------------------------------------------- /ansible/roles/twas_config/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_config.yml 3 |
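
A note on wiring: twas_config, like most of the twas_cell_* roles, inherits its installation defaults (install_base, twas_dir, and friends) from the twas_cell_defaults dependency, so a caller only supplies the profile, credentials, and the script to run. A minimal playbook sketch — every value below is illustrative, not taken from this repo:

- hosts: dmgr
  roles:
    - role: twas_config
  vars:
    profile_name: dmgr01                        # profile whose wsadmin.sh is invoked
    twas_username: wsadmin                      # hypothetical admin credentials
    twas_password: "{{ vault_twas_password }}"
    twas_script: 99_my_config.py                # one of the files copied from scripts/
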
-------------------------------------------------------------------------------- /ansible/roles/twas_config/tasks/twas_config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: copy twas config scripts to target host 4 | copy: 5 | src: "scripts/" 6 | dest: ~/ 7 | mode: '0600' 8 | 9 | - name: run the config jython script 10 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/wsadmin.sh -lang jython -username {{ twas_username }} -password {{ twas_password }} -f ~/{{ twas_script }}" 11 | register: twasConfigOutput 12 | failed_when: ( twasConfigOutput.rc not in [ 0, 255 ] ) 13 | - name: twasConfigOutput 14 | debug: 15 | msg: "{{ twasConfigOutput.stdout_lines }}" 16 | -------------------------------------------------------------------------------- /ansible/roles/twas_default_swaggerui/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | - role: twas_cell_wsadminlib 5 | -------------------------------------------------------------------------------- /ansible/roles/twas_default_swaggerui/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_default_swaggerui.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_default_swaggerui/tasks/twas_default_swaggerui.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: deploySwaggerUI.py 4 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/wsadmin.sh -lang jython -conntype NONE -username {{ twas_username }} -password {{ twas_password }} -f {{install_base }}/{{ twas_dir }}/bin/deploySwaggerUI.py install 'WebSphere:cell={{ cellName }},node={{ profile_name }},server=server1'" 5 | register: twasConfigOutput 6 | failed_when: ( twasConfigOutput.rc not in [ 0, 105, 130, 255 ] ) 7 | - name: twasConfigOutput 8 | debug: 9 | msg: "{{ twasConfigOutput.stdout_lines }}" 10 | -------------------------------------------------------------------------------- /ansible/roles/twas_enable_wim_registry/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_enable_wim_registry/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_enable_wim_registry.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_enable_wim_registry/tasks/twas_enable_wim_registry.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: copy wim_wizard.py.j2 template 4 | template: 5 | src: "wim_wizard.py.j2" 6 | dest: "wim_wizard.py" 7 | 8 | - name: run the enable wim jython script 9 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/wsadmin.sh -lang jython -conntype NONE -username {{ twas_username }} -password {{ twas_password }} -f wim_wizard.py" 10 | register: enablewimOutput 11 | failed_when: ( enablewimOutput.rc not in [ 0, 255 ] ) 12 | - name: enablewimOutput 13 | debug: 14 | msg: "{{ enablewimOutput.stdout_lines }}" 15 | 16 | -------------------------------------------------------------------------------- 
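
The wsadmin-driven roles above all share one convention worth calling out: wsadmin.sh is treated as successful for more than just rc 0 (the wim registry task accepts 0 and 255, and deploySwaggerUI accepts 0, 105, 130, and 255), presumably because wsadmin returns nonzero for some benign conditions such as re-running against an already-configured cell. Distilled, the pattern is:

- name: run a jython script via wsadmin
  shell: "{{ install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/wsadmin.sh -lang jython -f some_script.py"
  register: wsadminOutput
  # tolerate the return codes known to be benign for this particular script
  failed_when: wsadminOutput.rc not in [0, 255]

- name: surface the script output in the play log
  debug:
    msg: "{{ wsadminOutput.stdout_lines }}"
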
/ansible/roles/twas_enable_wim_registry/templates/wim_wizard.py.j2: -------------------------------------------------------------------------------- 1 | param = '[-secureApps %s -secureLocalResources %s -userRegistryType %s -adminName %s -adminPassword %s]' % ('true','false','WIMUserRegistry','{{ twas_username }}','{{ twas_password }}') 2 | AdminTask.applyWizardSettings(param) 3 | AdminConfig.save() 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_ssl_client_props/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_ssl_client_props/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_ssl_client_props.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_ssl_client_props/tasks/twas_ssl_client_props.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: update ssl.client.props SSL/TLS 4 | replace: 5 | path: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/properties/ssl.client.props" 6 | regexp: 'SSL_TLSv2' 7 | replace: '{{ sslProtocol }}' 8 | -------------------------------------------------------------------------------- /ansible/roles/twas_start_default/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_start_default/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_start_default.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_stop_default/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: twas_cell_defaults 4 | -------------------------------------------------------------------------------- /ansible/roles/twas_stop_default/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: twas_stop_default.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/twas_stop_default/tasks/twas_stop_default.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: default 4 | when: profile_type == 'default' 5 | block: 6 | - name: stop the default server 7 | ignore_errors: true 8 | shell: "{{install_base }}/{{ twas_dir }}/profiles/{{ profile_name }}/bin/stopServer.sh server1 -username {{ twas_username }} -password {{ twas_password }}" 9 | register: stopDefaultOutput 10 | failed_when: ( stopDefaultOutput.rc not in [ 0,246,255 ] ) 11 | - name: stopDefaultOutput 12 | debug: 13 | msg: "{{ stopDefaultOutput.stdout_lines }}" 14 | -------------------------------------------------------------------------------- /ansible/roles/validate_ocp_install/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: ocp_login 4 | 
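
One more note on the tWAS roles before the OCP validation tasks below: twas_ssl_client_props is driven by a single variable — its replace task swaps the literal SSL_TLSv2 token in the profile's ssl.client.props (presumably the stock com.ibm.ssl.protocol value) for whatever sslProtocol is set to. A hypothetical invocation:

- hosts: all
  roles:
    - role: twas_ssl_client_props
  vars:
    profile_name: AppSrv01     # illustrative profile name
    sslProtocol: TLSv1.2       # every SSL_TLSv2 occurrence in ssl.client.props becomes TLSv1.2
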
-------------------------------------------------------------------------------- /ansible/roles/validate_ocp_install/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: validate_cluster.yml 3 | -------------------------------------------------------------------------------- /ansible/roles/validate_ocp_install/tasks/validate_cluster.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: list cluster nodes 3 | shell: "oc get nodes" 4 | environment: 5 | KUBECONFIG: "{{ kubeconfig_location }}/config" 6 | register: node_results 7 | ignore_errors: yes 8 | 9 | - name: dump all pods 10 | shell: "oc get pods --all-namespaces" 11 | environment: 12 | KUBECONFIG: "{{ kubeconfig_location }}/config" 13 | register: pod_results 14 | ignore_errors: yes 15 | 16 | - name: logout of cluster 17 | shell: "oc logout" 18 | environment: 19 | KUBECONFIG: "{{ kubeconfig_location }}/config" 20 | register: logout_results 21 | ignore_errors: yes 22 | -------------------------------------------------------------------------------- /ansible/roles/vnc/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: "vnc.{{ ansible_distribution }}.yml" 3 | -------------------------------------------------------------------------------- /ansible/roles/wait_for_cluster_ready/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /ansible/roles/wait_for_cluster_ready/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - include_tasks: wait_for_cluster_ready.yaml 3 | -------------------------------------------------------------------------------- /ansible/roles/wait_for_cluster_ready/tasks/wait_for_cluster_ready.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # poll until every pod in the cluster is Running or Complete 3 | - name: Check for all pods Running or Complete, wait up to 60 minutes 4 | shell: "oc --no-headers=true get pods --all-namespaces | grep -v Running | grep -v Complete > /dev/null; echo $?"
5 | environment: 6 | KUBECONFIG: "{{ kubeconfig_location }}/config" 7 | register: cluster_ready_results 8 | until: 'cluster_ready_results.stdout != "0"' 9 | retries: 60 10 | delay: 60 11 | -------------------------------------------------------------------------------- /ansible/roles/was_automation_register/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include_tasks: twas.yml 4 | when: 5 | - waProductType == 'tWAS' 6 | - include_tasks: liberty.yml 7 | when: 8 | - waProductType == 'Liberty' -------------------------------------------------------------------------------- /ansible/roles/was_automation_register/templates/wa.pem.j2: -------------------------------------------------------------------------------- 1 | {{ wa_cert }} 2 | -------------------------------------------------------------------------------- /ansible/setup-autowas-play/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory 2 | -------------------------------------------------------------------------------- /ansible/setup-autowas-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [all] 2 | yourTargethostFQDN 3 | 4 | [all:vars] 5 | ansible_user=your_autoWASUser 6 | autowas_dir=/home/your_autoWASUser/autowas 7 | gsa_user=yourGSAid 8 | gsa_pass=yourGSApass 9 | -------------------------------------------------------------------------------- /ansible/setup-autowas-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/setup-autowas-play/setup-autowas-play: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - hosts: all 4 | roles: 5 | - role: setup_autowas 6 | 7 | -------------------------------------------------------------------------------- /ansible/setup-new-fyre-host-play/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory 2 | password.hash 3 | -------------------------------------------------------------------------------- /ansible/setup-new-fyre-host-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [all] 2 | rhel1.fyre.ibm.com 3 | sles1.fyre.ibm.com 4 | ub1.fyre.ibm.com 5 | 6 | [all:vars] 7 | ansible_user=root 8 | -------------------------------------------------------------------------------- /ansible/setup-new-fyre-host-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/setup-new-fyre-host-play/setup-new-fyre-host-play.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - hosts: all 4 | gather_facts: true 5 | vars: 6 | - ansible_ssh_common_args: -o StrictHostKeyChecking=no 7 | - ansible_user: root 8 | roles: 9 | - role: osprereqs 10 | - role: nestuser 11 | - role: fix_fyre_hosts_file 12 | - role: timezone 13 | - role: authorized_keys 14 | -------------------------------------------------------------------------------- /ansible/vnc-play/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory 2 | -------------------------------------------------------------------------------- 
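
Looking back at was_automation_register above: the role branches purely on waProductType (tWAS vs Liberty), so a caller only sets that one variable plus whatever the included twas.yml or liberty.yml tasks themselves expect. A hypothetical invocation:

- hosts: all
  roles:
    - role: was_automation_register
  vars:
    waProductType: tWAS        # or 'Liberty'; any other value skips both includes
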
/ansible/vnc-play/examples/inventory: -------------------------------------------------------------------------------- 1 | [all] 2 | myhost 3 | 4 | [all:vars] 5 | ansible_user=root 6 | -------------------------------------------------------------------------------- /ansible/vnc-play/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/vnc-play/vnc-play: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - hosts: all 4 | roles: 5 | - role: vnc 6 | 7 | -------------------------------------------------------------------------------- /ansible/was-automation-register/.gitignore: -------------------------------------------------------------------------------- 1 | /inventory* 2 | /*.json 3 | /*.sh 4 | -------------------------------------------------------------------------------- /ansible/was-automation-register/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /ansible/was-automation-register/was-automation-register.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - hosts: all 4 | roles: 5 | - role: was_automation_register 6 | -------------------------------------------------------------------------------- /docs/other-repositories.md: -------------------------------------------------------------------------------- 1 | # Common Repositories 2 | 3 | Terraform automation (VMware, AWS, Google, and Azure) 4 | [tf_openshift_4](https://github.ibm.com/ICP-DevOps/tf_openshift_4) 5 | 6 | Some useful tools (cluster recovery scripts) 7 | [tf_openshift_4_tools](https://github.ibm.com/ICP-DevOps/tf_openshift_4_tools) 8 | -------------------------------------------------------------------------------- /docs/roks.md: -------------------------------------------------------------------------------- 1 | # ROKS Automation 2 | 3 | Refer to the ROKS playbook: [ROKS playbook](../ansible/request-ocp-roks-play/README.md) 4 | 5 | The ROKS information below is provided until an Ansible solution can be worked out in this community repo: 6 | 7 | [DTE's ROKS provisioning](https://github.ibm.com/dte/roksprovisioning) - see its README.md, which includes an installer script to get started.
8 | 9 | For reference, the repo above uses the following: 10 | 11 | [Terraform docker image](https://hub.docker.com/r/ibmterraform/terraform-provider-ibm-docker) 12 | 13 | [Terraform Documentation](https://cloud.ibm.com/docs/terraform?topic=terraform-getting-started) -------------------------------------------------------------------------------- /scripts/README.md: -------------------------------------------------------------------------------- 1 | # Folder to contain extra shareable scripts 2 | This scripts folder is for non-Ansible scripts that can be shared within or across teams 3 | 4 | ## sub folders 5 | 6 | | Team | Comments | 7 | |------| -------- | 8 | | [common/](common/README.md) | scripts that could be used across teams | 9 | | [content-tools/](content-tools/README.md) | used by the content devops team | 10 | | etc.../ | add your team's folder as applicable | 11 | -------------------------------------------------------------------------------- /scripts/common/community-docker-build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # currently being maintained by Ray Ashworth 4 | 5 | # bash best practices 6 | set -o errexit 7 | set -o nounset 8 | set -o pipefail 9 | 10 | git clone ../../. community-automation 11 | docker build . -t quay.io/rayashworth/community-ansible:latest 12 | docker login quay.io -u $quay_user -p $quay_password 13 | docker push quay.io/rayashworth/community-ansible:latest 14 | -------------------------------------------------------------------------------- /scripts/common/dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu 2 | USER root 3 | RUN apt -y update && \ 4 | apt -y install python3 python3-pip git curl wget sudo jq gpg vim sshpass && \ 5 | pip3 install --upgrade pip && \ 6 | pip3 install ansible 7 | COPY community-automation/ community-automation 8 | RUN ansible-galaxy collection install -r community-automation/ansible/prereq-play/requirements.yml && \ 9 | ansible-playbook -i community-automation/ansible/prereq-play/inventory community-automation/ansible/prereq-play/prereq-play.yml && \ 10 | rm -rf community-automation 11 | ENTRYPOINT [ "/bin/bash" ] 12 | WORKDIR /community-automation/ansible 13 | 14 | -------------------------------------------------------------------------------- /scripts/content-tools/README.md: -------------------------------------------------------------------------------- 1 | # content-tools devops scripts 2 | --------------------------------------------------------------------------------
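
The image produced by community-docker-build.sh bakes in Ansible and the prereq-play collections, and its ENTRYPOINT drops you into a bash shell, so one plausible way to use it is to mount a directory of playbooks and run them from inside the container. The invocation below is hypothetical:

# mount the current directory and work from it inside the container
docker run -it --rm -v "$PWD":/work -w /work quay.io/rayashworth/community-ansible:latest

# then, inside the container, something like:
#   ansible-playbook -i inventory your-play.yml
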