├── .devcontainer ├── Dockerfile ├── README.md └── devcontainer.json ├── .editorconfig ├── .github └── workflows │ └── release.yml ├── .gitignore ├── .prettierrc.js ├── .releng └── docker-image.changelog ├── .shellcheckrc ├── Puppetfile ├── README.md ├── _build_jdk_image.sh ├── _common.sh ├── _liferay_common.sh ├── _release_common.sh ├── _test_common.sh ├── build_all_images.sh ├── build_base_image.sh ├── build_batch_image.sh ├── build_bundle_image.sh ├── build_caddy_image.sh ├── build_dynamic_rendering_image.sh ├── build_jar_runner_image.sh ├── build_jdk11_image.sh ├── build_jdk11_jdk8_image.sh ├── build_jdk21_image.sh ├── build_jdk21_jdk11_jdk8_image.sh ├── build_job_runner_image.sh ├── build_local_image.sh ├── build_nightly.sh ├── build_node_runner_image.sh ├── build_noop_image.sh ├── build_squid_image.sh ├── build_zabbix_server_image.sh ├── build_zabbix_web_image.sh ├── bundles.yml ├── demos └── job-runner │ ├── docker-compose.yml │ └── files │ ├── job-crontab │ └── jobs │ ├── every_minute.sh │ ├── every_three_minutes.sh │ ├── every_two_minutes.sh │ └── init.sh ├── download_trial_dxp_license.sh ├── lefthook.yml ├── narwhal ├── .gitignore ├── README.md ├── clean.sh ├── compose_samples │ ├── README-all.md │ ├── copy_license.sh │ ├── env.servers │ ├── license.xml │ ├── replace_ips.sh │ ├── server-1 │ │ ├── docker-compose.yml │ │ ├── pre.sh │ │ └── web-server │ │ │ ├── Dockerfile │ │ │ └── resources │ │ │ └── usr │ │ │ └── local │ │ │ └── bin │ │ │ └── web_server_entrypoint.sh │ ├── server-2 │ │ ├── docker-compose.yml │ │ ├── liferay │ │ │ ├── Dockerfile │ │ │ └── resources │ │ │ │ ├── opt │ │ │ │ └── liferay │ │ │ │ │ └── cluster-link-tcp.xml │ │ │ │ └── usr │ │ │ │ └── local │ │ │ │ ├── bin │ │ │ │ └── install_patch_on_build.sh │ │ │ │ └── liferay │ │ │ │ └── scripts │ │ │ │ └── pre-startup │ │ │ │ └── 10_wait_for_dependencies.sh │ │ ├── pre.sh │ │ └── search │ │ │ └── Dockerfile │ ├── server-3 │ │ ├── docker-compose.yml │ │ ├── liferay │ │ │ ├── Dockerfile │ │ │ 
└── resources │ │ │ │ ├── opt │ │ │ │ └── liferay │ │ │ │ │ └── cluster-link-tcp.xml │ │ │ │ └── usr │ │ │ │ └── local │ │ │ │ ├── bin │ │ │ │ └── install_patch_on_build.sh │ │ │ │ └── liferay │ │ │ │ └── scripts │ │ │ │ └── pre-startup │ │ │ │ └── 10_wait_for_dependencies.sh │ │ ├── pre.sh │ │ └── search │ │ │ └── Dockerfile │ └── server-4 │ │ ├── README.md │ │ ├── backup │ │ ├── Dockerfile │ │ └── resources │ │ │ ├── mnt │ │ │ └── liferay │ │ │ │ ├── job-crontab │ │ │ │ └── jobs │ │ │ │ └── backup.sh │ │ │ └── usr │ │ │ └── local │ │ │ └── bin │ │ │ ├── backup.sh │ │ │ ├── backup_db.sh │ │ │ └── backup_document_library.sh │ │ ├── docker-compose.yml │ │ ├── pre.sh │ │ └── search │ │ └── Dockerfile ├── install_jenkins_node.sh ├── jenkins │ └── jobs │ │ └── SCS_pipeline │ │ └── config.xml ├── puppet │ ├── Puppetfile │ ├── README.md │ ├── data │ │ └── common.eyaml │ ├── environment.conf │ ├── hiera.yaml │ ├── liferay │ ├── manifests │ │ ├── nodes │ │ │ ├── bob1.bud.liferay.com.pp │ │ │ ├── bob2.bud.liferay.com.pp │ │ │ ├── bob3.bud.liferay.com.pp │ │ │ ├── bob4.bud.liferay.com.pp │ │ │ ├── bob5.dso.lfr.pp │ │ │ ├── bob6.dso.lfr.pp │ │ │ ├── bob7.bud.liferay.com.pp │ │ │ ├── bob8.bud.liferay.com.pp │ │ │ ├── db_.orca.lfr.pp │ │ │ ├── gw1.orca.lfr.pp │ │ │ ├── ig11.dso.lfr.pp │ │ │ ├── jenkins.orca.lfr.pp │ │ │ ├── jumper.orca.lfr.pp │ │ │ ├── narwhalci.dso.lfr.pp │ │ │ ├── observer.orca.lfr.pp │ │ │ ├── puppet.dso.lfr.pp │ │ │ ├── search_.orca.lfr.pp │ │ │ └── web_.orca.lfr.pp │ │ └── site.pp │ └── modules │ │ └── liferay │ │ ├── pts_autoinstall │ │ ├── files │ │ │ └── var │ │ │ │ └── www │ │ │ │ └── puppet.dso.lfr │ │ │ │ └── docs │ │ │ │ └── a.yaml │ │ └── manifests │ │ │ └── init.pp │ │ ├── pts_ci_node │ │ └── manifests │ │ │ └── init.pp │ │ ├── pts_docker │ │ └── manifests │ │ │ └── init.pp │ │ ├── pts_hosts │ │ ├── README.md │ │ ├── manifests │ │ │ ├── add.pp │ │ │ ├── file.pp │ │ │ ├── init.pp │ │ │ ├── localhost.pp │ │ │ ├── params.pp │ │ │ └── primary.pp │ │ 
└── spec │ │ │ ├── classes │ │ │ └── init_spec.rb │ │ │ └── spec_helper.rb │ │ ├── pts_location │ │ ├── files │ │ │ └── usr │ │ │ │ └── local │ │ │ │ └── sbin │ │ │ │ └── ifdefault.sh │ │ ├── lib │ │ │ └── facter │ │ │ │ ├── ifdefault.rb │ │ │ │ ├── ipdefault.rb │ │ │ │ └── networkdefault.rb │ │ └── manifests │ │ │ └── init.pp │ │ ├── pts_orca │ │ └── manifests │ │ │ └── init.pp │ │ ├── pts_packages │ │ └── manifests │ │ │ ├── absent.pp │ │ │ ├── init.pp │ │ │ └── latest.pp │ │ ├── pts_puppet_agent │ │ ├── files │ │ │ └── usr │ │ │ │ └── local │ │ │ │ └── sbin │ │ │ │ └── puppet-agent.sh │ │ └── manifests │ │ │ └── init.pp │ │ ├── pts_schedule │ │ └── manifests │ │ │ └── init.pp │ │ ├── pts_ssh │ │ └── manifests │ │ │ └── init.pp │ │ ├── pts_system │ │ ├── files │ │ │ ├── root │ │ │ │ └── .bash_profile │ │ │ └── usr │ │ │ │ └── local │ │ │ │ └── bin │ │ │ │ └── ssh-clean-known_hosts.sh │ │ └── manifests │ │ │ ├── init.pp │ │ │ └── root.pp │ │ ├── pts_threatstack │ │ ├── files │ │ │ └── etc │ │ │ │ └── apt │ │ │ │ └── trusted.gpg.d │ │ │ │ └── threatstack.gpg │ │ └── manifests │ │ │ └── init.pp │ │ ├── pts_timezone │ │ └── manifests │ │ │ └── init.pp │ │ └── pts_users │ │ └── manifests │ │ ├── groups.pp │ │ ├── groups │ │ └── ptsaccess.pp │ │ ├── init.pp │ │ ├── users.pp │ │ └── users │ │ ├── akos_kreutz.pp │ │ ├── brian_chan.pp │ │ ├── jenkins.pp │ │ ├── peter_mezei.pp │ │ ├── richard_benko.pp │ │ ├── root.pp │ │ ├── tamas_papp.pp │ │ ├── ubuntu.pp │ │ └── zsolt_balogh.pp ├── push_release_builder.sh ├── release_notes │ ├── .gitignore │ └── generate_release_notes.sh ├── run_2023_q3_release.sh ├── run_release_builder.sh ├── run_test_7.2.x_hotfix.sh ├── run_test_7.4.x_hotfix.sh ├── source_code_sharing │ ├── .gitignore │ ├── _common.sh │ ├── get_ignore_zip_files_7.3.10.sh │ ├── ignore_zip_files_persistent.txt │ ├── update_liferay_dxp_source_code_of_hotfixes.sh │ └── update_liferay_dxp_source_code_of_versions.sh └── templates │ ├── ci │ ├── Dockerfile │ ├── 
docker-compose.yaml │ ├── narwhalci.orca.liferay.com.conf │ └── resources │ │ └── usr │ │ └── local │ │ └── sbin │ │ ├── jenkins-plugin-cli │ │ └── jenkins.sh │ ├── release-builder │ ├── Dockerfile │ └── resources │ │ └── usr │ │ └── local │ │ └── signature-util │ │ ├── com │ │ └── liferay │ │ │ └── tools │ │ │ └── patching │ │ │ └── signing │ │ │ └── GenerateSignature.java │ │ └── signature-util.iml │ └── tang-server │ ├── Dockerfile │ ├── docker-compose.yaml │ └── healtcheck.sh ├── orca ├── .gitignore ├── README.md ├── configs │ ├── demo.yml │ └── single_server.yml ├── scripts │ ├── _common.sh │ ├── build_services.sh │ ├── install_orca.sh │ ├── orca.sh │ └── validate_environment.sh └── templates │ ├── _common │ └── resources │ │ └── usr │ │ └── local │ │ └── bin │ │ └── fetch_orca_secrets.sh │ ├── antivirus │ └── Dockerfile │ ├── backup │ ├── Dockerfile │ └── resources │ │ ├── mnt │ │ └── liferay │ │ │ ├── job-crontab │ │ │ └── jobs │ │ │ ├── backup.sh │ │ │ └── fetch_secrets.sh │ │ └── usr │ │ └── local │ │ └── bin │ │ ├── backup.sh │ │ ├── backup_db.sh │ │ └── backup_document_library.sh │ ├── ci │ └── Dockerfile │ ├── db │ ├── Dockerfile │ └── resources │ │ └── usr │ │ └── local │ │ └── bin │ │ ├── connect_to_mysql.sh │ │ ├── liferay_entrypoint.sh │ │ ├── make_primary.sh │ │ └── wait_for_dependencies.sh │ ├── liferay │ ├── Dockerfile │ └── resources │ │ ├── opt │ │ └── liferay │ │ │ └── cluster-link-tcp.xml │ │ └── usr │ │ └── local │ │ ├── bin │ │ └── install_patch_on_build.sh │ │ └── liferay │ │ └── scripts │ │ ├── pre-configure │ │ └── 01_fetch_orca_secrets.sh │ │ └── pre-startup │ │ └── 10_wait_for_dependencies.sh │ ├── log-proxy │ ├── Dockerfile │ ├── build.sh │ └── modules.go │ ├── log-server │ ├── Dockerfile │ └── resources │ │ └── etc │ │ └── syslog-ng │ │ └── syslog-ng.conf │ ├── monitoring-gateway │ └── Dockerfile │ ├── monitoring-proxy-db │ └── Dockerfile │ ├── monitoring-proxy │ └── Dockerfile │ ├── search │ └── Dockerfile │ ├── teleport-agent-test 
│ └── Dockerfile │ ├── teleport-proxy │ ├── Dockerfile │ └── resources │ │ ├── etc │ │ └── teleport │ │ │ └── teleport.yaml │ │ ├── root │ │ └── github.yaml.tpl │ │ └── usr │ │ └── local │ │ └── bin │ │ └── teleport_server_entrypoint.sh │ ├── vault │ ├── Dockerfile │ └── resources │ │ ├── opt │ │ └── liferay │ │ │ └── vault │ │ │ ├── config.hcl │ │ │ ├── policy_backup.hcl │ │ │ ├── policy_db.hcl │ │ │ ├── policy_liferay.hcl │ │ │ └── policy_shared.hcl │ │ └── usr │ │ └── local │ │ └── bin │ │ ├── _common.sh │ │ ├── auto_unseal.sh │ │ ├── init_secrets.sh │ │ └── vault_entrypoint.sh │ └── web-server │ ├── Dockerfile │ └── resources │ └── usr │ └── local │ └── bin │ └── web_server_entrypoint.sh ├── patching_tool_version.sh ├── release ├── .gitignore ├── _bom.sh ├── _ci.sh ├── _git.sh ├── _github.sh ├── _hotfix.sh ├── _jdk.sh ├── _jira.sh ├── _package.sh ├── _patcher.sh ├── _product.sh ├── _product_info_json.sh ├── _promotion.sh ├── _publishing.sh ├── _releases_json.sh ├── bin │ └── com.liferay.workspace.bundle.url.codec.jar ├── build_release.sh ├── clean.sh ├── rebuild_bom_files.sh ├── rebuild_releases_json.sh ├── release_gold.sh ├── scan_docker_images.sh ├── supported-dxp-versions.txt ├── supported-portal-versions.txt ├── templates │ ├── release.api.pom.tpl │ ├── release.bom.compile.only.pom.tpl │ ├── release.bom.pom.tpl │ ├── release.bom.third.party.pom.tpl │ └── release.distro.pom.tpl ├── test-dependencies │ ├── actual │ │ ├── build.properties │ │ ├── bundles.yml │ │ ├── dxp.html │ │ ├── liferay-portal-ee │ │ │ └── release.properties │ │ ├── portal.html │ │ └── releases.json │ └── expected │ │ ├── liferay-portal-ee │ │ └── lib │ │ │ ├── development │ │ │ ├── activation.jar │ │ │ └── mail.jar │ │ │ └── portal │ │ │ ├── axis.jar │ │ │ ├── commons-discovery.jar │ │ │ ├── commons-logging.jar │ │ │ ├── jaxrpc.jar │ │ │ ├── portal-client.jar │ │ │ ├── saaj-api.jar │ │ │ ├── saaj-impl.jar │ │ │ └── wsdl4j.jar │ │ ├── releases.json │ │ ├── test.bom.dxp.release.bom.api.pom 
│ │ ├── test.bom.dxp.release.bom.compile.only.pom │ │ ├── test.bom.dxp.release.bom.distro.pom │ │ ├── test.bom.dxp.release.bom.pom │ │ ├── test.bom.dxp.release.bom.third.party.pom │ │ ├── test.bom.portal.release.bom.api.pom │ │ ├── test.bom.portal.release.bom.compile.only.pom │ │ ├── test.bom.portal.release.bom.distro.pom │ │ ├── test.bom.portal.release.bom.pom │ │ ├── test.bom.portal.release.bom.third.party.pom │ │ ├── test_publishing_bundles.yml │ │ ├── test_publishing_liferay-dxp-client-7.3.10-u36.txt │ │ ├── test_publishing_liferay-dxp-dependencies-7.3.10-u36.txt │ │ ├── test_release_gold_build_2024_q3_13.properties │ │ ├── test_release_gold_build_2025_q1_1-lts.properties │ │ ├── test_release_gold_build_2025_q2_1.properties │ │ ├── test_release_gold_check_usage_output.txt │ │ └── test_scan_docker_images_without_parameters_output.txt ├── test_bom.sh ├── test_ci.sh ├── test_git.sh ├── test_jdk.sh ├── test_package.sh ├── test_product.sh ├── test_promotion.sh ├── test_publishing.sh ├── test_release_gold.sh ├── test_releases_json.sh ├── test_releases_properties.sh ├── test_scan_docker_images.sh └── track_release_blockers.sh ├── release_notes.sh ├── run_tests.sh ├── setup_lefthook.sh ├── templates ├── _byoos │ └── Dockerfile ├── _common │ └── resources │ │ └── etc │ │ └── .gitkeep ├── _jdk │ ├── Dockerfile │ └── resources │ │ ├── home │ │ └── liferay │ │ │ └── .bashrc │ │ └── usr │ │ └── local │ │ └── bin │ │ └── set_java_version.sh ├── base │ ├── Dockerfile │ └── resources │ │ └── usr │ │ └── local │ │ └── bin │ │ ├── attach_ubuntu_pro_subscription.sh │ │ └── update_ubuntu.sh ├── batch │ ├── Dockerfile │ └── resources │ │ └── usr │ │ └── local │ │ └── bin │ │ └── liferay_batch_entrypoint.sh ├── bundle │ ├── Dockerfile │ └── resources │ │ ├── opt │ │ └── liferay │ │ │ └── container_status │ │ └── usr │ │ └── local │ │ └── bin │ │ ├── _liferay_bundle_common.sh │ │ ├── _liferay_common.sh │ │ ├── configure_liferay.sh │ │ ├── generate_database_report.sh │ │ ├── 
generate_heap_dump.sh │ │ ├── generate_thread_dump.sh │ │ ├── inspect_document_library.sh │ │ ├── liferay_entrypoint.sh │ │ ├── monitor_liferay_lifecycle.sh │ │ ├── patch_liferay.sh │ │ ├── probe_container_lifecycle.sh │ │ ├── probe_thread_dump.sh │ │ ├── start_liferay.sh │ │ └── startup_lock.sh ├── caddy │ ├── Dockerfile │ └── resources │ │ ├── etc │ │ └── caddy │ │ │ └── Caddyfile │ │ └── usr │ │ └── local │ │ └── bin │ │ └── liferay_caddy_entrypoint.sh ├── jar-runner │ ├── Dockerfile │ └── resources │ │ └── usr │ │ └── local │ │ └── bin │ │ └── liferay_jar_runner_entrypoint.sh ├── job-runner │ ├── Dockerfile │ └── resources │ │ └── usr │ │ └── local │ │ └── bin │ │ ├── job_wrapper.sh │ │ ├── liferay_job_runner_entrypoint.sh │ │ └── register_job.sh ├── node-runner │ ├── Dockerfile │ └── resources │ │ └── usr │ │ └── local │ │ └── bin │ │ └── liferay_node_runner_entrypoint.sh ├── noop │ ├── Dockerfile │ └── resources │ │ ├── noop │ │ └── noop.asm ├── squid │ ├── Dockerfile │ └── resources │ │ ├── etc │ │ └── squid │ │ │ └── squid.conf │ │ └── usr │ │ └── local │ │ └── bin │ │ └── liferay_squid_entrypoint.sh ├── test │ └── resources │ │ └── mnt │ │ └── liferay │ │ ├── files │ │ └── tomcat │ │ │ └── webapps │ │ │ └── ROOT │ │ │ └── test_docker_image_files.jsp │ │ └── scripts │ │ ├── test_docker_image_scripts_1.sh │ │ └── test_docker_image_scripts_2.sh ├── zabbix-server │ └── Dockerfile └── zabbix-web │ └── Dockerfile ├── test_build_all_images.sh ├── test_build_bundle_image.sh ├── test_bundle_image.sh ├── test_patching_tool_version.sh ├── test_release_common.sh └── update_permissions.sh /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG VARIANT="jammy" 2 | 3 | FROM mcr.microsoft.com/vscode/devcontainers/base:0-${VARIANT} 4 | 5 | RUN export DEBIAN_FRONTEND=noninteractive && \ 6 | apt-get update && \ 7 | apt-get --yes install p7zip-full time 8 | 9 | RUN wget 
https://github.com/mikefarah/yq/releases/download/v4.25.1/yq_linux_amd64 -O /usr/bin/yq && \ 10 | chmod +x /usr/bin/yq -------------------------------------------------------------------------------- /.devcontainer/README.md: -------------------------------------------------------------------------------- 1 | # Dev Container 2 | 3 | ## GitHub Codespaces 4 | 5 | Read https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/setting-up-your-project-for-codespaces. 6 | 7 | ## Visual Studio Code 8 | 9 | Read https://code.visualstudio.com/docs/remote/containers. -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "build": { 3 | "dockerfile": "Dockerfile" 4 | }, 5 | "extensions": [ 6 | "ms-azuretools.vscode-docker" 7 | ], 8 | "features": { 9 | "docker-in-docker": "latest", 10 | "git": "os-provided", 11 | "java": "8" 12 | }, 13 | "name": "Ubuntu", 14 | "remoteUser": "vscode" 15 | } -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | indent_size = 4 6 | indent_style = tab 7 | insert_final_newline = false 8 | trim_trailing_whitespace = true 9 | 10 | [**.{js}] 11 | indent_size = 4 12 | indent_style = tab 13 | 14 | [**.{y(a)?ml}] 15 | indent_style = space -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | release: 3 | env: 4 | LIFERAY_RELEASE_GITHUB_PAT: ${{ secrets.LIFERAY_RELEASE_GITHUB_PAT }} 5 | if: (github.repository == 'liferay-release/liferay-docker') 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: Checkout code 9 | uses: actions/checkout@v2 10 | - name: Execute 
run_tests.sh 11 | run: ./run_tests.sh 12 | shell: bash 13 | name: Release 14 | on: 15 | pull_request: 16 | types: [opened, reopened] -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.iml 2 | .DS_Store 3 | .idea 4 | downloads 5 | logs-* 6 | node_modules 7 | package-lock.json 8 | package.json 9 | temp-* 10 | templates/_common/resources/etc/created-date -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | bracketSpacing: false, 3 | quoteProps: 'consistent', 4 | singleQuote: true, 5 | tabWidth: 4, 6 | trailingComma: 'es5', 7 | useTabs: true, 8 | }; 9 | -------------------------------------------------------------------------------- /.shellcheckrc: -------------------------------------------------------------------------------- 1 | # 2 | # For example, go to https://github.com/koalaman/shellcheck/wiki/SC2236 for more 3 | # information about SC2236. 
4 | # 5 | 6 | disable=SC1090 7 | disable=SC1091 8 | disable=SC2001 9 | disable=SC2016 10 | disable=SC2034 11 | disable=SC2046 12 | disable=SC2089 13 | disable=SC2090 14 | disable=SC2116 15 | disable=SC2119 16 | disable=SC2143 17 | disable=SC2153 18 | disable=SC2155 19 | disable=SC2181 20 | disable=SC2199 21 | disable=SC2236 -------------------------------------------------------------------------------- /Puppetfile: -------------------------------------------------------------------------------- 1 | narwhal/puppet/Puppetfile -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Documentation 2 | 3 | The documentation for the Liferay Docker images is available on the Docker Hub pages of each project: 4 | 5 | - [liferay/dxp](https://hub.docker.com/r/liferay/dxp) 6 | - [liferay/portal](https://hub.docker.com/r/liferay/portal) 7 | 8 | # Issue tracking 9 | 10 | If you have issues with the Docker Image scripts, please report an [DOCKER ticket](https://issues.liferay.com/browse/DOCKER) in Liferay's issue management system. 
-------------------------------------------------------------------------------- /build_base_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | DOCKER_IMAGE_TAGS=() 9 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/base:${image_version}-${TIMESTAMP}") 10 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/base") 11 | 12 | if [ "${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay Base" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg LABEL_VERSION="${image_version}" \ 22 | --builder "liferay-buildkit" \ 23 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 24 | --push \ 25 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 26 | "${TEMP_DIR}" || exit 1 27 | else 28 | remove_temp_dockerfile_target_platform 29 | 30 | docker build \ 31 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 32 | --build-arg LABEL_NAME="Liferay Base" \ 33 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 34 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 35 | --build-arg LABEL_VERSION="${image_version}" \ 36 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 37 | "${TEMP_DIR}" || exit 1 38 | fi 39 | } 40 | 41 | function main { 42 | delete_local_images "${LIFERAY_DOCKER_REPOSITORY}/base" 43 | 44 | make_temp_directory templates/base 45 | 46 | log_in_to_docker_hub 47 | 48 | build_docker_image "${1}" 49 | 50 | clean_up_temp_directory 51 | } 52 | 53 | main "${@}" -------------------------------------------------------------------------------- /build_batch_image.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | DOCKER_IMAGE_TAGS=() 9 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/batch:${image_version}-${TIMESTAMP}") 10 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/batch") 11 | 12 | if [ "${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay Batch" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg LABEL_VERSION="${image_version}" \ 22 | --builder "liferay-buildkit" \ 23 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 24 | --push \ 25 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 26 | "${TEMP_DIR}" || exit 1 27 | else 28 | remove_temp_dockerfile_target_platform 29 | 30 | docker build \ 31 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 32 | --build-arg LABEL_NAME="Liferay Batch" \ 33 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 34 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 35 | --build-arg LABEL_VERSION="${image_version}" \ 36 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 37 | "${TEMP_DIR}" || exit 1 38 | fi 39 | } 40 | 41 | function main { 42 | delete_local_images "${LIFERAY_DOCKER_REPOSITORY}/batch" 43 | 44 | make_temp_directory templates/batch 45 | 46 | log_in_to_docker_hub 47 | 48 | build_docker_image "${1}" 49 | 50 | clean_up_temp_directory 51 | } 52 | 53 | main "${@}" -------------------------------------------------------------------------------- /build_caddy_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source 
./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | DOCKER_IMAGE_TAGS=() 9 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/caddy:${image_version}-${TIMESTAMP}") 10 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/caddy") 11 | 12 | if [ "${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay Caddy" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg LABEL_VERSION="${image_version}" \ 22 | --builder "liferay-buildkit" \ 23 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 24 | --push \ 25 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 26 | "${TEMP_DIR}" || exit 1 27 | else 28 | remove_temp_dockerfile_target_platform 29 | 30 | docker build \ 31 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 32 | --build-arg LABEL_NAME="Liferay Caddy" \ 33 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 34 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 35 | --build-arg LABEL_VERSION="${image_version}" \ 36 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 37 | "${TEMP_DIR}" || exit 1 38 | fi 39 | } 40 | 41 | function main { 42 | make_temp_directory templates/caddy 43 | 44 | log_in_to_docker_hub 45 | 46 | build_docker_image "${1}" 47 | 48 | clean_up_temp_directory 49 | } 50 | 51 | main "${@}" -------------------------------------------------------------------------------- /build_dynamic_rendering_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | DOCKER_IMAGE_TAGS=() 9 | 
DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/dynamic-rendering:${image_version}-${TIMESTAMP}") 10 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/dynamic-rendering") 11 | 12 | if [ "${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay Dynamic Rendering" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg LABEL_VERSION="${image_version}" \ 22 | --builder "liferay-buildkit" \ 23 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 24 | --push \ 25 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 26 | "${TEMP_DIR}" || exit 1 27 | else 28 | remove_temp_dockerfile_target_platform 29 | 30 | docker build \ 31 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 32 | --build-arg LABEL_NAME="Liferay Dynamic Rendering" \ 33 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 34 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 35 | --build-arg LABEL_VERSION="${image_version}" \ 36 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 37 | "${TEMP_DIR}" || exit 1 38 | fi 39 | } 40 | 41 | function main { 42 | make_temp_directory templates/dynamic-rendering 43 | 44 | log_in_to_docker_hub 45 | 46 | build_docker_image "${1}" 47 | 48 | clean_up_temp_directory 49 | } 50 | 51 | main "${@}" -------------------------------------------------------------------------------- /build_jar_runner_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | DOCKER_IMAGE_TAGS=() 9 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/jar-runner:${image_version}-${TIMESTAMP}") 10 | 
DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/jar-runner") 11 | 12 | if [ "${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay JAR Runner" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg LABEL_VERSION="${image_version}" \ 22 | --builder "liferay-buildkit" \ 23 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 24 | --push \ 25 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 26 | "${TEMP_DIR}" || exit 1 27 | else 28 | remove_temp_dockerfile_target_platform 29 | 30 | docker build \ 31 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 32 | --build-arg LABEL_NAME="Liferay JAR Runner" \ 33 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 34 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 35 | --build-arg LABEL_VERSION="${image_version}" \ 36 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 37 | "${TEMP_DIR}" || exit 1 38 | fi 39 | } 40 | 41 | function main { 42 | delete_local_images "${LIFERAY_DOCKER_REPOSITORY}/jar-runner" 43 | 44 | make_temp_directory templates/jar-runner 45 | 46 | log_in_to_docker_hub 47 | 48 | build_docker_image "${1}" 49 | 50 | clean_up_temp_directory 51 | } 52 | 53 | main "${@}" -------------------------------------------------------------------------------- /build_jdk11_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | source ./_build_jdk_image.sh 6 | 7 | function main { 8 | _build_docker_image "${1}" "JDK11" "base" "jdk11" "11" 9 | } 10 | 11 | main "${@}" -------------------------------------------------------------------------------- /build_jdk11_jdk8_image.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | source ./_build_jdk_image.sh 6 | 7 | function main { 8 | _build_docker_image "${1}" "JDK11 JDK8" "jdk11" "jdk11-jdk8" "8" 9 | } 10 | 11 | main "${@}" -------------------------------------------------------------------------------- /build_jdk21_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | source ./_build_jdk_image.sh 6 | 7 | function main { 8 | _build_docker_image "${1}" "JDK21" "base" "jdk21" "21" 9 | } 10 | 11 | main "${@}" -------------------------------------------------------------------------------- /build_jdk21_jdk11_jdk8_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | source ./_build_jdk_image.sh 6 | 7 | function main { 8 | _build_docker_image "${1}" "JDK21 JDK11 JDK8" "jdk11-jdk8" "jdk21-jdk11-jdk8" "21" 9 | } 10 | 11 | main "${@}" -------------------------------------------------------------------------------- /build_job_runner_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | DOCKER_IMAGE_TAGS=() 9 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/job-runner:${image_version}-${TIMESTAMP}") 10 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/job-runner") 11 | 12 | if [ "${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay Job Runner" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg 
LABEL_VERSION="${image_version}" \ 22 | --builder "liferay-buildkit" \ 23 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 24 | --push \ 25 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 26 | "${TEMP_DIR}" || exit 1 27 | else 28 | remove_temp_dockerfile_target_platform 29 | 30 | docker build \ 31 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 32 | --build-arg LABEL_NAME="Liferay Job Runner" \ 33 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 34 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 35 | --build-arg LABEL_VERSION="${image_version}" \ 36 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 37 | "${TEMP_DIR}" || exit 1 38 | fi 39 | } 40 | 41 | function main { 42 | delete_local_images "${LIFERAY_DOCKER_REPOSITORY}/job-runner" 43 | 44 | make_temp_directory templates/job-runner 45 | 46 | log_in_to_docker_hub 47 | 48 | build_docker_image "${1}" 49 | 50 | clean_up_temp_directory 51 | } 52 | 53 | main "${@}" -------------------------------------------------------------------------------- /build_nightly.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | while true 4 | do 5 | git pull origin master 6 | 7 | if [ $(date +%w) == 0 ] 8 | then 9 | docker system prune --all --force 10 | 11 | git clean -dfx 12 | 13 | LIFERAY_DOCKER_DEVELOPER_MODE=true LIFERAY_DOCKER_IMAGE_FILTER=7.4.13.nightly ./build_all_images.sh --push 14 | else 15 | LIFERAY_DOCKER_IMAGE_FILTER=7.4.13.nightly ./build_all_images.sh --push 16 | fi 17 | 18 | echo "" 19 | echo `date` 20 | echo "" 21 | 22 | sleep 1d 23 | done -------------------------------------------------------------------------------- /build_node_runner_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | 
DOCKER_IMAGE_TAGS=() 9 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/node-runner:${image_version}-${TIMESTAMP}") 10 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/node-runner") 11 | 12 | if [ "${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay Node Runner" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg LABEL_VERSION="${image_version}" \ 22 | --builder "liferay-buildkit" \ 23 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 24 | --push \ 25 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 26 | "${TEMP_DIR}" || exit 1 27 | else 28 | remove_temp_dockerfile_target_platform 29 | 30 | docker build \ 31 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 32 | --build-arg LABEL_NAME="Liferay Node Runner" \ 33 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 34 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 35 | --build-arg LABEL_VERSION="${image_version}" \ 36 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 37 | "${TEMP_DIR}" || exit 1 38 | fi 39 | } 40 | 41 | function main { 42 | delete_local_images "${LIFERAY_DOCKER_REPOSITORY}/node-runner" 43 | 44 | make_temp_directory templates/node-runner 45 | 46 | log_in_to_docker_hub 47 | 48 | build_docker_image "${1}" 49 | 50 | clean_up_temp_directory 51 | } 52 | 53 | main "${@}" -------------------------------------------------------------------------------- /build_noop_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | DOCKER_IMAGE_TAGS=() 9 | 
DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/noop:${image_version}-${TIMESTAMP}") 10 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/noop") 11 | 12 | if [ "${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay NOOP" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg LABEL_VERSION="${image_version}" \ 22 | --builder "liferay-buildkit" \ 23 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 24 | --push \ 25 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 26 | "${TEMP_DIR}" || exit 1 27 | else 28 | remove_temp_dockerfile_target_platform 29 | 30 | docker build \ 31 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 32 | --build-arg LABEL_NAME="Liferay NOOP" \ 33 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 34 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 35 | --build-arg LABEL_VERSION="${image_version}" \ 36 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 37 | "${TEMP_DIR}" || exit 1 38 | fi 39 | } 40 | 41 | function main { 42 | delete_local_images "${LIFERAY_DOCKER_REPOSITORY}/noop" 43 | 44 | make_temp_directory templates/noop 45 | 46 | log_in_to_docker_hub 47 | 48 | build_docker_image "${1}" 49 | 50 | clean_up_temp_directory 51 | } 52 | 53 | main "${@}" -------------------------------------------------------------------------------- /build_squid_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | DOCKER_IMAGE_TAGS=() 9 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/squid:${image_version}-${TIMESTAMP}") 10 | 
DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/squid") 11 | 12 | if [ "${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay Squid" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg LABEL_VERSION="${image_version}" \ 22 | --builder "liferay-buildkit" \ 23 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 24 | --push \ 25 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 26 | "${TEMP_DIR}" || exit 1 27 | else 28 | remove_temp_dockerfile_target_platform 29 | 30 | docker build \ 31 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 32 | --build-arg LABEL_NAME="Liferay Squid" \ 33 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 34 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 35 | --build-arg LABEL_VERSION="${image_version}" \ 36 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 37 | "${TEMP_DIR}" || exit 1 38 | fi 39 | } 40 | 41 | function main { 42 | make_temp_directory templates/squid 43 | 44 | log_in_to_docker_hub 45 | 46 | build_docker_image "${1}" 47 | 48 | clean_up_temp_directory 49 | } 50 | 51 | main "${@}" -------------------------------------------------------------------------------- /build_zabbix_server_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | DOCKER_IMAGE_TAGS=() 9 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/zabbix-server:${image_version}-${TIMESTAMP}") 10 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/zabbix-server") 11 | 12 | if [ "${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 
17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay Zabbix Server" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg LABEL_VERSION="${image_version}" \ 22 | --build-arg LABEL_ZABBIX_VERSION="${LIFERAY_DOCKER_ZABBIX_VERSION}" \ 23 | --builder "liferay-buildkit" \ 24 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 25 | --push \ 26 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 27 | "${TEMP_DIR}" || exit 1 28 | else 29 | remove_temp_dockerfile_target_platform 30 | 31 | docker build \ 32 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 33 | --build-arg LABEL_NAME="Liferay Zabbix Server" \ 34 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 35 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 36 | --build-arg LABEL_VERSION="${image_version}" \ 37 | --build-arg LABEL_ZABBIX_VERSION="${LIFERAY_DOCKER_ZABBIX_VERSION}" \ 38 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 39 | "${TEMP_DIR}" || exit 1 40 | fi 41 | } 42 | 43 | function main { 44 | delete_local_images "${LIFERAY_DOCKER_REPOSITORY}/zabbix-server" 45 | 46 | make_temp_directory templates/zabbix-server 47 | 48 | log_in_to_docker_hub 49 | 50 | build_docker_image "${1}" 51 | 52 | clean_up_temp_directory 53 | } 54 | 55 | main "${@}" -------------------------------------------------------------------------------- /build_zabbix_web_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | 5 | function build_docker_image { 6 | local image_version=$(./release_notes.sh get-version) 7 | 8 | DOCKER_IMAGE_TAGS=() 9 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/zabbix-web:${image_version}-${TIMESTAMP}") 10 | DOCKER_IMAGE_TAGS+=("${LIFERAY_DOCKER_REPOSITORY}/zabbix-web") 11 | 12 | if [ 
"${1}" == "push" ] 13 | then 14 | check_docker_buildx 15 | 16 | docker buildx build \ 17 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 18 | --build-arg LABEL_NAME="Liferay Zabbix Web" \ 19 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 20 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 21 | --build-arg LABEL_VERSION="${image_version}" \ 22 | --build-arg LABEL_ZABBIX_VERSION="${LIFERAY_DOCKER_ZABBIX_VERSION}" \ 23 | --builder "liferay-buildkit" \ 24 | --platform "${LIFERAY_DOCKER_IMAGE_PLATFORMS}" \ 25 | --push \ 26 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 27 | "${TEMP_DIR}" || exit 1 28 | else 29 | remove_temp_dockerfile_target_platform 30 | 31 | docker build \ 32 | --build-arg LABEL_BUILD_DATE=$(date "${CURRENT_DATE}" "+%Y-%m-%dT%H:%M:%SZ") \ 33 | --build-arg LABEL_NAME="Liferay Zabbix Web" \ 34 | --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \ 35 | --build-arg LABEL_VCS_URL="https://github.com/liferay/liferay-docker" \ 36 | --build-arg LABEL_VERSION="${image_version}" \ 37 | --build-arg LABEL_ZABBIX_VERSION="${LIFERAY_DOCKER_ZABBIX_VERSION}" \ 38 | $(get_docker_image_tags_args "${DOCKER_IMAGE_TAGS[@]}") \ 39 | "${TEMP_DIR}" || exit 1 40 | fi 41 | } 42 | 43 | function main { 44 | delete_local_images "${LIFERAY_DOCKER_REPOSITORY}/zabbix-web" 45 | 46 | make_temp_directory templates/zabbix-web 47 | 48 | log_in_to_docker_hub 49 | 50 | build_docker_image "${1}" 51 | 52 | clean_up_temp_directory 53 | } 54 | 55 | main "${@}" -------------------------------------------------------------------------------- /demos/job-runner/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | job-runner: 3 | environment: 4 | "EVERY_THREE_MINUTES": "*/3 * * * *" 5 | image: liferay/job-runner:latest 6 | volumes: 7 | - ./files:/mnt/liferay -------------------------------------------------------------------------------- 
/demos/job-runner/files/job-crontab: -------------------------------------------------------------------------------- 1 | * * * * * /usr/local/bin/register_job.sh every_minute 2 | */2 * * * * /usr/local/bin/register_job.sh every_two_minutes 3 | ${EVERY_THREE_MINUTES} /usr/local/bin/register_job.sh every_three_minutes 4 | @reboot /usr/local/bin/register_job.sh init 5 | -------------------------------------------------------------------------------- /demos/job-runner/files/jobs/every_minute.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Run ${0} and sleep for 5 seconds." 4 | 5 | sleep 5 -------------------------------------------------------------------------------- /demos/job-runner/files/jobs/every_three_minutes.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Run ${0} and sleep for 5 seconds." 4 | 5 | sleep 5 -------------------------------------------------------------------------------- /demos/job-runner/files/jobs/every_two_minutes.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Run ${0} and sleep for 5 seconds." 4 | 5 | sleep 5 -------------------------------------------------------------------------------- /demos/job-runner/files/jobs/init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Initialize." -------------------------------------------------------------------------------- /download_trial_dxp_license.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function main { 4 | local license_dir=${1} 5 | local license_start_date=${2} 6 | 7 | if [ -z "${LIFERAY_DOCKER_LICENSE_API_HEADER}" ] 8 | then 9 | echo "Set the environment variable LIFERAY_DOCKER_LICENSE_API_HEADER to generate a trial DXP license." 
10 | 11 | exit 1 12 | elif [ -z "${LIFERAY_DOCKER_LICENSE_API_URL}" ] 13 | then 14 | echo "Set the environment variable LIFERAY_DOCKER_LICENSE_API_URL to generate a trial DXP license." 15 | 16 | exit 1 17 | else 18 | mkdir -p "${license_dir}/deploy" 19 | 20 | local license_file_name="trial-dxp-license-${license_start_date}.xml" 21 | 22 | curl --header "${LIFERAY_DOCKER_LICENSE_API_HEADER}" --silent "${LIFERAY_DOCKER_LICENSE_API_URL}?licenseLifetime=$((1000 *60 * 60 * 24 * 90))&startDate=${license_start_date}&owner=docker%40liferay.com" > "${license_dir}/deploy/${license_file_name}.json" 23 | 24 | sed "s/\\\n//g" "${license_dir}/deploy/${license_file_name}.json" | 25 | sed "s/\\\t//g" | 26 | sed "s/\"\"/license>/" | 28 | sed 's/\\"/\"/g' | 29 | sed 's/\\\//\//g' > "${license_dir}/deploy/${license_file_name}" 30 | 31 | rm -f "${license_dir}/deploy/${license_file_name}.json" 32 | 33 | if [ ! -e "${license_dir}/deploy/${license_file_name}" ] 34 | then 35 | echo "Trial DXP license does not exist at ${license_dir}/deploy/${license_file_name}." 36 | 37 | exit 1 38 | elif ! grep -q "docker@liferay.com" "${license_dir}/deploy/${license_file_name}" 39 | then 40 | echo "Invalid trial DXP license exists at ${license_dir}/deploy/${license_file_name}." 41 | 42 | exit 1 43 | else 44 | echo "Valid Trial DXP license exists at ${license_dir}/deploy/${license_file_name}." 
45 | fi 46 | fi 47 | } 48 | 49 | main "${@}" -------------------------------------------------------------------------------- /lefthook.yml: -------------------------------------------------------------------------------- 1 | pre-commit: 2 | commands: 3 | "shellchecker": 4 | glob: "*.sh" 5 | run: docker run --rm -v "$PWD:/mnt" koalaman/shellcheck:v0.8.0 {staged_files} -------------------------------------------------------------------------------- /narwhal/.gitignore: -------------------------------------------------------------------------------- 1 | **/*.class 2 | keystore-* -------------------------------------------------------------------------------- /narwhal/README.md: -------------------------------------------------------------------------------- 1 | # Release / hotfix builder 2 | 3 | ## Parameters 4 | 5 | See templates/release-builder/Dockerfile 6 | 7 | ## Other configuration 8 | - Add a release to the test_release folder as a .7z to test against 9 | - Delete the build folder after every build to get clean results -------------------------------------------------------------------------------- /narwhal/clean.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | rm -fr ~/.release-builder-cache/build -------------------------------------------------------------------------------- /narwhal/compose_samples/README-all.md: -------------------------------------------------------------------------------- 1 | # Preparation 2 | 3 | ## Copy license and replace IPs 4 | 5 | Add the Liferay DXP license to the `license.xml` file and edit the IP addresses of the servers in the `env.servers` file. 6 | 7 | ``` 8 | ./copy_license.sh 9 | ./replace_ips.sh 10 | ``` 11 | 12 | ## Deploy configuration 13 | 14 | Copy each `server-*` directory to its corresponding server. Review the docker-compose files and add the missing details (e.g. passwords).
15 | 16 | ## Prepare the server environment 17 | 18 | Adjust the OS requirements: 19 | 20 | ``` 21 | ./pre.sh 22 | ``` 23 | 24 | # Start services 25 | 26 | ``` 27 | docker compose up 28 | 29 | ``` 30 | -------------------------------------------------------------------------------- /narwhal/compose_samples/copy_license.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cp -f license.xml server-2/liferay/resources/opt/liferay/deploy/ 4 | cp -f license.xml server-3/liferay/resources/opt/liferay/deploy/ 5 | -------------------------------------------------------------------------------- /narwhal/compose_samples/env.servers: -------------------------------------------------------------------------------- 1 | # Map server names to IP addresses 2 | 3 | __SERVER_1__="10.131.124.6" 4 | __SERVER_2__="10.131.124.70" 5 | __SERVER_3__="10.131.124.149" 6 | __SERVER_4__="10.131.124.183" 7 | -------------------------------------------------------------------------------- /narwhal/compose_samples/license.xml: -------------------------------------------------------------------------------- 1 | REPLACE THIS TEXT WITH THE LIFERAY LICENSE 2 | -------------------------------------------------------------------------------- /narwhal/compose_samples/replace_ips.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # shellcheck source=env.servers 4 | source env.servers 5 | 6 | find server-* -type f -exec sed -i \ 7 | -e "s/__SERVER_1__/${__SERVER_1__}/" \ 8 | -e "s/__SERVER_2__/${__SERVER_2__}/" \ 9 | -e "s/__SERVER_3__/${__SERVER_3__}/" \ 10 | -e "s/__SERVER_4__/${__SERVER_4__}/" \ 11 | {} \; 12 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-1/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | db: 3 | container_name: db 4 | environment: 5 | - 
MARIADB_DATABASE=lportal 6 | - MARIADB_EXTRA_FLAGS=--character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci 7 | - MARIADB_PASSWORD= 8 | - MARIADB_ROOT_HOST=localhost 9 | - MARIADB_ROOT_PASSWORD= 10 | - MARIADB_USER=lportal 11 | hostname: db-server-1 12 | image: mariadb:11.1 13 | ports: 14 | - "3306:3306" 15 | volumes: 16 | - /opt/liferay/db/data:/var/lib/mysql 17 | web-server: 18 | build: 19 | context: web-server 20 | container_name: web-server 21 | environment: 22 | - ORCA_WEB_SERVER_BALANCE_MEMBERS=__SERVER_2__:8009,__SERVER_3__:8009 23 | hostname: web-server-server-1 24 | image: web-server 25 | ports: 26 | - "80:80" -------------------------------------------------------------------------------- /narwhal/compose_samples/server-1/pre.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | mkdir -p /opt/liferay/db/data 4 | chmod 0775 /opt/liferay/db/data 5 | chown 999:999 /opt/liferay/db/data 6 | 7 | sysctl -w vm.max_map_count=262144 8 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-1/web-server/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM liferay/base:4.1.0-20220613095221 2 | FROM liferay/base@sha256:69216037cf0ba5e1b1aeca38c2c038713ec29bf3d824e5ad7a0debc0a822dceb 3 | 4 | RUN apt-get update && \ 5 | DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get --yes install apache2 && \ 6 | apt-get upgrade --yes && \ 7 | apt-get clean && \ 8 | rm -fr /var/lib/apt/lists/* 9 | 10 | RUN a2enmod proxy_ajp && \ 11 | a2enmod proxy_balancer && \ 12 | a2enmod lbmethod_byrequests 13 | 14 | COPY resources/usr/local/bin /usr/local/bin/ 15 | 16 | ENTRYPOINT ["tini", "--", "/usr/local/bin/web_server_entrypoint.sh"] 17 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-1/web-server/resources/usr/local/bin/web_server_entrypoint.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function generate_liferay_conf { 4 | function write { 5 | echo "${1}" >> "/etc/apache2/sites-available/liferay.conf" 6 | } 7 | 8 | rm -f "/etc/apache2/sites-available/liferay.conf" 9 | 10 | write "" 11 | write " CustomLog /proc/self/fd/1 vhost_combined" 12 | write " DocumentRoot /var/www/html" 13 | write " ErrorLog /proc/self/fd/2" 14 | write " ProxyPreserveHost On" 15 | write " ProxyPass \"/\" \"balancer://cluster/\"" 16 | write " ServerAdmin webmaster@localhost" 17 | write "" 18 | write " " 19 | 20 | for balance_member in ${ORCA_WEB_SERVER_BALANCE_MEMBERS//,/ } 21 | do 22 | local ajp_address="${balance_member##*::}" 23 | local route="${balance_member%%::*}" 24 | 25 | write " BalancerMember \"ajp://${ajp_address}\" loadfactor=1 route=${route}" 26 | done 27 | 28 | write " ProxySet stickysession=JSESSIONID" 29 | write " " 30 | write "" 31 | 32 | echo "Generated /etc/apache2/sites-available/liferay.conf:" 33 | echo "" 34 | 35 | cat /etc/apache2/sites-available/liferay.conf 36 | } 37 | 38 | function main { 39 | generate_liferay_conf 40 | 41 | set_up_sites 42 | 43 | start_apache2 44 | } 45 | 46 | function set_up_sites { 47 | a2dissite "000-default.conf" 48 | a2ensite "liferay.conf" 49 | } 50 | 51 | function start_apache2 { 52 | mkdir /var/run/apache2 53 | 54 | chown www-data:www-data /var/run/apache2 55 | 56 | # shellcheck disable=SC1091 57 | source /etc/apache2/envvars 58 | 59 | /usr/sbin/apache2 -DFOREGROUND 60 | } 61 | 62 | main 63 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-2/liferay/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM liferay/dxp:2023.q3.2 2 | FROM liferay/dxp@sha256:d56c597df45b9fabf90e60b7cfe7d39b3df82e90f8c61d4e131ef2ce168858bd 3 | 4 | USER 0 5 | 6 | RUN apt-get update && \ 7 | DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC 
apt-get install --no-install-recommends --yes mariadb-client && \ 8 | apt-get upgrade --yes && \ 9 | apt-get clean 10 | 11 | COPY --chown=liferay:liferay resources/opt/liferay /opt/liferay/ 12 | COPY resources/usr/local/bin /usr/local/bin/ 13 | COPY resources/usr/local/liferay/scripts /usr/local/liferay/scripts/ 14 | 15 | HEALTHCHECK --retries=3 \ 16 | CMD /usr/local/bin/probe_thread_dump.sh -d "http://localhost" -f "/c/portal/robots" -p 8080 -t 20 17 | 18 | USER liferay 19 | 20 | RUN /usr/local/bin/install_patch_on_build.sh 21 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-2/liferay/resources/opt/liferay/cluster-link-tcp.xml: -------------------------------------------------------------------------------- 1 | 6 | 17 | 23 | 27 | 28 | 32 | 35 | 36 | 40 | 41 | 45 | 49 | 53 | 57 | 60 | 61 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-2/liferay/resources/usr/local/bin/install_patch_on_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ $(find "/opt/liferay/patching-tool/patches" -maxdepth 1 -name "liferay-*.zip" -type f | wc -l) == 1 ] 4 | then 5 | /opt/liferay/patching-tool/patching-tool.sh install 6 | 7 | rm -fr /opt/liferay/osgi/state 8 | fi -------------------------------------------------------------------------------- /narwhal/compose_samples/server-2/liferay/resources/usr/local/liferay/scripts/pre-startup/10_wait_for_dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function wait_for_mysql { 4 | local jdbc_driver_class_name="${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_DRIVER_UPPERCASEC_LASS_UPPERCASEN_AME}" 5 | 6 | if [[ "${jdbc_driver_class_name}" != *mariadb* ]] && [[ "${jdbc_driver_class_name}" != *mysql* ]] 7 | then 8 | return 9 | fi 10 | 11 | local 
db_host="${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_URL}" 12 | 13 | db_host="${db_host##*://}" 14 | db_host="${db_host%%/*}" 15 | db_host="${db_host%%:*}" 16 | 17 | local db_password=${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_PASSWORD} 18 | 19 | local db_username=${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_USERNAME} 20 | 21 | echo "Connecting to database server '${db_username}'@'${db_host}'." 22 | 23 | while ! (echo "select 1" | mysql -h "${db_host}" -p"${db_password}" -u "${db_username}" &>/dev/null) 24 | do 25 | echo "Waiting for database server '${db_username}'@'${db_host}'." 26 | 27 | sleep 3 28 | done 29 | 30 | echo "Database server '${db_username}'@'${db_host}' is available." 31 | } 32 | 33 | function wait_for_search { 34 | if [ ! -n "${ORCA_LIFERAY_SEARCH_ADDRESSES}" ] 35 | then 36 | echo "Do not wait for search server because the environment variable ORCA_LIFERAY_SEARCH_ADDRESSES was not set." 37 | 38 | return 39 | fi 40 | 41 | echo "Connecting to '${ORCA_LIFERAY_SEARCH_ADDRESSES}'." 42 | 43 | while true 44 | do 45 | for search_address in ${ORCA_LIFERAY_SEARCH_ADDRESSES//,/ } 46 | do 47 | if ( curl --max-time 3 --silent "${search_address}/_cat/health" | grep "green" &>/dev/null) 48 | then 49 | echo "Search server ${search_address} is available." 50 | 51 | return 52 | fi 53 | done 54 | 55 | echo "Waiting for at least one search server to become available." 
56 | 57 | sleep 3 58 | done 59 | } 60 | 61 | function main { 62 | wait_for_mysql 63 | 64 | wait_for_search 65 | } 66 | 67 | main 68 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-2/pre.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | mkdir /opt/liferay/liferay/data 4 | chmod 0775 /opt/liferay/liferay/data 5 | chown 1000:1000 /opt/liferay/liferay/data 6 | 7 | sysctl -w vm.max_map_count=262144 8 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-2/search/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.elastic.co/elasticsearch/elasticsearch:7.17.14 2 | 3 | RUN /usr/share/elasticsearch/bin/elasticsearch-plugin install \ 4 | analysis-icu \ 5 | analysis-kuromoji \ 6 | analysis-smartcn \ 7 | analysis-stempel 8 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-3/liferay/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM liferay/dxp:2023.q3.2 2 | FROM liferay/dxp@sha256:d56c597df45b9fabf90e60b7cfe7d39b3df82e90f8c61d4e131ef2ce168858bd 3 | 4 | USER 0 5 | 6 | RUN apt-get update && \ 7 | DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install --no-install-recommends --yes mariadb-client && \ 8 | apt-get upgrade --yes && \ 9 | apt-get clean 10 | 11 | COPY --chown=liferay:liferay resources/opt/liferay /opt/liferay/ 12 | COPY resources/usr/local/bin /usr/local/bin/ 13 | COPY resources/usr/local/liferay/scripts /usr/local/liferay/scripts/ 14 | 15 | HEALTHCHECK --retries=3 \ 16 | CMD /usr/local/bin/probe_thread_dump.sh -d "http://localhost" -f "/c/portal/robots" -p 8080 -t 20 17 | 18 | USER liferay 19 | 20 | RUN /usr/local/bin/install_patch_on_build.sh 21 | 
-------------------------------------------------------------------------------- /narwhal/compose_samples/server-3/liferay/resources/opt/liferay/cluster-link-tcp.xml: -------------------------------------------------------------------------------- 1 | 6 | 17 | 23 | 27 | 28 | 32 | 35 | 36 | 40 | 41 | 45 | 49 | 53 | 57 | 60 | 61 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-3/liferay/resources/usr/local/bin/install_patch_on_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ $(find "/opt/liferay/patching-tool/patches" -maxdepth 1 -name "liferay-*.zip" -type f | wc -l) == 1 ] 4 | then 5 | /opt/liferay/patching-tool/patching-tool.sh install 6 | 7 | rm -fr /opt/liferay/osgi/state 8 | fi -------------------------------------------------------------------------------- /narwhal/compose_samples/server-3/liferay/resources/usr/local/liferay/scripts/pre-startup/10_wait_for_dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function wait_for_mysql { 4 | local jdbc_driver_class_name="${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_DRIVER_UPPERCASEC_LASS_UPPERCASEN_AME}" 5 | 6 | if [[ "${jdbc_driver_class_name}" != *mariadb* ]] && [[ "${jdbc_driver_class_name}" != *mysql* ]] 7 | then 8 | return 9 | fi 10 | 11 | local db_host="${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_URL}" 12 | 13 | db_host="${db_host##*://}" 14 | db_host="${db_host%%/*}" 15 | db_host="${db_host%%:*}" 16 | 17 | local db_password=${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_PASSWORD} 18 | 19 | local db_username=${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_USERNAME} 20 | 21 | echo "Connecting to database server '${db_username}'@'${db_host}'." 22 | 23 | while ! (echo "select 1" | mysql -h "${db_host}" -p"${db_password}" -u "${db_username}" &>/dev/null) 24 | do 25 | echo "Waiting for database server '${db_username}'@'${db_host}'." 
26 | 27 | sleep 3 28 | done 29 | 30 | echo "Database server '${db_username}'@'${db_host}' is available." 31 | } 32 | 33 | function wait_for_search { 34 | if [ ! -n "${ORCA_LIFERAY_SEARCH_ADDRESSES}" ] 35 | then 36 | echo "Do not wait for search server because the environment variable ORCA_LIFERAY_SEARCH_ADDRESSES was not set." 37 | 38 | return 39 | fi 40 | 41 | echo "Connecting to '${ORCA_LIFERAY_SEARCH_ADDRESSES}'." 42 | 43 | while true 44 | do 45 | for search_address in ${ORCA_LIFERAY_SEARCH_ADDRESSES//,/ } 46 | do 47 | if ( curl --max-time 3 --silent "${search_address}/_cat/health" | grep "green" &>/dev/null) 48 | then 49 | echo "Search server ${search_address} is available." 50 | 51 | return 52 | fi 53 | done 54 | 55 | echo "Waiting for at least one search server to become available." 56 | 57 | sleep 3 58 | done 59 | } 60 | 61 | function main { 62 | wait_for_mysql 63 | 64 | wait_for_search 65 | } 66 | 67 | main 68 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-3/pre.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | mkdir -p /opt/liferay/liferay/data 4 | chmod 775 /opt/liferay/liferay/data 5 | chown 1000:1000 /opt/liferay/liferay/data 6 | 7 | sysctl -w vm.max_map_count=262144 8 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-3/search/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.elastic.co/elasticsearch/elasticsearch:7.17.14 2 | 3 | RUN /usr/share/elasticsearch/bin/elasticsearch-plugin install \ 4 | analysis-icu \ 5 | analysis-kuromoji \ 6 | analysis-smartcn \ 7 | analysis-stempel 8 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-4/README.md: -------------------------------------------------------------------------------- 1 | # Backup volume 2 
| 3 | Have the backup volume mounted on `/opt/liferay/backups` as the persistent storage of the backup data. 4 | 5 | # Liferay persistent data 6 | 7 | Have Liferay's shared data volume mounted on `/opt/liferay/liferay/data`, so the `backup` docker image can copy it to the backup volume. 8 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-4/backup/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM liferay/job-runner:5.0.53-20231110013150 2 | FROM liferay/job-runner@sha256:e6522b92f5628132a143b69eb21622552909a4da23d5d796df42d07bd63f63d2 3 | 4 | RUN apt-get update && \ 5 | apt-get --yes install mariadb-client && \ 6 | apt-get upgrade --yes && \ 7 | apt-get clean && \ 8 | rm -fr /var/lib/apt/lists/* 9 | 10 | COPY resources/mnt/liferay /mnt/liferay/ 11 | COPY resources/usr/local/bin /usr/local/bin/ 12 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-4/backup/resources/mnt/liferay/job-crontab: -------------------------------------------------------------------------------- 1 | ${ORCA_BACKUP_CRON_EXPRESSION} /usr/local/bin/register_job.sh backup 2 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-4/backup/resources/mnt/liferay/jobs/backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /usr/local/bin/backup.sh 4 | -------------------------------------------------------------------------------- /narwhal/compose_samples/server-4/backup/resources/usr/local/bin/backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function date { 4 | export TZ=UTC 5 | 6 | if [ -z ${1+x} ] || [ -z ${2+x} ] 7 | then 8 | if [ "$(uname)" == "Darwin" ] 9 | then 10 | /bin/date 11 | elif [ -e /bin/date ] 12 | then 13 | /bin/date 
# Validate required configuration before the database backup starts.
# ORCA_DB_ADDRESSES must contain a comma separated list of database
# servers (e.g. db-1:3306,db-2:3306); exit 1 when it is missing.
function check_usage {
	# The original unquoted test ([ ! -n ${ORCA_DB_ADDRESSES} ]) collapsed
	# to the two-argument form [ ! -n ] when the variable was empty, which
	# is always false, so the guard never fired. -z with quoting fixes it.
	if [ -z "${ORCA_DB_ADDRESSES}" ]
	then
		echo "Set the environment variable ORCA_DB_ADDRESSES to a comma separated list of database servers (e.g. db-1:3306,db-2:3306)."

		exit 1
	fi
}
# Archive Liferay's document library into the backup directory.
#
# Arguments:
#   $1 - absolute path of the backup directory (created by the caller)
#   $2 - timestamp used in the archive file name
#
# Runs under set -e, so a missing data directory or a failed tar aborts
# the script with a non-zero status.
function main {
	echo "Starting document library backup."

	cd /opt/liferay/data

	# Quote the positional parameters so backup paths containing spaces
	# do not break the redirect target.
	tar cz document-library > "${1}/document-library-${2}.tar.gz"

	echo "Document library backup was completed successfully."
}
#!/bin/bash

# Provision an Ubuntu host as a Jenkins build node: install Docker CE from
# Docker's apt repository, CI tooling (jq, JDK 11, p7zip, yq), and create
# the jenkins user with SSH access. The jenkins public key is supplied via
# the JENKINS_SSH_PUB_KEY environment variable. Must run as root.

# -p keeps reruns idempotent when /etc/apt/keyrings already exists
# (the original mkdir without -p failed on a second run).
mkdir -p -m 0755 /etc/apt/keyrings

# The script already requires root for apt-get/usermod below, so the lone
# "sudo" the original used here was inconsistent and is dropped.
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg

echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" > /etc/apt/sources.list.d/docker.list

apt-get update
apt-get --yes install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin jq openjdk-11-jdk p7zip-full
snap install yq

adduser jenkins
usermod -a -G docker jenkins

# Install the controller's public key so Jenkins can connect over SSH.
mkdir -p /home/jenkins/.ssh
echo "${JENKINS_SSH_PUB_KEY}" > /home/jenkins/.ssh/authorized_keys
chown -R jenkins:jenkins /home/jenkins/.ssh
# r10k module manifest; modules are installed into the directory below.
moduledir 'narwhal/puppet/modules/r10k'

mod 'accounts',
	:git => 'https://github.com/puppetlabs/puppetlabs-accounts.git',
	:ref => 'main'

mod 'apt',
	:git => 'https://github.com/puppetlabs/puppetlabs-apt.git',
	:ref => 'main'

mod 'concat',
	:git => 'https://github.com/puppetlabs/puppetlabs-concat.git',
	:ref => 'main'

mod 'debconf',
	:git => 'https://github.com/smoeding/puppet-debconf.git'

mod 'docker',
	:git => 'https://github.com/puppetlabs/puppetlabs-docker.git',
	:ref => 'main'

# Was misspelled 'inifle', which made r10k install puppetlabs-inifile under
# the wrong module directory name so its types (ini_setting) never loaded.
mod 'inifile',
	:git => 'https://github.com/puppetlabs/puppetlabs-inifile.git',
	:ref => 'main'

mod 'locales',
	:git => 'https://github.com/saz/puppet-locales.git'

mod 'snap',
	:git => 'https://github.com/root-expert/puppet-snap.git'

mod 'ssh',
	:git => 'https://github.com/saz/puppet-ssh.git'

mod 'stdlib',
	:git => 'https://github.com/puppetlabs/puppetlabs-stdlib',
	:ref => 'main'

mod 'sudo',
	:git => 'https://github.com/saz/puppet-sudo.git'

# The trailing comma the original had after this :git line made Ruby parse
# the following mod 'timezone' call as an extra argument to this one.
mod 'systemd',
	:git => 'https://github.com/voxpupuli/puppet-systemd.git'

mod 'timezone',
	:git => 'https://github.com/saz/puppet-timezone.git'
| modulepath = ./modules/liferay:./modules/r10k 2 | -------------------------------------------------------------------------------- /narwhal/puppet/hiera.yaml: -------------------------------------------------------------------------------- 1 | version: 5 2 | defaults: 3 | datadir: data 4 | hierarchy: 5 | - name: "Secret data: per-node, per-datacenter, common" 6 | # eyaml backend 7 | lookup_key: eyaml_lookup_key 8 | paths: 9 | - "secrets/nodes/%{trusted.certname}.eyaml" # Include explicit file extension 10 | - "secrets/location/%{facts.whereami}.eyaml" 11 | - "common.eyaml" 12 | options: 13 | pkcs7_private_key: /etc/puppetlabs/puppet/keys/private_key.pkcs7.pem 14 | pkcs7_public_key: /etc/puppetlabs/puppet/keys/public_key.pkcs7.pem 15 | -------------------------------------------------------------------------------- /narwhal/puppet/liferay: -------------------------------------------------------------------------------- 1 | modules/liferay -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/bob1.bud.liferay.com.pp: -------------------------------------------------------------------------------- 1 | node 'bob1.bud.liferay.com' { 2 | include pts_ci_node 3 | include pts_system 4 | include pts_users::users::akos_kreutz 5 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/bob2.bud.liferay.com.pp: -------------------------------------------------------------------------------- 1 | node 'bob2.bud.liferay.com' { 2 | include pts_ci_node 3 | include pts_system 4 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/bob3.bud.liferay.com.pp: -------------------------------------------------------------------------------- 1 | node 'bob3.bud.liferay.com' { 2 | include pts_ci_node 3 | include pts_system 4 | } -------------------------------------------------------------------------------- 
/narwhal/puppet/manifests/nodes/bob4.bud.liferay.com.pp: -------------------------------------------------------------------------------- 1 | node 'bob4.bud.liferay.com' { 2 | include pts_ci_node 3 | include pts_system 4 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/bob5.dso.lfr.pp: -------------------------------------------------------------------------------- 1 | node 'bob5.dso.lfr' { 2 | include pts_ci_node 3 | include pts_system 4 | include pts_threatstack 5 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/bob6.dso.lfr.pp: -------------------------------------------------------------------------------- 1 | node 'bob6.dso.lfr' { 2 | include pts_ci_node 3 | include pts_system 4 | include pts_threatstack 5 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/bob7.bud.liferay.com.pp: -------------------------------------------------------------------------------- 1 | node 'bob7.bud.liferay.com' { 2 | include pts_system 3 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/bob8.bud.liferay.com.pp: -------------------------------------------------------------------------------- 1 | node 'bob8.bud.liferay.com' { 2 | include pts_system 3 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/db_.orca.lfr.pp: -------------------------------------------------------------------------------- 1 | node /^db.*orca.lfr$/ { 2 | include pts_orca 3 | include pts_system 4 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/gw1.orca.lfr.pp: -------------------------------------------------------------------------------- 1 | node /^gw1.orca.lfr$/ { 2 | include pts_system 3 | 4 | pts_hosts::add 
{ 5 | '10.111.111.111': fqdn => 'db1.orca.lfr' 6 | } 7 | 8 | pts_hosts::add { 9 | '10.111.111.112': fqdn => 'db2.orca.lfr' 10 | } 11 | 12 | pts_hosts::add { 13 | '10.111.111.11': fqdn => 'jenkins.orca.lfr' 14 | } 15 | 16 | pts_hosts::add { 17 | '10.111.111.10': fqdn => 'jumper.orca.lfr' 18 | } 19 | 20 | pts_hosts::add { 21 | '10.111.111.12': fqdn => 'observer.orca.lfr' 22 | } 23 | 24 | pts_hosts::add { 25 | '10.111.111.121': fqdn => 'search1.orca.lfr' 26 | } 27 | 28 | pts_hosts::add { 29 | '10.111.111.122': fqdn => 'search2.orca.lfr' 30 | } 31 | 32 | pts_hosts::add { 33 | '10.111.111.123': fqdn => 'search3.orca.lfr' 34 | } 35 | 36 | pts_hosts::add { 37 | '10.111.111.101': fqdn => 'web1.orca.lfr' 38 | } 39 | 40 | pts_hosts::add { 41 | '10.111.111.102': fqdn => 'web2.orca.lfr' 42 | } 43 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/ig11.dso.lfr.pp: -------------------------------------------------------------------------------- 1 | node /^ig11.dso.lfr$/ { 2 | include pts_system 3 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/jenkins.orca.lfr.pp: -------------------------------------------------------------------------------- 1 | node /^jenkins.orca.lfr$/ { 2 | include pts_orca 3 | include pts_system 4 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/jumper.orca.lfr.pp: -------------------------------------------------------------------------------- 1 | node /^jumper.orca.lfr$/ { 2 | include pts_orca 3 | include pts_system 4 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/narwhalci.dso.lfr.pp: -------------------------------------------------------------------------------- 1 | node /^narwhalci.dso.lfr$/ { 2 | include pts_docker 3 | include pts_system 4 | } 
-------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/observer.orca.lfr.pp: -------------------------------------------------------------------------------- 1 | node /^observer.orca.lfr$/ { 2 | include pts_orca 3 | include pts_system 4 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/puppet.dso.lfr.pp: -------------------------------------------------------------------------------- 1 | node 'puppet.dso.lfr' { 2 | include pts_autoinstall 3 | include pts_system 4 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/search_.orca.lfr.pp: -------------------------------------------------------------------------------- 1 | node /^search.*orca.lfr$/ { 2 | include pts_orca 3 | include pts_system 4 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/nodes/web_.orca.lfr.pp: -------------------------------------------------------------------------------- 1 | node /^web.*orca.lfr$/ { 2 | include pts_orca 3 | include pts_system 4 | } -------------------------------------------------------------------------------- /narwhal/puppet/manifests/site.pp: -------------------------------------------------------------------------------- 1 | File { 2 | backup => false 3 | } 4 | 5 | node default { 6 | } -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_autoinstall/manifests/init.pp: -------------------------------------------------------------------------------- 1 | class pts_autoinstall { 2 | file { 3 | '/var/www/puppet.dso.lfr/docs/a.yaml': 4 | group => root, 5 | mode => '0644', 6 | owner => root, 7 | source => "puppet:///modules/${module_name}/var/www/puppet.dso.lfr/docs/a.yaml", 8 | } 9 | } 10 | -------------------------------------------------------------------------------- 
# Manage a single entry in the hosts file.
#
# $aliases - optional alias (String) or list of aliases (Array) for the entry
# $fqdn    - fully qualified domain name of the host
# $ipaddr  - IP address; defaults to the resource title
define pts_hosts::add($aliases = undef, $fqdn, $ipaddr = $name) {
	unless ($ipaddr =~ String) {
		fail("Error: IP Address ${ipaddr} does not look like an IP Address")
	}

	unless ($fqdn =~ String) {
		fail('Error: fqdn must be a string')
	}

	if ($aliases =~ Array or $aliases =~ String) {
		$host_aliases = $aliases
	}
	elsif ($aliases == undef) {
		$host_aliases = undef
	}
	else {
		fail('Error: aliases should be a string or an array.')
	}

	host {
		$ipaddr:
			ensure => 'present',
			name => $fqdn,
			# Use the validated variable computed above; the original passed
			# the raw $aliases parameter, leaving $host_aliases dead code.
			host_aliases => $host_aliases,
			ip => $ipaddr,
			target => $pts_hosts::hostsfile,
	}
}
| pts_hosts::add { 6 | '127.0.0.1': 7 | aliases => 'localhost', 8 | fqdn => 'localhost.localdomain', 9 | } 10 | 11 | pts_hosts::add { 12 | '::1': 13 | aliases => [ 14 | 'ip6-localhost', 15 | 'ip6-loopback', 16 | 'localhost6', 17 | ], 18 | fqdn => 'localhost6.localdomain6', 19 | } 20 | 21 | pts_hosts::add { 22 | 'fe00::0': 23 | fqdn => 'ip6-localnet' 24 | } 25 | 26 | pts_hosts::add { 27 | 'ff00::0': 28 | fqdn => 'ip6-mcastprefix' 29 | } 30 | 31 | pts_hosts::add { 32 | 'ff02::1': 33 | fqdn => 'ip6-allnodes' 34 | } 35 | 36 | pts_hosts::add { 37 | 'ff02::2' : 38 | fqdn => 'ip6-allrouters' 39 | } 40 | 41 | } 42 | 43 | } -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_hosts/manifests/params.pp: -------------------------------------------------------------------------------- 1 | class pts_hosts::params { 2 | 3 | $group = 'root' 4 | $hostsfile = '/etc/hosts' 5 | $localhost = true 6 | $mode = '0644' 7 | $owner = 'root' 8 | $primary = true 9 | $purge = false 10 | 11 | } 12 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_hosts/manifests/primary.pp: -------------------------------------------------------------------------------- 1 | class pts_hosts::primary { 2 | 3 | unless ( $pts_hosts::primary == false ) { 4 | 5 | pts_hosts::add { 6 | $::ipdefault: 7 | aliases => $::hostname, 8 | fqdn => $::fqdn, 9 | } 10 | 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_hosts/spec/classes/init_spec.rb: -------------------------------------------------------------------------------- 1 | require 'spec_helper' 2 | describe 'hosts' do 3 | 4 | context 'with defaults for all parameters' do 5 | it { should contain_class('hosts') } 6 | end 7 | end 8 | -------------------------------------------------------------------------------- 
/narwhal/puppet/modules/liferay/pts_hosts/spec/spec_helper.rb: -------------------------------------------------------------------------------- 1 | require 'puppetlabs_spec_helper/module_spec_helper' 2 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_location/files/usr/local/sbin/ifdefault.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | LANG="C" 4 | OS=$(uname -s) 5 | 6 | if [ "$OS" == "Linux" ] 7 | then 8 | DEFAULT_IF=$(netstat -rn | awk '$1=="0.0.0.0" { print $8 }') 9 | else 10 | echo "Unsupported OS" 11 | exit 255 12 | fi 13 | 14 | echo "${DEFAULT_IF}" 15 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_location/lib/facter/ifdefault.rb: -------------------------------------------------------------------------------- 1 | require 'facter' 2 | Facter.add(:ifdefault) do 3 | setcode do 4 | Facter::Util::Resolution::exec("/bin/ip route | /usr/bin/awk '/^default/ {print $5}'") 5 | end 6 | end 7 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_location/lib/facter/ipdefault.rb: -------------------------------------------------------------------------------- 1 | require 'facter' 2 | 3 | Facter.add(:ipdefault) do 4 | setcode do 5 | # Load all default facts 6 | Facter.loadfacts() 7 | 8 | # Retrieve our custom fact, ifdefault 9 | ifdefault=Facter.value(:ifdefault) 10 | 11 | # Now compose the name of the fact containing the ip address of the 12 | # interface returned by ifdefault, and return it 13 | Facter["ipaddress_#{ifdefault}"].value() 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_location/lib/facter/networkdefault.rb: -------------------------------------------------------------------------------- 1 | require 
# Derive the node's location and Puppet-server coordinates from the
# default network (custom fact $::networkdefault); unknown networks fail
# the catalog so misplaced nodes are caught early.
class pts_location {

	file {
		'/usr/local/sbin/ifdefault.sh':
			group => 'root',
			mode => '0755',
			owner => 'root',
			source => "puppet:///modules/${module_name}/usr/local/sbin/ifdefault.sh",
	}

	case $::networkdefault {
		'10.111.111.0': {
			# The original assigned $puppet_server_alias twice in this
			# branch; Puppet forbids variable reassignment, so the branch
			# failed catalog compilation. The duplicate is removed.
			$location = 'lfr-bpo-intra'
			$prompt_host_color = 'blue'
			$puppet_server_alias = 'pts-bpo.bud.liferay.com'
			$puppet_server_hostname = 'puppet.dso.lfr'
			$puppet_server_ip = '192.168.238.3'
			$timezone = 'Europe/Budapest'
		}

		'192.168.232.0','192.168.238.0': {
			$location = 'bpo-ci'
			$prompt_host_color = 'cyan'
			$puppet_server_alias = 'pts-bpo.bud.liferay.com'
			$puppet_server_hostname = 'puppet.dso.lfr'
			$puppet_server_ip = '192.168.238.3'
			$timezone = 'Europe/Budapest'
		}

		default: {
			fail("\n\nCannot identify location, unknown default network: ${::networkdefault}!\n\n")
		}
	}

	notify {
		"Location: ${location}":
	}

}
package { 7 | 'pwgen': 8 | ensure => latest 9 | } 10 | 11 | package { 12 | 'yq': 13 | ensure => installed, 14 | provider => 'snap', 15 | } 16 | 17 | } 18 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_packages/manifests/absent.pp: -------------------------------------------------------------------------------- 1 | class pts_packages::absent { 2 | 3 | package { 4 | [ 5 | 'apt-xapian-index', 6 | 'aptitude', 7 | 'aptitude-common', 8 | 'cloud-init', 9 | 'ed', 10 | 'fwupd', 11 | 'landscape-client', 12 | 'laptop-detect', 13 | 'libmm-glib0', 14 | 'libpam-cracklib', 15 | 'modemmanager', 16 | 'nscd', 17 | 'open-vm-tools', 18 | 'packagekit', 19 | 'pollinate', 20 | 'popularity-contest', 21 | 'ppp', 22 | 'pppconfig', 23 | 'pppoeconf', 24 | 'python-debian', 25 | 'ubuntu-advantage-tools', 26 | 'ufw', 27 | 'unattended-upgrades', 28 | 'wireless-tools', 29 | 'wpasupplicant', 30 | ]: 31 | ensure => purged, 32 | schedule => daily2 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_packages/manifests/init.pp: -------------------------------------------------------------------------------- 1 | class pts_packages { 2 | 3 | include pts_packages::absent 4 | include pts_packages::latest 5 | 6 | $minute = fqdn_rand(59) 7 | 8 | file { 9 | '/etc/cron.d/backup-deb-packages-export': 10 | content => "${minute} 22 * * * root /usr/bin/dpkg -l | grep '^ii' | awk \'{ print \$2 }\' | sort > /etc/deb_packages.list 2>&1\n", 11 | group => 'root', 12 | mode => '0644', 13 | owner => 'root', 14 | } 15 | 16 | } 17 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_packages/manifests/latest.pp: -------------------------------------------------------------------------------- 1 | class pts_packages::latest { 2 | 3 | package { 4 | [ 5 | 'acpid', 6 | 'augeas-lenses', 7 | 'base-files', 8 
| 'bash', 9 | 'bash-completion', 10 | 'bc', 11 | 'binutils', 12 | 'bsdmainutils', 13 | 'bsdutils', 14 | 'bzip2', 15 | 'ca-certificates', 16 | 'colordiff', 17 | 'colortail', 18 | 'cpio', 19 | 'curl', 20 | 'dbus', 21 | 'debconf', 22 | 'dialog', 23 | 'distro-info-data', 24 | 'dpkg', 25 | 'dstat', 26 | 'ethtool', 27 | 'file', 28 | 'gawk', 29 | 'git', 30 | 'haveged', 31 | 'host', 32 | 'htop', 33 | 'iotop', 34 | 'iptables', 35 | 'iputils-ping', 36 | 'less', 37 | 'lsb-release', 38 | 'mc', 39 | 'mtr-tiny', 40 | 'net-tools', 41 | 'netplan.io', 42 | 'ngrep', 43 | 'openssl', 44 | 'passwd', 45 | 'procps', 46 | 'rsync', 47 | 'rsyslog', 48 | 'strace', 49 | 'sysstat', 50 | 'systemd', 51 | 'systemd-sysv', 52 | 'tar', 53 | 'telnet', 54 | 'tmux', 55 | 'ubuntu-keyring', 56 | 'udev', 57 | 'util-linux', 58 | 'virt-what', 59 | ]: 60 | ensure => latest, 61 | schedule => daily, 62 | } 63 | 64 | if $facts['virtual'] != 'lxc' { 65 | 66 | package { [ 67 | 'kpartx', 68 | 'linux-firmware', 69 | 'linux-image-generic-hwe-22.04', 70 | 'linux-tools-common', 71 | 'linux-tools-generic', 72 | ]: 73 | ensure => latest, 74 | schedule => daily, 75 | } 76 | 77 | } 78 | 79 | } 80 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_puppet_agent/manifests/init.pp: -------------------------------------------------------------------------------- 1 | class pts_puppet_agent { 2 | 3 | package { 4 | 'puppet-agent': 5 | ensure => latest 6 | } 7 | 8 | ini_setting { 9 | '/etc/puppetlabs/puppet/puppet.conf - server': 10 | ensure => present, 11 | key_val_separator => ' = ', 12 | path => '/etc/puppetlabs/puppet/puppet.conf', 13 | require => Package['puppet-agent'], 14 | section => 'main', 15 | setting => 'server', 16 | value => $pts_location::puppet_server_hostname, 17 | } 18 | 19 | ini_setting { 20 | '/etc/puppetlabs/puppet/puppet.conf - number_of_facts_soft_limit': 21 | ensure => present, 22 | key_val_separator => ' = ', 23 | path => 
'/etc/puppetlabs/puppet/puppet.conf', 24 | require => Package['puppet-agent'], 25 | section => 'main', 26 | setting => 'number_of_facts_soft_limit', 27 | value => '4096', 28 | } 29 | 30 | file { 31 | '/usr/local/sbin/puppet-agent.sh': 32 | group => root, 33 | mode => '0755', 34 | owner => root, 35 | source => "puppet:///modules/${module_name}/usr/local/sbin/puppet-agent.sh", 36 | } 37 | 38 | $minute = fqdn_rand(59) 39 | 40 | file { 41 | '/etc/cron.d/puppet-agent': 42 | content => "# MANAGED BY PUPPET\n${minute} * * * * root /usr/local/sbin/puppet-agent.sh\n", 43 | group => root, 44 | mode => '0664', 45 | owner => root, 46 | } 47 | 48 | if $facts['fqdn'] != $pts_location::puppet_server_hostname { 49 | pts_hosts::add { 50 | $pts_location::puppet_server_ip: 51 | fqdn => $pts_location::puppet_server_hostname, 52 | aliases => [ $pts_location::puppet_server_alias ] 53 | } 54 | 55 | } 56 | 57 | } 58 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_schedule/manifests/init.pp: -------------------------------------------------------------------------------- 1 | class pts_schedule { 2 | 3 | schedule { 4 | 'daily2': 5 | period => daily, 6 | repeat => 2, 7 | } 8 | 9 | } 10 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_ssh/manifests/init.pp: -------------------------------------------------------------------------------- 1 | # documentation: https://forge.puppetlabs.com/saz/ssh 2 | 3 | class pts_ssh { 4 | 5 | class { 6 | 'ssh': 7 | server_options => { 8 | 'AuthorizedKeysFile' => '.ssh/authorized_keys /etc/ssh/auths/%u.pub', 9 | 'PasswordAuthentication' => 'no', 10 | 'PermitRootLogin' => 'no', 11 | 'PrintMotd' => 'yes', 12 | 'UseDNS' => 'no', 13 | 'X11Forwarding' => 'yes', 14 | }, 15 | storeconfigs_enabled => false, 16 | validate_sshd_file => true, 17 | } 18 | 19 | file { 20 | '/etc/ssh/auths': 21 | ensure => directory, 22 | group => 
'root', 23 | mode => '0755', 24 | owner => 'root', 25 | } 26 | 27 | file { 28 | '/etc/systemd/system/sshd.service.d': 29 | ensure => directory, 30 | group => 'root', 31 | mode => '0755', 32 | owner => 'root', 33 | } 34 | 35 | file { 36 | '/etc/systemd/system/sshd.service.d/override.conf': 37 | content => "[Service]\nUMask=007\nOOMScoreAdjust=-900\n", 38 | group => 'root', 39 | mode => '0644', 40 | notify => [ Service['ssh'],Exec['systemd_reload'] ], 41 | owner => 'root', 42 | require => File['/etc/systemd/system/sshd.service.d'], 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_system/files/root/.bash_profile: -------------------------------------------------------------------------------- 1 | # MANAGED BY PUPPET, LOCAL CHANGES WILL BE OVERRIDDEN 2 | # vim: set ft=bash: 3 | 4 | alias egrep='egrep --color=auto' 5 | alias fgrep='fgrep --color=auto' 6 | alias grep='grep --color=auto' 7 | alias ls='ls --color=auto --show-control-chars -A -F' 8 | alias pat='puppet agent -t' 9 | alias pat='puppet agent -t' 10 | alias pate='puppet agent -t --environment' 11 | alias patn='puppet agent -t --noop' 12 | alias patne='puppet agent -t --noop --noop --environment' 13 | export COLOR_BLUE="\e[1;34m" 14 | export COLOR_CYAN="\e[1;36m" 15 | export COLOR_GREEN="\e[1;32m" 16 | export COLOR_NOCOLOR="\e[00m" 17 | export COLOR_PURPLE="\e[1;35m" 18 | export COLOR_RED="\e[1;31m" 19 | export EDITOR=vim 20 | export HISTFILESIZE=100000 21 | export HISTSIZE=100000 22 | export HISTTIMEFORMAT="%y-%m-%d %T " 23 | export LC_ALL=en_US.UTF-8 24 | export PS1="\[${COLOR_CYAN}\]\h\[${COLOR_BLUE}\] \w # \[${COLOR_NOCOLOR}\]" 25 | export VISUAL=vim 26 | 27 | if [ -f ~/.bash_local ] 28 | then 29 | # shellcheck disable=SC1090 30 | . 
~/.bash_local 31 | fi 32 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_system/files/usr/local/bin/ssh-clean-known_hosts.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -z "${1}" ] 4 | then 5 | echo "No hostname provided!" 6 | exit 1 7 | fi 8 | 9 | FORWARD_HOST="$1" 10 | IP=$(host "${FORWARD_HOST}" | awk '{ print $4 }') 11 | REVERSE_HOST=$(host "${IP}" | awk '{ print $5 }') 12 | 13 | ssh-keygen -f "$HOME/.ssh/known_hosts" -R "${FORWARD_HOST}" 14 | ssh-keygen -f "$HOME/.ssh/known_hosts" -R "${IP}" 15 | ssh-keygen -f "$HOME/.ssh/known_hosts" -R "${REVERSE_HOST}" 16 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_system/manifests/init.pp: -------------------------------------------------------------------------------- 1 | class pts_system { 2 | 3 | Class['pts_ssh'] -> Class['pts_users'] 4 | 5 | include pts_location 6 | include pts_packages 7 | include pts_puppet_agent 8 | include pts_schedule 9 | include pts_ssh 10 | include pts_system::root 11 | include pts_timezone 12 | include pts_users 13 | 14 | class { 15 | 'locales': 16 | default_locale => 'C.UTF-8', 17 | locales => [ 18 | 'en_US.UTF-8 UTF-8', 19 | ], 20 | } 21 | 22 | class { 23 | 'pts_hosts': 24 | purge => true 25 | } 26 | 27 | class { 28 | 'sudo': 29 | package_ensure => latest, 30 | purge_ignore => '*[!_puppet]', 31 | suffix => '_puppet', 32 | } 33 | 34 | exec { 35 | 'systemd_reload': 36 | command => '/bin/systemctl daemon-reload', 37 | refreshonly => true, 38 | } 39 | 40 | file { 41 | '/usr/local/bin/ssh-clean-known_hosts.sh': 42 | group => root, 43 | mode => '0755', 44 | owner => root, 45 | source => "puppet:///modules/${module_name}/usr/local/bin/ssh-clean-known_hosts.sh", 46 | } 47 | 48 | file_line { 49 | '/etc/systemd/system.conf - DefaultLimitNOFILE': 50 | line => 'DefaultLimitNOFILE=65534', 51 | 
match => '^#?DefaultLimitNOFILE=', 52 | path => '/etc/systemd/system.conf', 53 | } 54 | 55 | sudo::conf { 56 | 'git_env': 57 | content => 'Defaults env_keep = "PATH XAUTHORITY SSH_AUTH_SOCK GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL"', 58 | priority => 20, 59 | } 60 | 61 | sudo::conf { 62 | 'admins': 63 | content => '%admins ALL=(ALL) NOPASSWD:ALL', 64 | ensure => present, 65 | } 66 | 67 | sudo::conf { 68 | 'sudo': 69 | content => '%sudo ALL=(ALL) NOPASSWD:ALL', 70 | ensure => present, 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_system/manifests/root.pp: -------------------------------------------------------------------------------- 1 | class pts_system::root { 2 | file { 3 | '/root/.bash_profile': 4 | group => root, 5 | mode => '0644', 6 | owner => root, 7 | source => "puppet:///modules/${module_name}/root/.bash_profile", 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_threatstack/files/etc/apt/trusted.gpg.d/threatstack.gpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/narwhal/puppet/modules/liferay/pts_threatstack/files/etc/apt/trusted.gpg.d/threatstack.gpg -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_threatstack/manifests/init.pp: -------------------------------------------------------------------------------- 1 | class pts_threatstack { 2 | file { '/etc/apt/trusted.gpg.d/threatstack.gpg': 3 | group => root, 4 | mode => '0644', 5 | owner => root, 6 | source => "puppet:///modules/${module_name}/etc/apt/trusted.gpg.d/threatstack.gpg", 7 | } 8 | 9 | file { '/etc/apt/sources.list.d/threatstack.list': 10 | content => "deb https://pkg.threatstack.com/v2/Ubuntu 
${facts['os']['distro']['codename']} main\n", 11 | group => root, 12 | mode => '0644', 13 | notify => Class['apt::update'], 14 | owner => root, 15 | require => File['/etc/apt/trusted.gpg.d/threatstack.gpg'], 16 | } 17 | 18 | package { 'threatstack-agent': 19 | ensure => latest, 20 | require => File['/etc/apt/sources.list.d/threatstack.list'], 21 | } 22 | 23 | service { 'threatstack': 24 | ensure => running, 25 | provider => 'systemd', 26 | require => Package['threatstack-agent'], 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_timezone/manifests/init.pp: -------------------------------------------------------------------------------- 1 | class pts_timezone { 2 | 3 | class { 4 | 'timezone': 5 | autoupgrade => true, 6 | hwutc => true, 7 | timezone => $pts_location::timezone, 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/groups.pp: -------------------------------------------------------------------------------- 1 | class pts_users::groups { 2 | 3 | include pts_users::groups::ptsaccess 4 | 5 | } 6 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/groups/ptsaccess.pp: -------------------------------------------------------------------------------- 1 | class pts_users::groups::ptsaccess { 2 | 3 | group { 4 | 'ptsaccess': 5 | ensure => present, 6 | } 7 | 8 | sudo::conf { 9 | 'ptsaccess': 10 | content => '%ptsaccess ALL=(ALL) NOPASSWD: ALL', 11 | priority => 10, 12 | } 13 | 14 | } 15 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/init.pp: -------------------------------------------------------------------------------- 1 | class pts_users { 2 | 3 | Class['pts_users::groups'] -> Class['pts_users::users'] 4 | 5 | 
include pts_users::groups 6 | include pts_users::users 7 | } 8 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/users.pp: -------------------------------------------------------------------------------- 1 | class pts_users::users { 2 | 3 | $hostname_uppercase = upcase($::hostname) 4 | 5 | # The 'ubuntu' users should only be deleted, if we are agreed on it with the IT team 6 | # accounts::user { 'ubuntu': 7 | # ensure => absent 8 | # } 9 | 10 | include pts_users::users::brian_chan 11 | include pts_users::users::peter_mezei 12 | include pts_users::users::richard_benko 13 | include pts_users::users::root 14 | include pts_users::users::szantina_szanto 15 | include pts_users::users::tamas_papp 16 | include pts_users::users::ubuntu 17 | include pts_users::users::zsolt_balogh 18 | 19 | } 20 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/users/akos_kreutz.pp: -------------------------------------------------------------------------------- 1 | class pts_users::users::akos_kreutz { 2 | 3 | $user = 'akos.kreutz' 4 | $real_name = 'Akos Kreutz' 5 | 6 | accounts::user { 7 | $user: 8 | comment => "${real_name} (${pts_users::users::hostname_uppercase})", 9 | groups => [ 10 | 'sudo', 11 | 'ptsaccess', 12 | ], 13 | sshkey_custom_path => "/etc/ssh/auths/${user}.pub", 14 | sshkey_group => 'root', 15 | sshkey_mode => '0644', 16 | sshkey_owner => 'root', 17 | sshkeys => [ 18 | 'ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQC9jWlqzMEHbOfyTHvjK7xFtqUTVS8A8fn2TwUE4HLIpjPzbaST760gju/rUZMTczgjEr6QR99TTZH5nnoPrwBZlk1W3/2IiTuM7F4OydTuIRfdZOp7e9PpnMcGa9vulkt1Aj1trJ1IF9qAnLyReFUaUrRqvq9Yj3s6kfer2OSRqHkhd49Al9KYJinoiuZh891Q9O+SWua0bGV49mOyhdgNa5X06OrYRUb54MiHH8TTxYUsU+2c2yN4oK0jQuvyjDbsFPSkulQefyPkTVd/InfngWjTUXRkRX84MLAGK5r+Yl2XR1w7Yhuo8npqUEubtjObvgVopMj2tC4gBfRvqsm5Bd8Oc5GYcxwhSnE6siefr1Rp16M/mOQiBe+jUxNqkcfBWy2FyeRiV7bopZR6zNDqLsXJ/xKdkyOq027L380sxGpnDYNHi+glRF15kLpL87t1MxWbqZcPGAFujOnRHbNSktaw+aFEuRrGSiu6v3aKRGzEDTJJkUlMjwcybQG+aJd6CHRKSOqESxJmozvNpdsjpZT24glxZ/PLpeUqkC5EEw6iE1KWvCTc7s5ncR7EdwIs5rQhGH/1rRv5S1pjQZNnzeBIERuRLY11QStedy6Ck0vp5DFrZ7dT3MfSNjBokvpcKd+ZP9R/CCVJReFbHvFxksROsCX2ymRZuXoLpEiLFw== akos.kreutz@private', 19 | ], 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/users/brian_chan.pp: -------------------------------------------------------------------------------- 1 | class pts_users::users::brian_chan { 2 | 3 | $user = 'brian.chan' 4 | $real_name = 'Brian Chan' 5 | 6 | accounts::user { 7 | $user: 8 | comment => "${real_name} (${pts_users::users::hostname_uppercase})", 9 | groups => [ 10 | 'ptsaccess', 11 | 'sudo', 12 | ], 13 | sshkey_custom_path => "/etc/ssh/auths/${user}.pub", 14 | sshkey_group => 'root', 15 | sshkey_mode => '0644', 16 | sshkey_owner => 'root', 17 | } 18 | 19 | } 20 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/users/jenkins.pp: -------------------------------------------------------------------------------- 1 | class pts_users::users::jenkins { 2 | 3 | $user = 'jenkins' 4 | $real_name = 'Jenkins' 5 | 6 | accounts::user { 7 | $user: 8 | comment => "${real_name} (${pts_users::users::hostname_uppercase})", 9 | gid => 1000, 10 | groups => [ 11 | 'docker', 12 | 'ptsaccess', 13 | ], 14 | sshkey_custom_path => 
"/etc/ssh/auths/${user}.pub", 15 | sshkey_group => 'root', 16 | sshkey_mode => '0644', 17 | sshkey_owner => 'root', 18 | sshkeys => 19 | [ 20 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGELcHwoA3CySiLLnHICPzF9NeK9uTknapOoCFU2Mk7j root@bob1', 21 | 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDLzDAYeiaDNX1bZo3v6UfzIuTXHZ5a3+bmMZ/ekYsnLdynrOV2Czo+k0srjz7oSDY0DK4sMJWMfGMMGMSicUteCVYVyA/KLWweF6GpSzCh27VISwG9t05CDqTNm8/E8ACSJ2IZwTUeAZnTkDPqT/wZC+0xhfMX+aMmdWSHo0NVNhtU75FyCKeYStz5BLi3UNx+/w8dqQUHkGSmtLzj0IwXUgsQ1TS4reAthRNnihF7LBA2RntHaO2d1zgy4pvRPtPgVt9m0FHddhGedZ8YlSNcCG6AxlaZ6eI8Rl0OJarEg8nnwwZaOfqoJIXp6/TBq9LCxpt4RhYLTPd5XPWn8w/sQfpEZyJwohRoSOXPYDUChykDxn+V/78QcDF7oIJq9dcTs/D1jgHaMWi/REG77lENuuoAhEpk7S/6eGjGgG8yHghS65xQ6JWTH+FHnUqksFwyvVYQNkJ2nLrc5GCYIuSnCWHVxl5yk2jzs8da9DNNiGsSwFWYAOTzBV11Leq5GSM= jenkins@bob1', 22 | ], 23 | uid => 1000, 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/users/peter_mezei.pp: -------------------------------------------------------------------------------- 1 | class pts_users::users::peter_mezei { 2 | 3 | $user = 'peter.mezei' 4 | $real_name = 'Peter Mezei' 5 | 6 | accounts::user { $user: 7 | comment => "${real_name} (${pts_users::users::hostname_uppercase})", 8 | groups => [ 9 | 'ptsaccess', 10 | 'sudo', 11 | ], 12 | sshkey_custom_path => "/etc/ssh/auths/${user}.pub", 13 | sshkey_group => 'root', 14 | sshkey_mode => '0644', 15 | sshkey_owner => 'root', 16 | sshkeys => [ 17 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBGONWS4sxm0N2gVn9cg02yeVV4Op32gZonA+4pgXN0q peter.mezei@liferay', 18 | ], 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/users/richard_benko.pp: -------------------------------------------------------------------------------- 1 | class pts_users::users::richard_benko { 2 | 3 | $user = 'richard.benko' 4 | $real_name = 'Richard 
Benko' 5 | 6 | accounts::user { 7 | $user: 8 | comment => "${real_name} (${pts_users::users::hostname_uppercase})", 9 | groups => [ 10 | 'sudo', 11 | 'ptsaccess', 12 | ], 13 | sshkey_custom_path => "/etc/ssh/auths/${user}.pub", 14 | sshkey_group => 'root', 15 | sshkey_mode => '0644', 16 | sshkey_owner => 'root', 17 | sshkeys => [ 18 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFOmhD5Dhh+Ek6ZYmZ07zTQwKyuFqjFELAsN6hzR6mgD richard.benko@liferay', 19 | ], 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/users/root.pp: -------------------------------------------------------------------------------- 1 | class pts_users::users::root { 2 | $user = 'root' 3 | 4 | accounts::user { 5 | $user: 6 | comment => "${pts_users::users::hostname_uppercase} root", 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/users/tamas_papp.pp: -------------------------------------------------------------------------------- 1 | class pts_users::users::tamas_papp { 2 | 3 | $user = 'tamas.papp' 4 | $real_name = 'Tamas Papp' 5 | 6 | accounts::user { 7 | $user: 8 | comment => "Tamas PAPP (${pts_users::users::hostname_uppercase})", 9 | groups => [ 10 | 'sudo', 11 | 'ptsaccess', 12 | ], 13 | sshkey_custom_path => "/etc/ssh/auths/${user}.pub", 14 | sshkey_group => 'root', 15 | sshkey_mode => '0644', 16 | sshkey_owner => 'root', 17 | sshkeys => [ 18 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIK9cRilRehhA3bBKZfd8OITMFVyzQBUvCjvbejLsJavD tamas.papp@private', 19 | ], 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/users/ubuntu.pp: -------------------------------------------------------------------------------- 1 | class pts_users::users::ubuntu { 2 | 3 | $user = 'ubuntu' 4 | $real_name = 'ubuntu' 5 | 6 | 
accounts::user { 7 | $user: 8 | comment => "${real_name} (${pts_users::users::hostname_uppercase})", 9 | groups => [ 10 | 'ptsaccess', 11 | 'sudo', 12 | ], 13 | password => '$6$Asmc5Pso43vsKDTE$E8rWaDRbfwiv.fCz4g1gr0wB5P1FqpWJ58yB70x9zB2udfdhJRfkwaIsOzpk9qokrBuex3Mdult8wn0KnB1fE1', 14 | sshkeys => 15 | [ 16 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAZpQXHpHb+8SuvAsJgK0DihB70GovopWAW7gwKUIK6Q kiyoshi.lee@liferay.com', 17 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAijYuQ009czfSEiVtWyra39vNy9Y803yJcaD2IDJ9zK ansible-runner', 18 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBGONWS4sxm0N2gVn9cg02yeVV4Op32gZonA+4pgXN0q petermezei-lr@github/76655988 # ssh-import-id gh:petermezei-lr', 19 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFOmhD5Dhh+Ek6ZYmZ07zTQwKyuFqjFELAsN6hzR6mgD richard.benko@liferay.com', 20 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFkDDO7aanqo0rc93PLS/GLzautv01ldODB4ES3HMlfU william.forsyth@liferay.com', 21 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIK9cRilRehhA3bBKZfd8OITMFVyzQBUvCjvbejLsJavD tompos@private', 22 | 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDBDvSl4fpegUX1BAoVrNJUGrY9/VXDCOlzkbuSb3mUvYfK+ZmeU8Si4GjSbtJPjB64TR83MxEygxXV2SNXgdyHyFrNNrgBJox2RScOnHE8/FvDAMl7iXMAdcQG5XST+DVX4WoomD/vhuS4UJPQnDSE9mXPrd7g4dU8Jw9hOmnrR0qPA0eSKjn2jJdV9haSP0cwF4gach91xqCHAFOfeBcSV5NzZB52uc24UeJzkHFXdgSb3B16bRJK5Bj/uK2ttDsiukkYfWojB14gSm1XwkxgiJ3ktBCjfnPeI9z5BPWkY1M/Okh9tKJCBGkq5VQBQtcnDLJmnUsLY6uuq1kZO22IYoN2sdn/8QVxAIAIjkH2qJcAN1GN/85dZlB7IjwErCO1idWpo1v24cqZCIDE3tR2DWuO8SFwrzfr6BHsBlHC53V3iF1yooWk5bj2eKspKZzt6TJxAWGxYb3EEssh0NpV4guh6QtERpaeYALV5ZkmHNlJnG9tzghx7Q1qzjiJrk8= me@liferay-m0t0d7m6', 23 | ], 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /narwhal/puppet/modules/liferay/pts_users/manifests/users/zsolt_balogh.pp: -------------------------------------------------------------------------------- 1 | class pts_users::users::zsolt_balogh { 2 | 3 | $user = 'zsolt.balogh' 4 | $real_name = 'Zsolt Balogh' 5 | 6 | accounts::user { 7 | 
$user: 8 | comment => "Zsolt Balogh (${pts_users::users::hostname_uppercase})", 9 | groups => [ 10 | 'ptsaccess', 11 | 'sudo', 12 | ], 13 | sshkey_custom_path => "/etc/ssh/auths/${user}.pub", 14 | sshkey_group => 'root', 15 | sshkey_mode => '0644', 16 | sshkey_owner => 'root', 17 | sshkeys => [ 18 | 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILOu945eSM8vlNkxMmnYrIYkoFaPO0L7+M0cWnV8/tH2 zsolt.balogh@liferay', 19 | ], 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /narwhal/push_release_builder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd templates/hotfix-builder || exit 3 4 | 5 | docker build . -t zsoltbalogh/hotfix-builder 6 | docker push zsoltbalogh/hotfix-builder -------------------------------------------------------------------------------- /narwhal/release_notes/.gitignore: -------------------------------------------------------------------------------- 1 | *.sql -------------------------------------------------------------------------------- /narwhal/release_notes/generate_release_notes.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ../../_liferay_common.sh 4 | source ../../_release_common.sh 5 | 6 | function find_git_dir { 7 | lc_cd "${HOME}/dev/projects/liferay-portal-ee" 8 | } 9 | 10 | function generate_release_notes { 11 | local version="${1}" 12 | 13 | local ga_version 14 | 15 | if is_quarterly_release "${version}" 16 | then 17 | ga_version=7.4.13-ga1 18 | else 19 | ga_version=${version%%-u*}-ga1 20 | fi 21 | 22 | local fixed_issues=$(git log "tags/${ga_version}..tags/${version}" --pretty=%s | grep -E "^[A-Z][A-Z0-9]*-[0-9]*" | sed -e "s/^\([A-Z][A-Z0-9]*-[0-9]*\).*/\\1/" | sort | uniq | grep -v POSHI | grep -v RELEASE | grep -v LRQA | grep -v LRCI | paste -sd,) 23 | 24 | echo "UPDATE OSB_PatcherProjectVersion SET fixedIssues='${fixed_issues}' WHERE 
committish='${version}';" >> "${OUTPUT_FILE}" 25 | } 26 | 27 | function main { 28 | OUTPUT_FILE=$(pwd)/release_notes_update.sql 29 | 30 | rm -f "${OUTPUT_FILE}" 31 | 32 | lc_time_run find_git_dir 33 | 34 | lc_time_run update_git 35 | 36 | if [ -n "${1}" ] 37 | then 38 | lc_time_run generate_release_notes "${1}" 39 | else 40 | for tag in $(git ls-remote --tags upstream | grep -E "([0-9][0-9][0-9][0-9].q[1-4].|7\.[0-4]\.1[03]-u[0-9]*)" | sed -e "s#.*/tags/##") 41 | do 42 | lc_time_run generate_release_notes "${tag}" 43 | done 44 | fi 45 | } 46 | 47 | function update_git { 48 | git fetch upstream --force --tags 49 | } 50 | 51 | main "${@}" -------------------------------------------------------------------------------- /narwhal/run_2023_q3_release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # shellcheck disable=SC2086,SC2068 4 | ./run_release_builder.sh \ 5 | -e LIFERAY_COMMON_DEBUG_ENABLED=1 \ 6 | -e NARWHAL_GIT_SHA=7.4.13-u92 \ 7 | ${@} -------------------------------------------------------------------------------- /narwhal/run_release_builder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CACHE_DIR="${HOME}/.liferay/release-builder" 4 | 5 | if [ ! -d "${CACHE_DIR}" ] 6 | then 7 | echo "Creating the builder cache folder in ${CACHE_DIR}." 8 | 9 | sudo install -d "${CACHE_DIR}" -m 0775 -o 1000 10 | fi 11 | 12 | cd templates/release-builder || exit 3 13 | 14 | if [ -e "${HOME}/.1password/agent.sock" ] 15 | then 16 | SSH_CONFIG="-e SSH_AUTH_SOCK=/ssh-agent -v ${HOME}/.1password/agent.sock:/ssh-agent" 17 | elif [ ! 
-s "${SSH_AUTH_SOCK}" ] 18 | then 19 | SSH_CONFIG="-e SSH_AUTH_SOCK=/ssh-agent -v ${SSH_AUTH_SOCK}:/ssh-agent" 20 | else 21 | if [ -f "$HOME/.ssh/id_ed25519" ] 22 | then 23 | SSH_PUBKEY_FILE="$HOME/.ssh/id_ed25519" 24 | elif [ -f "$HOME/.ssh/id_rsa" ]; 25 | then 26 | SSH_PUBKEY_FILE="$HOME/.ssh/id_rsa" 27 | else 28 | echo "No \${SSH_AUTH_SOCK} or public key present. Exiting." 29 | 30 | exit 1 31 | fi 32 | SSH_CONFIG="-e NARWHAL_GITHUB_SSH_KEY=\"$(cat "${HOME}"/"${SSH_PUBKEY_FILE}")\"" 33 | fi 34 | 35 | # shellcheck disable=SC2086,SC2068 36 | docker run --cpus=8 -it -m 10g -v "${CACHE_DIR}:/opt/liferay/" ${SSH_CONFIG} ${@} $(docker -l warning build . --quiet -t release-builder) -------------------------------------------------------------------------------- /narwhal/run_test_7.2.x_hotfix.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # shellcheck disable=SC2086,SC2068 4 | ./run_release_builder.sh -e LIFERAY_COMMON_DEBUG_ENABLED=1 -e NARWHAL_OUTPUT=hotfix -e NARWHAL_HOTFIX_TESTING_TAG=test-fix-pack-new-builder -e NARWHAL_HOTFIX_TESTING_SHA=768f6f3952d147585dfc647a75adaa150fad08a6 ${@} -------------------------------------------------------------------------------- /narwhal/run_test_7.4.x_hotfix.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # shellcheck disable=SC2086,SC2068 4 | ./run_release_builder.sh -e LIFERAY_COMMON_DEBUG_ENABLED=1 -e NARWHAL_GIT_SHA=7.4.13-u92 -e NARWHAL_FIXED_ISSUES=LPS-1 -e NARWHAL_OUTPUT=hotfix -e NARWHAL_HOTFIX_TESTING_TAG=test-fix-pack-new-builder-7.4 -e NARWHAL_HOTFIX_TESTING_SHA=4e9a87e9bd09bb818061932eccfd0cbf9205f7e7 ${@} -------------------------------------------------------------------------------- /narwhal/source_code_sharing/.gitignore: -------------------------------------------------------------------------------- 1 | liferay-dxp 2 | liferay-portal-ee 3 | logs 
-------------------------------------------------------------------------------- /narwhal/source_code_sharing/get_ignore_zip_files_7.3.10.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eo pipefail 4 | 5 | IFS=$'\n\t' 6 | 7 | source "$(dirname "$(readlink /proc/$$/fd/255 2>/dev/null)")/_common.sh" 8 | 9 | ZIP_CACHE_DIR="${LIFERAY_COMMON_DOWNLOAD_CACHE_DIR}/storage.bud.liferay.com/public/files.liferay.com/private/ee/fix-packs/7.3.10/hotfix" 10 | 11 | function get_list_zip_files { 12 | 13 | local zip_file 14 | 15 | for zip_file in "${ZIP_CACHE_DIR}"/*.zip 16 | do 17 | if (unzip -p "${zip_file}" fixpack_documentation.json | jq -r '.patch.requirements' | grep -E -q "^(base-|dxp-|sp[1-9])") 18 | then 19 | zip_file="${zip_file##*/}" 20 | 21 | echo "${zip_file}" 22 | fi 23 | done 24 | } 25 | 26 | get_list_zip_files 27 | -------------------------------------------------------------------------------- /narwhal/templates/ci/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM liferay/jdk11 2 | 3 | CMD ["bash"] 4 | 5 | COPY resources/ / 6 | 7 | ENV DEBIAN_FRONTEND=noninteractive 8 | ENV JAVA_VERSION=zulu11 9 | ENV TZ=Etc/UTC 10 | 11 | ENTRYPOINT ["/usr/bin/tini", "--", "/usr/local/sbin/jenkins.sh"] 12 | 13 | RUN groupadd -g 1001 jenkins && \ 14 | useradd -d /var/lib/jenkins -g 1001 -u 1001 jenkins 15 | 16 | RUN curl -fsSL https://github.com/jenkinsci/plugin-installation-manager-tool/releases/download/2.12.11/jenkins-plugin-manager-2.12.11.jar -o /opt/jenkins-plugin-manager.jar 17 | 18 | RUN curl -fsSL https://pkg.jenkins.io/debian-stable/jenkins.io-2023.key > /usr/share/keyrings/jenkins-keyring.asc && \ 19 | echo deb [signed-by=/usr/share/keyrings/jenkins-keyring.asc] https://pkg.jenkins.io/debian-stable binary/ > /etc/apt/sources.list.d/jenkins.list && \ 20 | apt-get update && \ 21 | apt-get install --no-install-recommends -y fontconfig git jenkins=2.414.2 tini 
&& \ 22 | mkdir -p /var/cache/jenkins/war && \ 23 | chown -R jenkins:jenkins /var/cache/jenkins && \ 24 | apt-get upgrade -y && \ 25 | apt-get clean && \ 26 | update-java-alternatives -s zulu-11-amd64 27 | 28 | USER jenkins -------------------------------------------------------------------------------- /narwhal/templates/ci/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | jenkins: 3 | image: ci:latest 4 | restart: on-failure 5 | volumes: 6 | - /opt/docker/jenkins:/var/lib/jenkins 7 | proxy: 8 | image: nginx:latest 9 | ports: 10 | - 80:80 11 | - 443:443 12 | restart: on-failure 13 | volumes: 14 | - ./narwhalci.orca.liferay.com.conf:/etc/nginx/conf.d/narwhalci.orca.liferay.com.conf 15 | - /opt/docker/acme.sh:/acme.sh 16 | version: '3' 17 | -------------------------------------------------------------------------------- /narwhal/templates/ci/narwhalci.orca.liferay.com.conf: -------------------------------------------------------------------------------- 1 | map $http_upgrade $connection_upgrade { 2 | default upgrade; 3 | '' close; 4 | } 5 | server { 6 | access_log off; 7 | error_log off; 8 | listen 80 default_server; 9 | return 301 https://$host$request_uri; 10 | server_name _; 11 | } 12 | 13 | server { 14 | access_log /var/log/nginx/access.log; 15 | client_body_buffer_size 128k; 16 | client_max_body_size 10m; 17 | error_log /var/log/nginx/error.log; 18 | http2 on; 19 | ignore_invalid_headers off; 20 | listen 443 ssl; 21 | 22 | location / { 23 | proxy_buffering off; 24 | proxy_connect_timeout 90; 25 | proxy_http_version 1.1; 26 | proxy_max_temp_file_size 0; 27 | proxy_pass http://jenkins; 28 | proxy_read_timeout 90; 29 | proxy_redirect default; 30 | proxy_request_buffering off; # Required for HTTP CLI commands 31 | proxy_send_timeout 90; 32 | proxy_set_header Connection $connection_upgrade; 33 | proxy_set_header Host $host; 34 | proxy_set_header Upgrade $http_upgrade; 35 | proxy_set_header 
X-Forwarded-For $proxy_add_x_forwarded_for; 36 | proxy_set_header X-Forwarded-Proto $scheme; 37 | proxy_set_header X-Real-IP $remote_addr; 38 | proxy_set_header Connection ""; # Clear for keepalive 39 | } 40 | 41 | sendfile off; 42 | server_name _; 43 | ssl_certificate /acme.sh/narwhalci.orca.liferay.com_ecc/fullchain.cer; 44 | ssl_certificate_key /acme.sh/narwhalci.orca.liferay.com_ecc/narwhalci.orca.liferay.com.key; 45 | ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384; 46 | ssl_dhparam /acme.sh/dhparam.pem; 47 | ssl_prefer_server_ciphers on; 48 | ssl_protocols TLSv1.2 TLSv1.3; 49 | ssl_session_cache shared:NixCraftSSL:10m; 50 | ssl_session_tickets off; 51 | ssl_session_timeout 1d; 52 | } 53 | 54 | upstream jenkins { 55 | keepalive 32; 56 | server jenkins:8080; 57 | } 58 | -------------------------------------------------------------------------------- /narwhal/templates/ci/resources/usr/local/sbin/jenkins-plugin-cli: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # COPIED FROM jenkins/jenkins:2.390-jdk11 5 | # 6 | 7 | # read JAVA_OPTS into array to avoid need for eval (and associated vulnerabilities) 8 | java_opts_array=() 9 | while IFS= read -r -d '' item; do 10 | java_opts_array+=( "$item" ) 11 | done < <([[ $JAVA_OPTS ]] && xargs printf '%s\0' <<<"$JAVA_OPTS") 12 | 13 | exec java "${java_opts_array[@]}" -jar /opt/jenkins-plugin-manager.jar "$@" 14 | -------------------------------------------------------------------------------- /narwhal/templates/ci/resources/usr/local/sbin/jenkins.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /usr/bin/java \ 4 | -Dhudson.lifecycle=hudson.lifecycle.ExitLifecycle \ 5 | -Duser.home=/var/lib/jenkins \ 6 | -jar 
/usr/share/java/jenkins.war \ 7 | --httpPort=8080 \ 8 | --webroot=/var/cache/jenkins/war 9 | -------------------------------------------------------------------------------- /narwhal/templates/release-builder/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM liferay/jdk11-jdk8:5.0.9-20230124135117 2 | 3 | RUN apt-get update && \ 4 | DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install --no-install-recommends --yes ant apt-transport-https ca-certificates gnupg git lsof ssh p7zip-full zip && \ 5 | echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt cloud-sdk main" > /etc/apt/sources.list.d/google-cloud-sdk.list && \ 6 | curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && \ 7 | apt-get update && \ 8 | apt-get install google-cloud-cli && \ 9 | apt-get upgrade --yes && \ 10 | apt-get clean 11 | 12 | 13 | ENTRYPOINT ["tini", "--", "/usr/local/bin/liferay_release_builder_entrypoint.sh"] 14 | 15 | ENV ANT_OPTS="-Xmx10G" 16 | 17 | # 18 | # The id of the hotfix 19 | # 20 | 21 | ENV NARWHAL_BUILD_ID=1 22 | 23 | # 24 | # The git tag or branch to check out from the liferay-portal-ee 25 | # 26 | ENV NARWHAL_GIT_SHA=7.2.x 27 | 28 | # 29 | # Either release or fix pack 30 | # 31 | ENV NARWHAL_OUTPUT=release 32 | 33 | # 34 | # The github username used to check out on the liferay-portal-ee repository. 
Should be used only for debugging purposes 35 | # 36 | ENV NARWHAL_REMOTE=liferay 37 | 38 | # 39 | # Tag name in the liferay-portal-ee repository which contains the hotfix testing SHA-s if you would like to build a test hotfix 40 | # 41 | ENV NARWHAL_HOTFIX_TESTING_TAG= 42 | 43 | # 44 | # Git SHA which would be cherry-picked on NARWHAL_GIT_SHA from the tree of NARWHAL_HOTFIX_TESTING_TAG to build a test hotfix 45 | # 46 | ENV NARWHAL_HOTFIX_TESTING_SHA= 47 | 48 | # 49 | # If this is set, the files will be uploaded to the designated buckets 50 | # 51 | ENV NARWHAL_UPLOAD= 52 | 53 | # 54 | # The name of the GCS bucket where the internal files should be copied 55 | # 56 | ENV NARWHAL_GCS_INTERNAL_BUCKET=patcher-storage 57 | 58 | COPY resources/ / -------------------------------------------------------------------------------- /narwhal/templates/release-builder/resources/usr/local/signature-util/com/liferay/tools/patching/signing/GenerateSignature.java: -------------------------------------------------------------------------------- 1 | package com.liferay.tools.patching.signing; 2 | 3 | import java.io.BufferedInputStream; 4 | import java.io.FileInputStream; 5 | import java.math.BigInteger; 6 | import java.security.KeyStore; 7 | import java.security.PrivateKey; 8 | import java.security.Signature; 9 | 10 | public class GenerateSignature { 11 | 12 | public static void main(String[] arguments) { 13 | if (arguments.length < 5) { 14 | System.err.println("Required parameters: keystore-file keystore-password key-alias key-password file-to-sign"); 15 | 16 | System.exit(1); 17 | } 18 | 19 | try { 20 | KeyStore keyStore = KeyStore.getInstance("PKCS12"); 21 | FileInputStream keyStoreFis = new FileInputStream(arguments[0]); 22 | keyStore.load(keyStoreFis, arguments[1].toCharArray()); 23 | 24 | System.out.println(arguments[3]); 25 | PrivateKey privateKey = (PrivateKey) keyStore.getKey( 26 | arguments[2], arguments[3].toCharArray()); 27 | 28 | Signature signature = 
Signature.getInstance("SHA256withRSA"); 29 | 30 | signature.initSign(privateKey); 31 | 32 | FileInputStream fis = new FileInputStream(arguments[4]); 33 | BufferedInputStream bufin = new BufferedInputStream(fis); 34 | 35 | byte[] buffer = new byte[1024]; 36 | int len; 37 | 38 | while ((len = bufin.read(buffer)) >= 0) { 39 | signature.update(buffer, 0, len); 40 | } 41 | 42 | bufin.close(); 43 | 44 | byte[] realSig = signature.sign(); 45 | 46 | System.out.println(_toHex(realSig)); 47 | } 48 | catch (Exception e) { 49 | e.printStackTrace(); 50 | 51 | System.exit(1); 52 | } 53 | } 54 | 55 | private static String _toHex(byte[] bytes) { 56 | BigInteger bi = new BigInteger(1, bytes); 57 | return String.format("%0" + (bytes.length << 1) + "X", bi); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /narwhal/templates/release-builder/resources/usr/local/signature-util/signature-util.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /narwhal/templates/tang-server/Dockerfile: -------------------------------------------------------------------------------- 1 | # FROM ubuntu:jammy-20230804 2 | FROM ubuntu:jammy@sha256:56887c5194fddd8db7e36ced1c16b3569d89f74c801dc8a5adbf48236fb34564 AS builder 3 | 4 | ARG TANG_COMMIT_SHA=100265e32f56e33c8120fca83de419155ac8db5e 5 | 6 | ENV DEBIAN_FRONTEND=noninteractive 7 | ENV TZ=Etc/UTC 8 | 9 | RUN apt-get update && \ 10 | apt-get install --no-install-recommends -y \ 11 | ca-certificates \ 12 | curl \ 13 | gcc \ 14 | gcovr \ 15 | git \ 16 | iproute2 \ 17 | jose \ 18 | libhttp-parser-dev \ 19 | libhttp-parser2.9 \ 20 | libjose-dev \ 21 | meson \ 22 | pkg-config \ 23 | socat \ 24 | systemd 25 | 26 | RUN git clone https://github.com/latchset/tang.git && \ 27 | cd tang && \ 28 | git checkout ${TANG_COMMIT_SHA} && \ 29 | mkdir build && \ 30 
| cd build && \ 31 | meson .. --prefix=/usr/local && \ 32 | ninja install 33 | 34 | # Production image 35 | FROM ubuntu:jammy 36 | 37 | COPY --from=builder /usr/local/bin/tang-show-keys /usr/local/bin/tang-show-keys 38 | COPY --from=builder /usr/local/libexec/tangd /usr/local/bin/tangd 39 | COPY --from=builder /usr/local/libexec/tangd-keygen /usr/local/bin/tangd-keygen 40 | COPY --from=builder /usr/local/libexec/tangd-rotate-keys /usr/local/bin/tangd-rotate-keys 41 | COPY healtcheck.sh /usr/local/bin/healtcheck.sh 42 | 43 | ENTRYPOINT [ "tini", "-s", "-v", "-w", "--", "/usr/local/bin/tangd", "-l", "/db" ] 44 | 45 | ENV DEBIAN_FRONTEND=noninteractive 46 | ENV TZ=Etc/UTC 47 | 48 | HEALTHCHECK --start-period=5s \ 49 | --timeout=3s \ 50 | CMD "healtcheck.sh" 51 | 52 | RUN apt-get update && \ 53 | apt-get -y --no-install-recommends install \ 54 | curl \ 55 | jose \ 56 | libhttp-parser2.9 \ 57 | openssl \ 58 | python3-http-parser \ 59 | socat \ 60 | tini \ 61 | wget && \ 62 | rm -rf /var/cache/apt/archives /var/lib/apt/lists && \ 63 | groupadd -g 1001 tang && \ 64 | useradd -g 1001 -s /bin/false -u 1001 -d /db tang 65 | 66 | USER tang 67 | -------------------------------------------------------------------------------- /narwhal/templates/tang-server/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | tang-server: 3 | image: tang:latest 4 | ports: 5 | - 80:9090 6 | restart: on-failure 7 | volumes: 8 | - /opt/docker/tang:/db 9 | version: '3' 10 | -------------------------------------------------------------------------------- /narwhal/templates/tang-server/healtcheck.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e 4 | 5 | curl -fs http://127.0.0.1:9090/adv > /dev/null 6 | -------------------------------------------------------------------------------- /orca/.gitignore: 
-------------------------------------------------------------------------------- 1 | builds 2 | configs 3 | docker-build -------------------------------------------------------------------------------- /orca/README.md: -------------------------------------------------------------------------------- 1 | # Managed DXP - Orca 2 | 3 | Simple default configuration to deploy Liferay DXP Clusters on Linux servers, only using simple tools. 4 | 5 | ## Host requirements 6 | 7 | 1. must have `amd64` CPU 8 | * The host must run on an `amd64` architecture (Intel). `arm64` is currently not supported, so you won't be able to use the tool for example on computers with M1 chips from Apple or various arm-based AWS instances. 9 | * The support for `arm64` may be added in the future, but it's currently not a priority. 10 | 11 | ## OS requirements 12 | 13 | 1. must be _Ubuntu_ 14 | * Only Ubuntu (20.04+, 22.04+) is currently supported. If you need to run in another OS, please use a VM with the proper Ubuntu version inside. 15 | 16 | 2. Create a new mounted filesystem (xfs recommended) to `/opt/gluster-data/gv0` 17 | 18 | * Execute the following commands on all servers: 19 | 20 | ``` 21 | $ curl https://raw.githubusercontent.com/liferay/liferay-docker/master/orca/scripts/install_orca.sh -o /tmp/install_orca.sh 22 | $ . /tmp/install_orca.sh 23 | ``` 24 | * Then log in to the first server and execute the following: 25 | 26 | ``` 27 | $ gluster peer probe 28 | $ gluster peer probe 29 | $ ... 
30 | $ gluster volume create gv0 replica 3 :/opt/gluster-data/gv0/ :/opt/gluster-data/gv0/ :/opt/gluster-data/gv0/ 31 | $ gluster volume start gv0 32 | $ gluster volume info 33 | $ mount /opt/liferay/shared-volume 34 | ``` 35 | -------------------------------------------------------------------------------- /orca/configs/demo.yml: -------------------------------------------------------------------------------- 1 | hosts: 2 | db1: 3 | ip: 10.111.111.111 4 | services: 5 | - db 6 | - log-proxy 7 | db2: 8 | ip: 10.111.111.112 9 | services: 10 | - db 11 | - log-proxy 12 | jenkins: 13 | ip: 10.111.111.11 14 | services: 15 | - ci 16 | - log-proxy 17 | jumper: 18 | ip: 10.111.111.10 19 | services: 20 | - log-proxy 21 | - teleport-proxy 22 | observer: 23 | ip: 10.111.111.12 24 | services: 25 | - log-proxy 26 | - log-server 27 | - monitoring-gateway 28 | - monitoring-proxy 29 | search1: 30 | ip: 10.111.111.121 31 | services: 32 | - log-proxy 33 | - search 34 | search2: 35 | ip: 10.111.111.122 36 | services: 37 | - log-proxy 38 | - search 39 | search3: 40 | ip: 10.111.111.123 41 | services: 42 | - log-proxy 43 | - search 44 | web1: 45 | ip: 10.111.111.101 46 | services: 47 | - liferay 48 | - log-proxy 49 | - web-server 50 | web2: 51 | ip: 10.111.111.102 52 | services: 53 | - liferay 54 | - log-proxy 55 | - web-server 56 | web2: 57 | ip: 10.111.111.102 58 | services: 59 | - liferay 60 | - log-proxy 61 | - web-server -------------------------------------------------------------------------------- /orca/configs/single_server.yml: -------------------------------------------------------------------------------- 1 | development: true 2 | hosts: 3 | localhost: 4 | services: 5 | - antivirus 6 | - backup 7 | - ci 8 | - db 9 | - liferay 10 | - log-proxy 11 | - log-server 12 | - monitoring-gateway 13 | - monitoring-proxy 14 | - monitoring-proxy-db 15 | - search 16 | - teleport-agent-test 17 | - teleport-proxy 18 | - vault 19 | - web-server 
-------------------------------------------------------------------------------- /orca/scripts/_common.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function check_utils { 4 | for util in "${@}" 5 | do 6 | if (! command -v "${util}" &>/dev/null) 7 | then 8 | echo "The utility ${util} is not installed." 9 | 10 | ORCA_VALIDATION_ERROR=1 11 | fi 12 | done 13 | } 14 | 15 | function docker_compose { 16 | if (command -v docker-compose &>/dev/null) 17 | then 18 | docker-compose "${@}" 19 | else 20 | docker compose "${@}" 21 | fi 22 | } 23 | 24 | function lcd { 25 | cd "${1}" || exit 3 26 | } -------------------------------------------------------------------------------- /orca/scripts/install_orca.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function lcd { 4 | cd "${1}" || exit 3 5 | } 6 | 7 | function main { 8 | apt-get update 9 | apt-get --yes install docker-compose git glusterfs-server pwgen 10 | 11 | if (! command -v yq &> /dev/null) 12 | then 13 | snap install yq 14 | fi 15 | 16 | mkdir -p /opt/liferay/orca 17 | 18 | lcd /opt/liferay/orca 19 | 20 | git init 21 | git remote add origin https://github.com/liferay/liferay-docker.git 22 | git config core.sparseCheckout true 23 | 24 | echo "orca" >> .git/info/sparse-checkout 25 | 26 | git pull origin master 27 | 28 | # 29 | # TODO Fix /opt/liferay/orca/orca 30 | # 31 | 32 | lcd orca 33 | 34 | # 35 | # TODO install 36 | # 37 | 38 | scripts/orca.sh install 39 | } 40 | 41 | main -------------------------------------------------------------------------------- /orca/scripts/validate_environment.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source $(dirname "$(readlink /proc/$$/fd/255 2>/dev/null)")/_common.sh 4 | 5 | function create_dir { 6 | local dir="${1}" 7 | local service_uid="${2}" 8 | 9 | if [ -d "${dir}" ] 10 | then 11 | if [ ! 
"$(stat -c '%u' "${dir}")" -eq "${service_uid}" ] 12 | then 13 | echo -n "Setting owner of ${dir} to ${service_uid}... " 14 | 15 | if (sudo chown "${service_uid}" "${dir}") 16 | then 17 | echo "done." 18 | else 19 | echo "failed." 20 | 21 | ORCA_VALIDATION_ERROR=1 22 | fi 23 | fi 24 | else 25 | echo -n "${dir} does not exist, creating... " 26 | 27 | if (sudo install -d "${dir}" -o "${service_uid}") 28 | then 29 | echo "done." 30 | else 31 | echo "failed." 32 | 33 | ORCA_VALIDATION_ERROR=1 34 | fi 35 | fi 36 | } 37 | 38 | function create_dirs { 39 | local db_uid=1001 40 | local default_uid=1000 41 | 42 | if [ "${DOCKER_HOST}" == "unix:///run/user/$(id -u)/docker.sock" ] 43 | then 44 | db_uid=1000 45 | default_uid=166535 46 | fi 47 | 48 | create_dir "/opt/liferay/backups" ${default_uid} 49 | create_dir "/opt/liferay/db-data" ${db_uid} 50 | create_dir "/opt/liferay/jenkins-home" ${default_uid} 51 | create_dir "/opt/liferay/monitoring-proxy-db-data" ${db_uid} 52 | create_dir "/opt/liferay/shared-volume" ${default_uid} 53 | create_dir "/opt/liferay/shared-volume/document-library" ${default_uid} 54 | create_dir "/opt/liferay/shared-volume/logs" ${default_uid} 55 | create_dir "/opt/liferay/vault/data" ${default_uid} 56 | } 57 | 58 | function main { 59 | check_utils docker yq 60 | 61 | create_dirs 62 | 63 | set_vm_max_map_count 64 | 65 | if [ -n "${ORCA_VALIDATION_ERROR}" ] 66 | then 67 | echo "There was at least one error during validation. Please fix them before starting the services." 68 | 69 | exit 1 70 | fi 71 | } 72 | 73 | function set_vm_max_map_count { 74 | echo "Setting sysctl value: \"vm.max_map_count=262144\"... " 75 | 76 | if (sudo sysctl -w vm.max_map_count=262144) 77 | then 78 | echo "done." 79 | else 80 | echo "failed." 
81 | 82 | ORCA_VALIDATION_ERROR=1 83 | fi 84 | } 85 | 86 | main -------------------------------------------------------------------------------- /orca/templates/antivirus/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM clamav/clamav:1.0.1-1 2 | FROM clamav/clamav@sha256:eb1057073c6178d3e7f3e6e1edb43feec84db6809f12fbf900d9f7056380793a -------------------------------------------------------------------------------- /orca/templates/backup/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM liferay/job-runner:5.0.0-20220830072327 2 | FROM liferay/job-runner@sha256:1f77e45c00c325cca3b80d13fc3de0871b0e24a4024478b7a7296b5730f3c087 3 | 4 | RUN apt-get update && \ 5 | apt-get --yes install mariadb-client && \ 6 | apt-get upgrade --yes && \ 7 | apt-get clean && \ 8 | rm -fr /var/lib/apt/lists/* 9 | 10 | COPY resources/ / -------------------------------------------------------------------------------- /orca/templates/backup/resources/mnt/liferay/job-crontab: -------------------------------------------------------------------------------- 1 | @reboot /usr/local/bin/register_job.sh fetch_secrets 2 | ${ORCA_BACKUP_CRON_EXPRESSION} /usr/local/bin/register_job.sh backup 3 | -------------------------------------------------------------------------------- /orca/templates/backup/resources/mnt/liferay/jobs/backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /usr/local/bin/backup.sh -------------------------------------------------------------------------------- /orca/templates/backup/resources/mnt/liferay/jobs/fetch_secrets.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /usr/local/bin/fetch_orca_secrets.sh backup mysql_root_password -------------------------------------------------------------------------------- 
/orca/templates/backup/resources/usr/local/bin/backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function date { 4 | export TZ=UTC 5 | 6 | if [ -z ${1+x} ] || [ -z ${2+x} ] 7 | then 8 | if [ "$(uname)" == "Darwin" ] 9 | then 10 | /bin/date 11 | elif [ -e /bin/date ] 12 | then 13 | /bin/date --iso-8601=seconds 14 | else 15 | /usr/bin/date --iso-8601=seconds 16 | fi 17 | else 18 | if [ "$(uname)" == "Darwin" ] 19 | then 20 | /bin/date -jf "%a %b %e %H:%M:%S %Z %Y" "${1}" "${2}" 21 | elif [ -e /bin/date ] 22 | then 23 | /bin/date -d "${1}" "${2}" 24 | else 25 | /usr/bin/date -d "${1}" "${2}" 26 | fi 27 | fi 28 | } 29 | 30 | function main { 31 | local current_date=$(date) 32 | 33 | local backup_dir=$(date "${current_date}" "+%Y-%m/%Y-%m-%d/%Y-%m-%d-%H%M%S") 34 | 35 | backup_dir=/opt/liferay/backups/${backup_dir} 36 | 37 | mkdir -p ${backup_dir} 38 | 39 | echo "Starting backup at ${backup_dir}." 40 | 41 | local timestamp=$(date "${current_date}" "+%Y%m%d%H%M%S") 42 | 43 | backup_db.sh ${backup_dir} ${timestamp} & 44 | 45 | backup_document_library.sh ${backup_dir} ${timestamp} & 46 | 47 | wait 48 | 49 | echo "Exited backup at ${backup_dir}." 50 | 51 | ls -hl ${backup_dir} 52 | } 53 | 54 | main "${@}" -------------------------------------------------------------------------------- /orca/templates/backup/resources/usr/local/bin/backup_db.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | set -o pipefail 5 | 6 | function check_usage { 7 | if [ ! -n ${ORCA_DB_ADDRESSES} ] 8 | then 9 | echo "Set the environment variable ORCA_DB_ADDRESSES to a comma separated list of database servers (e.g. db-1:3306,db-2:3306)." 10 | 11 | exit 1 12 | fi 13 | } 14 | 15 | function main { 16 | check_usage 17 | 18 | echo "Starting database backup." 
19 | 20 | local db_address 21 | 22 | for db_address in ${ORCA_DB_ADDRESSES//,/ } 23 | do 24 | echo "Dumping ${db_address}." 25 | 26 | local db_host=${db_address%%:*} 27 | 28 | if (mysqldump -h ${db_host} -p$(cat /tmp/orca-secrets/mysql_root_password) -u root lportal | gzip > ${1}/db-lportal-${2}.sql.gz) 29 | then 30 | local success=1 31 | 32 | break 33 | fi 34 | done 35 | 36 | if [ -n "${success}" ] 37 | then 38 | echo "Database backup was completed successfully." 39 | else 40 | echo "Database backup failed, please check the logs." 41 | fi 42 | } 43 | 44 | main "${@}" -------------------------------------------------------------------------------- /orca/templates/backup/resources/usr/local/bin/backup_document_library.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | function main { 6 | echo "Starting document library backup." 7 | 8 | cd /opt/liferay/shared-volume 9 | 10 | tar cz document-library > ${1}/document-library-${2}.tar.gz 11 | 12 | echo "Document library backup was completed successfully." 
13 | } 14 | 15 | main "${@}" -------------------------------------------------------------------------------- /orca/templates/ci/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jenkins/jenkins:lts-jdk11 -------------------------------------------------------------------------------- /orca/templates/db/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM bitnami/mariadb-galera:10.5 2 | FROM bitnami/mariadb-galera@sha256:cd63ec64f535e1a7554316429d2597cef94a60a0e877740e21a323c81afd2d4c 3 | 4 | USER 0 5 | 6 | RUN apt-get update && \ 7 | apt-get --yes install curl && \ 8 | apt-get upgrade --yes && \ 9 | apt-get clean && \ 10 | rm -fr /var/lib/apt/lists/* 11 | 12 | CMD [ "/opt/bitnami/scripts/mariadb-galera/run.sh" ] 13 | 14 | COPY resources/ / 15 | 16 | ENTRYPOINT [ "/usr/local/bin/liferay_entrypoint.sh" ] -------------------------------------------------------------------------------- /orca/templates/db/resources/usr/local/bin/connect_to_mysql.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | mysql -p$(cat /run/secrets/mysql_root_password) -u root lportal -------------------------------------------------------------------------------- /orca/templates/db/resources/usr/local/bin/liferay_entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | fetch_orca_secrets.sh db mysql_backup_password mysql_liferay_password mysql_root_password 4 | 5 | wait_for_dependencies.sh 6 | 7 | /opt/bitnami/scripts/mariadb-galera/entrypoint.sh ${@} -------------------------------------------------------------------------------- /orca/templates/db/resources/usr/local/bin/make_primary.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | local db_password=$(cat ${MARIADB_ROOT_PASSWORD_FILE}) 4 | 5 | echo "SET GLOBAL 
wsrep_provider_options='pc.bootstrap=YES'" | mysql -h127.0.0.1 -p${db_password} -u root -------------------------------------------------------------------------------- /orca/templates/db/resources/usr/local/bin/wait_for_dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function main { 4 | if [ -n "${MARIADB_GALERA_CLUSTER_BOOTSTRAP}" ] 5 | then 6 | echo "Do not wait for dependencies because the environment variable MARIADB_GALERA_CLUSTER_BOOTSTRAP was set." 7 | 8 | return 9 | fi 10 | 11 | if [ -n "${ORCA_DB_SKIP_WAIT}" ] 12 | then 13 | echo "Do not wait for dependencies because the environment variable ORCA_DB_SKIP_WAIT was set." 14 | 15 | return 16 | fi 17 | 18 | if [ ! -e "/bitnami/mariadb/data/grastate.dat" ] 19 | then 20 | echo "Do not wait for dependencies because Galera state file does not exist." 21 | 22 | return 23 | fi 24 | 25 | if (grep "safe_to_bootstrap: 1" "/bitnami/mariadb/data/grastate.dat" &>/dev/null) 26 | then 27 | echo "Do not wait for dependencies because this is the master node." 28 | 29 | return 30 | fi 31 | 32 | local db_password=$(cat ${MARIADB_PASSWORD_FILE}) 33 | 34 | while true 35 | do 36 | for db_address in ${ORCA_DB_ADDRESSES//,/ } 37 | do 38 | local db_host="${db_address%%:*}" 39 | 40 | if (echo "select 1" | mysql --connect-timeout=3 -h${db_host} -p${db_password} -u${MARIADB_USER} &>/dev/null) 41 | then 42 | echo "Connected to ${db_host}." 43 | 44 | return 45 | fi 46 | done 47 | 48 | echo "Unable to connect to ${ORCA_DB_ADDRESSES}. Waiting." 
49 | 50 | sleep 3 51 | done 52 | } 53 | 54 | main -------------------------------------------------------------------------------- /orca/templates/liferay/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM liferay/dxp:2023.q3.1-d5.0.52-20231031145025 2 | FROM liferay/dxp@sha256:a3bb5bb08892d282c904f4ad828296c2e24973917a4566ee21582468b30553d4 3 | 4 | USER 0 5 | 6 | RUN apt-get update && \ 7 | DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install --no-install-recommends --yes mariadb-client && \ 8 | apt-get upgrade --yes && \ 9 | apt-get clean 10 | 11 | COPY --chown=liferay:liferay resources/opt/liferay /opt/liferay/ 12 | COPY resources/usr/ /usr 13 | 14 | HEALTHCHECK --retries=3 \ 15 | CMD /usr/local/bin/probe_thread_dump.sh -d "http://localhost" -f "/c/portal/robots" -p 8080 -t 20 16 | 17 | USER liferay 18 | 19 | RUN /usr/local/bin/install_patch_on_build.sh -------------------------------------------------------------------------------- /orca/templates/liferay/resources/opt/liferay/cluster-link-tcp.xml: -------------------------------------------------------------------------------- 1 | 6 | 17 | 23 | 27 | 28 | 32 | 35 | 36 | 40 | 41 | 45 | 49 | 53 | 57 | 60 | 61 | -------------------------------------------------------------------------------- /orca/templates/liferay/resources/usr/local/bin/install_patch_on_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ $(find "/opt/liferay/patching-tool/patches" -maxdepth 1 -name "liferay-*.zip" -type f | wc -l) == 1 ] 4 | then 5 | /opt/liferay/patching-tool/patching-tool.sh install 6 | 7 | rm -fr /opt/liferay/osgi/state 8 | fi -------------------------------------------------------------------------------- /orca/templates/liferay/resources/usr/local/liferay/scripts/pre-configure/01_fetch_orca_secrets.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 
3 | /usr/local/bin/fetch_orca_secrets.sh liferay mysql_liferay_password -------------------------------------------------------------------------------- /orca/templates/liferay/resources/usr/local/liferay/scripts/pre-startup/10_wait_for_dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function wait_for_mysql { 4 | local jdbc_driver_class_name="${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_DRIVER_UPPERCASEC_LASS_UPPERCASEN_AME}" 5 | 6 | if [[ "${jdbc_driver_class_name}" != *mariadb* ]] && [[ "${jdbc_driver_class_name}" != *mysql* ]] 7 | then 8 | return 9 | fi 10 | 11 | local db_host="${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_URL}" 12 | 13 | db_host="${db_host##*://}" 14 | db_host="${db_host%%/*}" 15 | db_host="${db_host%%:*}" 16 | 17 | local db_password=${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_PASSWORD} 18 | 19 | if [ -n "${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_PASSWORD_FILE}" ] 20 | then 21 | db_password=$(cat "${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_PASSWORD_FILE}") 22 | fi 23 | 24 | local db_username=${LIFERAY_JDBC_PERIOD_DEFAULT_PERIOD_USERNAME} 25 | 26 | echo "Connecting to database server ${db_username}@${db_host}." 27 | 28 | while ! (echo "select 1" | mysql -h "${db_host}" -p"${db_password}" -u "${db_username}" &>/dev/null) 29 | do 30 | echo "Waiting for database server ${db_username}@${db_host}." 31 | 32 | sleep 3 33 | done 34 | 35 | echo "Database server ${db_username}@${db_host} is available." 36 | } 37 | 38 | function wait_for_search { 39 | if [ ! -n "${ORCA_LIFERAY_SEARCH_ADDRESSES}" ] 40 | then 41 | echo "Do not wait for search server because the environment variable ORCA_LIFERAY_SEARCH_ADDRESSES was not set." 42 | 43 | return 44 | fi 45 | 46 | echo "Connecting to ${ORCA_LIFERAY_SEARCH_ADDRESSES}." 
47 | 48 | while true 49 | do 50 | for search_address in ${ORCA_LIFERAY_SEARCH_ADDRESSES//,/ } 51 | do 52 | if ( curl --max-time 3 --silent "${search_address}/_cat/health" | grep "green" &>/dev/null) 53 | then 54 | echo "Search server ${search_address} is available." 55 | 56 | return 57 | fi 58 | done 59 | 60 | echo "Waiting for at least one search server to become available." 61 | 62 | sleep 3 63 | done 64 | } 65 | 66 | function main { 67 | wait_for_mysql 68 | 69 | wait_for_search 70 | } 71 | 72 | main -------------------------------------------------------------------------------- /orca/templates/log-proxy/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM gliderlabs/logspout:v3.2.14 2 | FROM gliderlabs/logspout@sha256:f32a5944c8e3ba12a773ebb4e1b0660a9ddf6199139a6ec8db8a719f6e473292 -------------------------------------------------------------------------------- /orca/templates/log-proxy/build.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/orca/templates/log-proxy/build.sh -------------------------------------------------------------------------------- /orca/templates/log-proxy/modules.go: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/orca/templates/log-proxy/modules.go -------------------------------------------------------------------------------- /orca/templates/log-server/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM balabit/syslog-ng:3.38.1 2 | FROM balabit/syslog-ng@sha256:1a13ed478136ed7a2638095f87da510121fa4d4d15b6171b543bab42948e4ae7 3 | 4 | COPY resources/ / -------------------------------------------------------------------------------- 
/orca/templates/log-server/resources/etc/syslog-ng/syslog-ng.conf: -------------------------------------------------------------------------------- 1 | @include scl.conf 2 | @version: 3.37 3 | 4 | destination d_local { 5 | file("/var/log/syslogng/${PROGRAM}.${HOST}-${YEAR}-${MONTH}-${DAY}.log"); 6 | }; 7 | 8 | log { 9 | source(s_network); 10 | 11 | destination(d_local); 12 | }; 13 | 14 | source s_network { 15 | default-network-drivers(); 16 | }; -------------------------------------------------------------------------------- /orca/templates/monitoring-gateway/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM zabbix/zabbix-java-gateway:6.2.8-ubuntu 2 | FROM zabbix/zabbix-java-gateway@sha256:047736d95d8dd2b4364779f9c9e4aa405e8e5b18a24fae5ee0c469bfd8becac2 -------------------------------------------------------------------------------- /orca/templates/monitoring-proxy-db/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM bitnami/mariadb:10.5 2 | FROM bitnami/mariadb@sha256:130cb5f51b0f235c7a02f21361398cf9c9b2d9404ea05debeaf17bd2059aadc6 -------------------------------------------------------------------------------- /orca/templates/monitoring-proxy/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM zabbix/zabbix-proxy-mysql:6.2.8-ubuntu 2 | FROM zabbix/zabbix-proxy-mysql@sha256:f7b0a8bec1532eeffa5151255c51ef95bffb4c2ac037ef67b9609238615d43d4 -------------------------------------------------------------------------------- /orca/templates/search/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM elasticsearch:7.17.9 2 | FROM elasticsearch@sha256:56789f44fd8c451fdeb40a095c5089367e588c7a24e0a03cdbd6ba53ebd84649 3 | 4 | RUN /usr/share/elasticsearch/bin/elasticsearch-plugin install \ 5 | analysis-icu \ 6 | analysis-kuromoji \ 7 | analysis-smartcn \ 8 | analysis-stempel 
-------------------------------------------------------------------------------- /orca/templates/teleport-agent-test/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:jammy 2 | 3 | CMD ["teleport", "start", "--auth-server=teleport-proxy", "--roles=node", "--token=/agent-test/token.txt"] 4 | 5 | ENV DEBIAN_FRONTEND=noninteractive 6 | ENV TZ=Etc/UTC 7 | 8 | RUN apt update && \ 9 | apt upgrade -y && \ 10 | apt install --no-install-recommends -y openssh-server curl ca-certificates 11 | 12 | RUN curl https://apt.releases.teleport.dev/gpg -o /usr/share/keyrings/teleport-archive-keyring.asc && \ 13 | echo "deb [signed-by=/usr/share/keyrings/teleport-archive-keyring.asc] https://apt.releases.teleport.dev/ubuntu jammy stable/v12" > /etc/apt/sources.list.d/teleport.list && \ 14 | apt update && \ 15 | apt install -y teleport && \ 16 | apt clean && \ 17 | rm -fr /var/lib/apt/lists/* 18 | 19 | RUN adduser liferay --gecos "Liferay common user" --disabled-password && \ 20 | install -d /home/liferay/.ssh -g liferay -m 0700 -o liferay && \ 21 | echo "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIK9cRilRehhA3bBKZfd8OITMFVyzQBUvCjvbejLsJavD tamas.papp@liferay" > /home/liferay/.ssh/authorized_keys && \ 22 | echo "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILOu945eSM8vlNkxMmnYrIYkoFaPO0L7+M0cWnV8/tH2 zsolt.balogh@liferay" >> /home/liferay/.ssh/authorized_keys && \ 23 | chown -R liferay:liferay /home/liferay && \ 24 | chmod 600 /home/liferay/.ssh/authorized_keys -------------------------------------------------------------------------------- /orca/templates/teleport-proxy/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/gravitational/teleport:12 2 | 3 | COPY resources/ / 4 | 5 | ENV DEBIAN_FRONTEND=noninteractive 6 | ENV TZ=Etc/UTC 7 | 8 | ENTRYPOINT ["/usr/bin/dumb-init", "--", "/usr/local/bin/teleport_server_entrypoint.sh"] 9 | 10 | RUN apt update && \ 11 | apt upgrade -y && 
\ 12 | apt clean && \ 13 | rm -fr /var/lib/apt/lists/* -------------------------------------------------------------------------------- /orca/templates/teleport-proxy/resources/etc/teleport/teleport.yaml: -------------------------------------------------------------------------------- 1 | auth_service: 2 | authentication: 3 | type: github 4 | cluster_name: jumper 5 | enabled: "yes" 6 | listen_addr: 0.0.0.0:3025 7 | proxy_listener_mode: multiplex 8 | proxy_service: 9 | acme: {} 10 | enabled: "yes" 11 | https_keypairs: 12 | - cert_file: /etc/teleport/server.crt 13 | key_file: /etc/teleport/server.key 14 | https_keypairs_reload_interval: 0s 15 | ssh_service: 16 | commands: 17 | - name: hostname 18 | command: [hostname] 19 | period: 1m0s 20 | enabled: "yes" 21 | teleport: 22 | ca_pin: "" 23 | data_dir: /var/lib/teleport 24 | diag_addr: "" 25 | log: 26 | format: 27 | output: text 28 | output: stderr 29 | severity: INFO 30 | nodename: localhost 31 | version: v3 32 | -------------------------------------------------------------------------------- /orca/templates/teleport-proxy/resources/root/github.yaml.tpl: -------------------------------------------------------------------------------- 1 | kind: github 2 | metadata: 3 | name: github 4 | spec: 5 | client_id: __GITHUB_ID__ 6 | client_secret: __GITHUB_SECRET__ 7 | display: GitHub 8 | redirect_url: https://__GITHUB_REDIRECT_HOST__:3080/v1/webapi/github/callback 9 | teams_to_roles: 10 | - organization: liferay-orca-test 11 | roles: 12 | - access 13 | - editor 14 | team: admins 15 | version: v3 -------------------------------------------------------------------------------- /orca/templates/teleport-proxy/resources/usr/local/bin/teleport_server_entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ ! 
-f /etc/teleport/server.crt ] 4 | then 5 | openssl req -days 3650 -keyout /etc/teleport/server.key -new -newkey rsa:2048 -nodes -out /etc/teleport/server.crt -sha256 -subj "/CN=192.168.233.141/O=Liferay/C=HU" -x509 6 | fi 7 | 8 | teleport start -c /etc/teleport/teleport.yaml -------------------------------------------------------------------------------- /orca/templates/vault/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM liferay/base:latest 2 | 3 | RUN apt-get update && \ 4 | apt-get --yes install gnupg jq libcap2-bin lsb-release pwgen && \ 5 | curl https://apt.releases.hashicorp.com/gpg | gpg --dearmor > /usr/share/keyrings/hashicorp-archive-keyring.gpg && \ 6 | echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" > /etc/apt/sources.list.d/hashicorp.list && \ 7 | apt-get update && \ 8 | apt-get install vault 9 | 10 | # 11 | # Workaround for https://github.com/hashicorp/vault/issues/10924 12 | # 13 | 14 | RUN setcap -r /usr/bin/vault 15 | 16 | COPY resources/ / 17 | 18 | ENTRYPOINT ["tini", "--", "/usr/local/bin/vault_entrypoint.sh"] 19 | 20 | ENV VAULT_ADDR='http://127.0.0.1:8200' 21 | 22 | USER liferay 23 | 24 | WORKDIR /opt/liferay/vault -------------------------------------------------------------------------------- /orca/templates/vault/resources/opt/liferay/vault/config.hcl: -------------------------------------------------------------------------------- 1 | api_addr = "http://127.0.0.1:8200" 2 | cluster_addr = "https://127.0.0.1:8201" 3 | 4 | disable_mlock = true 5 | 6 | listener "tcp" { 7 | address="0.0.0.0:8200" 8 | tls_disable="true" 9 | } 10 | 11 | storage "raft" { 12 | path="/opt/liferay/vault/data" 13 | } 14 | 15 | ui = true -------------------------------------------------------------------------------- /orca/templates/vault/resources/opt/liferay/vault/policy_backup.hcl: 
-------------------------------------------------------------------------------- 1 | path "secret/data/mysql_root_password" { 2 | capabilities = ["read"] 3 | } -------------------------------------------------------------------------------- /orca/templates/vault/resources/opt/liferay/vault/policy_db.hcl: -------------------------------------------------------------------------------- 1 | path "secret/data/mysql_backup_password" { 2 | capabilities = ["read"] 3 | } 4 | 5 | path "secret/data/mysql_liferay_password" { 6 | capabilities = ["read"] 7 | } 8 | 9 | path "secret/data/mysql_root_password" { 10 | capabilities = ["read"] 11 | } -------------------------------------------------------------------------------- /orca/templates/vault/resources/opt/liferay/vault/policy_liferay.hcl: -------------------------------------------------------------------------------- 1 | path "secret/data/mysql_liferay_password" { 2 | capabilities = ["read"] 3 | } -------------------------------------------------------------------------------- /orca/templates/vault/resources/opt/liferay/vault/policy_shared.hcl: -------------------------------------------------------------------------------- 1 | path "secret/data/shared" { 2 | capabilities = ["list"] 3 | } -------------------------------------------------------------------------------- /orca/templates/vault/resources/usr/local/bin/_common.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function wait_for_operator { 4 | while true 5 | do 6 | if ( curl --max-time 3 --silent "http://localhost:8200/v1/sys/health" | grep "${1}" &>/dev/null) 7 | then 8 | echo "Vault operator is available." 9 | 10 | break 11 | fi 12 | 13 | echo "Waiting for the operator to become available." 
14 | 15 | sleep 1 16 | done 17 | } -------------------------------------------------------------------------------- /orca/templates/vault/resources/usr/local/bin/auto_unseal.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source /usr/local/bin/_common.sh 4 | 5 | wait_for_operator "\"standby\":true" 6 | 7 | vault operator unseal "$(cat /opt/liferay/vault/data/unseal_key)" -------------------------------------------------------------------------------- /orca/templates/vault/resources/usr/local/bin/vault_entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ "${ORCA_DEVELOPMENT_MODE}" == "true" ] 4 | then 5 | if [ ! -e /opt/liferay/vault/data/vault.db ] 6 | then 7 | init_secrets.sh & 8 | else 9 | auto_unseal.sh & 10 | fi 11 | fi 12 | 13 | vault server -config=/opt/liferay/vault/config.hcl -------------------------------------------------------------------------------- /orca/templates/web-server/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM liferay/base:4.1.0-20220613095221 2 | FROM liferay/base@sha256:69216037cf0ba5e1b1aeca38c2c038713ec29bf3d824e5ad7a0debc0a822dceb 3 | 4 | RUN apt-get update && \ 5 | DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get --yes install apache2 && \ 6 | apt-get upgrade --yes && \ 7 | apt-get clean && \ 8 | rm -fr /var/lib/apt/lists/* 9 | 10 | RUN a2enmod proxy_ajp && \ 11 | a2enmod proxy_balancer && \ 12 | a2enmod lbmethod_byrequests 13 | 14 | COPY resources/ / 15 | 16 | ENTRYPOINT ["tini", "--", "/usr/local/bin/web_server_entrypoint.sh"] -------------------------------------------------------------------------------- /orca/templates/web-server/resources/usr/local/bin/web_server_entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function generate_liferay_conf { 4 | function write { 5 | 
echo "${1}" >> "/etc/apache2/sites-available/liferay.conf" 6 | } 7 | 8 | rm -f "/etc/apache2/sites-available/liferay.conf" 9 | 10 | write "" 11 | write " CustomLog /proc/self/fd/1 vhost_combined" 12 | write " DocumentRoot /var/www/html" 13 | write " ErrorLog /proc/self/fd/2" 14 | write " ProxyPreserveHost On" 15 | write " ProxyPass \"/\" \"balancer://cluster/\"" 16 | write " ServerAdmin webmaster@localhost" 17 | write "" 18 | write " " 19 | 20 | for balance_member in ${ORCA_WEB_SERVER_BALANCE_MEMBERS//,/ } 21 | do 22 | local ajp_address="${balance_member##*::}" 23 | local route="${balance_member%%::*}" 24 | 25 | write " BalancerMember \"ajp://${ajp_address}\" loadfactor=1 route=${route}" 26 | done 27 | 28 | write " ProxySet stickysession=JSESSIONID" 29 | write " " 30 | write "" 31 | 32 | echo "Generated /etc/apache2/sites-available/liferay.conf." 33 | 34 | cat /etc/apache2/sites-available/liferay.conf 35 | } 36 | 37 | function main { 38 | generate_liferay_conf 39 | 40 | set_up_sites 41 | 42 | start_apache2 43 | } 44 | 45 | function set_up_sites { 46 | a2dissite "000-default.conf" 47 | a2ensite "liferay.conf" 48 | } 49 | 50 | function start_apache2 { 51 | mkdir /var/run/apache2 52 | 53 | chown www-data:www-data /var/run/apache2 54 | 55 | source /etc/apache2/envvars 56 | 57 | /usr/sbin/apache2 -DFOREGROUND 58 | } 59 | 60 | main -------------------------------------------------------------------------------- /patching_tool_version.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_common.sh 4 | source ./_liferay_common.sh 5 | 6 | function check_usage { 7 | if [ ! -n "${1}" ] 8 | then 9 | echo "Usage: ${0} " 10 | echo "" 11 | echo "This script requires the first parameter to be set to the major and minor version of the Patching Tool (e.g. 2.0)." 
12 | 13 | exit 1 14 | fi 15 | } 16 | 17 | function get_full_version { 18 | if [ "${1}" == "1.0" ] 19 | then 20 | echo "1.0.24" 21 | elif [ "${1}" == "2.0" ] 22 | then 23 | echo $(lc_curl https://releases.liferay.com/tools/patching-tool/LATEST-2.0.txt) 24 | elif [ "${1}" == "3.0" ] 25 | then 26 | echo $(lc_curl https://releases.liferay.com/tools/patching-tool/LATEST-3.0.txt) 27 | elif [ "${1}" == "4.0" ] 28 | then 29 | echo $(lc_curl https://releases.liferay.com/tools/patching-tool/LATEST-4.0.txt) 30 | else 31 | echo "Unable to get full version for ${1}." 32 | 33 | exit 2 34 | fi 35 | } 36 | 37 | function main { 38 | check_usage "${@}" 39 | 40 | get_full_version "${@}" 41 | } 42 | 43 | main "${@}" -------------------------------------------------------------------------------- /release/.gitignore: -------------------------------------------------------------------------------- 1 | release-data -------------------------------------------------------------------------------- /release/_github.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ../_liferay_common.sh 4 | 5 | function invoke_github_api_delete { 6 | _invoke_github_api "${1}" "${2}" "${3}" "DELETE" 7 | 8 | echo $? 9 | } 10 | 11 | function invoke_github_api_post { 12 | _invoke_github_api "${1}" "${2}" "${3}" "POST" 13 | 14 | echo $? 15 | } 16 | 17 | function _invoke_github_api { 18 | local curl_response=$(\ 19 | curl \ 20 | "https://api.github.com/repos/${1}/${2}" \ 21 | --data "${3}" \ 22 | --fail \ 23 | --header "Accept: application/vnd.github+json" \ 24 | --header "Authorization: Bearer ${LIFERAY_RELEASE_GITHUB_PAT}" \ 25 | --header "X-GitHub-Api-Version: 2022-11-28" \ 26 | --include \ 27 | --max-time 10 \ 28 | --request "${4}" \ 29 | --retry 3 \ 30 | --silent) 31 | 32 | if ! 
[[ $(echo "${curl_response}" | awk '/^HTTP/{print $2}') =~ ^2 ]] 33 | then 34 | return "${LIFERAY_COMMON_EXIT_CODE_SKIPPED}" 35 | fi 36 | 37 | return "${LIFERAY_COMMON_EXIT_CODE_OK}" 38 | } -------------------------------------------------------------------------------- /release/_jdk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ../_liferay_common.sh 4 | source ../_release_common.sh 5 | 6 | function set_jdk_version_and_parameters { 7 | local jdk_version="zulu8" 8 | 9 | if is_quarterly_release 10 | then 11 | if [[ "$(get_release_year)" -ge 2025 ]] 12 | then 13 | jdk_version="openjdk17" 14 | fi 15 | fi 16 | 17 | if is_ga_release 18 | then 19 | if [[ "$(get_release_version_trivial)" -ge 132 ]] 20 | then 21 | jdk_version="openjdk17" 22 | fi 23 | fi 24 | 25 | if [[ "$(get_release_version)" == "7.4.13" ]] && 26 | [[ "$(get_release_version_trivial)" -ge 132 ]] 27 | then 28 | jdk_version="openjdk17" 29 | fi 30 | 31 | if [ ! -d "/opt/java/${jdk_version}" ] 32 | then 33 | lc_log INFO "JDK ${jdk_version} is not installed." 34 | 35 | jdk_version=$(echo "${jdk_version}" | sed -r "s/(openjdk|zulu)/jdk/g") 36 | 37 | if [ ! -d "/opt/java/${jdk_version}" ] 38 | then 39 | lc_log INFO "JDK ${jdk_version} is not installed." 40 | 41 | return "${LIFERAY_COMMON_EXIT_CODE_BAD}" 42 | fi 43 | fi 44 | 45 | lc_log INFO "Using JDK ${jdk_version} for release ${_PRODUCT_VERSION}." 46 | 47 | export JAVA_HOME="/opt/java/${jdk_version}" 48 | 49 | if [[ "${jdk_version}" == *"8"* ]] && [[ ! 
"${JAVA_OPTS}" =~ "-XX:MaxPermSize" ]] 50 | then 51 | JAVA_OPTS="${JAVA_OPTS} -XX:MaxPermSize=256m" 52 | fi 53 | 54 | if [[ "${jdk_version}" == *"17"* ]] 55 | then 56 | JAVA_OPTS=$(echo "${JAVA_OPTS}" | sed "s/-XX:MaxPermSize=[^ ]*//g") 57 | fi 58 | 59 | export JAVA_OPTS 60 | 61 | export PATH="${JAVA_HOME}/bin:${PATH}" 62 | } -------------------------------------------------------------------------------- /release/_jira.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ../_liferay_common.sh 4 | 5 | function add_jira_issue { 6 | local data=$( 7 | cat <<- END 8 | { 9 | "fields": { 10 | "assignee": { 11 | "id": "${1}" 12 | }, 13 | "components": [ 14 | { 15 | "name": "${2}" 16 | } 17 | ], 18 | "issuetype": { 19 | "name": "${3}" 20 | }, 21 | "project": { 22 | "key": "${4}" 23 | }, 24 | "summary": "${5}", 25 | "${6}": "${7}" 26 | } 27 | } 28 | END 29 | ) 30 | 31 | _invoke_jira_api "https://liferay.atlassian.net/rest/api/3/issue/" "${data}" 32 | } 33 | 34 | function add_jira_issue_comment { 35 | local data=$( 36 | cat <<- END 37 | { 38 | "body": { 39 | "content": [ 40 | { 41 | "content": [ 42 | { 43 | "text": "${1}", 44 | "type": "text" 45 | } 46 | ], 47 | "type": "paragraph" 48 | } 49 | ], 50 | "type": "doc", 51 | "version": 1 52 | } 53 | } 54 | END 55 | ) 56 | 57 | _invoke_jira_api "https://liferay.atlassian.net/rest/api/3/issue/${2}/comment" "${data}" 58 | } 59 | 60 | function _invoke_jira_api { 61 | local http_response=$(curl \ 62 | "${1}" \ 63 | --data "${2}" \ 64 | --fail \ 65 | --header "Accept: application/json" \ 66 | --header "Content-Type: application/json" \ 67 | --max-time 10 \ 68 | --request "POST" \ 69 | --retry 3 \ 70 | --silent \ 71 | --user "${LIFERAY_RELEASE_JIRA_USER}:${LIFERAY_RELEASE_JIRA_TOKEN}") 72 | 73 | if [ "$(echo "${http_response}" | jq --exit-status '.id?')" != "null" ] 74 | then 75 | echo "${http_response}" | jq --raw-output '.key' 76 | 77 | return 
"${LIFERAY_COMMON_EXIT_CODE_OK}" 78 | fi 79 | 80 | echo "${LIFERAY_COMMON_EXIT_CODE_BAD}" 81 | } -------------------------------------------------------------------------------- /release/_patcher.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function lc_time_run_error { 4 | report_patcher_status &>/dev/null 5 | } 6 | 7 | function report_jenkins_url { 8 | if [ -z "${LIFERAY_RELEASE_HOTFIX_BUILD_ID}" ] || 9 | [ -z "${LIFERAY_RELEASE_PATCHER_REQUEST_KEY}" ] 10 | then 11 | echo "Set the environment variables LIFERAY_RELEASE_HOTFIX_BUILD_ID and LIFERAY_RELEASE_PATCHER_REQUEST_KEY." 12 | 13 | return "${LIFERAY_COMMON_EXIT_CODE_SKIPPED}" 14 | fi 15 | 16 | mkdir -p "${_BUILD_DIR}"/patcher-status/production/osbPatcherStatus/build/jenkins 17 | 18 | lc_cd "${_BUILD_DIR}"/patcher-status/production/osbPatcherStatus/build/jenkins 19 | 20 | ( 21 | echo "{" 22 | echo " \"patcherRequestKey\": \"${LIFERAY_RELEASE_PATCHER_REQUEST_KEY}\"," 23 | echo " \"status\": \"pending\"," 24 | echo " \"statusURL\": \"${BUILD_URL}\"" 25 | echo "}" 26 | ) > "${LIFERAY_RELEASE_HOTFIX_BUILD_ID}" 27 | 28 | rsync -Dlprtvz --chown=501:501 --no-perms "${_BUILD_DIR}"/patcher-status/ test-3-1::patcher/ 29 | 30 | ssh test-3-1 "chown -R 501:501 /mnt/mfs-hdd1-172.16.168/patcher" 31 | } 32 | 33 | function report_patcher_status { 34 | lc_cd "${_BUILD_DIR}"/patcher-status/production/osbPatcherStatus/build/jenkins 35 | 36 | ( 37 | echo "{" 38 | 39 | if [ -n "${LC_TIME_RUN_ERROR_EXIT_CODE}" ] 40 | then 41 | echo " \"exitValue\": ${LC_TIME_RUN_ERROR_EXIT_CODE}," 42 | echo " \"output\": \"Problem during ${LC_TIME_RUN_ERROR_FUNCTION}.\"," 43 | else 44 | echo " \"exitValue\": 0," 45 | echo " \"fileName\": \"${_PRODUCT_VERSION}/${_HOTFIX_FILE_NAME}\"," 46 | fi 47 | 48 | echo " \"patcherRequestKey\": \"${LIFERAY_RELEASE_PATCHER_REQUEST_KEY}\"," 49 | echo " \"patcherUserId\": ${LIFERAY_RELEASE_PATCHER_USER_ID}" 50 | echo "}" 51 | ) > 
"${LIFERAY_RELEASE_HOTFIX_BUILD_ID}" 52 | 53 | cat "${LIFERAY_RELEASE_HOTFIX_BUILD_ID}" 54 | 55 | rsync -Dlprtvz --chown=501:501 --no-perms "${_BUILD_DIR}"/patcher-status/ test-3-1::patcher/ 56 | 57 | ssh test-3-1 "chown -R 501:501 /mnt/mfs-hdd1-172.16.168/patcher" 58 | } -------------------------------------------------------------------------------- /release/bin/com.liferay.workspace.bundle.url.codec.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/bin/com.liferay.workspace.bundle.url.codec.jar -------------------------------------------------------------------------------- /release/clean.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | rm -fr release-data/build release-data/promotion -------------------------------------------------------------------------------- /release/rebuild_releases_json.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ../_liferay_common.sh 4 | source ./_releases_json.sh 5 | 6 | _RELEASE_ROOT_DIR=$(pwd) 7 | 8 | _PROMOTION_DIR="${_RELEASE_ROOT_DIR}/release-data/promotion/files" 9 | 10 | rm -fr "${_PROMOTION_DIR}" 11 | 12 | mkdir -p "${_PROMOTION_DIR}" 13 | 14 | lc_cd "${_PROMOTION_DIR}" 15 | 16 | generate_releases_json "regenerate" -------------------------------------------------------------------------------- /release/supported-dxp-versions.txt: -------------------------------------------------------------------------------- 1 | 7.2 2 | 7.3 3 | 7.4 4 | 2023.q3 5 | 2023.q4 6 | 2024.q1 7 | 2024.q2 8 | 2024.q3 9 | 2024.q4 10 | 2025.q1 11 | 2025.q2 -------------------------------------------------------------------------------- /release/supported-portal-versions.txt: -------------------------------------------------------------------------------- 1 | 7.2 2 | 7.3 3 | 7.4 
-------------------------------------------------------------------------------- /release/templates/release.api.pom.tpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 4.0.0 9 | com.liferay.portal 10 | __ARTIFACT_ID__ 11 | __ARTIFACT_RC_VERSION__ 12 | -------------------------------------------------------------------------------- /release/templates/release.bom.compile.only.pom.tpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 4.0.0 9 | com.liferay.portal 10 | __ARTIFACT_ID__ 11 | __ARTIFACT_RC_VERSION__ 12 | pom 13 | 14 | 15 | LGPL 2.1 16 | http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt 17 | 18 | 19 | 20 | 21 | Brian Wing Shun Chan 22 | Liferay, Inc. 23 | http://www.liferay.com 24 | 25 | 26 | 27 | scm:git:git@github.com:liferay/__GITHUB_REPOSITORY__.git 28 | scm:git:git@github.com:liferay/__GITHUB_REPOSITORY__.git 29 | __PRODUCT_VERSION__ 30 | https://github.com/liferay/__GITHUB_REPOSITORY__ 31 | 32 | 33 | 34 | liferay-public-releases 35 | Liferay Public Releases 36 | https://repository-cdn.liferay.com/nexus/content/repositories/liferay-public-releases/ 37 | 38 | 39 | 40 | 41 | 42 | com.liferay.portal 43 | __RELEASE_API_DEPENDENCY__ 44 | __ARTIFACT_RC_VERSION__ 45 | -------------------------------------------------------------------------------- /release/templates/release.bom.pom.tpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 4.0.0 9 | com.liferay.portal 10 | __ARTIFACT_ID__ 11 | __ARTIFACT_RC_VERSION__ 12 | pom 13 | 14 | 15 | LGPL 2.1 16 | http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt 17 | 18 | 19 | 20 | 21 | Brian Wing Shun Chan 22 | Liferay, Inc. 
23 | http://www.liferay.com 24 | 25 | 26 | 27 | scm:git:git@github.com:liferay/__GITHUB_REPOSITORY__.git 28 | scm:git:git@github.com:liferay/__GITHUB_REPOSITORY__.git 29 | __PRODUCT_VERSION__ 30 | https://github.com/liferay/__GITHUB_REPOSITORY__ 31 | 32 | 33 | 34 | liferay-public-releases 35 | Liferay Public Releases 36 | https://repository-cdn.liferay.com/nexus/content/repositories/liferay-public-releases/ 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /release/templates/release.bom.third.party.pom.tpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 4.0.0 9 | com.liferay.portal 10 | __ARTIFACT_ID__ 11 | __ARTIFACT_RC_VERSION__ 12 | pom 13 | 14 | 15 | LGPL 2.1 16 | http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt 17 | 18 | 19 | 20 | 21 | Brian Wing Shun Chan 22 | Liferay, Inc. 23 | http://www.liferay.com 24 | 25 | 26 | 27 | scm:git:git@github.com:liferay/__GITHUB_REPOSITORY__.git 28 | scm:git:git@github.com:liferay/__GITHUB_REPOSITORY__.git 29 | __PRODUCT_VERSION__ 30 | https://github.com/liferay/__GITHUB_REPOSITORY__ 31 | 32 | 33 | -------------------------------------------------------------------------------- /release/templates/release.distro.pom.tpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 4.0.0 9 | com.liferay.portal 10 | __ARTIFACT_ID__ 11 | __ARTIFACT_RC_VERSION__ 12 | -------------------------------------------------------------------------------- /release/test-dependencies/actual/bundles.yml: -------------------------------------------------------------------------------- 1 | 7.4.3.120: 2 | 7.4.3.120-ga120: 3 | bundle_url: releases-cdn.liferay.com/portal/7.4.3.120-ga120/liferay-portal-tomcat-7.4.3.120-ga120-1718225443.7z 4 | 7.4.13: 5 | 7.4.13-u129: 6 | 7.4.13.nightly: 7 | bundle_url: releases-cdn.liferay.com/dxp/nightly/liferay-dxp-tomcat-7.4.13.nightly.7z 8 | quarterly: 9 | 2024.q3.0: 10 | 
latest: true -------------------------------------------------------------------------------- /release/test-dependencies/actual/liferay-portal-ee/release.properties: -------------------------------------------------------------------------------- 1 | release.info.version.major=7 2 | release.info.version.minor=4 3 | 4 | release.info.version.display.name[master-private]=2025.Q1.0 LTS 5 | 6 | release.info.version.trivial=129 7 | 8 | release.info.version.bug.fix[master-private]=13 9 | 10 | release.info.version.bug.fix=3 11 | -------------------------------------------------------------------------------- /release/test-dependencies/actual/portal.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Index of /portal 6 | 7 | 8 | 9 |

Index of /portal

10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 21 | 22 | 23 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 81 | 82 |
NameLast modifiedSizeDescription
19 |
20 |
24 | [DIR] 25 | 7.2.0-ga1/06-Mar-2024 02:19- 
33 | [DIR] 34 | 7.2.1-ga2/06-Mar-2024 02:19- 
43 | [DIR] 44 | 7.3.6-ga7/06-Mar-2024 02:19- 
52 | [DIR] 53 | 7.3.7-ga8/06-Mar-2024 02:19- 
61 | [DIR] 62 | 7.4.3.129-ga129/09-Dec-2024 15:10- 
70 | [DIR] 71 | 7.4.3.132-ga132/18-Feb-2025 13:33- 
79 |
80 |
83 | 84 | 85 | -------------------------------------------------------------------------------- /release/test-dependencies/actual/releases.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "product": "dxp", 4 | "productGroupVersion": "2025.q1", 5 | "productMajorVersion": "DXP 2025.Q1 LTS", 6 | "productVersion": "DXP 2025.Q1.0 LTS", 7 | "promoted": "true", 8 | "tags": [ 9 | "recommended" 10 | ], 11 | "releaseKey": "dxp-2025.q1.0-lts", 12 | "targetPlatformVersion": "2025.q1.0", 13 | "url": "https://releases-cdn.liferay.com/dxp/2025.q1.0-lts" 14 | }, 15 | { 16 | "product": "dxp", 17 | "productGroupVersion": "2024.q2", 18 | "productMajorVersion": "DXP 2024.Q2", 19 | "productVersion": "DXP 2024.Q2.12", 20 | "promoted": "true", 21 | "releaseKey": "dxp-2024.q2.12", 22 | "targetPlatformVersion": "2024.q2.12", 23 | "url": "https://releases-cdn.liferay.com/dxp/2024.q2.12" 24 | }, 25 | { 26 | "product": "dxp", 27 | "productGroupVersion": "2024.q2", 28 | "productMajorVersion": "DXP 2024.Q2", 29 | "productVersion": "DXP 2024.Q2.11", 30 | "promoted": "false", 31 | "releaseKey": "dxp-2024.q2.11", 32 | "targetPlatformVersion": "2024.q2.11", 33 | "url": "https://releases-cdn.liferay.com/dxp/2024.q2.11" 34 | }, 35 | { 36 | "product": "dxp", 37 | "productGroupVersion": "2024.q1", 38 | "productMajorVersion": "DXP 2024.Q1", 39 | "productVersion": "DXP 2024.Q1.12", 40 | "promoted": "true", 41 | "releaseKey": "dxp-2024.q1.12", 42 | "targetPlatformVersion": "2024.q1.12", 43 | "url": "https://releases-cdn.liferay.com/dxp/2024.q1.12" 44 | }, 45 | { 46 | "product": "dxp", 47 | "productGroupVersion": "2024.q1", 48 | "productMajorVersion": "DXP 2024.Q1", 49 | "productVersion": "DXP 2024.Q1.11", 50 | "promoted": "false", 51 | "releaseKey": "dxp-2024.q1.11", 52 | "targetPlatformVersion": "2024.q1.11", 53 | "url": "https://releases-cdn.liferay.com/dxp/2024.q1.11" 54 | } 55 | ] 
-------------------------------------------------------------------------------- /release/test-dependencies/expected/liferay-portal-ee/lib/development/activation.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/test-dependencies/expected/liferay-portal-ee/lib/development/activation.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/liferay-portal-ee/lib/development/mail.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/test-dependencies/expected/liferay-portal-ee/lib/development/mail.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/liferay-portal-ee/lib/portal/axis.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/test-dependencies/expected/liferay-portal-ee/lib/portal/axis.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/liferay-portal-ee/lib/portal/commons-discovery.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/test-dependencies/expected/liferay-portal-ee/lib/portal/commons-discovery.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/liferay-portal-ee/lib/portal/commons-logging.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/test-dependencies/expected/liferay-portal-ee/lib/portal/commons-logging.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/liferay-portal-ee/lib/portal/jaxrpc.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/test-dependencies/expected/liferay-portal-ee/lib/portal/jaxrpc.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/liferay-portal-ee/lib/portal/portal-client.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/test-dependencies/expected/liferay-portal-ee/lib/portal/portal-client.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/liferay-portal-ee/lib/portal/saaj-api.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/test-dependencies/expected/liferay-portal-ee/lib/portal/saaj-api.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/liferay-portal-ee/lib/portal/saaj-impl.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/test-dependencies/expected/liferay-portal-ee/lib/portal/saaj-impl.jar -------------------------------------------------------------------------------- 
/release/test-dependencies/expected/liferay-portal-ee/lib/portal/wsdl4j.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/release/test-dependencies/expected/liferay-portal-ee/lib/portal/wsdl4j.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/test.bom.dxp.release.bom.api.pom: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 4.0.0 9 | com.liferay.portal 10 | release.dxp.api 11 | 2024.q2.6-12345 12 | -------------------------------------------------------------------------------- /release/test-dependencies/expected/test.bom.dxp.release.bom.distro.pom: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 4.0.0 9 | com.liferay.portal 10 | release.dxp.distro 11 | 2024.q2.6-12345 12 | -------------------------------------------------------------------------------- /release/test-dependencies/expected/test.bom.portal.release.bom.api.pom: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 4.0.0 9 | com.liferay.portal 10 | release.portal.api 11 | 7.4.3.120-12345 12 | -------------------------------------------------------------------------------- /release/test-dependencies/expected/test.bom.portal.release.bom.distro.pom: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 4.0.0 9 | com.liferay.portal 10 | release.portal.distro 11 | 7.4.3.120-12345 12 | -------------------------------------------------------------------------------- /release/test-dependencies/expected/test_publishing_bundles.yml: -------------------------------------------------------------------------------- 1 | 7.4.3.120: 2 | 7.4.3.120-ga120: 3 | bundle_url: 
releases-cdn.liferay.com/portal/7.4.3.120-ga120/liferay-portal-tomcat-7.4.3.120-ga120-1718225443.7z 4 | 7.4.3.125: 5 | 7.4.3.125-ga125: 6 | bundle_url: releases-cdn.liferay.com/portal/7.4.3.125-ga125/liferay-portal-tomcat-7.4.3.125-ga125-1726242956.7z 7 | latest: true 8 | 7.4.13: 9 | 7.4.13-u129: 10 | 7.4.13-u130: 11 | 7.4.13.nightly: 12 | bundle_url: releases-cdn.liferay.com/dxp/nightly/liferay-dxp-tomcat-7.4.13.nightly.7z 13 | quarterly: 14 | 2024.q3.0: 15 | 2024.q3.1: 16 | latest: true -------------------------------------------------------------------------------- /release/test-dependencies/expected/test_publishing_liferay-dxp-client-7.3.10-u36.txt: -------------------------------------------------------------------------------- 1 | activation.jar 2 | axis.jar 3 | commons-discovery.jar 4 | commons-logging.jar 5 | jaxrpc.jar 6 | mail.jar 7 | portal-client.jar 8 | saaj-api.jar 9 | saaj-impl.jar 10 | wsdl4j.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/test_publishing_liferay-dxp-dependencies-7.3.10-u36.txt: -------------------------------------------------------------------------------- 1 | com.liferay.petra.concurrent.jar 2 | com.liferay.petra.executor.jar 3 | com.liferay.petra.function.jar 4 | com.liferay.petra.io.jar 5 | com.liferay.petra.lang.jar 6 | com.liferay.petra.memory.jar 7 | com.liferay.petra.nio.jar 8 | com.liferay.petra.process.jar 9 | com.liferay.petra.reflect.jar 10 | com.liferay.petra.sql.dsl.api.jar 11 | com.liferay.petra.sql.dsl.spi.jar 12 | com.liferay.petra.string.jar 13 | com.liferay.petra.url.pattern.mapper.jar 14 | com.liferay.registry.api.jar 15 | hsql.jar 16 | portal-kernel.jar 17 | portlet.jar -------------------------------------------------------------------------------- /release/test-dependencies/expected/test_release_gold_check_usage_output.txt: -------------------------------------------------------------------------------- 1 | Usage: 
LIFERAY_RELEASE_RC_BUILD_TIMESTAMP= LIFERAY_RELEASE_VERSION= ./test_release_gold.sh 2 | 3 | The script reads the following environment variables: 4 | 5 | LIFERAY_RELEASE_GCS_TOKEN (optional): *.json file containing the token to authenticate with Google Cloud Storage 6 | LIFERAY_RELEASE_GITHUB_PAT (optional): GitHub personal access token used to tag releases 7 | LIFERAY_RELEASE_NEXUS_REPOSITORY_PASSWORD (optional): Nexus user's password 8 | LIFERAY_RELEASE_NEXUS_REPOSITORY_USER (optional): Nexus user with the right to upload BOM files 9 | LIFERAY_RELEASE_PATCHER_PORTAL_EMAIL_ADDRESS: Email address to the release team's Liferay Patcher user 10 | LIFERAY_RELEASE_PATCHER_PORTAL_PASSWORD: Password to the release team's Liferay Patcher user 11 | LIFERAY_RELEASE_PREPARE_NEXT_RELEASE_BRANCH: Set to "true" to prepare the next release branch. The default is "false". 12 | LIFERAY_RELEASE_PRODUCT_NAME (optional): Set to "portal" for CE. The default is "DXP". 13 | LIFERAY_RELEASE_RC_BUILD_TIMESTAMP: Timestamp of the build to publish 14 | LIFERAY_RELEASE_REPOSITORY_OWNER (optional): Set to "EnterpriseReleaseHU" for development. The default is "liferay". 
15 | LIFERAY_RELEASE_VERSION: DXP or portal version of the release to publish 16 | 17 | Example: LIFERAY_RELEASE_PREPARE_NEXT_RELEASE_BRANCH=true LIFERAY_RELEASE_RC_BUILD_TIMESTAMP=1695892964 LIFERAY_RELEASE_VERSION=2023.q3.0 ./test_release_gold.sh 18 | -------------------------------------------------------------------------------- /release/test-dependencies/expected/test_scan_docker_images_without_parameters_output.txt: -------------------------------------------------------------------------------- 1 | Usage: LIFERAY_IMAGE_NAMES= ./scan_docker_images.sh 2 | 3 | The script reads the following environment variables: 4 | 5 | LIFERAY_IMAGE_NAMES: Comma separated list of DXP or Portal Docker images 6 | LIFERAY_PRISMA_CLOUD_ACCESS_KEY: Prisma Cloud access key 7 | LIFERAY_PRISMA_CLOUD_SECRET: Prisma Cloud secret 8 | 9 | Example: LIFERAY_IMAGE_NAMES=liferay/dxp:2025.q1.5-lts,liferay/dxp:2024.q2.2 ./scan_docker_images.sh -------------------------------------------------------------------------------- /release/test_ci.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ../_liferay_common.sh 4 | source ../_test_common.sh 5 | source ./_ci.sh 6 | 7 | function main { 8 | set_up 9 | 10 | test_ci_get_test_portal_branch_name 11 | test_ci_not_trigger_ci_test_suite 12 | 13 | tear_down 14 | } 15 | 16 | function set_up { 17 | export TRIGGER_CI_TEST_SUITE="false" 18 | } 19 | 20 | function tear_down { 21 | unset TRIGGER_CI_TEST_SUITE 22 | } 23 | 24 | function test_ci_get_test_portal_branch_name { 25 | _test_ci_get_test_portal_branch_name "release-2025.q1" "release-2025.q1" 26 | _test_ci_get_test_portal_branch_name "release-7.4.13.135" "master" 27 | _test_ci_get_test_portal_branch_name "release-7.4.3.132-ga132" "master" 28 | } 29 | 30 | function test_ci_not_trigger_ci_test_suite { 31 | trigger_ci_test_suite &> /dev/null 32 | 33 | assert_equals "${?}" "${LIFERAY_COMMON_EXIT_CODE_SKIPPED}" 34 | } 35 | 36 | function 
_test_ci_get_test_portal_branch_name { 37 | echo -e "Running _test_ci_get_test_portal_branch_name for ${1}.\n" 38 | 39 | assert_equals "$(_get_test_portal_branch_name "${1}")" "${2}" 40 | } 41 | 42 | main -------------------------------------------------------------------------------- /release/test_git.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ../_liferay_common.sh 4 | source ../_test_common.sh 5 | source ./_git.sh 6 | 7 | function main { 8 | set_up 9 | 10 | if [ $? -eq "${LIFERAY_COMMON_EXIT_CODE_SKIPPED}" ] 11 | then 12 | return "${LIFERAY_COMMON_EXIT_CODE_SKIPPED}" 13 | fi 14 | 15 | test_git_generate_release_notes 16 | 17 | tear_down 18 | } 19 | 20 | function set_up { 21 | export LIFERAY_RELEASE_PRODUCT_NAME="dxp" 22 | export _PRODUCT_VERSION="2024.q2.0" 23 | 24 | export _RELEASE_ROOT_DIR="${PWD}" 25 | 26 | export _PROJECTS_DIR="${_RELEASE_ROOT_DIR}"/../.. 27 | 28 | if [ ! -d "${_PROJECTS_DIR}/liferay-portal-ee" ] 29 | then 30 | echo "The directory ${_PROJECTS_DIR}/liferay-portal-ee does not exist." 31 | 32 | return "${LIFERAY_COMMON_EXIT_CODE_SKIPPED}" 33 | fi 34 | 35 | export _BUILD_DIR="${_PROJECTS_DIR}/liferay-portal-ee" 36 | 37 | lc_cd "${_PROJECTS_DIR}"/liferay-portal-ee 38 | 39 | git restore . 
#!/bin/bash

source ../_liferay_common.sh
source ../_test_common.sh
source ./_bom.sh
source ./_promotion.sh

function main {
	set_up

	test_promotion_generate_distro_jar

	tear_down
}

function set_up {
	export LIFERAY_RELEASE_PRODUCT_NAME="dxp"
	export LIFERAY_RELEASE_VERSION="2024.q2.6"
	export _RELEASE_ROOT_DIR="${PWD}"

	export _ARTIFACT_RC_VERSION="${LIFERAY_RELEASE_VERSION}"
	export _BUILD_DIR="${_RELEASE_ROOT_DIR}/release-data/build"
	export _BUNDLES_DIR="${_RELEASE_ROOT_DIR}/test-dependencies/liferay-dxp"
	export _PRODUCT_VERSION="${LIFERAY_RELEASE_VERSION}"
	export _PROJECTS_DIR="${_RELEASE_ROOT_DIR}"/../..

	lc_cd "${_RELEASE_ROOT_DIR}/test-dependencies"

	local bundle_zip_name="liferay-dxp-tomcat-2024.q2.6-1721635298.zip"

	lc_download \
		"https://releases-cdn.liferay.com/dxp/2024.q2.6/${bundle_zip_name}" \
		"${bundle_zip_name}" 1> /dev/null

	unzip -q "${bundle_zip_name}"

	mkdir -p "${_RELEASE_ROOT_DIR}/release-data/build/boms"
}

function tear_down {

	# Kill any process still running from the extracted bundle (e.g. a Tomcat
	# started by the test) before deleting its directory.

	pgrep --full --list-name "${_BUNDLES_DIR}" | awk '{print $1}' | xargs --no-run-if-empty kill -9

	rm -fr "${_BUNDLES_DIR}"
	rm -fr "${_RELEASE_ROOT_DIR}/release-data/build/boms"
	rm -f "${_RELEASE_ROOT_DIR}/test-dependencies/liferay-dxp-tomcat-2024.q2.6-1721635298.zip"

	unset LIFERAY_RELEASE_PRODUCT_NAME
	unset LIFERAY_RELEASE_VERSION
	unset _BUILD_DIR
	unset _BUNDLES_DIR
	unset _PRODUCT_VERSION
	unset _PROJECTS_DIR
	unset _RELEASE_ROOT_DIR
}

function test_promotion_generate_distro_jar {
	generate_distro_jar &> /dev/null

	# Exactly one distro jar should have been produced for the release
	# version. "grep -c /" counts matched paths (zero-safe, unlike wc -l).

	local distro_jar_count=$(find "${_RELEASE_ROOT_DIR}" -name "release.dxp.distro-${LIFERAY_RELEASE_VERSION}*.jar" | grep -c /)

	assert_equals "${distro_jar_count}" 1
}

main
"${_BUNDLES_DIR}/tomcat/RELEASE-NOTES" 28 | } 29 | 30 | function tear_down { 31 | unset LIFERAY_RELEASE_PRODUCT_NAME 32 | unset _BUNDLES_DIR 33 | unset _PRODUCT_VERSION 34 | 35 | rm -f release.properties 36 | rm -fr tomcat 37 | } 38 | 39 | function test_release_properties_generate_file_portal { 40 | generate_release_properties_file &>/dev/null 41 | 42 | assert_equals \ 43 | "$(grep 'target.platform.version' release.properties | cut -d '=' -f 2)" \ 44 | $(echo "${_PRODUCT_VERSION}" | cut -d '-' -f 1) 45 | } 46 | 47 | function test_release_properties_generate_file_dxp { 48 | generate_release_properties_file &>/dev/null 49 | 50 | assert_equals \ 51 | "$(grep 'target.platform.version' release.properties | cut -d '=' -f 2)" \ 52 | "${_PRODUCT_VERSION/-/.}" 53 | } 54 | 55 | main -------------------------------------------------------------------------------- /release/test_scan_docker_images.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ../_test_common.sh 4 | 5 | function main { 6 | set_up 7 | 8 | test_scan_docker_images_with_invalid_image 9 | 10 | tear_down 11 | 12 | test_scan_docker_images_without_parameters 13 | } 14 | 15 | function set_up { 16 | export LIFERAY_IMAGE_NAMES="liferay/dxp:test-image" 17 | export LIFERAY_PRISMA_CLOUD_ACCESS_KEY="key" 18 | export LIFERAY_PRISMA_CLOUD_SECRET="secret" 19 | } 20 | 21 | function tear_down { 22 | unset LIFERAY_IMAGE_NAMES 23 | unset LIFERAY_PRISMA_CLOUD_ACCESS_KEY 24 | unset LIFERAY_PRISMA_CLOUD_SECRET 25 | } 26 | 27 | function test_scan_docker_images_with_invalid_image { 28 | assert_equals \ 29 | "$(./scan_docker_images.sh | cut -d ' ' -f 2-)" \ 30 | "[ERROR] Unable to find liferay/dxp:test-image locally." 
#!/bin/bash

# Installs and configures Lefthook Git hooks for this repository. Must be run
# as root because it installs system packages with apt.

function check_permission {
	if [ "$(id -u)" -ne 0 ]
	then
		echo "This script must be run by root."

		exit 1
	fi
}

function configure_lefthook {
	cd "$(dirname "$0")" || exit

	npx lefthook add pre-commit

	# Bug fix: exit_code was read below but never captured, so the check
	# compared an unset variable. Capture the npx exit status like the
	# sibling functions do.

	local exit_code=$?

	if [ "${exit_code}" -gt 0 ]
	then
		echo "Unable to configure Lefthook."

		exit 1
	fi
}

function install_lefthook {
	npm install @arkweid/lefthook@0.7.7 --save-dev

	local exit_code=$?

	if [ "${exit_code}" -gt 0 ]
	then
		echo "Unable to install Lefthook."

		exit 1
	fi
}

function install_npm {
	apt install npm

	local exit_code=$?

	if [ "${exit_code}" -gt 0 ]
	then
		echo "Unable to install NPM."

		exit 1
	fi
}

function main {
	check_permission

	install_npm

	install_lefthook

	configure_lefthook
}

main "${@}"
#!/bin/bash

# Selects the JDK matching the JAVA_VERSION environment variable (e.g.
# "zulu21") inside the image. This script is sourced by entrypoints, so
# JAVA_HOME and PATH are assigned without export on purpose.

# Creates /usr/lib/jvm/<zuluNN> as a symlink to the installed Zulu JDK
# directory for the given architecture, handling both the regular and the
# CRaC-enabled distribution directory layouts.
#
# $1 - dpkg architecture (e.g. amd64, arm64)
# $2 - JDK base name (e.g. zulu-21); "${2//-/}" is the dash-less alias
function create_symlink {
	local target_dir

	if [ -e "/usr/lib/jvm/${2}-${1}" ]
	then
		target_dir="/usr/lib/jvm/${2}-${1}"
	elif [ -e "/usr/lib/jvm/${2}-crac-${1}" ]
	then
		target_dir="/usr/lib/jvm/${2}-crac-${1}"
	fi

	if [ -n "${target_dir}" ] && [ ! -e "/usr/lib/jvm/${2//-/}" ]
	then

		# Bug fix: quote the symlink target. Unquoted ${target_dir} is
		# subject to word-splitting and globbing (ShellCheck SC2086).

		ln -sf "${target_dir}" "/usr/lib/jvm/${2//-/}"
	fi
}

function main {
	if [ -n "${JAVA_VERSION}" ]
	then
		if [ ! -e "/usr/lib/jvm/${JAVA_VERSION}" ]
		then
			local architecture=$(dpkg --print-architecture)

			# Strip everything but digits, e.g. "zulu21" -> "21".

			local zulu_version=$(echo "${JAVA_VERSION}" | tr -dc '0-9')

			create_symlink "${architecture}" "zulu-${zulu_version}"

			update-java-alternatives -s "zulu-${zulu_version}-${architecture}"
		fi

		# Human-readable list of installed JDKs, e.g. "zulu11, zulu21".

		local zulu_jdks=$(ls /usr/lib/jvm/ | grep "zulu-.*-.*" | awk -F- '{print $1$2}' | paste -s -d "," | sed "s/,/, /g")

		if [ -e "/usr/lib/jvm/${JAVA_VERSION}" ]
		then
			JAVA_HOME=/usr/lib/jvm/${JAVA_VERSION}
			PATH=/usr/lib/jvm/${JAVA_VERSION}/bin/:${PATH}

			echo "[LIFERAY] Using ${JAVA_VERSION} JDK. You can use another JDK by setting the \"JAVA_VERSION\" environment variable."
			echo "[LIFERAY] Available JDKs: ${zulu_jdks}."
		else
			echo "[LIFERAY] \"${JAVA_VERSION}\" JDK is not available in this Docker image."
			echo "[LIFERAY] Available JDKs: ${zulu_jdks}."

			exit 1
		fi
	fi
}

main
18 | 19 | exit 0 20 | fi 21 | 22 | apt-get update 23 | 24 | DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install --no-install-recommends --yes ubuntu-advantage-tools 25 | 26 | pro attach "${LIFERAY_DOCKER_UBUNTU_PRO_TOKEN}" -------------------------------------------------------------------------------- /templates/base/resources/usr/local/bin/update_ubuntu.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Updating Ubuntu." 4 | 5 | apt-get update 6 | 7 | apt-get upgrade --yes 8 | 9 | apt-get clean -------------------------------------------------------------------------------- /templates/batch/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM liferay/base:latest 2 | 3 | ARG LABEL_BUILD_DATE 4 | ARG LABEL_NAME 5 | ARG LABEL_VCS_REF 6 | ARG LABEL_VCS_URL 7 | ARG LABEL_VERSION 8 | 9 | COPY resources/ / 10 | 11 | ENTRYPOINT ["tini", "-v", "--", "/usr/local/bin/liferay_batch_entrypoint.sh"] 12 | 13 | ENV LANG="C.UTF-8" 14 | 15 | LABEL org.label-schema.build-date="${LABEL_BUILD_DATE}" 16 | LABEL org.label-schema.name="${LABEL_NAME}" 17 | LABEL org.label-schema.schema-version="1.0" 18 | LABEL org.label-schema.vcs-ref="${LABEL_VCS_REF}" 19 | LABEL org.label-schema.vcs-url="${LABEL_VCS_URL}" 20 | LABEL org.label-schema.vendor="Liferay, Inc." 
21 | LABEL org.label-schema.version="${LABEL_VERSION}" 22 | 23 | USER liferay:liferay 24 | 25 | WORKDIR /opt/liferay -------------------------------------------------------------------------------- /templates/bundle/resources/opt/liferay/container_status: -------------------------------------------------------------------------------- 1 | container-created -------------------------------------------------------------------------------- /templates/bundle/resources/usr/local/bin/_liferay_bundle_common.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function execute_scripts { 4 | if [ -e "${1}" ] && [[ $(find "${1}" -maxdepth 1 -name "*.sh" -printf "%f\n") ]] 5 | then 6 | echo "[LIFERAY] Executing scripts in ${1}:" 7 | 8 | for SCRIPT_NAME in $(find "${1}" -maxdepth 1 -name "*.sh" -printf "%f\n" | sort) 9 | do 10 | echo "" 11 | echo "[LIFERAY] Executing ${SCRIPT_NAME}." 12 | 13 | source "${1}/${SCRIPT_NAME}" 14 | done 15 | 16 | echo "" 17 | fi 18 | } 19 | 20 | function update_container_status { 21 | if [[ "${LIFERAY_CONTAINER_STATUS_ENABLED}" != "true" ]] 22 | then 23 | return 24 | fi 25 | 26 | local old_status=$(grep status= /opt/liferay/container_status) 27 | 28 | old_status=${old_status#status=} 29 | 30 | if [ "${old_status}" == "${1}" ] 31 | then 32 | touch /opt/liferay/container_status 33 | 34 | return 35 | fi 36 | 37 | echo "Container status: ${1}" 38 | 39 | ( 40 | echo "status=${1}" 41 | echo "update_time=$(date +%s)" 42 | ) > /opt/liferay/container_status 43 | 44 | } -------------------------------------------------------------------------------- /templates/bundle/resources/usr/local/bin/generate_database_report.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source /usr/local/bin/_liferay_common.sh 4 | 5 | function check_usage { 6 | lc_check_utils mysql || exit 1 7 | 8 | _REPORTS_DIRECTORY="${LIFERAY_HOME}/data/reports" 9 | 10 | mkdir -p 
"${_REPORTS_DIRECTORY}" 11 | 12 | _REPORTS_FILE="${_REPORTS_DIRECTORY}"/query_report_$(date +'%Y-%m-%d_%H-%M-%S').html 13 | } 14 | 15 | function main { 16 | check_usage 17 | 18 | lc_time_run run_query INFORMATION_SCHEMA "SHOW ENGINE INNODB STATUS;" 19 | 20 | lc_time_run run_query INFORMATION_SCHEMA "SELECT * FROM INNODB_LOCK_WAITS;" 21 | 22 | lc_time_run run_query INFORMATION_SCHEMA "SELECT * FROM INNODB_LOCKS WHERE LOCK_TRX_ID IN (SELECT BLOCKING_TRX_ID FROM INNODB_LOCK_WAITS);" 23 | 24 | lc_time_run run_query INFORMATION_SCHEMA "SELECT * FROM TABLES;" 25 | 26 | lc_time_run run_query "${LCP_SECRET_DATABASE_NAME}" "SELECT * FROM VirtualHost;" 27 | 28 | lc_time_run run_query "${LCP_SECRET_DATABASE_NAME}" "SELECT * FROM DDMTemplate;" 29 | 30 | lc_time_run run_query "${LCP_SECRET_DATABASE_NAME}" "SELECT * FROM FragmentEntryLink;" 31 | 32 | lc_time_run run_query "${LCP_SECRET_DATABASE_NAME}" "SELECT * FROM QUARTZ_TRIGGERS;" 33 | } 34 | 35 | function run_query { 36 | echo "

${2}

" >> "${_REPORTS_FILE}" 37 | 38 | mysql --connect-timeout=10 -D "${1}" -e "${2}" -H -p"${LCP_SECRET_DATABASE_PASSWORD}" -u "${LCP_SECRET_DATABASE_USER}" >> "${_REPORTS_FILE}" 39 | } 40 | 41 | main "${@}" -------------------------------------------------------------------------------- /templates/bundle/resources/usr/local/bin/generate_heap_dump.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function check_usage { 4 | HEAP_DUMPS_DIR="${LIFERAY_HOME}/data/sre/heap_dumps" 5 | 6 | while [ "${1}" != "" ] 7 | do 8 | case ${1} in 9 | -d) 10 | shift 11 | 12 | HEAP_DUMPS_DIR=${1} 13 | 14 | ;; 15 | -h) 16 | print_help 17 | 18 | ;; 19 | *) 20 | print_help 21 | 22 | ;; 23 | esac 24 | 25 | shift 26 | done 27 | } 28 | 29 | function generate_heap_dump { 30 | local date=$(date +'%Y-%m-%d') 31 | 32 | mkdir -p "${HEAP_DUMPS_DIR}/${date}" 33 | 34 | local time=$(date +'%H-%M-%S') 35 | 36 | echo "[Liferay] Generating ${HEAP_DUMPS_DIR}/${date}/heap_dump-${time}.txt" 37 | 38 | jattach $(cat "${LIFERAY_PID}") dumpheap "${HEAP_DUMPS_DIR}/${date}/heap_dump-${time}.txt" 39 | } 40 | 41 | function main { 42 | check_usage "${@}" 43 | 44 | mkdir -p "${HEAP_DUMPS_DIR}" 45 | 46 | generate_heap_dump 47 | 48 | echo "[Liferay] Generated heap dump" 49 | } 50 | 51 | function print_help { 52 | echo "Usage: ${0}" 53 | echo "" 54 | echo "The script can be configured with the following arguments:" 55 | echo "" 56 | echo " -d (optional): Directory path to which heap dumps are saved" 57 | echo "" 58 | echo "Example: ${0} -d \"${HEAP_DUMPS_DIR}\"" 59 | 60 | exit 2 61 | } 62 | 63 | main "${@}" -------------------------------------------------------------------------------- /templates/bundle/resources/usr/local/bin/generate_thread_dump.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function check_usage { 4 | NUMBER_OF_THREAD_DUMPS=20 5 | SLEEP=5 6 | 
#!/bin/bash

# Liveness probe for the Liferay container: exits 0 only when the container
# status file exists, is fresh (touched within the last two minutes), and
# reports status=live. Distinct non-zero exit codes identify the failure mode.

if [[ "${LIFERAY_CONTAINER_STATUS_ENABLED}" != "true" ]]
then
	echo "Set the environment variable LIFERAY_CONTAINER_STATUS_ENABLED to \"true\" to enable ${0}."

	exit 2
fi

if [ ! -e /opt/liferay/container_status ]
then
	echo "The file /opt/liferay/container_status does not exist."

	exit 4
fi

# find prints the path only when the file's mtime is older than two minutes,
# i.e. the status writer has stalled.

if [ "$(find /opt/liferay/container_status -mmin +2)" ]
then
	echo "The file /opt/liferay/container_status has not been updated for more than two minutes."

	exit 5
fi

cat /opt/liferay/container_status

# Fixed UUOC: grep reads the file directly instead of "cat | grep".

if grep -q "status=live" /opt/liferay/container_status
then
	exit 0
else
	exit 1
fi
19 | fi 20 | } 21 | 22 | function wait_until_free { 23 | while 24 | [ -e "${LIFERAY_CONTAINER_STARTUP_LOCK_FILE}" ] && 25 | [ -n "$(cat "${LIFERAY_CONTAINER_STARTUP_LOCK_FILE}")" ] && 26 | [ "$(hostname)" != "$(cat "${LIFERAY_CONTAINER_STARTUP_LOCK_FILE}")" ] 27 | do 28 | echo "Wait for $(cat "${LIFERAY_CONTAINER_STARTUP_LOCK_FILE}") to start up." 29 | 30 | if [ "$(find "${LIFERAY_CONTAINER_STARTUP_LOCK_FILE}" -mmin +2)" ] 31 | then 32 | echo "Lock created by $(cat "${LIFERAY_CONTAINER_STARTUP_LOCK_FILE}") was not updated for 2 minutes, unlocking." 33 | 34 | echo "" > "${LIFERAY_CONTAINER_STARTUP_LOCK_FILE}" 35 | 36 | break 37 | fi 38 | 39 | sleep 3 40 | done 41 | } 42 | 43 | function main { 44 | local delay=$((RANDOM % 10 + 1)) 45 | 46 | echo "Delaying lock check for ${delay} seconds." 47 | 48 | sleep ${delay} 49 | 50 | wait_until_free 51 | 52 | add_lock 53 | } 54 | 55 | main -------------------------------------------------------------------------------- /templates/caddy/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM caddy:2.5.0 2 | 3 | ARG LABEL_BUILD_DATE 4 | ARG LABEL_NAME 5 | ARG LABEL_VCS_REF 6 | ARG LABEL_VCS_URL 7 | ARG LABEL_VERSION 8 | 9 | COPY resources/ / 10 | 11 | ENTRYPOINT ["tini", "-v", "--", "/usr/local/bin/liferay_caddy_entrypoint.sh"] 12 | 13 | ENV LANG="C.UTF-8" 14 | 15 | LABEL org.label-schema.build-date="${LABEL_BUILD_DATE}" 16 | LABEL org.label-schema.name="${LABEL_NAME}" 17 | LABEL org.label-schema.schema-version="1.0" 18 | LABEL org.label-schema.vcs-ref="${LABEL_VCS_REF}" 19 | LABEL org.label-schema.vcs-url="${LABEL_VCS_URL}" 20 | LABEL org.label-schema.vendor="Liferay, Inc." 
#!/bin/bash

# Entrypoint for the Caddy web server image. Generates per-origin CORS headers
# (and an optional 404 redirect) into /etc/caddy.d/liferay_caddy_file, which
# the base Caddyfile imports, then starts Caddy in the foreground.

function main {

	# Idiom fix: "[ -z … ]" instead of "[ ! -n … ]".

	if [ -z "${LIFERAY_ROUTES_DXP}" ]
	then
		LIFERAY_ROUTES_DXP="/etc/liferay/lxc/dxp-metadata"
	fi

	# LXC metadata files may be absent; cat errors are silenced on purpose.
	# Quoting fix: the metadata paths are now quoted (SC2086).

	local protocol=$(cat "${LIFERAY_ROUTES_DXP}/com.liferay.lxc.dxp.server.protocol" 2>/dev/null)

	local i

	# Word-splitting of the domains file into individual domains is intended.

	for i in $(cat "${LIFERAY_ROUTES_DXP}/com.liferay.lxc.dxp.domains" 2>/dev/null)
	do
		local url="${protocol}://${i}"

		cat >> /etc/caddy.d/liferay_caddy_file << EOF
@origin${url} header Origin ${url}
header @origin${url} Access-Control-Allow-Origin "${url}"
EOF
	done

	if [ -n "${LIFERAY_CADDY_404_URL}" ]
	then
		cat >> /etc/caddy.d/liferay_caddy_file << EOF
handle_errors {

	@404 expression {http.error.status_code} == 404
	handle @404 {
		redir * ${LIFERAY_CADDY_404_URL} 301
	}

}
EOF
	fi

	caddy run --adapter caddyfile --config /etc/caddy/Caddyfile
}

main
--add-opens=java.base/sun.util.calendar=ALL-UNNAMED \ 26 | --add-opens=jdk.zipfs/jdk.nio.zipfs=ALL-UNNAMED" 27 | ENV LANG="C.UTF-8" 28 | ENV LIFERAY_JAR_RUNNER_JAVA_OPTS="-Xmx512m" 29 | 30 | LABEL org.label-schema.build-date="${LABEL_BUILD_DATE}" 31 | LABEL org.label-schema.name="${LABEL_NAME}" 32 | LABEL org.label-schema.schema-version="1.0" 33 | LABEL org.label-schema.vcs-ref="${LABEL_VCS_REF}" 34 | LABEL org.label-schema.vcs-url="${LABEL_VCS_URL}" 35 | LABEL org.label-schema.vendor="Liferay, Inc." 36 | LABEL org.label-schema.version="${LABEL_VERSION}" 37 | 38 | USER liferay:liferay 39 | 40 | WORKDIR /opt/liferay -------------------------------------------------------------------------------- /templates/jar-runner/resources/usr/local/bin/liferay_jar_runner_entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function main { 4 | export JAVA_HOME=/usr/lib/jvm/${JAVA_VERSION} 5 | export PATH=/usr/lib/jvm/${JAVA_VERSION}/bin/:${PATH} 6 | 7 | if [ -e /usr/local/bin/liferay_jar_runner_set_up.sh ] 8 | then 9 | /usr/local/bin/liferay_jar_runner_set_up.sh 10 | fi 11 | 12 | java ${LIFERAY_JAR_RUNNER_JAVA_OPTS} -jar /opt/liferay/jar-runner.jar "${@}" 13 | 14 | if [ -e /usr/local/bin/liferay_jar_runner_tear_down.sh ] 15 | then 16 | /usr/local/bin/liferay_jar_runner_tear_down.sh 17 | fi 18 | } 19 | 20 | main "${@}" -------------------------------------------------------------------------------- /templates/job-runner/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM --platform=${TARGETPLATFORM} liferay/jdk11-jdk8:latest AS liferay-jdk11-jdk8 2 | 3 | COPY resources/etc/created-date /etc/created-date 4 | 5 | RUN apt-get update && \ 6 | DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install --no-install-recommends --yes cron gettext && \ 7 | apt-get clean 8 | 9 | FROM liferay-jdk11-jdk8 10 | 11 | ARG LABEL_BUILD_DATE 12 | ARG LABEL_NAME 13 | ARG LABEL_VCS_REF 
#!/bin/bash

# Entrypoint for the job-runner image: registers the mounted crontab (cron
# entries enqueue job names via register_job.sh), starts cron, then processes
# the queue forever, oldest job first.

function init {
	. /usr/local/bin/set_java_version.sh

	mkdir -p /opt/liferay/job-queue
}

function main {
	init

	register_crontab

	cron

	run_jobs
}

function register_crontab {
	if [ ! -e /mnt/liferay/job-crontab ]
	then
		echo "The file /mnt/liferay/job-crontab does not exist."

		exit 2
	fi

	# Merge any existing crontab with the mounted one, expanding environment
	# variable references in it. Fixed UUOC: envsubst reads the file directly.

	(
		crontab -l 2>/dev/null

		envsubst < /mnt/liferay/job-crontab
	) | crontab -

	echo "Registered crontab: "

	crontab -l

	echo ""
}

function run_jobs {
	while true
	do

		# Bug fix: the queue was listed twice (once to count, once to pick),
		# so a job enqueued or removed between the two listings could be
		# mishandled. List once and test the result instead. Jobs are queued
		# as touch files, so oldest-mtime-first (ls -tr) is FIFO order.

		local job=$(ls -tr /opt/liferay/job-queue 2>/dev/null | head -n 1)

		if [ -n "${job}" ]
		then
			rm "/opt/liferay/job-queue/${job}"

			job_wrapper.sh "${job}"
		else
			sleep 10
		fi
	done
}

main
13 | fi 14 | } 15 | 16 | main "${@}" -------------------------------------------------------------------------------- /templates/node-runner/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM --platform=linux/arm64 liferay/base:latest AS liferay-base-arm64 2 | 3 | ARG LABEL_NODE_RUNNER_AMD64_VERSION 4 | ARG LABEL_NODE_RUNNER_ARM64_VERSION 5 | ARG LABEL_NODE_RUNNER_VERSION=${LABEL_NODE_RUNNER_ARM64_VERSION} 6 | 7 | FROM --platform=linux/amd64 liferay/base:latest AS liferay-base-amd64 8 | 9 | ARG LABEL_NODE_RUNNER_AMD64_VERSION 10 | ARG LABEL_NODE_RUNNER_ARM64_VERSION 11 | ARG LABEL_NODE_RUNNER_VERSION=${LABEL_NODE_RUNNER_AMD64_VERSION} 12 | 13 | FROM liferay-base-${TARGETARCH} 14 | 15 | ARG LABEL_BUILD_DATE 16 | ARG LABEL_NAME 17 | ARG LABEL_VCS_REF 18 | ARG LABEL_VCS_URL 19 | ARG LABEL_VERSION 20 | ARG NODE_VERSION=16 21 | ARG TARGETARCH 22 | ARG TARGETPLATFORM 23 | 24 | COPY resources/ / 25 | 26 | ENTRYPOINT ["tini", "-v", "--", "/usr/local/bin/liferay_node_runner_entrypoint.sh"] 27 | 28 | ENV LANG="C.UTF-8" 29 | ENV LIFERAY_NODE_RUNNER_START="npm start" 30 | 31 | LABEL org.label-schema.build-date="${LABEL_BUILD_DATE}" 32 | LABEL org.label-schema.name="${LABEL_NAME}" 33 | LABEL org.label-schema.node_version="${NODE_VERSION}" 34 | LABEL org.label-schema.schema-version="1.0" 35 | LABEL org.label-schema.vcs-ref="${LABEL_VCS_REF}" 36 | LABEL org.label-schema.vcs-url="${LABEL_VCS_URL}" 37 | LABEL org.label-schema.vendor="Liferay, Inc." 
38 | LABEL org.label-schema.version="${LABEL_VERSION}" 39 | 40 | RUN apt-get update && \ 41 | apt-get upgrade --yes && \ 42 | DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install --no-install-recommends --yes gnupg && \ 43 | mkdir -p /etc/apt/keyrings && \ 44 | curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && \ 45 | echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_${NODE_VERSION}.x nodistro main" > /etc/apt/sources.list.d/nodesource.list && \ 46 | apt-get update && \ 47 | DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install --no-install-recommends --yes nodejs npm && \ 48 | apt-get clean 49 | 50 | USER liferay:liferay 51 | 52 | WORKDIR /opt/liferay -------------------------------------------------------------------------------- /templates/node-runner/resources/usr/local/bin/liferay_node_runner_entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function main { 4 | if [ -e /usr/local/bin/liferay_node_runner_set_up.sh ] 5 | then 6 | /usr/local/bin/liferay_node_runner_set_up.sh 7 | fi 8 | 9 | ${LIFERAY_NODE_RUNNER_START} 10 | 11 | if [ -e /usr/local/bin/liferay_node_runner_tear_down.sh ] 12 | then 13 | /usr/local/bin/liferay_node_runner_tear_down.sh 14 | fi 15 | } 16 | 17 | main -------------------------------------------------------------------------------- /templates/noop/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM --platform=${TARGETPLATFORM} scratch 2 | 3 | ARG LABEL_BUILD_DATE 4 | ARG LABEL_NAME 5 | ARG LABEL_VCS_REF 6 | ARG LABEL_VCS_URL 7 | ARG LABEL_VERSION 8 | ARG TARGETPLATFORM 9 | 10 | COPY resources/noop / 11 | 12 | CMD [ "/noop" ] 13 | 14 | ENV LANG="C.UTF-8" 15 | 16 | LABEL org.label-schema.build-date="${LABEL_BUILD_DATE}" 17 | LABEL org.label-schema.name="${LABEL_NAME}" 18 | LABEL org.label-schema.schema-version="1.0" 19
| LABEL org.label-schema.vcs-ref="${LABEL_VCS_REF}" 20 | LABEL org.label-schema.vcs-url="${LABEL_VCS_URL}" 21 | LABEL org.label-schema.vendor="Liferay, Inc." 22 | LABEL org.label-schema.version="${LABEL_VERSION}" -------------------------------------------------------------------------------- /templates/noop/resources/noop: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liferay/liferay-docker/5bf6d081d5c066be4371033c28cde2e5f0480e1a/templates/noop/resources/noop -------------------------------------------------------------------------------- /templates/noop/resources/noop.asm: -------------------------------------------------------------------------------- 1 | ; Build command: nasm -f bin -o noop noop.asm 2 | ; 3 | ; See https://www.muppetlabs.com/~breadbox/software/tiny/teensy.html 4 | 5 | BITS 32 6 | 7 | org 0x00010000 8 | 9 | db 0x7F, "ELF" ; e_ident 10 | dd 1 ; p_type 11 | dd 0 ; p_offset 12 | dd $$ ; p_vaddr 13 | dw 2 ; e_type ; p_paddr 14 | dw 3 ; e_machine 15 | dd _start ; e_version ; p_filesz 16 | dd _start ; e_entry ; p_memsz 17 | dd 4 ; e_phoff ; p_flags 18 | _start: 19 | mov bl, 0 ; e_shoff ; p_align 20 | xor eax, eax 21 | inc eax ; e_flags 22 | int 0x80 23 | db 0 24 | dw 0x34 ; e_ehsize 25 | dw 0x20 ; e_phentsize 26 | db 1 ; e_phnum 27 | ; e_shentsize 28 | ; e_shnum 29 | ; e_shstrndx 30 | 31 | filesize equ $ - $$ -------------------------------------------------------------------------------- /templates/squid/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bullseye-slim 2 | 3 | ARG LABEL_BUILD_DATE 4 | ARG LABEL_NAME 5 | ARG LABEL_VCS_REF 6 | ARG LABEL_VCS_URL 7 | ARG LABEL_VERSION 8 | 9 | COPY resources/usr/ /usr 10 | 11 | ENTRYPOINT ["tini", "-v", "--", "/usr/local/bin/liferay_squid_entrypoint.sh"] 12 | 13 | EXPOSE 3128 3129 14 | 15 | LABEL org.label-schema.build-date="${LABEL_BUILD_DATE}" 16 | LABEL 
org.label-schema.name="${LABEL_NAME}" 17 | LABEL org.label-schema.schema-version="1.0" 18 | LABEL org.label-schema.vcs-ref="${LABEL_VCS_REF}" 19 | LABEL org.label-schema.vcs-url="${LABEL_VCS_URL}" 20 | LABEL org.label-schema.vendor="Liferay, Inc." 21 | LABEL org.label-schema.version="${LABEL_VERSION}" 22 | 23 | RUN apt-get update && \ 24 | apt-get install --yes openssl squid-openssl tini 25 | 26 | COPY resources/etc/squid/squid.conf /etc/squid/squid.conf -------------------------------------------------------------------------------- /templates/squid/resources/etc/squid/squid.conf: -------------------------------------------------------------------------------- 1 | access_log DEBUG 2 | acl CONNECT method CONNECT 3 | # duplicate removed: acl CONNECT method CONNECT 4 | acl SSL_ports port 443 5 | # duplicate removed: acl SSL_ports port 443 6 | acl Safe_ports port 1025-65535 7 | acl Safe_ports port 21 8 | acl Safe_ports port 210 9 | acl Safe_ports port 280 10 | acl Safe_ports port 443 11 | acl Safe_ports port 488 12 | acl Safe_ports port 591 13 | acl Safe_ports port 70 14 | acl Safe_ports port 777 15 | acl Safe_ports port 80 16 | acl localnet src 10.0.0.0/8 17 | acl localnet src 172.16.0.0/12 18 | acl localnet src 192.168.0.0/16 19 | acl localnet src fc00::/7 20 | acl localnet src fe80::/10 21 | cache_dir ufs /var/spool/squid 10240 16 256 22 | cache_log DEBUG 23 | coredump_dir /var/spool/squid 24 | http_access allow SSL_ports 25 | http_access allow localhost 26 | # duplicate removed: http_access allow localhost 27 | http_access allow localhost manager 28 | http_access allow localnet 29 | http_access deny all 30 | http_access deny manager 31 | http_port 3128 32 | http_port 3129 ssl-bump generate-host-certificates=on dynamic_cert_mem_cache_size=20MB cert=/etc/squid/seeder.crt key=/etc/squid/seeder.key 33 | maximum_object_size 1 GB 34 | ssl_bump server-first all 35 | sslcrtd_program /usr/lib/squid/security_file_certgen -s /var/lib/squid/ssl_db -M 20MB 36 | sslproxy_cert_error allow all 37 | 38 | refresh_pattern ^ftp: 1440 20% 10080 39
| refresh_pattern ^gopher: 1440 0% 1440 40 | refresh_pattern -i (/cgi-bin/|\?) 0 0% 0 41 | refresh_pattern . 0 20% 4320 -------------------------------------------------------------------------------- /templates/squid/resources/usr/local/bin/liferay_squid_entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function main { 4 | rm -f /run/squid.pid 5 | 6 | if [ ! -e /etc/squid/seeder.crt ] 7 | then 8 | mkdir -p /var/lib/squid 9 | 10 | /usr/lib/squid/security_file_certgen \ 11 | -c \ 12 | -s /var/lib/squid/ssl_db \ 13 | -M 20MB 14 | 15 | chown -R proxy:proxy /var/lib/squid 16 | 17 | openssl req \ 18 | -days 365 \ 19 | -keyout /etc/squid/seeder.key \ 20 | -new \ 21 | -newkey rsa:2048 \ 22 | -nodes \ 23 | -out /etc/squid/seeder.crt \ 24 | -subj /C=US/ST=CA/L=LAX/O=Liferay/OU=IT/CN=localhost \ 25 | -x509 26 | fi 27 | 28 | squid -z && rm -f /run/squid.pid 29 | 30 | squid -CNYd 1 31 | 32 | # 33 | # curl --location https://dlcdn.apache.org/netbeans/netbeans/19/netbeans-19-bin.zip --output netbeans-19-bin.zip --preproxy localhost:3129 34 | # curl --location https://www.bbc.com/robots.txt --preproxy localhost:3129 35 | # 36 | } 37 | 38 | main -------------------------------------------------------------------------------- /templates/test/resources/mnt/liferay/files/tomcat/webapps/ROOT/test_docker_image_files.jsp: -------------------------------------------------------------------------------- 1 | <% 2 | out.println("TEST"); 3 | %> -------------------------------------------------------------------------------- /templates/test/resources/mnt/liferay/scripts/test_docker_image_scripts_1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "TEST1" > /opt/liferay/tomcat/webapps/ROOT/test_docker_image_scripts_1.jsp 4 | echo "TEST1" > /opt/liferay/tomcat/webapps/ROOT/test_docker_image_scripts_2.jsp 
-------------------------------------------------------------------------------- /templates/test/resources/mnt/liferay/scripts/test_docker_image_scripts_2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "TEST2" > /opt/liferay/tomcat/webapps/ROOT/test_docker_image_scripts_2.jsp -------------------------------------------------------------------------------- /test_patching_tool_version.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | source ./_liferay_common.sh 4 | source ./_test_common.sh 5 | 6 | function main { 7 | test_patching_tool_version 8 | } 9 | 10 | function test_patching_tool_version { 11 | _test_patching_tool_version "1.0" 12 | _test_patching_tool_version "2.0" 13 | _test_patching_tool_version "3.0" 14 | _test_patching_tool_version "4.0" 15 | } 16 | 17 | function _test_patching_tool_version { 18 | echo -e "Running _test_patching_tool_version for ${1}.\n" 19 | 20 | local latest_patching_tool_version=$(./patching_tool_version.sh "${1}") 21 | 22 | if [ "${1}" == "1.0" ] 23 | then 24 | assert_equals "${latest_patching_tool_version}" "1.0.24" 25 | else 26 | assert_equals \ 27 | "${latest_patching_tool_version}" \ 28 | "$(lc_curl "https://releases.liferay.com/tools/patching-tool/LATEST-${1}.txt")" 29 | fi 30 | } 31 | 32 | main -------------------------------------------------------------------------------- /update_permissions.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | find . -name "*.sh" ! -name "_*.sh" -type f -exec chmod 755 {} ";" 4 | 5 | find . -name "_*.sh" -type f -exec chmod 644 {} ";" --------------------------------------------------------------------------------