├── playbooks ├── atom ├── archivematica-noble │ ├── .gitignore │ ├── ansible.cfg │ ├── requirements-qa.yml │ ├── requirements.yml │ ├── singlenode.yml │ ├── Vagrantfile │ ├── README.md │ ├── vars-singlenode-qa.yml │ └── vars-singlenode-1.18.yml ├── archivematica-centos7 │ ├── .gitignore │ ├── ansible.cfg │ ├── requirements.yml │ ├── singlenode.yml │ ├── singlenode-indexless.yml │ ├── Vagrantfile.openstack │ ├── Vagrantfile │ ├── vars-singlenode-1.8.yml │ ├── vars-singlenode-1.11.yml │ ├── vars-singlenode-1.12.yml │ ├── vars-singlenode-1.9.yml │ ├── vars-singlenode-1.10.yml │ ├── vars-singlenode-qa.yml │ ├── vars-singlenode-1.14.yml │ ├── vars-singlenode-1.13.yml │ └── README.md ├── atom-bionic │ ├── ansible.cfg │ ├── .gitignore │ ├── singlenode.yml │ ├── requirements.yml │ ├── README.md │ ├── Vagrantfile │ └── vars-singlenode-qa.yml ├── atom-focal │ ├── ansible.cfg │ ├── .gitignore │ ├── singlenode.yml │ ├── requirements.yml │ ├── README.md │ ├── Vagrantfile │ └── vars-singlenode-qa.yml ├── atom-noble │ ├── ansible.cfg │ ├── .gitignore │ ├── requirements.yml │ ├── singlenode.yml │ ├── README.md │ ├── Vagrantfile │ └── vars-singlenode-qa.yml ├── atom-rocky9 │ ├── ansible.cfg │ ├── .gitignore │ ├── requirements.yml │ ├── README.md │ ├── Vagrantfile │ ├── singlenode.yml │ └── vars-singlenode-qa.yml ├── archivematica-bionic │ ├── .gitignore │ ├── ansible.cfg │ ├── requirements-qa.yml │ ├── requirements.yml │ ├── singlenode-indexless.yml │ ├── singlenode.yml │ ├── Vagrantfile │ ├── Vagrantfile.openstack │ ├── vars-singlenode-1.8.yml │ ├── vars-singlenode-1.11.yml │ ├── vars-singlenode-1.10.yml │ ├── vars-singlenode-1.9.yml │ ├── vars-singlenode-1.12.yml │ ├── vars-singlenode-qa.yml │ ├── vars-singlenode-1.13.yml │ ├── vars-singlenode-1.14.yml │ ├── README.md │ └── .Jenkinsfile └── archivematica-jammy │ ├── .gitignore │ ├── ansible.cfg │ ├── requirements-qa.yml │ ├── requirements.yml │ ├── singlenode.yml │ ├── Vagrantfile │ ├── Vagrantfile.openstack │ ├── README.md │ ├── vars-singlenode-qa.yml │ ├── vars-singlenode-1.17.yml │ └── .Jenkinsfile ├── .gitignore ├── tests ├── dip-upload │ ├── .gitignore │ ├── requirements.txt │ ├── compose.yaml │ ├── Dockerfile │ ├── requirements.yml │ ├── archivematica.yml │ ├── archivematica-vars.yml │ ├── atom-vars.yml │ ├── README.md │ └── atom.yml ├── elasticsearch-replication │ ├── .gitignore │ ├── ansible.cfg │ ├── README.md │ ├── hosts │ ├── populate-index.sh │ ├── Vagrantfile │ └── playbook.yml ├── archivematica-upgrade │ ├── .gitignore │ ├── requirements.txt │ ├── compose.yaml │ ├── Dockerfile │ ├── playbook.yml │ └── README.md └── archivematica-acceptance-tests │ ├── .gitignore │ ├── requirements.txt │ ├── compose.yaml │ ├── requirements.yml │ ├── playbook.yml │ ├── Dockerfile │ ├── vars.yml │ └── README.md ├── roles ├── elasticsearch-restore │ └── tasks │ │ ├── main.yml │ │ ├── deps.yml │ │ └── restore.yml ├── elasticsearch-repository │ └── tasks │ │ ├── main.yml │ │ ├── deps.yml │ │ └── repository.yml ├── elasticsearch-snapshot │ └── tasks │ │ ├── main.yml │ │ ├── deps.yml │ │ └── snapshot.yml ├── nfs-client │ ├── defaults │ │ └── main.yml │ └── tasks │ │ └── main.yml └── nfs │ ├── handlers │ └── main.yml │ ├── vars │ ├── RedHat.yml │ └── Debian.yml │ ├── meta │ └── main.yml │ ├── defaults │ └── main.yml │ ├── README.md │ └── tasks │ └── main.yml ├── COPYRIGHT ├── README.md └── .github └── workflows └── archivematica-playbook.yml /playbooks/atom: -------------------------------------------------------------------------------- 1 | atom-focal 
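The tree above lists one self-contained playbook directory per target platform (Archivematica on Bionic/Jammy/Noble/CentOS 7, AtoM on Bionic/Focal/Noble/Rocky 9), each with its own `requirements.yml`, `singlenode.yml`, `Vagrantfile`, and vars files; `playbooks/atom` is a one-line pointer to `atom-focal`. As a rough quick-start sketch — the clone URL is an assumption, and any playbook directory from the tree can stand in for `archivematica-noble`:

```bash
# Hypothetical quick start; the repository URL is assumed, and any playbook
# directory from the tree above can replace archivematica-noble.
git clone https://github.com/artefactual/deploy-pub.git
cd deploy-pub/playbooks/archivematica-noble

# Fetch the Ansible roles pinned in requirements.yml into ./roles
# (each playbook's .gitignore excludes /roles for this reason).
ansible-galaxy install -f -p roles/ -r requirements.yml

# Create and provision the local VM described by the Vagrantfile.
vagrant up
```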
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vagrant/ 2 | *.retry 3 | -------------------------------------------------------------------------------- /tests/dip-upload/.gitignore: -------------------------------------------------------------------------------- 1 | ssh_pub_key 2 | .venv 3 | roles 4 | -------------------------------------------------------------------------------- /tests/elasticsearch-replication/.gitignore: -------------------------------------------------------------------------------- 1 | /ansible.log 2 | -------------------------------------------------------------------------------- /playbooks/archivematica-noble/.gitignore: -------------------------------------------------------------------------------- 1 | /roles 2 | /.vagrant 3 | /src 4 | -------------------------------------------------------------------------------- /tests/archivematica-upgrade/.gitignore: -------------------------------------------------------------------------------- 1 | ssh_pub_key 2 | .venv 3 | roles 4 | -------------------------------------------------------------------------------- /tests/archivematica-acceptance-tests/.gitignore: -------------------------------------------------------------------------------- 1 | ssh_pub_key 2 | .venv 3 | roles 4 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/.gitignore: -------------------------------------------------------------------------------- 1 | /roles 2 | /.vagrant 3 | /src 4 | .retry 5 | -------------------------------------------------------------------------------- /playbooks/atom-bionic/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | nocows = 1 3 | 4 | [ssh_connection] 5 | pipelining = True 6 | -------------------------------------------------------------------------------- /playbooks/atom-focal/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | nocows = 1 3 | 4 | [ssh_connection] 5 | pipelining = True 6 | -------------------------------------------------------------------------------- /playbooks/atom-noble/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | nocows = 1 3 | 4 | [ssh_connection] 5 | pipelining = True 6 | -------------------------------------------------------------------------------- /playbooks/atom-rocky9/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | nocows = 1 3 | 4 | [ssh_connection] 5 | pipelining = True 6 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/.gitignore: -------------------------------------------------------------------------------- 1 | /roles 2 | /.vagrant 3 | /src 4 | /ubuntu-bionic-18.04-cloudimg-console.log 5 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | nocows = 1 3 | 4 | [ssh_connection] 5 | pipelining = True 6 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/ansible.cfg: 
-------------------------------------------------------------------------------- 1 | [defaults] 2 | nocows = 1 3 | 4 | [ssh_connection] 5 | pipelining = True 6 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/.gitignore: -------------------------------------------------------------------------------- 1 | /roles 2 | /.vagrant 3 | /src 4 | /ubuntu-bionic-18.04-cloudimg-console.log 5 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | nocows = 1 3 | 4 | [ssh_connection] 5 | pipelining = True 6 | -------------------------------------------------------------------------------- /playbooks/archivematica-noble/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | nocows = 1 3 | 4 | [ssh_connection] 5 | pipelining = True 6 | -------------------------------------------------------------------------------- /playbooks/atom-bionic/.gitignore: -------------------------------------------------------------------------------- 1 | /roles 2 | /.vagrant 3 | /src 4 | /singlenode.retry 5 | /ubuntu-*-cloudimg-console.log 6 | -------------------------------------------------------------------------------- /playbooks/atom-focal/.gitignore: -------------------------------------------------------------------------------- 1 | /roles 2 | /.vagrant 3 | /src 4 | /singlenode.retry 5 | /ubuntu-*-cloudimg-console.log 6 | -------------------------------------------------------------------------------- /playbooks/atom-noble/.gitignore: -------------------------------------------------------------------------------- 1 | /roles 2 | /.vagrant 3 | /src 4 | /singlenode.retry 5 | /ubuntu-*-cloudimg-console.log 6 | -------------------------------------------------------------------------------- /playbooks/atom-rocky9/.gitignore: -------------------------------------------------------------------------------- 1 | /roles 2 | /.vagrant 3 | /src 4 | /singlenode.retry 5 | /ubuntu-*-cloudimg-console.log 6 | -------------------------------------------------------------------------------- /roles/elasticsearch-restore/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include: "deps.yml" 4 | sudo: "yes" 5 | 6 | - include: "restore.yml" 7 | -------------------------------------------------------------------------------- /roles/elasticsearch-repository/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include: "deps.yml" 4 | sudo: "yes" 5 | 6 | - include: "repository.yml" -------------------------------------------------------------------------------- /roles/elasticsearch-snapshot/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - include: "deps.yml" 4 | sudo: "yes" 5 | 6 | - include: "snapshot.yml" 7 | -------------------------------------------------------------------------------- /roles/nfs-client/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | nfs_client_pkgs: 4 | - "nfs-common" 5 | - "portmap" 6 | 7 | nfs_client_imports: [] -------------------------------------------------------------------------------- /tests/dip-upload/requirements.txt: 
-------------------------------------------------------------------------------- 1 | ansible 2 | git+https://github.com/containers/podman-compose.git@2681566580b4eaadfc5e6000ad19e49e56006e2b#egg=podman-compose 3 | -------------------------------------------------------------------------------- /tests/elasticsearch-replication/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | roles_path = /etc/ansible/roles:../../roles 3 | log_path = ./ansible.log 4 | host_key_checking = false 5 | -------------------------------------------------------------------------------- /tests/archivematica-upgrade/requirements.txt: -------------------------------------------------------------------------------- 1 | ansible 2 | git+https://github.com/containers/podman-compose.git@2681566580b4eaadfc5e6000ad19e49e56006e2b#egg=podman-compose 3 | -------------------------------------------------------------------------------- /tests/archivematica-acceptance-tests/requirements.txt: -------------------------------------------------------------------------------- 1 | ansible 2 | git+https://github.com/containers/podman-compose.git@2681566580b4eaadfc5e6000ad19e49e56006e2b#egg=podman-compose 3 | -------------------------------------------------------------------------------- /COPYRIGHT: -------------------------------------------------------------------------------- 1 | Role: https://github.com/Traackr/ansible-elasticsearch 2 | License: MIT 3 | Author: George Stathis 4 | 5 | Role: https://github.com/atsaki/ansible-nfs 6 | License: BSD 7 | Author: Atsushi Sasaki 8 | -------------------------------------------------------------------------------- /roles/elasticsearch-snapshot/tasks/deps.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "Install python-pip" 4 | apt: "pkg=python-pip state=present update_cache=yes cache_valid_time=300" 5 | 6 | - name: "Install elasticsearch-curator" 7 | pip: "name=elasticsearch-curator" -------------------------------------------------------------------------------- /roles/elasticsearch-repository/tasks/deps.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "Install python-pip" 4 | apt: "pkg=python-pip state=present update_cache=yes cache_valid_time=300" 5 | 6 | - name: "Install elasticsearch-curator" 7 | pip: "name=elasticsearch-curator" -------------------------------------------------------------------------------- /roles/nfs/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: refresh exports 4 | command: exportfs -ra 5 | sudo: yes 6 | 7 | - name: restart nfs services 8 | service: name="{{ item }}" state=restarted 9 | with_items: nfs_services 10 | sudo: yes 11 | -------------------------------------------------------------------------------- /roles/nfs/vars/RedHat.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | nfs_packages: 4 | - nfs-utils 5 | 6 | nfs_services: 7 | - rpcbind 8 | - nfs 9 | 10 | nfs_default_owner: nfsnobody 11 | nfs_default_group: nfsnobody 12 | 13 | nfs_config_file: /etc/sysconfig/nfs 14 | -------------------------------------------------------------------------------- /roles/nfs/vars/Debian.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | nfs_packages: 4 | - nfs-kernel-server 5 | 6 | nfs_services: 7 | - nfs-kernel-server 
8 | 9 | nfs_default_owner: nobody 10 | nfs_default_group: nogroup 11 | 12 | nfs_config_file: /etc/default/nfs-kernel-server 13 | -------------------------------------------------------------------------------- /tests/archivematica-upgrade/compose.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: archivematica-upgrade-test 3 | 4 | services: 5 | 6 | archivematica: 7 | build: 8 | args: 9 | UBUNTU_VERSION: "22.04" 10 | ports: 11 | - "2222:22" 12 | - "8000:80" 13 | - "8001:8000" 14 | -------------------------------------------------------------------------------- /roles/elasticsearch-restore/tasks/deps.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "Install packages" 4 | apt: "pkg={{ item }} state=present update_cache=yes cache_valid_time=300" 5 | with_items: 6 | - "python-pip" 7 | - "curl" 8 | 9 | - name: "Install elasticsearch-curator" 10 | pip: "name=elasticsearch-curator" 11 | -------------------------------------------------------------------------------- /roles/nfs/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | galaxy_info: 3 | author: Atsushi Sasaki 4 | description: NFS installation and setup 5 | license: license (BSD) 6 | min_ansible_version: 1.4 7 | platforms: 8 | - name: EL 9 | versions: 10 | - 6 11 | - name: Ubuntu 12 | versions: 13 | - precise 14 | categories: 15 | - system 16 | dependencies: [] 17 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | deploy-pub 2 | 3 | A set of ansible playbooks that can be used to deploy 4 | Artefactual projects like AtoM and Archivematica. 5 | 6 | These are samples, that require some local customization 7 | 8 | To create a working development environment, you can use the Vagrantfiles 9 | included with each playbook. See the READMEs in each playbook for details. 
10 | -------------------------------------------------------------------------------- /roles/elasticsearch-repository/tasks/repository.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "Setup snapshot repository" 4 | command: "es_repo_mgr create fs --repository {{ elasticsearch_repository_name }} --location {{ elasticsearch_repository_location }}" 5 | register: setup_snapshot_repository 6 | changed_when: "'Repository test creation validated' in setup_snapshot_repository.stdout" 7 | -------------------------------------------------------------------------------- /roles/nfs/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | nfs_exported_directories: [] 4 | nfs_ports: 5 | - {name: LOCKD_TCPPORT, value: 32803} 6 | - {name: LOCKD_UDPPORT, value: 32769} 7 | - {name: MOUNTD_PORT, value: 892} 8 | - {name: RQUOTAD_PORT, value: 875} 9 | - {name: STATD_PORT, value: 662} 10 | - {name: STATD_OUTGOING_PORT, value: 2020} 11 | -------------------------------------------------------------------------------- /tests/archivematica-acceptance-tests/compose.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: archivematica-acceptance-test 3 | 4 | services: 5 | 6 | archivematica: 7 | build: 8 | args: 9 | DOCKER_IMAGE_NAME: "${DOCKER_IMAGE_NAME:-ubuntu}" 10 | DOCKER_IMAGE_TAG: "${DOCKER_IMAGE_TAG:-22.04}" 11 | TARGET: server 12 | 13 | ports: 14 | - "2222:22" 15 | - "8000:80" 16 | - "8001:8000" 17 | -------------------------------------------------------------------------------- /tests/dip-upload/compose.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: dip-upload-test 3 | 4 | services: 5 | 6 | archivematica: 7 | build: 8 | args: 9 | UBUNTU_VERSION: "22.04" 10 | ports: 11 | - "2222:22" 12 | - "8000:80" 13 | - "8001:8000" 14 | 15 | atom: 16 | build: 17 | args: 18 | UBUNTU_VERSION: "24.04" 19 | ports: 20 | - "9222:22" 21 | - "9000:80" 22 | links: 23 | - "archivematica" 24 | -------------------------------------------------------------------------------- /roles/nfs-client/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "Install NFS packages" 4 | action: "apt pkg={{ item }} state=present" 5 | with_items: "nfs_client_pkgs" 6 | 7 | - name: "Ensure that mount points exist" 8 | file: "path={{ item.name }} state=directory" 9 | with_items: "nfs_client_imports" 10 | 11 | - name: "Mount NFS imports" 12 | mount: "fstype=nfs src={{ item.src }} name={{ item.name }} opts={{ item.opts }} state=mounted" 13 | with_items: "nfs_client_imports" -------------------------------------------------------------------------------- /tests/archivematica-upgrade/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG UBUNTU_VERSION=22.04 2 | 3 | FROM ubuntu:${UBUNTU_VERSION} 4 | 5 | ENV DEBIAN_FRONTEND noninteractive 6 | 7 | RUN apt-get update && apt-get install -y sudo openssh-server rsync locales && apt-get clean 8 | 9 | RUN useradd --home-dir /home/ubuntu --system ubuntu 10 | 11 | COPY --chown=ubuntu:ubuntu --chmod=600 ssh_pub_key /home/ubuntu/.ssh/authorized_keys 12 | 13 | RUN mkdir -p /etc/sudoers.d/ && echo 'ubuntu ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers.d/ubuntu 14 | 15 | EXPOSE 22 16 | EXPOSE 80 17 | EXPOSE 8000 18 | 19 | CMD [ "/sbin/init" ] 20 | 
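The `tests/archivematica-upgrade` files shown above (requirements.txt, compose.yaml, and this Dockerfile) describe an SSH-reachable Ubuntu 22.04 container that Ansible provisions over a forwarded port. A minimal sketch of how the pieces might be wired together — the key file name and the inventory line are assumptions; only the pinned dependencies, the `ssh_pub_key` copy, and the port mapping come from the files themselves:

```bash
# Minimal sketch, assuming it is run from tests/archivematica-upgrade.
python3 -m venv .venv && . .venv/bin/activate
pip install -r requirements.txt          # installs ansible and the pinned podman-compose

# The Dockerfile copies ./ssh_pub_key into the ubuntu user's authorized_keys,
# so a public key must exist under that exact name (the key file name here is arbitrary).
ssh-keygen -t ed25519 -f upgrade_test_key -N ""
cp upgrade_test_key.pub ssh_pub_key

podman-compose up --detach --build       # builds and starts the Ubuntu 22.04 service

# compose.yaml maps host port 2222 to the container's port 22, so an inventory
# entry for playbook.yml would presumably look like:
#   archivematica ansible_host=127.0.0.1 ansible_port=2222 ansible_user=ubuntu
```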
-------------------------------------------------------------------------------- /roles/elasticsearch-snapshot/tasks/snapshot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "Delete previous snapshot" 4 | command: "curator delete snapshots --repository {{ elasticsearch_snapshot_repository }} --snapshot='{{ item }}'" 5 | when: "elasticsearch_snapshot_overwrite is defined and elasticsearch_snapshot_overwrite|bool" 6 | with_items: "elasticsearch_snapshot_indices" 7 | ignore_errors: yes 8 | 9 | - name: "Take snapshot" 10 | command: "curator snapshot --name='{{ elasticsearch_snapshot_name }}' --repository='{{ elasticsearch_snapshot_repository }}' indices --index='{{ item }}'" 11 | with_items: "elasticsearch_snapshot_indices" 12 | -------------------------------------------------------------------------------- /tests/elasticsearch-replication/README.md: -------------------------------------------------------------------------------- 1 | Steps: 2 | 3 | Create the virtual machines: 4 | 5 | ```bash 6 | vagrant up --no-provision --parallel 7 | ``` 8 | 9 | Provision the NFS server: 10 | 11 | ```bash 12 | vagrant provision nfs 13 | ``` 14 | 15 | Install ES in all the nodes of the cluster and configure the repository: 16 | 17 | ```bash 18 | ansible-playbook -i hosts --limit es_servers --tags elasticsearch,elasticsearch-repository 19 | ``` 20 | 21 | Populate index in the cluster: 22 | 23 | ```bash 24 | $ ./populate-index.sh 25 | ``` 26 | 27 | Take a snapshot and test restoring it: 28 | 29 | ```bash 30 | $ ansible-playbook -i hosts --limit es_servers --tags elasticsearch-snapshot,elasticsearch-restore 31 | ``` 32 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/requirements-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | 7 | - src: "https://github.com/artefactual-labs/ansible-percona" 8 | version: "master" 9 | name: "artefactual.percona" 10 | 11 | - src: "https://github.com/artefactual-labs/ansible-gearman" 12 | version: "master" 13 | name: "artefactual.gearman" 14 | 15 | - src: "https://github.com/artefactual-labs/ansible-nginx" 16 | version: "master" 17 | name: "artefactual.nginx" 18 | 19 | - src: "https://github.com/artefactual-labs/ansible-archivematica-src" 20 | version: "qa/1.x" 21 | name: "artefactual.archivematica-src" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-clamav" 24 | version: "master" 25 | name: "artefactual.clamav" 26 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/requirements-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | 7 | - src: "https://github.com/artefactual-labs/ansible-percona" 8 | version: "master" 9 | name: "artefactual.percona" 10 | 11 | - src: "https://github.com/artefactual-labs/ansible-gearman" 12 | version: "master" 13 | name: "artefactual.gearman" 14 | 15 | - src: "https://github.com/artefactual-labs/ansible-nginx" 16 | version: "master" 17 | name: "artefactual.nginx" 18 | 19 | - src: "https://github.com/artefactual-labs/ansible-archivematica-src" 20 | version: "qa/1.x" 21 | name: 
"artefactual.archivematica-src" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-clamav" 24 | version: "master" 25 | name: "artefactual.clamav" 26 | -------------------------------------------------------------------------------- /playbooks/archivematica-noble/requirements-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | 7 | - src: "https://github.com/artefactual-labs/ansible-percona" 8 | version: "master" 9 | name: "artefactual.percona" 10 | 11 | - src: "https://github.com/artefactual-labs/ansible-gearman" 12 | version: "master" 13 | name: "artefactual.gearman" 14 | 15 | - src: "https://github.com/artefactual-labs/ansible-nginx" 16 | version: "master" 17 | name: "artefactual.nginx" 18 | 19 | - src: "https://github.com/artefactual-labs/ansible-archivematica-src" 20 | version: "qa/1.x" 21 | name: "artefactual.archivematica-src" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-clamav" 24 | version: "master" 25 | name: "artefactual.clamav" 26 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | 7 | - src: "https://github.com/artefactual-labs/ansible-percona" 8 | version: "master" 9 | name: "artefactual.percona" 10 | 11 | - src: "https://github.com/artefactual-labs/ansible-gearman" 12 | version: "master" 13 | name: "artefactual.gearman" 14 | 15 | - src: "https://github.com/artefactual-labs/ansible-nginx" 16 | version: "master" 17 | name: "artefactual.nginx" 18 | 19 | - src: "https://github.com/artefactual-labs/ansible-archivematica-src" 20 | version: "stable/1.14.x" 21 | name: "artefactual.archivematica-src" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-clamav" 24 | version: "master" 25 | name: "artefactual.clamav" 26 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | 7 | - src: "https://github.com/artefactual-labs/ansible-percona" 8 | version: "master" 9 | name: "artefactual.percona" 10 | 11 | - src: "https://github.com/artefactual-labs/ansible-gearman" 12 | version: "master" 13 | name: "artefactual.gearman" 14 | 15 | - src: "https://github.com/artefactual-labs/ansible-nginx" 16 | version: "master" 17 | name: "artefactual.nginx" 18 | 19 | - src: "https://github.com/artefactual-labs/ansible-archivematica-src" 20 | version: "stable/1.17.x" 21 | name: "artefactual.archivematica-src" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-clamav" 24 | version: "master" 25 | name: "artefactual.clamav" 26 | -------------------------------------------------------------------------------- /playbooks/archivematica-noble/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | 7 | 
- src: "https://github.com/artefactual-labs/ansible-percona" 8 | version: "master" 9 | name: "artefactual.percona" 10 | 11 | - src: "https://github.com/artefactual-labs/ansible-gearman" 12 | version: "master" 13 | name: "artefactual.gearman" 14 | 15 | - src: "https://github.com/artefactual-labs/ansible-nginx" 16 | version: "master" 17 | name: "artefactual.nginx" 18 | 19 | - src: "https://github.com/artefactual-labs/ansible-archivematica-src" 20 | version: "stable/1.18.x" 21 | name: "artefactual.archivematica-src" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-clamav" 24 | version: "master" 25 | name: "artefactual.clamav" 26 | -------------------------------------------------------------------------------- /tests/archivematica-acceptance-tests/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | 7 | - src: "https://github.com/artefactual-labs/ansible-percona" 8 | version: "master" 9 | name: "artefactual.percona" 10 | 11 | - src: "https://github.com/artefactual-labs/ansible-gearman" 12 | version: "master" 13 | name: "artefactual.gearman" 14 | 15 | - src: "https://github.com/artefactual-labs/ansible-nginx" 16 | version: "master" 17 | name: "artefactual.nginx" 18 | 19 | - src: "https://github.com/artefactual-labs/ansible-archivematica-src" 20 | version: "qa/1.x" 21 | name: "artefactual.archivematica-src" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-clamav" 24 | version: "master" 25 | name: "artefactual.clamav" 26 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | 7 | - src: "https://github.com/artefactual-labs/ansible-percona" 8 | version: "master" 9 | name: "artefactual.percona" 10 | 11 | - src: "https://github.com/artefactual-labs/ansible-nginx" 12 | version: "master" 13 | name: "artefactual.nginx" 14 | 15 | - src: "https://github.com/artefactual-labs/ansible-clamav" 16 | version: "master" 17 | name: "artefactual.clamav" 18 | 19 | - src: "https://github.com/artefactual-labs/ansible-gearman.git" 20 | version: "master" 21 | name: "artefactual.gearman" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-archivematica-src" 24 | version: "stable/1.14.x" 25 | name: "artefactual.archivematica-src" 26 | -------------------------------------------------------------------------------- /playbooks/atom-bionic/singlenode.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "atom-local" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "vars-singlenode-qa.yml" 7 | tags: 8 | - "always" 9 | 10 | roles: 11 | 12 | - role: "artefactual.elasticsearch" 13 | become: "yes" 14 | tags: 15 | - "elasticsearch" 16 | 17 | - role: "artefactual.percona" 18 | become: "yes" 19 | tags: 20 | - "percona" 21 | 22 | - role: "artefactual.memcached" 23 | become: "yes" 24 | tags: 25 | - "memcached" 26 | 27 | - role: "artefactual.gearman" 28 | become: "yes" 29 | tags: 30 | - "gearman" 31 | 32 | - role: "artefactual.nginx" 33 | become: "yes" 34 | tags: 35 | - "nginx" 36 | 37 | - role: "artefactual.atom" 38 | become: "yes" 39 | tags: 40 | - "atom" 41 | 
-------------------------------------------------------------------------------- /playbooks/atom-focal/singlenode.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "atom-local" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "vars-singlenode-qa.yml" 7 | tags: 8 | - "always" 9 | 10 | roles: 11 | 12 | - role: "artefactual.elasticsearch" 13 | become: "yes" 14 | tags: 15 | - "elasticsearch" 16 | 17 | - role: "artefactual.percona" 18 | become: "yes" 19 | tags: 20 | - "percona" 21 | 22 | - role: "artefactual.memcached" 23 | become: "yes" 24 | tags: 25 | - "memcached" 26 | 27 | - role: "artefactual.gearman" 28 | become: "yes" 29 | tags: 30 | - "gearman" 31 | 32 | - role: "artefactual.nginx" 33 | become: "yes" 34 | tags: 35 | - "nginx" 36 | 37 | - role: "artefactual.atom" 38 | become: "yes" 39 | tags: 40 | - "atom" 41 | -------------------------------------------------------------------------------- /tests/dip-upload/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG UBUNTU_VERSION=22.04 2 | 3 | FROM ubuntu:${UBUNTU_VERSION} 4 | 5 | ENV DEBIAN_FRONTEND noninteractive 6 | 7 | # Ubuntu 24.04 and later Docker images include a default user with UID (1000) 8 | # and GID (1000). Remove this user to prevent conflicts with the USER_ID and 9 | # GROUP_ID build arguments. 10 | RUN set -ex \ 11 | && id -u ubuntu >/dev/null 2>&1 \ 12 | && userdel --remove ubuntu || true 13 | 14 | RUN apt-get update && apt-get install -y sudo openssh-server rsync locales && apt-get clean 15 | 16 | RUN useradd --home-dir /home/ubuntu --system ubuntu 17 | 18 | COPY --chown=ubuntu:ubuntu --chmod=600 ssh_pub_key /home/ubuntu/.ssh/authorized_keys 19 | 20 | RUN mkdir -p /etc/sudoers.d/ && echo 'ubuntu ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers.d/ubuntu 21 | 22 | EXPOSE 22 23 | EXPOSE 80 24 | EXPOSE 8000 25 | 26 | CMD [ "/sbin/init" ] 27 | -------------------------------------------------------------------------------- /roles/elasticsearch-restore/tasks/restore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "Close index" 4 | command: "curator close indices --index='{{ item }}'" 5 | ignore_errors: yes 6 | with_items: "elasticsearch_restore_indices_overwrite" 7 | 8 | - set_fact: 9 | url: "http://127.0.0.1:9200/_snapshot/{{ elasticsearch_restore_repository }}/{{ elasticsearch_restore_snapshot }}/_restore?wait_for_completion=true" 10 | body_json: > 11 | { 12 | "ignore_unavailable": false, 13 | "include_global_state": false, 14 | "rename_pattern": "(.+)", 15 | "rename_replacement": "$1{{ elasticsearch_restore_suffix }}" 16 | } 17 | 18 | - name: "Restore snapshot" 19 | command: "curl {{ url }} -d '{{ body_json }}'" 20 | 21 | - name: "Open index" 22 | command: "curator open indices --index='{{ item }}'" 23 | with_items: "elasticsearch_restore_indices_overwrite" 24 | -------------------------------------------------------------------------------- /playbooks/atom-bionic/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | path: "roles/" 7 | 8 | - src: "https://github.com/artefactual-labs/ansible-percona" 9 | version: "master" 10 | name: "artefactual.percona" 11 | path: "roles/" 12 | 13 | - src: "https://github.com/artefactual-labs/ansible-memcached" 14 | version: "master" 15 | name: 
"artefactual.memcached" 16 | path: "roles/" 17 | 18 | - src: "https://github.com/artefactual-labs/ansible-gearman" 19 | version: "master" 20 | name: "artefactual.gearman" 21 | path: "roles/" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-nginx" 24 | version: "master" 25 | name: "artefactual.nginx" 26 | path: "roles/" 27 | 28 | - src: "https://github.com/artefactual-labs/ansible-atom" 29 | version: "master" 30 | name: "artefactual.atom" 31 | path: "roles/" 32 | -------------------------------------------------------------------------------- /playbooks/atom-focal/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | path: "roles/" 7 | 8 | - src: "https://github.com/artefactual-labs/ansible-percona" 9 | version: "master" 10 | name: "artefactual.percona" 11 | path: "roles/" 12 | 13 | - src: "https://github.com/artefactual-labs/ansible-memcached" 14 | version: "master" 15 | name: "artefactual.memcached" 16 | path: "roles/" 17 | 18 | - src: "https://github.com/artefactual-labs/ansible-gearman" 19 | version: "master" 20 | name: "artefactual.gearman" 21 | path: "roles/" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-nginx" 24 | version: "master" 25 | name: "artefactual.nginx" 26 | path: "roles/" 27 | 28 | - src: "https://github.com/artefactual-labs/ansible-atom" 29 | version: "master" 30 | name: "artefactual.atom" 31 | path: "roles/" 32 | -------------------------------------------------------------------------------- /playbooks/atom-noble/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | path: "roles/" 7 | 8 | - src: "https://github.com/artefactual-labs/ansible-percona" 9 | version: "master" 10 | name: "artefactual.percona" 11 | path: "roles/" 12 | 13 | - src: "https://github.com/artefactual-labs/ansible-memcached" 14 | version: "master" 15 | name: "artefactual.memcached" 16 | path: "roles/" 17 | 18 | - src: "https://github.com/artefactual-labs/ansible-gearman" 19 | version: "master" 20 | name: "artefactual.gearman" 21 | path: "roles/" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-nginx" 24 | version: "master" 25 | name: "artefactual.nginx" 26 | path: "roles/" 27 | 28 | - src: "https://github.com/artefactual-labs/ansible-atom" 29 | version: "master" 30 | name: "artefactual.atom" 31 | path: "roles/" 32 | -------------------------------------------------------------------------------- /playbooks/atom-rocky9/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | path: "roles/" 7 | 8 | - src: "https://github.com/artefactual-labs/ansible-percona" 9 | version: "master" 10 | name: "artefactual.percona" 11 | path: "roles/" 12 | 13 | - src: "https://github.com/artefactual-labs/ansible-memcached" 14 | version: "master" 15 | name: "artefactual.memcached" 16 | path: "roles/" 17 | 18 | - src: "https://github.com/artefactual-labs/ansible-gearman" 19 | version: "master" 20 | name: "artefactual.gearman" 21 | path: "roles/" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-nginx" 24 | 
version: "master" 25 | name: "artefactual.nginx" 26 | path: "roles/" 27 | 28 | - src: "https://github.com/artefactual-labs/ansible-atom" 29 | version: "master" 30 | name: "artefactual.atom" 31 | path: "roles/" 32 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/singlenode.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "am-local-centos7" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "vars-singlenode-1.14.yml" 7 | tags: 8 | - "always" 9 | 10 | roles: 11 | 12 | - role: "artefactual.elasticsearch" 13 | become: "yes" 14 | tags: 15 | - "elasticsearch" 16 | when: "archivematica_src_search_enabled|bool" 17 | 18 | - role: "artefactual.percona" 19 | become: "yes" 20 | tags: 21 | - "percona" 22 | 23 | - role: "artefactual.nginx" 24 | become: "yes" 25 | tags: 26 | - "nginx" 27 | 28 | - role: "artefactual.gearman" 29 | become: "yes" 30 | tags: 31 | - "gearman" 32 | 33 | - role: "artefactual.clamav" 34 | become: "yes" 35 | tags: 36 | - "clamav" 37 | 38 | - role: "artefactual.archivematica-src" 39 | become: "yes" 40 | tags: 41 | - "archivematica-src" 42 | -------------------------------------------------------------------------------- /tests/elasticsearch-replication/hosts: -------------------------------------------------------------------------------- 1 | nfs ansible_ssh_host=192.168.100.50 2 | es01 ansible_ssh_host=192.168.100.100 elasticsearch_node_name=es01 elasticsearch_network_publish_host=192.168.100.100 3 | es02 ansible_ssh_host=192.168.100.101 elasticsearch_node_name=es02 elasticsearch_network_publish_host=192.168.100.101 4 | es03 ansible_ssh_host=192.168.100.102 elasticsearch_node_name=es03 elasticsearch_network_publish_host=192.168.100.102 5 | 6 | [es_servers] 7 | es01 8 | es02 9 | es03 10 | 11 | # READ http://blog.trifork.com/2013/10/24/how-to-avoid-the-split-brain-problem-in-elasticsearch/ 12 | 13 | [es_servers:vars] 14 | elasticsearch_version=1.5.0 15 | elasticsearch_discovery_zen_minimum_master_nodes=1 16 | elasticsearch_discovery_zen_ping_multicast_enabled=true 17 | elasticsearch_cluster_name=my-elasticsearch-cluster 18 | elasticsearch_timezone=America/Vancouver 19 | elasticsearch_network_bind_host=0.0.0.0 20 | 21 | [all:vars] 22 | ansible_ssh_user=vagrant 23 | ansible_ssh_pass=vagrant 24 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/singlenode-indexless.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "am-local" 3 | 4 | pre_tasks: 5 | 6 | - fail: 7 | msg: "This playbook is temporarily disabled, see https://github.com/artefactual-labs/archivematica-acceptance-tests/pull/25 for more details." 
8 | 9 | - include_vars: "vars-singlenode-qa.yml" 10 | tags: 11 | - "always" 12 | 13 | - name: "Install packages for development convenience" 14 | apt: 15 | pkg: "{{ item }}" 16 | state: "latest" 17 | with_items: 18 | - "fish" 19 | become: "yes" 20 | 21 | roles: 22 | 23 | - role: "artefactual.percona" 24 | become: "yes" 25 | tags: 26 | - "percona" 27 | 28 | - role: "artefactual.gearman" 29 | become: "yes" 30 | tags: 31 | - "gearman" 32 | 33 | - role: "artefactual.clamav" 34 | become: "yes" 35 | tags: 36 | - "clamav" 37 | 38 | - role: "artefactual.archivematica-src" 39 | become: "yes" 40 | tags: 41 | - "archivematica-src" 42 | -------------------------------------------------------------------------------- /tests/dip-upload/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - src: "https://github.com/artefactual-labs/ansible-elasticsearch" 4 | version: "master" 5 | name: "artefactual.elasticsearch" 6 | 7 | - src: "https://github.com/artefactual-labs/ansible-percona" 8 | version: "master" 9 | name: "artefactual.percona" 10 | 11 | - src: "https://github.com/artefactual-labs/ansible-gearman" 12 | version: "master" 13 | name: "artefactual.gearman" 14 | 15 | - src: "https://github.com/artefactual-labs/ansible-nginx" 16 | version: "master" 17 | name: "artefactual.nginx" 18 | 19 | - src: "https://github.com/artefactual-labs/ansible-archivematica-src" 20 | version: "qa/1.x" 21 | name: "artefactual.archivematica-src" 22 | 23 | - src: "https://github.com/artefactual-labs/ansible-clamav" 24 | version: "master" 25 | name: "artefactual.clamav" 26 | 27 | - src: "https://github.com/artefactual-labs/ansible-memcached" 28 | version: "master" 29 | name: "artefactual.memcached" 30 | 31 | - src: "https://github.com/artefactual-labs/ansible-atom" 32 | version: "master" 33 | name: "artefactual-atom" 34 | -------------------------------------------------------------------------------- /playbooks/atom-noble/singlenode.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "atom-local" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "vars-singlenode-qa.yml" 7 | tags: 8 | - "always" 9 | 10 | - name: "Upgrade all system packages" 11 | become: "yes" 12 | apt: 13 | name: "*" 14 | cache_valid_time: 3600 15 | state: latest 16 | update_cache: true 17 | tags: 18 | - "system-upgrade" 19 | 20 | roles: 21 | 22 | - role: "artefactual.elasticsearch" 23 | become: "yes" 24 | tags: 25 | - "elasticsearch" 26 | 27 | - role: "artefactual.percona" 28 | become: "yes" 29 | tags: 30 | - "percona" 31 | 32 | - role: "artefactual.memcached" 33 | become: "yes" 34 | tags: 35 | - "memcached" 36 | 37 | - role: "artefactual.gearman" 38 | become: "yes" 39 | tags: 40 | - "gearman" 41 | 42 | - role: "artefactual.nginx" 43 | become: "yes" 44 | tags: 45 | - "nginx" 46 | 47 | - role: "artefactual.atom" 48 | become: "yes" 49 | tags: 50 | - "atom" 51 | -------------------------------------------------------------------------------- /tests/archivematica-acceptance-tests/playbook.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "all" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "vars.yml" 7 | tags: 8 | - "always" 9 | 10 | - name: "Change home dir perms (to make transfer source visible)" 11 | command: "chmod 755 $HOME" 12 | become: "no" 13 | 14 | roles: 15 | 16 | - role: "artefactual.elasticsearch" 17 | become: "yes" 18 | 19 | - role: "artefactual.percona" 20 | become: "yes" 21 | 22 | - role: 
"artefactual.gearman" 23 | become: "yes" 24 | 25 | - role: "artefactual.clamav" 26 | become: "yes" 27 | 28 | - role: "artefactual.nginx" 29 | become: "yes" 30 | 31 | - role: "artefactual.archivematica-src" 32 | become: "yes" 33 | tags: 34 | - "archivematica-src" 35 | 36 | post_tasks: 37 | 38 | - name: "restart clamav daemons" 39 | become: "yes" 40 | service: 41 | name: "{{ item }}" 42 | state: restarted 43 | loop: 44 | - "clamav-freshclam" 45 | - "clamav-daemon" 46 | when: ansible_os_family == "Debian" 47 | tags: "restart-clamav" 48 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/singlenode.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "am-local" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "vars-singlenode-1.14.yml" 7 | tags: 8 | - "always" 9 | 10 | - name: "Install packages for development convenience" 11 | apt: 12 | pkg: "{{ item }}" 13 | state: "latest" 14 | with_items: 15 | - "fish" 16 | become: "yes" 17 | 18 | roles: 19 | 20 | - role: "artefactual.elasticsearch" 21 | become: "yes" 22 | tags: 23 | - "elasticsearch" 24 | when: "archivematica_src_search_enabled|bool" 25 | 26 | - role: "artefactual.percona" 27 | become: "yes" 28 | tags: 29 | - "percona" 30 | 31 | - role: "artefactual.nginx" 32 | become: "yes" 33 | tags: 34 | - "nginx" 35 | 36 | - role: "artefactual.gearman" 37 | become: "yes" 38 | tags: 39 | - "gearman" 40 | 41 | - role: "artefactual.clamav" 42 | become: "yes" 43 | tags: 44 | - "clamav" 45 | 46 | - role: "artefactual.archivematica-src" 47 | become: "yes" 48 | tags: 49 | - "archivematica-src" 50 | -------------------------------------------------------------------------------- /roles/nfs/README.md: -------------------------------------------------------------------------------- 1 | Role Name 2 | ======== 3 | 4 | This role installs and setup NFS server. 5 | 6 | Requirements 7 | ------------ 8 | 9 | Ansible 1.4 or higher. 10 | 11 | Role Variables 12 | -------------- 13 | 14 | ```yaml 15 | nfs_exported_directories: [] 16 | nfs_ports: 17 | - {name: LOCKD_TCPPORT, value: 32803} 18 | - {name: LOCKD_UDPPORT, value: 32769} 19 | - {name: MOUNTD_PORT, value: 892} 20 | - {name: RQUOTAD_PORT, value: 875} 21 | - {name: STATD_PORT, value: 662} 22 | - {name: STATD_OUTGOING_PORT, value: 2020} 23 | ``` 24 | 25 | Dependencies 26 | ------------ 27 | 28 | None. 29 | 30 | Example Playbook 31 | ------------------------- 32 | 33 | ```yaml 34 | 35 | - role: nfs 36 | nfs_exported_directories: 37 | - path: /export/test1 38 | hosts: 39 | - {name: 192.168.0.0/16, options: ["ro", "sync"]} 40 | - {name: 10.0.0.5, options: ["rw", "sync", "no_root_squash"]} 41 | - path: /export/test2 42 | hosts: 43 | - {name: "*", options: []} 44 | ``` 45 | 46 | License 47 | ------- 48 | 49 | BSD 50 | 51 | Author Information 52 | ------------------ 53 | 54 | This role was created in 2014 by Atsushi Sasaki (@atsaki). 
55 | -------------------------------------------------------------------------------- /tests/elasticsearch-replication/populate-index.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -o errexit 4 | set -o nounset 5 | set -o pipefail 6 | 7 | ESHOST="http://192.168.100.100:9200" 8 | ESINDEX="twitter" 9 | ESTYPE="tweet" 10 | ESBASE="$ESHOST/$ESINDEX/$ESTYPE" 11 | 12 | # Delete index 13 | curl -XDELETE "$ESHOST/$ESINDEX" 14 | echo 15 | 16 | # Create index 17 | curl -XPUT "$ESHOST/$ESINDEX" -d '{ 18 | "settings": { 19 | "number_of_shards": 3, 20 | "number_of_replicas": 2 21 | } 22 | }' 23 | echo 24 | 25 | # Create type 26 | curl -XPUT "$ESHOST/$ESINDEX/_mapping/$ESTYPE" -d '{ 27 | "'$ESTYPE'": { 28 | "properties": { 29 | "message": {"type": "string", "store": true } 30 | } 31 | } 32 | }' 33 | echo 34 | 35 | # Add documents 36 | for i in {1..100}; do 37 | curl -XPUT "$ESBASE/$i" -d '{ "message": "Tweet '$i'" }' 38 | echo 39 | done; 40 | 41 | # Refresh index 42 | curl -XPOST "$ESHOST/$ESINDEX/_refresh" 43 | echo 44 | 45 | # Show state 46 | echo 47 | echo "> -- Cluster health:" 48 | curl -XGET "$ESHOST/_cat/health" 49 | echo 50 | echo "> -- Available nodes:" 51 | curl -XGET "$ESHOST/_cat/nodes" 52 | echo 53 | echo "> -- Available indices:" 54 | curl -XGET "$ESHOST/_cat/indices" 55 | 56 | echo 57 | -------------------------------------------------------------------------------- /playbooks/atom-bionic/README.md: -------------------------------------------------------------------------------- 1 | # AtoM Playbook 2 | 3 | The provided playbook installs AtoM on a local Vagrant virtual machine. 4 | 5 | ## Requirements 6 | 7 | - Vagrant 2.1.4 or newer 8 | - Ansible 2.6.1 or newer 9 | 10 | ## How to use 11 | 12 | Dowload the Ansible roles 13 | 14 | $ ansible-galaxy install -f -p roles/ -r requirements.yml 15 | 16 | Create the virtual machine and provision it: 17 | 18 | $ vagrant up 19 | 20 | To ssh to the VM, run: 21 | 22 | $ vagrant ssh 23 | 24 | If you want to forward your SSH agent too, run: 25 | 26 | $ vagrant ssh -- -A 27 | 28 | To (re-)provision the VM, using Vagrant: 29 | 30 | $ vagrant provision 31 | 32 | To (re-)provision the VM, using Ansible commands directly: 33 | 34 | $ ansible-playbook singlenode.yml 35 | --inventory-file=".vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory" \ 36 | --user="vagrant" \ 37 | --private-key=".vagrant/machines/atom-local/virtualbox/private_key" \ 38 | --extra-vars="atom_dir=/vagrant/src atom_environment_type=development" \ 39 | --verbose 40 | 41 | To (re-)provision the VM, passing your own arguments to `Ansible`: 42 | 43 | $ ANSIBLE_ARGS="--tags=elasticsearch,percona,memcached,gearman,nginx" vagrant provision 44 | -------------------------------------------------------------------------------- /playbooks/atom-focal/README.md: -------------------------------------------------------------------------------- 1 | # AtoM Playbook 2 | 3 | The provided playbook installs AtoM on a local Vagrant virtual machine. 
4 | 5 | ## Requirements 6 | 7 | - Vagrant 2.1.4 or newer 8 | - Ansible 2.6.1 or newer 9 | 10 | ## How to use 11 | 12 | Dowload the Ansible roles 13 | 14 | $ ansible-galaxy install -f -p roles/ -r requirements.yml 15 | 16 | Create the virtual machine and provision it: 17 | 18 | $ vagrant up 19 | 20 | To ssh to the VM, run: 21 | 22 | $ vagrant ssh 23 | 24 | If you want to forward your SSH agent too, run: 25 | 26 | $ vagrant ssh -- -A 27 | 28 | To (re-)provision the VM, using Vagrant: 29 | 30 | $ vagrant provision 31 | 32 | To (re-)provision the VM, using Ansible commands directly: 33 | 34 | $ ansible-playbook singlenode.yml 35 | --inventory-file=".vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory" \ 36 | --user="vagrant" \ 37 | --private-key=".vagrant/machines/atom-local/virtualbox/private_key" \ 38 | --extra-vars="atom_dir=/vagrant/src atom_environment_type=development" \ 39 | --verbose 40 | 41 | To (re-)provision the VM, passing your own arguments to `Ansible`: 42 | 43 | $ ANSIBLE_ARGS="--tags=elasticsearch,percona,memcached,gearman,nginx" vagrant provision 44 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/singlenode.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "am-local" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "vars-singlenode-1.17.yml" 7 | tags: 8 | - "always" 9 | 10 | - name: "Install packages for development convenience" 11 | apt: 12 | pkg: "{{ item }}" 13 | state: "latest" 14 | with_items: 15 | - "fish" 16 | become: "yes" 17 | 18 | roles: 19 | 20 | - role: "artefactual.elasticsearch" 21 | become: "yes" 22 | tags: 23 | - "elasticsearch" 24 | when: "archivematica_src_search_enabled|bool" 25 | 26 | - role: "artefactual.percona" 27 | become: "yes" 28 | tags: 29 | - "percona" 30 | 31 | - role: "artefactual.nginx" 32 | become: "yes" 33 | tags: 34 | - "nginx" 35 | 36 | - role: "artefactual.gearman" 37 | become: "yes" 38 | tags: 39 | - "gearman" 40 | 41 | - role: "artefactual.clamav" 42 | become: "yes" 43 | tags: 44 | - "clamav" 45 | 46 | - role: "artefactual.archivematica-src" 47 | become: "yes" 48 | tags: 49 | - "archivematica-src" 50 | 51 | post_tasks: 52 | 53 | - name: "change home dir perms (to make transfer source visible)" 54 | become: "no" 55 | command: "chmod 755 $HOME" 56 | tags: 57 | - "homeperms" 58 | -------------------------------------------------------------------------------- /playbooks/archivematica-noble/singlenode.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "am-local" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "vars-singlenode-1.18.yml" 7 | tags: 8 | - "always" 9 | 10 | - name: "Install packages for development convenience" 11 | apt: 12 | pkg: "{{ item }}" 13 | state: "latest" 14 | with_items: 15 | - "fish" 16 | become: "yes" 17 | 18 | roles: 19 | 20 | - role: "artefactual.elasticsearch" 21 | become: "yes" 22 | tags: 23 | - "elasticsearch" 24 | when: "archivematica_src_search_enabled|bool" 25 | 26 | - role: "artefactual.percona" 27 | become: "yes" 28 | tags: 29 | - "percona" 30 | 31 | - role: "artefactual.nginx" 32 | become: "yes" 33 | tags: 34 | - "nginx" 35 | 36 | - role: "artefactual.gearman" 37 | become: "yes" 38 | tags: 39 | - "gearman" 40 | 41 | - role: "artefactual.clamav" 42 | become: "yes" 43 | tags: 44 | - "clamav" 45 | 46 | - role: "artefactual.archivematica-src" 47 | become: "yes" 48 | tags: 49 | - "archivematica-src" 50 | 51 | post_tasks: 52 | 53 | - 
name: "change home dir perms (to make transfer source visible)" 54 | become: "no" 55 | command: "chmod 755 $HOME" 56 | tags: 57 | - "homeperms" 58 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/singlenode-indexless.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "am-local-centos7" 3 | 4 | pre_tasks: 5 | 6 | - fail: 7 | msg: "This playbook is temporarily disabled, see https://github.com/artefactual-labs/archivematica-acceptance-tests/pull/25 for more details. Uncomment this task if you want to test." 8 | 9 | - include_vars: "vars-singlenode.yml" 10 | tags: 11 | - "always" 12 | 13 | 14 | - name: "Install SELinux related python packages" 15 | become: "yes" 16 | yum: 17 | name: "{{ item }}" 18 | state: "present" 19 | with_items: 20 | - libsemanage-python 21 | - policycoreutils-python 22 | 23 | - name: "Set SELinux to Permissive" 24 | command: "setenforce Permissive" 25 | become: "yes" 26 | 27 | roles: 28 | - { role: "artefactual.percona", tags: ["percona"], become: "yes" } 29 | - { role: "artefactual.clamav", tags: ["clamav"], become: "yes" } 30 | - { role: "artefactual.gearman", tags: ["gearman"], become: "yes" } 31 | - { role: "artefactual.nginx", tags: ["nginx"], become: "yes" } 32 | - { role: "artefactual.archivematica-src", tags: ["archivematica-src"], become: "yes" } 33 | 34 | tasks: 35 | - name: "change home dir perms (to make transfer source visible)" 36 | command: "chmod 755 $HOME" 37 | tags: "homeperms" 38 | become: "no" 39 | 40 | -------------------------------------------------------------------------------- /tests/archivematica-upgrade/playbook.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "all" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "../../playbooks/archivematica-noble/vars-singlenode-{{ am_version }}.yml" 7 | tags: 8 | - "always" 9 | 10 | - name: "Install packages for development convenience" 11 | apt: 12 | pkg: "{{ item }}" 13 | state: "latest" 14 | with_items: 15 | - "fish" 16 | become: "yes" 17 | 18 | roles: 19 | 20 | - role: "artefactual.elasticsearch" 21 | become: "yes" 22 | tags: 23 | - "elasticsearch" 24 | when: "archivematica_src_search_enabled|bool" 25 | 26 | - role: "artefactual.percona" 27 | become: "yes" 28 | tags: 29 | - "percona" 30 | 31 | - role: "artefactual.nginx" 32 | become: "yes" 33 | tags: 34 | - "nginx" 35 | 36 | - role: "artefactual.gearman" 37 | become: "yes" 38 | tags: 39 | - "gearman" 40 | 41 | - role: "artefactual.clamav" 42 | become: "yes" 43 | tags: 44 | - "clamav" 45 | 46 | - role: "artefactual.archivematica-src" 47 | become: "yes" 48 | tags: 49 | - "archivematica-src" 50 | 51 | post_tasks: 52 | 53 | - name: "change home dir perms (to make transfer source visible)" 54 | become: "no" 55 | command: "chmod 755 $HOME" 56 | tags: 57 | - "homeperms" 58 | -------------------------------------------------------------------------------- /playbooks/atom-rocky9/README.md: -------------------------------------------------------------------------------- 1 | # AtoM Playbook 2 | 3 | The provided playbook installs AtoM on a local Vagrant virtual machine. 
4 | 5 | ## Requirements 6 | 7 | - Vagrant 2.1.4 or newer 8 | - Ansible 2.10 or newer 9 | 10 | Tested with Ansible 2.15.6 and Vagrant 2.4.3-1 11 | 12 | ## How to use 13 | 14 | Dowload the Ansible roles 15 | 16 | $ ansible-galaxy install -f -p roles/ -r requirements.yml 17 | 18 | Create the virtual machine and provision it: 19 | 20 | $ vagrant up 21 | 22 | To ssh to the VM, run: 23 | 24 | $ vagrant ssh 25 | 26 | If you want to forward your SSH agent too, run: 27 | 28 | $ vagrant ssh -- -A 29 | 30 | To (re-)provision the VM, using Vagrant: 31 | 32 | $ vagrant provision 33 | 34 | To (re-)provision the VM, using Ansible commands directly: 35 | 36 | $ ansible-playbook singlenode.yml 37 | --inventory-file=".vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory" \ 38 | --user="vagrant" \ 39 | --private-key=".vagrant/machines/atom-local/virtualbox/private_key" \ 40 | --extra-vars="atom_dir=/vagrant/src atom_environment_type=development" \ 41 | --verbose 42 | 43 | To (re-)provision the VM, passing your own arguments to `Ansible`: 44 | 45 | $ ANSIBLE_ARGS="--tags=elasticsearch,percona,memcached,gearman,nginx" vagrant provision 46 | 47 | You can access AtoM service: http://192.168.168.200 (user: demo@example.com, pass: demo) 48 | -------------------------------------------------------------------------------- /playbooks/atom-noble/README.md: -------------------------------------------------------------------------------- 1 | # AtoM Playbook 2 | 3 | The provided playbook installs AtoM on a local Vagrant virtual machine. 4 | 5 | ## Requirements 6 | 7 | - Vagrant 2.1.4 or newer 8 | - Ansible 2.10 or newer 9 | 10 | Tested with Ansible 2.15.6 and Vagrant 2.4.3-1 11 | 12 | ## How to use 13 | 14 | Dowload the Ansible roles 15 | 16 | $ ansible-galaxy install -f -p roles/ -r requirements.yml 17 | 18 | Create the virtual machine and provision it: 19 | 20 | $ vagrant up 21 | 22 | To ssh to the VM, run: 23 | 24 | $ vagrant ssh 25 | 26 | If you want to forward your SSH agent too, run: 27 | 28 | $ vagrant ssh -- -A 29 | 30 | To (re-)provision the VM, using Vagrant: 31 | 32 | $ vagrant provision 33 | 34 | To (re-)provision the VM, using Ansible commands directly: 35 | 36 | $ ansible-playbook singlenode.yml 37 | --inventory-file=".vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory" \ 38 | --user="vagrant" \ 39 | --private-key=".vagrant/machines/atom-local/virtualbox/private_key" \ 40 | --extra-vars="atom_dir=/vagrant/src atom_environment_type=development" \ 41 | --verbose 42 | 43 | To (re-)provision the VM, passing your own arguments to `Ansible`: 44 | 45 | $ ANSIBLE_ARGS="--tags=elasticsearch,percona,memcached,gearman,nginx" vagrant provision 46 | 47 | You can access AtoM service: http://192.168.168.200 (user: demo@example.com, pass: demo) 48 | 49 | -------------------------------------------------------------------------------- /roles/nfs/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Include OS specific variables 4 | include_vars: "{{ ansible_os_family }}.yml" 5 | 6 | - name: Install nfs-utils when ansible_os_family is RedHat 7 | yum: name="{{ item }}" state=installed 8 | with_items: nfs_packages 9 | when: ansible_os_family == "RedHat" 10 | 11 | - name: Install nfs-kernel-server when ansible_os_family is Debian 12 | apt: name="{{ item }}" state=installed 13 | with_items: nfs_packages 14 | when: ansible_os_family == "Debian" 15 | 16 | - name: Ensure NFS service started and enabled 17 | service: name="{{ item }}" 
state=started enabled=yes 18 | with_items: nfs_services 19 | 20 | - name: Setup Ports 21 | lineinfile: dest="{{ nfs_config_file }}" 22 | regexp="^#?{{ item.name }}" 23 | line='{{ item.name }}={{ item.value }}' 24 | with_items: nfs_ports 25 | notify: restart nfs services 26 | 27 | - name: Ensure exported directory exists 28 | file: path="{{ item.path }}" state=directory 29 | owner='{{ item.owner|default(nfs_default_owner) }}' 30 | group='{{ item.group|default(nfs_default_group) }}' 31 | mode='{{ item.mode|default("0777") }}' 32 | with_items: nfs_exported_directories 33 | 34 | - name: Ensure exported directories are in /etc/exports 35 | lineinfile: dest=/etc/exports 36 | regexp="^{{ item.path }}\s" 37 | line='{{ item.path }} {% for host in item.hosts %} {{ host.name }}({{ host.options|default([])|join(",") }}){% endfor %}' 38 | with_items: nfs_exported_directories 39 | notify: refresh exports -------------------------------------------------------------------------------- /playbooks/atom-focal/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing! 5 | VAGRANTFILE_API_VERSION = "2" 6 | 7 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| 8 | 9 | config.vm.box = ENV.fetch("VAGRANT_BOX", "ubuntu/focal64") 10 | 11 | { 12 | "atom-local" => { 13 | "ip" => "192.168.168.200", 14 | "memory" => "4096", 15 | "cpus" => "2", 16 | }, 17 | }.each do |short_name, properties| 18 | 19 | # Define guest 20 | config.vm.define short_name do |host| 21 | host.vm.network "private_network", ip: properties.fetch("ip") 22 | end 23 | 24 | # Set the amount of RAM and virtual CPUs for the virtual machine 25 | config.vm.provider :virtualbox do |vb| 26 | vb.customize ["modifyvm", :id, "--memory", properties.fetch("memory")] 27 | vb.customize ["modifyvm", :id, "--cpus", properties.fetch("cpus")] 28 | end 29 | 30 | end 31 | 32 | config.vm.synced_folder "src/atom", "/usr/share/nginx/atom", create: true 33 | 34 | # Ansible provisioning 35 | config.vm.provision :ansible do |ansible| 36 | ansible.playbook = "./singlenode.yml" 37 | ansible.host_key_checking = false 38 | ansible.extra_vars = { 39 | "atom_user" => "vagrant", 40 | "atom_group" => "vagrant", 41 | "atom_environment_type" => "development", 42 | "atom_auto_init" => "yes", 43 | "es_config" => { 44 | "network.host" => "127.0.0.1" 45 | } 46 | } 47 | ansible.verbose = 'v' 48 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'] 49 | end 50 | 51 | end 52 | -------------------------------------------------------------------------------- /playbooks/atom-noble/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing! 
5 | VAGRANTFILE_API_VERSION = "2" 6 | 7 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| 8 | 9 | config.vm.box = ENV.fetch("VAGRANT_BOX", "bento/ubuntu-24.04") 10 | 11 | { 12 | "atom-local" => { 13 | "ip" => "192.168.168.200", 14 | "memory" => "4096", 15 | "cpus" => "2", 16 | }, 17 | }.each do |short_name, properties| 18 | 19 | # Define guest 20 | config.vm.define short_name do |host| 21 | host.vm.network "private_network", ip: properties.fetch("ip") 22 | end 23 | 24 | # Set the amount of RAM and virtual CPUs for the virtual machine 25 | config.vm.provider :virtualbox do |vb| 26 | vb.customize ["modifyvm", :id, "--memory", properties.fetch("memory")] 27 | vb.customize ["modifyvm", :id, "--cpus", properties.fetch("cpus")] 28 | end 29 | 30 | end 31 | 32 | config.vm.synced_folder "src/atom", "/usr/share/nginx/atom", create: true 33 | 34 | # Ansible provisioning 35 | config.vm.provision :ansible do |ansible| 36 | ansible.playbook = "./singlenode.yml" 37 | ansible.host_key_checking = false 38 | ansible.extra_vars = { 39 | "atom_user" => "vagrant", 40 | "atom_group" => "vagrant", 41 | "atom_environment_type" => "production", 42 | "atom_auto_init" => "yes", 43 | "es_config" => { 44 | "network.host" => "127.0.0.1" 45 | } 46 | } 47 | ansible.verbose = 'v' 48 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'] 49 | end 50 | 51 | end 52 | -------------------------------------------------------------------------------- /playbooks/atom-rocky9/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing! 5 | VAGRANTFILE_API_VERSION = "2" 6 | 7 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| 8 | 9 | config.vm.box = ENV.fetch("VAGRANT_BOX", "bento/rockylinux-9") 10 | 11 | { 12 | "atom-local" => { 13 | "ip" => "192.168.168.200", 14 | "memory" => "4096", 15 | "cpus" => "2", 16 | }, 17 | }.each do |short_name, properties| 18 | 19 | # Define guest 20 | config.vm.define short_name do |host| 21 | host.vm.network "private_network", ip: properties.fetch("ip") 22 | end 23 | 24 | # Set the amount of RAM and virtual CPUs for the virtual machine 25 | config.vm.provider :virtualbox do |vb| 26 | vb.customize ["modifyvm", :id, "--memory", properties.fetch("memory")] 27 | vb.customize ["modifyvm", :id, "--cpus", properties.fetch("cpus")] 28 | end 29 | 30 | end 31 | 32 | config.vm.synced_folder "src/atom", "/usr/share/nginx/atom", create: true 33 | 34 | # Ansible provisioning 35 | config.vm.provision :ansible do |ansible| 36 | ansible.playbook = "./singlenode.yml" 37 | ansible.host_key_checking = false 38 | ansible.extra_vars = { 39 | "atom_user" => "vagrant", 40 | "atom_group" => "vagrant", 41 | "atom_environment_type" => "production", 42 | "atom_auto_init" => "yes", 43 | "es_config" => { 44 | "network.host" => "127.0.0.1" 45 | } 46 | } 47 | ansible.verbose = 'v' 48 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'] 49 | end 50 | 51 | end 52 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/Vagrantfile.openstack: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # 5 | # This is quite the minimal configuration necessary 6 | # to start an OpenStack instance using Vagrant. 
7 | # 8 | # This example assumes a floating IP is needed to 9 | # reach the machine, although you might remove the 10 | # floating_ip_pool parameter if you are able to join 11 | # the instance using its private IP address (e.g. 12 | # through a VPN). 13 | # 14 | Vagrant.configure('2') do |config| 15 | 16 | config.ssh.username = "centos" 17 | 18 | config.vm.provider :openstack do |os, ov| 19 | os.openstack_auth_url = ENV['OS_AUTH_URL'] 20 | os.tenant_name = ENV['OS_TENANT_NAME'] 21 | os.username = ENV['OS_USERNAME'] 22 | os.password = ENV['OS_PASSWORD'] 23 | os.region = ENV['OS_REGION_NAME'] 24 | os.flavor = ENV['OS_FLAVOR'] 25 | ov.vm.allowed_synced_folder_types = :rsync 26 | ov.nfs.functional = false 27 | end 28 | 29 | config.vm.define 'am-local-centos7' do |s| 30 | s.vm.provider :openstack do |os, override| 31 | os.image = ENV['OS_IMAGE'] 32 | os.server_name = 'archivematica-centos7' 33 | end 34 | end 35 | 36 | config.vm.provision :ansible do |ansible| 37 | ansible.playbook = "./singlenode.yml" 38 | ansible.host_key_checking = false 39 | ansible.extra_vars = { 40 | "archivematica_src_dir" => "/opt/archivematica", 41 | "archivematica_src_environment_type" => "development", 42 | } 43 | # Accept multiple arguments, separated by colons 44 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'].to_s.split(':') 45 | end 46 | 47 | end 48 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing! 5 | VAGRANTFILE_API_VERSION = "2" 6 | 7 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| 8 | 9 | config.vm.box = ENV.fetch("VAGRANT_BOX", "ubuntu/bionic64") 10 | 11 | { 12 | "am-local" => { 13 | "ip" => "192.168.168.198", 14 | "memory" => "4096", 15 | "cpus" => "2", 16 | }, 17 | }.each do |short_name, properties| 18 | 19 | # Define guest 20 | config.vm.define short_name do |host| 21 | host.vm.network "private_network", ip: properties.fetch("ip") 22 | host.vm.hostname = "#{short_name}.myapp.dev" 23 | end 24 | 25 | # Set the amount of RAM and virtual CPUs for the virtual machine 26 | config.vm.provider :virtualbox do |vb| 27 | vb.customize ["modifyvm", :id, "--memory", properties.fetch("memory")] 28 | vb.customize ["modifyvm", :id, "--cpus", properties.fetch("cpus")] 29 | end 30 | 31 | end 32 | 33 | # Make the project root available to the guest VM 34 | config.vm.synced_folder '.', '/vagrant', mount_options: ["uid=333", "gid=333"] 35 | 36 | # Ansible provisioning 37 | config.vm.provision "shell", inline: "sudo apt-get update -y && apt-get install -y python" 38 | 39 | config.vm.provision :ansible do |ansible| 40 | ansible.playbook = "./singlenode.yml" 41 | ansible.host_key_checking = false 42 | ansible.extra_vars = { 43 | "archivematica_src_dir" => "/vagrant/src", 44 | "archivematica_src_environment_type" => "development", 45 | } 46 | # Accept multiple arguments, separated by colons 47 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'].to_s.split(':') 48 | end 49 | end 50 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing! 
5 | VAGRANTFILE_API_VERSION = "2" 6 | 7 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| 8 | 9 | config.vm.box = ENV.fetch("VAGRANT_BOX", "ubuntu/jammy64") 10 | 11 | { 12 | "am-local" => { 13 | "ip" => "192.168.168.198", 14 | "memory" => "4096", 15 | "cpus" => "2", 16 | }, 17 | }.each do |short_name, properties| 18 | 19 | # Define guest 20 | config.vm.define short_name do |host| 21 | host.vm.network "private_network", ip: properties.fetch("ip") 22 | host.vm.hostname = "#{short_name}.myapp.dev" 23 | end 24 | 25 | # Set the amount of RAM and virtual CPUs for the virtual machine 26 | config.vm.provider :virtualbox do |vb| 27 | vb.customize ["modifyvm", :id, "--memory", properties.fetch("memory")] 28 | vb.customize ["modifyvm", :id, "--cpus", properties.fetch("cpus")] 29 | end 30 | 31 | end 32 | 33 | # Make the project root available to the guest VM 34 | config.vm.synced_folder '.', '/vagrant', mount_options: ["uid=333", "gid=333"] 35 | 36 | # Ansible provisioning 37 | config.vm.provision "shell", inline: "sudo apt-get update -y && apt-get install -y python3" 38 | 39 | config.vm.provision :ansible do |ansible| 40 | ansible.playbook = "./singlenode.yml" 41 | ansible.host_key_checking = false 42 | ansible.extra_vars = { 43 | "archivematica_src_dir" => "/vagrant/src", 44 | "archivematica_src_environment_type" => "development", 45 | } 46 | # Accept multiple arguments, separated by colons 47 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'].to_s.split(':') 48 | end 49 | end 50 | -------------------------------------------------------------------------------- /playbooks/archivematica-noble/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing! 
5 | VAGRANTFILE_API_VERSION = "2" 6 | 7 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| 8 | 9 | config.vm.box = ENV.fetch("VAGRANT_BOX", "bento/ubuntu-24.04") 10 | 11 | { 12 | "am-local" => { 13 | "ip" => "192.168.168.198", 14 | "memory" => "4096", 15 | "cpus" => "2", 16 | }, 17 | }.each do |short_name, properties| 18 | 19 | # Define guest 20 | config.vm.define short_name do |host| 21 | host.vm.network "private_network", ip: properties.fetch("ip") 22 | host.vm.hostname = "#{short_name}.myapp.dev" 23 | end 24 | 25 | # Set the amount of RAM and virtual CPUs for the virtual machine 26 | config.vm.provider :virtualbox do |vb| 27 | vb.customize ["modifyvm", :id, "--memory", properties.fetch("memory")] 28 | vb.customize ["modifyvm", :id, "--cpus", properties.fetch("cpus")] 29 | end 30 | 31 | end 32 | 33 | # Make the project root available to the guest VM 34 | config.vm.synced_folder '.', '/vagrant', mount_options: ["uid=333", "gid=333"] 35 | 36 | # Ansible provisioning 37 | config.vm.provision "shell", inline: "sudo apt-get update -y && apt-get install -y python3" 38 | 39 | config.vm.provision :ansible do |ansible| 40 | ansible.playbook = "./singlenode.yml" 41 | ansible.host_key_checking = false 42 | ansible.extra_vars = { 43 | "archivematica_src_dir" => "/vagrant/src", 44 | "archivematica_src_environment_type" => "development", 45 | } 46 | # Accept multiple arguments, separated by colons 47 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'].to_s.split(':') 48 | end 49 | end 50 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing! 
5 | VAGRANTFILE_API_VERSION = "2" 6 | 7 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| 8 | 9 | # note the official centos box doesn't have guest additions 10 | # using geerlingguy box instead ( to have synced folders ) 11 | config.vm.box = ENV.fetch("VAGRANT_BOX", "centos/7") 12 | 13 | { 14 | "am-local-centos7" => { 15 | "ip" => "192.168.168.197", 16 | "memory" => "3072", 17 | "cpus" => "2", 18 | }, 19 | }.each do |short_name, properties| 20 | 21 | # Define guest 22 | config.vm.define short_name do |host| 23 | host.vm.network "private_network", ip: properties.fetch("ip") 24 | end 25 | 26 | # Set the amount of RAM and virtual CPUs for the virtual machine 27 | config.vm.provider :virtualbox do |vb| 28 | vb.customize ["modifyvm", :id, "--memory", properties.fetch("memory")] 29 | vb.customize ["modifyvm", :id, "--cpus", properties.fetch("cpus")] 30 | end 31 | 32 | end 33 | 34 | # Make the project root available to the guest VM 35 | config.vm.synced_folder '.', '/vagrant', mount_options: ["uid=333", "gid=333"] 36 | 37 | # Ansible provisioning 38 | config.vm.provision :ansible do |ansible| 39 | ansible.playbook = "./singlenode.yml" 40 | ansible.host_key_checking = false 41 | ansible.extra_vars = { 42 | "archivematica_src_dir" => "/vagrant/src", 43 | "archivematica_src_environment_type" => "development", 44 | } 45 | #ansible.raw_arguments = ENV['ANSIBLE_ARGS'] 46 | # accept multiple arguments, separated by colons 47 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'].to_s.split(':') 48 | end 49 | end 50 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/Vagrantfile.openstack: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # 5 | # This is quite the minimal configuration necessary 6 | # to start an OpenStack instance using Vagrant. 7 | # 8 | # This example assumes a floating IP is needed to 9 | # reach the machine, although you might remove the 10 | # floating_ip_pool parameter if you are able to join 11 | # the instance using its private IP address (e.g. 12 | # through a VPN). 
13 | # 14 | Vagrant.configure('2') do |config| 15 | 16 | config.ssh.username = "ubuntu" 17 | 18 | config.vm.provider :openstack do |os, ov| 19 | os.openstack_auth_url = ENV['OS_AUTH_URL'] 20 | os.tenant_name = ENV['OS_TENANT_NAME'] 21 | os.username = ENV['OS_USERNAME'] 22 | os.password = ENV['OS_PASSWORD'] 23 | os.region = ENV['OS_REGION_NAME'] 24 | os.flavor = ENV['OS_FLAVOR'] 25 | ov.vm.allowed_synced_folder_types = :rsync 26 | ov.nfs.functional = false 27 | end 28 | 29 | config.vm.define 'am-local' do |s| 30 | s.vm.provider :openstack do |os, override| 31 | os.image = ENV['OS_IMAGE'] 32 | os.server_name = 'archivematica-jammy' 33 | end 34 | end 35 | 36 | # Ansible provisioning 37 | config.vm.provision "shell", inline: "sudo apt-get update -y && apt-get install -y python" 38 | 39 | config.vm.provision :ansible do |ansible| 40 | ansible.playbook = "./singlenode.yml" 41 | ansible.host_key_checking = false 42 | ansible.extra_vars = { 43 | "archivematica_src_dir" => "/opt/archivematica", 44 | "archivematica_src_environment_type" => "development", 45 | } 46 | # Accept multiple arguments, separated by colons 47 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'].to_s.split(':') 48 | end 49 | 50 | end 51 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/Vagrantfile.openstack: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # 5 | # This is quite the minimal configuration necessary 6 | # to start an OpenStack instance using Vagrant. 7 | # 8 | # This example assumes a floating IP is needed to 9 | # reach the machine, although you might remove the 10 | # floating_ip_pool parameter if you are able to join 11 | # the instance using its private IP address (e.g. 12 | # through a VPN). 
13 | # 14 | Vagrant.configure('2') do |config| 15 | 16 | config.ssh.username = "ubuntu" 17 | 18 | config.vm.provider :openstack do |os, ov| 19 | os.openstack_auth_url = ENV['OS_AUTH_URL'] 20 | os.tenant_name = ENV['OS_TENANT_NAME'] 21 | os.username = ENV['OS_USERNAME'] 22 | os.password = ENV['OS_PASSWORD'] 23 | os.region = ENV['OS_REGION_NAME'] 24 | os.flavor = ENV['OS_FLAVOR'] 25 | ov.vm.allowed_synced_folder_types = :rsync 26 | ov.nfs.functional = false 27 | end 28 | 29 | config.vm.define 'am-local' do |s| 30 | s.vm.provider :openstack do |os, override| 31 | os.image = ENV['OS_IMAGE'] 32 | os.server_name = 'archivematica-bionic' 33 | end 34 | end 35 | 36 | # Ansible provisioning 37 | config.vm.provision "shell", inline: "sudo apt-get update -y && apt-get install -y python" 38 | 39 | config.vm.provision :ansible do |ansible| 40 | ansible.playbook = "./singlenode.yml" 41 | ansible.host_key_checking = false 42 | ansible.extra_vars = { 43 | "archivematica_src_dir" => "/opt/archivematica", 44 | "archivematica_src_environment_type" => "development", 45 | } 46 | # Accept multiple arguments, separated by colons 47 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'].to_s.split(':') 48 | end 49 | 50 | end 51 | -------------------------------------------------------------------------------- /playbooks/atom-rocky9/singlenode.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "atom-local" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "vars-singlenode-qa.yml" 7 | tags: 8 | - "always" 9 | 10 | - name: Ensure the `libselinux-python` package is installed (required for older systems) 11 | become: yes 12 | ansible.builtin.package: 13 | name: "{{ 'python3-libselinux' if ansible_distribution_major_version | int >= 8 else 'libselinux-python' }}" 14 | state: present 15 | tags: 16 | - "selinux" 17 | 18 | - name: Set SELinux to permissive mode 19 | become: yes 20 | ansible.posix.selinux: 21 | state: permissive 22 | policy: targeted 23 | tags: 24 | - "selinux" 25 | 26 | - name: Ensure SELinux is set to permissive in /etc/selinux/config 27 | become: yes 28 | ansible.builtin.lineinfile: 29 | path: /etc/selinux/config 30 | regexp: '^SELINUX=' 31 | line: 'SELINUX=permissive' 32 | tags: 33 | - "selinux" 34 | 35 | roles: 36 | 37 | - role: "artefactual.elasticsearch" 38 | become: "yes" 39 | tags: 40 | - "elasticsearch" 41 | 42 | - role: "artefactual.percona" 43 | become: "yes" 44 | tags: 45 | - "percona" 46 | 47 | - role: "artefactual.memcached" 48 | become: "yes" 49 | tags: 50 | - "memcached" 51 | 52 | - role: "artefactual.gearman" 53 | become: "yes" 54 | tags: 55 | - "gearman" 56 | 57 | - role: "artefactual.nginx" 58 | become: "yes" 59 | tags: 60 | - "nginx" 61 | 62 | - role: "artefactual.atom" 63 | become: "yes" 64 | tags: 65 | - "atom" 66 | 67 | post_tasks: 68 | 69 | - name: Allow HTTP traffic (port 80) 70 | become: "yes" 71 | firewalld: 72 | service: http 73 | permanent: true 74 | state: enabled 75 | immediate: true 76 | tags: 77 | - "firewalld" 78 | -------------------------------------------------------------------------------- /playbooks/atom-bionic/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing! 
5 | VAGRANTFILE_API_VERSION = "2" 6 | 7 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| 8 | 9 | config.vm.box = ENV.fetch("VAGRANT_BOX", "ubuntu/bionic64") 10 | 11 | { 12 | "atom-local" => { 13 | "ip" => "192.168.168.199", 14 | "memory" => "4096", 15 | "cpus" => "2", 16 | }, 17 | }.each do |short_name, properties| 18 | 19 | # Define guest 20 | config.vm.define short_name do |host| 21 | host.vm.network "private_network", ip: properties.fetch("ip") 22 | end 23 | 24 | # Set the amount of RAM and virtual CPUs for the virtual machine 25 | config.vm.provider :virtualbox do |vb| 26 | vb.customize ["modifyvm", :id, "--memory", properties.fetch("memory")] 27 | vb.customize ["modifyvm", :id, "--cpus", properties.fetch("cpus")] 28 | end 29 | 30 | end 31 | 32 | config.vm.provision "shell", inline: <<-SHELL 33 | # ubuntu/bionic64 doesn't include Python 34 | if [ ! -f /usr/bin/python ]; then 35 | sudo apt-get update && sudo apt-get --yes install python-minimal 36 | fi 37 | 38 | # ubuntu/bionic64 missing host entry in /etc/hosts? 39 | sed -i "s/127\.0\.0\.1 localhost/127\.0\.0\.1 localhost ubuntu-bionic/g" /etc/hosts 40 | SHELL 41 | 42 | config.vm.synced_folder "src/atom", "/usr/share/nginx/atom", create: true 43 | 44 | # Ansible provisioning 45 | config.vm.provision :ansible do |ansible| 46 | ansible.playbook = "./singlenode.yml" 47 | ansible.host_key_checking = false 48 | ansible.extra_vars = { 49 | "atom_user" => "vagrant", 50 | "atom_group" => "vagrant", 51 | "atom_environment_type" => "development", 52 | "atom_auto_init" => "yes", 53 | "es_config" => { 54 | "network.host" => "127.0.0.1" 55 | } 56 | } 57 | ansible.verbose = 'v' 58 | ansible.raw_arguments = ENV['ANSIBLE_ARGS'] 59 | end 60 | 61 | end 62 | -------------------------------------------------------------------------------- /tests/elasticsearch-replication/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing! 
5 | VAGRANTFILE_API_VERSION = "2" 6 | 7 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| 8 | 9 | config.vm.box = "ubuntu/trusty64" 10 | config.vm.synced_folder '.', '/vagrant' 11 | 12 | config.vm.define "nfs" do |nfs| 13 | nfs.vm.hostname = "nfs" 14 | nfs.vm.network :private_network, ip: "192.168.100.50" 15 | nfs.vm.provider :virtualbox do |vb| 16 | vb.customize ["modifyvm", :id, "--memory", "1024"] 17 | vb.customize ["modifyvm", :id, "--cpus", 1] 18 | end 19 | end 20 | 21 | config.vm.define "es01" do |es01| 22 | es01.vm.hostname = "es01" 23 | es01.vm.network :private_network, ip: "192.168.100.100" 24 | es01.vm.provider :virtualbox do |vb| 25 | vb.customize ["modifyvm", :id, "--memory", "1024"] 26 | vb.customize ["modifyvm", :id, "--cpus", 1] 27 | end 28 | end 29 | 30 | config.vm.define "es02" do |es02| 31 | es02.vm.hostname = "es02" 32 | es02.vm.network :private_network, ip: "192.168.100.101" 33 | es02.vm.provider :virtualbox do |vb| 34 | vb.customize ["modifyvm", :id, "--memory", "1024"] 35 | vb.customize ["modifyvm", :id, "--cpus", 1] 36 | end 37 | end 38 | 39 | config.vm.define "es03" do |es03| 40 | es03.vm.hostname = "es03" 41 | es03.vm.network :private_network, ip: "192.168.100.102" 42 | es03.vm.provider :virtualbox do |vb| 43 | vb.customize ["modifyvm", :id, "--memory", "1024"] 44 | vb.customize ["modifyvm", :id, "--cpus", 1] 45 | end 46 | end 47 | 48 | config.vm.provision :ansible do |ansible| 49 | ansible.inventory_path = "hosts" 50 | ansible.playbook = "playbook.yml" 51 | ansible.host_key_checking = false 52 | ansible.verbose = "vvvvv" 53 | end 54 | 55 | # if Vagrant.has_plugin?("vagrant-cachier") 56 | # config.cache.scope = :box 57 | # config.cache.synced_folder_opts = { 58 | # type: :nfs, 59 | # mount_options: ['rw', 'vers=3', 'tcp', 'nolock'] 60 | # } 61 | # end 62 | 63 | end 64 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/vars-singlenode-1.8.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_install_devtools: "no" 6 | archivematica_src_am_version: "stable/1.8.x" 7 | archivematica_src_ss_version: "stable/0.13.x" 8 | archivematica_src_ss_gunicorn: "true" 9 | archivematica_src_am_dashboard_gunicorn: "true" 10 | archivematica_src_am_multi_venvs: "yes" 11 | 12 | # reset setup vars, commented out here, uncomment if needed 13 | #archivematica_src_reset_am_all: "true" 14 | #archivematica_src_reset_ss_db: "true" 15 | 16 | # elasticsearch role, general settings 17 | # (instance specific settings are in the playbook ) 18 | es_scripts: false 19 | es_templates: false 20 | es_version_lock: false 21 | es_heap_size: 348m 22 | es_major_version: "1.7" 23 | es_version: "1.7.6" 24 | es_start_service: true 25 | es_java_install: true 26 | update_java: true 27 | es_restart_on_change: true 28 | es_allow_downgrades: false 29 | es_enable_xpack: false 30 | es_xpack_features: [] 31 | 32 | # Percona variables 33 | mysql_databases: 34 | - name: "{{ archivematica_src_am_db_name }}" 35 | collation: "utf8_general_ci" 36 | encoding: "utf8" 37 | mysql_users: 38 | - name: "{{ archivematica_src_am_db_user }}" 39 | pass: "{{ archivematica_src_am_db_password }}" 40 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 41 | host: "{{ archivematica_src_am_db_host }}" 42 | mysql_root_password: "ChangeMe!" 
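# NOTE: the database credentials, passwords and API keys in this vars file (e.g. the
# "ChangeMe!" MySQL root password above) appear to be development placeholders for the
# local Vagrant VM; override them via your own vars file or --extra-vars before reusing
# this playbook outside a throwaway local environment.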
43 | 44 | #AM configure vars 45 | 46 | archivematica_src_configure_dashboard: "yes" 47 | archivematica_src_configure_ss: "yes" 48 | archivematica_src_configure_ss_user: "admin" 49 | archivematica_src_configure_ss_password: "archivematica" 50 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 51 | archivematica_src_configure_ss_url: "http://192.168.168.197:8000" 52 | archivematica_src_configure_ss_email: "admin@example.com" 53 | archivematica_src_configure_am_user: "admin" 54 | archivematica_src_configure_am_password: "archivematica" 55 | archivematica_src_configure_am_email: "admin@example.com" 56 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 57 | archivematica_src_configure_am_site_url: "http://192.168.168.197" 58 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/vars-singlenode-1.8.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_install_devtools: "yes" 6 | archivematica_src_am_version: "stable/1.8.x" 7 | archivematica_src_ss_version: "stable/0.13.x" 8 | archivematica_src_ss_gunicorn: "true" 9 | archivematica_src_am_dashboard_gunicorn: "true" 10 | archivematica_src_am_multi_venvs: "true" 11 | archivematica_src_dir: "/opt/archivematica" 12 | 13 | # elasticsearch role 14 | 15 | elasticsearch_version: "1.7.6" 16 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 17 | elasticsearch_apt_repos: [] 18 | elasticsearch_apt_dependencies: [] 19 | elasticsearch_java_home: "/usr/lib/jvm/java-8-openjdk-amd64" 20 | elasticsearch_heap_size: "640m" 21 | elasticsearch_max_open_files: "65535" 22 | elasticsearch_node_max_local_storage_nodes: "1" 23 | elasticsearch_index_mapper_dynamic: "true" 24 | elasticsearch_memory_bootstrap_mlockall: "true" 25 | elasticsearch_install_java: "true" 26 | elasticsearch_thread_pools: 27 | - "threadpool.bulk.type: fixed" 28 | - "threadpool.bulk.size: 50" 29 | - "threadpool.bulk.queue_size: 1000" 30 | elasticsearch_network_host: "127.0.0.1" 31 | 32 | # percona role 33 | 34 | mysql_databases: 35 | - name: "{{ archivematica_src_am_db_name }}" 36 | collation: "utf8_general_ci" 37 | encoding: "utf8" 38 | 39 | mysql_users: 40 | - name: "{{ archivematica_src_am_db_user }}" 41 | pass: "{{ archivematica_src_am_db_password }}" 42 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 43 | host: "{{ archivematica_src_am_db_host }}" 44 | 45 | mysql_root_password: "ChangeMe!" 
46 | 47 | #AM configure vars 48 | 49 | archivematica_src_configure_dashboard: "yes" 50 | archivematica_src_configure_ss: "yes" 51 | archivematica_src_configure_ss_user: "admin" 52 | archivematica_src_configure_ss_password: "archivematica" 53 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 54 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 55 | archivematica_src_configure_ss_email: "admin@example.com" 56 | archivematica_src_configure_am_user: "admin" 57 | archivematica_src_configure_am_password: "archivematica" 58 | archivematica_src_configure_am_email: "admin@example.com" 59 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 60 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 61 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/vars-singlenode-1.11.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_install_devtools: "yes" 6 | archivematica_src_am_version: "stable/1.11.x" 7 | archivematica_src_ss_version: "stable/0.16.x" 8 | archivematica_src_ss_gunicorn: "true" 9 | archivematica_src_am_dashboard_gunicorn: "true" 10 | 11 | # elasticsearch role 12 | 13 | elasticsearch_version: "6.5.4" 14 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 15 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 16 | elasticsearch_heap_size: "1g" 17 | elasticsearch_max_open_files: "65535" 18 | elasticsearch_timezone: "UTC" 19 | elasticsearch_node_max_local_storage_nodes: "1" 20 | elasticsearch_index_mapper_dynamic: "true" 21 | elasticsearch_memory_bootstrap_mlockall: "true" 22 | elasticsearch_install_java: "true" 23 | elasticsearch_thread_pools: 24 | - "thread_pool.write.size: 2" 25 | - "thread_pool.write.queue_size: 1000" 26 | elasticsearch_network_http_max_content_lengtht: 1024mb 27 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 28 | elasticsearch_max_locked_memory: "unlimited" 29 | elasticsearch_network_host: "127.0.0.1" 30 | 31 | # percona role 32 | 33 | mysql_databases: 34 | - name: "{{ archivematica_src_am_db_name }}" 35 | collation: "utf8_general_ci" 36 | encoding: "utf8" 37 | 38 | mysql_users: 39 | - name: "{{ archivematica_src_am_db_user }}" 40 | pass: "{{ archivematica_src_am_db_password }}" 41 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 42 | host: "{{ archivematica_src_am_db_host }}" 43 | 44 | mysql_root_password: "MYSQLROOTPASSWORD" 45 | 46 | #AM configure vars 47 | 48 | archivematica_src_configure_dashboard: "yes" 49 | archivematica_src_configure_ss: "yes" 50 | archivematica_src_configure_ss_user: "admin" 51 | archivematica_src_configure_ss_password: "archivematica" 52 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 53 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 54 | archivematica_src_configure_ss_email: "admin@example.com" 55 | archivematica_src_configure_am_user: "admin" 56 | archivematica_src_configure_am_password: "archivematica" 57 | archivematica_src_configure_am_email: "admin@example.com" 58 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 59 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 60 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/vars-singlenode-1.10.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | 
# archivematica-src role 4 | 5 | archivematica_src_install_devtools: "yes" 6 | archivematica_src_am_version: "stable/1.10.x" 7 | archivematica_src_ss_version: "stable/0.15.x" 8 | archivematica_src_ss_gunicorn: "true" 9 | archivematica_src_am_dashboard_gunicorn: "true" 10 | archivematica_src_am_multi_venvs: "true" 11 | 12 | # elasticsearch role 13 | 14 | elasticsearch_version: "6.5.4" 15 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 16 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 17 | elasticsearch_heap_size: "1g" 18 | elasticsearch_max_open_files: "65535" 19 | elasticsearch_timezone: "UTC" 20 | elasticsearch_node_max_local_storage_nodes: "1" 21 | elasticsearch_index_mapper_dynamic: "true" 22 | elasticsearch_memory_bootstrap_mlockall: "true" 23 | elasticsearch_install_java: "true" 24 | elasticsearch_thread_pools: 25 | - "thread_pool.write.size: 2" 26 | - "thread_pool.write.queue_size: 1000" 27 | elasticsearch_network_http_max_content_lengtht: 1024mb 28 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 29 | elasticsearch_max_locked_memory: "unlimited" 30 | elasticsearch_network_host: "127.0.0.1" 31 | 32 | # percona role 33 | 34 | mysql_databases: 35 | - name: "{{ archivematica_src_am_db_name }}" 36 | collation: "utf8_general_ci" 37 | encoding: "utf8" 38 | 39 | mysql_users: 40 | - name: "{{ archivematica_src_am_db_user }}" 41 | pass: "{{ archivematica_src_am_db_password }}" 42 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 43 | host: "{{ archivematica_src_am_db_host }}" 44 | 45 | mysql_root_password: "MYSQLROOTPASSWORD" 46 | 47 | #AM configure vars 48 | 49 | archivematica_src_configure_dashboard: "yes" 50 | archivematica_src_configure_ss: "yes" 51 | archivematica_src_configure_ss_user: "admin" 52 | archivematica_src_configure_ss_password: "archivematica" 53 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 54 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 55 | archivematica_src_configure_ss_email: "admin@example.com" 56 | archivematica_src_configure_am_user: "admin" 57 | archivematica_src_configure_am_password: "archivematica" 58 | archivematica_src_configure_am_email: "admin@example.com" 59 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 60 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 61 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/vars-singlenode-1.9.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_install_devtools: "yes" 6 | archivematica_src_am_version: "stable/1.9.x" 7 | archivematica_src_ss_version: "stable/0.14.x" 8 | archivematica_src_ss_gunicorn: "true" 9 | archivematica_src_am_dashboard_gunicorn: "true" 10 | archivematica_src_am_multi_venvs: "true" 11 | 12 | # elasticsearch role 13 | 14 | elasticsearch_version: "6.5.4" 15 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 16 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 17 | elasticsearch_heap_size: "1g" 18 | elasticsearch_max_open_files: "65535" 19 | elasticsearch_timezone: "UTC" 20 | elasticsearch_node_max_local_storage_nodes: "1" 21 | elasticsearch_index_mapper_dynamic: "true" 22 | elasticsearch_memory_bootstrap_mlockall: "true" 23 | elasticsearch_install_java: "true" 24 | elasticsearch_thread_pools: 25 | - "thread_pool.write.size: 2" 26 | - "thread_pool.write.queue_size: 1000" 27 | 
elasticsearch_network_http_max_content_lengtht: 1024mb 28 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 29 | elasticsearch_max_locked_memory: "unlimited" 30 | elasticsearch_network_host: "127.0.0.1" 31 | 32 | # percona role 33 | 34 | mysql_databases: 35 | - name: "{{ archivematica_src_am_db_name }}" 36 | collation: "utf8_general_ci" 37 | encoding: "utf8" 38 | 39 | mysql_users: 40 | - name: "{{ archivematica_src_am_db_user }}" 41 | pass: "{{ archivematica_src_am_db_password }}" 42 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 43 | host: "{{ archivematica_src_am_db_host }}" 44 | 45 | mysql_root_password: "MYSQLROOTPASSWORD" 46 | 47 | #AM configure vars 48 | 49 | archivematica_src_configure_dashboard: "yes" 50 | archivematica_src_configure_ss: "yes" 51 | archivematica_src_configure_ss_user: "admin" 52 | archivematica_src_configure_ss_password: "archivematica" 53 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 54 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 55 | archivematica_src_configure_ss_email: "admin@example.com" 56 | archivematica_src_configure_am_user: "admin" 57 | archivematica_src_configure_am_password: "archivematica" 58 | archivematica_src_configure_am_email: "admin@example.com" 59 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 60 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 61 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/vars-singlenode-1.12.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_install_devtools: "no" 6 | archivematica_src_am_version: "stable/1.12.x" 7 | archivematica_src_ss_version: "stable/0.17.x" 8 | archivematica_src_ss_gunicorn: "true" 9 | archivematica_src_am_dashboard_gunicorn: "true" 10 | 11 | # reset setup vars, commented out here, uncomment if needed 12 | #archivematica_src_reset_am_all: "true" 13 | #archivematica_src_reset_ss_db: "true" 14 | 15 | # elasticsearch role 16 | 17 | elasticsearch_version: "6.5.4" 18 | elasticsearch_java_home: "/usr/lib/jvm/jre-1.8.0" 19 | elasticsearch_heap_size: "1g" 20 | elasticsearch_max_open_files: "65535" 21 | elasticsearch_timezone: "UTC" 22 | elasticsearch_node_max_local_storage_nodes: "1" 23 | elasticsearch_index_mapper_dynamic: "true" 24 | elasticsearch_memory_bootstrap_mlockall: "true" 25 | elasticsearch_install_java: "true" 26 | elasticsearch_thread_pools: 27 | - "thread_pool.bulk.size: 2" 28 | - "thread_pool.bulk.queue_size: 1000" 29 | elasticsearch_network_http_max_content_lengtht: 1024mb 30 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 31 | elasticsearch_max_locked_memory: "unlimited" 32 | elasticsearch_network_host: "127.0.0.1" 33 | 34 | # Percona variables 35 | mysql_databases: 36 | - name: "{{ archivematica_src_am_db_name }}" 37 | collation: "utf8_general_ci" 38 | encoding: "utf8" 39 | mysql_users: 40 | - name: "{{ archivematica_src_am_db_user }}" 41 | pass: "{{ archivematica_src_am_db_password }}" 42 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 43 | host: "{{ archivematica_src_am_db_host }}" 44 | mysql_root_password: "ChangeMe!" 
45 | 46 | #AM configure vars 47 | 48 | archivematica_src_configure_dashboard: "yes" 49 | archivematica_src_configure_ss: "yes" 50 | archivematica_src_configure_ss_user: "admin" 51 | archivematica_src_configure_ss_password: "archivematica" 52 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 53 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 54 | archivematica_src_configure_ss_email: "admin@example.com" 55 | archivematica_src_configure_am_user: "admin" 56 | archivematica_src_configure_am_password: "archivematica" 57 | archivematica_src_configure_am_email: "admin@example.com" 58 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 59 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 60 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/vars-singlenode-1.11.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_install_devtools: "no" 6 | archivematica_src_am_version: "stable/1.11.x" 7 | archivematica_src_ss_version: "stable/0.16.x" 8 | archivematica_src_ss_gunicorn: "true" 9 | archivematica_src_am_dashboard_gunicorn: "true" 10 | 11 | # reset setup vars, commented out here, uncomment if needed 12 | #archivematica_src_reset_am_all: "true" 13 | #archivematica_src_reset_ss_db: "true" 14 | 15 | # elasticsearch role 16 | 17 | elasticsearch_version: "6.5.4" 18 | elasticsearch_java_home: "/usr/lib/jvm/jre-1.8.0" 19 | elasticsearch_heap_size: "1g" 20 | elasticsearch_max_open_files: "65535" 21 | elasticsearch_timezone: "UTC" 22 | elasticsearch_node_max_local_storage_nodes: "1" 23 | elasticsearch_index_mapper_dynamic: "true" 24 | elasticsearch_memory_bootstrap_mlockall: "true" 25 | elasticsearch_install_java: "true" 26 | elasticsearch_thread_pools: 27 | - "thread_pool.bulk.size: 2" 28 | - "thread_pool.bulk.queue_size: 1000" 29 | elasticsearch_network_http_max_content_lengtht: 1024mb 30 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 31 | elasticsearch_max_locked_memory: "unlimited" 32 | elasticsearch_network_host: "127.0.0.1" 33 | 34 | # Percona variables 35 | mysql_databases: 36 | - name: "{{ archivematica_src_am_db_name }}" 37 | collation: "utf8_general_ci" 38 | encoding: "utf8" 39 | mysql_users: 40 | - name: "{{ archivematica_src_am_db_user }}" 41 | pass: "{{ archivematica_src_am_db_password }}" 42 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 43 | host: "{{ archivematica_src_am_db_host }}" 44 | mysql_root_password: "ChangeMe!" 
45 | 46 | #AM configure vars 47 | 48 | archivematica_src_configure_dashboard: "yes" 49 | archivematica_src_configure_ss: "yes" 50 | archivematica_src_configure_ss_user: "admin" 51 | archivematica_src_configure_ss_password: "archivematica" 52 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 53 | archivematica_src_configure_ss_url: "http://192.168.168.197:8000" 54 | archivematica_src_configure_ss_email: "admin@example.com" 55 | archivematica_src_configure_am_user: "admin" 56 | archivematica_src_configure_am_password: "archivematica" 57 | archivematica_src_configure_am_email: "admin@example.com" 58 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 59 | archivematica_src_configure_am_site_url: "http://192.168.168.197" 60 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/vars-singlenode-1.12.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_install_devtools: "no" 6 | archivematica_src_am_version: "stable/1.12.x" 7 | archivematica_src_ss_version: "stable/0.17.x" 8 | archivematica_src_ss_gunicorn: "true" 9 | archivematica_src_am_dashboard_gunicorn: "true" 10 | 11 | # reset setup vars, commented out here, uncomment if needed 12 | #archivematica_src_reset_am_all: "true" 13 | #archivematica_src_reset_ss_db: "true" 14 | 15 | # elasticsearch role 16 | 17 | elasticsearch_version: "6.5.4" 18 | elasticsearch_java_home: "/usr/lib/jvm/jre-1.8.0" 19 | elasticsearch_heap_size: "1g" 20 | elasticsearch_max_open_files: "65535" 21 | elasticsearch_timezone: "UTC" 22 | elasticsearch_node_max_local_storage_nodes: "1" 23 | elasticsearch_index_mapper_dynamic: "true" 24 | elasticsearch_memory_bootstrap_mlockall: "true" 25 | elasticsearch_install_java: "true" 26 | elasticsearch_thread_pools: 27 | - "thread_pool.bulk.size: 2" 28 | - "thread_pool.bulk.queue_size: 1000" 29 | elasticsearch_network_http_max_content_lengtht: 1024mb 30 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 31 | elasticsearch_max_locked_memory: "unlimited" 32 | elasticsearch_network_host: "127.0.0.1" 33 | 34 | # Percona variables 35 | mysql_databases: 36 | - name: "{{ archivematica_src_am_db_name }}" 37 | collation: "utf8_general_ci" 38 | encoding: "utf8" 39 | mysql_users: 40 | - name: "{{ archivematica_src_am_db_user }}" 41 | pass: "{{ archivematica_src_am_db_password }}" 42 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 43 | host: "{{ archivematica_src_am_db_host }}" 44 | mysql_root_password: "ChangeMe!" 
45 | 46 | #AM configure vars 47 | 48 | archivematica_src_configure_dashboard: "yes" 49 | archivematica_src_configure_ss: "yes" 50 | archivematica_src_configure_ss_user: "admin" 51 | archivematica_src_configure_ss_password: "archivematica" 52 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 53 | archivematica_src_configure_ss_url: "http://192.168.168.197:8000" 54 | archivematica_src_configure_ss_email: "admin@example.com" 55 | archivematica_src_configure_am_user: "admin" 56 | archivematica_src_configure_am_password: "archivematica" 57 | archivematica_src_configure_am_email: "admin@example.com" 58 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 59 | archivematica_src_configure_am_site_url: "http://192.168.168.197" 60 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/vars-singlenode-1.9.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_install_devtools: "no" 6 | archivematica_src_am_version: "stable/1.9.x" 7 | archivematica_src_ss_version: "stable/0.14.x" 8 | archivematica_src_ss_gunicorn: "true" 9 | archivematica_src_am_dashboard_gunicorn: "true" 10 | archivematica_src_am_multi_venvs: "yes" 11 | 12 | # reset setup vars, commented out here, uncomment if needed 13 | #archivematica_src_reset_am_all: "true" 14 | #archivematica_src_reset_ss_db: "true" 15 | 16 | # elasticsearch role 17 | 18 | elasticsearch_version: "6.5.4" 19 | elasticsearch_java_home: "/usr/lib/jvm/jre-1.8.0" 20 | elasticsearch_heap_size: "1g" 21 | elasticsearch_max_open_files: "65535" 22 | elasticsearch_timezone: "UTC" 23 | elasticsearch_node_max_local_storage_nodes: "1" 24 | elasticsearch_index_mapper_dynamic: "true" 25 | elasticsearch_memory_bootstrap_mlockall: "true" 26 | elasticsearch_install_java: "true" 27 | elasticsearch_thread_pools: 28 | - "thread_pool.bulk.size: 2" 29 | - "thread_pool.bulk.queue_size: 1000" 30 | elasticsearch_network_http_max_content_lengtht: 1024mb 31 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 32 | elasticsearch_max_locked_memory: "unlimited" 33 | elasticsearch_network_host: "127.0.0.1" 34 | 35 | # Percona variables 36 | mysql_databases: 37 | - name: "{{ archivematica_src_am_db_name }}" 38 | collation: "utf8_general_ci" 39 | encoding: "utf8" 40 | mysql_users: 41 | - name: "{{ archivematica_src_am_db_user }}" 42 | pass: "{{ archivematica_src_am_db_password }}" 43 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 44 | host: "{{ archivematica_src_am_db_host }}" 45 | mysql_root_password: "ChangeMe!" 
46 | 47 | #AM configure vars 48 | 49 | archivematica_src_configure_dashboard: "yes" 50 | archivematica_src_configure_ss: "yes" 51 | archivematica_src_configure_ss_user: "admin" 52 | archivematica_src_configure_ss_password: "archivematica" 53 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 54 | archivematica_src_configure_ss_url: "http://192.168.168.197:8000" 55 | archivematica_src_configure_ss_email: "admin@example.com" 56 | archivematica_src_configure_am_user: "admin" 57 | archivematica_src_configure_am_password: "archivematica" 58 | archivematica_src_configure_am_email: "admin@example.com" 59 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 60 | archivematica_src_configure_am_site_url: "http://192.168.168.197" 61 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/vars-singlenode-1.10.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_install_devtools: "no" 6 | archivematica_src_am_version: "stable/1.10.x" 7 | archivematica_src_ss_version: "stable/0.15.x" 8 | archivematica_src_ss_gunicorn: "true" 9 | archivematica_src_am_dashboard_gunicorn: "true" 10 | archivematica_src_am_multi_venvs: "yes" 11 | 12 | # reset setup vars, commented out here, uncomment if needed 13 | #archivematica_src_reset_am_all: "true" 14 | #archivematica_src_reset_ss_db: "true" 15 | 16 | # elasticsearch role 17 | 18 | elasticsearch_version: "6.5.4" 19 | elasticsearch_java_home: "/usr/lib/jvm/jre-1.8.0" 20 | elasticsearch_heap_size: "1g" 21 | elasticsearch_max_open_files: "65535" 22 | elasticsearch_timezone: "UTC" 23 | elasticsearch_node_max_local_storage_nodes: "1" 24 | elasticsearch_index_mapper_dynamic: "true" 25 | elasticsearch_memory_bootstrap_mlockall: "true" 26 | elasticsearch_install_java: "true" 27 | elasticsearch_thread_pools: 28 | - "thread_pool.bulk.size: 2" 29 | - "thread_pool.bulk.queue_size: 1000" 30 | elasticsearch_network_http_max_content_lengtht: 1024mb 31 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 32 | elasticsearch_max_locked_memory: "unlimited" 33 | elasticsearch_network_host: "127.0.0.1" 34 | 35 | # Percona variables 36 | mysql_databases: 37 | - name: "{{ archivematica_src_am_db_name }}" 38 | collation: "utf8_general_ci" 39 | encoding: "utf8" 40 | mysql_users: 41 | - name: "{{ archivematica_src_am_db_user }}" 42 | pass: "{{ archivematica_src_am_db_password }}" 43 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 44 | host: "{{ archivematica_src_am_db_host }}" 45 | mysql_root_password: "ChangeMe!" 
46 | 47 | #AM configure vars 48 | 49 | archivematica_src_configure_dashboard: "yes" 50 | archivematica_src_configure_ss: "yes" 51 | archivematica_src_configure_ss_user: "admin" 52 | archivematica_src_configure_ss_password: "archivematica" 53 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 54 | archivematica_src_configure_ss_url: "http://192.168.168.197:8000" 55 | archivematica_src_configure_ss_email: "admin@example.com" 56 | archivematica_src_configure_am_user: "admin" 57 | archivematica_src_configure_am_password: "archivematica" 58 | archivematica_src_configure_am_email: "admin@example.com" 59 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 60 | archivematica_src_configure_am_site_url: "http://192.168.168.197" 61 | -------------------------------------------------------------------------------- /tests/dip-upload/archivematica.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "all" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "archivematica-vars.yml" 7 | tags: 8 | - "always" 9 | 10 | - name: "Change home dir perms (to make transfer source visible)" 11 | command: "chmod 755 $HOME" 12 | become: "no" 13 | 14 | roles: 15 | 16 | - role: "artefactual.elasticsearch" 17 | become: "yes" 18 | 19 | - role: "artefactual.percona" 20 | become: "yes" 21 | 22 | - role: "artefactual.gearman" 23 | become: "yes" 24 | 25 | - role: "artefactual.clamav" 26 | become: "yes" 27 | 28 | - role: "artefactual.nginx" 29 | become: "yes" 30 | 31 | - role: "artefactual.archivematica-src" 32 | become: "yes" 33 | tags: 34 | - "archivematica-src" 35 | 36 | post_tasks: 37 | 38 | - name: "restart clamav daemons" 39 | become: "yes" 40 | service: 41 | name: "{{ item }}" 42 | state: restarted 43 | loop: 44 | - "clamav-freshclam" 45 | - "clamav-daemon" 46 | when: ansible_os_family == "Debian" 47 | tags: "restart-clamav" 48 | 49 | - name: "Configure Dashboard settings" 50 | command: > 51 | mysql --user="{{ archivematica_src_am_db_user }}" 52 | --password="{{ archivematica_src_am_db_password }}" 53 | --host="{{ archivematica_src_am_db_host }}" 54 | "{{ archivematica_src_am_db_name }}" 55 | --batch --skip-column-names 56 | --execute="update DashboardSettings set value=\"{{ item.value }}\" where name=\"{{ item.key }}\";" 57 | with_dict: "{{ custom_archivematica_src_configure_dashboardsettings }}" 58 | no_log: True 59 | when: 60 | - archivematica_src_configure_dashboard|bool 61 | - custom_archivematica_src_configure_dashboardsettings is defined 62 | 63 | - name: "Configure AtoM DIP Upload in AM host" 64 | block: 65 | - name: "Create rsa for user archivematica" 66 | user: 67 | name: "archivematica" 68 | generate_ssh_key: "yes" 69 | ssh_key_file: ".ssh/id_rsa" 70 | 71 | - name: "Use StrictHostKeyChecking=no ssh option for archivematica user" 72 | lineinfile: 73 | create: "yes" 74 | path: "/var/lib/archivematica/.ssh/config" 75 | owner: "archivematica" 76 | group: "archivematica" 77 | mode: "0600" 78 | line: "StrictHostKeyChecking no" 79 | become: true 80 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/README.md: -------------------------------------------------------------------------------- 1 | # Archivematica playbook 2 | 3 | The provided playbook installs Archivematica on a local vagrant virtual 4 | machine. 5 | 6 | ## Requirements 7 | 8 | - Vagrant 1.9 or newer 9 | - Ansible 2.2 or newer 10 | 11 | ## How to use 12 | 13 | 14 | 1. 
Download the Ansible roles: 15 | ``` 16 | $ ansible-galaxy install -f -p roles/ -r requirements.yml 17 | ``` 18 | 19 | 2. Create the virtual machine and provision it: 20 | ``` 21 | $ vagrant up 22 | ``` 23 | 24 | 3. To ssh to the VM, run: 25 | ``` 26 | $ vagrant ssh 27 | ``` 28 | 29 | 4. If you want to forward your SSH agent too, run: 30 | ``` 31 | $ vagrant ssh -- -A 32 | ``` 33 | 34 | 5. To (re-)provision the VM, run: 35 | * Using vagrant: 36 | ``` 37 | $ vagrant provision 38 | ``` 39 | * Using vagrant and custom ANSIBLE_ARGS. Use colons (:) to separate multiple parameters. For example, to pass a tag that installs only the Storage Service, plus the verbose flag: 40 | ``` 41 | $ ANSIBLE_ARGS="--tags=amsrc-ss:-vvv" vagrant provision 42 | ``` 43 | Note that it is not possible to pass `--extra-vars` to Ansible using the above, because `extra_vars` is reassigned in the Vagrantfile. 44 | * Using ansible commands directly (this allows you to pass ansible-specific parameters, 45 | such as tags and the verbose flag; remember to use `--extra-vars` to pass the variables set in the Vagrantfile): 46 | ``` 47 | $ ansible-playbook -i .vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory singlenode.yml \ 48 | -u vagrant \ 49 | --private-key .vagrant/machines/am-local/virtualbox/private_key \ 50 | --extra-vars="archivematica_src_dir=/vagrant/src archivematica_src_environment_type=development" \ 51 | --tags="amsrc-pipeline-instcode" \ 52 | -v 53 | ``` 54 | 55 | 6. The Ansible playbook `singlenode.yml` specified in the Vagrantfile will provision using the qa branches of Archivematica. To provision using the stable 1.17.x/0.23.x branches, replace "vars-singlenode-qa.yml" with "vars-singlenode-1.17.yml" in `singlenode.yml`. You can also create a custom vars file and pass it instead (to modify role variables, deploy custom branches, etc.). 56 | 57 | # Login and credentials 58 | 59 | If you are using the default values in vars-singlenode-XXXX.yml and Vagrantfile files, the login URLs are: 60 | 61 | * Dashboard: http://192.168.168.198 62 | * Storage Service: http://192.168.168.198:8000 63 | 64 | Credentials: 65 | 66 | * user: admin 67 | * password: archivematica 68 | 69 | For more Archivematica development information, see: https://github.com/artefactual/archivematica/tree/qa/1.x/hack#archivematica-development-on-docker-compose 70 | -------------------------------------------------------------------------------- /playbooks/archivematica-noble/README.md: -------------------------------------------------------------------------------- 1 | # Archivematica playbook 2 | 3 | The provided playbook installs Archivematica on a local vagrant virtual 4 | machine. 5 | 6 | ## Requirements 7 | 8 | - Vagrant 1.9 or newer 9 | - Ansible 2.2 or newer 10 | 11 | ## How to use 12 | 13 | 14 | 1. Download the Ansible roles: 15 | ``` 16 | $ ansible-galaxy install -f -p roles/ -r requirements.yml 17 | ``` 18 | 19 | 2. Create the virtual machine and provision it: 20 | ``` 21 | $ vagrant up 22 | ``` 23 | 24 | 3. To ssh to the VM, run: 25 | ``` 26 | $ vagrant ssh 27 | ``` 28 | 29 | 4. If you want to forward your SSH agent too, run: 30 | ``` 31 | $ vagrant ssh -- -A 32 | ``` 33 | 34 | 5. To (re-)provision the VM, run: 35 | * Using vagrant: 36 | ``` 37 | $ vagrant provision 38 | ``` 39 | * Using vagrant and custom ANSIBLE_ARGS. Use colons (:) to separate multiple parameters.
For example, to pass a tag that installs only the Storage Service, plus the verbose flag: 40 | ``` 41 | $ ANSIBLE_ARGS="--tags=amsrc-ss:-vvv" vagrant provision 42 | ``` 43 | Note that it is not possible to pass `--extra-vars` to Ansible using the above, because `extra_vars` is reassigned in the Vagrantfile. 44 | * Using ansible commands directly (this allows you to pass ansible-specific parameters, 45 | such as tags and the verbose flag; remember to use `--extra-vars` to pass the variables set in the Vagrantfile): 46 | ``` 47 | $ ansible-playbook -i .vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory singlenode.yml \ 48 | -u vagrant \ 49 | --private-key .vagrant/machines/am-local/virtualbox/private_key \ 50 | --extra-vars="archivematica_src_dir=/vagrant/src archivematica_src_environment_type=development" \ 51 | --tags="amsrc-pipeline-instcode" \ 52 | -v 53 | ``` 54 | 55 | 6. The Ansible playbook `singlenode.yml` specified in the Vagrantfile will provision using the qa branches of Archivematica. To provision using the stable 1.18.x/0.24.x branches, replace "vars-singlenode-qa.yml" with "vars-singlenode-1.18.yml" in `singlenode.yml`. You can also create a custom vars file and pass it instead (to modify role variables, deploy custom branches, etc.). 56 | 57 | # Login and credentials 58 | 59 | If you are using the default values in vars-singlenode-XXXX.yml and Vagrantfile files, the login URLs are: 60 | 61 | * Dashboard: http://192.168.168.198 62 | * Storage Service: http://192.168.168.198:8000 63 | 64 | Credentials: 65 | 66 | * user: admin 67 | * password: archivematica 68 | 69 | For more Archivematica development information, see: https://github.com/artefactual/archivematica/tree/qa/1.x/hack#archivematica-development-on-docker-compose 70 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/vars-singlenode-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_am_version: "qa/1.x" 6 | archivematica_src_ss_version: "qa/0.x" 7 | 8 | archivematica_src_ss_db_name: "SS" 9 | archivematica_src_ss_db_user: "ss" 10 | archivematica_src_am_db_password: "aaGKHyMls.20ki" 11 | archivematica_src_ss_db_password: "aaGKHyMls.20ki" 12 | archivematica_src_ss_db_host: "{{ archivematica_src_am_db_host }}" 13 | 14 | archivematica_src_configure_dashboard: "yes" 15 | archivematica_src_configure_ss: "yes" 16 | archivematica_src_configure_ss_user: "admin" 17 | archivematica_src_configure_ss_password: "archivematica" 18 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 19 | archivematica_src_configure_ss_url: "http://192.168.168.197:8000" 20 | archivematica_src_configure_ss_email: "admin@example.com" 21 | archivematica_src_configure_am_user: "admin" 22 | archivematica_src_configure_am_password: "archivematica" 23 | archivematica_src_configure_am_email: "admin@example.com" 24 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 25 | archivematica_src_configure_am_site_url: "http://192.168.168.197" 26 | 27 | # elasticsearch role 28 | 29 | elasticsearch_version: "6.5.4" 30 | elasticsearch_java_home: "/usr/lib/jvm/jre-1.8.0" 31 | elasticsearch_heap_size: "1g" 32 | elasticsearch_max_open_files: "65535" 33 | elasticsearch_timezone: "UTC" 34 | elasticsearch_node_max_local_storage_nodes: "1" 35 | elasticsearch_index_mapper_dynamic: "true" 36 | elasticsearch_memory_bootstrap_mlockall: "true" 37 | elasticsearch_install_java: "true" 38 |
elasticsearch_thread_pools: 39 | - "thread_pool.bulk.size: 2" 40 | - "thread_pool.bulk.queue_size: 1000" 41 | elasticsearch_network_http_max_content_lengtht: 1024mb 42 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 43 | elasticsearch_max_locked_memory: "unlimited" 44 | elasticsearch_network_host: "127.0.0.1" 45 | 46 | # Percona variables 47 | 48 | mysql_databases: 49 | - name: "{{ archivematica_src_am_db_name }}" 50 | collation: "utf8_general_ci" 51 | encoding: "utf8" 52 | - name: "{{ archivematica_src_ss_db_name }}" 53 | collation: "utf8_general_ci" 54 | encoding: "utf8" 55 | 56 | mysql_users: 57 | - name: "{{ archivematica_src_am_db_user }}" 58 | pass: "{{ archivematica_src_am_db_password }}" 59 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 60 | host: "{{ archivematica_src_am_db_host }}" 61 | - name: "{{ archivematica_src_ss_db_user }}" 62 | pass: "{{ archivematica_src_ss_db_password }}" 63 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 64 | host: "{{ archivematica_src_ss_db_host }}" 65 | 66 | mysql_root_password: "aaGKHyMls.20ki$" 67 | 68 | mysql_version_major: "5" 69 | mysql_version_minor: "7" 70 | 71 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/vars-singlenode-1.14.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_am_version: "stable/1.14.x" 6 | archivematica_src_ss_version: "stable/0.20.x" 7 | 8 | archivematica_src_ss_db_name: "SS" 9 | archivematica_src_ss_db_user: "ss" 10 | archivematica_src_am_db_password: "aaGKHyMls.20ki" 11 | archivematica_src_ss_db_password: "aaGKHyMls.20ki" 12 | archivematica_src_ss_db_host: "{{ archivematica_src_am_db_host }}" 13 | 14 | archivematica_src_configure_dashboard: "yes" 15 | archivematica_src_configure_ss: "yes" 16 | archivematica_src_configure_ss_user: "admin" 17 | archivematica_src_configure_ss_password: "archivematica" 18 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 19 | archivematica_src_configure_ss_url: "http://192.168.168.197:8000" 20 | archivematica_src_configure_ss_email: "admin@example.com" 21 | archivematica_src_configure_am_user: "admin" 22 | archivematica_src_configure_am_password: "archivematica" 23 | archivematica_src_configure_am_email: "admin@example.com" 24 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 25 | archivematica_src_configure_am_site_url: "http://192.168.168.197" 26 | 27 | # elasticsearch role 28 | 29 | elasticsearch_version: "6.5.4" 30 | elasticsearch_java_home: "/usr/lib/jvm/jre-1.8.0" 31 | elasticsearch_heap_size: "1g" 32 | elasticsearch_max_open_files: "65535" 33 | elasticsearch_timezone: "UTC" 34 | elasticsearch_node_max_local_storage_nodes: "1" 35 | elasticsearch_index_mapper_dynamic: "true" 36 | elasticsearch_memory_bootstrap_mlockall: "true" 37 | elasticsearch_install_java: "true" 38 | elasticsearch_thread_pools: 39 | - "thread_pool.bulk.size: 2" 40 | - "thread_pool.bulk.queue_size: 1000" 41 | elasticsearch_network_http_max_content_lengtht: 1024mb 42 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 43 | elasticsearch_max_locked_memory: "unlimited" 44 | elasticsearch_network_host: "127.0.0.1" 45 | 46 | # Percona variables 47 | 48 | mysql_databases: 49 | - name: "{{ archivematica_src_am_db_name }}" 50 | collation: "utf8_general_ci" 51 | encoding: "utf8" 52 | - name: "{{ archivematica_src_ss_db_name }}" 53 | collation: "utf8_general_ci" 54 | encoding: 
"utf8" 55 | 56 | mysql_users: 57 | - name: "{{ archivematica_src_am_db_user }}" 58 | pass: "{{ archivematica_src_am_db_password }}" 59 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 60 | host: "{{ archivematica_src_am_db_host }}" 61 | - name: "{{ archivematica_src_ss_db_user }}" 62 | pass: "{{ archivematica_src_ss_db_password }}" 63 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 64 | host: "{{ archivematica_src_ss_db_host }}" 65 | 66 | mysql_root_password: "aaGKHyMls.20ki$" 67 | 68 | mysql_version_major: "5" 69 | mysql_version_minor: "7" 70 | 71 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/vars-singlenode-1.13.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_am_version: "stable/1.13.x" 6 | archivematica_src_ss_version: "stable/0.19.x" 7 | 8 | archivematica_src_ss_db_name: "SS" 9 | archivematica_src_ss_db_user: "ss" 10 | archivematica_src_ss_db_password: "demo" 11 | archivematica_src_ss_db_host: "{{ archivematica_src_am_db_host }}" 12 | 13 | archivematica_src_configure_dashboard: "yes" 14 | archivematica_src_configure_ss: "yes" 15 | archivematica_src_configure_ss_user: "admin" 16 | archivematica_src_configure_ss_password: "archivematica" 17 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 18 | archivematica_src_configure_ss_url: "http://192.168.168.197:8000" 19 | archivematica_src_configure_ss_email: "admin@example.com" 20 | archivematica_src_configure_am_user: "admin" 21 | archivematica_src_configure_am_password: "archivematica" 22 | archivematica_src_configure_am_email: "admin@example.com" 23 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 24 | archivematica_src_configure_am_site_url: "http://192.168.168.197" 25 | 26 | # elasticsearch role 27 | 28 | elasticsearch_version: "6.5.4" 29 | elasticsearch_java_home: "/usr/lib/jvm/jre-1.8.0" 30 | elasticsearch_heap_size: "1g" 31 | elasticsearch_max_open_files: "65535" 32 | elasticsearch_timezone: "UTC" 33 | elasticsearch_node_max_local_storage_nodes: "1" 34 | elasticsearch_index_mapper_dynamic: "true" 35 | elasticsearch_memory_bootstrap_mlockall: "true" 36 | elasticsearch_install_java: "true" 37 | elasticsearch_thread_pools: 38 | - "thread_pool.bulk.size: 2" 39 | - "thread_pool.bulk.queue_size: 1000" 40 | elasticsearch_network_http_max_content_lengtht: 1024mb 41 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 42 | elasticsearch_max_locked_memory: "unlimited" 43 | elasticsearch_network_host: "127.0.0.1" 44 | 45 | # Percona variables 46 | 47 | mysql_databases: 48 | - name: "{{ archivematica_src_am_db_name }}" 49 | collation: "utf8_general_ci" 50 | encoding: "utf8" 51 | - name: "{{ archivematica_src_ss_db_name }}" 52 | collation: "utf8_general_ci" 53 | encoding: "utf8" 54 | 55 | mysql_users: 56 | - name: "{{ archivematica_src_am_db_user }}" 57 | pass: "{{ archivematica_src_am_db_password }}" 58 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 59 | host: "{{ archivematica_src_am_db_host }}" 60 | - name: "{{ archivematica_src_ss_db_user }}" 61 | pass: "{{ archivematica_src_ss_db_password }}" 62 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 63 | host: "{{ archivematica_src_ss_db_host }}" 64 | 65 | mysql_root_password: "ChangeMe!" 
66 | 67 | archivematica_src_ss_environment: 68 | SS_DB_URL: "mysql://{{ archivematica_src_ss_db_user }}:{{ archivematica_src_ss_db_password }}@{{ archivematica_src_ss_db_host }}:3306/{{ archivematica_src_ss_db_name }}" 69 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/vars-singlenode-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_am_version: "qa/1.x" 6 | archivematica_src_ss_version: "qa/0.x" 7 | 8 | archivematica_src_ss_db_name: "SS" 9 | archivematica_src_ss_db_user: "ss" 10 | archivematica_src_ss_db_password: "demo" 11 | archivematica_src_ss_db_host: "{{ archivematica_src_am_db_host }}" 12 | 13 | archivematica_src_configure_dashboard: "yes" 14 | archivematica_src_configure_ss: "yes" 15 | archivematica_src_configure_ss_user: "admin" 16 | archivematica_src_configure_ss_password: "archivematica" 17 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 18 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 19 | archivematica_src_configure_ss_email: "admin@example.com" 20 | archivematica_src_configure_am_user: "admin" 21 | archivematica_src_configure_am_password: "archivematica" 22 | archivematica_src_configure_am_email: "admin@example.com" 23 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 24 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 25 | 26 | # elasticsearch role 27 | 28 | elasticsearch_version: "6.5.4" 29 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 30 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 31 | elasticsearch_heap_size: "1g" 32 | elasticsearch_max_open_files: "65535" 33 | elasticsearch_timezone: "UTC" 34 | elasticsearch_node_max_local_storage_nodes: "1" 35 | elasticsearch_index_mapper_dynamic: "true" 36 | elasticsearch_memory_bootstrap_mlockall: "true" 37 | elasticsearch_install_java: "true" 38 | elasticsearch_thread_pools: 39 | - "thread_pool.write.size: 2" 40 | - "thread_pool.write.queue_size: 1000" 41 | elasticsearch_network_http_max_content_lengtht: 1024mb 42 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 43 | elasticsearch_max_locked_memory: "unlimited" 44 | elasticsearch_network_host: "127.0.0.1" 45 | 46 | # percona role 47 | 48 | mysql_databases: 49 | - name: "{{ archivematica_src_am_db_name }}" 50 | collation: "utf8_general_ci" 51 | encoding: "utf8" 52 | - name: "{{ archivematica_src_ss_db_name }}" 53 | collation: "utf8_general_ci" 54 | encoding: "utf8" 55 | 56 | mysql_users: 57 | - name: "{{ archivematica_src_am_db_user }}" 58 | pass: "{{ archivematica_src_am_db_password }}" 59 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 60 | host: "{{ archivematica_src_am_db_host }}" 61 | - name: "{{ archivematica_src_ss_db_user }}" 62 | pass: "{{ archivematica_src_ss_db_password }}" 63 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 64 | host: "{{ archivematica_src_ss_db_host }}" 65 | 66 | mysql_root_password: "MYSQLROOTPASSWORD" 67 | 68 | archivematica_src_ss_environment: 69 | SS_DB_URL: "mysql://{{ archivematica_src_ss_db_user }}:{{ archivematica_src_ss_db_password }}@{{ archivematica_src_ss_db_host }}:3306/{{ archivematica_src_ss_db_name }}" 70 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/vars-singlenode-1.13.yml: -------------------------------------------------------------------------------- 1 | 
--- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_am_version: "stable/1.13.x" 6 | archivematica_src_ss_version: "stable/0.19.x" 7 | 8 | archivematica_src_ss_db_name: "SS" 9 | archivematica_src_ss_db_user: "ss" 10 | archivematica_src_ss_db_password: "demo" 11 | archivematica_src_ss_db_host: "{{ archivematica_src_am_db_host }}" 12 | 13 | archivematica_src_configure_dashboard: "yes" 14 | archivematica_src_configure_ss: "yes" 15 | archivematica_src_configure_ss_user: "admin" 16 | archivematica_src_configure_ss_password: "archivematica" 17 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 18 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 19 | archivematica_src_configure_ss_email: "admin@example.com" 20 | archivematica_src_configure_am_user: "admin" 21 | archivematica_src_configure_am_password: "archivematica" 22 | archivematica_src_configure_am_email: "admin@example.com" 23 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 24 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 25 | 26 | # elasticsearch role 27 | 28 | elasticsearch_version: "6.5.4" 29 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 30 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 31 | elasticsearch_heap_size: "1g" 32 | elasticsearch_max_open_files: "65535" 33 | elasticsearch_timezone: "UTC" 34 | elasticsearch_node_max_local_storage_nodes: "1" 35 | elasticsearch_index_mapper_dynamic: "true" 36 | elasticsearch_memory_bootstrap_mlockall: "true" 37 | elasticsearch_install_java: "true" 38 | elasticsearch_thread_pools: 39 | - "thread_pool.write.size: 2" 40 | - "thread_pool.write.queue_size: 1000" 41 | elasticsearch_network_http_max_content_lengtht: 1024mb 42 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 43 | elasticsearch_max_locked_memory: "unlimited" 44 | elasticsearch_network_host: "127.0.0.1" 45 | 46 | # percona role 47 | 48 | mysql_databases: 49 | - name: "{{ archivematica_src_am_db_name }}" 50 | collation: "utf8_general_ci" 51 | encoding: "utf8" 52 | - name: "{{ archivematica_src_ss_db_name }}" 53 | collation: "utf8_general_ci" 54 | encoding: "utf8" 55 | 56 | mysql_users: 57 | - name: "{{ archivematica_src_am_db_user }}" 58 | pass: "{{ archivematica_src_am_db_password }}" 59 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 60 | host: "{{ archivematica_src_am_db_host }}" 61 | - name: "{{ archivematica_src_ss_db_user }}" 62 | pass: "{{ archivematica_src_ss_db_password }}" 63 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 64 | host: "{{ archivematica_src_ss_db_host }}" 65 | 66 | mysql_root_password: "MYSQLROOTPASSWORD" 67 | 68 | archivematica_src_ss_environment: 69 | SS_DB_URL: "mysql://{{ archivematica_src_ss_db_user }}:{{ archivematica_src_ss_db_password }}@{{ archivematica_src_ss_db_host }}:3306/{{ archivematica_src_ss_db_name }}" 70 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/vars-singlenode-1.14.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_am_version: "stable/1.14.x" 6 | archivematica_src_ss_version: "stable/0.20.x" 7 | 8 | archivematica_src_ss_db_name: "SS" 9 | archivematica_src_ss_db_user: "ss" 10 | archivematica_src_ss_db_password: "demo" 11 | archivematica_src_ss_db_host: "{{ archivematica_src_am_db_host }}" 12 | 13 | archivematica_src_configure_dashboard: "yes" 14 | archivematica_src_configure_ss: 
"yes" 15 | archivematica_src_configure_ss_user: "admin" 16 | archivematica_src_configure_ss_password: "archivematica" 17 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 18 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 19 | archivematica_src_configure_ss_email: "admin@example.com" 20 | archivematica_src_configure_am_user: "admin" 21 | archivematica_src_configure_am_password: "archivematica" 22 | archivematica_src_configure_am_email: "admin@example.com" 23 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 24 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 25 | 26 | # elasticsearch role 27 | 28 | elasticsearch_version: "6.5.4" 29 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 30 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 31 | elasticsearch_heap_size: "1g" 32 | elasticsearch_max_open_files: "65535" 33 | elasticsearch_timezone: "UTC" 34 | elasticsearch_node_max_local_storage_nodes: "1" 35 | elasticsearch_index_mapper_dynamic: "true" 36 | elasticsearch_memory_bootstrap_mlockall: "true" 37 | elasticsearch_install_java: "true" 38 | elasticsearch_thread_pools: 39 | - "thread_pool.write.size: 2" 40 | - "thread_pool.write.queue_size: 1000" 41 | elasticsearch_network_http_max_content_lengtht: 1024mb 42 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 43 | elasticsearch_max_locked_memory: "unlimited" 44 | elasticsearch_network_host: "127.0.0.1" 45 | 46 | # percona role 47 | 48 | mysql_databases: 49 | - name: "{{ archivematica_src_am_db_name }}" 50 | collation: "utf8_general_ci" 51 | encoding: "utf8" 52 | - name: "{{ archivematica_src_ss_db_name }}" 53 | collation: "utf8_general_ci" 54 | encoding: "utf8" 55 | 56 | mysql_users: 57 | - name: "{{ archivematica_src_am_db_user }}" 58 | pass: "{{ archivematica_src_am_db_password }}" 59 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 60 | host: "{{ archivematica_src_am_db_host }}" 61 | - name: "{{ archivematica_src_ss_db_user }}" 62 | pass: "{{ archivematica_src_ss_db_password }}" 63 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 64 | host: "{{ archivematica_src_ss_db_host }}" 65 | 66 | mysql_root_password: "MYSQLROOTPASSWORD" 67 | 68 | archivematica_src_ss_environment: 69 | SS_DB_URL: "mysql://{{ archivematica_src_ss_db_user }}:{{ archivematica_src_ss_db_password }}@{{ archivematica_src_ss_db_host }}:3306/{{ archivematica_src_ss_db_name }}" 70 | -------------------------------------------------------------------------------- /tests/elasticsearch-replication/playbook.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "Update virtual machines" 4 | hosts: "all" 5 | tasks: 6 | - name: "Update distro" 7 | apt: "update_cache=yes cache_valid_time=300 upgrade=safe" 8 | sudo: "yes" 9 | 10 | # Installing a NFS server. You need this if you have multiple nodes in a ES 11 | # cluster you'll want all the nodes having access to the same repository. 12 | - name: "Setup NFS server" 13 | hosts: "nfs" 14 | sudo: "yes" 15 | roles: 16 | - role: "nfs" 17 | nfs_exported_directories: 18 | - path: "/export/elasticsearch-repository" 19 | hosts: 20 | - name: "192.168.100.0/24" 21 | options: ["rw", "sync"] 22 | 23 | # Here we are just installing Elasticsearch. 
24 | - name: "Setup Elasticsearch cluster" 25 | hosts: "es_servers" 26 | sudo: "yes" 27 | tags: ["elasticsearch"] 28 | roles: 29 | - role: "elasticsearch" 30 | - role: "nfs-client" 31 | nfs_client_imports: 32 | - src: "192.168.100.50:/export/elasticsearch-repository" 33 | name: "/mnt/elasticsearch-repository" 34 | opts: "rw" 35 | 36 | # I suspect that we need to register the repository individually in every node, 37 | # just because the fact that the mount pount could be different depending on 38 | # the location, right? 39 | - name: "Setup Elasticsearch snapshot repository" 40 | hosts: "es_servers" 41 | tags: ["elasticsearch-repository"] 42 | roles: 43 | - role: "elasticsearch-repository" 44 | elasticsearch_repository_name: "atom" 45 | elasticsearch_repository_location: "/mnt/elasticsearch-repository" 46 | elasticsearch_repository_compress: "false" 47 | 48 | # You need to take the snapshot only from one node. The node will spread the word. 49 | # Note that I am targetting es01, it'd work with es02 or es03 too. 50 | - name: "Take snapshot" 51 | hosts: "es01" 52 | tags: ["elasticsearch-snapshot"] 53 | roles: 54 | - role: "elasticsearch-snapshot" 55 | elasticsearch_snapshot_name: "the-one" 56 | elasticsearch_snapshot_repository: "atom" 57 | elasticsearch_snapshot_indices: ["twitter"] 58 | elasticsearch_snapshot_overwrite: "yes" 59 | 60 | # This role is going to restore all the indices available in the snapshot 61 | # given. The only option at the moment is to add a suffix, e.g. from "atom" 62 | # you could have "atom_ro", which is enough for now. 63 | # 64 | # It needs to be executed only against one of the nodes available. 65 | # 66 | # We need indices_overwrite because the indices that are going to be rewritten 67 | # have to be closed first. 68 | - name: "Restore snapshot" 69 | hosts: "es01" 70 | tags: ["elasticsearch-restore"] 71 | roles: 72 | - role: "elasticsearch-restore" 73 | elasticsearch_restore_snapshot: "the-one" 74 | elasticsearch_restore_repository: "atom" 75 | elasticsearch_restore_suffix: "_ro" 76 | elasticsearch_restore_indices_overwrite: ["twitter_ro"] 77 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/README.md: -------------------------------------------------------------------------------- 1 | # Archivematica playbook 2 | 3 | The provided playbook installs Archivematica on a local vagrant virtual 4 | machine. 5 | 6 | ## Requirements 7 | 8 | - Vagrant 1.9 or newer 9 | - Ansible 2.2 or newer 10 | 11 | ## How to use 12 | 13 | 14 | 1. Download the Ansible roles: 15 | ``` 16 | $ ansible-galaxy install -f -p roles/ -r requirements.yml 17 | ``` 18 | 19 | 2. Create the virtual machine and provision it: 20 | ``` 21 | $ vagrant up 22 | ``` 23 | 24 | 3. To ssh to the VM, run: 25 | ``` 26 | $ vagrant ssh 27 | ``` 28 | 29 | 4. If you want to forward your SSH agent too, run: 30 | ``` 31 | $ vagrant ssh -- -A 32 | ``` 33 | 34 | 5. To (re-)provision the VM, run: 35 | * Using vagrant: 36 | ``` 37 | $ vagrant provision 38 | ``` 39 | * Using vagrant and custom ANSIBLE_ARGS. Use colons (:) to separate multiple parameters. 
For example, to pass a tag that installs only the Storage Service, plus the verbose flag: 40 | ``` 41 | $ ANSIBLE_ARGS="--tags=amsrc-ss:-vvv" vagrant provision 42 | ``` 43 | Note that it is not possible to pass --extra-vars to ansible this way, because extra_vars is reassigned in the Vagrantfile. 44 | * Using ansible commands directly (this allows you to pass ansible-specific parameters, 45 | such as tags and the verbose flag; remember to use --extra-vars to pass the variables set in the Vagrantfile): 46 | ``` 47 | $ ansible-playbook -i .vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory singlenode.yml \ 48 | -u vagrant \ 49 | --private-key .vagrant/machines/am-local/virtualbox/private_key \ 50 | --extra-vars="archivematica_src_dir=/vagrant/src archivematica_src_environment_type=development" \ 51 | --tags="amsrc-pipeline-instcode" \ 52 | -v 53 | ``` 54 | 55 | 6. The Ansible playbook `singlenode.yml` specified in the Vagrantfile will provision using the qa branches of Archivematica. To provision using the stable 1.14.x/0.20.x branches, replace "vars-singlenode-qa.yml" with "vars-singlenode-1.14.yml" in `singlenode.yml`. You can also create a custom vars file and pass it instead (to modify role variables, deploy custom branches, etc.). 56 | 57 | 7. If you get errors regarding the Vagrant shared folders, they are usually due 58 | to different versions of VirtualBox. One way to fix this is to use a Vagrant 59 | plugin that installs the host's VirtualBox Guest Additions on the guest system: 60 | ``` 61 | $ vagrant plugin install vagrant-vbguest 62 | $ vagrant vbguest 63 | ``` 64 | 65 | # Login and credentials 66 | 67 | If you are using the default values in the vars-singlenode-XXXX.yml and Vagrantfile files, the login URLs are: 68 | 69 | * Dashboard: http://192.168.168.198 70 | * Storage Service: http://192.168.168.198:8000 71 | 72 | Credentials: 73 | 74 | * user: admin 75 | * password: archivematica 76 | 77 | For more Archivematica development information, see: https://wiki.archivematica.org/Getting_started 78 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/vars-singlenode-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_am_version: "qa/1.x" 6 | archivematica_src_ss_version: "qa/0.x" 7 | 8 | archivematica_src_ss_db_name: "SS" 9 | archivematica_src_ss_db_user: "ss" 10 | archivematica_src_ss_db_password: "demo" 11 | archivematica_src_ss_db_host: "{{ archivematica_src_am_db_host }}" 12 | 13 | archivematica_src_configure_dashboard: "yes" 14 | archivematica_src_configure_ss: "yes" 15 | archivematica_src_configure_ss_user: "admin" 16 | archivematica_src_configure_ss_password: "archivematica" 17 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 18 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 19 | archivematica_src_configure_ss_email: "admin@example.com" 20 | archivematica_src_configure_am_user: "admin" 21 | archivematica_src_configure_am_password: "archivematica" 22 | archivematica_src_configure_am_email: "admin@example.com" 23 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 24 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 25 | 26 | nodejs_version: "22.x" 27 | 28 | # elasticsearch role 29 | 30 | elasticsearch_version: "6.5.4" 31 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 32 | elasticsearch_java_home:
"/usr/lib/jvm/java-1.8.0-openjdk-amd64" 33 | elasticsearch_heap_size: "1g" 34 | elasticsearch_max_open_files: "65535" 35 | elasticsearch_timezone: "UTC" 36 | elasticsearch_node_max_local_storage_nodes: "1" 37 | elasticsearch_index_mapper_dynamic: "true" 38 | elasticsearch_memory_bootstrap_mlockall: "true" 39 | elasticsearch_install_java: "true" 40 | elasticsearch_thread_pools: 41 | - "thread_pool.write.size: 2" 42 | - "thread_pool.write.queue_size: 1000" 43 | elasticsearch_network_http_max_content_lengtht: 1024mb 44 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 45 | elasticsearch_max_locked_memory: "unlimited" 46 | elasticsearch_network_host: "127.0.0.1" 47 | 48 | # percona role 49 | 50 | mysql_version_major: "8" 51 | mysql_version_minor: "0" 52 | mysql_character_set_server: "utf8mb4" 53 | mysql_collation_server: "utf8mb4_0900_ai_ci" 54 | 55 | mysql_databases: 56 | - name: "{{ archivematica_src_am_db_name }}" 57 | collation: "{{ archivematica_src_am_db_collation }}" 58 | encoding: "{{ archivematica_src_am_db_encoding }}" 59 | - name: "{{ archivematica_src_ss_db_name }}" 60 | collation: "{{ archivematica_src_ss_db_collation }}" 61 | encoding: "{{ archivematica_src_ss_db_encoding }}" 62 | 63 | mysql_users: 64 | - name: "{{ archivematica_src_am_db_user }}" 65 | pass: "{{ archivematica_src_am_db_password }}" 66 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 67 | host: "{{ archivematica_src_am_db_host }}" 68 | - name: "{{ archivematica_src_ss_db_user }}" 69 | pass: "{{ archivematica_src_ss_db_password }}" 70 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 71 | host: "{{ archivematica_src_ss_db_host }}" 72 | 73 | mysql_root_password: "MYSQLROOTPASSWORD" 74 | 75 | archivematica_src_ss_environment: 76 | SS_DB_URL: "mysql://{{ archivematica_src_ss_db_user }}:{{ archivematica_src_ss_db_password }}@{{ archivematica_src_ss_db_host }}:3306/{{ archivematica_src_ss_db_name }}" 77 | -------------------------------------------------------------------------------- /playbooks/archivematica-noble/vars-singlenode-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_am_version: "qa/1.x" 6 | archivematica_src_ss_version: "qa/0.x" 7 | 8 | archivematica_src_ss_db_name: "SS" 9 | archivematica_src_ss_db_user: "ss" 10 | archivematica_src_ss_db_password: "demo" 11 | archivematica_src_ss_db_host: "{{ archivematica_src_am_db_host }}" 12 | 13 | archivematica_src_configure_dashboard: "yes" 14 | archivematica_src_configure_ss: "yes" 15 | archivematica_src_configure_ss_user: "admin" 16 | archivematica_src_configure_ss_password: "archivematica" 17 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 18 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 19 | archivematica_src_configure_ss_email: "admin@example.com" 20 | archivematica_src_configure_am_user: "admin" 21 | archivematica_src_configure_am_password: "archivematica" 22 | archivematica_src_configure_am_email: "admin@example.com" 23 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 24 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 25 | 26 | nodejs_version: "22.x" 27 | 28 | # elasticsearch role 29 | 30 | elasticsearch_version: "8.19.2" 31 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 32 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 33 | elasticsearch_heap_size: "1g" 34 | elasticsearch_max_open_files: "65535" 35 | 
elasticsearch_timezone: "UTC" 36 | elasticsearch_node_max_local_storage_nodes: "1" 37 | elasticsearch_index_mapper_dynamic: "true" 38 | elasticsearch_memory_bootstrap_mlockall: "true" 39 | elasticsearch_install_java: "true" 40 | elasticsearch_thread_pools: 41 | - "thread_pool.write.size: 2" 42 | - "thread_pool.write.queue_size: 1000" 43 | elasticsearch_network_http_max_content_lengtht: 1024mb 44 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 45 | elasticsearch_max_locked_memory: "unlimited" 46 | elasticsearch_network_host: "127.0.0.1" 47 | 48 | # percona role 49 | 50 | mysql_version_major: "8" 51 | mysql_version_minor: "0" 52 | mysql_character_set_server: "utf8mb4" 53 | mysql_collation_server: "utf8mb4_0900_ai_ci" 54 | 55 | mysql_databases: 56 | - name: "{{ archivematica_src_am_db_name }}" 57 | collation: "{{ archivematica_src_am_db_collation }}" 58 | encoding: "{{ archivematica_src_am_db_encoding }}" 59 | - name: "{{ archivematica_src_ss_db_name }}" 60 | collation: "{{ archivematica_src_ss_db_collation }}" 61 | encoding: "{{ archivematica_src_ss_db_encoding }}" 62 | 63 | mysql_users: 64 | - name: "{{ archivematica_src_am_db_user }}" 65 | pass: "{{ archivematica_src_am_db_password }}" 66 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 67 | host: "{{ archivematica_src_am_db_host }}" 68 | - name: "{{ archivematica_src_ss_db_user }}" 69 | pass: "{{ archivematica_src_ss_db_password }}" 70 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 71 | host: "{{ archivematica_src_ss_db_host }}" 72 | 73 | mysql_root_password: "MYSQLROOTPASSWORD" 74 | 75 | archivematica_src_ss_environment: 76 | SS_DB_URL: "mysql://{{ archivematica_src_ss_db_user }}:{{ archivematica_src_ss_db_password }}@{{ archivematica_src_ss_db_host }}:3306/{{ archivematica_src_ss_db_name }}" 77 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/vars-singlenode-1.17.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_am_version: "stable/1.17.x" 6 | archivematica_src_ss_version: "stable/0.23.x" 7 | 8 | archivematica_src_ss_db_name: "SS" 9 | archivematica_src_ss_db_user: "ss" 10 | archivematica_src_ss_db_password: "demo" 11 | archivematica_src_ss_db_host: "{{ archivematica_src_am_db_host }}" 12 | 13 | archivematica_src_configure_dashboard: "yes" 14 | archivematica_src_configure_ss: "yes" 15 | archivematica_src_configure_ss_user: "admin" 16 | archivematica_src_configure_ss_password: "archivematica" 17 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 18 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 19 | archivematica_src_configure_ss_email: "admin@example.com" 20 | archivematica_src_configure_am_user: "admin" 21 | archivematica_src_configure_am_password: "archivematica" 22 | archivematica_src_configure_am_email: "admin@example.com" 23 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 24 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 25 | 26 | nodejs_version: "22.x" 27 | 28 | # elasticsearch role 29 | 30 | elasticsearch_version: "6.5.4" 31 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 32 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 33 | elasticsearch_heap_size: "1g" 34 | elasticsearch_max_open_files: "65535" 35 | elasticsearch_timezone: "UTC" 36 | elasticsearch_node_max_local_storage_nodes: "1" 37 | elasticsearch_index_mapper_dynamic: 
"true" 38 | elasticsearch_memory_bootstrap_mlockall: "true" 39 | elasticsearch_install_java: "true" 40 | elasticsearch_thread_pools: 41 | - "thread_pool.write.size: 2" 42 | - "thread_pool.write.queue_size: 1000" 43 | elasticsearch_network_http_max_content_lengtht: 1024mb 44 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 45 | elasticsearch_max_locked_memory: "unlimited" 46 | elasticsearch_network_host: "127.0.0.1" 47 | 48 | # percona role 49 | 50 | mysql_version_major: "8" 51 | mysql_version_minor: "0" 52 | mysql_character_set_server: "utf8mb4" 53 | mysql_collation_server: "utf8mb4_0900_ai_ci" 54 | 55 | mysql_databases: 56 | - name: "{{ archivematica_src_am_db_name }}" 57 | collation: "{{ archivematica_src_am_db_collation }}" 58 | encoding: "{{ archivematica_src_am_db_encoding }}" 59 | - name: "{{ archivematica_src_ss_db_name }}" 60 | collation: "{{ archivematica_src_ss_db_collation }}" 61 | encoding: "{{ archivematica_src_ss_db_encoding }}" 62 | 63 | mysql_users: 64 | - name: "{{ archivematica_src_am_db_user }}" 65 | pass: "{{ archivematica_src_am_db_password }}" 66 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 67 | host: "{{ archivematica_src_am_db_host }}" 68 | - name: "{{ archivematica_src_ss_db_user }}" 69 | pass: "{{ archivematica_src_ss_db_password }}" 70 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 71 | host: "{{ archivematica_src_ss_db_host }}" 72 | 73 | mysql_root_password: "MYSQLROOTPASSWORD" 74 | 75 | archivematica_src_ss_environment: 76 | SS_DB_URL: "mysql://{{ archivematica_src_ss_db_user }}:{{ archivematica_src_ss_db_password }}@{{ archivematica_src_ss_db_host }}:3306/{{ archivematica_src_ss_db_name }}" 77 | -------------------------------------------------------------------------------- /playbooks/archivematica-noble/vars-singlenode-1.18.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_am_version: "stable/1.18.x" 6 | archivematica_src_ss_version: "stable/0.24.x" 7 | 8 | archivematica_src_ss_db_name: "SS" 9 | archivematica_src_ss_db_user: "ss" 10 | archivematica_src_ss_db_password: "demo" 11 | archivematica_src_ss_db_host: "{{ archivematica_src_am_db_host }}" 12 | 13 | archivematica_src_configure_dashboard: "yes" 14 | archivematica_src_configure_ss: "yes" 15 | archivematica_src_configure_ss_user: "admin" 16 | archivematica_src_configure_ss_password: "archivematica" 17 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 18 | archivematica_src_configure_ss_url: "http://192.168.168.198:8000" 19 | archivematica_src_configure_ss_email: "admin@example.com" 20 | archivematica_src_configure_am_user: "admin" 21 | archivematica_src_configure_am_password: "archivematica" 22 | archivematica_src_configure_am_email: "admin@example.com" 23 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 24 | archivematica_src_configure_am_site_url: "http://192.168.168.198" 25 | 26 | nodejs_version: "22.x" 27 | 28 | # elasticsearch role 29 | 30 | elasticsearch_version: "8.19.2" 31 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 32 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 33 | elasticsearch_heap_size: "1g" 34 | elasticsearch_max_open_files: "65535" 35 | elasticsearch_timezone: "UTC" 36 | elasticsearch_node_max_local_storage_nodes: "1" 37 | elasticsearch_index_mapper_dynamic: "true" 38 | elasticsearch_memory_bootstrap_mlockall: "true" 39 | elasticsearch_install_java: "true" 40 | 
elasticsearch_thread_pools: 41 | - "thread_pool.write.size: 2" 42 | - "thread_pool.write.queue_size: 1000" 43 | elasticsearch_network_http_max_content_lengtht: 1024mb 44 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 45 | elasticsearch_max_locked_memory: "unlimited" 46 | elasticsearch_network_host: "127.0.0.1" 47 | 48 | # percona role 49 | 50 | mysql_version_major: "8" 51 | mysql_version_minor: "0" 52 | mysql_character_set_server: "utf8mb4" 53 | mysql_collation_server: "utf8mb4_0900_ai_ci" 54 | 55 | mysql_databases: 56 | - name: "{{ archivematica_src_am_db_name }}" 57 | collation: "{{ archivematica_src_am_db_collation }}" 58 | encoding: "{{ archivematica_src_am_db_encoding }}" 59 | - name: "{{ archivematica_src_ss_db_name }}" 60 | collation: "{{ archivematica_src_ss_db_collation }}" 61 | encoding: "{{ archivematica_src_ss_db_encoding }}" 62 | 63 | mysql_users: 64 | - name: "{{ archivematica_src_am_db_user }}" 65 | pass: "{{ archivematica_src_am_db_password }}" 66 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 67 | host: "{{ archivematica_src_am_db_host }}" 68 | - name: "{{ archivematica_src_ss_db_user }}" 69 | pass: "{{ archivematica_src_ss_db_password }}" 70 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 71 | host: "{{ archivematica_src_ss_db_host }}" 72 | 73 | mysql_root_password: "MYSQLROOTPASSWORD" 74 | 75 | archivematica_src_ss_environment: 76 | SS_DB_URL: "mysql://{{ archivematica_src_ss_db_user }}:{{ archivematica_src_ss_db_password }}@{{ archivematica_src_ss_db_host }}:3306/{{ archivematica_src_ss_db_name }}" 77 | -------------------------------------------------------------------------------- /tests/archivematica-acceptance-tests/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG TARGET=server 2 | ARG DOCKER_IMAGE_NAME=ubuntu 3 | ARG DOCKER_IMAGE_TAG=22.04 4 | 5 | FROM ubuntu:20.04 AS install_ubuntu_20.04 6 | 7 | ENV DEBIAN_FRONTEND noninteractive 8 | 9 | RUN set -ex \ 10 | && apt-get update \ 11 | && apt-get install -y \ 12 | locales \ 13 | openssh-server \ 14 | rsync \ 15 | sudo \ 16 | && apt-get clean 17 | 18 | RUN locale-gen en_US.UTF-8 19 | ENV LANG en_US.UTF-8 20 | ENV LANGUAGE en_US:en 21 | ENV LC_ALL en_US.UTF-8 22 | 23 | FROM ubuntu:22.04 AS install_ubuntu_22.04 24 | 25 | ENV DEBIAN_FRONTEND noninteractive 26 | 27 | RUN set -ex \ 28 | && apt-get update \ 29 | && apt-get install -y \ 30 | locales \ 31 | openssh-server \ 32 | rsync \ 33 | sudo \ 34 | && apt-get clean 35 | 36 | RUN locale-gen en_US.UTF-8 37 | ENV LANG en_US.UTF-8 38 | ENV LANGUAGE en_US:en 39 | ENV LC_ALL en_US.UTF-8 40 | 41 | FROM ubuntu:24.04 AS install_ubuntu_24.04 42 | 43 | ENV DEBIAN_FRONTEND noninteractive 44 | 45 | RUN set -ex \ 46 | && apt-get update \ 47 | && apt-get install -y \ 48 | locales \ 49 | openssh-server \ 50 | rsync \ 51 | sudo \ 52 | && apt-get clean 53 | 54 | RUN locale-gen en_US.UTF-8 55 | ENV LANG en_US.UTF-8 56 | ENV LANGUAGE en_US:en 57 | ENV LC_ALL en_US.UTF-8 58 | 59 | FROM rockylinux:8 AS install_rockylinux_8 60 | 61 | RUN set -ex \ 62 | && dnf -y update \ 63 | && dnf -y install coreutils --allowerasing \ 64 | && dnf -y install \ 65 | glibc-langpack-en \ 66 | openssh-server \ 67 | sudo \ 68 | && dnf clean all 69 | 70 | FROM rockylinux:9 AS install_rockylinux_9 71 | 72 | RUN set -ex \ 73 | && dnf -y update \ 74 | && dnf -y install coreutils --allowerasing \ 75 | && dnf -y install \ 76 | glibc-langpack-en \ 77 | openssh-server \ 78 | python-unversioned-command \ 79 | sudo \ 80 | && dnf clean all 81 
| 82 | FROM almalinux:9 AS install_almalinux_9 83 | 84 | RUN set -ex \ 85 | && dnf -y update \ 86 | && dnf -y install coreutils --allowerasing \ 87 | && dnf -y install \ 88 | glibc-langpack-en \ 89 | openssh-server \ 90 | python-unversioned-command \ 91 | sudo \ 92 | && dnf clean all 93 | 94 | FROM oraclelinux:9 AS install_oraclelinux_9 95 | 96 | RUN set -ex \ 97 | && dnf -y update \ 98 | && dnf -y install coreutils --allowerasing \ 99 | && dnf -y install \ 100 | glibc-langpack-en \ 101 | openssh-server \ 102 | python-unversioned-command \ 103 | sudo \ 104 | && dnf clean all 105 | 106 | FROM install_${DOCKER_IMAGE_NAME}_${DOCKER_IMAGE_TAG} as server 107 | 108 | # Create ubuntu user only if it doesn't already exist 109 | RUN id -u ubuntu 2>/dev/null || useradd --home-dir /home/ubuntu --system ubuntu 110 | 111 | COPY --chown=ubuntu:ubuntu --chmod=600 ssh_pub_key /home/ubuntu/.ssh/authorized_keys 112 | 113 | RUN mkdir -p /etc/sudoers.d/ && echo 'ubuntu ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers.d/ubuntu 114 | 115 | EXPOSE 22 116 | EXPOSE 80 117 | EXPOSE 8000 118 | 119 | CMD [ "/sbin/init" ] 120 | 121 | FROM ${TARGET} 122 | -------------------------------------------------------------------------------- /.github/workflows/archivematica-playbook.yml: -------------------------------------------------------------------------------- 1 | name: Archivematica Playbook Test 2 | on: 3 | workflow_dispatch: 4 | pull_request: 5 | paths: 6 | - "playbooks/archivematica-noble/**" 7 | push: 8 | branches: 9 | - "master" 10 | paths: 11 | - "playbooks/archivematica-noble/**" 12 | schedule: 13 | - cron: "0 2 * * *" 14 | jobs: 15 | test: 16 | name: Test Archivematica playbook 17 | runs-on: ubuntu-24.04 18 | steps: 19 | - name: Check out code 20 | uses: actions/checkout@v4 21 | - name: "Create Vagrant boxes directory" 22 | run: | 23 | mkdir -p /home/runner/.vagrant.d/boxes 24 | - name: "Cache Vagrant boxes" 25 | uses: "actions/cache@v4" 26 | with: 27 | path: "/home/runner/.vagrant.d/boxes" 28 | key: "${{ runner.os }}-boxes" 29 | - name: Install Vagrant 30 | run: | 31 | wget -O- https://apt.releases.hashicorp.com/gpg | sudo gpg --dearmor -o /usr/share/keyrings/hashicorp-archive-keyring.gpg 32 | echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list 33 | sudo apt update && sudo apt install vagrant 34 | - name: Install VirtualBox 35 | run: | 36 | wget -O- https://www.virtualbox.org/download/oracle_vbox_2016.asc | sudo gpg --yes --output /usr/share/keyrings/oracle-virtualbox-2016.gpg --dearmor 37 | echo "deb [arch=amd64 signed-by=/usr/share/keyrings/oracle-virtualbox-2016.gpg] https://download.virtualbox.org/virtualbox/debian $(lsb_release -cs) contrib" | sudo tee /etc/apt/sources.list.d/virtualbox.list 38 | sudo apt update && sudo apt install virtualbox-7.1 39 | - name: Update vbox networks 40 | run: | 41 | sudo mkdir -p /etc/vbox/ 42 | echo "* 192.168.168.198/24" | sudo tee -a /etc/vbox/networks.conf 43 | - name: Set the user environment as VirtualBox expects it 44 | run: | 45 | echo "USER=$USER" >> $GITHUB_ENV 46 | echo "LOGNAME=$USER" >> $GITHUB_ENV 47 | - name: Download the Ansible roles 48 | working-directory: ${{ github.workspace }}/playbooks/archivematica-noble 49 | run: | 50 | ansible-galaxy install -f -p roles/ -r requirements.yml 51 | - name: Create the virtual machine and provision it 52 | working-directory: ${{ github.workspace }}/playbooks/archivematica-noble 53 | run: | 54 | vagrant 
up 55 | - name: Test AM API - Get processing configurations 56 | run: | 57 | test $( \ 58 | curl \ 59 | --silent \ 60 | --header 'Authorization: ApiKey admin:this_is_the_am_api_key' \ 61 | --header 'Content-Type: application/json' \ 62 | 'http://192.168.168.198/api/processing-configuration/' \ 63 | | jq -r '.processing_configurations == ["automated", "default"]' \ 64 | ) == true 65 | - name: Test SS API - Get pipeline count 66 | run: | 67 | test $( \ 68 | curl \ 69 | --silent \ 70 | --header 'Authorization: ApiKey admin:this_is_the_ss_api_key' \ 71 | --header 'Content-Type: application/json' \ 72 | 'http://192.168.168.198:8000/api/v2/pipeline/' \ 73 | | jq -r '.meta.total_count == 1' \ 74 | ) == true 75 | -------------------------------------------------------------------------------- /tests/archivematica-acceptance-tests/vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_configure_dashboard: "yes" 6 | archivematica_src_configure_ss: "yes" 7 | 8 | archivematica_src_configure_am_user: "admin" 9 | archivematica_src_configure_am_password: "archivematica" 10 | archivematica_src_configure_am_email: "admin@example.com" 11 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 12 | archivematica_src_configure_am_site_url: "http://archivematica" 13 | 14 | archivematica_src_configure_ss_user: "admin" 15 | archivematica_src_configure_ss_password: "archivematica" 16 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 17 | archivematica_src_configure_ss_url: "http://archivematica:8000" 18 | archivematica_src_configure_ss_email: "admin@example.com" 19 | 20 | archivematica_src_am_db_password: "aaGKHyMls.20ki$" 21 | archivematica_src_ss_db_password: "aaGKHyMls.20ki$" 22 | 23 | # By default the archivematica-src role sets `MCP` and `SS` as the database 24 | # names and a single `archivematica` user for both services. The 25 | # artefactual.percona overwrites existing user privileges when it creates 26 | # databases (it should set `append_privs: true` on the `mysql_user` module call) 27 | # so the SS database privileges overwrite the MCP ones. Setting different 28 | # users for each database works around this issue. 
29 | archivematica_src_am_db_user: "archivematica" 30 | archivematica_src_ss_db_user: "ss" 31 | 32 | # elasticsearch role 33 | 34 | elasticsearch_version: "8.19.2" 35 | 36 | # percona role 37 | 38 | mysql_version_major: "8" 39 | mysql_version_minor: "0" 40 | 41 | mysql_root_password: "aaGKHyMls.20ki$" 42 | 43 | mysql_databases: 44 | - name: "{{ archivematica_src_am_db_name }}" 45 | collation: "{{ archivematica_src_am_db_collation }}" 46 | encoding: "{{ archivematica_src_am_db_encoding }}" 47 | - name: "{{ archivematica_src_ss_db_name }}" 48 | collation: "{{ archivematica_src_ss_db_collation }}" 49 | encoding: "{{ archivematica_src_ss_db_encoding }}" 50 | 51 | mysql_users: 52 | - name: "{{ archivematica_src_am_db_user }}" 53 | pass: "{{ archivematica_src_am_db_password }}" 54 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 55 | host: "{{ archivematica_src_am_db_host }}" 56 | - name: "{{ archivematica_src_ss_db_user }}" 57 | pass: "{{ archivematica_src_ss_db_password }}" 58 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 59 | host: "{{ archivematica_src_ss_db_host }}" 60 | 61 | archivematica_src_ss_environment: 62 | SS_DB_URL: "mysql://{{ archivematica_src_ss_db_user }}:{{ archivematica_src_ss_db_password }}@{{ archivematica_src_ss_db_host }}:{{ archivematica_src_ss_db_port }}/{{ archivematica_src_ss_db_name }}" 63 | 64 | # Enable XML metadata validation 65 | 66 | archivematica_src_am_mcpclient_environment: 67 | ARCHIVEMATICA_MCPCLIENT_MCPCLIENT_METADATA_XML_VALIDATION_ENABLED: "true" 68 | METADATA_XML_VALIDATION_SETTINGS_FILE: "/home/{{ ansible_user_id }}/archivematica-sampledata/xml-validation/xml_validation.py" 69 | XML_CATALOG_FILES: "/home/{{ ansible_user_id }}/archivematica-sampledata/xml-validation/catalog.xml" 70 | 71 | # Disable FITS 72 | 73 | archivematica_src_configure_fpcommand: 74 | FITS: 75 | enabled: '0' 76 | field_name: 'description' 77 | 78 | archivematica_src_configure_fprule: 79 | c3b06895-ef9d-401e-8c51-ac585f955655: 80 | enabled: '0' 81 | field_name: 'uuid' 82 | -------------------------------------------------------------------------------- /playbooks/archivematica-centos7/README.md: -------------------------------------------------------------------------------- 1 | # Archivematica playbook 2 | 3 | The provided playbook installs Archivematica on a local vagrant virtual 4 | machine. For instructions on using deploy-pub to install Archivematica on a 5 | Digital Ocean droplet, see the [Digital Ocean Droplet 6 | Deploy](docs/digital-ocean-install-example.rst) document. 7 | 8 | ## Requirements 9 | 10 | - Vagrant 1.9.2 or newer (note that vagrant 1.9.1 has a bug when restarting network services in RHEL https://github.com/mitchellh/vagrant/pull/8148). Vagrant has changed its image repository URLs, so when using an old Vagrant version, see https://github.com/hashicorp/vagrant/issues/9442 11 | - Ansible 2.2 or newer 12 | 13 | ## How to use 14 | 15 | 1. Download the Ansible roles: 16 | ``` 17 | $ ansible-galaxy install -f -p roles/ -r requirements.yml 18 | ``` 19 | 20 | 2. Create the virtual machine and provision it: 21 | ``` 22 | $ vagrant up 23 | ``` 24 | After provisioning ends, Achivematica UI should be accessible at http://xxx.xxx.xxx.xxx and the Storage Service UI at http://xxx.xxx.xxx.xxx:8000 where xxx.xxx.xxx.xxx is the IP address specified in the `ip` variable of the Vagrantfile 25 | 26 | 3. To ssh to the VM, run: 27 | ``` 28 | $ vagrant ssh 29 | ``` 30 | 31 | 4. 
If you want to forward your SSH agent too, run: 32 | ``` 33 | $ vagrant ssh -- -A 34 | ``` 35 | 36 | 5. To (re-)provision the VM, run: 37 | * Using vagrant: 38 | ``` 39 | $ vagrant provision 40 | ``` 41 | * Using vagrant and custom ANSIBLE_ARGS. Use colons (:) to separate multiple parameters. For example to pass a tag to install Storage Service only, and verbose flag: 42 | ``` 43 | $ ANSIBLE_ARGS="--tags=amsrc-ss:-vvv" vagrant provision (in bash) 44 | $ env ANSIBLE_ARGS="--tags=amsrc-ss:-vvv" vagrant provision (in fish) 45 | ``` 46 | Note that it is not possible to pass the (--extra-vars to ansible using the above, because extra_vars is reassigned in the Vagrantfile) 47 | * Using ansible commands directly (this allows you to pass ansible-specific parameters, 48 | such as tags and the verbose flag; remember to use extra-vars to pass the variables in the Vagrantfile ): 49 | ``` 50 | $ ansible-playbook -i .vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory singlenode.yml \ 51 | -u vagrant \ 52 | --private-key .vagrant/machines/am-local/virtualbox/private_key \ 53 | --extra-vars="archivematica_src_dir=/vagrant/src archivematica_src_environment_type=development" \ 54 | --tags="amsrc-pipeline-instcode" \ 55 | -v 56 | ``` 57 | 58 | 6. The ansible playbook `singlenode.yml` specified in the Vagrantfile will provision using the branches of archivematica specfied in the file `vars-singlenode.yml`. Edit this file if need to deploy other branches. 59 | 60 | 7. If you get errors regarding the Vagrant shared folders, they are usually due 61 | to different versions of VirtualBox. One way to fix it is using a Vagrant 62 | plugin that installs the host's VirtualBox Guest Additions on the guest system: 63 | ``` 64 | $ vagrant plugin install vagrant-vbguest 65 | $ vagrant vbguest 66 | ``` 67 | 68 | # Login and credentials 69 | 70 | If you are using the default values in vars-singlenode-XXXX.yml and Vagrantfile files, the login URLS are: 71 | 72 | * Dashboard: http://192.168.168.197 73 | * Storage Service: http://192.168.168.197:8000 74 | 75 | Credentials: 76 | 77 | * user: admin 78 | * password: archivematica 79 | 80 | For more archivematica development information, see: https://wiki.archivematica.org/Getting_started 81 | -------------------------------------------------------------------------------- /playbooks/atom-bionic/vars-singlenode-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # PLEASE NOTE THAT THE PASSWORD VALUES USED HERE ARE NOT SAFE 4 | 5 | # 6 | # atom role 7 | # 8 | 9 | atom_path: "/usr/share/nginx/atom" 10 | atom_repository_url: "https://github.com/artefactual/atom.git" 11 | atom_repository_version: "stable/2.6.x" 12 | atom_config_db_hostname: "127.0.0.1" 13 | atom_config_db_name: "atom" 14 | atom_config_db_username: "atom-user" 15 | atom_config_db_password: "ATOMPASSWORD" 16 | atom_config_db_port: "3306" 17 | atom_config_db_encoding: "utf8mb4" 18 | atom_es_host: "127.0.0.1" 19 | atom_es_port: "9200" 20 | atom_es_config_version: "2.5" 21 | atom_mysql_user_name: "{{ atom_config_db_username }}" 22 | atom_mysql_user_pass: "{{ atom_config_db_password }}" 23 | atom_mysql_user_priv: "atom.*:ALL,GRANT" 24 | atom_mysql_user_host: "%" 25 | 26 | # 27 | # nginx role 28 | # 29 | 30 | nginx_configs: 31 | atom_backend: 32 | - upstream atom { 33 | server unix:/var/run/php-fpm.atom.sock; 34 | } 35 | 36 | nginx_sites: 37 | atom: 38 | - listen 80 39 | - set $atom_path {{ atom_path }} 40 | - root $atom_path 41 | - server_name _ 42 | - 
client_max_body_size 72M 43 | - location / { try_files $uri /index.php?$args; } 44 | - location ~ /\. { 45 | deny all; 46 | return 404; 47 | } 48 | - location ~* (\.yml|\.ini|\.tmpl)$ { 49 | deny all; 50 | return 404; 51 | } 52 | - location ~* /(?:uploads|files)/.*\.php$ { 53 | deny all; 54 | return 404; 55 | } 56 | - location ~* /uploads/r/(.*)/conf/ { } 57 | - location ~* ^/uploads/r/(.*)$ { 58 | include /etc/nginx/fastcgi_params; 59 | set $index /index.php; 60 | fastcgi_param SCRIPT_FILENAME $document_root$index; 61 | fastcgi_param SCRIPT_NAME $index; 62 | fastcgi_pass atom; 63 | } 64 | - location ~ ^/private/(.*)$ { 65 | internal; 66 | alias $atom_path/$1; 67 | } 68 | - location ~ ^/(index|qubit_dev)\.php(/|$) { 69 | include /etc/nginx/fastcgi_params; 70 | fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name; 71 | fastcgi_split_path_info ^(.+\.php)(/.*)$; 72 | fastcgi_pass atom; 73 | } 74 | - location ~* \.php$ { 75 | deny all; 76 | return 404; 77 | } 78 | 79 | # elasticsearch role 80 | 81 | elasticsearch_version: "5.6.0" 82 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 83 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 84 | elasticsearch_heap_size: "1g" 85 | elasticsearch_max_open_files: "65535" 86 | elasticsearch_timezone: "UTC" 87 | elasticsearch_node_max_local_storage_nodes: "1" 88 | elasticsearch_index_mapper_dynamic: "true" 89 | elasticsearch_memory_bootstrap_mlockall: "true" 90 | elasticsearch_install_java: "true" 91 | #elasticsearch_thread_pools: 92 | # - "thread_pool.write.size: 2" 93 | # - "thread_pool.write.queue_size: 1000" 94 | elasticsearch_network_http_max_content_lengtht: 1024mb 95 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 96 | elasticsearch_max_locked_memory: "unlimited" 97 | elasticsearch_network_host: "127.0.0.1" 98 | 99 | # 100 | # percona role 101 | # 102 | 103 | mysql_version_major: "8" 104 | mysql_version_minor: "0" 105 | 106 | 107 | mysql_databases: 108 | - name: "{{ atom_config_db_name }}" 109 | collation: "utf8mb4_0900_ai_ci" 110 | encoding: "{{ atom_config_db_encoding }}" 111 | 112 | mysql_users: 113 | - name: "{{ atom_mysql_user_name }}" 114 | pass: "{{ atom_mysql_user_pass }}" 115 | priv: "{{ atom_mysql_user_priv }}" 116 | host: "{{ atom_mysql_user_host }}" 117 | 118 | 119 | mysql_optimizer_switch: "'block_nested_loop=off'" 120 | mysql_sql_mode: "STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION" 121 | mysql_root_password: "MYSQLROOTPASSWORD" 122 | mysql_bind_address: "0.0.0.0" 123 | mysql_default_authentication_plugin: "mysql_native_password" 124 | -------------------------------------------------------------------------------- /tests/dip-upload/archivematica-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # archivematica-src role 4 | 5 | archivematica_src_configure_dashboard: "yes" 6 | archivematica_src_configure_ss: "yes" 7 | 8 | archivematica_src_configure_am_user: "admin" 9 | archivematica_src_configure_am_password: "archivematica" 10 | archivematica_src_configure_am_email: "admin@example.com" 11 | archivematica_src_configure_am_api_key: "this_is_the_am_api_key" 12 | archivematica_src_configure_am_site_url: "http://archivematica" 13 | 14 | archivematica_src_configure_ss_user: "admin" 15 | archivematica_src_configure_ss_password: "archivematica" 16 | archivematica_src_configure_ss_api_key: "this_is_the_ss_api_key" 17 | archivematica_src_configure_ss_url: "http://archivematica:8000" 18 | 
archivematica_src_configure_ss_email: "admin@example.com" 19 | 20 | archivematica_src_am_db_password: "aaGKHyMls.20ki$" 21 | archivematica_src_ss_db_password: "aaGKHyMls.20ki$" 22 | 23 | # By default the archivematica-src role sets `MCP` and `SS` as the database 24 | # names and a single `archivematica` user for both services. The 25 | # artefactual.percona overwrites existing user privileges when it creates 26 | # databases (it should set `append_privs: true` on the `mysql_user` module call) 27 | # so the SS database privileges overwrite the MCP ones. Setting different 28 | # users for each database works around this issue. 29 | archivematica_src_am_db_user: "archivematica" 30 | archivematica_src_ss_db_user: "ss" 31 | 32 | # elasticsearch role 33 | 34 | elasticsearch_version: "8.19.2" 35 | 36 | # percona role 37 | 38 | mysql_version_major: "8" 39 | mysql_version_minor: "0" 40 | 41 | mysql_root_password: "aaGKHyMls.20ki$" 42 | 43 | mysql_databases: 44 | - name: "{{ archivematica_src_am_db_name }}" 45 | collation: "{{ archivematica_src_am_db_collation }}" 46 | encoding: "{{ archivematica_src_am_db_encoding }}" 47 | - name: "{{ archivematica_src_ss_db_name }}" 48 | collation: "{{ archivematica_src_ss_db_collation }}" 49 | encoding: "{{ archivematica_src_ss_db_encoding }}" 50 | 51 | mysql_users: 52 | - name: "{{ archivematica_src_am_db_user }}" 53 | pass: "{{ archivematica_src_am_db_password }}" 54 | priv: "{{ archivematica_src_am_db_name }}.*:ALL,GRANT" 55 | host: "{{ archivematica_src_am_db_host }}" 56 | - name: "{{ archivematica_src_ss_db_user }}" 57 | pass: "{{ archivematica_src_ss_db_password }}" 58 | priv: "{{ archivematica_src_ss_db_name }}.*:ALL,GRANT" 59 | host: "{{ archivematica_src_ss_db_host }}" 60 | 61 | archivematica_src_ss_environment: 62 | SS_DB_URL: "mysql://{{ archivematica_src_ss_db_user }}:{{ archivematica_src_ss_db_password }}@{{ archivematica_src_ss_db_host }}:{{ archivematica_src_ss_db_port }}/{{ archivematica_src_ss_db_name }}" 63 | 64 | # Enable XML metadata validation 65 | 66 | archivematica_src_am_mcpclient_environment: 67 | ARCHIVEMATICA_MCPCLIENT_MCPCLIENT_METADATA_XML_VALIDATION_ENABLED: "true" 68 | METADATA_XML_VALIDATION_SETTINGS_FILE: "/home/{{ ansible_user_id }}/archivematica-sampledata/xml-validation/xml_validation.py" 69 | 70 | # Disable FITS 71 | 72 | archivematica_src_configure_fpcommand: 73 | FITS: 74 | enabled: '0' 75 | field_name: 'description' 76 | 77 | archivematica_src_configure_fprule: 78 | c3b06895-ef9d-401e-8c51-ac585f955655: 79 | enabled: '0' 80 | field_name: 'uuid' 81 | 82 | # DIP upload 83 | 84 | # The ansible-archivematica-src role supports configuring dashboard settings, 85 | # but it connects to the AtoM host automatically to set SSH credentials. In the 86 | # two VMs scenario of this test, the AtoM host is provisioned after the 87 | # Archivematica host so that approach does not work. 88 | # 89 | # Instead, these custom settings are populated in the post tasks of the 90 | # Archivematica provisioning playbook. 
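#
# Roughly speaking, these values make the dashboard copy each DIP to the AtoM
# deposit directory over SSH (something along the lines of
#   rsync -az <local DIP> atom:/home/archivematica/atom_sword_deposit/
# ) and then trigger the deposit through AtoM's SWORD API using the email and
# password below, so the AtoM VM must accept SSH connections from the
# Archivematica VM.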
91 | custom_archivematica_src_configure_dashboardsettings: 92 | url: "http://atom" 93 | rsync_target: "atom:/home/archivematica/atom_sword_deposit" 94 | email: "dip_upload@example.com" 95 | password: "dip_upload@example.com" 96 | key: "this_is_the_atom_dip_upload_api_key" 97 | -------------------------------------------------------------------------------- /playbooks/atom-focal/vars-singlenode-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # PLEASE NOTE THAT THE PASSWORD VALUES USED HERE ARE NOT SAFE 4 | 5 | # 6 | # atom role 7 | # 8 | 9 | atom_path: "/usr/share/nginx/atom" 10 | atom_repository_url: "https://github.com/artefactual/atom.git" 11 | atom_repository_version: "qa/2.x" 12 | atom_config_db_hostname: "127.0.0.1" 13 | atom_config_db_name: "atom" 14 | atom_config_db_username: "atom-user" 15 | atom_config_db_password: "ATOMPASSWORD" 16 | atom_config_db_port: "3306" 17 | atom_config_db_encoding: "utf8mb4" 18 | atom_es_host: "127.0.0.1" 19 | atom_es_port: "9200" 20 | atom_es_config_version: "2.5" 21 | atom_mysql_user_name: "{{ atom_config_db_username }}" 22 | atom_mysql_user_pass: "{{ atom_config_db_password }}" 23 | atom_mysql_user_priv: "atom.*:ALL,GRANT" 24 | atom_mysql_user_host: "%" 25 | atom_csrf_protection: "yes" 26 | 27 | # 28 | # nginx role 29 | # 30 | 31 | nginx_configs: 32 | atom_backend: 33 | - upstream atom { 34 | server unix:/var/run/php-fpm.atom.sock; 35 | } 36 | 37 | nginx_sites: 38 | atom: 39 | - listen 80 40 | - set $atom_path {{ atom_path }} 41 | - root $atom_path 42 | - server_name _ 43 | - client_max_body_size 72M 44 | - location / { try_files $uri /index.php?$args; } 45 | - location ~ /\. { 46 | deny all; 47 | return 404; 48 | } 49 | - location ~* (\.yml|\.ini|\.tmpl)$ { 50 | deny all; 51 | return 404; 52 | } 53 | - location ~* /(?:uploads|files)/.*\.php$ { 54 | deny all; 55 | return 404; 56 | } 57 | - location ~* /uploads/r/(.*)/conf/ { } 58 | - location ~* ^/uploads/r/(.*)$ { 59 | include /etc/nginx/fastcgi_params; 60 | set $index /index.php; 61 | fastcgi_param SCRIPT_FILENAME $document_root$index; 62 | fastcgi_param SCRIPT_NAME $index; 63 | fastcgi_pass atom; 64 | } 65 | - location ~ ^/private/(.*)$ { 66 | internal; 67 | alias $atom_path/$1; 68 | } 69 | - location ~ ^/(index|qubit_dev)\.php(/|$) { 70 | include /etc/nginx/fastcgi_params; 71 | fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name; 72 | fastcgi_split_path_info ^(.+\.php)(/.*)$; 73 | fastcgi_pass atom; 74 | } 75 | - location ~* \.php$ { 76 | deny all; 77 | return 404; 78 | } 79 | 80 | # elasticsearch role 81 | 82 | elasticsearch_version: "5.6.0" 83 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 84 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 85 | elasticsearch_heap_size: "1g" 86 | elasticsearch_max_open_files: "65535" 87 | elasticsearch_timezone: "UTC" 88 | elasticsearch_node_max_local_storage_nodes: "1" 89 | elasticsearch_index_mapper_dynamic: "true" 90 | elasticsearch_memory_bootstrap_mlockall: "true" 91 | elasticsearch_install_java: "true" 92 | #elasticsearch_thread_pools: 93 | # - "thread_pool.write.size: 2" 94 | # - "thread_pool.write.queue_size: 1000" 95 | elasticsearch_network_http_max_content_lengtht: 1024mb 96 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 97 | elasticsearch_max_locked_memory: "unlimited" 98 | elasticsearch_network_host: "127.0.0.1" 99 | 100 | # 101 | # percona role 102 | # 103 | 104 | mysql_version_major: "8" 105 | mysql_version_minor: "0" 106 | 107 | 108 | 
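# The databases and users lists below are consumed by the artefactual.percona
# role, which creates the AtoM schema and grants the application user the
# privileges defined above. utf8mb4_0900_ai_ci is the MySQL 8.0 default
# collation for the utf8mb4 encoding set in atom_config_db_encoding.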
mysql_databases: 109 | - name: "{{ atom_config_db_name }}" 110 | collation: "utf8mb4_0900_ai_ci" 111 | encoding: "{{ atom_config_db_encoding }}" 112 | 113 | mysql_users: 114 | - name: "{{ atom_mysql_user_name }}" 115 | pass: "{{ atom_mysql_user_pass }}" 116 | priv: "{{ atom_mysql_user_priv }}" 117 | host: "{{ atom_mysql_user_host }}" 118 | 119 | 120 | mysql_optimizer_switch: "'block_nested_loop=off'" 121 | mysql_sql_mode: "STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION" 122 | mysql_root_password: "MYSQLROOTPASSWORD" 123 | mysql_bind_address: "0.0.0.0" 124 | mysql_default_authentication_plugin: "caching_sha2_password" 125 | -------------------------------------------------------------------------------- /tests/archivematica-acceptance-tests/README.md: -------------------------------------------------------------------------------- 1 | # Archivematica Acceptance Tests (AMAUATs) 2 | 3 | ## Software requirements 4 | 5 | - Podman 6 | - crun >= 1.15 7 | - Python 3 8 | - curl 9 | - Latest Google Chrome with chromedriver or Firefox with geckodriver 10 | - 7-Zip 11 | 12 | ## Tested Docker images 13 | 14 | This playbook has been tested with Podman 3.4.4 and podman-compose 1.1.0 15 | using any of the following Docker images and tags: 16 | 17 | - rockylinux:9 18 | - rockylinux:8 19 | - ubuntu:24.04 20 | - ubuntu:22.04 21 | 22 | ## Installing Ansible 23 | 24 | Create a virtual environment and activate it: 25 | 26 | ```shell 27 | python3 -m venv .venv 28 | source .venv/bin/activate 29 | ``` 30 | 31 | Install the Python requirements: 32 | 33 | ```shell 34 | python3 -m pip install -r requirements.txt 35 | ``` 36 | 37 | Install the playbook requirements: 38 | 39 | ```shell 40 | ansible-galaxy install -f -p roles/ -r requirements.yml 41 | ``` 42 | 43 | ## Starting the Compose environment 44 | 45 | Copy your SSH public key as the `ssh_pub_key` file next to the `Dockerfile`: 46 | 47 | ```shell 48 | cp $HOME/.ssh/id_rsa.pub ssh_pub_key 49 | ``` 50 | 51 | Set the Docker image and tag to use for the Compose services: 52 | 53 | ```shell 54 | export DOCKER_IMAGE_NAME=ubuntu 55 | export DOCKER_IMAGE_TAG=22.04 56 | ``` 57 | 58 | Start the Compose services: 59 | 60 | ```shell 61 | podman-compose up --detach 62 | ``` 63 | 64 | ## Installing Archivematica 65 | 66 | Run the Archivematica installation playbook: 67 | 68 | ```shell 69 | export ANSIBLE_HOST_KEY_CHECKING=False 70 | export ANSIBLE_REMOTE_PORT=2222 71 | ansible-playbook -i localhost, playbook.yml \ 72 | -u ubuntu \ 73 | -v 74 | ``` 75 | 76 | Add the `ubuntu` user to the `archivematica` group so it can copy AIPs 77 | from the shared directory: 78 | 79 | ```shell 80 | podman-compose exec --user root archivematica usermod -a -G archivematica ubuntu 81 | ``` 82 | 83 | The AMAUATs expect the Archivematica sample data to be in the 84 | `/home/archivematica` directory: 85 | 86 | ```shell 87 | podman-compose exec --user root archivematica ln -s /home/ubuntu /home/archivematica 88 | ``` 89 | 90 | ## Testing the Archivematica installation 91 | 92 | Call an Archivematica API endpoint: 93 | 94 | ```shell 95 | curl --header "Authorization: ApiKey admin:this_is_the_am_api_key" http://localhost:8000/api/processing-configuration/ 96 | ``` 97 | 98 | Call a Storage Service API endpoint: 99 | 100 | ```shell 101 | curl --header "Authorization: ApiKey admin:this_is_the_ss_api_key" http://localhost:8001/api/v2/pipeline/ 102 | ``` 103 | 104 | ## Running an Acceptance Test 105 | 106 | Clone the AMAUATs repository: 107 | 108 | ```shell 109 | git clone 
https://github.com/artefactual-labs/archivematica-acceptance-tests AMAUATs 110 | cd AMAUATs 111 | ``` 112 | 113 | Install the AMAUATs requirements: 114 | 115 | ```shell 116 | python3 -m pip install -r requirements.txt 117 | ``` 118 | 119 | Run any [feature file](https://github.com/artefactual-labs/archivematica-acceptance-tests/tree/qa/1.x/features/black_box) 120 | in the AMAUATs using its filename. This example shows how to run the 121 | `create-aip.feature` file with `Chrome`. You need to pass your SSH identity file: 122 | 123 | ```shell 124 | env HEADLESS=1 behave -i create-aip.feature \ 125 | -v \ 126 | --no-capture \ 127 | --no-capture-stderr \ 128 | --no-logcapture \ 129 | --no-skipped \ 130 | -D am_version=1.9 \ 131 | -D driver_name=Chrome \ 132 | -D am_username=admin \ 133 | -D am_password=archivematica \ 134 | -D am_url=http://localhost:8000/ \ 135 | -D am_api_key="this_is_the_am_api_key" \ 136 | -D ss_username=admin \ 137 | -D ss_password=archivematica \ 138 | -D ss_api_key="this_is_the_ss_api_key" \ 139 | -D ss_url=http://localhost:8001/ \ 140 | -D home=ubuntu \ 141 | -D server_user=ubuntu \ 142 | -D transfer_source_path=/home/ubuntu/archivematica-sampledata/TestTransfers/acceptance-tests \ 143 | -D ssh_identity_file=$HOME/.ssh/id_rsa 144 | ``` 145 | 146 | Some feature files (AIP encryption and UUIDs for directories) copy AIPs from 147 | the remote host using `scp` but they assume port 22 is used for the SSH service. 148 | You can set this in your `$HOME/.ssh/config` file to make them work with port 149 | 2222: 150 | 151 | ```console 152 | Host localhost 153 | Port 2222 154 | ``` 155 | -------------------------------------------------------------------------------- /tests/archivematica-upgrade/README.md: -------------------------------------------------------------------------------- 1 | # Archivematica playbook upgrade test 2 | 3 | ## Software requirements 4 | 5 | - Podman 6 | - crun >= 1.14.4 7 | - Python 3 8 | - curl 9 | 10 | ## Installing Ansible 11 | 12 | Create a virtual environment and activate it: 13 | 14 | ```shell 15 | python3 -m venv .venv 16 | source .venv/bin/activate 17 | ``` 18 | 19 | Install the Python requirements (these versions are compatible with 20 | symbolic links which are used in the the artefactual-atom role): 21 | 22 | ```shell 23 | python3 -m pip install -r requirements.txt 24 | ``` 25 | 26 | ## Starting the Compose environment 27 | 28 | Copy your SSH public key as the `ssh_pub_key` file next to the `Dockerfile`: 29 | 30 | ```shell 31 | cp $HOME/.ssh/id_rsa.pub ssh_pub_key 32 | ``` 33 | 34 | Start the Compose services: 35 | 36 | ```shell 37 | podman-compose up --detach 38 | ``` 39 | 40 | ## Installing the stable version of Archivematica 41 | 42 | Install the requirements of the stable version: 43 | 44 | ```shell 45 | ansible-galaxy install -f -p roles/ -r ../../playbooks/archivematica-noble/requirements.yml 46 | ``` 47 | 48 | Run the Archivematica installation playbook passing the stable version as the 49 | `am_version` variable and the proper URLs for the Compose environment: 50 | 51 | ```shell 52 | export ANSIBLE_HOST_KEY_CHECKING=False 53 | export ANSIBLE_REMOTE_PORT=2222 54 | ansible-playbook -i localhost, playbook.yml \ 55 | -u ubuntu \ 56 | -e "am_version=1.16" \ 57 | -e "archivematica_src_configure_am_site_url=http://archivematica" \ 58 | -e "archivematica_src_configure_ss_url=http://archivematica:8000" \ 59 | -v 60 | ``` 61 | 62 | ## Testing the stable version of Archivematica 63 | 64 | Get the Archivematica stable version: 65 | 66 | ```shell 
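# --dump-header - writes the HTTP response headers to stdout, and the
# X-Archivematica-Version header in that output reports the installed release
# (it should match the 1.16 stable version deployed above).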
67 | curl \ 68 | --silent \ 69 | --dump-header - \ 70 | --header "Authorization: ApiKey admin:this_is_the_am_api_key" \ 71 | http://localhost:8000/api/processing-configuration/ | grep X-Archivematica-Version 72 | ``` 73 | 74 | Call an Archivematica API endpoint: 75 | 76 | ```shell 77 | curl --header "Authorization: ApiKey admin:this_is_the_am_api_key" http://localhost:8000/api/processing-configuration/ 78 | ``` 79 | 80 | Call a Storage Service API endpoint: 81 | 82 | ```shell 83 | curl --header "Authorization: ApiKey admin:this_is_the_ss_api_key" http://localhost:8001/api/v2/pipeline/ 84 | ``` 85 | 86 | ## Upgrading to the QA version of Archivematica 87 | 88 | Uninstall Elasticsearch 6.x: 89 | 90 | ```shell 91 | podman-compose exec --user root archivematica bash -c "apt-get purge -y elasticsearch" 92 | podman-compose exec --user root archivematica bash -c "rm -rf /etc/elasticsearch/ /var/lib/elasticsearch /var/log/elasticsearch" 93 | ``` 94 | 95 | Delete the requirements directory used for the stable version: 96 | 97 | ```shell 98 | rm -rf roles 99 | ``` 100 | 101 | Install the requirements of the QA version: 102 | 103 | ```shell 104 | ansible-galaxy install -f -p roles/ -r ../../playbooks/archivematica-noble/requirements-qa.yml 105 | ``` 106 | 107 | Run the Archivematica installation playbook passing the QA version as the 108 | `am_version` variable, the proper URLs for the Compose environment and 109 | the tag to upgrade installations: 110 | 111 | ```shell 112 | export ANSIBLE_HOST_KEY_CHECKING=False 113 | export ANSIBLE_REMOTE_PORT=2222 114 | ansible-playbook -i localhost, playbook.yml \ 115 | -u ubuntu \ 116 | -e "am_version=qa" \ 117 | -e "archivematica_src_configure_am_site_url=http://archivematica" \ 118 | -e "archivematica_src_configure_ss_url=http://archivematica:8000" \ 119 | -e "elasticsearch_version=8.19.2" \ 120 | -t "elasticsearch,archivematica-src" \ 121 | -v 122 | ``` 123 | 124 | ## Testing the QA version of Archivematica 125 | 126 | Get the Archivematica QA version: 127 | 128 | ```shell 129 | curl \ 130 | --silent \ 131 | --dump-header - \ 132 | --header "Authorization: ApiKey admin:this_is_the_am_api_key" \ 133 | http://localhost:8000/api/processing-configuration/ | grep X-Archivematica-Version 134 | ``` 135 | 136 | Call an Archivematica API endpoint: 137 | 138 | ```shell 139 | curl --header "Authorization: ApiKey admin:this_is_the_am_api_key" http://localhost:8000/api/processing-configuration/ 140 | ``` 141 | 142 | Call a Storage Service API endpoint: 143 | 144 | ```shell 145 | curl --header "Authorization: ApiKey admin:this_is_the_ss_api_key" http://localhost:8001/api/v2/pipeline/ 146 | ``` 147 | -------------------------------------------------------------------------------- /tests/dip-upload/atom-vars.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # atom role 4 | 5 | atom_repository_version: "qa/2.x" 6 | 7 | atom_user_email: "demo@example.com" 8 | atom_user_username: "demo" 9 | atom_user_password: "demo" 10 | 11 | atom_config_db_hostname: "127.0.0.1" 12 | atom_config_db_name: "atom" 13 | atom_config_db_username: "atom-user" 14 | atom_config_db_password: "aaGKHyMls.20ki$" 15 | 16 | atom_mysql_user_name: "{{ atom_config_db_username }}" 17 | atom_mysql_user_pass: "{{ atom_config_db_password }}" 18 | atom_mysql_user_priv: "atom.*:ALL,GRANT" 19 | atom_mysql_user_host: "%" 20 | 21 | atom_csrf_protection: "yes" 22 | 23 | atom_auto_init: "yes" 24 | 25 | atom_sword_deposit_dir: "/home/archivematica/atom_sword_deposit" 26 | 
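# The atom_dip_upload_* values below are used by the post tasks in atom.yml to
# create a dedicated AtoM superuser for DIP uploads and to store its REST API
# key; Archivematica authenticates against AtoM with this key when uploading
# DIPs (see the "Testing DIP upload" section of the README).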
27 | atom_dip_upload_atom_database: "atom" 28 | atom_dip_upload_atom_user: "dip_upload" 29 | atom_dip_upload_atom_email: "dip_upload@example.com" 30 | atom_dip_upload_atom_password: "dip_upload@example.com" 31 | atom_dip_upload_atom_api_key: "this_is_the_atom_dip_upload_api_key" 32 | 33 | atom_es_config_version: "2.10" 34 | 35 | atom_themes: [] 36 | 37 | # nginx role 38 | 39 | nginx_configs: 40 | atom_backend: 41 | - upstream atom { 42 | server unix:/var/run/php-fpm.atom.sock; 43 | } 44 | 45 | nginx_sites: 46 | atom: 47 | - listen 80 48 | - server_name _ 49 | - '{%- if atom_revision_directory|bool -%} 50 | set $atom_path {{ atom_path }}/{{ atom_revision_directory_latest_symlink_dir }} 51 | {%- else -%} 52 | set $atom_path {{ atom_path }} 53 | {%- endif -%}' 54 | - root $atom_path 55 | - client_max_body_size {{ atom_pool_php_post_max_size | default('520M') }} 56 | - proxy_max_temp_file_size {{ atom_nginx_proxy_max_temp_file_size | default('1024m') }} 57 | - '{% if nginx_auth_basic_files|length > 0 -%} 58 | satisfy any; 59 | allow 127.0.0.1; 60 | {%- if atom_http_auth_allowed_hosts is defined and atom_http_auth_allowed_hosts| length >0 -%} 61 | {%- for allowed_hosts in atom_http_auth_allowed_hosts -%} 62 | allow {{ allowed_hosts }}; 63 | {%- endfor -%} 64 | {%- endif -%} 65 | deny all; 66 | auth_basic "Restricted"; 67 | auth_basic_user_file /etc/nginx/auth_basic/htpasswd.{{ site | default("atom") }} 68 | {%- endif -%}' 69 | - location ~* ^/(css|dist|js|images|plugins|vendor)/.*\.(css|gif|ico|jpg|js|map|pdf|png|svg|ttf|woff|woff2)$ { } 70 | - location ~* ^/(downloads)/.*\.(csv|html|pdf|rtf|xml|zip)$ { } 71 | - location ~ ^/(ead.dtd|favicon.ico|robots.txt|sitemap.*)$ { } 72 | - location / { 73 | try_files $uri /index.php?$args; 74 | if (-f $request_filename) { 75 | return 403; 76 | } 77 | } 78 | - location ~* /uploads/r/(.*)/conf/ { } 79 | - location ~* ^/uploads/r/(.*)$ { 80 | include /etc/nginx/fastcgi_params; 81 | set $index /index.php; 82 | fastcgi_param SCRIPT_FILENAME $document_root$index; 83 | fastcgi_param SCRIPT_NAME $index; 84 | fastcgi_pass atom; 85 | } 86 | - location ~ ^/private/(.*)$ { 87 | internal; 88 | alias $atom_path/$1; 89 | } 90 | - location ~ ^/(index|qubit_dev)\.php(/|$) { 91 | include /etc/nginx/fastcgi_params; 92 | fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name; 93 | fastcgi_split_path_info ^(.+\.php)(/.*)$; 94 | fastcgi_read_timeout {{ atom_pool_php_max_execution_time }}; 95 | fastcgi_pass atom; 96 | } 97 | 98 | nginx_http_params: 99 | - sendfile "on" 100 | - tcp_nopush "on" 101 | - tcp_nodelay "on" 102 | - keepalive_timeout "65" 103 | - log_format timed_combined '[$time_local] $http_x_forwarded_for - $remote_addr $host $remote_user ' '"$request" $status $body_bytes_sent ' '"$http_referer" "$http_user_agent" $request_time $upstream_response_time $pipe' 104 | - access_log "{{ nginx_log_dir }}/access.log" timed_combined 105 | - error_log "{{ nginx_log_dir }}/error.log" 106 | - server_tokens off 107 | - types_hash_max_size 2048 108 | - server_names_hash_bucket_size 128 109 | - client_max_body_size 72M 110 | 111 | # percona role 112 | 113 | mysql_version_major: "8" 114 | mysql_version_minor: "0" 115 | 116 | mysql_root_password: "aaGKHyMls.20ki$" 117 | 118 | mysql_databases: 119 | - name: "{{ atom_config_db_name }}" 120 | collation: "{{ mysql_collation_server }}" 121 | encoding: "{{ mysql_character_set_server }}" 122 | 123 | mysql_users: 124 | - name: "{{ atom_mysql_user_name }}" 125 | pass: "{{ atom_mysql_user_pass }}" 126 | priv: "{{ 
atom_mysql_user_priv }}" 127 | host: "{{ atom_mysql_user_host }}" 128 | 129 | mysql_optimizer_switch: "'block_nested_loop=off'" 130 | mysql_sql_mode: "STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION" 131 | mysql_bind_address: "0.0.0.0" 132 | mysql_default_authentication_plugin: "caching_sha2_password" 133 | 134 | # elasticsearch role 135 | 136 | elasticsearch_version: "7.10.2" 137 | -------------------------------------------------------------------------------- /playbooks/atom-noble/vars-singlenode-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # PLEASE NOTE THAT THE PASSWORD VALUES USED HERE ARE NOT SAFE 4 | 5 | # 6 | # atom role 7 | # 8 | 9 | atom_path: "/usr/share/nginx/atom" 10 | atom_repository_url: "https://github.com/artefactual/atom.git" 11 | atom_repository_version: "v2.9.0" 12 | atom_config_db_hostname: "127.0.0.1" 13 | atom_config_db_name: "atom" 14 | atom_config_db_username: "atom-user" 15 | atom_config_db_password: "ATOMPASSWORD" 16 | atom_config_db_port: "3306" 17 | atom_config_db_encoding: "utf8mb4" 18 | atom_es_host: "127.0.0.1" 19 | atom_es_port: "9200" 20 | atom_es_config_version: "2.9" 21 | atom_mysql_user_name: "{{ atom_config_db_username }}" 22 | atom_mysql_user_pass: "{{ atom_config_db_password }}" 23 | atom_mysql_user_priv: "atom.*:ALL,GRANT" 24 | atom_mysql_user_host: "%" 25 | atom_csrf_protection: "yes" 26 | 27 | atom_php_install_memprof_module: "yes" 28 | atom_worker_systemd_memory_limit: "1000M" 29 | atom_worker_systemd_execstart_php_extra_args: "-dextension=memprof.so" 30 | atom_worker_systemd_start_limit_burst: "0" 31 | atom_worker_systemd_restart_sec: "2" 32 | atom_worker_systemd_execstart_worker_extra_args: "--max-job-count=1 --max-mem-usage=200000" 33 | 34 | atom_pool_php_envs: 35 | ATOM_DEBUG_IP: "127.0.0.1" 36 | ATOM_READ_ONLY: "{% if atom_app_read_only|bool %}on{% else %}off{% endif %}" 37 | # PATH: "/usr/local/bin:/usr/bin:/bin" # Needed by Rocky9 to run which command (for pdf index, pdfinfo, etc) 38 | 39 | # 40 | # gearman role 41 | # 42 | 43 | gearman_queue_parameters: "--queue-type=builtin --job-retries=1" 44 | 45 | # 46 | # nginx role 47 | # 48 | 49 | nginx_configs: 50 | atom_backend: 51 | - upstream atom { 52 | server unix:/var/run/php-fpm.atom.sock; 53 | } 54 | 55 | nginx_http_params: 56 | - sendfile "off" # REquired when VirtualBox shared folders (vboxsf) do not support sendfile() properly, causing the Invalid Argument (22) error. 
57 | - tcp_nopush "on" 58 | - tcp_nodelay "on" 59 | - keepalive_timeout "65" 60 | - access_log "{{ nginx_log_dir }}/access.log" 61 | - error_log "{{ nginx_log_dir }}/error.log" 62 | - server_tokens off 63 | - types_hash_max_size 2048 64 | 65 | nginx_sites: 66 | atom: 67 | - listen 80 68 | - set $atom_path {{ atom_path }} 69 | - root $atom_path 70 | - server_name _ 71 | - client_max_body_size 72M 72 | - location ~* ^/(css|dist|js|images|plugins|vendor)/.*\.(css|gif|ico|jpg|js|map|pdf|png|svg|ttf|woff|woff2)$ { } 73 | - location ~* ^/(downloads)/.*\.(csv|html|pdf|rtf|xml|zip)$ { } 74 | - location ~ ^/(ead.dtd|favicon.ico|robots.txt|sitemap.*)$ { } 75 | - location / { 76 | try_files $uri /index.php?$args; 77 | if (-f $request_filename) { 78 | return 403; 79 | } 80 | } 81 | - location ~* /uploads/r/(.*)/conf/ { } 82 | - location ~* ^/uploads/r/(.*)$ { 83 | include /etc/nginx/fastcgi_params; 84 | set $index /index.php; 85 | fastcgi_param SCRIPT_FILENAME $document_root$index; 86 | fastcgi_param SCRIPT_NAME $index; 87 | fastcgi_pass atom; 88 | } 89 | - location ~ ^/private/(.*)$ { 90 | internal; 91 | alias $atom_path/$1; 92 | } 93 | - location ~ ^/(index|qubit_dev)\.php(/|$) { 94 | include /etc/nginx/fastcgi_params; 95 | fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name; 96 | fastcgi_split_path_info ^(.+\.php)(/.*)$; 97 | fastcgi_pass atom; 98 | } 99 | 100 | # elasticsearch role 101 | 102 | elasticsearch_version: "6.8.21" 103 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 104 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 105 | elasticsearch_heap_size: "1g" 106 | elasticsearch_max_open_files: "65535" 107 | elasticsearch_timezone: "UTC" 108 | elasticsearch_node_max_local_storage_nodes: "1" 109 | elasticsearch_index_mapper_dynamic: "true" 110 | elasticsearch_memory_bootstrap_mlockall: "true" 111 | elasticsearch_install_java: "true" 112 | #elasticsearch_thread_pools: 113 | # - "thread_pool.write.size: 2" 114 | # - "thread_pool.write.queue_size: 1000" 115 | elasticsearch_network_http_max_content_lengtht: 1024mb 116 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 117 | elasticsearch_max_locked_memory: "unlimited" 118 | elasticsearch_network_host: "127.0.0.1" 119 | 120 | # 121 | # percona role 122 | # 123 | 124 | mysql_version_major: "8" 125 | mysql_version_minor: "0" 126 | 127 | 128 | mysql_databases: 129 | - name: "{{ atom_config_db_name }}" 130 | collation: "utf8mb4_0900_ai_ci" 131 | encoding: "{{ atom_config_db_encoding }}" 132 | 133 | mysql_users: 134 | - name: "{{ atom_mysql_user_name }}" 135 | pass: "{{ atom_mysql_user_pass }}" 136 | priv: "{{ atom_mysql_user_priv }}" 137 | host: "{{ atom_mysql_user_host }}" 138 | 139 | mysql_optimizer_switch: "'block_nested_loop=off'" 140 | mysql_sql_mode: "ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION" 141 | mysql_root_password: "MYSQLROOTPASSWORD" 142 | mysql_bind_address: "0.0.0.0" 143 | mysql_default_authentication_plugin: "mysql_native_password" 144 | -------------------------------------------------------------------------------- /playbooks/atom-rocky9/vars-singlenode-qa.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # PLEASE NOTE THAT THE PASSWORD VALUES USED HERE ARE NOT SAFE 4 | 5 | # 6 | # atom role 7 | # 8 | 9 | atom_path: "/usr/share/nginx/atom" 10 | atom_repository_url: "https://github.com/artefactual/atom.git" 11 | atom_repository_version: "v2.9.0" 12 | atom_config_db_hostname: "127.0.0.1" 13 | atom_config_db_name: "atom" 14 | 
atom_config_db_username: "atom-user" 15 | atom_config_db_password: "ATOMPASSWORD" 16 | atom_config_db_port: "3306" 17 | atom_config_db_encoding: "utf8mb4" 18 | atom_es_host: "127.0.0.1" 19 | atom_es_port: "9200" 20 | atom_es_config_version: "2.9" 21 | atom_php_version: "83" 22 | atom_mysql_user_name: "{{ atom_config_db_username }}" 23 | atom_mysql_user_pass: "{{ atom_config_db_password }}" 24 | atom_mysql_user_priv: "atom.*:ALL,GRANT" 25 | atom_mysql_user_host: "%" 26 | atom_csrf_protection: "yes" 27 | atom_pool_listen_owner: "{{ nginx_user }}" 28 | atom_pool_listen_group: "{{ nginx_user }}" 29 | 30 | atom_worker_systemd_start_limit_burst: "0" 31 | atom_worker_systemd_restart_sec: "2" 32 | atom_worker_systemd_execstart_worker_extra_args: "--max-job-count=1 --max-mem-usage=200000" 33 | 34 | atom_pool_php_envs: 35 | ATOM_DEBUG_IP: "127.0.0.1" 36 | ATOM_READ_ONLY: "{% if atom_app_read_only|bool %}on{% else %}off{% endif %}" 37 | PATH: "/usr/local/bin:/usr/bin:/bin" # Needed by Rocky9 to run which command (for pdf index, pdfinfo, etc) 38 | 39 | # 40 | # gearman role 41 | # 42 | 43 | gearman_queue_parameters: "--queue-type=builtin --job-retries=1" 44 | 45 | # 46 | # nginx role 47 | # 48 | 49 | nginx_user: "nginx" 50 | nginx_group: "{{ nginx_user }}" 51 | 52 | nginx_configs: 53 | atom_backend: 54 | - upstream atom { 55 | server unix:/var/run/php-fpm.atom.sock; 56 | } 57 | 58 | nginx_http_params: 59 | - sendfile "off" # REquired when VirtualBox shared folders (vboxsf) do not support sendfile() properly, causing the Invalid Argument (22) error. 60 | - tcp_nopush "on" 61 | - tcp_nodelay "on" 62 | - keepalive_timeout "65" 63 | - access_log "{{ nginx_log_dir }}/access.log" 64 | - error_log "{{ nginx_log_dir }}/error.log" 65 | - server_tokens off 66 | - types_hash_max_size 2048 67 | 68 | 69 | nginx_sites: 70 | atom: 71 | - listen 80 72 | - set $atom_path {{ atom_path }} 73 | - root $atom_path 74 | - server_name _ 75 | - client_max_body_size 72M 76 | - location ~* ^/(css|dist|js|images|plugins|vendor)/.*\.(css|gif|ico|jpg|js|map|pdf|png|svg|ttf|woff|woff2)$ { } 77 | - location ~* ^/(downloads)/.*\.(csv|html|pdf|rtf|xml|zip)$ { } 78 | - location ~ ^/(ead.dtd|favicon.ico|robots.txt|sitemap.*)$ { } 79 | - location / { 80 | try_files $uri /index.php?$args; 81 | if (-f $request_filename) { 82 | return 403; 83 | } 84 | } 85 | - location ~* /uploads/r/(.*)/conf/ { } 86 | - location ~* ^/uploads/r/(.*)$ { 87 | include /etc/nginx/fastcgi_params; 88 | set $index /index.php; 89 | fastcgi_param SCRIPT_FILENAME $document_root$index; 90 | fastcgi_param SCRIPT_NAME $index; 91 | fastcgi_pass atom; 92 | } 93 | - location ~ ^/private/(.*)$ { 94 | internal; 95 | alias $atom_path/$1; 96 | } 97 | - location ~ ^/(index|qubit_dev)\.php(/|$) { 98 | include /etc/nginx/fastcgi_params; 99 | fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name; 100 | fastcgi_split_path_info ^(.+\.php)(/.*)$; 101 | fastcgi_pass atom; 102 | } 103 | 104 | # elasticsearch role 105 | 106 | elasticsearch_version: "6.8.21" 107 | elasticsearch_apt_java_package: "openjdk-8-jre-headless" 108 | elasticsearch_java_home: "/usr/lib/jvm/java-1.8.0-openjdk-amd64" 109 | elasticsearch_heap_size: "1g" 110 | elasticsearch_max_open_files: "65535" 111 | elasticsearch_timezone: "UTC" 112 | elasticsearch_node_max_local_storage_nodes: "1" 113 | elasticsearch_index_mapper_dynamic: "true" 114 | elasticsearch_memory_bootstrap_mlockall: "true" 115 | elasticsearch_install_java: "true" 116 | #elasticsearch_thread_pools: 117 | # - "thread_pool.write.size: 2" 118 | # 
- "thread_pool.write.queue_size: 1000" 119 | elasticsearch_network_http_max_content_lengtht: 1024mb 120 | elasticsearch_discovery_zen_ping_multicast_enabled: "false" 121 | elasticsearch_max_locked_memory: "unlimited" 122 | elasticsearch_network_host: "127.0.0.1" 123 | 124 | # 125 | # percona role 126 | # 127 | 128 | mysql_version_major: "8" 129 | mysql_version_minor: "0" 130 | 131 | 132 | mysql_databases: 133 | - name: "{{ atom_config_db_name }}" 134 | collation: "utf8mb4_0900_ai_ci" 135 | encoding: "{{ atom_config_db_encoding }}" 136 | 137 | mysql_users: 138 | - name: "{{ atom_mysql_user_name }}" 139 | pass: "{{ atom_mysql_user_pass }}" 140 | priv: "{{ atom_mysql_user_priv }}" 141 | host: "{{ atom_mysql_user_host }}" 142 | 143 | 144 | mysql_optimizer_switch: "'block_nested_loop=off'" 145 | mysql_sql_mode: "ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION" 146 | mysql_root_password: "MySQLROOT.PASSWORD.001$" 147 | mysql_bind_address: "0.0.0.0" 148 | mysql_default_authentication_plugin: "mysql_native_password" 149 | -------------------------------------------------------------------------------- /tests/dip-upload/README.md: -------------------------------------------------------------------------------- 1 | # DIP upload test 2 | 3 | ## Software requirements 4 | 5 | - Podman 6 | - crun >= 1.14.4 7 | - Python 3 8 | - curl 9 | 10 | ## Installing Ansible 11 | 12 | Create a virtual environment and activate it: 13 | 14 | ```shell 15 | python3 -m venv .venv 16 | source .venv/bin/activate 17 | ``` 18 | 19 | Install the Python requirements (these versions are compatible with 20 | symbolic links which are used in the the artefactual-atom role): 21 | 22 | ```shell 23 | python3 -m pip install -r requirements.txt 24 | ``` 25 | 26 | Install the playbook requirements: 27 | 28 | ```shell 29 | ansible-galaxy install -f -p roles/ -r requirements.yml 30 | ``` 31 | 32 | ## Starting the Compose environment 33 | 34 | Copy your SSH public key as the `ssh_pub_key` file next to the `Containerfile`: 35 | 36 | ```shell 37 | cp $HOME/.ssh/id_rsa.pub ssh_pub_key 38 | ``` 39 | 40 | Start the Compose services: 41 | 42 | ```shell 43 | podman-compose up --detach 44 | ``` 45 | 46 | ## Installing Archivematica 47 | 48 | Run the Archivematica installation playbook: 49 | 50 | ```shell 51 | export ANSIBLE_HOST_KEY_CHECKING=False 52 | export ANSIBLE_REMOTE_PORT=2222 53 | ansible-playbook -i localhost, archivematica.yml \ 54 | -u ubuntu \ 55 | -v 56 | ``` 57 | 58 | Add the `ubuntu` user to the `archivematica` group so it can copy AIPs 59 | from the shared directory: 60 | 61 | ```shell 62 | podman-compose exec --user root archivematica usermod -a -G archivematica ubuntu 63 | ``` 64 | 65 | Get the SSH public key of the `archivematica` user so we can use it when 66 | installing AtoM: 67 | 68 | ```shell 69 | AM_SSH_PUB_KEY=$(podman-compose exec --user archivematica archivematica cat /var/lib/archivematica/.ssh/id_rsa.pub) 70 | ``` 71 | 72 | ## Installing AtoM 73 | 74 | Run the AtoM installation playbook passing the `archivematica_ssh_pub_key` 75 | variable with the contents of `$AM_SSH_PUB_KEY`: 76 | 77 | ```shell 78 | export ANSIBLE_HOST_KEY_CHECKING=False 79 | export ANSIBLE_REMOTE_PORT=9222 80 | ansible-playbook -i localhost, atom.yml \ 81 | -u ubuntu \ 82 | -e "archivematica_ssh_pub_key='$AM_SSH_PUB_KEY'" \ 83 | -v 84 | ``` 85 | 86 | ## Testing the Archivematica installation 87 | 88 | Call an Archivematica API endpoint: 89 | 90 | ```shell 91 | curl --header "Authorization: ApiKey admin:this_is_the_am_api_key" 
http://localhost:8000/api/processing-configuration/ 92 | ``` 93 | 94 | Call a Storage Service API endpoint: 95 | 96 | ```shell 97 | curl --header "Authorization: ApiKey admin:this_is_the_ss_api_key" http://localhost:8001/api/v2/pipeline/ 98 | ``` 99 | 100 | ## Testing the AtoM installation 101 | 102 | Call an AtoM API endpoint: 103 | 104 | ```shell 105 | curl --header "REST-API-Key: this_is_the_atom_dip_upload_api_key" http://localhost:9000/index.php/api/informationobjects 106 | ``` 107 | 108 | ## Testing DIP upload 109 | 110 | Create a processing configuration for DIP upload: 111 | 112 | ```shell 113 | podman-compose exec --user archivematica archivematica cp /var/archivematica/sharedDirectory/sharedMicroServiceTasksConfigs/processingMCPConfigs/automatedProcessingMCP.xml /var/archivematica/sharedDirectory/sharedMicroServiceTasksConfigs/processingMCPConfigs/dipuploadProcessingMCP.xml 114 | ``` 115 | 116 | Update the DIP upload processing configuration: 117 | 118 | ```shell 119 | # Change 'Normalize for preservation' to 'Normalize for preservation and access' 120 | podman-compose exec --user archivematica archivematica sed --in-place 's|612e3609-ce9a-4df6-a9a3-63d634d2d934|b93cecd4-71f2-4e28-bc39-d32fd62c5a94|g' /var/archivematica/sharedDirectory/sharedMicroServiceTasksConfigs/processingMCPConfigs/dipuploadProcessingMCP.xml 121 | # Change 'Do not upload DIP' to 'Upload DIP to AtoM/Binder' 122 | podman-compose exec --user archivematica archivematica sed --in-place 's|6eb8ebe7-fab3-4e4c-b9d7-14de17625baa|0fe9842f-9519-4067-a691-8a363132ae24|g' /var/archivematica/sharedDirectory/sharedMicroServiceTasksConfigs/processingMCPConfigs/dipuploadProcessingMCP.xml 123 | ``` 124 | 125 | Import the Atom sample data: 126 | 127 | ```shell 128 | podman-compose exec --user www-data --workdir /usr/share/nginx/atom/ atom php -d memory_limit=-1 symfony csv:import /usr/share/nginx/atom/lib/task/import/example/isad/example_information_objects_isad.csv 129 | podman-compose exec --user www-data --workdir /usr/share/nginx/atom/ atom php -d memory_limit=-1 symfony propel:build-nested-set 130 | podman-compose exec --user www-data --workdir /usr/share/nginx/atom/ atom php -d memory_limit=-1 symfony cc 131 | podman-compose exec --user www-data --workdir /usr/share/nginx/atom/ atom php -d memory_limit=-1 symfony search:populate 132 | ``` 133 | 134 | Start a transfer and upload the DIP to the sample archival description: 135 | 136 | ```shell 137 | curl \ 138 | --header "Authorization: ApiKey admin:this_is_the_am_api_key" \ 139 | --request POST \ 140 | --data "{ \ 141 | \"name\": \"dip-upload-test\", \ 142 | \"path\": \"$(echo -n '/home/ubuntu/archivematica-sampledata/SampleTransfers/DemoTransferCSV' | base64 -w 0)\", \ 143 | \"type\": \"standard\", \ 144 | \"processing_config\": \"dipupload\", \ 145 | \"access_system_id\": \"example-item\" \ 146 | }" \ 147 | http://localhost:8000/api/v2beta/package 148 | ``` 149 | 150 | Wait for the transfer to finish: 151 | 152 | ```shell 153 | sleep 120 154 | ``` 155 | 156 | Verify a digital object was uploaded and attached to the sample archival description: 157 | 158 | ```shell 159 | curl \ 160 | --header "REST-API-Key: this_is_the_atom_dip_upload_api_key" \ 161 | --silent \ 162 | http://localhost:9000/index.php/api/informationobjects/beihai-guanxi-china-1988 | python3 -m json.tool | grep '"parent": "example-item"' 163 | ``` 164 | -------------------------------------------------------------------------------- /tests/dip-upload/atom.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - hosts: "all" 3 | 4 | pre_tasks: 5 | 6 | - include_vars: "atom-vars.yml" 7 | tags: 8 | - "always" 9 | 10 | - name: "Install acl package" 11 | package: 12 | name: "acl" 13 | state: "present" 14 | become: "yes" 15 | 16 | roles: 17 | 18 | - role: "artefactual.elasticsearch" 19 | become: "yes" 20 | 21 | - role: "artefactual.percona" 22 | become: "yes" 23 | 24 | - role: "artefactual.memcached" 25 | become: "yes" 26 | 27 | - role: "artefactual.gearman" 28 | become: "yes" 29 | 30 | - role: "artefactual.nginx" 31 | become: "yes" 32 | 33 | - role: "artefactual-atom" 34 | become: "yes" 35 | tags: 36 | - "atom" 37 | 38 | post_tasks: 39 | 40 | - name: "Define atom_revision_path" 41 | set_fact: 42 | atom_revision_path: "{{ atom_path + '/' + atom_revision_directory_latest_symlink_dir|default('src') if (atom_revision_directory is defined and atom_revision_directory|bool) else atom_path }}" 43 | 44 | - name: "Configure AtoM DIP Upload in AtoM host" 45 | block: 46 | - name: "Create archivematica user in AtoM server" 47 | user: 48 | name: "archivematica" 49 | group: "users" 50 | system: True 51 | home: "/home/archivematica" 52 | createhome: True 53 | generate_ssh_key: True 54 | shell: "/bin/bash" 55 | 56 | - name: "Set /home/archivematica permissions to 755" 57 | file: 58 | path: "/home/archivematica" 59 | mode: "0755" 60 | state: directory 61 | owner: "archivematica" 62 | group: "users" 63 | 64 | - name: "Enable AtoM plug-ins" 65 | shell: "php symfony tools:atom-plugins add {{ item }}" 66 | args: 67 | chdir: "{{ atom_revision_path }}" 68 | with_items: 69 | - "qtSwordPlugin" 70 | - "arRestApiPlugin" 71 | - "arStorageServicePlugin" 72 | 73 | - name: "Get nginx user from AtoM (delegated) VM" 74 | set_fact: 75 | __atom_user: "{% if ansible_os_family in ['RedHat', 'Rocky'] %}nginx{% else %}www-data{% endif %}" 76 | 77 | - name: "Clear AtoM site cache" 78 | become_user: "{{ atom_user | default(__atom_user) }}" 79 | command: "{{ item }}" 80 | args: 81 | chdir: "{{ atom_revision_path }}" 82 | with_items: 83 | - "php symfony cc" 84 | 85 | - name: "Restart services" 86 | service: 87 | daemon_reload: yes 88 | name: "{{ item }}" 89 | state: restarted 90 | with_items: 91 | - "atom-worker" 92 | 93 | - name: "Authorize archivematica SSH key" 94 | authorized_key: 95 | user: "archivematica" 96 | state: "present" 97 | key: "{{ archivematica_ssh_pub_key }}" 98 | 99 | - name: "List MySQL AtoM users" 100 | command: mysql {{ atom_dip_upload_atom_database }} -Ns -e "select id from user where username='{{ atom_dip_upload_atom_user }}' limit 1;" 101 | register: atom_dip_user_id 102 | 103 | - name: "Get property id when {{ atom_dip_upload_atom_user }} AtoM user already exists" 104 | command: mysql {{ atom_dip_upload_atom_database }} -Ns -e "select id from property where name='RestApiKey' and object_id='{{ atom_dip_user_id.stdout }}' limit 1;" 105 | register: atom_dip_property_id 106 | when: atom_dip_user_id.stdout != "" 107 | 108 | - name: "Update Rest API key when {{ atom_dip_upload_atom_user }} AtoM user already exists" 109 | command: mysql {{ atom_dip_upload_atom_database }} -Ns -e "UPDATE property_i18n SET value='{{ atom_dip_upload_atom_api_key }}' where id='{{ atom_dip_property_id.stdout }}';" 110 | when: atom_dip_property_id.stdout is defined and atom_dip_property_id.stdout != "" 111 | 112 | - name: "Create {{ atom_dip_upload_atom_user }} AtoM user" 113 | shell: "php symfony tools:add-superuser --email='{{ atom_dip_upload_atom_email 
}}' --password='{{ atom_dip_upload_atom_password }}' {{ atom_dip_upload_atom_user }}" 114 | args: 115 | chdir: "{{ atom_revision_path}}" 116 | when: atom_dip_user_id.stdout == "" 117 | 118 | - name: "List MySQL AtoM users again" 119 | command: mysql {{ atom_dip_upload_atom_database }} -Ns -e "select id from user where username='{{ atom_dip_upload_atom_user }}' limit 1;" 120 | register: atom_dip_user_id_new_user 121 | when: atom_dip_user_id.stdout == "" 122 | 123 | - name: "Create Rest API key for new user: {{ atom_dip_upload_atom_user }}" 124 | command: mysql {{ atom_dip_upload_atom_database }} -Ns -e "INSERT INTO property (object_id, name, source_culture,id) VALUES( {{ atom_dip_user_id_new_user.stdout }}, 'RestApiKey', 'en', NULL); INSERT INTO property_i18n (value, id, culture) VALUES( '{{ atom_dip_upload_atom_api_key }}', LAST_INSERT_ID(), 'en');" 125 | when: atom_dip_user_id.stdout == "" and atom_dip_user_id_new_user.stdout != "" 126 | 127 | become: true 128 | 129 | - name: "Configure SWORD deposit in AtoM host" 130 | block: 131 | - name: "Create SWORD deposit directory" 132 | file: 133 | path: "{{ atom_sword_deposit_dir }}" 134 | state: "directory" 135 | mode: 0770 136 | owner: "archivematica" 137 | group: "{{ nginx_group }}" 138 | 139 | - name: "Install acl package" 140 | package: 141 | name: "acl" 142 | state: "present" 143 | 144 | - name: "Configure ACL on SWORD deposit directory" 145 | acl: 146 | default: "yes" # -d option 147 | etype: "user" 148 | entity: "{{ nginx_user }}" 149 | path: "{{ atom_sword_deposit_dir }}" 150 | permissions: "rwX" 151 | state: "present" 152 | 153 | - name: "Change SWORD deposit in AtoM database" 154 | become_user: "{{ nginx_user }}" 155 | command: "php symfony tools:settings set sword_deposit_dir {{ atom_sword_deposit_dir }}" 156 | args: 157 | chdir: "{{ atom_path + '/' + atom_revision_directory_latest_symlink_dir|default('src') if (atom_revision_directory is defined and atom_revision_directory|bool) else atom_path }}" 158 | 159 | become: true 160 | -------------------------------------------------------------------------------- /playbooks/archivematica-bionic/.Jenkinsfile: -------------------------------------------------------------------------------- 1 | node { 2 | timestamps { 3 | stage('Get code') { 4 | // If environment variables are defined, honour them 5 | env.AM_BRANCH = sh(script: 'echo ${AM_BRANCH:-"stable/1.14.x"}', returnStdout: true).trim() 6 | env.AM_VERSION = sh(script: 'echo ${AM_VERSION:-"1.14"}', returnStdout: true).trim() 7 | env.SS_BRANCH = sh(script: 'echo ${SS_BRANCH:-"stable/0.20.x"}', returnStdout: true).trim() 8 | env.DEPLOYPUB_BRANCH = sh(script: 'echo ${DEPLOYPUB_BRANCH:-"master"}', returnStdout: true).trim() 9 | env.AMAUAT_BRANCH = sh(script: 'echo ${AMAUAT_BRANCH:-"master"}', returnStdout: true).trim() 10 | env.DISPLAY = sh(script: 'echo ${DISPLAY:-:50}', returnStdout: true).trim() 11 | env.WEBDRIVER = sh(script: 'echo ${WEBDRIVER:-"Firefox"}', returnStdout: true).trim() 12 | env.ACCEPTANCE_TAGS = sh(script: 'echo ${ACCEPTANCE_TAGS:-"uuids-dirs mo-aip-reingest icc tpc picc aip-encrypt-mirror"}', returnStdout: true).trim() 13 | env.VAGRANT_PROVISION = sh(script: 'echo ${VAGRANT_PROVISION:-"true"}', returnStdout: true).trim() 14 | env.VAGRANT_VAGRANTFILE = sh(script: 'echo ${VAGRANT_VAGRANTFILE:-Vagrantfile.openstack}', returnStdout: true).trim() 15 | env.OS_IMAGE = sh(script: 'echo ${OS_IMAGE:-"Ubuntu 18.04"}', returnStdout: true).trim() 16 | env.DESTROY_VM = sh(script: 'echo ${DESTROY_VM:-"true"}', returnStdout: true).trim() 
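// Each of the sh(script: 'echo ${VAR:-"default"}', returnStdout: true).trim()
// calls above relies on shell parameter expansion: it echoes the externally
// supplied environment value when one is set and falls back to the quoted
// default otherwise; .trim() strips the trailing newline captured from echo.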
17 | // Set build name 18 | currentBuild.displayName = "#${BUILD_NUMBER} AM:${AM_BRANCH} SS:${SS_BRANCH}" 19 | currentBuild.description = "OS: Ubuntu 18.04<br>
Tests: ${ACCEPTANCE_TAGS}" 20 | 21 | git branch: env.AM_BRANCH, poll: false, 22 | url: 'https://github.com/artefactual/archivematica' 23 | git branch: env.SS_BRANCH, poll: false, 24 | url: 'https://github.com/artefactual/archivematica-storage-service' 25 | 26 | checkout([$class: 'GitSCM', 27 | branches: [[name: env.DEPLOYPUB_BRANCH]], 28 | doGenerateSubmoduleConfigurations: false, 29 | extensions: 30 | [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'deploy-pub']], 31 | submoduleCfg: [], 32 | userRemoteConfigs: [[url: 'https://github.com/artefactual/deploy-pub']]]) 33 | 34 | } 35 | 36 | stage ('Create vm') { 37 | sh ''' 38 | echo Building Archivematica $AM_BRANCH and Storage Service $SS_BRANCH 39 | cd deploy-pub/playbooks/archivematica-bionic 40 | source ~/.secrets/openrc.sh 41 | rm -rf roles/ 42 | ansible-galaxy install -f -p roles -r requirements.yml 43 | export ANSIBLE_ARGS="-e archivematica_src_am_version=${AM_BRANCH} \ 44 | archivematica_src_ss_version=${SS_BRANCH} \ 45 | archivematica_src_configure_am_api_key="HERE_GOES_THE_AM_API_KEY" \ 46 | archivematica_src_configure_ss_api_key="HERE_GOES_THE_SS_API_KEY" \ 47 | archivematica_src_reset_am_all=True \ 48 | archivematica_src_reset_ss_db=True" 49 | vagrant up --no-provision 50 | cat ~/.ssh/authorized_keys | vagrant ssh -c "cat >> .ssh/authorized_keys" 51 | 52 | if $VAGRANT_PROVISION; then 53 | vagrant provision 54 | vagrant ssh -c "sudo adduser ubuntu archivematica" 55 | fi 56 | vagrant ssh-config | tee >( grep HostName | awk '{print $2}' > $WORKSPACE/.host) \ 57 | >( grep User | awk '{print $2}' > $WORKSPACE/.user ) \ 58 | >( grep IdentityFile | awk '{print $2}' > $WORKSPACE/.key ) 59 | 60 | ''' 61 | 62 | env.SERVER = sh(script: "cat .host", returnStdout: true).trim() 63 | env.USER = sh(script: "cat .user", returnStdout: true).trim() 64 | env.KEY = sh(script: "cat .key", returnStdout: true).trim() 65 | } 66 | 67 | stage('Configure acceptance tests') { 68 | git branch: env.AMAUAT_BRANCH, url: 'https://github.com/artefactual-labs/archivematica-acceptance-tests' 69 | properties([disableConcurrentBuilds(), 70 | gitLabConnection(''), 71 | [$class: 'RebuildSettings', 72 | autoRebuild: false, 73 | rebuildDisabled: false], 74 | pipelineTriggers([pollSCM('*/5 * * * *')])]) 75 | 76 | 77 | sh ''' 78 | virtualenv -p python3 env 79 | env/bin/pip install -r requirements.txt 80 | env/bin/pip install behave2cucumber 81 | # Launch vnc server 82 | VNCPID=$(ps aux | grep Xtig[h] | grep ${DISPLAY} | awk '{print $2}') 83 | if [ "x$VNCPID" == "x" ]; then 84 | tightvncserver -geometry 1920x1080 ${DISPLAY} 85 | fi 86 | 87 | mkdir -p results/ 88 | rm -rf results/* 89 | ''' 90 | } 91 | 92 | stage('Run tests') { 93 | sh ''' 94 | echo "Running $ACCEPTANCE_TAGS" 95 | for i in $ACCEPTANCE_TAGS; do 96 | case "$i" in 97 | premis-events) TIMEOUT=60m;; 98 | ipc) TIMEOUT=60m;; 99 | aip-encrypt) TIMEOUT=45m;; 100 | *) TIMEOUT=15m;; 101 | esac 102 | timeout $TIMEOUT env/bin/behave \ 103 | --tags=$i \ 104 | --no-skipped \ 105 | -D am_version=${AM_VERSION} \ 106 | -D driver_name=${WEBDRIVER} \ 107 | -D am_username=admin \ 108 | -D am_password=archivematica \ 109 | -D am_url=http://${SERVER}/ \ 110 | -D ss_username=admin \ 111 | -D ss_password=archivematica \ 112 | -D ss_api_key="HERE_GOES_THE_SS_API_KEY" \ 113 | -D ss_url=http://${SERVER}:8000/ \ 114 | -D home=${USER} \ 115 | -D server_user=${USER} \ 116 | -D transfer_source_path=${USER}/archivematica-sampledata/TestTransfers/acceptance-tests \ 117 | -D ssh_identity_file=${KEY} \ 118 | --junit 
--junit-directory=results/ -v \ 119 | -f=json -o=results/output-$i.json \ 120 | --no-skipped || true 121 | 122 | env/bin/python -m behave2cucumber -i results/output-$i.json -o results/cucumber-$i.json || true 123 | done 124 | ''' 125 | } 126 | 127 | stage('Archive results') { 128 | junit allowEmptyResults: false, keepLongStdio: true, testResults: 'results/*.xml' 129 | cucumber 'results/cucumber-*.json' 130 | } 131 | 132 | stage('Cleanup') { 133 | sh ''' 134 | # Kill vnc server 135 | VNCPID=$(ps aux | grep Xtig[h] | grep ${DISPLAY} | awk '{print $2}') 136 | if [ "x$VNCPID" != "x" ]; then 137 | kill $VNCPID 138 | fi 139 | # Remove vm 140 | if $DESTROY_VM; then 141 | cd deploy-pub/playbooks/archivematica-bionic/ 142 | source ~/.secrets/openrc.sh 143 | vagrant destroy 144 | fi 145 | ''' 146 | } 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /playbooks/archivematica-jammy/.Jenkinsfile: -------------------------------------------------------------------------------- 1 | node { 2 | timestamps { 3 | stage('Get code') { 4 | // If environment variables are defined, honour them 5 | env.AM_BRANCH = sh(script: 'echo ${AM_BRANCH:-"stable/1.17.x"}', returnStdout: true).trim() 6 | env.AM_VERSION = sh(script: 'echo ${AM_VERSION:-"1.17"}', returnStdout: true).trim() 7 | env.SS_BRANCH = sh(script: 'echo ${SS_BRANCH:-"stable/0.23.x"}', returnStdout: true).trim() 8 | env.DEPLOYPUB_BRANCH = sh(script: 'echo ${DEPLOYPUB_BRANCH:-"master"}', returnStdout: true).trim() 9 | env.AMAUAT_BRANCH = sh(script: 'echo ${AMAUAT_BRANCH:-"master"}', returnStdout: true).trim() 10 | env.DISPLAY = sh(script: 'echo ${DISPLAY:-:50}', returnStdout: true).trim() 11 | env.WEBDRIVER = sh(script: 'echo ${WEBDRIVER:-"Firefox"}', returnStdout: true).trim() 12 | env.ACCEPTANCE_TAGS = sh(script: 'echo ${ACCEPTANCE_TAGS:-"uuids-dirs mo-aip-reingest icc tpc picc aip-encrypt-mirror"}', returnStdout: true).trim() 13 | env.VAGRANT_PROVISION = sh(script: 'echo ${VAGRANT_PROVISION:-"true"}', returnStdout: true).trim() 14 | env.VAGRANT_VAGRANTFILE = sh(script: 'echo ${VAGRANT_VAGRANTFILE:-Vagrantfile.openstack}', returnStdout: true).trim() 15 | env.OS_IMAGE = sh(script: 'echo ${OS_IMAGE:-"Ubuntu 22.04"}', returnStdout: true).trim() 16 | env.DESTROY_VM = sh(script: 'echo ${DESTROY_VM:-"true"}', returnStdout: true).trim() 17 | // Set build name 18 | currentBuild.displayName = "#${BUILD_NUMBER} AM:${AM_BRANCH} SS:${SS_BRANCH}" 19 | currentBuild.description = "OS: Ubuntu 22.04
Tests: ${ACCEPTANCE_TAGS}" 20 | 21 | git branch: env.AM_BRANCH, poll: false, 22 | url: 'https://github.com/artefactual/archivematica' 23 | git branch: env.SS_BRANCH, poll: false, 24 | url: 'https://github.com/artefactual/archivematica-storage-service' 25 | 26 | checkout([$class: 'GitSCM', 27 | branches: [[name: env.DEPLOYPUB_BRANCH]], 28 | doGenerateSubmoduleConfigurations: false, 29 | extensions: 30 | [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'deploy-pub']], 31 | submoduleCfg: [], 32 | userRemoteConfigs: [[url: 'https://github.com/artefactual/deploy-pub']]]) 33 | 34 | } 35 | 36 | stage ('Create vm') { 37 | sh ''' 38 | echo Building Archivematica $AM_BRANCH and Storage Service $SS_BRANCH 39 | cd deploy-pub/playbooks/archivematica-bionic 40 | source ~/.secrets/openrc.sh 41 | rm -rf roles/ 42 | ansible-galaxy install -f -p roles -r requirements.yml 43 | export ANSIBLE_ARGS="-e archivematica_src_am_version=${AM_BRANCH} \ 44 | archivematica_src_ss_version=${SS_BRANCH} \ 45 | archivematica_src_configure_am_api_key="HERE_GOES_THE_AM_API_KEY" \ 46 | archivematica_src_configure_ss_api_key="HERE_GOES_THE_SS_API_KEY" \ 47 | archivematica_src_reset_am_all=True \ 48 | archivematica_src_reset_ss_db=True" 49 | vagrant up --no-provision 50 | cat ~/.ssh/authorized_keys | vagrant ssh -c "cat >> .ssh/authorized_keys" 51 | 52 | if $VAGRANT_PROVISION; then 53 | vagrant provision 54 | vagrant ssh -c "sudo adduser ubuntu archivematica" 55 | fi 56 | vagrant ssh-config | tee >( grep HostName | awk '{print $2}' > $WORKSPACE/.host) \ 57 | >( grep User | awk '{print $2}' > $WORKSPACE/.user ) \ 58 | >( grep IdentityFile | awk '{print $2}' > $WORKSPACE/.key ) 59 | 60 | ''' 61 | 62 | env.SERVER = sh(script: "cat .host", returnStdout: true).trim() 63 | env.USER = sh(script: "cat .user", returnStdout: true).trim() 64 | env.KEY = sh(script: "cat .key", returnStdout: true).trim() 65 | } 66 | 67 | stage('Configure acceptance tests') { 68 | git branch: env.AMAUAT_BRANCH, url: 'https://github.com/artefactual-labs/archivematica-acceptance-tests' 69 | properties([disableConcurrentBuilds(), 70 | gitLabConnection(''), 71 | [$class: 'RebuildSettings', 72 | autoRebuild: false, 73 | rebuildDisabled: false], 74 | pipelineTriggers([pollSCM('*/5 * * * *')])]) 75 | 76 | 77 | sh ''' 78 | virtualenv -p python3 env 79 | env/bin/pip install -r requirements.txt 80 | env/bin/pip install behave2cucumber 81 | # Launch vnc server 82 | VNCPID=$(ps aux | grep Xtig[h] | grep ${DISPLAY} | awk '{print $2}') 83 | if [ "x$VNCPID" == "x" ]; then 84 | tightvncserver -geometry 1920x1080 ${DISPLAY} 85 | fi 86 | 87 | mkdir -p results/ 88 | rm -rf results/* 89 | ''' 90 | } 91 | 92 | stage('Run tests') { 93 | sh ''' 94 | echo "Running $ACCEPTANCE_TAGS" 95 | for i in $ACCEPTANCE_TAGS; do 96 | case "$i" in 97 | premis-events) TIMEOUT=60m;; 98 | ipc) TIMEOUT=60m;; 99 | aip-encrypt) TIMEOUT=45m;; 100 | *) TIMEOUT=15m;; 101 | esac 102 | timeout $TIMEOUT env/bin/behave \ 103 | --tags=$i \ 104 | --no-skipped \ 105 | -D am_version=${AM_VERSION} \ 106 | -D driver_name=${WEBDRIVER} \ 107 | -D am_username=admin \ 108 | -D am_password=archivematica \ 109 | -D am_url=http://${SERVER}/ \ 110 | -D ss_username=admin \ 111 | -D ss_password=archivematica \ 112 | -D ss_api_key="HERE_GOES_THE_SS_API_KEY" \ 113 | -D ss_url=http://${SERVER}:8000/ \ 114 | -D home=${USER} \ 115 | -D server_user=${USER} \ 116 | -D transfer_source_path=${USER}/archivematica-sampledata/TestTransfers/acceptance-tests \ 117 | -D ssh_identity_file=${KEY} \ 118 | --junit 
--junit-directory=results/ -v \ 119 | -f=json -o=results/output-$i.json \ 120 | --no-skipped || true 121 | 122 | env/bin/python -m behave2cucumber -i results/output-$i.json -o results/cucumber-$i.json || true 123 | done 124 | ''' 125 | } 126 | 127 | stage('Archive results') { 128 | junit allowEmptyResults: false, keepLongStdio: true, testResults: 'results/*.xml' 129 | cucumber 'results/cucumber-*.json' 130 | } 131 | 132 | stage('Cleanup') { 133 | sh ''' 134 | # Kill vnc server 135 | VNCPID=$(ps aux | grep Xtig[h] | grep ${DISPLAY} | awk '{print $2}') 136 | if [ "x$VNCPID" != "x" ]; then 137 | kill $VNCPID 138 | fi 139 | # Remove vm 140 | if $DESTROY_VM; then 141 | cd deploy-pub/playbooks/archivematica-bionic/ 142 | source ~/.secrets/openrc.sh 143 | vagrant destroy 144 | fi 145 | ''' 146 | } 147 | } 148 | } 149 | --------------------------------------------------------------------------------