├── .env_sample ├── .gitignore ├── README.md ├── config ├── schema.xml └── schemas │ ├── CESSDA_CMM.tsv │ ├── cmdi-oral-history.tsv │ └── cv_voc.json ├── cvmanager ├── Dockerfile ├── README.md ├── app.py ├── requirements.txt └── templates │ ├── datasetAutocompleteFieldForEditFragment.xhtml │ ├── dataverse_template.xhtml │ └── metadataFragment.xhtml ├── demostart.sh ├── distros ├── BeFAIR │ ├── README.md │ ├── configs │ │ └── Readme.md │ ├── configuration │ │ └── Readme.md │ └── docker-compose.yml ├── C19M │ ├── docker-compose.yml │ ├── rules-infra.toml │ └── traefik.toml ├── CM2022 │ ├── did │ │ └── docker-compose.yaml │ ├── doccano-docker-compose.yml │ ├── docker-compose.yml │ ├── rules-infra.toml │ ├── setup.sh │ ├── skosmos-docker-compose.yml │ ├── stop-all.sh │ ├── superset-docker-compose.yml │ ├── traefik.toml │ └── weblate-docker-compose.yml ├── ODISSEI │ ├── Readme.md │ ├── docker-compose.yml │ ├── rules-infra.toml │ └── traefik.toml ├── Readme.md ├── Superset │ ├── Readme.md │ ├── config │ │ ├── config-docker-compose.ttl │ │ └── skosmos.ttl │ └── docker-compose.yml ├── docker-compose-ssl.yml ├── docker-compose.yml ├── odissei-init.sh ├── test-docker.dataverse.no │ ├── configs │ │ ├── http-ssl-test.conf │ │ ├── http-ssl.conf │ │ └── httpd.conf │ ├── docker-compose.yaml │ └── init.d │ │ ├── 006-s3-aws-storage.sh │ │ ├── 01-persistent-id.sh │ │ ├── 010-languages.sh │ │ ├── 010-mailrelay-set.sh │ │ ├── 011-local-storage.sh │ │ ├── 012-minio-bucket1.sh │ │ ├── 013-minio-bucket2.sh │ │ ├── 02-controlled-voc.sh │ │ ├── 03-doi-set.sh │ │ ├── 04-setdomain.sh │ │ ├── 05-reindex.sh │ │ ├── 07-previewers.sh │ │ ├── 08-federated-login.sh │ │ ├── 1001-webhooks.sh │ │ └── 1002-custom-metadata.sh └── vanilla │ ├── .env │ ├── .env_sample │ ├── config │ ├── schema.xml │ └── schemas │ │ ├── CESSDA_CMM.tsv │ │ ├── cmdi-oral-history.tsv │ │ └── cv_voc.json │ ├── docker-compose-dev.yml │ ├── docker-compose.yml │ ├── init.d │ ├── 006-s3-aws-storage.sh │ ├── 01-persistent-id.sh │ 
├── 010-languages.sh │ ├── 010-mailrelay-set.sh │ ├── 011-local-storage.sh │ ├── 012-minio-bucket1.sh │ ├── 013-minio-bucket2.sh │ ├── 02-controlled-voc.sh │ ├── 03-doi-set.sh │ ├── 04-setdomain.sh │ ├── 05-reindex.sh │ ├── 07-previewers.sh │ ├── 08-federated-login.sh │ ├── 1001-webhooks.sh │ └── 1002-custom-metadata.sh │ └── secrets │ ├── admin │ └── password │ ├── api │ └── key │ ├── db │ └── password │ ├── db_asadmin │ ├── doi │ └── password │ └── doi_asadmin ├── docker-compose-local.yml ├── docker-compose.yml ├── init.d ├── 006-s3-aws-storage.sh ├── 01-persistent-id.sh ├── 010-languages.sh ├── 010-mailrelay-set.sh ├── 011-local-storage.sh ├── 012-minio-bucket1.sh ├── 013-minio-bucket2.sh ├── 02-controlled-voc.sh ├── 03-doi-set.sh ├── 04-setdomain.sh ├── 05-reindex.sh ├── 07-previewers.sh ├── 08-federated-login.sh ├── 1001-webhooks.sh └── 1002-custom-metadata.sh ├── kubernetes ├── README.md └── infrastructure │ ├── docker │ ├── letsencrypt │ │ ├── Dockerfile │ │ ├── entrypoint.sh │ │ ├── google-cloud-sdk.repo │ │ ├── kubernetes.repo │ │ └── secret-patch-template.json │ └── mailrelay │ │ ├── Dockerfile │ │ └── config │ │ ├── postfix │ │ ├── main.cf │ │ └── sasl_passwd │ │ ├── rsyslog │ │ ├── listen.conf │ │ └── rsyslog.conf │ │ └── supervisor │ │ └── supervisord.conf │ └── k8s │ ├── deployments │ ├── dataverse-dev-bridge.yaml │ ├── dataverse-dev-dv-en.yaml │ ├── dataverse-dev-postgres.yaml │ └── dataverse-dev-solr.yaml │ ├── ingress │ └── dataverse-dev-ingress.yml │ ├── letsencrypt │ ├── dataverse-dev-letsencrypt-secret.yml │ └── dataverse-dev-letsencrypt.yml │ ├── mailrelay │ ├── dataverse-dev-mailrelay-deployment.yaml │ └── dataverse-dev-mailrelay-service.yaml │ ├── namespace │ └── dataverse-dev-namespace.yaml │ ├── persistentvolume │ └── dataverse-dev-pv-pvc.yaml │ └── services │ ├── dataverse-dev-bridge.yaml │ ├── dataverse-dev-dataverse-en.yaml │ ├── dataverse-dev-postgres.yaml │ └── dataverse-dev-solr.yaml ├── postgresql ├── Dockerfile ├── init-postgres ├── 
init.sql ├── install.sh ├── pg_hba.conf ├── testdata │ ├── doc │ │ └── sphinx-guides │ │ │ └── source │ │ │ └── _static │ │ │ └── util │ │ │ ├── createsequence.sql │ │ │ └── pg8-createsequence-prep.sql │ ├── jhove.conf │ ├── schema.xml │ └── scripts │ │ ├── api │ │ ├── bin │ │ │ └── list-dvs │ │ ├── data-deposit │ │ │ ├── create-dataset │ │ │ ├── create-dataset-805-rights-license │ │ │ ├── create-dataset-894-invisible-character │ │ │ ├── create-dataset-899-expansion │ │ │ ├── data │ │ │ │ ├── atom-entry-study-894-invisible-character.xml │ │ │ │ ├── atom-entry-study-899-expansion.xml │ │ │ │ └── example.zip │ │ │ ├── dataset-field │ │ │ ├── delete-dataset │ │ │ ├── delete-file │ │ │ ├── edit-dataset-1430-edit-subject │ │ │ ├── edit-dataset-805-rights-license │ │ │ ├── get │ │ │ ├── list-datasets │ │ │ ├── pipeline │ │ │ ├── publish-dataset │ │ │ ├── publish-dataverse │ │ │ ├── replace-dataset-metadata │ │ │ ├── service-document │ │ │ ├── show-atom-entry │ │ │ ├── show-files │ │ │ ├── show-statement │ │ │ ├── unsupported-download-files │ │ │ └── upload-file │ │ ├── data │ │ │ ├── authentication-providers │ │ │ │ ├── base-oauth.json │ │ │ │ ├── builtin.json │ │ │ │ ├── echo-dignified.json │ │ │ │ └── orcid-sandbox.json │ │ │ ├── dataset-bad-missingInitialVersion.json │ │ │ ├── dataset-create-new.json │ │ │ ├── dataset-create-new2.json │ │ │ ├── dataset-create-new3.json │ │ │ ├── dataset-updated-version.json │ │ │ ├── dataset-updated-version2.json │ │ │ ├── dataset-version.json │ │ │ ├── dv-pete-sub-normal.json │ │ │ ├── dv-pete-sub-restricted.json │ │ │ ├── dv-pete-sub-secret.json │ │ │ ├── dv-pete-top.json │ │ │ ├── dv-root.json │ │ │ ├── dv-uma-deletable.json │ │ │ ├── dv-uma-sub1.json │ │ │ ├── dv-uma-sub2.json │ │ │ ├── dv-uma-top.json │ │ │ ├── explicit-group-2nd.json │ │ │ ├── explicit-group-first-edit.json │ │ │ ├── explicit-group-first.json │ │ │ ├── ipGroup-all-ipv4.json │ │ │ ├── ipGroup-all.json │ │ │ ├── ipGroup-localhost.json │ │ │ ├── 
ipGroup-single-IPv4.json │ │ │ ├── ipGroup-single-IPv6.json │ │ │ ├── ipGroup1.json │ │ │ ├── ipGroup2.json │ │ │ ├── ipGroupDuplicate-v1.json │ │ │ ├── ipGroupDuplicate-v2.json │ │ │ ├── metadatablocks │ │ │ │ ├── astrophysics.tsv │ │ │ │ ├── biomedical.tsv │ │ │ │ ├── citation.tsv │ │ │ │ ├── customARCS.tsv │ │ │ │ ├── customCHIA.tsv │ │ │ │ ├── customDigaai.tsv │ │ │ │ ├── customGSD.tsv │ │ │ │ ├── customMRA.tsv │ │ │ │ ├── customPSI.tsv │ │ │ │ ├── customPSRI.tsv │ │ │ │ ├── custom_hbgdki.tsv │ │ │ │ ├── geospatial.tsv │ │ │ │ ├── journals.tsv │ │ │ │ └── social_science.tsv │ │ │ ├── role-admin.json │ │ │ ├── role-assign-eg1-curator.json │ │ │ ├── role-assign-localhost-curator.json │ │ │ ├── role-assign.json │ │ │ ├── role-assignee-list.json │ │ │ ├── role-contrib.json │ │ │ ├── role-creator.json │ │ │ ├── role-curator.json │ │ │ ├── role-dsContributor.json │ │ │ ├── role-dvContributor.json │ │ │ ├── role-editor.json │ │ │ ├── role-filedownloader.json │ │ │ ├── role-fullContributor.json │ │ │ ├── role-guest.json │ │ │ ├── role-manager.json │ │ │ ├── role-member.json │ │ │ ├── shibGroupHarvard.json │ │ │ ├── shibGroupMit.json │ │ │ ├── shibGroupTestShib.json │ │ │ ├── tsv │ │ │ │ └── tsv2json │ │ │ ├── user-admin.json │ │ │ ├── userCathy.json │ │ │ ├── userGabbi.json │ │ │ ├── userNick.json │ │ │ ├── userPete.json │ │ │ ├── userUma.json │ │ │ └── workflows │ │ │ │ ├── internal-httpSR-workflow.json │ │ │ │ ├── internal-no-pause-long-workflow.json │ │ │ │ ├── internal-no-pause-workflow.json │ │ │ │ └── internal-pause-workflow.json │ │ ├── download │ │ │ ├── .gitignore │ │ │ ├── dbquery │ │ │ ├── download │ │ │ └── tsv2files │ │ ├── post-install-api-block.sh │ │ ├── py_api_wrapper │ │ │ ├── api_fun.py │ │ │ ├── dataverse_api_link.py │ │ │ ├── msg_util.py │ │ │ ├── readme.md │ │ │ └── single_api_spec.py │ │ ├── setup-all.sh │ │ ├── setup-builtin-roles.sh │ │ ├── setup-datasetfields.sh │ │ ├── setup-dvs.sh │ │ ├── setup-identity-providers.sh │ │ ├── 
setup-optional-harvard.sh │ │ ├── setup-optional-publish-terms.sh │ │ ├── setup-optional.sh │ │ ├── setup-users.sh │ │ ├── testBlockEndpoints.sh │ │ └── update-datasetfields.sh │ │ ├── backup │ │ └── run_backup │ │ │ ├── README_HOWTO.txt │ │ │ ├── README_IMPLEMENTATION.txt │ │ │ ├── backup.py │ │ │ ├── backup_ssh.py │ │ │ ├── backup_swift.py │ │ │ ├── backupdb.sql │ │ │ ├── config.ini │ │ │ ├── config.py │ │ │ ├── database.py │ │ │ ├── email_notification.py │ │ │ ├── requirements.txt │ │ │ ├── run_backup.py │ │ │ ├── storage.py │ │ │ ├── storage_filesystem.py │ │ │ └── storage_s3.py │ │ ├── database │ │ ├── 3561-update.sql │ │ ├── drop-all.sh │ │ ├── drop-create.sh │ │ ├── facetlist.sql │ │ ├── fedora │ │ │ └── rebuild-and-test │ │ ├── homebrew │ │ │ ├── convert │ │ │ ├── create-database │ │ │ ├── create-role │ │ │ ├── create-role-superuser │ │ │ ├── custom-build-number │ │ │ ├── delete-all │ │ │ ├── devinstall │ │ │ ├── drop-database │ │ │ ├── drop-role │ │ │ ├── dump │ │ │ ├── keys2tmp │ │ │ ├── kill9glassfish │ │ │ ├── rebuild-and-test │ │ │ ├── restore │ │ │ ├── run-post-create-post-deploy │ │ │ ├── run-reference_data.sql │ │ │ ├── set-env-for-setup │ │ │ └── superuser-password-update │ │ ├── reference_data.sql │ │ └── upgrades │ │ │ ├── upgrade_v4.0.1_to_v4.1.sql │ │ │ ├── upgrade_v4.0_to_v4.0.1.sql │ │ │ ├── upgrade_v4.1_to_v4.2.sql │ │ │ ├── upgrade_v4.2.1_to_v4.2.2.sql │ │ │ ├── upgrade_v4.2.4_to_4.3.sql │ │ │ ├── upgrade_v4.4_to_v4.5.sql │ │ │ ├── upgrade_v4.5.1_to_v4.6.sql │ │ │ ├── upgrade_v4.5_to_v4.5.1.sql │ │ │ ├── upgrade_v4.6.1_to_v4.6.2.sql │ │ │ ├── upgrade_v4.6.2_to_v4.7.sql │ │ │ ├── upgrade_v4.6_to_v4.6.1.sql │ │ │ ├── upgrade_v4.7.1_to_v4.8.sql │ │ │ ├── upgrade_v4.7_to_v4.7.1.sql │ │ │ └── upgrade_v4.8.3_to_v4.8.4.sql │ │ ├── deploy │ │ └── phoenix.dataverse.org │ │ │ ├── cert.md │ │ │ ├── deploy │ │ │ ├── dv-root.json │ │ │ ├── install │ │ │ ├── post │ │ │ ├── prep │ │ │ └── rebuild │ │ ├── installer │ │ ├── Makefile │ │ ├── README.txt │ │ 
├── dvinstall │ │ │ ├── glassfish-setup.sh │ │ │ └── pgdriver │ │ │ │ ├── postgresql-42.1.4.jar │ │ │ │ ├── postgresql-8.4-703.jdbc4.jar │ │ │ │ ├── postgresql-9.0-802.jdbc4.jar │ │ │ │ ├── postgresql-9.1-902.jdbc4.jar │ │ │ │ ├── postgresql-9.2-1004.jdbc4.jar │ │ │ │ ├── postgresql-9.3-1104.jdbc4.jar │ │ │ │ └── postgresql-9.4.1212.jar │ │ ├── glassfish-setup.sh │ │ ├── install │ │ └── pgdriver │ │ │ ├── postgresql-42.1.4.jar │ │ │ ├── postgresql-8.4-703.jdbc4.jar │ │ │ ├── postgresql-9.0-802.jdbc4.jar │ │ │ ├── postgresql-9.1-902.jdbc4.jar │ │ │ ├── postgresql-9.2-1004.jdbc4.jar │ │ │ ├── postgresql-9.3-1104.jdbc4.jar │ │ │ └── postgresql-9.4.1212.jar │ │ ├── issues │ │ ├── 796 │ │ │ └── builtin2shib │ │ ├── 907 │ │ │ └── batchImportDv │ │ │ │ └── version1.xml │ │ ├── 1262 │ │ │ ├── create-sparrow1 │ │ │ ├── search-sparrow │ │ │ └── sparrow1.json │ │ ├── 1380 │ │ │ ├── 01-add.localhost.sh │ │ │ ├── 02-build-dv-structure.sh │ │ │ ├── add-ip-group.sh │ │ │ ├── add-user │ │ │ ├── data │ │ │ │ ├── 3-eg1.json │ │ │ │ ├── guest.json │ │ │ │ ├── locals.json │ │ │ │ ├── pete.json │ │ │ │ └── uma.json │ │ │ ├── db-list-dvs │ │ │ ├── delete-ip-group │ │ │ ├── dvs.gv │ │ │ ├── dvs.pdf │ │ │ ├── explicitGroup1.json │ │ │ ├── explicitGroup2.json │ │ │ ├── keys.txt │ │ │ ├── list-groups-for │ │ │ ├── list-ip-groups.sh │ │ │ ├── truth-table.numbers │ │ │ └── users.out │ │ ├── 2013 │ │ │ ├── download-zip.sh │ │ │ └── hit-homepage.sh │ │ ├── 2021 │ │ │ └── sort-files │ │ ├── 2036 │ │ │ ├── delete-ned-assignment │ │ │ └── grant-role-then-revoke │ │ ├── 2102 │ │ │ ├── dataset-metadata-next.json │ │ │ ├── dataset-metadata.json │ │ │ ├── ready-state.sql │ │ │ └── setup.sh │ │ ├── 2132 │ │ │ ├── find-multiple-drafts.sql │ │ │ └── one-draft-version-per-dataset-constraint.sql │ │ ├── 2438 │ │ │ └── download.R │ │ ├── 2454 │ │ │ ├── anAuthUser.json │ │ │ ├── anotherAuthUser.json │ │ │ ├── assignment.json │ │ │ ├── dataverse.json │ │ │ ├── group.json │ │ │ ├── rollback.sh │ │ │ └── 
run-test.sh │ │ ├── 2595 │ │ │ ├── monitor.py │ │ │ ├── numconnacquired.tsv │ │ │ └── plot.py │ │ ├── 2598 │ │ │ ├── detect-duplicate-dataverse-aliases.sql │ │ │ └── insert-duplicate-alias.sql │ │ ├── 2648 │ │ │ └── reproduce │ │ ├── 2649 │ │ │ └── reproduce │ │ ├── 2681 │ │ │ └── create-files │ │ ├── 3354 │ │ │ ├── createDatasetWithSha1Files.sh │ │ │ ├── datasetWithSha1Files.json │ │ │ └── mydata │ │ ├── 3543 │ │ │ ├── dv-peteDelete1.json │ │ │ ├── dv-peteDelete2.json │ │ │ ├── dv-peteDelete3.json │ │ │ ├── dv-peteDeleteTop.json │ │ │ ├── setup.sh │ │ │ └── test.sh │ │ ├── 3544 │ │ │ └── delete.sh │ │ └── guestbook │ │ │ └── insert-guestbook-responses.sh │ │ ├── migration │ │ ├── HarvardCustomFields.csv │ │ ├── HarvardPreMigrationDataScrub.sql │ │ ├── custom_field_map.sql │ │ ├── datafile_pub_date.sql │ │ ├── files_destination_step1_ │ │ ├── files_source_ │ │ ├── migrate_datasets.sql │ │ ├── migrate_dataverses.sql │ │ ├── migrate_links.sql │ │ ├── migrate_passwords.sql │ │ ├── migrate_permissions.sql │ │ ├── migrate_to_workflows.sql │ │ ├── migrate_users.sql │ │ ├── migration_instructions.txt │ │ ├── migration_presteps.txt │ │ ├── scrub_duplicate_emails.sql │ │ ├── scrub_email_usernames.sql │ │ ├── sequence_script.sql │ │ ├── versions_source_ │ │ └── versions_source_step2_ │ │ ├── rapache │ │ └── build.sh │ │ ├── search │ │ ├── .gitignore │ │ ├── add │ │ ├── assumptions │ │ ├── clear │ │ ├── compare │ │ ├── create │ │ ├── create-bird-dvs1 │ │ ├── create-psi-dvs │ │ ├── create-tree-dvs1 │ │ ├── create-users │ │ ├── data │ │ │ ├── binary │ │ │ │ ├── 1000files.zip │ │ │ │ ├── 100files.zip │ │ │ │ ├── 3files.zip │ │ │ │ ├── health.zip │ │ │ │ ├── trees.png │ │ │ │ └── trees.zip │ │ │ ├── dv-birds1.tsv │ │ │ ├── dv-psi.tsv │ │ │ ├── dv-trees1.tsv │ │ │ ├── group-explicit-trees.json │ │ │ ├── in │ │ │ │ ├── dataverses.birds │ │ │ │ │ ├── 4 │ │ │ │ │ ├── 5 │ │ │ │ │ └── 6 │ │ │ │ ├── dataverses.root │ │ │ │ │ ├── 2 │ │ │ │ │ └── 3 │ │ │ │ └── dataverses.trees │ │ │ │ │ 
├── 7 │ │ │ │ │ └── 9 │ │ │ ├── mkpaths.xsl │ │ │ ├── nodes.xml │ │ │ ├── replace_test │ │ │ │ ├── 003.txt │ │ │ │ ├── 004.txt │ │ │ │ ├── 005.txt │ │ │ │ └── growing_file │ │ │ │ │ ├── 2016-01 │ │ │ │ │ └── data.tsv │ │ │ │ │ ├── 2016-02 │ │ │ │ │ └── data.tsv │ │ │ │ │ └── 2016-03 │ │ │ │ │ └── data.tsv │ │ │ ├── savedSearchAdvanced.json │ │ │ ├── savedSearchBasic.json │ │ │ ├── savedSearchInvalidJson.json │ │ │ ├── savedSearchInvalidJsonNoQuery.json │ │ │ ├── savedSearchMaliBasicHealth.json │ │ │ └── tabular │ │ │ │ ├── 120745.dta │ │ │ │ ├── 1char │ │ │ │ ├── 50by1000.dta │ │ │ │ └── 50by1000.dta.zip │ │ ├── dataset-add │ │ ├── dbbuiltin2shib │ │ ├── dbdatasetversion │ │ ├── dbdbobject │ │ ├── dblinks │ │ ├── dblinks-delete │ │ ├── dbperms │ │ ├── dbsavedsearch │ │ ├── dbsavedsearch-delete │ │ ├── dbshibgroups │ │ ├── dbusers │ │ ├── ds.tsv │ │ ├── dv.tsv │ │ ├── empty-entityid-check │ │ ├── export-keys │ │ ├── files │ │ ├── go │ │ ├── index │ │ ├── index-status │ │ ├── json2ids │ │ ├── populate │ │ ├── populate-bird-dvs1 │ │ ├── populate-psi-dvs │ │ ├── populate-tree-dvs1 │ │ ├── populate-users │ │ ├── query │ │ ├── saved-search │ │ ├── saved-search-setup │ │ ├── saved-search-test │ │ ├── search │ │ ├── solr-delete-id │ │ ├── spellcheck │ │ ├── tab2json │ │ ├── tab2json-dvs │ │ ├── tab2json-users │ │ ├── tests │ │ │ ├── add-members-to-trees-group │ │ │ ├── create-all-and-test │ │ │ ├── create-saved-search-and-test │ │ │ ├── data │ │ │ │ ├── dataset-finch1.json │ │ │ │ ├── dataset-finch2.json │ │ │ │ ├── dataset-mali1.json │ │ │ │ ├── dataset-mali2.json │ │ │ │ ├── dataset-trees1-edit-subject.xml │ │ │ │ ├── dataset-trees1-edit.xml │ │ │ │ ├── dataset-trees1.xml │ │ │ │ └── dv-dash.json │ │ │ ├── delete-all-and-test │ │ │ ├── destroy-dataset-finch1 │ │ │ ├── destroy-dataset-spruce1 │ │ │ ├── edit-dataset-finch1 │ │ │ ├── expected │ │ │ │ ├── anon │ │ │ │ ├── anon-empty │ │ │ │ ├── anon3 │ │ │ │ ├── anon3-full │ │ │ │ ├── anontest3 │ │ │ │ ├── finch1 │ │ │ │ ├── 
finch3 │ │ │ │ ├── nosuchuser │ │ │ │ ├── saved-search │ │ │ │ ├── saved-search-links │ │ │ │ ├── solr-down │ │ │ │ ├── spruce1 │ │ │ │ ├── spruce2 │ │ │ │ └── zero │ │ │ ├── explicit-group-add │ │ │ ├── files │ │ │ ├── grant-authusers-add-on-root │ │ │ ├── grant-finch-admin-on-spruce │ │ │ ├── grant-ipgroup3-add-on-root │ │ │ ├── grant-shibgroup1-add-on-root │ │ │ ├── grant-spruce-admin-on-birds │ │ │ ├── ipgroup-add │ │ │ ├── permissions1 │ │ │ ├── permissions2 │ │ │ ├── permissions3 │ │ │ ├── permissions3-full-anon │ │ │ ├── publish-dataset-spruce1 │ │ │ ├── publish-dataverse-birds │ │ │ ├── publish-dataverse-finches │ │ │ ├── publish-dataverse-root │ │ │ ├── publish-dataverse-spruce │ │ │ ├── publish-dataverse-trees │ │ │ ├── publish-spruce1-and-test │ │ │ ├── revoke-finch-admin-on-spruce │ │ │ ├── revoke-spruce-admin-on-birds │ │ │ ├── solr-down │ │ │ ├── special-characters │ │ │ └── upload-1000-files │ │ └── users.tsv │ │ ├── setup │ │ └── asadmin-setup.sh │ │ ├── trello │ │ └── trello │ │ └── vagrant │ │ ├── install-dataverse.sh │ │ ├── install-tworavens.sh │ │ ├── rpmbuild.sh │ │ ├── setup-solr.sh │ │ ├── setup.sh │ │ └── test.sh └── testscripts │ ├── db.sh │ ├── install │ └── post ├── secrets ├── admin │ └── password ├── api │ └── key ├── db │ └── password ├── db_asadmin ├── doi │ └── password └── doi_asadmin ├── solr ├── 4.6.0 │ ├── readme.me │ ├── schema.xml │ ├── schema.xml.4.6.0.dist │ ├── stopwords_en.txt │ └── stopwords_en.txt.dist ├── Dockerfile └── schema.xml ├── solr7 ├── 7.3.0 │ ├── readme.me │ ├── schema.xml │ └── solrconfig.xml ├── Dockerfile ├── backup_cron.sh ├── entrypoint.sh ├── solrconfig_master.xml └── solrconfig_slave.xml └── triggers ├── affiliations.sql ├── external-service.sql ├── external-services.py └── lang-properties-convert.py /.gitignore: -------------------------------------------------------------------------------- 1 | dataverse.war 2 | 3 | #Ignoring IDE files 4 | .idea 5 | .idea/* 6 | 7 | #Ignoring letsencrpt folders for SSL 
8 | letsencrypt 9 | letsencrypt/* 10 | -------------------------------------------------------------------------------- /cvmanager/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:2.7 2 | MAINTAINER Vyacheslav Tykhonov 3 | COPY . / 4 | WORKDIR / 5 | RUN pip install -r requirements.txt 6 | ENTRYPOINT ["python"] 7 | CMD ["app.py"] 8 | -------------------------------------------------------------------------------- /cvmanager/README.md: -------------------------------------------------------------------------------- 1 | ## CVManager is API endpoint extension of Dataverse to support CESSDA controlled vocabularies 2 | This software is developed and supported by the [DANS](http://dans.knaw.nl) in the collaboration with [GESIS](https://www.gesis.org) 3 | -------------------------------------------------------------------------------- /cvmanager/requirements.txt: -------------------------------------------------------------------------------- 1 | flask 2 | simplejson 3 | requests 4 | httpretty 5 | -------------------------------------------------------------------------------- /demostart.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | docker network create traefik 3 | cp .env_sample .env 4 | docker-compose up -d 5 | -------------------------------------------------------------------------------- /distros/BeFAIR/configs/Readme.md: -------------------------------------------------------------------------------- 1 | # Config files for BeFAIR 2 | -------------------------------------------------------------------------------- /distros/BeFAIR/configuration/Readme.md: -------------------------------------------------------------------------------- 1 | # SSH keys for traefik and more 2 | -------------------------------------------------------------------------------- /distros/C19M/traefik.toml: 
-------------------------------------------------------------------------------- 1 | [global] 2 | sendAnonymousUsage = false 3 | 4 | [log] 5 | level = "DEBUG" 6 | 7 | [providers] 8 | [providers.docker] 9 | endpoint = "unix:///var/run/docker.sock" 10 | watch = true 11 | exposedByDefault = true 12 | swarmMode = false 13 | [providers.file] 14 | filename="/etc/traefik/conf.d/rules.toml" 15 | watch=true 16 | 17 | [api] 18 | dashboard = true 19 | debug = false 20 | insecure = true 21 | 22 | [entryPoints] 23 | [entryPoints.insecure] 24 | address = ":80" 25 | -------------------------------------------------------------------------------- /distros/CM2022/did/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | did: 4 | image: oydeu/oydid-base 5 | container_name: did_service 6 | networks: 7 | - traefik 8 | environment: 9 | - "DID_DB=external" 10 | ports: 11 | - "3000:3000" 12 | dbbox: 13 | networks: 14 | - traefik 15 | image: postgres:12.1 16 | container_name: db 17 | environment: 18 | POSTGRES_HOST_AUTH_METHOD: "trust" 19 | volumes: 20 | - ./did_data:/var/lib/postgresql/data 21 | ports: 22 | - "5435:5432" 23 | 24 | networks: 25 | traefik: 26 | external: true 27 | -------------------------------------------------------------------------------- /distros/CM2022/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | git clone https://github.com/apache/superset 4 | cp superset-docker-compose.yml ./superset/ 5 | cd superset 6 | docker-compose -f superset-docker-compose.yml up -d 7 | cd ../did 8 | docker-compose up -d 9 | cd .. 10 | git clone https://github.com/doccano/doccano 11 | cp doccano-docker-compose.yml ./doccano/docker/docker-compose.yml 12 | cd doccano 13 | cp docker/.env.example ./.env 14 | docker-compose -f docker/docker-compose.yml up -d 15 | cd ..
16 | git clone https://github.com/NatLibFi/Skosmos 17 | cp skosmos-docker-compose.yml ./Skosmos/dockerfiles/ 18 | cd Skosmos 19 | docker-compose -f dockerfiles/skosmos-docker-compose.yml up -d 20 | cd .. 21 | git clone https://github.com/WeblateOrg/docker-compose.git weblate-docker 22 | cp weblate-docker-compose.yml ./weblate-docker/ 23 | cd weblate-docker 24 | docker-compose -f weblate-docker-compose.yml up -d 25 | -------------------------------------------------------------------------------- /distros/CM2022/stop-all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd superset 4 | docker-compose -f superset-docker-compose.yml down 5 | cd ../did 6 | docker-compose down 7 | cd ../doccano 8 | docker-compose -f docker/docker-compose.yml down 9 | cd ../Skosmos 10 | docker-compose -f dockerfiles/skosmos-docker-compose.yml down 11 | cd ../weblate-docker 12 | docker-compose -f weblate-docker-compose.yml down 13 | -------------------------------------------------------------------------------- /distros/CM2022/traefik.toml: -------------------------------------------------------------------------------- 1 | [global] 2 | sendAnonymousUsage = false 3 | 4 | [log] 5 | level = "DEBUG" 6 | 7 | [providers] 8 | [providers.docker] 9 | endpoint = "unix:///var/run/docker.sock" 10 | watch = true 11 | exposedByDefault = true 12 | swarmMode = false 13 | [providers.file] 14 | filename="/etc/traefik/conf.d/rules.toml" 15 | watch=true 16 | 17 | [api] 18 | dashboard = true 19 | debug = false 20 | insecure = true 21 | 22 | [entryPoints] 23 | [entryPoints.insecure] 24 | address = ":80" 25 | -------------------------------------------------------------------------------- /distros/CM2022/weblate-docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | weblate: 4 | image: weblate/weblate 5 | container_name: weblate 6 | ports: 7 | - 8096:8080 8 | tmpfs: 9 | - 
/app/cache 10 | volumes: 11 | - weblate-data:/app/data 12 | env_file: 13 | - ./environment 14 | restart: always 15 | depends_on: 16 | - database 17 | - cache 18 | networks: 19 | - traefik 20 | - default 21 | database: 22 | image: postgres:14-alpine 23 | env_file: 24 | - ./environment 25 | volumes: 26 | - postgres-data:/var/lib/postgresql/data 27 | restart: always 28 | networks: 29 | - traefik 30 | - default 31 | cache: 32 | image: redis:6-alpine 33 | restart: always 34 | command: [redis-server, --save, '60', '1'] 35 | volumes: 36 | - redis-data:/data 37 | networks: 38 | - traefik 39 | - default 40 | volumes: 41 | weblate-data: {} 42 | postgres-data: {} 43 | redis-data: {} 44 | networks: 45 | default: 46 | traefik: 47 | external: true 48 | -------------------------------------------------------------------------------- /distros/ODISSEI/Readme.md: -------------------------------------------------------------------------------- 1 | # The integration of ODISSEI services with Dataverse 2 | 3 | Clone source code in the distro folder and follow instructions to install it: 4 | ``` 5 | git clone https://github.com/apache/superset 6 | git clone https://github.com/apache/airflow 7 | git clone https://github.com/NatLibFi/Skosmos 8 | ``` 9 | -------------------------------------------------------------------------------- /distros/ODISSEI/traefik.toml: -------------------------------------------------------------------------------- 1 | [global] 2 | sendAnonymousUsage = false 3 | 4 | [log] 5 | level = "DEBUG" 6 | 7 | [providers] 8 | [providers.docker] 9 | endpoint = "unix:///var/run/docker.sock" 10 | watch = true 11 | exposedByDefault = true 12 | swarmMode = false 13 | [providers.file] 14 | filename="/etc/traefik/conf.d/rules.toml" 15 | watch=true 16 | 17 | [api] 18 | dashboard = true 19 | debug = false 20 | 21 | [entryPoints] 22 | [entryPoints.insecure] 23 | address = ":80" 24 | -------------------------------------------------------------------------------- /distros/Readme.md: 
-------------------------------------------------------------------------------- 1 | # Dataverse distributions 2 | You can use different Dataverse distributions depending on your use case. A Dataverse distribution, or distro, is a kind of Open Science system which uses the Dataverse platform as its core, together with supporting user programs, libraries and other containers. Every user can create their own distro where the Dataverse core will be centrally maintained but other components will be customized. 3 | 4 | To switch to another distribution you should change the variable COMPOSE_FILE in your .env file to the yaml file below. 5 | For example, edit the .env file and change this variable here: 6 | ``` 7 | COMPOSE_FILE=./docker-compose.yml 8 | ``` 9 | Apply this specification to run the distribution with SSL support: 10 | ``` 11 | COMPOSE_FILE=./distros/docker-compose-ssl.yml 12 | ``` 13 | 14 | Available distributions (more are coming): 15 | * docker-compose.yml (standard http only) 16 | * BeFAIR (with FAIR assessment) 17 | * docker-compose-ssl.yml (standard with letsencrypt https) 18 | * docker-compose-shibboleth.yml (Shibboleth support included) 19 | -------------------------------------------------------------------------------- /distros/Superset/Readme.md: -------------------------------------------------------------------------------- 1 | # The integration of Apache Superset with Dataverse 2 | 3 | Clone the Superset source code in the distro folder and follow the instructions: 4 | ``` 5 | git clone https://github.com/apache/superset 6 | git clone https://github.com/NatLibFi/Skosmos 7 | ``` 8 | -------------------------------------------------------------------------------- /distros/odissei-init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #cp .env_sample .env 4 | curl -L -o stw.ttl.zip http://zbw.eu/stw/version/latest/download/stw.ttl.zip 5 | unzip -o stw.ttl.zip 6 | curl -I -X POST -H Content-Type:text/turtle -T stw.ttl -G
http://0.0.0.0:3030/skosmos/data --data-urlencode graph=http://zbw.eu/stw/ 7 | curl -L -o unescothes.ttl http://skos.um.es/unescothes/unescothes.ttl 8 | curl -I -X POST -H Content-Type:text/turtle -T unescothes.ttl -G http://0.0.0.0:3030/skosmos/data --data-urlencode graph=http://skos.um.es/unescothes/ 9 | 10 | # wget from github and store CBS vocabulary in ./config/cbs-variables-thesaurus.ttl 11 | curl -I -X POST -H Content-Type:text/turtle -T ./config/cbs-variables-thesaurus.ttl -G http://0.0.0.0:3030/skosmos/data --data-urlencode graph=http://cbs.nl/variables/ 12 | 13 | echo "Checking search index..." 14 | curl "http://0.0.0.0:8000/rest/v1/search?vocab=stw&query=a*" 15 | -------------------------------------------------------------------------------- /distros/test-docker.dataverse.no/init.d/01-persistent-id.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Setting up the settings" >> /tmp/status.log 3 | echo "- Allow internal signup" >> /tmp/status.log 4 | SERVER=http://${DATAVERSE_URL}/api 5 | echo $SERVER 6 | curl -X PUT -d yes "$SERVER/admin/settings/:AllowSignUp" 7 | curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:SignUpUrl" 8 | curl -X PUT -d CV "$SERVER/admin/settings/:CV" 9 | curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY 10 | curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy 11 | curl -X PUT -d 'native/http' $SERVER/admin/settings/:UploadMethods 12 | curl -X PUT -d solr:8983 "$SERVER/admin/settings/:SolrHostColonPort" 13 | echo 14 | 15 | # Demo server with FAKE DOIs if doi_authority is empty 16 | if [ -z "${doi_authority}" ]; then 17 | curl -X PUT -d doi "$SERVER/admin/settings/:Protocol" 18 | curl -X PUT -d 10.5072 "$SERVER/admin/settings/:Authority" 19 | curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder" 20 | curl -X PUT -d FAKE "$SERVER/admin/settings/:DoiProvider" 21 | fi 22 | 23 | 
-------------------------------------------------------------------------------- /distros/test-docker.dataverse.no/init.d/010-mailrelay-set.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Setup mail relay 4 | # https://guides.dataverse.org/en/latest/developers/troubleshooting.html 5 | if [ "${system_email}" ]; then 6 | curl -X PUT -d ${system_email} http://localhost:8080/api/admin/settings/:SystemEmail 7 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} delete-javamail-resource mail/notifyMailSession 8 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-javamail-resource --mailhost ${mailhost} --mailuser ${mailuser} --fromaddress ${no_reply_email} --property mail.smtp.auth=false:mail.smtp.password=${smtp_password}:mail.smtp.port=${smtp_port}:mail.smtp.socketFactory.port=${socket_port}:mail.smtp.socketFactory.fallback=false mail/notifyMailSession 9 | fi 10 | -------------------------------------------------------------------------------- /distros/test-docker.dataverse.no/init.d/011-local-storage.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Enable file folder in local storage 4 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.type\=file" 5 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.label\=file" 6 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.directory\=/data" 7 | -------------------------------------------------------------------------------- /distros/test-docker.dataverse.no/init.d/02-controlled-voc.sh: -------------------------------------------------------------------------------- 1 | if [ "${CVM_SERVER_NAME}" ]; then 2 | echo "Uploading ${CVM_SERVER_NAME} metadatablock" >> /tmp/status.log 3 | curl 
http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/cvmm.tsv -H "Content-type: text/tab-separated-values" 4 | 5 | #curl -H "Content-Type: application/json" -X PUT --data-binary @data/cvm-setting.json "$SERVER/admin/settings/:CVMConf" 6 | echo "Uploading cvm-setting.json" >> /tmp/status.log 7 | 8 | fi 9 | 10 | if [ "${CVM_CONFIG}" ]; then 11 | echo "Dowload keywords configuration file from ${CVM_TSV_SOURCE}" >> /tmp/status.log; 12 | wget -O ${HOME_DIR}/dvinstall/data/metadatablocks/keys_config.json ${CVM_CONFIG} 13 | wget -O ${HOME_DIR}/dvinstall/data/metadatablocks/cvm.sql ${CVM_SQL} 14 | curl -H "Content-Type: application/json" -X PUT --data-binary @${HOME_DIR}/dvinstall/data/metadatablocks/keys_config.json http://localhost:8080/api/admin/settings/:CVMConf 15 | psql -U dvnuser dvndb -h postgres -f ${HOME_DIR}/dvinstall/data/metadatablocks/cvm.sql 16 | fi 17 | 18 | -------------------------------------------------------------------------------- /distros/test-docker.dataverse.no/init.d/03-doi-set.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Setup DOI parameters 4 | # https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring 5 | if [ "${doi_authority}" ]; then 6 | curl -X PUT -d ${doi_authority} http://localhost:8080/api/admin/settings/:Authority 7 | curl -X PUT -d ${doi_provider} http://localhost:8080/api/admin/settings/:DoiProvider 8 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.username\=${doi_username}" 9 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.password\=${doi_password}" 10 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.dataciterestapiurlstring\=${dataciterestapiurlstring}" 11 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.baseurlstring\=${baseurlstring}" 12 | 
if [ "${doi_shoulder}" ]; then 13 | curl -X PUT -d "${doi_shoulder}/" "$SERVER/admin/settings/:Shoulder" 14 | fi 15 | fi 16 | -------------------------------------------------------------------------------- /distros/test-docker.dataverse.no/init.d/04-setdomain.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Setup domain name 3 | hostname=${hostname} 4 | echo $hostname 5 | hostnamecmd=dataverse.fqdn=${hostname} 6 | echo $hostnamecmd 7 | siteURLcmd=dataverse.siteUrl='https\:\/\/'${hostname} 8 | echo $siteURLcmd 9 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $siteURLcmd 10 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $hostnamecmd 11 | -------------------------------------------------------------------------------- /distros/test-docker.dataverse.no/init.d/05-reindex.sh: -------------------------------------------------------------------------------- 1 | # Reindex all datasets 2 | curl http://localhost:8080/api/admin/index/clear 3 | curl http://localhost:8080/api/admin/index 4 | -------------------------------------------------------------------------------- /distros/test-docker.dataverse.no/init.d/08-federated-login.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Federated login activation 4 | # https://guides.dataverse.org/en/latest/installation/shibboleth.html 5 | if [ "${federated_json_file}" ]; then 6 | curl -X POST -H 'Content-type: application/json' --upload-file ${federated_json_file} http://localhost:8080/api/admin/authenticationProviders 7 | fi 8 | -------------------------------------------------------------------------------- /distros/test-docker.dataverse.no/init.d/1001-webhooks.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Running python script to invoke webhooks 4 | if [ "${WEBHOOK}" ]; 
then 5 | PGPASSWORD=${POSTGRES_PASSWORD};export PGPASSWORD 6 | psql -U ${POSTGRES_USER} ${POSTGRES_DATABASE} -h ${POSTGRES_SERVER} -f ${HOME_DIR}/triggers/external-service.sql 7 | /usr/bin/python3.6 ${WEBHOOK} & 8 | echo 'Setting webhook on ' + ${WEBHOOK} >> /tmp/status.log 9 | fi 10 | -------------------------------------------------------------------------------- /distros/vanilla/init.d/01-persistent-id.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Setting up the settings" >> /tmp/status.log 3 | echo "- Allow internal signup" >> /tmp/status.log 4 | SERVER=http://${DATAVERSE_URL}/api 5 | echo $SERVER 6 | curl -X PUT -d yes "$SERVER/admin/settings/:AllowSignUp" 7 | curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:SignUpUrl" 8 | curl -X PUT -d CV "$SERVER/admin/settings/:CV" 9 | curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY 10 | curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy 11 | curl -X PUT -d 'native/http' $SERVER/admin/settings/:UploadMethods 12 | curl -X PUT -d solr:8983 "$SERVER/admin/settings/:SolrHostColonPort" 13 | echo 14 | 15 | # Demo server with FAKE DOIs if doi_authority is empty 16 | if [ -z "${doi_authority}" ]; then 17 | curl -X PUT -d doi "$SERVER/admin/settings/:Protocol" 18 | curl -X PUT -d 10.5072 "$SERVER/admin/settings/:Authority" 19 | curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder" 20 | curl -X PUT -d FAKE "$SERVER/admin/settings/:DoiProvider" 21 | fi 22 | 23 | -------------------------------------------------------------------------------- /distros/vanilla/init.d/010-mailrelay-set.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Setup mail relay 4 | # https://guides.dataverse.org/en/latest/developers/troubleshooting.html 5 | if [ "${system_email}" ]; then 6 | curl -X PUT -d ${system_email} http://localhost:8080/api/admin/settings/:SystemEmail 
7 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} delete-javamail-resource mail/notifyMailSession 8 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-javamail-resource --mailhost ${mailhost} --mailuser ${mailuser} --fromaddress ${no_reply_email} --property mail.smtp.auth=false:mail.smtp.password=${smtp_password}:mail.smtp.port=${smtp_port}:mail.smtp.socketFactory.port=${socket_port}:mail.smtp.socketFactory.fallback=false mail/notifyMailSession 9 | fi 10 | -------------------------------------------------------------------------------- /distros/vanilla/init.d/011-local-storage.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Enable file folder in local storage 4 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.type\=file" 5 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.label\=file" 6 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.directory\=/data" 7 | -------------------------------------------------------------------------------- /distros/vanilla/init.d/02-controlled-voc.sh: -------------------------------------------------------------------------------- 1 | if [ "${CVM_SERVER_NAME}" ]; then 2 | echo "Uploading ${CVM_SERVER_NAME} metadatablock" >> /tmp/status.log 3 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/cvmm.tsv -H "Content-type: text/tab-separated-values" 4 | 5 | #curl -H "Content-Type: application/json" -X PUT --data-binary @data/cvm-setting.json "$SERVER/admin/settings/:CVMConf" 6 | echo "Uploading cvm-setting.json" >> /tmp/status.log 7 | 8 | fi 9 | 10 | if [ "${CVM_CONFIG}" ]; then 11 | echo "Dowload keywords configuration file from ${CVM_TSV_SOURCE}" >> /tmp/status.log; 12 | wget -O 
${HOME_DIR}/dvinstall/data/metadatablocks/keys_config.json ${CVM_CONFIG} 13 | wget -O ${HOME_DIR}/dvinstall/data/metadatablocks/cvm.sql ${CVM_SQL} 14 | curl -H "Content-Type: application/json" -X PUT --data-binary @${HOME_DIR}/dvinstall/data/metadatablocks/keys_config.json http://localhost:8080/api/admin/settings/:CVMConf 15 | psql -U dvnuser dvndb -h postgres -f ${HOME_DIR}/dvinstall/data/metadatablocks/cvm.sql 16 | fi 17 | 18 | -------------------------------------------------------------------------------- /distros/vanilla/init.d/03-doi-set.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Setup DOI parameters 4 | # https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring 5 | if [ "${doi_authority}" ]; then 6 | curl -X PUT -d ${doi_authority} http://localhost:8080/api/admin/settings/:Authority 7 | curl -X PUT -d ${doi_provider} http://localhost:8080/api/admin/settings/:DoiProvider 8 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.username\=${doi_username}" 9 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.password\=${doi_password}" 10 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.dataciterestapiurlstring\=${dataciterestapiurlstring}" 11 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.baseurlstring\=${baseurlstring}" 12 | if [ "${doi_shoulder}" ]; then 13 | curl -X PUT -d "${doi_shoulder}/" "$SERVER/admin/settings/:Shoulder" 14 | fi 15 | fi 16 | -------------------------------------------------------------------------------- /distros/vanilla/init.d/04-setdomain.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Setup domain name 3 | hostname=${hostname} 4 | echo $hostname 5 | hostnamecmd=dataverse.fqdn=${hostname} 6 | echo $hostnamecmd 7 | 
siteURLcmd=dataverse.siteUrl='https\:\/\/'${hostname} 8 | echo $siteURLcmd 9 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $siteURLcmd 10 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $hostnamecmd 11 | -------------------------------------------------------------------------------- /distros/vanilla/init.d/05-reindex.sh: -------------------------------------------------------------------------------- 1 | # Reindex all datasets 2 | curl http://localhost:8080/api/admin/index/clear 3 | curl http://localhost:8080/api/admin/index 4 | -------------------------------------------------------------------------------- /distros/vanilla/init.d/08-federated-login.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Federated login activation 4 | # https://guides.dataverse.org/en/latest/installation/shibboleth.html 5 | if [ "${federated_json_file}" ]; then 6 | curl -X POST -H 'Content-type: application/json' --upload-file ${federated_json_file} http://localhost:8080/api/admin/authenticationProviders 7 | fi 8 | -------------------------------------------------------------------------------- /distros/vanilla/init.d/1001-webhooks.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Running python script to invoke webhooks 4 | if [ "${WEBHOOK}" ]; then 5 | PGPASSWORD=${POSTGRES_PASSWORD};export PGPASSWORD 6 | psql -U ${POSTGRES_USER} ${POSTGRES_DATABASE} -h ${POSTGRES_SERVER} -f ${HOME_DIR}/triggers/external-service.sql 7 | /usr/bin/python3.6 ${WEBHOOK} & 8 | echo 'Setting webhook on ' + ${WEBHOOK} >> /tmp/status.log 9 | fi 10 | -------------------------------------------------------------------------------- /distros/vanilla/secrets/admin/password: -------------------------------------------------------------------------------- 1 | admin1 2 | 
-------------------------------------------------------------------------------- /distros/vanilla/secrets/api/key: -------------------------------------------------------------------------------- 1 | supersecret 2 | -------------------------------------------------------------------------------- /distros/vanilla/secrets/db/password: -------------------------------------------------------------------------------- 1 | dvnsecret 2 | -------------------------------------------------------------------------------- /distros/vanilla/secrets/db_asadmin: -------------------------------------------------------------------------------- 1 | AS_ADMIN_ALIASPASSWORD=dvnsecret 2 | -------------------------------------------------------------------------------- /distros/vanilla/secrets/doi/password: -------------------------------------------------------------------------------- 1 | changeme 2 | -------------------------------------------------------------------------------- /distros/vanilla/secrets/doi_asadmin: -------------------------------------------------------------------------------- 1 | AS_ADMIN_ALIASPASSWORD=changeme 2 | -------------------------------------------------------------------------------- /init.d/01-persistent-id.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Setting up the settings" >> /tmp/status.log 3 | echo "- Allow internal signup" >> /tmp/status.log 4 | SERVER=http://${DATAVERSE_URL}/api 5 | echo $SERVER 6 | curl -X PUT -d yes "$SERVER/admin/settings/:AllowSignUp" 7 | curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:SignUpUrl" 8 | curl -X PUT -d CV "$SERVER/admin/settings/:CV" 9 | curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY 10 | curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy 11 | curl -X PUT -d 'native/http' $SERVER/admin/settings/:UploadMethods 12 | curl -X PUT -d solr:8983 "$SERVER/admin/settings/:SolrHostColonPort" 13 
| echo 14 | 15 | # Demo server with FAKE DOIs if doi_authority is empty 16 | if [ -z "${doi_authority}" ]; then 17 | curl -X PUT -d doi "$SERVER/admin/settings/:Protocol" 18 | curl -X PUT -d 10.5072 "$SERVER/admin/settings/:Authority" 19 | curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder" 20 | curl -X PUT -d FAKE "$SERVER/admin/settings/:DoiProvider" 21 | fi 22 | 23 | -------------------------------------------------------------------------------- /init.d/010-mailrelay-set.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Setup mail relay 4 | # https://guides.dataverse.org/en/latest/developers/troubleshooting.html 5 | if [ "${system_email}" ]; then 6 | curl -X PUT -d ${system_email} http://localhost:8080/api/admin/settings/:SystemEmail 7 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} delete-javamail-resource mail/notifyMailSession 8 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-javamail-resource --mailhost ${mailhost} --mailuser ${mailuser} --fromaddress ${no_reply_email} --property mail.smtp.auth=false:mail.smtp.password=${smtp_password}:mail.smtp.port=${smtp_port}:mail.smtp.socketFactory.port=${socket_port}:mail.smtp.socketFactory.fallback=false mail/notifyMailSession 9 | fi 10 | -------------------------------------------------------------------------------- /init.d/011-local-storage.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Enable file folder in local storage 4 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.type\=file" 5 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.label\=file" 6 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.directory\=/data" 7 | 
-------------------------------------------------------------------------------- /init.d/02-controlled-voc.sh: -------------------------------------------------------------------------------- 1 | if [ "${CVM_SERVER_NAME}" ]; then 2 | echo "Uploading ${CVM_SERVER_NAME} metadatablock" >> /tmp/status.log 3 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/cvmm.tsv -H "Content-type: text/tab-separated-values" 4 | 5 | #curl -H "Content-Type: application/json" -X PUT --data-binary @data/cvm-setting.json "$SERVER/admin/settings/:CVMConf" 6 | echo "Uploading cvm-setting.json" >> /tmp/status.log 7 | 8 | fi 9 | 10 | if [ "${CVM_CONFIG}" ]; then 11 | echo "Dowload keywords configuration file from ${CVM_TSV_SOURCE}" >> /tmp/status.log; 12 | wget -O ${HOME_DIR}/dvinstall/data/metadatablocks/keys_config.json ${CVM_CONFIG} 13 | wget -O ${HOME_DIR}/dvinstall/data/metadatablocks/cvm.sql ${CVM_SQL} 14 | curl -H "Content-Type: application/json" -X PUT --data-binary @${HOME_DIR}/dvinstall/data/metadatablocks/keys_config.json http://localhost:8080/api/admin/settings/:CVMConf 15 | psql -U dvnuser dvndb -h postgres -f ${HOME_DIR}/dvinstall/data/metadatablocks/cvm.sql 16 | fi 17 | 18 | -------------------------------------------------------------------------------- /init.d/03-doi-set.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Setup DOI parameters 4 | # https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring 5 | if [ "${doi_authority}" ]; then 6 | curl -X PUT -d ${doi_authority} http://localhost:8080/api/admin/settings/:Authority 7 | curl -X PUT -d ${doi_provider} http://localhost:8080/api/admin/settings/:DoiProvider 8 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.username\=${doi_username}" 9 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.password\=${doi_password}" 
10 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.dataciterestapiurlstring\=${dataciterestapiurlstring}" 11 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.baseurlstring\=${baseurlstring}" 12 | if [ "${doi_shoulder}" ]; then 13 | curl -X PUT -d "${doi_shoulder}/" "$SERVER/admin/settings/:Shoulder" 14 | fi 15 | fi 16 | -------------------------------------------------------------------------------- /init.d/04-setdomain.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Setup domain name 3 | hostname=${hostname} 4 | echo $hostname 5 | hostnamecmd=dataverse.fqdn=${hostname} 6 | echo $hostnamecmd 7 | siteURLcmd=dataverse.siteUrl='https\:\/\/'${hostname} 8 | echo $siteURLcmd 9 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $siteURLcmd 10 | asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $hostnamecmd 11 | -------------------------------------------------------------------------------- /init.d/05-reindex.sh: -------------------------------------------------------------------------------- 1 | # Reindex all datasets 2 | curl http://localhost:8080/api/admin/index/clear 3 | curl http://localhost:8080/api/admin/index 4 | -------------------------------------------------------------------------------- /init.d/08-federated-login.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Federated login activation 4 | # https://guides.dataverse.org/en/latest/installation/shibboleth.html 5 | if [ "${federated_json_file}" ]; then 6 | curl -X POST -H 'Content-type: application/json' --upload-file ${federated_json_file} http://localhost:8080/api/admin/authenticationProviders 7 | fi 8 | -------------------------------------------------------------------------------- /init.d/1001-webhooks.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Running python script to invoke webhooks 4 | if [ "${WEBHOOK}" ]; then 5 | PGPASSWORD=${POSTGRES_PASSWORD};export PGPASSWORD 6 | psql -U ${POSTGRES_USER} ${POSTGRES_DATABASE} -h ${POSTGRES_SERVER} -f ${HOME_DIR}/triggers/external-service.sql 7 | /usr/bin/python3.6 ${WEBHOOK} & 8 | echo 'Setting webhook on ' + ${WEBHOOK} >> /tmp/status.log 9 | fi 10 | -------------------------------------------------------------------------------- /kubernetes/infrastructure/docker/letsencrypt/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM fedora:25 2 | 3 | RUN dnf install certbot -y && dnf install procps -y && dnf install curl -y && dnf install wget -y && dnf install which -y && dnf clean all 4 | 5 | RUN mkdir /etc/letsencrypt 6 | 7 | COPY google-cloud-sdk.repo /etc/yum.repos.d/google-cloud-sdk.repo 8 | RUN dnf install -y google-cloud-sdk 9 | 10 | COPY kubernetes.repo /etc/yum.repos.d/kubernetes.repo 11 | RUN dnf install -y kubectl 12 | 13 | 14 | COPY secret-patch-template.json / 15 | COPY entrypoint.sh / 16 | 17 | 18 | EXPOSE 80 443 19 | 20 | VOLUME ["/etc/letsencrypt"] 21 | 22 | CMD ["/entrypoint.sh"] -------------------------------------------------------------------------------- /kubernetes/infrastructure/docker/letsencrypt/google-cloud-sdk.repo: -------------------------------------------------------------------------------- 1 | [google-cloud-sdk] 2 | name=Google Cloud SDK 3 | baseurl=https://packages.cloud.google.com/yum/repos/cloud-sdk-el7-x86_64 4 | enabled=1 5 | gpgcheck=1 6 | repo_gpgcheck=1 7 | gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg 8 | https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg -------------------------------------------------------------------------------- /kubernetes/infrastructure/docker/letsencrypt/kubernetes.repo: 
-------------------------------------------------------------------------------- 1 | [kubernetes] 2 | name=Kubernetes 3 | baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64 4 | enabled=1 5 | gpgcheck=1 6 | repo_gpgcheck=1 7 | gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg -------------------------------------------------------------------------------- /kubernetes/infrastructure/docker/letsencrypt/secret-patch-template.json: -------------------------------------------------------------------------------- 1 | { 2 | "kind": "Secret", 3 | "apiVersion": "v1", 4 | "metadata": { 5 | "name": "NAME", 6 | "namespace": "NAMESPACE" 7 | }, 8 | "data": { 9 | "tls.crt": "TLSCERT", 10 | "tls.key": "TLSKEY" 11 | }, 12 | "type": "Opaque" 13 | } 14 | -------------------------------------------------------------------------------- /kubernetes/infrastructure/docker/mailrelay/Dockerfile: -------------------------------------------------------------------------------- 1 | # CentOS Base Image 2 | from debian:stretch 3 | 4 | # Package Update and install Postfix 5 | RUN apt-get update && apt-get upgrade -y && apt-get install postfix rsyslog mailutils -y && apt-get clean -y 6 | 7 | # Copy Postfix Configuration file and Entrypoint 8 | ## Postfix ## 9 | COPY config/postfix/main.cf /etc/postfix/main.cf 10 | COPY config/postfix/sasl_passwd /etc/postfix/sasl_passwd 11 | 12 | ## Rsyslog ## 13 | COPY config/rsyslog/rsyslog.conf /etc/rsyslog/rsyslog.conf 14 | COPY config/rsyslog/listen.conf /etc/rsyslog.d/listen.conf 15 | 16 | 17 | # Change Permissions 18 | RUN chmod 0400 /etc/postfix/sasl_passwd 19 | 20 | # Generate SASL DB 21 | RUN postmap /etc/postfix/sasl_passwd 22 | 23 | 24 | # Expose SMTP 25 | EXPOSE 25 26 | 27 | 28 | # Entrypoint 29 | CMD cp /etc/resolv.conf /var/spool/postfix/etc/resolv.conf && service rsyslog start && postfix start && tail -f /var/log/mail.log 30 | 
-------------------------------------------------------------------------------- /kubernetes/infrastructure/docker/mailrelay/config/postfix/sasl_passwd: -------------------------------------------------------------------------------- 1 | # sasl_passwd 2 | # This file define user:pass to login into SMTP relay hosts 3 | # 4 | # SMTP_RELAY_HOST USERNAME:PASSWORD 5 | 6 | <> <> 7 | -------------------------------------------------------------------------------- /kubernetes/infrastructure/docker/mailrelay/config/rsyslog/listen.conf: -------------------------------------------------------------------------------- 1 | $SystemLogSocketName /dev/log 2 | -------------------------------------------------------------------------------- /kubernetes/infrastructure/docker/mailrelay/config/supervisor/supervisord.conf: -------------------------------------------------------------------------------- 1 | [supervisord] 2 | logfile = /var/log/supervisord.log 3 | logfuke_backups = 0 4 | 5 | [program:rsyslog] 6 | command=/usr/sbin/rsyslogd -n 7 | 8 | [program:postfix] 9 | process_name=postfix-master 10 | directory=/etc/postfix 11 | command=/usr/sbin/postfix -c /etc/postfix start 12 | startsecs=0 13 | autorestart=false 14 | -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/deployments/dataverse-dev-bridge.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | name: bridge 5 | namespace: dataverse-eu 6 | labels: 7 | component: bridge 8 | spec: 9 | replicas: 1 10 | strategy: 11 | type: Recreate 12 | template: 13 | metadata: 14 | labels: 15 | component: bridge 16 | spec: 17 | containers: 18 | - name: bridge 19 | image: vtycloud/dvnbridge 20 | ports: 21 | - containerPort: 8592 22 | protocol: TCP 23 | - containerPort: 9285 24 | protocol: TCP 25 | env: 26 | - name: doiProvider 27 | value : "DARA" 28 | - name: doiUsername 29 | 
value : "dveu" 30 | - name: doiPassword 31 | value : "test_dveu3" 32 | -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/deployments/dataverse-dev-postgres.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | name: postgres 5 | namespace: dataverse-eu 6 | labels: 7 | app: postgres 8 | spec: 9 | replicas: 1 10 | strategy: 11 | type: Recreate 12 | template: 13 | metadata: 14 | labels: 15 | app: postgres 16 | spec: 17 | containers: 18 | - name: postgres-db 19 | image: vtycloud/postgres:latest 20 | ports: 21 | - containerPort: 5432 22 | protocol: TCP 23 | name: postgres-port 24 | volumeMounts: 25 | - name: postgres-storage 26 | mountPath: "/var/lib/postgresql/data" 27 | subPath: "postgres_res" 28 | restartPolicy: Always 29 | volumes: 30 | - name: postgres-storage 31 | persistentVolumeClaim: 32 | claimName: dataverse-pvc 33 | -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/deployments/dataverse-dev-solr.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | name: solr 5 | namespace: dataverse-eu 6 | spec: 7 | replicas: 1 8 | strategy: 9 | type: Recreate 10 | template: 11 | metadata: 12 | labels: 13 | component: solr 14 | spec: 15 | containers: 16 | - name: solr 17 | image: vtycloud/dvndarasolr 18 | ports: 19 | - containerPort: 8983 20 | protocol: TCP 21 | env: 22 | - name: SOLR_HOST 23 | value : "solr" 24 | volumeMounts: 25 | - name: solr-storage 26 | mountPath: "/usr/local/solr-4.6.0/example/solr/collection1/data" 27 | subPath: "solr_res" 28 | volumes: 29 | - name: solr-storage 30 | persistentVolumeClaim: 31 | claimName: dataverse-pvc -------------------------------------------------------------------------------- 
/kubernetes/infrastructure/k8s/ingress/dataverse-dev-ingress.yml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Ingress 3 | metadata: 4 | name: "dataverse-dev-ingress-service" 5 | namespace: dataverse-eu 6 | annotations: 7 | kubernetes.io/ingress.global-static-ip-name: "dataverse-eu-dev-ingress" 8 | kubernetes.io/ingress.allow-http: "false" 9 | spec: 10 | tls: 11 | - secretName: letsencrypt-certs 12 | hosts: 13 | - dataverse-dev.cessda.eu 14 | rules: 15 | - http: 16 | paths: 17 | - backend: 18 | serviceName: dataverse-en 19 | servicePort: 8080 20 | - path: /.well-known/acme-challenge/* 21 | backend: 22 | serviceName: letsencrypt 23 | servicePort: 80 24 | -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/letsencrypt/dataverse-dev-letsencrypt-secret.yml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: letsencrypt-certs 5 | namespace: dataverse-eu 6 | type: Opaque -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/mailrelay/dataverse-dev-mailrelay-deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | namespace: dataverse-eu 5 | labels: 6 | app: cessda-dataverse-eu-mailrelay-dev 7 | name: cessda-dataverse-eu-mailrelay-dev 8 | spec: 9 | replicas: 1 10 | template: 11 | metadata: 12 | labels: 13 | app: cessda-dataverse-eu-mailrelay-dev 14 | spec: 15 | containers: 16 | - name: cessda-dataverse-eu-mailrelay-dev 17 | image: <>/mail-relay:latest 18 | ports: 19 | - containerPort: 25 20 | name: "smtp" 21 | restartPolicy: Always 22 | -------------------------------------------------------------------------------- 
/kubernetes/infrastructure/k8s/mailrelay/dataverse-dev-mailrelay-service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | namespace: dataverse-eu 5 | labels: 6 | app: cessda-dataverse-eu-mailrelay-dev 7 | name: cessda-dataverse-eu-mailrelay-dev 8 | spec: 9 | ports: 10 | - name: "smtp" 11 | port: 25 12 | targetPort: 25 13 | selector: 14 | app: cessda-dataverse-eu-mailrelay-dev 15 | -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/namespace/dataverse-dev-namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: dataverse-eu -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/persistentvolume/dataverse-dev-pv-pvc.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolume 3 | metadata: 4 | name: dataverse-pv 5 | namespace: dataverse-eu 6 | labels: 7 | name: dataverse-pv 8 | spec: 9 | storageClassName: manual 10 | capacity: 11 | storage: 10Gi 12 | accessModes: 13 | - ReadWriteMany 14 | gcePersistentDisk: 15 | fsType: "ext4" 16 | pdName: "dataverse-disk" 17 | 18 | --- 19 | kind: PersistentVolumeClaim 20 | apiVersion: v1 21 | metadata: 22 | name: dataverse-pvc 23 | namespace: dataverse-eu 24 | spec: 25 | storageClassName: manual 26 | accessModes: 27 | - ReadWriteMany 28 | resources: 29 | requests: 30 | storage: 310Gi 31 | selector: 32 | matchLabels: 33 | name: dataverse-pv -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/services/dataverse-dev-bridge.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: bridge 5 | namespace: 
dataverse-eu 6 | labels: 7 | component: bridge 8 | spec: 9 | ports: 10 | - port: 8592 11 | targetPort: 8592 12 | protocol: TCP 13 | name: api-port 14 | - port: 9285 15 | targetPort: 9285 16 | protocol: TCP 17 | name: admin-port 18 | selector: 19 | component: bridge 20 | 21 | -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/services/dataverse-dev-dataverse-en.yaml: -------------------------------------------------------------------------------- 1 | kind: Service 2 | apiVersion: v1 3 | metadata: 4 | name: dataverse-en 5 | namespace: dataverse-eu 6 | labels: 7 | component: dataverse-en 8 | spec: 9 | type: NodePort 10 | selector: 11 | component: dataverse-en 12 | ports: 13 | - name: "http" 14 | port: 8080 15 | targetPort: 8080 16 | protocol: TCP -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/services/dataverse-dev-postgres.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: postgres 5 | namespace: dataverse-eu 6 | labels: 7 | component: postgres 8 | spec: 9 | ports: 10 | - port: 5432 11 | targetPort: 5432 12 | protocol: TCP 13 | selector: 14 | app: postgres -------------------------------------------------------------------------------- /kubernetes/infrastructure/k8s/services/dataverse-dev-solr.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: solr 5 | namespace: dataverse-eu 6 | labels: 7 | component: solr 8 | spec: 9 | selector: 10 | component: solr 11 | ports: 12 | - port: 8983 13 | protocol: TCP 14 | targetPort: 8983 15 | -------------------------------------------------------------------------------- /postgresql/init.sql: -------------------------------------------------------------------------------- 1 | -- CREATE ROLE postgres LOGIN; 2 | 
-------------------------------------------------------------------------------- /postgresql/install.sh: -------------------------------------------------------------------------------- 1 | cd /opt/testdata 2 | ./scripts/deploy/phoenix.dataverse.org/prep 3 | ./db.sh 4 | ./install # modified from phoenix 5 | -------------------------------------------------------------------------------- /postgresql/testdata/doc/sphinx-guides/source/_static/util/createsequence.sql: -------------------------------------------------------------------------------- 1 | -- A script for creating a numeric identifier sequence, and an external 2 | -- stored procedure, for accessing the sequence from inside the application, 3 | -- in a non-hacky, JPA way. 4 | 5 | -- NOTE: 6 | 7 | -- 1. The database user name "dvnapp" is hard-coded here - it may 8 | -- need to be changed to match your database user name; 9 | 10 | -- 2. In the code below, the sequence starts with 1, but it can be adjusted by 11 | -- changing the MINVALUE as needed. 
12 | 13 | CREATE SEQUENCE datasetidentifier_seq 14 | INCREMENT 1 15 | MINVALUE 1 16 | MAXVALUE 9223372036854775807 17 | START 1 18 | CACHE 1; 19 | 20 | ALTER TABLE datasetidentifier_seq OWNER TO "dvnapp"; 21 | 22 | -- And now create a PostgreSQL FUNCTION, for JPA to 23 | -- access as a NamedStoredProcedure: 24 | 25 | CREATE OR REPLACE FUNCTION generateIdentifierAsSequentialNumber( 26 | OUT identifier int) 27 | RETURNS int AS 28 | $BODY$ 29 | BEGIN 30 | select nextval('datasetidentifier_seq') into identifier; 31 | END; 32 | $BODY$ 33 | LANGUAGE plpgsql; 34 | -------------------------------------------------------------------------------- /postgresql/testdata/doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql: -------------------------------------------------------------------------------- 1 | -- handle absence of CREATE OR REPLACE LANGUAGE for postgresql 8.4 or older 2 | -- courtesy of the postgres wiki: https://wiki.postgresql.org/wiki/CREATE_OR_REPLACE_LANGUAGE 3 | CREATE OR REPLACE FUNCTION make_plpgsql() 4 | RETURNS VOID 5 | LANGUAGE SQL 6 | AS $$ 7 | CREATE LANGUAGE plpgsql; 8 | $$; 9 | 10 | SELECT 11 | CASE 12 | WHEN EXISTS( 13 | SELECT 1 14 | FROM pg_catalog.pg_language 15 | WHERE lanname='plpgsql' 16 | ) 17 | THEN NULL 18 | ELSE make_plpgsql() END; 19 | 20 | DROP FUNCTION make_plpgsql(); 21 | 22 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/bin/list-dvs: -------------------------------------------------------------------------------- 1 | curl http://localhost:8080/api/dvs 2 | echo 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/create-dataset: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . 
scripts/search/export-keys 3 | SERVER=localhost:8181 4 | DATAVERSE_ALIAS=trees 5 | curl -s --insecure --data-binary "@doc/sphinx-guides/source/api/sword-atom-entry.xml" -H "Content-Type: application/atom+xml" -u $SPRUCEKEY: https://$SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ 6 | | xmllint -format - 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/create-dataset-805-rights-license: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | USERNAME=spruce 3 | PASSWORD=spruce 4 | SERVER=localhost:8181 5 | DATAVERSE_ALIAS=spruce 6 | curl -s --insecure --data-binary "@scripts/search/tests/data/dataset-trees1.xml" -H "Content-Type: application/atom+xml" -u $USERNAME:$PASSWORD https://$SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ 7 | | xmllint -format - 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/create-dataset-894-invisible-character: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | USERNAME=pete 3 | PASSWORD=pete 4 | DVN_SERVER=localhost:8181 5 | DATAVERSE_ALIAS=peteTop 6 | curl -s --insecure --data-binary "@scripts/api/data-deposit/data/atom-entry-study-894-invisible-character.xml" -H "Content-Type: application/atom+xml" -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ 7 | | xmllint -format - 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/create-dataset-899-expansion: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | USERNAME=pete 3 | PASSWORD=pete 4 | DVN_SERVER=localhost:8181 5 | DATAVERSE_ALIAS=peteTop 6 | curl -s --insecure --data-binary 
"@scripts/api/data-deposit/data/atom-entry-study-899-expansion.xml" -H "Content-Type: application/atom+xml" -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ 7 | | xmllint -format - 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/data/example.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/api/data-deposit/data/example.zip -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/dataset-field: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | if [ -z "$1" ]; then 3 | curl -s http://localhost:8080/api/datasetfield 4 | else 5 | curl -s http://localhost:8080/api/datasetfield/$1 6 | fi 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/delete-dataset: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | USERNAME=spruce 3 | PASSWORD=spruce 4 | DVN_SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` 7 | else 8 | GLOBAL_ID=$1 9 | #GLOBAL_ID=doi:10.5072/FK2/17 10 | fi 11 | #curl --insecure -X DELETE https://$DVN_SERVER/api/datasets/$DATABASE_ID?key=$USERNAME 12 | curl --insecure -i -X DELETE -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID 13 | #| xmllint -format - 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/delete-file: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | USERNAME=spruce 3 | PASSWORD=spruce 4 | DVN_SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | FILE_ID=`scripts/api/data-deposit/show-files | cut -d'/' -f1` 7 | #echo $FILE_ID 8 | #exit 9 | else 10 | FILE_ID=$1 11 | fi 12 | #curl --insecure -X DELETE https://$DVN_SERVER/api/datasets/$DATABASE_ID?key=$USERNAME 13 | curl --insecure -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/file/$FILE_ID 14 | #| xmllint -format - 15 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/edit-dataset-1430-edit-subject: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # not working right now: SWORD: "Replacing metadata for a dataset" broken, throws exception - https://github.com/IQSS/dataverse/issues/1554 3 | USERNAME=spruce 4 | PASSWORD=spruce 5 | SERVER=localhost:8181 6 | if [ -z "$1" ]; then 7 | GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` 8 | else 9 | GLOBAL_ID=$1 10 | #GLOBAL_ID=doi:10.5072/FK2/5555 11 | fi 12 | curl --insecure --upload-file "scripts/search/tests/data/dataset-trees1-edit.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ 13 | | xmllint -format - \ 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/edit-dataset-805-rights-license: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | USERNAME=spruce 3 | PASSWORD=spruce 4 | SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` 7 | else 8 | GLOBAL_ID=$1 9 | #GLOBAL_ID=doi:10.5072/FK2/5555 10 
| fi 11 | curl --insecure --upload-file "scripts/search/tests/data/dataset-trees1-edit.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ 12 | | xmllint -format - \ 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/get: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | USERNAME=pete 3 | PASSWORD=pete 4 | DVN_SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | echo "Please provide a URL to GET" 7 | exit 1 8 | fi 9 | curl --insecure -s -u $USERNAME:$PASSWORD $1 | xmllint -format - 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/list-datasets: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | USERNAME=spruce 3 | PASSWORD=spruce 4 | DVN_SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | DATAVERSE_ALIAS=spruce 7 | #DATAVERSE_ALIAS=root 8 | else 9 | DATAVERSE_ALIAS=$1 10 | fi 11 | curl --insecure -s -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ 12 | | xmllint -format - 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/publish-dataset: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | USERNAME=pete 3 | PASSWORD=pete 4 | DVN_SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` 7 | else 8 | GLOBAL_ID=$1 9 | #GLOBAL_ID=doi:10.5072/FK2/5555 10 | fi 11 | # We cat /dev/null so that contentLength is zero. 
This makes headersOnly true:: https://github.com/swordapp/JavaServer2.0/blob/sword2-server-1.0/src/main/java/org/swordapp/server/ContainerAPI.java#L338 12 | # 'to tell curl to read the format from stdin you write "@-"' -- http://curl.haxx.se/docs/manpage.html 13 | cat /dev/null | curl -s --insecure -X POST -H "In-Progress: false" --data-binary @- https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ 14 | | xmllint --format - 15 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/publish-dataverse: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | USERNAME=pete 3 | PASSWORD=pete 4 | DVN_SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | echo "Please supply a dataverse alias" 7 | exit 1 8 | else 9 | DATAVERSE_ALIAS=$1 10 | #DATAVERSE_ALIAS=peteTop 11 | fi 12 | cat /dev/null | curl -s --insecure -X POST -H "In-Progress: false" --data-binary @- https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/dataverse/$DATAVERSE_ALIAS \ 13 | | xmllint --format - 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/replace-dataset-metadata: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | USERNAME=pete 3 | PASSWORD=pete 4 | SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` 7 | else 8 | GLOBAL_ID=$1 9 | #GLOBAL_ID=doi:10.5072/FK2/5555 10 | fi 11 | curl --insecure --upload-file "scripts/search/tests/data/dataset-versioning03-setup.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ 12 | | xmllint -format - \ 13 | 
-------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/service-document: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . scripts/search/export-keys 3 | if [ -z "$1" ]; then 4 | HOSTNAME=localhost:8181 5 | else 6 | HOSTNAME=$1 7 | fi 8 | URL=https://$HOSTNAME/dvn/api/data-deposit/v1/swordv2/service-document 9 | echo Retrieving service document from $URL >&2 10 | OUTPUT=`curl -s --insecure -u $ADMINKEY: $URL` 11 | echo $OUTPUT 12 | echo $OUTPUT | xmllint -format - 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/show-atom-entry: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | USERNAME=pete 3 | PASSWORD=pete 4 | DVN_SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` 7 | else 8 | GLOBAL_ID=$1 9 | #GLOBAL_ID=doi:10.5072/FK2/5555 10 | fi 11 | curl --insecure -s -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ 12 | | xmllint -format - 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/show-files: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | #scripts/api/data-deposit/show-statement | xpath "//entry/content/@*[name()='type' or name()='src']" 3 | scripts/api/data-deposit/show-statement | xpath '//entry/id/text()' | cut -d'/' -f11,12,13 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/show-statement: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | USERNAME=spruce 3 | PASSWORD=spruce 4 | DVN_SERVER=localhost:8181 
5 | if [ -z "$1" ]; then 6 | GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` 7 | else 8 | GLOBAL_ID=$1 9 | #GLOBAL_ID=doi:10.5072/FK2/5555 10 | fi 11 | curl --insecure -s https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/statement/study/$GLOBAL_ID \ 12 | | xmllint -format - \ 13 | #| xpath '//entry/title' 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/unsupported-download-files: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | USERNAME=pete 3 | PASSWORD=pete 4 | DVN_SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` 7 | else 8 | GLOBAL_ID=$1 9 | #GLOBAL_ID=doi:10.5072/FK2/5555 10 | fi 11 | curl -s --insecure https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/$GLOBAL_ID \ 12 | | xmllint -format - 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data-deposit/upload-file: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | USERNAME=spruce 3 | PASSWORD=spruce 4 | DVN_SERVER=localhost:8181 5 | if [ -z "$1" ]; then 6 | EDIT_MEDIA_URL=`scripts/api/data-deposit/list-datasets | xpath 'string(//link/@href)' 2>/dev/null` 7 | else 8 | EDIT_MEDIA_URL=$1 9 | fi 10 | curl -s --insecure --data-binary @scripts/search/data/binary/trees.zip -H "Content-Disposition: filename=trees.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" -u $USERNAME:$PASSWORD $EDIT_MEDIA_URL \ 11 | | xmllint -format - 12 | #curl -s --insecure --data-binary @scripts/search/data/binary/trees.zip -H "Content-Disposition: filename=trees.zip" -H "Content-Type: application/zip" -H "Packaging: 
http://purl.org/net/sword/package/SimpleZip" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/doi:10.5072/FK2/19 \ 13 | #| xmllint -format - 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/authentication-providers/base-oauth.json: -------------------------------------------------------------------------------- 1 | { 2 | "id":"base-oauth", 3 | "factoryAlias":"oauth2", 4 | "title":"sample base definition file for oauth2 providers.", 5 | "subtitle":"A base file, though - do not run this.", 6 | "factoryData":"type:idOfOAuthService | name1: value1|name2: value2 value2.1 value 2.1.1 | name: value42", 7 | "enabled":true 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/authentication-providers/builtin.json: -------------------------------------------------------------------------------- 1 | { 2 | "id":"builtin", 3 | "factoryAlias":"BuiltinAuthenticationProvider", 4 | "title":"Dataverse Local", 5 | "subtitle":"Datavers' Internal Authentication provider", 6 | "factoryData":"", 7 | "enabled":true 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/authentication-providers/echo-dignified.json: -------------------------------------------------------------------------------- 1 | { 2 | "id":"echo-dignified", 3 | "factoryAlias":"Echo", 4 | "title":"Dignified Echo provider", 5 | "subtitle":"Approves everyone, based on their credentials, and adds some flair", 6 | "factoryData":"Sir,Esq.", 7 | "enabled":true 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/authentication-providers/orcid-sandbox.json: -------------------------------------------------------------------------------- 1 | { 2 | "id":"orcid-v2-sandbox", 3 | 
"factoryAlias":"oauth2", 4 | "title":"ORCID Sandbox", 5 | "subtitle":"ORCiD - sandbox (v2)", 6 | "factoryData":"type: orcid | userEndpoint: https://api.sandbox.orcid.org/v2.0/{ORCID}/person | clientId: APP-HIV99BRM37FSWPH6 | clientSecret: ee844b70-f223-4f15-9b6f-4991bf8ed7f0", 7 | "enabled":true 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/dataset-bad-missingInitialVersion.json: -------------------------------------------------------------------------------- 1 | { 2 | "authority": "anAuthority", 3 | "identifier": "dataset-one", 4 | "persistentUrl": "http://dx.doi.org/10.5072/FK2/9", 5 | "protocol": "chadham-house-rule" 6 | } -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/dv-pete-sub-normal.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "peteSubNormal", 3 | "name": "Pete's public place", 4 | "affiliation": "Affiliation value", 5 | "permissionRoot": false, 6 | "description": "Where Pete stores normal data", 7 | "dataverseContacts": [ 8 | { 9 | "contactEmail": "pete@mailinator.com" 10 | } 11 | ], 12 | "dataverseSubjects": ["Law"] 13 | } 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/dv-pete-sub-restricted.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "peteSubRestricted", 3 | "name": "Pete's restricted data", 4 | "affiliation": "Affiliation value", 5 | "permissionRoot": false, 6 | "description": "Where Pete stores restricted data, to be shared in moderation", 7 | "dataverseContacts": [ 8 | { 9 | "contactEmail": "pete@mailinator.com" 10 | } 11 | ], 12 | "dataverseSubjects": ["Chemistry"] 13 | } 14 | -------------------------------------------------------------------------------- 
/postgresql/testdata/scripts/api/data/dv-pete-sub-secret.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "peteSubSecret", 3 | "name": "Pete's secrets", 4 | "affiliation": "Affiliation value", 5 | "permissionRoot": true, 6 | "description": "Where Pete stores secret data", 7 | "dataverseContacts": [ 8 | { 9 | "contactEmail": "pete@mailinator.com" 10 | } 11 | ], 12 | "dataverseSubjects": ["Astronomy and Astrophysics"] 13 | } 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/dv-pete-top.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "peteTop", 3 | "name": "Top dataverse of Pete", 4 | "affiliation": "Affiliation value", 5 | "permissionRoot": false, 6 | "description": "Pete's top level dataverse", 7 | "dataverseContacts": [ 8 | { 9 | "contactEmail": "pete@mailinator.com" 10 | } 11 | ], 12 | "dataverseSubjects": ["Arts and Humanities"] 13 | } 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/dv-root.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "root", 3 | "name": "Root", 4 | "permissionRoot": false, 5 | "facetRoot": true, 6 | "description": "The root dataverse.", 7 | "dataverseContacts": [ 8 | { 9 | "contactEmail": "root@mailinator.com" 10 | } 11 | ], 12 | "dataverseSubjects": ["ALL"] 13 | } 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/dv-uma-deletable.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "umaDeletable", 3 | "name": "Uma's deletable", 4 | "affiliation": "Affiliation value", 5 | "permissionRoot": true, 6 | "description": "Forgettable, deletable, temporary.", 7 | "dataverseContacts": [ 8 | { 9 | 
"contactEmail": "Uma@mailinator.com" 10 | } 11 | ], 12 | "dataverseSubjects": ["Business and Management"] 13 | } 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/dv-uma-sub1.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "umaSub1", 3 | "name": "Uma's first", 4 | "affiliation": "Affiliation value", 5 | "permissionRoot": false, 6 | "description": "Some data of Uma", 7 | "dataverseContacts": [ 8 | { 9 | "contactEmail": "Uma@mailinator.com" 10 | } 11 | ], 12 | "dataverseSubjects": ["Medicine, Health & Life Sciences"] 13 | } 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/dv-uma-sub2.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "umaSub2", 3 | "name": "Uma's restricted", 4 | "affiliation": "Affiliation value", 5 | "permissionRoot": true, 6 | "description": "Pete can't get here", 7 | "dataverseContacts": [ 8 | { 9 | "contactEmail": "Uma@mailinator.com" 10 | } 11 | ], 12 | "dataverseSubjects": ["Engineering"] 13 | } 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/dv-uma-top.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "umaTop", 3 | "name": "Top dataverse of Uma", 4 | "affiliation": "Affiliation value", 5 | "permissionRoot": false, 6 | "description": "Uma's top level dataverse", 7 | "dataverseContacts": [ 8 | { 9 | "contactEmail": "Uma@mailinator.com" 10 | } 11 | ], 12 | "dataverseSubjects": ["Mathematical Sciences"] 13 | } 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/explicit-group-2nd.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"description":"The second explicit group", 3 | "displayName":"Explicit Group number two", 4 | "aliasInOwner":"EG:II" 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/explicit-group-first-edit.json: -------------------------------------------------------------------------------- 1 | { 2 | "description":"This is the description field", 3 | "displayName":"Explicit Group number one (edited)", 4 | "aliasInOwner":"EG-1" 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/explicit-group-first.json: -------------------------------------------------------------------------------- 1 | { 2 | "description":"This is the description field", 3 | "displayName":"Explicit Group number one", 4 | "aliasInOwner":"EG-1" 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/ipGroup-all-ipv4.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"all-ipv4", 3 | "name":"IP group to match all IPv4 addresses", 4 | "ranges" : [["0.0.0.0", "255.255.255.255"]] 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/ipGroup-all.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"ipGroup3", 3 | "name":"IP group to match all IPv4 and IPv6 addresses", 4 | "ranges" : [["0.0.0.0", "255.255.255.255"], 5 | ["::", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"]] 6 | } 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/ipGroup-localhost.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"localhost", 3 | "name":"Localhost connections", 4 | "addresses": [ "::1", "127.0.0.1" ] 5 | } 6 | 
-------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/ipGroup-single-IPv4.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"singleIPv4", 3 | "name":"Single IPv4", 4 | "addresses" : ["128.0.0.7"] 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/ipGroup-single-IPv6.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"singleIPv6", 3 | "name":"Single IPv6", 4 | "addresses" : ["aa:bb:cc:dd:ee:ff::1"] 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/ipGroup1.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"ipGroup1", 3 | "name":"The first IP Group", 4 | "ranges" : [["60.0.0.0", "60.0.0.255"], 5 | ["128.0.0.0", "129.0.255.255"], 6 | ["ff:abcd:eff::ffff", "ff:abcd:eff::0"]] 7 | } -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/ipGroup2.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"ipGroup2", 3 | "name":"The second IP Group", 4 | "ranges" : [["207.0.0.0", "207.0.0.255"], 5 | ["128.0.0.0", "129.0.255.255"], 6 | ["dd:2:2:2:2:2:2:2","dd:a:a:a:a:a:a:a"] 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/ipGroupDuplicate-v1.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"ipGroup-dup", 3 | "name":"IP Group with duplicate files (1)", 4 | "description":"This is the FIRST version of the group", 5 | "ranges" : [["60.0.0.0", "60.0.0.255"], 6 | ["60::1", "60::ffff"]] 7 | } 8 | 
-------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/ipGroupDuplicate-v2.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"ipGroup-dup", 3 | "name":"IP Group with duplicate files-v2", 4 | "description":"This is the second version of the group", 5 | "ranges" : [["70.0.0.0", "70.0.0.255"], 6 | ["70::1", "70::ffff"]] 7 | } 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-admin.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"admin", 3 | "name":"Admin", 4 | "description":"A person who has all permissions for dataverses, datasets, and files.", 5 | "permissions":[ 6 | "ALL" 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-assign-eg1-curator.json: -------------------------------------------------------------------------------- 1 | { 2 | "assignee": "&explicit/1-EG-1", 3 | "role": "curator" 4 | } 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-assign-localhost-curator.json: -------------------------------------------------------------------------------- 1 | { 2 | "assignee": "&ip/localhost", 3 | "role": "curator" 4 | } 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-assign.json: -------------------------------------------------------------------------------- 1 | { 2 | "assignee": "@gabbi", 3 | "role": "curator" 4 | } 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-assignee-list.json: -------------------------------------------------------------------------------- 1 | 
["@admin",":guest","&ip/ipGroup1",":authenticated-users"] 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-contrib.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "contrib", 3 | "name": "Dataverse Contributor", 4 | "description": "Someone that can add data to a dataverse, but not remove it.", 5 | "permissions": [ 6 | "Access", 7 | "AccessRestrictedMetadata", 8 | "UndoableEdit", 9 | "EditMetadata" 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-creator.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "creator", 3 | "name": "Creator", 4 | "description": "Allows creation of DataSet/Verse", 5 | "permissions": [ 6 | "CreateDataverse", 7 | "CreateDataset" 8 | ] 9 | } 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-curator.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"curator", 3 | "name":"Curator", 4 | "description":"For datasets, a person who can edit License + Terms, edit Permissions, and publish datasets.", 5 | "permissions":[ 6 | "ViewUnpublishedDataset", 7 | "EditDataset", 8 | "DownloadFile", 9 | "DeleteDatasetDraft", 10 | "PublishDataset", 11 | "ManageDatasetPermissions", 12 | "AddDataverse", 13 | "AddDataset", 14 | "ViewUnpublishedDataverse" 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-dsContributor.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "dsContributor", 3 | "name": "Dataset Creator", 4 | "description": "A person who can add datasets within a dataverse.", 5 | "permissions": [ 6 | 
"AddDataset" 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-dvContributor.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "dvContributor", 3 | "name": "Dataverse Creator", 4 | "description": "A person who can add subdataverses within a dataverse.", 5 | "permissions": [ 6 | "AddDataverse" 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-editor.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"editor", 3 | "name":"Contributor", 4 | "description":"For datasets, a person who can edit License + Terms, and then submit them for review.", 5 | "permissions":[ 6 | "ViewUnpublishedDataset", 7 | "EditDataset", 8 | "DownloadFile", 9 | "DeleteDatasetDraft" 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-filedownloader.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"fileDownloader", 3 | "name":"File Downloader", 4 | "description":"A person who can download a published file.", 5 | "permissions":[ 6 | "DownloadFile" 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-fullContributor.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "fullContributor", 3 | "name": "Dataverse + Dataset Creator", 4 | "description": "A person who can add subdataverses and datasets within a dataverse.", 5 | "permissions": [ 6 | "AddDataverse", 7 | "AddDataset" 8 | ] 9 | } 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-guest.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "alias": "guest-role", 3 | "name": "What guests can do", 4 | "description": "Guests can browse", 5 | "permissions": [ 6 | "Discover" 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-manager.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"manager", 3 | "name":"Curator", 4 | "description":"For datasets, a person who can add a dataset, edit License + Terms, and submit datasets for review.", 5 | "permissions":[ 6 | "ViewUnpublishedDataset", 7 | "EditDataset", 8 | "DownloadFile", 9 | "DeleteDatasetDraft" 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/role-member.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"member", 3 | "name":"Member", 4 | "description":"A person who can view both unpublished dataverses and datasets.", 5 | "permissions":[ 6 | "ViewUnpublishedDataset", 7 | "ViewUnpublishedDataverse", 8 | "DownloadFile" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/shibGroupHarvard.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "All Harvard PIN/Shibboleth Users", 3 | "attribute": "Shib-Identity-Provider", 4 | "pattern": "https://fed.huit.harvard.edu/idp/shibboleth" 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/shibGroupMit.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "All MIT Shibboleth Users", 3 | "attribute": "Shib-Identity-Provider", 4 | "pattern": "urn:mace:incommon:mit.edu" 5 | } 6 | 
-------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/shibGroupTestShib.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "All testshib.org Shibboleth Users", 3 | "attribute": "Shib-Identity-Provider", 4 | "pattern": "https://idp.testshib.org/idp/shibboleth" 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/user-admin.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstName":"Dataverse", 3 | "lastName":"Admin", 4 | "userName":"dataverseAdmin", 5 | "affiliation":"Dataverse.org", 6 | "position":"Admin", 7 | "email":"dataverse@mailinator.com" 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/userCathy.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstName":"Cathy", 3 | "lastName":"Collaborator", 4 | "userName":"cathy", 5 | "affiliation":"mid", 6 | "position":"Data Scientist", 7 | "email":"cathy@malinator.com", 8 | "phone":"(888) 888-8888" 9 | } 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/userGabbi.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstName":"Gabbi", 3 | "lastName":"Guest", 4 | "userName":"gabbi", 5 | "affiliation":"low", 6 | "position":"A Guest", 7 | "email":"gabbi@malinator.com", 8 | "phone":"(888) 888-8888" 9 | } -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/userNick.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstName":"Nick", 3 | "lastName":"NSA", 4 | "userName":"nick", 5 | "affiliation":"gov", 6 | "position":"Signals 
Intelligence", 7 | "email":"nick@malinator.com", 8 | "phone":"(888) 888-8888" 9 | } 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/userPete.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstName":"Pete", 3 | "lastName":"Privileged", 4 | "userName":"pete", 5 | "affiliation":"Top", 6 | "position":"The Boss", 7 | "email":"pete@malinator.com", 8 | "phone":"(888) 888-8888" 9 | } -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/userUma.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstName":"Uma", 3 | "lastName":"Underprivileged", 4 | "userName":"uma", 5 | "affiliation":"mid", 6 | "position":"The Intern", 7 | "email":"Uma@malinator.com", 8 | "phone":"(888) 888-8888" 9 | } -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/workflows/internal-no-pause-workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Internal steps only, no with pause", 3 | "steps": [ 4 | { 5 | "provider":":internal", 6 | "stepType":"log", 7 | "parameters": { 8 | "step":1, 9 | "stepName":"first step" 10 | } 11 | }, 12 | { 13 | "provider":":internal", 14 | "stepType":"log", 15 | "parameters": { 16 | "number":42, 17 | "anotherMessage": "This is the last step before releasing." 
18 | } 19 | } 20 | ] 21 | } 22 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/data/workflows/internal-pause-workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Internal steps with pause", 3 | "steps": [ 4 | { 5 | "provider":":internal", 6 | "stepType":"log", 7 | "parameters": { 8 | "step":1, 9 | "stepName":"first step" 10 | } 11 | }, 12 | { 13 | "provider":":internal", 14 | "stepType":"pause", 15 | "parameters": { 16 | "paramName":"parameter value with a longer name." 17 | } 18 | }, 19 | { 20 | "provider":":internal", 21 | "stepType":"log", 22 | "parameters": { 23 | "number":42, 24 | "anotherMessage": "This is the last step before releasing." 25 | } 26 | } 27 | ] 28 | } 29 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/download/.gitignore: -------------------------------------------------------------------------------- 1 | files.tsv 2 | files 3 | downloaded-files 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/download/dbquery: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | if [ -z "$1" ]; then 3 | echo "No start date in YYYY-MM-DD format provided." 4 | exit 1 5 | else 6 | if [ -z "$2" ]; then 7 | echo "No end date in YYYY-MM-DD format provided." 
8 | exit 1 9 | fi 10 | START_DATE=$1 11 | END_DATE=$2 12 | fi 13 | psql -h $DB_SERVER -U $DB_USER -p $DB_PORT $DB_NAME -F $'\t' --no-align --pset footer -c "select dvobject.id, dvobject.createdate, dvobject.owner_id, datafile.* from dvobject, datafile where dvobject.id=datafile.id and dvobject.dtype='DataFile' and dvobject.createdate>to_date('$START_DATE','YYYY-MM-DD') and dvobject.createdate files.tsv 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/download/download: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | DOWNLOAD_DIR=downloaded-files 3 | rm -rf $DOWNLOAD_DIR 4 | mkdir -p $DOWNLOAD_DIR && \ 5 | cat files | while read i; do 6 | echo "Downloding file id $i..." 7 | cd $DOWNLOAD_DIR && mkdir $i && cd $i && \ 8 | curl -s -k -O -J https://$DATAVERSE_SERVER/api/access/datafile/$i?key=$API_TOKEN && \ 9 | cd ../.. 10 | done 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/download/tsv2files: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | cut -f1 files.tsv | tail -n +2 > files 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/py_api_wrapper/msg_util.py: -------------------------------------------------------------------------------- 1 | import sys 2 | def msg(s): print s 3 | def dashes(char='-'): msg(40*char) 4 | def msgt(s): dashes(); msg(s); dashes() 5 | def msgx(s): dashes('\/'); msg(s); dashes('\/'); sys.exit(0) 6 | 7 | """ 8 | 9 | curl -H "Content-type:application/json" -X POST -d user_params.json "http://dvn-build.hmdc.harvard.edu/api/builtin-users?password=linus" 10 | """ 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/setup-datasetfields.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | curl http://localhost:8080/api/admin/datasetfield/loadNAControlledVocabularyValue 3 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/citation.tsv -H "Content-type: text/tab-separated-values" 4 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/geospatial.tsv -H "Content-type: text/tab-separated-values" 5 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/social_science.tsv -H "Content-type: text/tab-separated-values" 6 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values" 7 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values" 8 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values" 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/setup-identity-providers.sh: -------------------------------------------------------------------------------- 1 | SERVER=http://localhost:8080/api 2 | 3 | # Setup the authentication providers 4 | echo "Setting up internal user provider" 5 | curl -H "Content-type:application/json" -d @data/authentication-providers/builtin.json http://localhost:8080/api/admin/authenticationProviders/ 6 | 7 | #echo "Setting up Echo providers" 8 | #curl -H "Content-type:application/json" -d @data/authentication-providers/echo.json http://localhost:8080/api/admin/authenticationProviders/ 9 | #curl -H "Content-type:application/json" -d @data/authentication-providers/echo-dignified.json 
http://localhost:8080/api/admin/authenticationProviders/ 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/setup-optional-publish-terms.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | SERVER=http://localhost:8080/api 5 | 6 | echo "- Enabling Publish Popup Custom Text" 7 | curl -s -X PUT -d true "$SERVER/admin/settings/:DatasetPublishPopupCustomTextOnAllVersions" 8 | curl -X PUT -d "By default datasets are published with the CC0-“Public Domain Dedication” waiver. Learn more about the CC0 waiver here.

To publish with custom Terms of Use, click the Cancel button and go to the Terms tab for this dataset." $SERVER/admin/settings/:DatasetPublishPopupCustomText -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/setup-optional.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | command -v jq >/dev/null 2>&1 || { echo >&2 '`jq` ("sed for JSON") is required, but not installed. Download the binary for your platform from http://stedolan.github.io/jq/ and make sure it is in your $PATH (/usr/bin/jq is fine) and executable with `sudo chmod +x /usr/bin/jq`. On Mac, you can install it with `brew install jq` if you use homebrew: http://brew.sh . Aborting.'; exit 1; } 3 | 4 | # OPTIONAL USERS AND DATAVERSES 5 | TMP=setup.temp 6 | ./setup-users.sh | tee $TMP 7 | 8 | PETE=$(cat $TMP | grep :result: | grep Pete | cut -d: -f4) 9 | UMA=$(cat $TMP | grep :result: | grep Uma | cut -d: -f4) 10 | 11 | ./setup-dvs.sh $PETE $UMA 12 | 13 | rm $TMP 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/api/update-datasetfields.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/citation.tsv -H "Content-type: text/tab-separated-values" 3 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/geospatial.tsv -H "Content-type: text/tab-separated-values" 4 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/social_science.tsv -H "Content-type: text/tab-separated-values" 5 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values" 6 | curl 
http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values" 7 | curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values" -------------------------------------------------------------------------------- /postgresql/testdata/scripts/backup/run_backup/backup.py: -------------------------------------------------------------------------------- 1 | import io 2 | import re 3 | #import backup_swift #TODO 4 | from backup_ssh import (backup_file_ssh) 5 | from config import (ConfigSectionMap) 6 | 7 | def backup_file (file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size): 8 | storage_type = ConfigSectionMap("Backup")['storagetype'] 9 | 10 | if storage_type == 'swift': 11 | #backup_file_swift(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size) 12 | raise NotImplementedError('no backup_swift yet') 13 | elif storage_type == 'ssh': 14 | backup_file_ssh(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size) 15 | else: 16 | raise ValueError("only ssh/sftp and swift are supported as backup storage media") 17 | 18 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/backup/run_backup/backup_swift.py: -------------------------------------------------------------------------------- 1 | import io 2 | import re 3 | import swiftclient 4 | from config import (ConfigSectionMap) 5 | 6 | def backup_file_swift (file_input, dataset_authority, dataset_identifier, storage_identifier): 7 | auth_url = ConfigSectionMap("Backup")['swiftauthurl'] 8 | auth_version = ConfigSectionMap("Backup")['swiftauthversion'] 9 | user = ConfigSectionMap("Backup")['swiftuser'] 10 | tenant = 
ConfigSectionMap("Backup")['swifttenant'] 11 | key = ConfigSectionMap("Backup")['swiftkey'] 12 | 13 | conn = swiftclient.Connection( 14 | authurl=auth_url, 15 | user=user, 16 | key=key, 17 | tenant_name=tenant, 18 | auth_version=auth_version 19 | ) 20 | 21 | container_name = dataset_authority + ":" + dataset_identifier 22 | conn.put(container_name) 23 | 24 | conn.put_object(container_name, storage_identifier, file_input) 25 | 26 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/backup/run_backup/backupdb.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE datafilestatus ( 2 | id integer NOT NULL, 3 | datasetidentifier character varying(255), 4 | storageidentifier character varying(255), 5 | status character varying(255), 6 | createdate timestamp without time zone, 7 | lastbackuptime timestamp without time zone, 8 | lastbackupmethod character varying(16) 9 | ); 10 | 11 | ALTER TABLE datafilestatus OWNER TO dvnapp; 12 | 13 | CREATE SEQUENCE datafilestatus_id_seq 14 | START WITH 1 15 | INCREMENT BY 1 16 | NO MINVALUE 17 | NO MAXVALUE 18 | CACHE 1; 19 | 20 | 21 | ALTER TABLE datafilestatus_id_seq OWNER TO dvnapp; 22 | 23 | ALTER SEQUENCE datafilestatus_id_seq OWNED BY datafilestatus.id; 24 | 25 | ALTER TABLE ONLY datafilestatus 26 | ADD CONSTRAINT datafilestatus_pkey PRIMARY KEY (id); 27 | 28 | ALTER TABLE ONLY datafilestatus ALTER COLUMN id SET DEFAULT nextval('datafilestatus_id_seq'::regclass); 29 | 30 | ALTER TABLE ONLY datafilestatus 31 | ADD CONSTRAINT datafilestatus_storageidentifier_key UNIQUE (storageidentifier); -------------------------------------------------------------------------------- /postgresql/testdata/scripts/backup/run_backup/config.py: -------------------------------------------------------------------------------- 1 | import ConfigParser 2 | import sys 3 | Config = ConfigParser.ConfigParser() 4 | Config.read("config.ini") 5 | 6 | def 
ConfigSectionMap(section): 7 | dict1 = {} 8 | options = Config.options(section) 9 | for option in options: 10 | try: 11 | dict1[option] = Config.get(section, option) 12 | if dict1[option] == -1: 13 | sys.stderr.write("skip: %s\n" % option) 14 | except: 15 | print("exception on %s!" % option) 16 | dict1[option] = None 17 | return dict1 18 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/backup/run_backup/email_notification.py: -------------------------------------------------------------------------------- 1 | from config import (ConfigSectionMap) 2 | from subprocess import Popen, PIPE, STDOUT 3 | from time import (time) 4 | from datetime import (datetime) 5 | 6 | def send_notification(text): 7 | try: 8 | notification_address = ConfigSectionMap("Notifications")['email'] 9 | except: 10 | notification_address = None 11 | 12 | if (notification_address is None): 13 | raise ValueError('Notification email address is not configured') 14 | 15 | nowdate_str = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M') 16 | subject_str = ('Dataverse datafile backup report [%s]' % nowdate_str) 17 | 18 | p = Popen(['mail','-s',subject_str,notification_address], stdout=PIPE, stdin=PIPE, stderr=PIPE) 19 | stdout_data = p.communicate(input=text)[0] 20 | 21 | def main(): 22 | send_notification('backup report: test, please disregard') 23 | 24 | if __name__ == "__main__": 25 | main() 26 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/backup/run_backup/requirements.txt: -------------------------------------------------------------------------------- 1 | # python2 requirements 2 | 3 | psycopg2 4 | boto3 5 | paramiko 6 | # TODO: where to get `swiftclient` from 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/backup/run_backup/storage_filesystem.py: 
-------------------------------------------------------------------------------- 1 | import io 2 | import re 3 | from config import (ConfigSectionMap) 4 | 5 | def open_storage_object_filesystem(dataset_authority, dataset_identifier, object_location, is_tabular_data): 6 | filesystem_directory = ConfigSectionMap("Repository")['filesystemdirectory'] 7 | if (is_tabular_data is not None): 8 | object_location += ".orig" 9 | file_path = filesystem_directory+"/"+dataset_authority+"/"+dataset_identifier+"/"+object_location 10 | byte_stream = io.open(file_path, "rb") 11 | return byte_stream 12 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/backup/run_backup/storage_s3.py: -------------------------------------------------------------------------------- 1 | import io 2 | import re 3 | import boto3 4 | 5 | def open_storage_object_s3(dataset_authority, dataset_identifier, object_location, is_tabular_data): 6 | s3 = boto3.resource('s3') 7 | bucket_name,object_name = object_location.split(":",1) 8 | key = dataset_authority + "/" + dataset_identifier + "/" + object_name; 9 | if (is_tabular_data is not None): 10 | key += ".orig" 11 | s3_obj = s3.Object(bucket_name=bucket_name, key=key) 12 | # "Body" is a byte stream associated with the object: 13 | return s3_obj.get()['Body'] 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/drop-all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | PSQL=psql 3 | DB_NAME=dvndb 4 | SQL_FILENAME=dropall.sql 5 | 6 | $PSQL $DB_NAME -t -c"SELECT 'drop table \"' || tablename || '\" cascade;' FROM pg_tables WHERE schemaname='public';" > $SQL_FILENAME 7 | $PSQL $DB_NAME -a -f $SQL_FILENAME 8 | rm $SQL_FILENAME 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/drop-create.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Drops and creates the database. Assumes pg_dump and psql are in $PATH, and that the db does not need password. 4 | DUMP=pg_dump 5 | PSQL=psql 6 | DB=dvndb 7 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 8 | 9 | $DUMP -s $DB > temp-schema.sql 10 | $PSQL -d $DB -f $DIR/drop-all.sql 11 | $PSQL -d $DB -f temp-schema.sql 12 | rm temp-schema.sql -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/facetlist.sql: -------------------------------------------------------------------------------- 1 | -- default facets defined in https://redmine.hmdc.harvard.edu/issues/3490 2 | -- show selected facets by displayorder 3 | SELECT title,name,datasetfield.id FROM dataversefacet, datasetfield WHERE dataversefacet.datasetfield_id = datasetfield.id ORDER BY dataversefacet.displayorder; 4 | -- more detail 5 | -- SELECT dataversefacet.id, title, name, datasetfield.id, dataversefacet.displayorder, dataverse_id FROM dataversefacet, datasetfield WHERE dataversefacet.datasetfield_id = datasetfield.id ORDER BY displayorder; 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/fedora/rebuild-and-test: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | GLASSFISH_HOME=$HOME/tools/devguide-gf4/glassfish4 3 | ASADMIN=$GLASSFISH_HOME/glassfish/bin/asadmin 4 | $ASADMIN stop-domain 5 | psql -U postgres -c 'DROP DATABASE "dvnDb"' 6 | scripts/search/clear 7 | psql -U postgres -c 'CREATE DATABASE "dvnDb" WITH OWNER = "dvnApp"' 8 | mvn package 9 | $ASADMIN start-domain 10 | # should probably use this instead: https://maven-glassfish-plugin.java.net/deploy-mojo.html 11 | cp target/dataverse-4.0.war $GLASSFISH_HOME/glassfish/domains/domain1/autodeploy 12 | sleep 30 13 | psql -U postgres dvnDb -f 
scripts/database/reference_data.sql 14 | cd scripts/api 15 | ./datasetfields.sh 16 | ./setup-users.sh 17 | ./setup-dvs.sh 18 | cd ../.. 19 | scripts/search/tests/permissions 20 | scripts/search/tests/delete-dataverse 21 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/convert: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | sed -i -e "s/postgres/$USER/" /tmp/dataverse_db.sql 3 | sed -i -e 's/dvnapp/dataverse_app/' /tmp/dataverse_db.sql 4 | sed -i -e 's/dvn-vm7.hmdc.harvard.edu:8983/localhost:8983/' /tmp/dataverse_db.sql 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/create-database: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/psql -c 'CREATE DATABASE "dataverse_db" WITH OWNER = "dataverse_app"' template1 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/create-role: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/psql -c "CREATE ROLE dataverse_app UNENCRYPTED PASSWORD 'secret' NOSUPERUSER CREATEDB CREATEROLE NOINHERIT LOGIN" template1 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/create-role-superuser: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # so you don't have to sudo to postgres to create roles, etc. 
3 | ~/.homebrew/bin/psql -c "CREATE ROLE $USER UNENCRYPTED PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN REPLICATION" template1 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/custom-build-number: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | if [ -z "$1" ]; then 3 | BRANCH_COMMIT=$(git rev-parse --abbrev-ref HEAD)-$(git log --oneline | head -1 | awk '{print $1}') 4 | echo "No custom build number specified. Using $BRANCH_COMMIT" 5 | echo "build.number=$BRANCH_COMMIT" > src/main/java/BuildNumber.properties 6 | else 7 | echo "build.number=$@" > src/main/java/BuildNumber.properties 8 | fi 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/delete-all: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | /Applications/NetBeans/glassfish4/glassfish/bin/asadmin stop-domain 3 | rm -rf /Applications/NetBeans/glassfish4/glassfish/domains/domain1/generated 4 | scripts/database/homebrew/drop-database 5 | scripts/search/clear 6 | rm -rf ~/dataverse/files 7 | scripts/database/homebrew/create-database 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/devinstall: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | cd scripts/installer 3 | export DB_NAME=dataverse_db 4 | export DB_PORT=5432 5 | export DB_HOST=localhost 6 | export DB_USER=dataverse_app 7 | export DB_PASS=secret 8 | export RSERVE_HOST=localhost 9 | export RSERVE_PORT=6311 10 | export RSERVE_USER=rserve 11 | export RSERVE_PASS=rserve 12 | export SMTP_SERVER=localhost 13 | export HOST_ADDRESS=`hostname` 14 | export FILES_DIR=$HOME/dataverse/files 15 | export MEM_HEAP_SIZE=2048 16 | export GLASSFISH_DOMAIN=domain1 17 | 
export GLASSFISH_ROOT=/Applications/NetBeans/glassfish4 18 | cp pgdriver/postgresql-9.1-902.jdbc4.jar $GLASSFISH_ROOT/glassfish/lib 19 | cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf 20 | ./glassfish-setup.sh 21 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/drop-database: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/psql -c 'DROP DATABASE "dataverse_db"' template1 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/drop-role: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/psql -c "DROP ROLE dataverse_app" template1 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/dump: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/pg_dump dataverse_db -f /tmp/dataverse_db.sql 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/keys2tmp: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | DIR=/tmp/keys 3 | mkdir -p $DIR 4 | key2tmp () { 5 | #export $2=`grep apiToken /tmp/setup-all.sh.out | grep $1 | jq .data.apiToken | grep -v null | sed s/\"//g` 6 | echo `grep apiToken /tmp/setup-all.sh.out | grep $1 | jq .data.apiToken | grep -v null | sed s/\"//g` > $DIR/$1 7 | } 8 | key2tmp pete PETEKEY 9 | key2tmp uma UMAKEY 10 | key2tmp gabbi GABBIKEY 11 | key2tmp cathy CATHYKEY 12 | key2tmp nick NICKKEY 13 | #echo "pete's key: $PETEKEY" 14 | #echo "uma's key: $UMAKEY" 15 | #echo "gabbi's key: $GABBIKEY" 16 | #echo "cathy's key: $CATHYKEY" 17 | #echo "nick's key: 
$NICKKEY" 18 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/kill9glassfish: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | kill -9 `jps | grep ASMain | awk '{print $1}'` 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/rebuild-and-test: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | scripts/database/homebrew/run-post-create-post-deploy 3 | echo "Publishing root dataverse" 4 | scripts/search/tests/publish-dataverse-root 5 | echo "---" 6 | echo "Creating search users" 7 | scripts/search/populate-users > /dev/null 8 | scripts/search/create-users > /dev/null 9 | scripts/search/tests/grant-authusers-add-on-root 10 | scripts/search/tests/create-all-and-test 11 | #scripts/search/tests/create-saved-search-and-test 12 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/restore: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/psql dataverse_db -f /tmp/dataverse_db.sql 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/run-post-create-post-deploy: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | scripts/database/homebrew/run-reference_data.sql > /tmp/run-reference_data.sql 3 | psql dataverse_db -f doc/sphinx-guides/source/_static/util/createsequence.sql 4 | psql -c 'ALTER TABLE datasetidentifier_seq OWNER TO "dataverse_app";' dataverse_db 5 | cd scripts/api 6 | ./setup-all.sh --insecure > /tmp/setup-all.sh.out 2> /tmp/setup-all.sh.err 7 | cd ../.. 
8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/run-reference_data.sql: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/psql dataverse_db -f $HOME/NetBeansProjects/dataverse/scripts/database/reference_data.sql 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/set-env-for-setup: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # put these variables into your env with `source path/to/script` 3 | 4 | export GLASSFISH_ROOT='/Applications/NetBeans/glassfish4' 5 | export -n GLASSFISH_ROOT 6 | unset GLASSFISH_ROOT 7 | echo $GLASSFISH_ROOT 8 | 9 | export DB_NAME_CUSTOM='dataverse_db' 10 | #export -n DB_NAME_CUSTOM 11 | #unset DB_NAME_CUSTOM 12 | echo $DB_NAME_CUSTOM 13 | 14 | export DB_USER_CUSTOM='dataverse_app' 15 | #export -n DB_USER_CUSTOM 16 | #unset DB_USER_CUSTOM 17 | echo $DB_USER_CUSTOM 18 | 19 | export DB_PASS_CUSTOM='secret' 20 | #export -n DB_PASS_CUSTOM 21 | #unset DB_PASS_CUSTOM 22 | echo $DB_PASS_CUSTOM 23 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/homebrew/superuser-password-update: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # default "admin" password 3 | # $2a$10$H8jca9BBbvCQAs2fU6TaseQeyD6ho3vZuIBKdlknDaR5lh69effde 4 | ~/.homebrew/bin/psql -c 'select username,encryptedpassword from builtinuser where id = 1' dataverse_db 5 | ~/.homebrew/bin/psql -c "update builtinuser set encryptedpassword='\$2a\$10\$H8jca9BBbvCQAs2fU6TaseQeyD6ho3vZuIBKdlknDaR5lh69effde' where id = 1" dataverse_db 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/upgrades/upgrade_v4.0.1_to_v4.1.sql: 
-------------------------------------------------------------------------------- 1 | /* ---------------------------------------- 2 | Add unique constraint to prevent multiple drafts 3 | Ticket 2132 4 | */ ---------------------------------------- 5 | 6 | ALTER TABLE datasetversion 7 | ADD CONSTRAINT uq_datasetversion UNIQUE(dataset_id, versionnumber, minorversionnumber); 8 | 9 | -- make sure Member role has DownloadFilePermission 10 | update dataverserole set permissionbits=28 where alias='member'; -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/upgrades/upgrade_v4.4_to_v4.5.sql: -------------------------------------------------------------------------------- 1 | -- A Private URL is a specialized role assignment with a token. 2 | ALTER TABLE roleassignment ADD COLUMN privateurltoken character varying(255); 3 | -- "Last Export Time" added to the dataset: 4 | ALTER TABLE dataset ADD COLUMN lastExportTime TIMESTAMP; 5 | -- Direct link to the harvesting configuration, for harvested datasets: 6 | ALTER TABLE dataset ADD COLUMN harvestingClient_id bigint; 7 | -- For harveted datasets, native OAI identifier used by the original OAI server 8 | ALTER TABLE dataset ADD COLUMN harvestIdentifier VARCHAR(255); 9 | -- Add extra rules to the Dublin Core import logic: 10 | INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (18, ':publisher', 'producerName', FALSE, NULL, 1 ); 11 | INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (19, ':language', 'language', FALSE, NULL, 1 ); 12 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql: 
-------------------------------------------------------------------------------- 1 | ALTER TABLE datafile ADD COLUMN checksumtype character varying(255); 2 | UPDATE datafile SET checksumtype = 'MD5'; 3 | ALTER TABLE datafile ALTER COLUMN checksumtype SET NOT NULL; 4 | -- alternate statement for sbgrid.org and others interested in SHA-1 support 5 | -- note that in the database we use "SHA1" (no hyphen) but the GUI will show "SHA-1" 6 | --UPDATE datafile SET checksumtype = 'SHA1'; 7 | ALTER TABLE datafile RENAME md5 TO checksumvalue; 8 | ALTER TABLE filemetadata ADD COLUMN directorylabel character varying(255); 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE authenticateduser ADD COLUMN emailconfirmed timestamp without time zone; 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/upgrades/upgrade_v4.6.1_to_v4.6.2.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE dataset ADD COLUMN useGenericThumbnail boolean; 2 | ALTER TABLE maplayermetadata ADD COLUMN lastverifiedtime timestamp without time zone; 3 | ALTER TABLE maplayermetadata ADD COLUMN lastverifiedstatus bigint; 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/upgrades/upgrade_v4.6.2_to_v4.7.sql: -------------------------------------------------------------------------------- 1 | --Uncomment to preserve "Dataverse" at end of each dataverse name. 
2 | --UPDATE dataverse SET name = name || ' Dataverse'; -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/upgrades/upgrade_v4.6_to_v4.6.1.sql: -------------------------------------------------------------------------------- 1 | DELETE FROM authenticationproviderrow where id = 'echo-simple'; 2 | DELETE FROM authenticationproviderrow where id = 'echo-dignified'; 3 | -- For DataFile, file replace functionality: 4 | ALTER TABLE datafile ADD COLUMN rootdatafileid bigint default -1; 5 | ALTER TABLE datafile ADD COLUMN previousdatafileid bigint default null; 6 | -- For existing DataFile objects, update rootDataFileId values: 7 | UPDATE datafile SET rootdatafileid = -1; 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/upgrades/upgrade_v4.7.1_to_v4.8.sql: -------------------------------------------------------------------------------- 1 | -- Updates the database to add a storage identifier to each DvObject 2 | ALTER TABLE dvobject ADD COLUMN storageidentifier character varying(255); 3 | 4 | UPDATE dvobject 5 | SET storageidentifier=(SELECT datafile.filesystemname 6 | FROM datafile 7 | WHERE datafile.id=dvobject.id AND dvobject.dtype='DataFile') where dvobject.dtype='DataFile'; 8 | 9 | UPDATE dvobject 10 | SET storageidentifier=(select concat('file://',authority::text,ds.doiseparator::text,ds.identifier::text) 11 | FROM dataset ds 12 | WHERE dvobject.id=ds.id) 13 | WHERE storageidentifier IS NULL; 14 | 15 | ALTER TABLE datafile DROP COLUMN filesystemname; 16 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/database/upgrades/upgrade_v4.8.3_to_v4.8.4.sql: -------------------------------------------------------------------------------- 1 | -- Google login has used 131 characters. 64 is not enough. 
2 | ALTER TABLE oauth2tokendata ALTER COLUMN accesstoken TYPE text; 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/deploy/phoenix.dataverse.org/cert.md: -------------------------------------------------------------------------------- 1 | Note that `-sha256` is used but the important thing is making sure SHA-1 is not selected when uploading the CSR to https://cert-manager.com/customer/InCommon 2 | 3 | openssl genrsa -out phoenix.dataverse.org.key 2048 4 | 5 | openssl req -new -sha256 -key phoenix.dataverse.org.key -out phoenix.dataverse.org.csr 6 | 7 | Country Name (2 letter code) [XX]:US 8 | State or Province Name (full name) []:Massachusetts 9 | Locality Name (eg, city) [Default City]:Cambridge 10 | Organization Name (eg, company) [Default Company Ltd]:Harvard College 11 | Organizational Unit Name (eg, section) []:IQSS 12 | Common Name (eg, your name or your server's hostname) []:phoenix.dataverse.org 13 | Email Address []:support@dataverse.org 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/deploy/phoenix.dataverse.org/deploy: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | scripts/deploy/phoenix.dataverse.org/prep 3 | sudo /home/jenkins/dataverse/scripts/deploy/phoenix.dataverse.org/rebuild 4 | scripts/deploy/phoenix.dataverse.org/post 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/deploy/phoenix.dataverse.org/dv-root.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "root", 3 | "name": "Root", 4 | "permissionRoot": false, 5 | "facetRoot": true, 6 | "description": "Welcome! 
phoenix.dataverse.org is so named because data here is deleted on every build of the latest Dataverse code: http://guides.dataverse.org/en/latest/developers", 7 | "dataverseSubjects": [ 8 | "Other" 9 | ], 10 | "dataverseContacts": [ 11 | { 12 | "contactEmail": "root@mailinator.com" 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/deploy/phoenix.dataverse.org/install: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | export HOST_ADDRESS=phoenix.dataverse.org 3 | export GLASSFISH_ROOT=/usr/local/glassfish4 4 | export FILES_DIR=/usr/local/glassfish4/glassfish/domains/domain1/files 5 | export DB_NAME=dvndb 6 | export DB_PORT=5432 7 | export DB_HOST=localhost 8 | export DB_USER=dvnapp 9 | export DB_PASS=secret 10 | export RSERVE_HOST=localhost 11 | export RSERVE_PORT=6311 12 | export RSERVE_USER=rserve 13 | export RSERVE_PASS=rserve 14 | export SMTP_SERVER=localhost 15 | export MEM_HEAP_SIZE=2048 16 | export GLASSFISH_DOMAIN=domain1 17 | cd scripts/installer 18 | cp pgdriver/postgresql-8.4-703.jdbc4.jar $GLASSFISH_ROOT/glassfish/lib 19 | cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf 20 | ./glassfish-setup.sh 21 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/deploy/phoenix.dataverse.org/post: -------------------------------------------------------------------------------- 1 | #/bin/sh 2 | cd scripts/api 3 | ./setup-all.sh --insecure | tee /tmp/setup-all.sh.out 4 | cd ../.. 
5 | psql -U dvnapp dvndb -f scripts/database/reference_data.sql 6 | psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql 7 | psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/createsequence.sql 8 | scripts/search/tests/publish-dataverse-root 9 | git checkout scripts/api/data/dv-root.json 10 | scripts/search/tests/grant-authusers-add-on-root 11 | scripts/search/populate-users 12 | scripts/search/create-users 13 | scripts/search/tests/create-all-and-test 14 | scripts/search/tests/publish-spruce1-and-test 15 | java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /var/www/html/schemaspy/latest 16 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/deploy/phoenix.dataverse.org/prep: -------------------------------------------------------------------------------- 1 | #/bin/bash -x 2 | cp scripts/deploy/phoenix.dataverse.org/dv-root.json scripts/api/data/dv-root.json 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-42.1.4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-42.1.4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-8.4-703.jdbc4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-8.4-703.jdbc4.jar 
-------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.0-802.jdbc4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.0-802.jdbc4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.1-902.jdbc4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.1-902.jdbc4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.2-1004.jdbc4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.2-1004.jdbc4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.3-1104.jdbc4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.3-1104.jdbc4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.4.1212.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.4.1212.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/pgdriver/postgresql-42.1.4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/pgdriver/postgresql-42.1.4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/pgdriver/postgresql-8.4-703.jdbc4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/pgdriver/postgresql-8.4-703.jdbc4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/pgdriver/postgresql-9.0-802.jdbc4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.0-802.jdbc4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/pgdriver/postgresql-9.2-1004.jdbc4.jar: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.2-1004.jdbc4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/pgdriver/postgresql-9.3-1104.jdbc4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.3-1104.jdbc4.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/installer/pgdriver/postgresql-9.4.1212.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.4.1212.jar -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1262/create-sparrow1: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | curl -s -X POST -H "Content-type:application/json" -d @scripts/issues/1262/sparrow1.json "http://localhost:8080/api/dataverses/sparrows/datasets/?key=$SPARROWKEY" 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1262/search-sparrow: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # relies on experimental SearchApiNonPublicAllowed feature, see https://github.com/IQSS/dataverse/issues/1299 3 | curl "http://localhost:8080/api/search?key=$SPARROWKEY&show_relevance=true&q=sparrow" 4 | -------------------------------------------------------------------------------- 
/postgresql/testdata/scripts/issues/1380/01-add.localhost.sh: -------------------------------------------------------------------------------- 1 | # Add the localhost group to the system. 2 | curl -X POST -H"Content-Type:application/json" -d@../../api/data/ipGroup-localhost.json localhost:8080/api/admin/groups/ip 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/02-build-dv-structure.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo Run this after running setup-users.sh, and making Pete an 4 | echo admin on the root dataverse. 5 | 6 | 7 | PETE=$(grep :result: users.out | grep Pete | cut -f4 -d: | tr -d \ ) 8 | UMA=$(grep :result: users.out | grep Uma | cut -f4 -d: | tr -d \ ) 9 | 10 | pushd ../../api 11 | ./setup-dvs.sh $PETE $UMA 12 | popd 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/add-ip-group.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Add the passed group to the system. 
4 | curl -X POST -H"Content-Type:application/json" -d@../../api/data/$1 localhost:8080/api/admin/groups/ip 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/add-user: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # add-user dv group user api-token 3 | curl -H "Content-type:application/json" -X POST -d"[$3]" localhost:8080/api/dataverses/$1/groups/$2/roleAssignees?key=$4 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/data/3-eg1.json: -------------------------------------------------------------------------------- 1 | ["&explicit/3-eg1"] 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/data/guest.json: -------------------------------------------------------------------------------- 1 | [":guest"] 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/data/locals.json: -------------------------------------------------------------------------------- 1 | ["&ip/localhost"] 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/data/pete.json: -------------------------------------------------------------------------------- 1 | ["@pete"] 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/data/uma.json: -------------------------------------------------------------------------------- 1 | ["@uma"] 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/db-list-dvs: -------------------------------------------------------------------------------- 1 | psql dvndb -c "select dvobject.id, name, alias, owner_id from 
dvobject inner join dataverse on dvobject.id = dataverse.id" 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/delete-ip-group: -------------------------------------------------------------------------------- 1 | #/bin/bahx 2 | if [ $# -eq 0 ] 3 | then 4 | echo "Please provide IP group id" 5 | echo "e.g $0 845" 6 | exit 1 7 | fi 8 | 9 | curl -X DELETE http://localhost:8080/api/admin/groups/ip/$1 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/dvs.gv: -------------------------------------------------------------------------------- 1 | digraph { 2 | d1[label="Root"] 3 | d2[label="Top dataverse of Pete"] 4 | d3[label="Pete's public place"] 5 | d4[label="Pete's restricted data"] 6 | d5[label="Pete's secrets"] 7 | d6[label="Top dataverse of Uma"] 8 | d7[label="Uma's first"] 9 | d8[label="Uma's restricted"] 10 | 11 | d1 -> d2 12 | d2 -> d3 13 | d2 -> d4 14 | d2 -> d5 15 | d1 -> d6 16 | d6 -> d7 17 | d6 -> d8 18 | 19 | } 20 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/dvs.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/issues/1380/dvs.pdf -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/explicitGroup1.json: -------------------------------------------------------------------------------- 1 | { 2 | "description":"Sample Explicit Group", 3 | "displayName":"Close Collaborators", 4 | "aliasInOwner":"eg1" 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/explicitGroup2.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "description":"Sample Explicit Group", 3 | "displayName":"Not-So-Close Collaborators", 4 | "aliasInOwner":"eg2" 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/keys.txt: -------------------------------------------------------------------------------- 1 | Keys for P e t e and U m a. Produced by running setup-all.sh from the /scripts/api folder. 2 | Pete:757a6493-456a-4bf0-943e-9b559d551a3f 3 | Uma:8797f19b-b8aa-4f96-a789-1b99506f2eab 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/list-groups-for: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | curl -s -X GET http://localhost:8080/api/test/explicitGroups/$1 | jq . 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/list-ip-groups.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | curl -X GET http://localhost:8080/api/admin/groups/ip | jq . 
3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/1380/truth-table.numbers: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/issues/1380/truth-table.numbers -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2013/download-zip.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | APACHE_PORT=8888 3 | GLASSFISH_PORT=8088 4 | PORT=$APACHE_PORT 5 | count=0; while true; do echo "downloading 4 GB file as zip attempt $((++count))"; curl -s http://127.0.0.1:$PORT/api/access/datafiles/3 > /tmp/3; done 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2013/hit-homepage.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | count=0; while true; echo "hitting homepage attempt $((++count))"; do (curl -s -i http://127.0.0.1:8888 | head -9); sleep 3; done 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2021/sort-files: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | OUT=`curl -s "http://localhost:8080/api/admin/index/filemetadata/50825?maxResults=0&sort=$1&order=$2"` 3 | echo $OUT 4 | echo $OUT | jq . 
5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2036/delete-ned-assignment: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/psql -c " 3 | select * from roleassignment where assigneeidentifier = '@ned' 4 | " dataverse_db 5 | ~/.homebrew/bin/psql -c " 6 | delete from roleassignment where assigneeidentifier = '@ned' 7 | " dataverse_db 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2036/grant-role-then-revoke: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | SERVER=http://localhost:8080 3 | if [ -z "$1" ]; then 4 | DATAVERSE=togo 5 | else 6 | DATAVERSE=$1 7 | fi 8 | USERID="@pdurbin" 9 | ROLE=admin 10 | 11 | echo "Assigning $ROLE to $USERID on $DATAVERSE..." 12 | OUT_ASSIGN=`time curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"$USERID\",\"role\": \"$ROLE\"}" "$SERVER/api/dataverses/$DATAVERSE/assignments?key=$API_TOKEN"` 13 | echo $OUT_ASSIGN | jq '.data | {assignee,_roleAlias}' 14 | 15 | echo "Retrieving ID of role to revoke..." 16 | ASSIGNMENTS=`time curl -s "$SERVER/api/dataverses/$DATAVERSE/assignments?key=$API_TOKEN"` 17 | echo $ASSIGNMENTS | jq ".data[] | select(.assignee==\"$USERID\") | ." 18 | 19 | echo "Revoking $ROLE from $USERID on $DATAVERSE..." 
20 | ID_TO_REVOKE=`echo $ASSIGNMENTS | jq ".data[] | select(.assignee==\"$USERID\") | .id"` 21 | OUT_REVOKE=`time curl -s -X DELETE "$SERVER/api/dataverses/$DATAVERSE/assignments/$ID_TO_REVOKE?key=$API_TOKEN"` 22 | echo $OUT_REVOKE | jq '.data.message' 23 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2102/setup.sh: -------------------------------------------------------------------------------- 1 | ENDPOINT=https://localhost:8181 2 | APIKEY=a65048f8-875c-4479-a91d-33cb8cd12821 3 | DATASET=3 4 | 5 | echo Calling: 6 | echo curl --insecure $ENDPOINT/api/datasets/$DATASET/versions/:latest?key=$APIKEY 7 | echo 8 | echo curl --insecure -X PUT -H "Content-Type:application/json" -d@dataset-metadata-next.json $ENDPOINT/api/datasets/$DATASET/versions/:draft?key=$APIKEY 9 | echo 10 | 11 | 12 | # get data: 13 | # curl --insecure $ENDPOINT/api/datasets/$DATASET/versions/:latest?key=$APIKEY 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2132/find-multiple-drafts.sql: -------------------------------------------------------------------------------- 1 | select dataset_id, count(*) from datasetversion where versionstate='DRAFT' group by dataset_id having count(*) >1; 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2132/one-draft-version-per-dataset-constraint.sql: -------------------------------------------------------------------------------- 1 | CREATE UNIQUE INDEX one_draft_version_per_dataset ON datasetversion (dataset_id) WHERE versionstate='DRAFT'; 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2438/download.R: -------------------------------------------------------------------------------- 1 | arg <- commandArgs(trailingOnly = TRUE) 2 | 3 | download.dataverse.file <- function(url) { 4 | 
if (length(url) == 0L) { 5 | return( 6 | "Please provide a URL to a file: http://guides.dataverse.org/en/latest/api/dataaccess.html" 7 | ) 8 | } 9 | # Examples of URLs for tsv, original, RData, JSON, DDI/XML: 10 | # https://groups.google.com/d/msg/dataverse-community/fFrJi7NnBus/LNpfXItbtZYJ 11 | # 12 | # This script assume the tsv URL is used. File id 91 is just an example. You must 13 | # look up the id of the file. As of this writing the easiest way is via SWORD: 14 | # https://github.com/IQSS/dataverse/issues/1837#issuecomment-121736332 15 | # 16 | # url.to.download = 'https://demo.dataverse.org/api/v1/access/datafile/91' 17 | url.to.download = url 18 | tsvfile = 'file.tsv' 19 | download.file(url = url.to.download, destfile = 20 | tsvfile, method = 'curl') 21 | mydata <- read.table(tsvfile, header = TRUE, sep = "\t") 22 | print(mydata) 23 | unlink(tsvfile) 24 | } 25 | 26 | download.dataverse.file(arg) 27 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2454/anAuthUser.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstName":"Anau", 3 | "lastName":"Thuser", 4 | "userName":"anAuthUser", 5 | "affiliation":"current Dataverse", 6 | "position":"above and beyond", 7 | "email":"anAuthUser@malinator.com", 8 | "phone":"(888) 888-8888" 9 | } 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2454/anotherAuthUser.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstName":"Another", 3 | "lastName":"Authuser", 4 | "userName":"anotherAuthUser", 5 | "affiliation":"current Dataverse", 6 | "position":"above and beyond", 7 | "email":"anotherAuthUser@malinator.com", 8 | "phone":"(888) 888-8888" 9 | } 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2454/assignment.json: 
-------------------------------------------------------------------------------- 1 | {"assignee":":authenticated-users", "role":"curator"} 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2454/dataverse.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "permissionsTestDv", 3 | "name": "PermissionsTest", 4 | "affiliation": "Affiliation value", 5 | "permissionRoot": true, 6 | "description": "A Dataverse where we test permissions", 7 | "dataverseContacts": [ 8 | { 9 | "contactEmail": "test.script@mailinator.com" 10 | } 11 | ], 12 | "dataverseSubjects": ["Arts and Humanities"] 13 | } 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2454/group.json: -------------------------------------------------------------------------------- 1 | { 2 | "displayName":"Permission test group", 3 | "description":"Group for testing permissions", 4 | "aliasInOwner":"PTG" 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2454/rollback.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ENDPOINT=http://localhost:8080/api 4 | DB="psql dvndb -At -c " 5 | ROOT_USER=dataverseAdmin 6 | ROOT_KEY=$($DB "select tokenstring \ 7 | from authenticateduser au inner join apitoken apt \ 8 | on au.id=apt.authenticateduser_id \ 9 | where useridentifier='$ROOT_USER'") 10 | 11 | echo $ROOT_USER api key is $ROOT_KEY 12 | 13 | 14 | # delete DV 15 | curl -X DELETE $ENDPOINT/dataverses/permissionsTestDv?key=$ROOT_KEY 16 | echo 17 | echo dataverses deleted 18 | echo 19 | 20 | # delete user 21 | for USER_NICK in anAuthUser anotherAuthUser 22 | do 23 | echo deleting user $USER_NICK 24 | QUERY="select id from authenticateduser where useridentifier='$USER_NICK'" 25 | AUTH_USER_ID=$($DB 
"$QUERY") 26 | echo Auth user id is $AUTH_USER_ID 27 | $DB "delete from apitoken where authenticateduser_id=$AUTH_USER_ID" 28 | $DB "delete from authenticateduserlookup where authenticateduser_id=$AUTH_USER_ID" 29 | $DB "delete from authenticateduser where id=$AUTH_USER_ID" 30 | $DB "delete from builtinuser where id=$AUTH_USER_ID" 31 | done 32 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2595/numconnacquired.tsv: -------------------------------------------------------------------------------- 1 | lastsampletime count 2 | 2015-10-14 09:34:10.553000 81572 3 | 2015-10-14 09:49:10.695000 82053 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2595/plot.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | import numpy as np 4 | import matplotlib.pyplot as plt 5 | import matplotlib.dates as mdates 6 | days, impressions = np.loadtxt("numconnacquired.tsv", delimiter='\t', skiprows=1, unpack=True, 7 | converters={ 0: mdates.strpdate2num('%Y-%m-%d %H:%M:%S.%f')}) 8 | plt.plot_date(x=days, y=impressions, fmt="r-") 9 | plt.title("Number of logical connections acquired from the pool") 10 | plt.ylabel("numconnacquired") 11 | plt.grid(True) 12 | plt.gcf().autofmt_xdate() 13 | plt.savefig('out.png') 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2598/detect-duplicate-dataverse-aliases.sql: -------------------------------------------------------------------------------- 1 | select alias from dataverse where lower(alias) in (select lower(alias) from dataverse group by lower(alias) having count(*) >1); 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2598/insert-duplicate-alias.sql: 
-------------------------------------------------------------------------------- 1 | -- This script should fail to insert a duplicate datavers alias (different case) 2 | -- after a constraint has been added in https://github.com/IQSS/dataverse/issues/2598 3 | DELETE FROM dataverse where id = 100; 4 | DELETE FROM dataverse where id = 101; 5 | DELETE FROM dvobject where id = 100; 6 | DELETE FROM dvobject where id = 101; 7 | INSERT INTO dvobject (id, createdate, modificationtime) VALUES (100, NOW(), NOW()); 8 | INSERT INTO dataverse (id, alias, name, dataversetype, defaultcontributorrole_id) VALUES (100, 'foo', 'foo is mine', 'UNCATEGORIZED', 1); 9 | INSERT INTO dvobject (id, createdate, modificationtime) VALUES (101, NOW(), NOW()); 10 | INSERT INTO dataverse (id, alias, name, dataversetype, defaultcontributorrole_id) VALUES (101, 'FOO', 'uppercase foo', 'UNCATEGORIZED', 1); 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2649/reproduce: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # "File Downloader" role has already been assigned to "finch" to one of: 3 | # - trees.png file 4 | # - Spruce Goose dataset 5 | # - Spruce dataverse 6 | curl -s 'http://localhost:8080/api/mydata/retrieve?selected_page=1&dvobject_types=DataFile&published_states=Published&published_states=Unpublished&published_states=Draft&published_states=In+Review&published_states=Deaccessioned&role_ids=1&role_ids=2&role_ids=6&mydata_search_term=&userIdentifier=finch' | jq . 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/2681/create-files: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | NUM_FILES=10 3 | if [ ! 
-z "$1" ]; then 4 | NUM_FILES=$1 5 | fi 6 | TMP="/tmp" 7 | DIR_NAME="$TMP/${NUM_FILES}files" 8 | TMP_DIR="$DIR_NAME" 9 | rm -rf $TMP_DIR 10 | mkdir $TMP_DIR 11 | cd $TMP_DIR 12 | for i in `seq -f "%04g" $NUM_FILES`; do 13 | echo $i > $i.txt 14 | done 15 | cd $TMP 16 | ZIP=${DIR_NAME}.zip 17 | ls $DIR_NAME/* 18 | zip $ZIP $DIR_NAME/* 19 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/3354/createDatasetWithSha1Files.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # existing, works, no files, commenting out 3 | #curl -s -X POST -H "Content-type:application/json" -d @scripts/search/tests/data/dataset-finch1.json "http://localhost:8080/api/dataverses/root/datasets/?key=$API_TOKEN" 4 | # new, has files 5 | curl -s -X POST -H "Content-type:application/json" -d @scripts/issues/3354/datasetWithSha1Files.json "http://localhost:8080/api/dataverses/root/datasets/?key=$API_TOKEN" 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/3354/mydata: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # FIXME: Make this into a REST Assured test. 
3 | curl -s "http://localhost:8080/api/mydata/retrieve?key=$API_TOKEN&role_ids=1&dvobject_types=DataFile&published_states=Published&published_states=Unpublished&published_states=Draft&published_states=In+Review&published_states=Deaccessioned" | jq .data.items 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/3543/dv-peteDelete1.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"peteDelete1", 3 | "name":"A dataverse for testing", 4 | "affiliation":"Affiliation value", 5 | "contactEmail":"pete@mailinator.com", 6 | "permissionRoot":false, 7 | "description":"A dataverse that's added for testing purposes." 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/3543/dv-peteDelete2.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"peteDelete2", 3 | "name":"A dataverse for testing", 4 | "affiliation":"Affiliation value", 5 | "contactEmail":"pete@mailinator.com", 6 | "permissionRoot":false, 7 | "description":"A dataverse that's added for testing purposes." 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/3543/dv-peteDelete3.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"peteDelete3", 3 | "name":"A dataverse for testing", 4 | "affiliation":"Affiliation value", 5 | "contactEmail":"pete@mailinator.com", 6 | "permissionRoot":false, 7 | "description":"A dataverse that's added for testing purposes." 
8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/3543/dv-peteDeleteTop.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"peteDeleteTop", 3 | "name":"A dataverse for testing", 4 | "affiliation":"Affiliation value", 5 | "contactEmail":"pete@mailinator.com", 6 | "permissionRoot":false, 7 | "description":"A dataverse that's added for testing purposes." 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/3543/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo Setting up dataverses for deletion, as described in https://redmine.hmdc.harvard.edu/issues/3543 3 | 4 | curl -H"Content-type:application/json" -d @dv-peteDeleteTop.json http://localhost:8080/api/dataverses/peteTop?key=pete 5 | curl -H"Content-type:application/json" -d @dv-peteDelete1.json http://localhost:8080/api/dataverses/peteDeleteTop?key=pete 6 | curl -H"Content-type:application/json" -d @dv-peteDelete2.json http://localhost:8080/api/dataverses/peteDeleteTop?key=pete 7 | curl -H"Content-type:application/json" -d @dv-peteDelete3.json http://localhost:8080/api/dataverses/peteDeleteTop?key=pete 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/3543/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | curl -X DELETE http://localhost:8080/api/dataverses/peteDelete1?key=pete 4 | curl -X DELETE http://localhost:8080/api/dataverses/peteDelete2?key=pete 5 | curl -X DELETE http://localhost:8080/api/dataverses/peteDelete3?key=pete 6 | curl -X DELETE http://localhost:8080/api/dataverses/peteDeleteTop?key=pete 7 | -------------------------------------------------------------------------------- 
/postgresql/testdata/scripts/issues/3544/delete.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # deleting an unreleased dataset, with a bunch of unreleased files. 4 | # seems to be working like a charm - ? 5 | # -- Leonid 6 | 7 | curl -X DELETE http://localhost:8080/api/datasets/43?key=pete 8 | 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/796/builtin2shib: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | echo args: $@ 3 | if [ -z "$1" ]; then 4 | echo "call with foo:bar:baz" 5 | else 6 | OUTPUT=`curl -s -X PUT -d "$@" http://localhost:8080/api/test/user/convert/builtin2shib` 7 | echo $OUTPUT 8 | echo 9 | echo $OUTPUT | jq . 10 | fi 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/issues/guestbook/insert-guestbook-responses.sh: -------------------------------------------------------------------------------- 1 | -- select * from guestbookresponse; 2 | -- 150K would be a better test, see https://github.com/IQSS/dataverse/issues/3609#issuecomment-322559209 3 | --for i in {0..2000}; do psql dataverse_db -f scripts/issues/3845/insert-guestbook-responses.sh; done 4 | -- id | downloadtype | email | institution | name | position | responsetime | sessionid | authenticateduser_id | datafile_id | dataset_id | datasetversion_id | guestbook_id 5 | insert into guestbookresponse values (default, 1, null, null, null, null, null, null, null, 104, 103, null, 2); 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/migration/custom_field_map.sql: -------------------------------------------------------------------------------- 1 | 2 | delete from customfieldmap; 3 | 4 | COPY customfieldmap( sourcetemplate, sourcedatasetfield, targetdatasetfield) FROM 
'/scripts/migration/HarvardCustomFields.csv' DELIMITER ',' CSV HEADER; 5 | 6 | 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/migration/datafile_pub_date.sql: -------------------------------------------------------------------------------- 1 | UPDATE dvobject 2 | SET publicationdate = x.releasetime 3 | FROM (SELECT f.id, f.filesystemname, min(v.releasetime) as releasetime 4 | FROM datafile f, dvobject d, datasetversion v, filemetadata m 5 | WHERE f.id = d.id 6 | AND d.publicationdate IS null 7 | AND m.datafile_id = f.id 8 | AND m.datasetversion_id = v.id 9 | AND v.versionstate = 'RELEASED' 10 | -- AND (NOT f.filesystemname IS null AND NOT f.filesystemname LIKE 'http%') 11 | GROUP BY f.id, f.filesystemname) x WHERE x.id = dvobject.id; 12 | 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/migration/files_destination_step1_: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | 3 | use DBI; 4 | 5 | my $host = "localhost"; 6 | my $username = "xxxxx"; 7 | my $password = 'xxxxx'; 8 | my $database = "xxxxx"; 9 | 10 | my $dbh = DBI->connect("DBI:Pg:dbname=$database;host=$host",$username,$password); 11 | my $sth = $dbh->prepare(qq {SELECT d.protocol, d.authority, d.identifier, d.id, v.id, v.versionnumber FROM dataset d, datasetversion v WHERE v.dataset_id = d.id ORDER BY d.id, v.versionnumber}); 12 | $sth->execute(); 13 | 14 | my $offset= 0; 15 | 16 | while ( @_ = $sth->fetchrow() ) 17 | { 18 | $protocol = $_[0]; 19 | $authority = $_[1]; 20 | $identifier = $_[2]; 21 | $id = $_[3]; 22 | $vid = $_[4]; 23 | $vnum = $_[5]; 24 | 25 | print $protocol . ":" . $authority . "/" . $identifier . "\t" . $id . "\t" . $vid . "\t" . $vnum . "\n"; 26 | 27 | $offset = $id; 28 | } 29 | 30 | $sth->finish; 31 | $dbh->disconnect; 32 | 33 | print STDERR "last ID in DVOBJECT table: " . $offset . 
"\n"; 34 | 35 | exit 0; 36 | 37 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/migration/migrate_passwords.sql: -------------------------------------------------------------------------------- 1 | update builtinuser 2 | set passwordencryptionversion = 0, 3 | encryptedpassword= _dvn3_vdcuser.encryptedpassword 4 | from _dvn3_vdcuser 5 | where _dvn3_vdcuser.username=builtinuser.username; 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/migration/scrub_email_usernames.sql: -------------------------------------------------------------------------------- 1 | -- first, find users with e-mails as usernames 2 | select id, username, email from vdcuser where username like '%@%' 3 | --and username != email; 4 | 5 | -- then find which those which would create duplicates after truncating 6 | -- (verify that adding 1 would be OK; if not, you may need to update some individually) 7 | select u1.id, u1.username, u2.id, u2.username from vdcuser u1, vdcuser u2 8 | where u1.id != u2.id 9 | and u1.username like '%@%' 10 | and split_part (u1.username, '@', 1) = u2.username 11 | 12 | -- for those usernames, truncate and add 1, so no duplicates 13 | update vdcuser set username = split_part (username, '@', 1) ||'1' 14 | where id in ( 15 | select u1.id from vdcuser u1, vdcuser u2 16 | where u1.id != u2.id 17 | and u1.username like '%@%' 18 | and split_part (u1.username, '@', 1) = u2.username 19 | ) 20 | 21 | -- now truncate the rest 22 | update vdcuser set username = split_part (username, '@', 1) where username like '%@%' 23 | 24 | -- confirm no duplicates 25 | select id, username, email from vdcuser where username in ( 26 | select username from vdcuser 27 | group by username having count(*) > 1 28 | ) -------------------------------------------------------------------------------- /postgresql/testdata/scripts/migration/sequence_script.sql: 
-------------------------------------------------------------------------------- 1 | SELECT setval('datafile_id_seq', (SELECT MAX(id) FROM datafile)); 2 | SELECT setval('datafilecategory_id_seq', (SELECT MAX(id) FROM datafilecategory)); 3 | SELECT setval('datatable_id_seq', (SELECT MAX(id) FROM datatable)); 4 | SELECT setval('datavariable_id_seq', (SELECT MAX(id) FROM datavariable)); 5 | SELECT setval('dvobject_id_seq', (SELECT MAX(id) FROM dvobject)); 6 | SELECT setval('filemetadata_id_seq', (SELECT MAX(id) FROM filemetadata)); 7 | SELECT setval('variablecategory_id_seq', (SELECT MAX(id) FROM variablecategory)); 8 | SELECT setval('guestbook_id_seq', (SELECT MAX(id) FROM guestbook)); 9 | SELECT setval('guestbookresponse_id_seq', (SELECT MAX(id) FROM guestbookresponse)); 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/migration/versions_source_step2_: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | 3 | 4 | unless ( -d "/tmp/ddi" ) 5 | { 6 | mkdir "/tmp/ddi"; 7 | } 8 | 9 | while (<>) 10 | { 11 | chop; 12 | @_ = split ("\t"); 13 | $alias = $_[0]; 14 | $studyid = $_[1]; 15 | $ddifile = $_[2]; 16 | 17 | $ddifile = "/nfs/iqss/DVN/data/" . $ddifile; 18 | 19 | if ( -f $ddifile ) 20 | { 21 | $total += (stat($study))[7]; 22 | $tmpdir = "/tmp/ddi/" . $alias; 23 | unless ( -d $tmpdir ) 24 | { 25 | mkdir $tmpdir; 26 | } 27 | 28 | $tmpfile = $tmpdir . "/" . $studyid . "\.xml"; 29 | system "cp $ddifile $tmpfile"; 30 | 31 | } 32 | else 33 | { 34 | print STDERR "warning: missing ddi file! (" . $ddifile . ")\n"; 35 | } 36 | } 37 | 38 | print "Total of " . $total . 
" bytes copied.\n"; 39 | 40 | 41 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/rapache/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | mkdir -p ~/rpmbuild/SOURCES 3 | mkdir -p ~/rpmbuild/SPECS 4 | wget https://github.com/jeffreyhorner/rapache/archive/v1.2.7.tar.gz -O rapache-1.2.7.tar.gz 5 | tar xzvf rapache-1.2.7.tar.gz rapache-1.2.7/rpm/rapache.spec --strip-components 2 6 | # Move to build dirs 7 | cp -f rapache-1.2.7.tar.gz ~/rpmbuild/SOURCES/ 8 | cp -f rapache.spec ~/rpmbuild/SPECS/ 9 | cd ~ 10 | rpmbuild -ba ~/rpmbuild/SPECS/rapache.spec 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/.gitignore: -------------------------------------------------------------------------------- 1 | data/in/users 2 | data/in/dv-birds1 3 | data/in/dv-trees1 4 | data/in/dv-psi 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/clear: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | echo "deleting all data from Solr" 3 | curl http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}" 4 | # this was for elasticsearch 5 | #curl -XDELETE http://localhost:9200/dataverse/ 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/create-bird-dvs1: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | DIR='scripts/search/data/in/dv-birds1' 3 | USERDIR=/tmp/searchusers 4 | ROOT_DV=root 5 | FINCHKEY=`cat $USERDIR/1 | jq .data.apiToken | tr -d \"` 6 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/1 "http://localhost:8080/api/dataverses/$ROOT_DV?key=$FINCHKEY" 7 | echo 8 | 9 | 
#PARENT=`xsltproc scripts/search/data/mkpaths.xsl scripts/search/data/nodes.xml | grep '/sparrows$' | tr / " " | awk '{print $(NF-1)}'` 10 | PARENT=birds 11 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/2 "http://localhost:8080/api/dataverses/$PARENT?key=$FINCHKEY" 12 | echo 13 | 14 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/3 "http://localhost:8080/api/dataverses/birds?key=$FINCHKEY" 15 | echo 16 | 17 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/4 "http://localhost:8080/api/dataverses/birds?key=$FINCHKEY" 18 | echo 19 | 20 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/5 "http://localhost:8080/api/dataverses/sparrows?key=$FINCHKEY" 21 | echo 22 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/create-psi-dvs: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . scripts/search/export-keys 3 | DIR='scripts/search/data/in/dv-psi' 4 | USERDIR=/tmp/searchusers 5 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/1 "http://localhost:8080/api/dataverses/root?key=$PSIADMINKEY" 6 | echo 7 | 8 | PARENT=psi 9 | for i in {2..9}; do 10 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/$i "http://localhost:8080/api/dataverses/$PARENT?key=$PSIADMINKEY" 11 | echo 12 | done 13 | 14 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/10 "http://localhost:8080/api/dataverses/psimali?key=$PSIADMINKEY" 15 | echo 16 | 17 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/11 "http://localhost:8080/api/dataverses/psimali?key=$PSIADMINKEY" 18 | echo 19 | 20 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/12 "http://localhost:8080/api/dataverses/psimalihealth?key=$PSIADMINKEY" 21 | echo 22 | 23 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/13 "http://localhost:8080/api/dataverses/psimalihealthchild?key=$PSIADMINKEY" 24 | echo 25 | 
-------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/create-tree-dvs1: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | DIR='scripts/search/data/in/dv-trees1' 3 | USERDIR=/tmp/searchusers 4 | ROOT_DV=root 5 | SPRUCEKEY=`cat $USERDIR/4 | jq .data.apiToken | tr -d \"` 6 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/1 "http://localhost:8080/api/dataverses/$ROOT_DV?key=$SPRUCEKEY" 7 | echo 8 | 9 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/2 "http://localhost:8080/api/dataverses/trees?key=$SPRUCEKEY" 10 | echo 11 | 12 | curl -s -H "Content-type:application/json" -X POST -d @$DIR/3 "http://localhost:8080/api/dataverses/trees?key=$SPRUCEKEY" 13 | echo 14 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/create-users: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SERVER='http://localhost:8080/api' 3 | BURRITO='burrito' 4 | USERDIR='scripts/search/data/in/users' 5 | OUTDIR='/tmp/searchusers' 6 | rm -rf $OUTDIR 7 | mkdir -p $OUTDIR 8 | 9 | create () { 10 | pass=`cat $1 | jq .userName | tr -d \"` 11 | echo $pass 12 | resp=$(curl -s -H "Content-type:application/json" -X POST -d @$1 "$SERVER/builtin-users?password=$pass&key=$BURRITO") 13 | echo $resp | jq . 
> $OUTDIR/$1 14 | } 15 | 16 | cd $USERDIR 17 | for i in `ls`; do 18 | create $i 19 | done 20 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/binary/1000files.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/search/data/binary/1000files.zip -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/binary/100files.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/search/data/binary/100files.zip -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/binary/3files.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/search/data/binary/3files.zip -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/binary/health.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/search/data/binary/health.zip -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/binary/trees.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/search/data/binary/trees.png 
-------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/binary/trees.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/search/data/binary/trees.zip -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/dv-birds1.tsv: -------------------------------------------------------------------------------- 1 | name alias permissionRoot subject contactEmail description affiliation 2 | Birds birds true Arts and Humanities birds@birds.com A bird dataverse with some trees Birds Inc. 3 | Finches finches false Chemistry finches@birds.com A dataverse with finches Birds Inc. 4 | Sparrows sparrows false Law sparrows@birds.com A dataverse featuring sparrows Birds Inc. 5 | Wrens wrens false Medicine, Health and Life Sciences wrens@birds.com A dataverse full of wrens Birds Inc. 6 | Chestnut Sparrows chestnutsparrows false Other chestnutsparrows@birds.com A dataverse with chestnut sparrows Birds Inc. 
7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/dv-psi.tsv: -------------------------------------------------------------------------------- 1 | name alias permissionRoot subject contactEmail description affiliation 2 | PSI psi true Social Science psi@mailinator.com PSI PSI 3 | China psichina true Social Science psi@mailinator.com PSI PSI 4 | Russia psirussia true Social Science psi@mailinator.com PSI PSI 5 | India psiindia true Social Science psi@mailinator.com PSI PSI 6 | Haiti psihaiti true Social Science psi@mailinator.com PSI PSI 7 | Laos psilaos true Social Science psi@mailinator.com PSI PSI 8 | Nepal psinepal true Social Science psi@mailinator.com PSI PSI 9 | Togo psitogo true Social Science psi@mailinator.com PSI PSI 10 | Mali psimali true Social Science psi@mailinator.com PSI PSI 11 | Mali Health psimalihealth true Social Science psi@mailinator.com PSI PSI 12 | Women in Mali psimaliwomen true Social Science psi@mailinator.com PSI PSI 13 | Child of Mali Health psimalihealthchild true Social Science psi@mailinator.com PSI PSI 14 | Grandchild of Mali Health psimalihealthgrandchild true Social Science psi@mailinator.com PSI PSI 15 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/dv-trees1.tsv: -------------------------------------------------------------------------------- 1 | name alias permissionRoot subject contactEmail description affiliation 2 | Trees trees true Other trees@trees.com A tree dataverse with some birds Trees Inc. 3 | Spruce spruce false Other spruce@trees.com A spruce with some birds Trees Inc. 4 | Chestnut Trees chestnuttrees false Other chestnuttrees@trees.com A dataverse with chestnut trees and an oriole Trees Inc. 
5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/group-explicit-trees.json: -------------------------------------------------------------------------------- 1 | { 2 | "aliasInOwner": "trees", 3 | "displayName": "Trees Dataverse Contributors", 4 | "description": "Contributors to the Trees Dataverse." 5 | } 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/in/dataverses.birds/4: -------------------------------------------------------------------------------- 1 | { 2 | "affiliation": "Birds Inc.", 3 | "alias": "finches", 4 | "contactEmail": "finches@birds.com", 5 | "description": "A dataverse with finches", 6 | "name": "Finches", 7 | "permissionRoot": "false" 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/in/dataverses.birds/5: -------------------------------------------------------------------------------- 1 | { 2 | "affiliation": "Birds Inc.", 3 | "alias": "sparrows", 4 | "contactEmail": "sparrows@birds.com", 5 | "description": "A dataverse featuring sparrows", 6 | "name": "Sparrows", 7 | "permissionRoot": "false" 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/in/dataverses.birds/6: -------------------------------------------------------------------------------- 1 | { 2 | "affiliation": "Birds Inc.", 3 | "alias": "wrens", 4 | "contactEmail": "wrens@birds.com", 5 | "description": "A dataverse full of wrens", 6 | "name": "Wrens", 7 | "permissionRoot": "false" 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/in/dataverses.root/2: -------------------------------------------------------------------------------- 1 | { 2 | "affiliation": "Birds Inc.", 3 | "alias": "birds", 4 
| "contactEmail": "birds@birds.com", 5 | "description": "A bird dataverse with some trees", 6 | "name": "Birds", 7 | "permissionRoot": "false" 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/in/dataverses.root/3: -------------------------------------------------------------------------------- 1 | { 2 | "affiliation": "Trees Inc.", 3 | "alias": "trees", 4 | "contactEmail": "trees@trees.com", 5 | "description": "A tree dataverse with some birds", 6 | "name": "Trees", 7 | "permissionRoot": "false" 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/in/dataverses.trees/7: -------------------------------------------------------------------------------- 1 | { 2 | "affiliation": "Trees Inc.", 3 | "alias": "spruce", 4 | "contactEmail": "spruce@trees.com", 5 | "description": "A spruce with some birds", 6 | "name": "Spruce", 7 | "permissionRoot": "false" 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/in/dataverses.trees/9: -------------------------------------------------------------------------------- 1 | { 2 | "affiliation": "Trees Inc.", 3 | "alias": "chestnuttrees", 4 | "contactEmail": "chestnuttrees@trees.com", 5 | "description": "A dataverse with chestnut trees and an oriole", 6 | "name": "Chestnut Trees", 7 | "permissionRoot": "false" 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/mkpaths.xsl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/nodes.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/replace_test/003.txt: -------------------------------------------------------------------------------- 1 | 3 -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/replace_test/004.txt: -------------------------------------------------------------------------------- 1 | 4 -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/replace_test/005.txt: -------------------------------------------------------------------------------- 1 | 5 -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-01/data.tsv: -------------------------------------------------------------------------------- 1 | 2016-01 7 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-02/data.tsv: -------------------------------------------------------------------------------- 1 | 2016-01 7 2 | 2016-02 9 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-03/data.tsv: -------------------------------------------------------------------------------- 1 | 2016-01 7 2 | 2016-02 9 3 | 2016-03 8 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/savedSearchAdvanced.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": "*", 3 | "definitionPointId": 2, 4 | "filterQueries": [ 5 | "date:2015" 6 | ] 7 | } 
8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/savedSearchBasic.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": "png", 3 | "definitionPointId": 2 4 | } 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/savedSearchInvalidJson.json: -------------------------------------------------------------------------------- 1 | [] 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/savedSearchInvalidJsonNoQuery.json: -------------------------------------------------------------------------------- 1 | { 2 | "quarry": "can't spell" 3 | } 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/savedSearchMaliBasicHealth.json: -------------------------------------------------------------------------------- 1 | { 2 | "definitionPointId": 22, 3 | "query": "health", 4 | "filterQueries": [ 5 | "dvObjectType:(dataverses OR datasets OR files)", 6 | "subtreePaths:\"/13/21\"" 7 | ], 8 | "creatorId": 1 9 | } 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/tabular/120745.dta: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/search/data/tabular/120745.dta -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/tabular/1char: -------------------------------------------------------------------------------- 1 | a 2 | -------------------------------------------------------------------------------- 
/postgresql/testdata/scripts/search/data/tabular/50by1000.dta: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/search/data/tabular/50by1000.dta -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/data/tabular/50by1000.dta.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IQSS/dataverse-docker/039ad12c5cd2ed9346b737e6581a4962e063aa4e/postgresql/testdata/scripts/search/data/tabular/50by1000.dta.zip -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dataset-add: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | curl http://localhost:8080/api/datasets?owner=birds -H 'Content-type:application/json' --data-binary @data/in/datasets/1 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dbbuiltin2shib: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | #psql -c "select id,name,useridentifier from authenticateduser order by id;" dataverse_db 3 | psql -c "select * from authenticateduser order by id;" dataverse_db 4 | psql -c "select * from authenticateduserlookup order by id;" dataverse_db 5 | psql -c "select * from builtinuser order by id;" dataverse_db 6 | #psql -c "select id,encryptedpassword,firstname,lastname,username from builtinuser order by id;" dataverse_db 7 | exit 8 | psql -c "select * from roleassignment;" dataverse_db 9 | psql -c "select datasetversionid,useridentifier from datasetversion_dataverseuser;" dataverse_db 10 | exit 11 | psql -c "select * from explicitgroup;" dataverse_db 12 | 
-------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dbdatasetversion: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/psql -c " 3 | select id,dataset_id,versionstate,license,termsofuse from datasetversion; 4 | ---select * from datasetversion; 5 | " dataverse_db 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dbdbobject: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | ~/.homebrew/bin/psql -c "select id, dtype, modificationtime, indextime, permissionmodificationtime, permissionindextime from dvobject order by id;" dataverse_db 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dblinks: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | ~/.homebrew/bin/psql -c "select * from dataverselinkingdataverse order by id;" dataverse_db 3 | ~/.homebrew/bin/psql -c "select * from datasetlinkingdataverse order by id;" dataverse_db 4 | exit 5 | ~/.homebrew/bin/psql -c "select id, alias from dataverse order by id;" dataverse_db 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dblinks-delete: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | ~/.homebrew/bin/psql -c "delete from dataverselinkingdataverse;" dataverse_db 3 | ~/.homebrew/bin/psql -c "delete from datasetlinkingdataverse;" dataverse_db 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dbperms: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/psql -c " 3 | select dv.id as dvObject, au.id 
as user 4 | from dvobject dv, roleassignment ra, authenticateduser au 5 | where 1=1 6 | and dv.id = $1 7 | and dv.id = ra.definitionpoint_id 8 | and '@'|| au.useridentifier = ra.assigneeidentifier; 9 | " dataverse_db 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dbsavedsearch: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | ~/.homebrew/bin/psql -c "select * from savedsearch order by id;" dataverse_db 3 | ~/.homebrew/bin/psql -c "select * from savedsearchfilterquery order by id;" dataverse_db 4 | exit 5 | ~/.homebrew/bin/psql -c "drop table savedsearch cascade;" dataverse_db 6 | ~/.homebrew/bin/psql -c "drop table savedsearchfilterquery cascade;" dataverse_db 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dbsavedsearch-delete: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | ~/.homebrew/bin/psql -c "delete from savedsearchfilterquery;" dataverse_db 3 | ~/.homebrew/bin/psql -c "delete from savedsearch cascade;" dataverse_db 4 | exit 5 | ~/.homebrew/bin/psql -c "drop table savedsearch cascade;" dataverse_db 6 | ~/.homebrew/bin/psql -c "drop table savedsearchfilterquery cascade;" dataverse_db 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dbshibgroups: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | psql -c "select * from shibgroup;" dataverse_db 3 | psql -c "select * from authenticateduser;" dataverse_db 4 | psql -c "select * from persistedglobalgroup;" dataverse_db 5 | psql -c "select * from roleassignment;" dataverse_db 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dbusers: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/.homebrew/bin/psql -c " 3 | select * from builtinuser; 4 | " dataverse_db 5 | ~/.homebrew/bin/psql -c " 6 | select * from authenticateduser; 7 | " dataverse_db 8 | ~/.homebrew/bin/psql -c " 9 | select * from authenticateduserlookup; 10 | " dataverse_db 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/ds.tsv: -------------------------------------------------------------------------------- 1 | id title author owner description citationDate distributor 2 | 1 general dataset Dr. Doctor 1 About birds 2013-12-11 For All 3 | 2 bird dataset Dr. Bird 2 bird study 1 2003-12-11 For the Birds 4 | 3 bird dataset Dr. Bird 2 bird study 2 2003-12-11 For the Birds 5 | 4 finch dataset Dr. Bird 3 bird study 2 2003-12-11 For the Birds 6 | 5 goldfinch dataset Dr. Bird 5 bird study 2 2003-12-11 For the Birds 7 | 6 tree dataset Dr. Tree 4 tree study 2 2003-12-11 For the Trees 8 | 7 chestnut dataset Dr. Tree 6 tree study 2003-12-11 For the Trees 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/dv.tsv: -------------------------------------------------------------------------------- 1 | id name alias owner contactEmail description affiliation 2 | 1 Nature nature root@nature.com (not used) Earth Inc. 3 | 2 Birds birds 1 birds@birds.com A bird dataverse with some trees Birds Inc. 4 | 3 Trees trees 1 trees@trees.com A tree dataverse with some birds Trees Inc. 5 | 4 Finches finches 2 finches@birds.com A dataverse with finches Birds Inc. 6 | 5 Sparrows sparrows 2 sparrows@birds.com A dataverse featuring sparrows Birds Inc. 7 | 6 Wrens wrens 2 wrens@birds.com A dataverse full of wrens Birds Inc. 8 | 7 Spruce spruce 3 spruce@trees.com A spruce with some birds Trees Inc. 
9 | 8 Chestnut Sparrows chestnutsparrows 5 chestnutsparrows@birds.com A dataverse with chestnut sparrows Birds Inc. 10 | 9 Chestnut Trees chestnuttrees 3 chestnuttrees@trees.com A dataverse with chestnut trees and an oriole Trees Inc. 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/empty-entityid-check: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # see also https://redmine.hmdc.harvard.edu/issues/3809 3 | curl 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=-entityid:*' 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/export-keys: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # `source path/to/this/file` so you can use these keys elsewhere 3 | export ADMINKEY=`cat /tmp/setup-all.sh.out | grep apiToken| jq .data.apiToken | tr -d \"` 4 | export SEARCH_USER_DIR=/tmp/searchusers 5 | export FINCHKEY=`cat $SEARCH_USER_DIR/1 | jq .data.apiToken | tr -d \"` 6 | export SPARROWKEY=`cat $SEARCH_USER_DIR/2 | jq .data.apiToken | tr -d \"` 7 | export WRENKEY=`cat $SEARCH_USER_DIR/3 | jq .data.apiToken | tr -d \"` 8 | export SPRUCEKEY=`cat $SEARCH_USER_DIR/4 | jq .data.apiToken | tr -d \"` 9 | export CHESTNUTKEY=`cat $SEARCH_USER_DIR/5 | jq .data.apiToken | tr -d \"` 10 | export PSIADMINKEY=`cat $SEARCH_USER_DIR/6 | jq .data.apiToken | tr -d \"` 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/files: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | curl http://localhost:8080/api/index 3 | curl -s 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=*&fq=dvtype:files' | jq '.response.docs[] | {name_sort, id, parentid}' 4 | 
-------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/go: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | ./clear 3 | sleep .5 4 | #./populate 5 | #./create 6 | ./add 7 | # elasticsearch might need more time before query 8 | sleep 1 9 | ./query 10 | ./search 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/index: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # curl -s "http://localhost:8080/api/admin/index?numPartitions=$1&partitionIdToProcess=$2&previewOnly=$3" 3 | scripts/search/clear 4 | curl -s -X DELETE http://localhost:8080/api/admin/index/timestamps 5 | curl -s "http://localhost:8080/api/admin/index/continue?numPartitions=1&partitionIdToProcess=0&previewOnly=true" | jq .data.previewOfPartitionWorkload.dvContainerIds.dataverses[] | while read j; do curl http://localhost:8080/api/admin/index/dataverses/$j; done 6 | curl -s "http://localhost:8080/api/admin/index/continue?numPartitions=1&partitionIdToProcess=0&previewOnly=true" | jq .data.previewOfPartitionWorkload.dvContainerIds.datasets[] | while read i; do curl http://localhost:8080/api/admin/index/datasets/$i; done 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/index-status: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | curl -s http://localhost:8080/api/admin/index/status | jq . 
3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/json2ids: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """Find ids in JSON document""" 3 | import sys 4 | try: 5 | import json 6 | except ImportError: 7 | import simplejson as json 8 | import optparse 9 | parser = optparse.OptionParser(description=__doc__) 10 | options, args = parser.parse_args() 11 | 12 | if not args: 13 | print "Please supply a filename to process" 14 | sys.exit(1) 15 | 16 | json_data=open(args[0]) 17 | data = json.load(json_data) 18 | ids=[] 19 | for i in data: 20 | id = i["entityid_l"] 21 | ids.append(str(id)) 22 | print ' '.join(ids) 23 | json_data.close() 24 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/populate: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | DVDIR='data/in/dataverses' 3 | DVDIR_ROOT='data/in/dataverses.root' 4 | DVDIR_BIRDS='data/in/dataverses.birds' 5 | DVDIR_TREES='data/in/dataverses.trees' 6 | #DSDIR='data/in/datasets' 7 | #FILESDIR='data/in/files' 8 | #mkdir -p $DSDIR 9 | #mkdir -p $FILESDIR 10 | rm -rf data/in 11 | mkdir -p $DVDIR 12 | mkdir -p $DVDIR_ROOT 13 | mkdir -p $DVDIR_BIRDS 14 | mkdir -p $DVDIR_TREES 15 | count=1; ./tab2json dv.tsv | while read i; do echo $i | python -m json.tool > $DVDIR/$count; let count++; done 16 | rm $DVDIR/1 17 | mv $DVDIR/2 $DVDIR_ROOT/2 18 | mv $DVDIR/3 $DVDIR_ROOT/3 19 | mv $DVDIR/4 $DVDIR_BIRDS/4 20 | mv $DVDIR/5 $DVDIR_BIRDS/5 21 | mv $DVDIR/6 $DVDIR_BIRDS/6 22 | mv $DVDIR/7 $DVDIR_TREES/7 23 | rm $DVDIR/8 24 | mv $DVDIR/9 $DVDIR_TREES/9 25 | rmdir $DVDIR 26 | #count=1; ./tab2json ds.tsv | while read i; do echo $i | python -m json.tool > $DSDIR/$count; let count++; done 27 | #count=1; ./tab2json files.tsv | while read i; do echo $i | python -m json.tool > $FILESDIR/$count; let 
count++; done 28 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/populate-bird-dvs1: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | BASEDIR='scripts/search' 3 | OUTDIR='data/in/dv-birds1' 4 | FULL_OUTDIR="$BASEDIR/$OUTDIR" 5 | rm -rf $FULL_OUTDIR 6 | mkdir -p $FULL_OUTDIR 7 | cd $BASEDIR 8 | count=1; ./tab2json-dvs data/dv-birds1.tsv | while read i; do echo $i | python -m json.tool > $OUTDIR/$count; let count++; done 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/populate-psi-dvs: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | BASEDIR='scripts/search' 3 | OUTDIR='data/in/dv-psi' 4 | FULL_OUTDIR="$BASEDIR/$OUTDIR" 5 | rm -rf $FULL_OUTDIR 6 | mkdir -p $FULL_OUTDIR 7 | cd $BASEDIR 8 | count=1; ./tab2json-dvs data/dv-psi.tsv | while read i; do echo $i | python -m json.tool > $OUTDIR/$count; let count++; done 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/populate-tree-dvs1: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | BASEDIR='scripts/search' 3 | OUTDIR='data/in/dv-trees1' 4 | FULL_OUTDIR="$BASEDIR/$OUTDIR" 5 | rm -rf $FULL_OUTDIR 6 | mkdir -p $FULL_OUTDIR 7 | cd $BASEDIR 8 | count=1; ./tab2json-dvs data/dv-trees1.tsv | while read i; do echo $i | python -m json.tool > $OUTDIR/$count; let count++; done 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/populate-users: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | BASEDIR='scripts/search' 3 | USERDIR='data/in/users' 4 | FULL_USERDIR="$BASEDIR/$USERDIR" 5 | rm -rf $FULL_USERDIR 6 | mkdir -p $FULL_USERDIR 7 | cd $BASEDIR 8 | 
count=1; ./tab2json-users users.tsv | while read i; do echo $i | python -m json.tool > $USERDIR/$count; let count++; done 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/query: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | curl -s 'http://localhost:8983/solr/collection1/select?rows=1000000&wt=json&indent=true&q=*%3A*' 3 | # show combination of public stuff OR pete's private stuff 4 | # curl -s --globoff 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=*&fq=({!join+from=groups_s+to=perms_ss}id:group_public+OR+{!join+from=groups_s+to=perms_ss}id:group_user2)' | jq '.response.docs[] | {name_sort}' 5 | # https://github.com/IQSS/dataverse/issues/1262 6 | # curl 'http://localhost:8983/solr/collection1/select?rows=1000000&wt=json&indent=true&hl=true&hl.fl=*&q=wright&hl.snippets=10' 7 | # remember elasticsearch? :) 8 | #curl 'http://localhost:9200/_search?pretty=true&q=*' 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/saved-search: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . scripts/search/export-keys 3 | # 2015-03-26 11:48:50.43 4 | curl -s http://localhost:8080/api/admin/savedsearches/list?key=$ADMINKEY | jq . 5 | if [ ! -z "$1" ]; then 6 | curl -s http://localhost:8080/api/dataverses/$1/links?key=$ADMINKEY | jq . 7 | fi 8 | if [ ! -z "$2" ]; then 9 | curl -s http://localhost:8080/api/datasets/$2/links?key=$ADMINKEY | jq . 10 | fi 11 | exit 12 | curl -s http://localhost:8080/api/admin/savedsearches -X POST -H 'Content-type:application/json' --upload-file scripts/search/data/savedSearchBasic.json | jq . 13 | curl -s http://localhost:8080/api/admin/savedsearches -X POST -H 'Content-type:application/json' --upload-file scripts/search/data/savedSearchAdvanced.json | jq . 
14 | # curl -s -X DELETE http://localhost:8080/api/admin/savedsearches/999 15 | scripts/search/dbsavedsearch 16 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/saved-search-setup: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | curl -X PUT -d true http://localhost:8080/api/admin/settings/:SearchApiNonPublicAllowed 3 | echo 4 | curl -s http://localhost:8080/api/admin/savedsearches -X POST -H 'Content-type:application/json' --upload-file scripts/search/data/savedSearchMaliBasicHealth.json | jq . 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/saved-search-test: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . scripts/search/export-keys 3 | #curl -s -X PUT http://localhost:8080/api/admin/savedsearches/makelinks/all | jq . 4 | diff -u scripts/search/tests/expected/saved-search <(curl -s "http://localhost:8080/api/search?key=$ADMINKEY&sort=name&subtree=psimalihealth&q=*" | jq '.data.items[] | {name,type}') 5 | diff -u scripts/search/tests/expected/saved-search-links <(curl -s http://localhost:8080/api/dataverses/psimalihealth/links?key=$ADMINKEY | jq .data) 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/search: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | if [ -z "$1" ]; then 3 | curl -H "X-Dataverse-key: $API_TOKEN" -s 'http://localhost:8080/api/search?q=*' 4 | #curl -s 'http://localhost:8080/api/search?q=*&key=pete' 5 | else 6 | # i.e. ./search 'q=*&fq=filetype_s:"image"&fq=dvtype:files' 7 | # i.e. ./search 'q=*&start=10' 8 | # i.e. ./search 'q=*&sort=name_sort&order=asc' 9 | # i.e. 
./search 'q=*&sort=name_sort&order=asc' | jq '.itemsJson[] | {name_sort}' 10 | curl -H "X-Dataverse-key: $API_TOKEN" -s "http://localhost:8080/api/search?$1" 11 | fi 12 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/solr-delete-id: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [ -z "$1" ]; then 3 | echo "No Solr ID provided." 4 | exit 1 5 | else 6 | echo "Deleting Solr id $1" 7 | OUTPUT=`curl -s http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"id:$1\"}}"` 8 | # exit code 7 is expected when Solr is down 9 | EXIT_CODE=$? 10 | #echo $EXIT_CODE 11 | #echo $OUTPUT 12 | fi 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/spellcheck: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # output: 3 | # "hits",1, 4 | # "misspellingsAndCorrections",["datvrse","dataverse"] 5 | curl -s 'http://localhost:8983/solr/spell?spellcheck=true&wt=json&indent=true&q=datvrse' 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tab2json-dvs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from optparse import OptionParser 4 | import csv 5 | try: 6 | import json 7 | except ImportError: 8 | import simplejson as json 9 | 10 | parser = OptionParser() 11 | options, args = parser.parse_args() 12 | 13 | if args: 14 | csv_file = open(args[0]) 15 | else: 16 | csv_file = sys.stdin 17 | 18 | reader = csv.DictReader(csv_file, delimiter="\t") 19 | rows = [row for row in reader] 20 | for row in rows: 21 | if "contactEmail" in row: 22 | contactArray = [] 23 | contactHash = {} 24 | contactHash["contactEmail"] = row["contactEmail"] 25 | 
contactArray.append(contactHash) 26 | row["dataverseContacts"] = contactArray 27 | del row["contactEmail"] 28 | if "subject" in row: 29 | subjectsArray = [] 30 | subjectsArray.append(row["subject"]) 31 | row["dataverseSubjects"] = subjectsArray 32 | del row["subject"] 33 | print json.dumps(row) 34 | csv_file.close() 35 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tab2json-users: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | from optparse import OptionParser 4 | import csv 5 | try: 6 | import json 7 | except ImportError: 8 | import simplejson as json 9 | 10 | parser = OptionParser() 11 | options, args = parser.parse_args() 12 | 13 | if args: 14 | csv_file = open(args[0]) 15 | else: 16 | csv_file = sys.stdin 17 | 18 | reader = csv.DictReader(csv_file, delimiter="\t") 19 | rows = [row for row in reader] 20 | for row in rows: 21 | print json.dumps(row) 22 | csv_file.close() 23 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/add-members-to-trees-group: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | curl -X PUT "http://localhost:8080/api/dataverses/root/groups/trees/roleAssignees/@chestnut?key=$ADMINKEY" 3 | curl -X PUT "http://localhost:8080/api/dataverses/root/groups/trees/roleAssignees/@spruce?key=$ADMINKEY" 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/data/dataset-trees1-edit-subject.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | Spruce Goose 4 | Spruce, Sabrina 5 | What the Spruce Goose was really made of. 
6 | Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/ 7 | 12 | 13 | Engineering 14 | 15 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/data/dataset-trees1-edit.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | Spruce Goose 4 | Spruce, Sabrina 5 | What the Spruce Goose was *really* made of. 6 | NONE 7 | 12 | 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/data/dataset-trees1.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | Spruce Goose 4 | Spruce, Sabrina 5 | What the Spruce Goose was really made of. 6 | Downloader will not use the Materials in any way prohibited by applicable laws. 7 | 12 | 18 | 19 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/data/dv-dash.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias":"dash", 3 | "name":"Titanic - 1999", 4 | "affiliation":"Affiliation value", 5 | "contactEmail":"pete@mailinator.com", 6 | "permissionRoot":false, 7 | "description":"A dataverse with a - (a dash) in the description" 8 | } 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/destroy-dataset-finch1: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . scripts/search/export-keys 3 | . scripts/search/assumptions 4 | echo $FIRST_FINCH_DATASET_ID 5 | OUTPUT=`curl -s -X DELETE http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID/destroy?key=$FINCHKEY` 6 | echo $OUTPUT 7 | echo $OUTPUT | jq . 
8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/destroy-dataset-spruce1: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # destroying requires publishing so uncomment this if need be 3 | # scripts/search/tests/publish-spruce1-and-test 4 | sleep 2 5 | . scripts/search/export-keys 6 | . scripts/search/assumptions 7 | OUTPUT=`curl -s -X DELETE http://localhost:8080/api/datasets/$FIRST_SPRUCE_DATASET_ID/destroy?key=$ADMINKEY` 8 | echo $OUTPUT 9 | echo $OUTPUT | jq . 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/edit-dataset-finch1: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . scripts/search/export-keys 3 | . scripts/search/assumptions 4 | GET_VERSION_OUTPUT=`curl -s GET http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID/versions/:latest?key=$FINCHKEY` 5 | echo $GET_VERSION_OUTPUT | jq .data > /tmp/old 6 | cp /tmp/old /tmp/new 7 | sed -i -e "s/Darwin's Finches/Darwin's Galápagos Finches/" /tmp/new 8 | EDIT_OUTPUT=`curl -s -H "Content-type:application/json" -X PUT -d @/tmp/new http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID/versions/:draft?key=$FINCHKEY` 9 | echo $EDIT_OUTPUT 10 | echo $EDIT_OUTPUT | jq . 
11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/anon: -------------------------------------------------------------------------------- 1 | "Please provide a key query parameter (?key=XXX) or via the HTTP header X-Dataverse-key" 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/anon-empty: -------------------------------------------------------------------------------- 1 | [] 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/anon3: -------------------------------------------------------------------------------- 1 | 4 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/anontest3: -------------------------------------------------------------------------------- 1 | [ 2 | "files:trees.png", 3 | "datasets:Spruce Goose", 4 | "dataverses:Trees", 5 | "dataverses:Spruce" 6 | ] 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/finch1: -------------------------------------------------------------------------------- 1 | [ 2 | "files:data.tsv", 3 | "datasets:Darwin's Finches", 4 | "dataverses:Birds", 5 | "dataverses:Finches", 6 | "dataverses:Sparrows", 7 | "dataverses:Wrens", 8 | "dataverses:Chestnut Sparrows" 9 | ] 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/finch3: -------------------------------------------------------------------------------- 1 | [ 2 | "files:trees.png", 3 | "datasets:Spruce Goose", 4 | "datasets:Darwin's Finches", 5 | "dataverses:Birds", 6 | "dataverses:Finches", 7 | "dataverses:Sparrows", 8 | "dataverses:Wrens", 9 | "dataverses:Chestnut 
Sparrows", 10 | "dataverses:Trees", 11 | "dataverses:Spruce" 12 | ] 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/nosuchuser: -------------------------------------------------------------------------------- 1 | "Bad api key 'nosuchuser'" 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/saved-search: -------------------------------------------------------------------------------- 1 | { 2 | "type": "dataverse", 3 | "name": "Child of Mali Health" 4 | } 5 | { 6 | "type": "dataverse", 7 | "name": "Grandchild of Mali Health" 8 | } 9 | { 10 | "type": "dataset", 11 | "name": "Mali health dataset 1" 12 | } 13 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/saved-search-links: -------------------------------------------------------------------------------- 1 | { 2 | "datasets that the psimalihealth has linked to": [ 3 | "Mali health dataset 1" 4 | ], 5 | "dataverses that link to the psimalihealth": [], 6 | "dataverses that the psimalihealth dataverse has linked to": [] 7 | } 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/solr-down: -------------------------------------------------------------------------------- 1 | { 2 | "message": "Exception running search for [*] with filterQueries [] and paginationStart [0]: edu.harvard.iq.dataverse.search.SearchException: Internal Dataverse Search Engine Error org.apache.solr.client.solrj.SolrServerException org.apache.solr.client.solrj.SolrServerException: Server refused connection at: http://localhost:8983/solr org.apache.http.conn.HttpHostConnectException org.apache.http.conn.HttpHostConnectException: Connection to http://localhost:8983 refused java.net.ConnectException 
java.net.ConnectException: Connection refused ", 3 | "status": "ERROR" 4 | } 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/spruce1: -------------------------------------------------------------------------------- 1 | [ 2 | "files:trees.png", 3 | "datasets:Spruce Goose", 4 | "dataverses:Trees", 5 | "dataverses:Spruce", 6 | "dataverses:Chestnut Trees" 7 | ] 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/spruce2: -------------------------------------------------------------------------------- 1 | [ 2 | "files:trees.png", 3 | "datasets:Spruce Goose", 4 | "dataverses:Birds", 5 | "dataverses:Trees", 6 | "dataverses:Spruce", 7 | "dataverses:Chestnut Trees" 8 | ] 9 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/expected/zero: -------------------------------------------------------------------------------- 1 | 0 2 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/explicit-group-add: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | curl -X POST http://localhost:8080/api/dataverses/root/groups?key=$ADMINKEY -H "Content-type: application/json" --upload-file scripts/search/data/group-explicit-trees.json 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/files: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | OUT=`curl -s "http://localhost:8080/api/admin/index/filesearch?persistentId=$1&q=$2"` 3 | echo $OUT | jq . 
4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/grant-authusers-add-on-root: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . scripts/search/export-keys 3 | OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \":authenticated-users\",\"role\": \"fullContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"` 4 | echo $OUTPUT 5 | echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/grant-finch-admin-on-spruce: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . scripts/search/assumptions 3 | curl -s -X POST -H 'Content-Type: application/x-www-form-urlencoded' "http://localhost:8080/api/roles/assignments?username=$FINCH_USERNAME&roleId=$ADMIN_ROLE&definitionPointId=$SPRUCE_DATAVERSE&key=$SPRUCEKEY" | jq ' .data | {assignee,_roleAlias}' 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/grant-ipgroup3-add-on-root: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . scripts/search/export-keys 3 | OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"&ip/ipGroup3\",\"role\": \"dvContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"` 4 | echo $OUTPUT 5 | echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/grant-shibgroup1-add-on-root: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . 
scripts/search/export-keys 3 | OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"&shib/1\",\"role\": \"dvContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"` 4 | echo $OUTPUT 5 | echo $OUTPUT | jq . 6 | #echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/grant-spruce-admin-on-birds: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . scripts/search/assumptions 3 | OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"@spruce\",\"role\": \"admin\"}" "http://localhost:8080/api/dataverses/birds/assignments?key=$ADMINKEY"` 4 | echo $OUTPUT 5 | echo 6 | echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/ipgroup-add: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . scripts/search/export-keys 3 | OUTPUT=`curl -s -X POST -d @scripts/api/data/ipGroup-all.json http://localhost:8080/api/admin/groups/ip -H "Content-type:application/json"` 4 | echo $OUTPUT 5 | echo $OUTPUT | jq . 
6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/permissions2: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$FINCHKEY" | jq .data) scripts/search/tests/expected/finch1 3 | 4 | diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPRUCEKEY" | jq .data) scripts/search/tests/expected/spruce2 5 | 6 | diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPARROWKEY" | jq .data) scripts/search/tests/expected/anon-empty 7 | 8 | diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$WRENKEY" | jq .data) scripts/search/tests/expected/anon-empty 9 | 10 | diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$CHESTNUTKEY" | jq .data) scripts/search/tests/expected/anon-empty 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/permissions3: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | diff <(curl -s "http://localhost:8080/api/search?q=*" | jq .data.count_in_response) scripts/search/tests/expected/anon3 3 | 4 | diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$FINCHKEY" | jq .data) scripts/search/tests/expected/finch3 5 | 6 | diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPRUCEKEY" | jq .data) scripts/search/tests/expected/spruce2 7 | 8 | diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPARROWKEY" | jq .data) scripts/search/tests/expected/anontest3 9 | 10 | diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$WRENKEY" | jq .data) scripts/search/tests/expected/anontest3 11 | 12 | diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$CHESTNUTKEY" | jq .data) scripts/search/tests/expected/anontest3 13 | 
-------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/permissions3-full-anon: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | #curl -s "http://localhost:8080/api/search?q=*" | python -m json.tool > scripts/search/tests/expected/anon3-full 3 | diff <(curl -s "http://localhost:8080/api/search?q=*" | python -m json.tool) scripts/search/tests/expected/anon3-full 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/publish-dataset-spruce1: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . scripts/search/export-keys 3 | . scripts/search/assumptions 4 | OUTPUT=`cat /dev/null | curl -s --insecure -u $SPRUCEKEY: -X POST -H 'In-Progress: false' --data-binary @- "https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/study/$FIRST_SPRUCE_DOI"` 5 | echo $OUTPUT 6 | echo 7 | echo $OUTPUT | xmllint -format - 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/publish-dataverse-birds: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | OUTPUT=`cat /dev/null | curl -s --insecure -X POST -H 'In-Progress: false' --data-binary @- https://admin:admin@localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/birds` 3 | echo $OUTPUT 4 | echo 5 | echo $OUTPUT | xmllint -format - 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/publish-dataverse-finches: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | OUTPUT=`cat /dev/null | curl -s --insecure -X POST -H 'In-Progress: false' --data-binary @- https://finch:finch@localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/finches` 3 | 
echo $OUTPUT 4 | echo 5 | echo $OUTPUT | xmllint -format - 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/publish-dataverse-root: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . scripts/search/export-keys 3 | OUTPUT=`cat /dev/null | curl -s --insecure -u $ADMINKEY: -X POST -H 'In-Progress: false' --data-binary @- https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/root` 4 | echo $OUTPUT 5 | echo 6 | echo $OUTPUT | xmllint -format - 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/publish-dataverse-spruce: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . scripts/search/export-keys 3 | OUTPUT=`cat /dev/null | curl -s --insecure -u $SPRUCEKEY: -X POST -H 'In-Progress: false' --data-binary @- https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/spruce` 4 | echo $OUTPUT 5 | echo 6 | echo $OUTPUT | xmllint -format - 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/publish-dataverse-trees: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . 
scripts/search/export-keys 3 | OUTPUT=`cat /dev/null | curl -s --insecure -u $SPRUCEKEY: -X POST -H 'In-Progress: false' --data-binary @- https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/trees` 4 | echo $OUTPUT 5 | echo 6 | echo $OUTPUT | xmllint -format - 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/publish-spruce1-and-test: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | scripts/search/tests/publish-dataverse-root 3 | scripts/search/tests/publish-dataverse-trees 4 | scripts/search/tests/publish-dataverse-spruce 5 | scripts/search/tests/publish-dataset-spruce1 6 | #scripts/search/tests/permissions3 7 | #scripts/search/tests/permissions3-full-anon 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/revoke-finch-admin-on-spruce: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . scripts/search/assumptions 3 | curl -s -X DELETE "http://localhost:8080/api/dataverses/$SPRUCE_DATAVERSE/assignments/$FINCH_ADMIN_ON_SPRUCE?key=$SPRUCEKEY" | jq .data.message 4 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/revoke-spruce-admin-on-birds: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . 
scripts/search/assumptions 3 | OUTPUT=`curl -s -X DELETE "http://localhost:8080/api/dataverses/$BIRDS_DATAVERSE/assignments/$SPRUCE_ADMIN_ON_BIRDS?key=$FINCHKEY"` 4 | echo $OUTPUT 5 | echo 6 | echo $OUTPUT | jq .data.message 7 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/solr-down: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | diff <(curl -s 'http://localhost:8080/api/search?q=*' | jq .) scripts/search/tests/expected/solr-down 3 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/special-characters: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # curl -H "Content-type:application/json" -X POST -d @scripts/search/tests/data/dv-colon.json "http://localhost:8080/api/dataverses/peteTop?key=pete" 3 | # curl 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q="description:\:"' 4 | diff <(curl -s 'http://localhost:8080/api/search?q=:') scripts/search/tests/expected/colon 5 | 6 | # http://stackoverflow.com/questions/18277609/search-in-solr-with-special-characters 7 | # curl -H "Content-type:application/json" -X POST -d @scripts/search/tests/data/dv-dash.json "http://localhost:8080/api/dataverses/peteTop?key=pete" 8 | # curl 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=name:\-' 9 | # diff <(curl -s 'http://localhost:8080/api/search?q=name:"Titanic - 1999"') scripts/search/tests/expected/dash 10 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/tests/upload-1000-files: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . scripts/search/export-keys 3 | . 
scripts/search/assumptions 4 | echo "Uploading 1000 files" 5 | curl -s --insecure --data-binary @scripts/search/data/binary/1000files.zip -H 'Content-Disposition: filename=1000files.zip' -H 'Content-Type: application/zip' -H 'Packaging: http://purl.org/net/sword/package/SimpleZip' -u spruce:spruce https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/$FIRST_SPRUCE_DOI 6 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/search/users.tsv: -------------------------------------------------------------------------------- 1 | userName firstName lastName email 2 | finch Fiona Finch finch@mailinator.com 3 | sparrow Sammy Sparrow sparrow@mailinator.com 4 | wren Wilbur Wren wren@mailinator.com 5 | spruce Sabrina Spruce spruce@mailinator.com 6 | chestnut Caleb Chestnut chestnut@mailinator.com 7 | psiadmin PSI Admin psi@mailinator.com 8 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/trello/trello: -------------------------------------------------------------------------------- 1 | curl -s https://api.trello.com/1/boards/527d1605c7b30060420027b0 | python -m json.tool 2 | #curl -s https://api.trello.com/1/lists/527d1605c7b30060420027b0?fields=name&cards=open&card_fields=name 3 | # https://api.trello.com/1/lists/4eea4ffc91e31d174600004a?fields=name&cards=open&card_fields=name&key=[application_key]&token=[optional_auth_token] 4 | 5 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/vagrant/rpmbuild.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | rpm -Uvh http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-7.noarch.rpm 3 | yum install -y rpm-build httpd-devel libapreq2-devel R-devel 4 | -------------------------------------------------------------------------------- 
/postgresql/testdata/scripts/vagrant/setup-solr.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Setting up Solr" 3 | GLASSFISH_USER=glassfish 4 | GLASSFISH_USER_HOME=~glassfish 5 | SOLR_HOME=$GLASSFISH_USER_HOME/solr 6 | su $GLASSFISH_USER -s /bin/sh -c "mkdir $SOLR_HOME" 7 | su $GLASSFISH_USER -s /bin/sh -c "cp /downloads/solr-4.6.0.tgz $SOLR_HOME" 8 | su $GLASSFISH_USER -s /bin/sh -c "cd $SOLR_HOME && tar xfz solr-4.6.0.tgz" 9 | su $GLASSFISH_USER -s /bin/sh -c "cp /conf/solr/4.6.0/schema.xml $SOLR_HOME/solr-4.6.0/example/solr/collection1/conf/schema.xml" 10 | su $GLASSFISH_USER -s /bin/sh -c "cd $SOLR_HOME/solr-4.6.0/example && java -jar start.jar &" 11 | -------------------------------------------------------------------------------- /postgresql/testdata/scripts/vagrant/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | echo "running tests..." 3 | echo "running search tests..." 4 | cd / 5 | scripts/search/tests/permissions 6 | echo "done running tests. no output is good. 
silence is golden" 7 | -------------------------------------------------------------------------------- /postgresql/testscripts/db.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | psql -U postgres -c "CREATE ROLE dvnapp UNENCRYPTED PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN" template1 3 | psql -U dvnapp -c 'CREATE DATABASE "dvndb" WITH OWNER = "dvnapp"' template1 4 | -------------------------------------------------------------------------------- /postgresql/testscripts/install: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | export HOST_ADDRESS=localhost 3 | export GLASSFISH_ROOT=/usr/local/glassfish4 4 | export FILES_DIR=/usr/local/glassfish4/glassfish/domains/domain1/files 5 | export DB_NAME=dvndb 6 | export DB_PORT=5432 7 | export DB_HOST=localhost 8 | export DB_USER=dvnapp 9 | export DB_PASS=secret 10 | export RSERVE_HOST=localhost 11 | export RSERVE_PORT=6311 12 | export RSERVE_USER=rserve 13 | export RSERVE_PASS=rserve 14 | export SMTP_SERVER=localhost 15 | export MEM_HEAP_SIZE=2048 16 | export GLASSFISH_DOMAIN=domain1 17 | cd scripts/installer 18 | cp pgdriver/postgresql-8.4-703.jdbc4.jar $GLASSFISH_ROOT/glassfish/lib 19 | #cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf 20 | cp /opt/dv/testdata/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf 21 | ./glassfish-setup.sh 22 | -------------------------------------------------------------------------------- /postgresql/testscripts/post: -------------------------------------------------------------------------------- 1 | #/bin/sh 2 | cd scripts/api 3 | ./setup-all.sh --insecure | tee /tmp/setup-all.sh.out 4 | cd ../.. 
5 | psql -U dvnapp dvndb -f scripts/database/reference_data.sql 6 | psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql 7 | psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/createsequence.sql 8 | scripts/search/tests/publish-dataverse-root 9 | #git checkout scripts/api/data/dv-root.json 10 | scripts/search/tests/grant-authusers-add-on-root 11 | scripts/search/populate-users 12 | scripts/search/create-users 13 | scripts/search/tests/create-all-and-test 14 | scripts/search/tests/publish-spruce1-and-test 15 | #java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /var/www/html/schemaspy/latest 16 | -------------------------------------------------------------------------------- /secrets/admin/password: -------------------------------------------------------------------------------- 1 | admin1 2 | -------------------------------------------------------------------------------- /secrets/api/key: -------------------------------------------------------------------------------- 1 | supersecret 2 | -------------------------------------------------------------------------------- /secrets/db/password: -------------------------------------------------------------------------------- 1 | dvnsecret 2 | -------------------------------------------------------------------------------- /secrets/db_asadmin: -------------------------------------------------------------------------------- 1 | AS_ADMIN_ALIASPASSWORD=dvnsecret 2 | -------------------------------------------------------------------------------- /secrets/doi/password: -------------------------------------------------------------------------------- 1 | changeme 2 | -------------------------------------------------------------------------------- /secrets/doi_asadmin: -------------------------------------------------------------------------------- 1 | 
AS_ADMIN_ALIASPASSWORD=changeme 2 | -------------------------------------------------------------------------------- /solr/4.6.0/readme.me: -------------------------------------------------------------------------------- 1 | Please see the dev guide for what to do with Solr config file(s). 2 | 3 | schema.xml.4.6.0.dist is the original schema.xml file that came from the 4.6.0 Solr distribution. It's only included so you can diff the files to see what has changed. 4 | -------------------------------------------------------------------------------- /solr/Dockerfile: -------------------------------------------------------------------------------- 1 | #FROM ndslabs/dataverse-solr:latest 2 | FROM vtti/dataverse-solr 3 | COPY schema.xml /usr/local/solr-4.6.0/example/solr/collection1/conf/schema.xml 4 | -------------------------------------------------------------------------------- /solr7/7.3.0/readme.me: -------------------------------------------------------------------------------- 1 | Please see the dev guide for what to do with Solr config files. -------------------------------------------------------------------------------- /solr7/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | MAINTAINER Dataverse (support@dataverse.org) 3 | 4 | RUN yum install -y unzip java-1.8.0-openjdk-devel lsof 5 | 6 | # Install Solr 7.3.0 7 | # The context of the build is the "conf" directory. 
8 | COPY solr-7.3.0dv.tgz /tmp 9 | RUN cd /tmp \ 10 | && tar xvfz solr-7.3.0dv.tgz \ 11 | && rm solr-7.3.0dv.tgz \ 12 | && mkdir /usr/local/solr \ 13 | && mv solr-7.3.0 /usr/local/solr/ 14 | 15 | COPY 7.3.0/schema.xml /tmp 16 | COPY solrconfig_master.xml /tmp 17 | COPY solrconfig_slave.xml /tmp 18 | 19 | RUN chmod g=u /etc/passwd 20 | 21 | RUN chgrp -R 0 /usr/local/solr && \ 22 | chmod -R g=u /usr/local/solr 23 | 24 | EXPOSE 8983 25 | 26 | COPY Dockerfile / 27 | COPY entrypoint.sh / 28 | 29 | ENTRYPOINT ["/entrypoint.sh"] 30 | USER 1001 31 | CMD ["solr"] 32 | -------------------------------------------------------------------------------- /solr7/backup_cron.sh: -------------------------------------------------------------------------------- 1 | 0 */6 * * * curl 'http://localhost:8983/solr/collection1/replication?command=backup&location=/home/share' 2 | -------------------------------------------------------------------------------- /triggers/affiliations.sql: -------------------------------------------------------------------------------- 1 | DROP TRIGGER IF EXISTS group_trigger on explicitgroup; 2 | 3 | CREATE TRIGGER group_trigger AFTER INSERT ON explicitgroup 4 | 5 | FOR EACH ROW EXECUTE PROCEDURE groupmonitor(); 6 | 7 | CREATE OR REPLACE FUNCTION groupmonitor() RETURNS TRIGGER AS $group_table$ 8 | 9 | BEGIN insert into explicitgroup_authenticateduser select e.id, a.id from explicitgroup as e, authenticateduser as a where e.displayname=a.affiliation and NOT EXISTS (select 1 from explicitgroup_authenticateduser where a.id = containedauthenticatedusers_id and e.id = explicitgroup_id); 10 | 11 | RETURN NEW; 12 | 13 | END; 14 | 15 | $group_table$ LANGUAGE plpgsql; 16 | -------------------------------------------------------------------------------- /triggers/lang-properties-convert.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | import sys 3 | import codecs 4 | import re 5 | 6 | def 
display_unicode(data): 7 | return "".join(["\\u%s" % hex(ord(l))[2:].zfill(4) for l in data]) 8 | 9 | with codecs.open(sys.argv[1],'r',encoding='utf8') as f: 10 | text = f.read() 11 | 12 | if text: 13 | for uni in text.split('\n'): 14 | data = uni.split("=",1) 15 | 16 | try: 17 | print("%s=%s" % (data[0], display_unicode(data[1]))) 18 | except: 19 | print("%s" % uni) #display_unicode(uni)) 20 | --------------------------------------------------------------------------------