├── .github
├── ISSUE_TEMPLATE
│ ├── bug-report-template.md
│ ├── development-issue-template.md
│ └── feature-request-template.md
├── pull_request_template.md
└── workflows
│ ├── apply-formatting.yml
│ ├── build-docs.yml
│ ├── format-check.yml
│ └── unit-tests.yml
├── .gitignore
├── .gitlab-ci.yml
├── .gitlab
├── build
│ ├── build_core_image.yml
│ ├── build_foxx_image.yml
│ ├── build_gcs_base_image.yml
│ ├── build_gcs_image.yml
│ ├── build_repo_image.yml
│ ├── build_ws_image.yml
│ ├── force_build_core_image.yml
│ ├── force_build_foxx_image.yml
│ ├── force_build_gcs_base_image.yml
│ ├── force_build_gcs_image.yml
│ ├── force_build_repo_image.yml
│ └── force_build_ws_image.yml
├── build_ci_infrastructure.yml
├── common.yml
├── end_to_end.yml
├── infrastructure.yml
├── skip_ci_infrastructure.yml
├── stage_build.yml
├── stage_build_base.yml
├── stage_clear_cache.yml
├── stage_image_check.yml
├── stage_pipeline_serialize.yml
└── stage_provision_client.yml
├── .gitmodules
├── CHANGELOG.md
├── CMakeLists.txt
├── LICENSE.md
├── README.md
├── cmake
├── Boost.cmake
├── CURL.cmake
├── JSON.cmake
├── JSONSchema.cmake
├── OpenSSL.cmake
├── Protobuf.cmake
├── Utils.cmake
├── Version.cmake
├── Web.cmake
├── ZeroMQ.cmake
├── Zlib.cmake
├── curl_version.cpp
├── sodium_version.cpp
└── zlib_version.cpp
├── common
├── CMakeLists.txt
├── include
│ └── common
│ │ ├── CommunicatorFactory.hpp
│ │ ├── CredentialFactory.hpp
│ │ ├── DynaLog.hpp
│ │ ├── ErrorCodes.hpp
│ │ ├── GSSAPI_Utils.hpp
│ │ ├── IAuthenticationManager.hpp
│ │ ├── ICommunicator.hpp
│ │ ├── ICredentials.hpp
│ │ ├── IMessage.hpp
│ │ ├── IMessageMapper.hpp
│ │ ├── IOperator.hpp
│ │ ├── IServer.hpp
│ │ ├── ISocket.hpp
│ │ ├── KeyGenerator.hpp
│ │ ├── MessageBuffer.hpp
│ │ ├── MessageFactory.hpp
│ │ ├── OperatorFactory.hpp
│ │ ├── OperatorTypes.hpp
│ │ ├── ProtoBufMap.hpp
│ │ ├── ProtocolTypes.hpp
│ │ ├── ServerFactory.hpp
│ │ ├── SmartTokenizer.hpp
│ │ ├── SocketFactory.hpp
│ │ ├── SocketOptions.hpp
│ │ ├── TraceException.hpp
│ │ ├── Util.hpp
│ │ ├── Version.hpp.in
│ │ ├── fpconv.h
│ │ ├── libjson.hpp
│ │ └── powers.h
├── proto
│ └── common
│ │ ├── CMakeLists.txt
│ │ ├── SDMS.proto
│ │ ├── SDMS_Anon.proto
│ │ ├── SDMS_Auth.proto
│ │ └── Version.proto.in
├── source
│ ├── Buffer.hpp
│ ├── CommunicatorFactory.cpp
│ ├── CredentialFactory.cpp
│ ├── DynaLog.cpp
│ ├── Frame.cpp
│ ├── Frame.hpp
│ ├── KeyGenerator.cpp
│ ├── MessageFactory.cpp
│ ├── OperatorFactory.cpp
│ ├── ProtoBufFactory.cpp
│ ├── ProtoBufFactory.hpp
│ ├── ProtoBufMap.cpp
│ ├── ServerFactory.cpp
│ ├── SocketFactory.cpp
│ ├── SocketOptions.cpp
│ ├── Util.cpp
│ ├── communicators
│ │ ├── ZeroMQCommunicator.cpp
│ │ ├── ZeroMQCommunicator.hpp
│ │ ├── ZeroMQCommunicatorSecure.cpp
│ │ └── ZeroMQCommunicatorSecure.hpp
│ ├── credentials
│ │ ├── ZeroMQSocketCredentials.cpp
│ │ └── ZeroMQSocketCredentials.hpp
│ ├── fpconv.cpp
│ ├── messages
│ │ ├── GoogleProtoMessage.cpp
│ │ └── GoogleProtoMessage.hpp
│ ├── operators
│ │ ├── AuthenticationOperator.cpp
│ │ ├── AuthenticationOperator.hpp
│ │ ├── RouterBookKeepingOperator.cpp
│ │ └── RouterBookKeepingOperator.hpp
│ ├── servers
│ │ ├── Proxy.cpp
│ │ ├── Proxy.hpp
│ │ ├── ProxyBasicZMQ.cpp
│ │ └── ProxyBasicZMQ.hpp
│ ├── sockets
│ │ ├── ZeroMQSocket.cpp
│ │ └── ZeroMQSocket.hpp
│ └── support
│ │ └── zeromq
│ │ ├── Context.hpp
│ │ ├── SocketTranslator.cpp
│ │ └── SocketTranslator.hpp
└── tests
│ ├── CMakeLists.txt
│ ├── security
│ ├── CMakeLists.txt
│ └── tcp_secure
│ │ ├── CMakeLists.txt
│ │ ├── README.md
│ │ ├── runtests.cmake
│ │ ├── test_tcp_insecure.sh
│ │ ├── test_tcp_secure.sh
│ │ ├── test_tcp_secure_client.cpp
│ │ └── test_tcp_secure_server.cpp
│ └── unit
│ ├── CMakeLists.txt
│ ├── test_Buffer.cpp
│ ├── test_CommunicatorFactory.cpp
│ ├── test_DynaLog.cpp
│ ├── test_Frame.cpp
│ ├── test_MessageFactory.cpp
│ ├── test_OperatorFactory.cpp
│ ├── test_ProtoBufFactory.cpp
│ ├── test_ProtoBufMap.cpp
│ ├── test_Proxy.cpp
│ ├── test_ProxyBasicZMQ.cpp
│ ├── test_SocketFactory.cpp
│ ├── test_SocketOptions.cpp
│ └── test_Value.cpp
├── compose
├── README.md
├── all
│ ├── build_images_for_compose.sh
│ ├── cleanup_globus_files.sh
│ ├── compose.yml
│ ├── generate_env.sh
│ ├── generate_globus_files.sh
│ └── globus-connect-server.log
├── metadata
│ ├── build_metadata_images_for_compose.sh
│ ├── compose.yml
│ ├── generate_build_args.sh
│ └── generate_env.sh
└── repo
│ ├── build_repo_images_for_compose.sh
│ ├── cleanup_globus_files.sh
│ ├── compose.yml
│ ├── generate_env.sh
│ └── generate_globus_files.sh
├── config
└── gsi-authz.conf
├── core
├── CMakeLists.txt
├── database
│ ├── CMakeLists.txt
│ ├── backup
│ │ ├── datafed-backup-cron
│ │ └── datafed-backup.sh
│ ├── foxx
│ │ ├── README.md
│ │ ├── api
│ │ │ ├── .eslintrc.json
│ │ │ ├── acl_router.js
│ │ │ ├── admin_router.js
│ │ │ ├── authz_router.js
│ │ │ ├── coll_router.js
│ │ │ ├── config_router.js
│ │ │ ├── data_router.js
│ │ │ ├── group_router.js
│ │ │ ├── metrics_router.js
│ │ │ ├── note_router.js
│ │ │ ├── process.js
│ │ │ ├── proj_router.js
│ │ │ ├── query_router.js
│ │ │ ├── repo_router.js
│ │ │ ├── schema_router.js
│ │ │ ├── support.js
│ │ │ ├── tag_router.js
│ │ │ ├── task_router.js
│ │ │ ├── tasks.js
│ │ │ ├── topic_router.js
│ │ │ ├── user_router.js
│ │ │ └── version_router.js.in
│ │ ├── db_clear.js
│ │ ├── db_create.js
│ │ ├── db_migrate_0_10.js
│ │ ├── index.js
│ │ ├── manifest.json.in
│ │ ├── passwd.file
│ │ └── tests
│ │ │ ├── support.test.js
│ │ │ └── version.test.js
│ └── tests
│ │ ├── test_foxx.sh
│ │ ├── test_setup.sh
│ │ └── test_teardown.sh
├── docker
│ ├── Dockerfile
│ └── entrypoint.sh
└── server
│ ├── AuthMap.cpp
│ ├── AuthMap.hpp
│ ├── AuthenticationManager.cpp
│ ├── AuthenticationManager.hpp
│ ├── CMakeLists.txt
│ ├── ClientWorker.cpp
│ ├── ClientWorker.hpp
│ ├── Condition.cpp
│ ├── Condition.hpp
│ ├── Config.cpp
│ ├── Config.hpp
│ ├── CoreServer.cpp
│ ├── CoreServer.hpp
│ ├── DatabaseAPI.cpp
│ ├── DatabaseAPI.hpp
│ ├── GlobusAPI.cpp
│ ├── GlobusAPI.hpp
│ ├── ICoreServer.hpp
│ ├── ITaskMgr.hpp
│ ├── ITaskWorker.hpp
│ ├── PublicKeyTypes.hpp
│ ├── TaskMgr.cpp
│ ├── TaskMgr.hpp
│ ├── TaskWorker.cpp
│ ├── TaskWorker.hpp
│ ├── Version.hpp.in
│ ├── main.cpp
│ └── tests
│ ├── CMakeLists.txt
│ └── unit
│ ├── CMakeLists.txt
│ ├── test_AuthMap.cpp
│ └── test_AuthenticationManager.cpp
├── doc_source
├── CMakeLists.txt
├── README.md
├── index.rst
└── source
│ ├── _static
│ ├── css
│ │ └── custom.css
│ ├── data_lifecycle.png
│ ├── globus_endpoints
│ │ ├── finding_endpoint_01.png
│ │ ├── finding_endpoint_02.png
│ │ └── finding_endpoint_03.png
│ ├── papers_presentations
│ │ ├── 2019_CSCI.pdf
│ │ ├── 2019_CSCI_slides.pdf
│ │ ├── 2020_SMC.pdf
│ │ ├── 2020_SMC_slides.pdf
│ │ └── DataFed_General_Presentation.pptx
│ ├── python_high_level
│ │ ├── provenance.png
│ │ ├── search_01.png
│ │ ├── search_02.png
│ │ └── search_03.png
│ ├── simplified_architecture.png
│ └── system_components.png
│ ├── admin
│ ├── install_bare_metal.rst
│ └── install_docker.rst
│ ├── conf.py
│ ├── dev
│ ├── design.rst
│ ├── project.rst
│ ├── release.rst
│ └── roadmap.rst
│ ├── index.rst
│ ├── system
│ ├── getting_started.rst
│ ├── introduction.rst
│ ├── overview.rst
│ ├── papers.rst
│ └── usecases.rst
│ └── user
│ ├── cli
│ ├── guide.rst
│ ├── header.rst
│ └── reference.rst
│ ├── client
│ └── install.rst
│ ├── python
│ ├── high_level_guide.rst
│ └── notebooks.rst
│ └── web
│ └── portal.rst
├── docker
├── Dockerfile.dependencies
├── Dockerfile.foxx
├── Dockerfile.runtime
├── README.md
└── entrypoint_foxx.sh
├── docs
├── _generated
│ └── cli_python_cmd_ref.html
├── _images
│ ├── data_lifecycle.png
│ ├── finding_endpoint_01.png
│ ├── finding_endpoint_02.png
│ ├── finding_endpoint_03.png
│ ├── provenance.png
│ ├── search_01.png
│ ├── search_02.png
│ ├── search_03.png
│ ├── simplified_architecture.png
│ └── system_components.png
├── _sources
│ ├── _generated
│ │ └── cli_python_cmd_ref.rst.txt
│ ├── admin
│ │ └── general.rst.txt
│ ├── autoapi
│ │ ├── datafed
│ │ │ ├── CLI
│ │ │ │ └── index.rst.txt
│ │ │ ├── CommandLib
│ │ │ │ └── index.rst.txt
│ │ │ ├── Config
│ │ │ │ └── index.rst.txt
│ │ │ ├── Connection
│ │ │ │ └── index.rst.txt
│ │ │ ├── MessageLib
│ │ │ │ └── index.rst.txt
│ │ │ ├── SDMS_Anon_pb2
│ │ │ │ └── index.rst.txt
│ │ │ ├── SDMS_Auth_pb2
│ │ │ │ └── index.rst.txt
│ │ │ ├── SDMS_pb2
│ │ │ │ └── index.rst.txt
│ │ │ ├── VERSION
│ │ │ │ └── index.rst.txt
│ │ │ ├── Version_pb2
│ │ │ │ └── index.rst.txt
│ │ │ └── index.rst.txt
│ │ └── index.rst.txt
│ ├── dev
│ │ ├── design.rst.txt
│ │ ├── project.rst.txt
│ │ ├── release.rst.txt
│ │ └── roadmap.rst.txt
│ ├── index.rst.txt
│ ├── system
│ │ ├── getting_started.rst.txt
│ │ ├── introduction.rst.txt
│ │ ├── overview.rst.txt
│ │ ├── papers.rst.txt
│ │ └── usecases.rst.txt
│ └── user
│ │ ├── cli
│ │ ├── guide.rst.txt
│ │ ├── header.rst.txt
│ │ └── reference.rst.txt
│ │ ├── client
│ │ └── install.rst.txt
│ │ ├── python
│ │ ├── high_level_guide.rst.txt
│ │ └── notebooks.rst.txt
│ │ └── web
│ │ └── portal.rst.txt
├── _static
│ ├── basic.css
│ ├── css
│ │ ├── badge_only.css
│ │ ├── custom.css
│ │ ├── fonts
│ │ │ ├── Roboto-Slab-Bold.woff
│ │ │ ├── Roboto-Slab-Bold.woff2
│ │ │ ├── Roboto-Slab-Regular.woff
│ │ │ ├── Roboto-Slab-Regular.woff2
│ │ │ ├── fontawesome-webfont.eot
│ │ │ ├── fontawesome-webfont.svg
│ │ │ ├── fontawesome-webfont.ttf
│ │ │ ├── fontawesome-webfont.woff
│ │ │ ├── fontawesome-webfont.woff2
│ │ │ ├── lato-bold-italic.woff
│ │ │ ├── lato-bold-italic.woff2
│ │ │ ├── lato-bold.woff
│ │ │ ├── lato-bold.woff2
│ │ │ ├── lato-normal-italic.woff
│ │ │ ├── lato-normal-italic.woff2
│ │ │ ├── lato-normal.woff
│ │ │ └── lato-normal.woff2
│ │ └── theme.css
│ ├── data_lifecycle.png
│ ├── doctools.js
│ ├── documentation_options.js
│ ├── file.png
│ ├── globus_endpoints
│ │ ├── finding_endpoint_01.png
│ │ ├── finding_endpoint_02.png
│ │ └── finding_endpoint_03.png
│ ├── graphviz.css
│ ├── js
│ │ ├── badge_only.js
│ │ ├── html5shiv-printshiv.min.js
│ │ ├── html5shiv.min.js
│ │ └── theme.js
│ ├── language_data.js
│ ├── minus.png
│ ├── papers_presentations
│ │ ├── 2019_CSCI.pdf
│ │ ├── 2019_CSCI_slides.pdf
│ │ ├── 2020_SMC.pdf
│ │ ├── 2020_SMC_slides.pdf
│ │ └── DataFed_General_Presentation.pptx
│ ├── plus.png
│ ├── pygments.css
│ ├── python_high_level
│ │ ├── provenance.png
│ │ ├── search_01.png
│ │ ├── search_02.png
│ │ └── search_03.png
│ ├── searchtools.js
│ ├── simplified_architecture.png
│ ├── sphinx_highlight.js
│ └── system_components.png
├── admin
│ └── general.html
├── autoapi
│ ├── datafed
│ │ ├── CLI
│ │ │ └── index.html
│ │ ├── CommandLib
│ │ │ └── index.html
│ │ ├── Config
│ │ │ └── index.html
│ │ ├── Connection
│ │ │ └── index.html
│ │ ├── MessageLib
│ │ │ └── index.html
│ │ ├── SDMS_Anon_pb2
│ │ │ └── index.html
│ │ ├── SDMS_Auth_pb2
│ │ │ └── index.html
│ │ ├── SDMS_pb2
│ │ │ └── index.html
│ │ ├── VERSION
│ │ │ └── index.html
│ │ ├── Version_pb2
│ │ │ └── index.html
│ │ └── index.html
│ └── index.html
├── dev
│ ├── design.html
│ ├── project.html
│ ├── release.html
│ └── roadmap.html
├── genindex.html
├── index.html
├── objects.inv
├── py-modindex.html
├── search.html
├── searchindex.js
├── system
│ ├── getting_started.html
│ ├── introduction.html
│ ├── overview.html
│ ├── papers.html
│ └── usecases.html
└── user
│ ├── cli
│ ├── guide.html
│ ├── header.html
│ └── reference.html
│ ├── client
│ └── install.html
│ ├── python
│ ├── high_level_guide.html
│ └── notebooks.html
│ └── web
│ └── portal.html
├── docs_other
├── README.md
└── dev
│ └── design
│ ├── Auth.proto Summary.xlsx
│ ├── CLI Commands Summary.xlsx
│ ├── SDMS Overview and Design.docx
│ ├── catalog.txt
│ ├── data_life_cycle.odg
│ ├── graph
│ ├── graph_schema.html
│ └── graph_schema2.html
│ ├── schema.jpg
│ ├── schema.odg
│ ├── sdms_architecture_v2.odg
│ ├── sdms_architecture_v3.odg
│ ├── sdms_spec.odt
│ ├── sdms_sys_diagram.jpg
│ └── sdms_sys_diagram.odg
├── facility
├── CMakeLists.txt
├── README.md
├── client
│ ├── CMakeLists.txt
│ ├── cli
│ │ ├── CMakeLists.txt
│ │ └── main.cpp
│ └── lib
│ │ ├── CMakeLists.txt
│ │ ├── Client.cpp
│ │ ├── Client.hpp
│ │ ├── bin2ascii.h
│ │ ├── pbjson.cpp
│ │ ├── pbjson.hpp
│ │ └── test
│ │ ├── CMakeLists.txt
│ │ └── main.cpp
└── server
│ ├── CMakeLists.txt
│ ├── CentralDatabaseClient.cpp
│ ├── CentralDatabaseClient.hpp
│ ├── CentralStorage.cpp
│ ├── CentralStorage.hpp
│ ├── FacilityServer.cpp
│ ├── FacilityServer.hpp
│ ├── Session.cpp
│ ├── Session.hpp
│ └── main.cpp
├── jupyter_notebooks
├── 0_verify.ipynb
├── 1_Basics.ipynb
├── 1_Basics_with_Solutions.ipynb
├── 2_Data_Records.ipynb
├── 2_Data_Records_with_Solutions.ipynb
├── 3_Data_Transfer.ipynb
├── 3_Data_Transfer_with_Solutions.ipynb
├── 4_Collections_Queries.ipynb
├── 4_Collections_Queries_with_Solutions.ipynb
└── 5_Collaborative_Exercise.ipynb
├── python
├── CMakeLists.txt
├── datafed_pkg
│ ├── CMakeLists.txt
│ ├── Doxyfile
│ ├── README
│ ├── datafed
│ │ ├── CLI.py
│ │ ├── CMakeLists.txt
│ │ ├── CommandLib.py
│ │ ├── Config.py
│ │ ├── Connection.py
│ │ ├── MessageLib.py
│ │ ├── VERSION.py.in
│ │ ├── __init__.py
│ │ └── doxygen.cfg
│ ├── requirements.txt
│ ├── scripts
│ │ └── datafed
│ ├── setup.py
│ └── test
│ │ ├── Test_EndToEnd.py
│ │ ├── Test_ObjectReturn.py
│ │ ├── data_gen.py
│ │ └── security.py
├── docker
│ ├── Dockerfile.python-client-base.ubuntu
│ ├── Dockerfile.python-client.ubuntu
│ └── entrypoint.sh
└── pyproto_add_msg_idx.py
├── repository
├── CMakeLists.txt
├── docker
│ ├── 000-default.conf
│ ├── Dockerfile
│ ├── Dockerfile.gcs
│ ├── entrypoint_authz.sh
│ └── entrypoint_repo.sh
├── filesys
│ ├── CMakeLists.txt
│ └── fusemain.cpp
├── gridftp
│ ├── CMakeLists.txt
│ └── globus5
│ │ ├── CMakeLists.txt
│ │ └── authz
│ │ ├── CMakeLists.txt
│ │ └── source
│ │ ├── AuthzWorker.cpp
│ │ ├── AuthzWorker.h
│ │ ├── CMakeLists.txt
│ │ ├── Config.h
│ │ ├── README.md
│ │ ├── Version.hpp.in
│ │ └── libauthz.c
└── server
│ ├── CMakeLists.txt
│ ├── Config.hpp
│ ├── RepoServer.cpp
│ ├── RepoServer.hpp
│ ├── RequestWorker.cpp
│ ├── RequestWorker.hpp
│ ├── Version.hpp.in
│ └── main.cpp
├── scripts
├── admin_get_user_emails.sh
├── admin_send_email_notification.sh
├── admin_toggle_user_acccount.sh
├── admin_update_config_message.sh
├── ci_database_health_check.sh
├── ci_generate_pypirc.sh
├── ci_harbor_artifact_count.sh
├── ci_pipeline_serializer.sh
├── ci_pipeline_setup.sh
├── ci_purge_images.sh
├── ci_setup_web_certs.sh
├── clear_db.sh
├── compose_build_images.sh
├── compose_cleanup_globus_files.sh
├── compose_generate_env.sh
├── compose_generate_globus_files.sh
├── container_run_test.sh
├── container_stop.sh
├── copy_dependency.sh
├── dependency_install_functions.sh
├── dependency_versions.sh
├── export_dependency_version.sh
├── generate_authz_config.sh
├── generate_certificate_refresh_script.sh
├── generate_core_config.sh
├── generate_core_service.sh
├── generate_datafed.sh
├── generate_datafed_backup_script.sh
├── generate_gsi-authz_config.sh
├── generate_repo_config.sh
├── generate_repo_service.sh
├── generate_ws_config.sh
├── generate_ws_service.sh
├── globus
│ ├── __init__.py
│ ├── clean_globus.sh
│ ├── clean_globus_nodes.py
│ ├── create_guest_collection.py
│ ├── generate_repo_form.sh
│ ├── globus_cleanup.py
│ ├── initialize_globus_endpoint.py
│ ├── setup_globus.sh
│ └── utils.py
├── install_authz.sh
├── install_authz_dependencies.sh
├── install_client_dependencies.sh
├── install_core.sh
├── install_core_dependencies.sh
├── install_core_service.sh
├── install_dependencies.sh
├── install_docker_dependencies.sh
├── install_docs_dependencies.sh
├── install_end_to_end_test_dependencies.sh
├── install_foxx.sh
├── install_gcs.sh
├── install_lego_and_certificates.sh
├── install_python_client_dependencies.sh
├── install_repo.sh
├── install_repo_dependencies.sh
├── install_repo_service.sh
├── install_ws.sh
├── install_ws_dependencies.sh
├── install_ws_service.sh
├── run_arango_service.sh
├── run_core_service.sh
├── run_repo_service.sh
├── run_ws_service.sh
├── uninstall_all.sh
└── utils.sh
├── setup
├── setup_condo.sh
├── test
├── CMakeLists.txt
├── Test_Cases_CLI.xlsx
├── import
│ ├── record10.json
│ ├── record1_upd.json
│ ├── record2.json
│ ├── record2_upd.json
│ ├── record3.json
│ ├── record4.json
│ ├── record5.json
│ ├── record6.json
│ ├── record7.json
│ ├── record8.json
│ ├── record9.json
│ ├── record_oversize.json
│ └── records_upd.json
└── libjson
│ ├── CMakeLists.txt
│ └── main.cpp
├── tests
└── end-to-end
│ ├── CMakeLists.txt
│ ├── README.md
│ ├── setup.sh
│ ├── test_api_alloc.py
│ ├── test_api_collection.py
│ ├── test_api_context.py
│ ├── test_api_endpoint.py
│ ├── test_api_record.py
│ ├── test_api_repo.py
│ ├── test_api_user_login.py
│ └── web-UI
│ ├── CMakeLists.txt
│ ├── auth.setup.js.in
│ ├── package-lock.json
│ ├── package.json
│ ├── playwright.config.js
│ └── scripts
│ └── testingBasicFunction.spec.js
└── web
├── .eslintrc.json
├── datafed-ws.js
├── docker
├── Dockerfile
└── entrypoint.sh
├── package.json.in
├── static
├── .eslintrc.json
├── ace
│ ├── ace.js
│ ├── ext-beautify.js
│ ├── ext-elastic_tabstops_lite.js
│ ├── ext-emmet.js
│ ├── ext-error_marker.js
│ ├── ext-keybinding_menu.js
│ ├── ext-language_tools.js
│ ├── ext-linking.js
│ ├── ext-modelist.js
│ ├── ext-options.js
│ ├── ext-rtl.js
│ ├── ext-searchbox.js
│ ├── ext-settings_menu.js
│ ├── ext-spellcheck.js
│ ├── ext-split.js
│ ├── ext-static_highlight.js
│ ├── ext-statusbar.js
│ ├── ext-textarea.js
│ ├── ext-themelist.js
│ ├── ext-whitespace.js
│ ├── keybinding-emacs.js
│ ├── keybinding-vim.js
│ ├── mode-json.js
│ ├── theme-dark.js
│ ├── theme-light.js
│ ├── worker-coffee.js
│ ├── worker-css.js
│ ├── worker-html.js
│ ├── worker-javascript.js
│ ├── worker-json.js
│ ├── worker-lua.js
│ ├── worker-php.js
│ ├── worker-xml.js
│ └── worker-xquery.js
├── api.js
├── dialogs.js
├── dlg_alloc_new_edit.js
├── dlg_annotation.js
├── dlg_coll_new_edit.js
├── dlg_data_new_edit.js
├── dlg_ep_browse.js
├── dlg_group_edit.js
├── dlg_groups.js
├── dlg_owner_chg_confirm.js
├── dlg_pick_proj.js
├── dlg_pick_topic.js
├── dlg_pick_user.js
├── dlg_proj_new_edit.js
├── dlg_query_builder.js
├── dlg_query_save.js
├── dlg_repo_edit.js
├── dlg_repo_manage.js
├── dlg_schema.js
├── dlg_schema_list.js
├── dlg_set_acls.js
├── dlg_settings.js
├── dlg_start_xfer.js
├── doi_style.css
├── favicon.png
├── index.js
├── jquery-ui-dark
│ ├── datafed.css
│ ├── images
│ │ ├── ui-bg_glass_40_ffc73d_1x400.png
│ │ ├── ui-bg_highlight-hard_20_0972a5_1x100.png
│ │ ├── ui-bg_highlight-soft_33_003147_1x100.png
│ │ ├── ui-bg_highlight-soft_35_222222_1x100.png
│ │ ├── ui-bg_highlight-soft_44_444444_1x100.png
│ │ ├── ui-bg_highlight-soft_80_eeeeee_1x100.png
│ │ ├── ui-bg_loop_25_000000_21x21.png
│ │ ├── ui-icons_222222_256x240.png
│ │ ├── ui-icons_4b8e0b_256x240.png
│ │ ├── ui-icons_a83300_256x240.png
│ │ ├── ui-icons_cccccc_256x240.png
│ │ └── ui-icons_ffffff_256x240.png
│ └── jquery-ui.css
├── jquery-ui-light
│ ├── datafed.css
│ ├── images
│ │ ├── ui-bg_diagonals-small_40_db4865_40x40.png
│ │ ├── ui-bg_diagonals-small_50_93c3cd_40x40.png
│ │ ├── ui-bg_diagonals-small_50_ff3853_40x40.png
│ │ ├── ui-bg_diagonals-small_75_a0e1cb_40x40.png
│ │ ├── ui-bg_diagonals-small_75_ccd232_40x40.png
│ │ ├── ui-bg_dots-medium_80_ffff38_4x4.png
│ │ ├── ui-bg_dots-small_35_35414f_2x2.png
│ │ ├── ui-bg_white-lines_85_f7f7ba_40x100.png
│ │ ├── ui-icons_454545_256x240.png
│ │ ├── ui-icons_88a206_256x240.png
│ │ ├── ui-icons_c02669_256x240.png
│ │ ├── ui-icons_e1e463_256x240.png
│ │ ├── ui-icons_ffeb33_256x240.png
│ │ └── ui-icons_ffffff_256x240.png
│ └── jquery-ui.css
├── jquery
│ ├── font
│ │ ├── jquery-ui.eot
│ │ ├── jquery-ui.svg
│ │ ├── jquery-ui.ttf
│ │ ├── jquery-ui.woff
│ │ └── jquery-ui.woff2
│ ├── jquery-ui-1.12.icon-font.min.css
│ ├── jquery-ui.js
│ ├── jquery.js
│ ├── jquery.ui-contextmenu.min.js
│ └── jquery.ui-contextmenu.min.js.map
├── js-cookie
│ └── js-cookie.js
├── main.js
├── main_browse_tab.js
├── model.js
├── panel_catalog.js
├── panel_graph.js
├── panel_item_info.js
├── panel_search.js
├── project.gif
├── query_builder.css
├── query_builder.js
├── register.js
├── settings.js
├── style.css
├── tag
│ ├── tag-it.css
│ └── tag-it.js
└── util.js
├── version.js.in
└── views
├── docs.ect
├── error.ect
├── head.ect
├── index.ect
├── main.ect
├── panel_catalog.ect
├── panel_search.ect
├── register.ect
├── tab_admin.ect
├── tab_browser.ect
└── tab_repo.ect

--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug-report-template.md:
--------------------------------------------------------------------------------
---
name: Bug Report Template
about: Create a report to help us improve
title: "[Bug]"
labels: 'Type: Bug'
assignees: ''

---

## Description


## Steps to Replicate


### Additional context

Page where the bug occurred:


### Expected behavior


## Artifacts (if appropriate):


--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/development-issue-template.md:
--------------------------------------------------------------------------------
---
name: Development Issue Template
about: Describe this issue template's purpose here.
title: ''
labels: ''
assignees: ''

---

## Ticket


## Description


## Acceptance Criteria


## Resources (optional):

--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature-request-template.md:
--------------------------------------------------------------------------------
---
name: Feature request template
about: Suggest an idea for this project
title: "[Feature]"
labels: 'Type: New Feature'
assignees: ''

---

## Problem We Want to Solve


## Describe the Solution You'd Like


## Alternatives (optional)


## Additional context


--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
## Ticket


## Description


## How Has This Been Tested?


## Artifacts (if appropriate):


## Tasks

* [ ] - A description of the PR has been provided, and a diagram included if it is a new feature.
* [ ] - Formatter has been run
* [ ] - CHANGELOG comment has been added
* [ ] - Labels have been assigned to the PR
* [ ] - A reviewer has been added
* [ ] - A user has been assigned to work on the PR
* [ ] - If it is a new feature, a unit test has been added

--------------------------------------------------------------------------------
/.github/workflows/apply-formatting.yml:
--------------------------------------------------------------------------------
name: Command Mode Formatting
on:
  issue_comment:
    types: [created]
jobs:
  apply-formatting:
    name: Format Py and C++ Code
    runs-on: ubuntu-latest
    if: startsWith(github.event.comment.body, '@par-hermes format')
    steps:
      - uses: JoshuaSBrown/cpp-py-formatter/command@v0.2.3
        with:
          botName: par-hermes
          clangFormatVersion: 10
          token: ${{ secrets.GITHUB_TOKEN }}

--------------------------------------------------------------------------------
/.github/workflows/format-check.yml:
--------------------------------------------------------------------------------
name: Format Check
on: push
jobs:
  check-formatting:
    name: Code Formatting
    runs-on: ubuntu-latest
    steps:
      - uses: JoshuaSBrown/cpp-py-formatter/check@v0.2.3
        with:
          clangFormatVersion: 10
          token: ${{ secrets.GITHUB_TOKEN }}

  format-check:
    runs-on: ubuntu-latest
    if: ${{ always() }}
    # needs: check-formatting
    strategy:
      matrix:
        python-version: [ "3.9" ]
    steps:
      - uses: actions/checkout@v2
      - name: Update ubuntu
        run: sudo apt-get update
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          sudo apt-get install -y python3-sphinx sphinx-rtd-theme-common
          pip install sphinx_rtd_theme recommonmark flake8 black pytest pytest-cov black[jupyter]
      - name: Black formatting checks
        run: black --exclude '/jupyter_notebooks/' --check .
      - name: Lint with flake8
        run: |
          flake8 --count --select=E9,F63,F7,F82 --show-source --statistics --max-line-length 100 .
          flake8 --count --statistics --max-line-length 100 .
--------------------------------------------------------------------------------
/.github/workflows/unit-tests.yml:
--------------------------------------------------------------------------------
name: Unit-Testing
on: push
jobs:
  unit-test:
    runs-on: ubuntu-latest
    if: ${{ always() }}
    steps:
      - uses: actions/checkout@v2
      - name: Update ubuntu
        run: sudo apt-get update
      - name: Install dependencies
        run: |
          ./scripts/generate_datafed.sh
          sudo ./scripts/install_core_dependencies.sh
          ./scripts/generate_datafed.sh
      - name: Build
        run: |
          cmake -S. -B build -DCMAKE_BUILD_TYPE=Debug
          cmake --build build -j4
      - name: Run tests
        run: |
          cmake --build build --target test

--------------------------------------------------------------------------------
/.gitlab/build/build_core_image.yml:
--------------------------------------------------------------------------------
---
stages:
  - build

include:
  - local: .gitlab/common.yml

build-core:
  extends: .docker_build_script
  stage: build
  variables:
    IMAGE_TAG: "datafed/core"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "core/docker/Dockerfile"
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - ci-datafed-core
    - docker
  rules:
    - changes:
        - docker/**/*
        - scripts/**/*
        - core/**/*
        - common/**/*
        - CMakeLists.txt
        - cmake/**/*
        - .gitlab-ci.yml
      when: on_success

retag-image:
  extends: .docker_retag_image
  stage: build
  variables:
    IMAGE_TAG: "datafed/core"
    GIT_STRATEGY: clone
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - docker

--------------------------------------------------------------------------------
/.gitlab/build/build_foxx_image.yml:
--------------------------------------------------------------------------------
---
stages:
  - build

include:
  - local: .gitlab/common.yml

build-foxx:
  extends: .docker_build_script
  stage: build
  variables:
    IMAGE_TAG: "datafed/foxx"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "docker/Dockerfile.foxx"
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - docker
  rules:
    - changes:
        - docker/**/*
        - scripts/**/*
        - web/**/*
        - common/proto/**/*
        - .gitlab-ci.yml
      when: on_success

retag-image:
  extends: .docker_retag_image
  stage: build
  variables:
    IMAGE_TAG: "datafed/foxx"
    GIT_STRATEGY: clone
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - docker

--------------------------------------------------------------------------------
/.gitlab/build/build_repo_image.yml:
--------------------------------------------------------------------------------
---
stages:
  - build

include:
  - local: .gitlab/common.yml

build-repo:
  extends: .docker_build_script
  stage: build
  variables:
    IMAGE_TAG: "datafed/repo"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "repository/docker/Dockerfile"
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - ci-datafed-repo
    - docker
  rules:
    - changes:
        - docker/**/*
        - scripts/**/*
        - web/**/*
        - common/proto/**/*
        - .gitlab-ci.yml
      when: on_success

retag-image:
  extends: .docker_retag_image
  stage: build
  variables:
    IMAGE_TAG: "datafed/repo"
    GIT_STRATEGY: clone
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - docker
--------------------------------------------------------------------------------
/.gitlab/build/build_ws_image.yml:
--------------------------------------------------------------------------------
---
stages:
  - build

include:
  - local: .gitlab/common.yml

build-ws:
  extends: .docker_build_script
  stage: build
  variables:
    IMAGE_TAG: "datafed/ws"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "web/docker/Dockerfile"
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - ci-datafed-core
    - docker
  rules:
    - changes:
        - docker/**/*
        - scripts/**/*
        - web/**/*
        - common/proto/**/*
        - .gitlab-ci.yml
      when: on_success

retag-image:
  extends: .docker_retag_image
  stage: build
  variables:
    IMAGE_TAG: "datafed/ws"
    GIT_STRATEGY: clone
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - docker

--------------------------------------------------------------------------------
/.gitlab/build/force_build_core_image.yml:
--------------------------------------------------------------------------------
---
stages:
  - build

include:
  - local: .gitlab/common.yml

build-core:
  extends: .docker_build_script
  stage: build
  variables:
    IMAGE_TAG: "datafed/core"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "core/docker/Dockerfile"
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - ci-datafed-core
    - docker

--------------------------------------------------------------------------------
/.gitlab/build/force_build_foxx_image.yml:
--------------------------------------------------------------------------------
---
stages:
  - build

include:
  - local: .gitlab/common.yml

build-foxx:
  extends: .docker_build_script
  stage: build
  variables:
    IMAGE_TAG: "datafed/foxx"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "docker/Dockerfile.foxx"
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - docker

--------------------------------------------------------------------------------
/.gitlab/build/force_build_repo_image.yml:
--------------------------------------------------------------------------------
---
stages:
  - build

include:
  - local: .gitlab/common.yml

build-repo:
  extends: .docker_build_script
  stage: build
  variables:
    IMAGE_TAG: "datafed/repo"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "repository/docker/Dockerfile"
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - ci-datafed-repo
    - docker

--------------------------------------------------------------------------------
/.gitlab/build/force_build_ws_image.yml:
--------------------------------------------------------------------------------
---
stages:
  - build

include:
  - local: .gitlab/common.yml

build-ws:
  extends: .docker_build_script
  stage: build
  variables:
    IMAGE_TAG: "datafed/ws"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "web/docker/Dockerfile"
    DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by ci_harbor_artifact_count
  tags:
    - ci-datafed-core
    - docker
--------------------------------------------------------------------------------
/.gitlab/build_ci_infrastructure.yml:
--------------------------------------------------------------------------------
---
stages:
  - build-infrastructure

build-infrastructure-job:
  stage: build-infrastructure
  trigger:
    project: dlsw/datafed/ci
    branch: main
    strategy: depend

--------------------------------------------------------------------------------
/.gitlab/skip_ci_infrastructure.yml:
--------------------------------------------------------------------------------
stages:
  - skip-infrastructure-build

skip-build-infrastructure:
  stage: skip-infrastructure-build
  tags:
    - datafed-infrastructure
  script:
    - echo "Infrastructure is already running!"

--------------------------------------------------------------------------------
/.gitlab/stage_build_base.yml:
--------------------------------------------------------------------------------
---
include:
  - local: .gitlab/common.yml

build-dependencies:
  extends: .docker_base_build_script
  stage: build-base
  variables:
    IMAGE_TAG: "datafed/dependencies"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "docker/Dockerfile.dependencies"
  tags:
    - docker

build-runtime:
  extends: .docker_base_build_script
  stage: build-base
  variables:
    IMAGE_TAG: "datafed/runtime"
    GIT_STRATEGY: clone
    DOCKER_FILE_PATH: "docker/Dockerfile.runtime"
  tags:
    - docker

--------------------------------------------------------------------------------
/.gitlab/stage_clear_cache.yml:
--------------------------------------------------------------------------------
---
################################################################################
# STAGE: clear-docker-cache
################################################################################
# Used to clear out the cache on VMs where the images are being built
.clear_cache:
  script:
    - docker login "${REGISTRY}" -u "${HARBOR_USER}" -p "${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}"
    - if [[ $(docker ps -q | wc -l) != "0" ]]; then docker stop $(docker ps -q); fi;
    - docker system prune -f
    - ./scripts/ci_purge_images.sh

clear-core-cache:
  extends: .clear_cache
  stage: clear-docker-cache
  needs: ["signal"]
  tags:
    - ci_1

clear-repo-cache:
  extends: .clear_cache
  stage: clear-docker-cache
  needs: ["signal"]
  tags:
    - ci_3

clear-python-client-cache:
  extends: .clear_cache
  stage: clear-docker-cache
  variables:
    DATAFED_CI_PURGE_THRESHOLD: "9"
  needs: ["signal"]
  tags:
    - ci_5

--------------------------------------------------------------------------------
/.gitlab/stage_image_check.yml:
--------------------------------------------------------------------------------
---

include:
  - local: .gitlab/common.yml

check-ws-image:
  extends: .image_check
  stage: image-check
  variables:
    COMPONENT: "ws"
    IMAGE_TAG: "datafed/ws"

check-core-image:
  extends: .image_check
  stage: image-check
  variables:
    COMPONENT: "core"
    IMAGE_TAG: "datafed/core"

check-repo-image:
  extends: .image_check
  stage: image-check
  variables:
    COMPONENT: "repo"
    IMAGE_TAG: "datafed/repo"

check-gcs-base-image:
  extends: .image_check
  stage: image-check
  variables:
    COMPONENT: "gcs_base"
    IMAGE_TAG: "datafed/gcs-base"

check-gcs-image:
  extends: .image_check
  stage: image-check
  variables:
    COMPONENT: "gcs"
    IMAGE_TAG: "datafed/gcs"

check-foxx-image:
  extends: .image_check
  stage: image-check
  variables:
    COMPONENT: "foxx"
    IMAGE_TAG: "datafed/foxx"
--------------------------------------------------------------------------------
/.gitlab/stage_pipeline_serialize.yml:
--------------------------------------------------------------------------------
---

################################################################################
# STAGE: pipeline-serialize
################################################################################
# The whole purpose of this job is to finish one pipeline before executing
# the next one.

wait:
  stage: pipeline-serialize
  tags:
    - datafed-infrastructure
  script:
    - chmod +x ./scripts/ci_pipeline_serializer.sh && ./scripts/ci_pipeline_serializer.sh
  resource_group: pipeline_serializer

--------------------------------------------------------------------------------
/.gitlab/stage_provision_client.yml:
--------------------------------------------------------------------------------
---

################################################################################
# STAGE: provision-client
################################################################################
provision-client:
  needs: ["signal"]
  variables:
    GIT_STRATEGY: clone
  stage: provision-client
  tags:
    - ci-datafed-client
  before_script:
    - export PATH=/opt/datafed/dependencies/bin:$PATH
  script:
    - ./scripts/generate_datafed.sh
    - ./scripts/install_client_dependencies.sh
    - ./scripts/install_end_to_end_test_dependencies.sh

--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
[submodule "external/globus-connect-server-deploy"]
	path = external/globus-connect-server-deploy
	url = https://github.com/globus/globus-connect-server-deploy.git
[submodule "external/protobuf"]
	path = external/protobuf
	url = https://github.com/protocolbuffers/protobuf.git

--------------------------------------------------------------------------------
/cmake/JSON.cmake:
--------------------------------------------------------------------------------

function(find_json_library)

  set(JSON_INCLUDE_DIR "")
  set(JSON_LIB_DIR "")
  set(JSON_LIB_CMAKE_DIR "")
  set(JSON_LIBRARY_PATH "")

  if(EXISTS ${DEPENDENCY_INSTALL_PATH})
    set(JSON_LIB_CMAKE_DIR "${DEPENDENCY_INSTALL_PATH}/lib/cmake")
    set(JSON_INCLUDE_DIR "${DEPENDENCY_INSTALL_PATH}/include")
    set(JSON_LIB_DIR "${DEPENDENCY_INSTALL_PATH}/lib")
  endif()

  find_package(nlohmann_json CONFIG REQUIRED PATHS "${JSON_LIB_CMAKE_DIR}")

  # NOTE: interface targets do not have a physical location associated with
  # the library
  get_target_property(JSON_INCLUDE_PATH nlohmann_json::nlohmann_json INTERFACE_INCLUDE_DIRECTORIES)

  set(DATAFED_JSON_INCLUDE_PATH "${JSON_INCLUDE_PATH}" PARENT_SCOPE)
  set(DATAFED_JSON_VERSION_ACTUAL "${nlohmann_json_VERSION}" PARENT_SCOPE)
endfunction()

find_json_library()
--------------------------------------------------------------------------------
/cmake/JSONSchema.cmake:
--------------------------------------------------------------------------------

function(find_json_schema_library)

  set(JSON_SCHEMA_INCLUDE_DIR "")
  set(JSON_SCHEMA_LIB_DIR "")
  set(JSON_SCHEMA_LIB_CMAKE_DIR "")
  set(JSON_SCHEMA_LIBRARY_PATH "")

  if(EXISTS ${DEPENDENCY_INSTALL_PATH})
    set(JSON_SCHEMA_LIB_CMAKE_DIR "${DEPENDENCY_INSTALL_PATH}/lib/cmake")
    set(JSON_SCHEMA_INCLUDE_DIR "${DEPENDENCY_INSTALL_PATH}/include")
    set(JSON_SCHEMA_LIB_DIR "${DEPENDENCY_INSTALL_PATH}/lib")
  endif()

  find_package(nlohmann_json_schema_validator CONFIG REQUIRED VERSION "${JSON_SCHEMA_VALIDATOR_VERSION}" PATHS "${JSON_SCHEMA_LIB_CMAKE_DIR}")

  # NOTE: interface targets do not have a physical location associated with
  # the library
  get_target_property(JSON_SCHEMA_INCLUDE_PATH nlohmann_json_schema_validator INTERFACE_INCLUDE_DIRECTORIES)
  get_target_property(JSON_SCHEMA_LIBRARY_PATH nlohmann_json_schema_validator IMPORTED_LOCATION_NOCONFIG)

  set(DATAFED_JSON_SCHEMA_INCLUDE_PATH "${JSON_SCHEMA_INCLUDE_PATH}" PARENT_SCOPE)
  set(DATAFED_JSON_SCHEMA_LIBRARY_PATH "${JSON_SCHEMA_LIBRARY_PATH}" PARENT_SCOPE)
  set(DATAFED_JSON_SCHEMA_VERSION_ACTUAL "${nlohmann_json_schema_validator_VERSION}" PARENT_SCOPE)

endfunction()

find_json_schema_library()

--------------------------------------------------------------------------------
/cmake/Utils.cmake:
--------------------------------------------------------------------------------


# Function is designed to pull the value of a key-value pair out of a file,
# i.e.
#
# file.sh
#
# PROTO_VERSION="v2.1.0"
# ZMQ_VERSION="v5.53.3"
#
# Read the script first
#
# file(READ "file.sh" SCRIPT_CONTENT)
# get_version_from_script(${SCRIPT_CONTENT} "PROTO_VERSION" value)
# message("${value}")
#
# Will output "v2.1.0"
function(get_version_from_script INPUT_STRING PATTERN_TO_MATCH OUTPUT_VERSION)
  string(REGEX MATCH "${PATTERN_TO_MATCH}=(.*)" MATCHED_PART "${INPUT_STRING}")
  if(MATCHED_PART)
    string(STRIP "${CMAKE_MATCH_1}" SANITIZED_OUTPUT_VERSION)
    string(REGEX REPLACE "\"\n.*" "" NEWLINE_REMOVED "${SANITIZED_OUTPUT_VERSION}")
    string(REPLACE "\"" "" REMOVED_QUOTES "${NEWLINE_REMOVED}")
    set(${OUTPUT_VERSION} "${REMOVED_QUOTES}" PARENT_SCOPE)
  endif()
endfunction()

# Function will get an exported value from a shell script,
# i.e. if datafed.sh has
#
# datafed.sh
# export MY_NAME="Barry"
#
# set(DATAFED_CONFIG_SH "config/datafed.sh")
# get_value_from_datafed_sh("MY_NAME" name)
# message("${name}")
#
# Will output "Barry"
function(get_value_from_datafed_sh INPUT_KEY OUTPUT_VALUE)
  execute_process(
    COMMAND bash "-c" "source ${DATAFED_CONFIG_SH} && echo \$${INPUT_KEY}"
    OUTPUT_VARIABLE OUTPUT_VAR
    OUTPUT_STRIP_TRAILING_WHITESPACE
  )
  set(${OUTPUT_VALUE} "${OUTPUT_VAR}" PARENT_SCOPE)
endfunction()

--------------------------------------------------------------------------------
/cmake/Version.cmake:
--------------------------------------------------------------------------------

set(DATAFED_RELEASE_YEAR 2024)
set(DATAFED_RELEASE_MONTH 6)
set(DATAFED_RELEASE_DAY 17)
set(DATAFED_RELEASE_HOUR 10)
set(DATAFED_RELEASE_MINUTE 40)

set(DATAFED_COMMON_LIB_MAJOR 1)
set(DATAFED_COMMON_LIB_MINOR 0)
set(DATAFED_COMMON_LIB_PATCH 0)

set(DATAFED_COMMON_PROTOCOL_API_MAJOR 1)
set(DATAFED_COMMON_PROTOCOL_API_MINOR 0)
set(DATAFED_COMMON_PROTOCOL_API_PATCH 0)

set(DATAFED_CORE_MAJOR 1)
set(DATAFED_CORE_MINOR 0)
set(DATAFED_CORE_PATCH 0)

set(DATAFED_FOXX_MAJOR 1)
set(DATAFED_FOXX_MINOR 0)
set(DATAFED_FOXX_PATCH 0)

set(DATAFED_FOXX_API_MAJOR 1)
set(DATAFED_FOXX_API_MINOR 0)
set(DATAFED_FOXX_API_PATCH 0)

set(DATAFED_WEB_MAJOR 1)
set(DATAFED_WEB_MINOR 0)
set(DATAFED_WEB_PATCH 0)

set(DATAFED_REPO_MAJOR 1)
set(DATAFED_REPO_MINOR 0)
set(DATAFED_REPO_PATCH 0)

set(DATAFED_AUTHZ_MAJOR 1)
set(DATAFED_AUTHZ_MINOR 0)
set(DATAFED_AUTHZ_PATCH 0)

set(DATAFED_PYTHON_CLIENT_MAJOR 3)
set(DATAFED_PYTHON_CLIENT_MINOR 0)
set(DATAFED_PYTHON_CLIENT_PATCH 0)
set(DATAFED_PYTHON_CLIENT_RELEASE_TYPE "b")
set(DATAFED_PYTHON_CLIENT_PRE_RELEASE_IDENTIFER "1")

--------------------------------------------------------------------------------
/cmake/Web.cmake:
--------------------------------------------------------------------------------
cmake_minimum_required (VERSION 3.17.0)

# DataFed version numbers need to be defined before running configure scripts
include (${CMAKE_CURRENT_LIST_DIR}/Version.cmake)

# Prepare web files
# This script can be run as part of the cmake include process or as its own
# script
configure_file(
  "${CMAKE_CURRENT_LIST_DIR}/../web/version.js.in"
  "${CMAKE_CURRENT_LIST_DIR}/../web/version.js"
  @ONLY)

configure_file(
  "${CMAKE_CURRENT_LIST_DIR}/../web/package.json.in"
  "${CMAKE_CURRENT_LIST_DIR}/../web/package.json"
  @ONLY)


--------------------------------------------------------------------------------
/cmake/curl_version.cpp:
--------------------------------------------------------------------------------
#include <curl/curl.h>
#include <iostream>

/**
 * This little program is used to show which version of curl is being used
 * with the rest of the build process; it prints the version number of the
 * curl library.
 **/
int main() {
  std::cout << curl_version() << std::endl;
  return 0;
}
--------------------------------------------------------------------------------
/cmake/sodium_version.cpp:
--------------------------------------------------------------------------------
#include <sodium.h>
#include <iostream>

int main() {
  if (sodium_init() == -1) {
    std::cerr << "sodium_init() failed" << std::endl;
    return 1;
  }

  std::cout << sodium_version_string() << std::endl;

  return 0;
}

--------------------------------------------------------------------------------
/cmake/zlib_version.cpp:
--------------------------------------------------------------------------------
#include <zlib.h>
#include <iostream>

/**
 * This little file is used to make sure that we are compiling with the
 * version of a library that we think we are. When run, it will print the
 * zlib version number.
 **/
int main() {
  std::cout << zlibVersion() << std::endl;
  return 0;
}

--------------------------------------------------------------------------------
/common/include/common/CommunicatorFactory.hpp:
--------------------------------------------------------------------------------
#ifndef COMMUNICATOR_FACTORY_HPP
#define COMMUNICATOR_FACTORY_HPP
#pragma once

// Local includes
#include "DynaLog.hpp"
#include "ICommunicator.hpp"
#include "ICredentials.hpp"
#include "SocketOptions.hpp"

// Standard includes
#include <memory>

namespace SDMS {

class CommunicatorFactory {
private:
  LogContext m_log_context;

public:
  explicit CommunicatorFactory(const LogContext &log_context)
      : m_log_context(log_context){};

  std::unique_ptr<ICommunicator> create(const SocketOptions &socket_options,
                                        const ICredentials &credentials,
                                        uint32_t timeout_on_receive,
                                        long timeout_on_poll) const;
};

} // namespace SDMS

#endif // COMMUNICATOR_FACTORY_HPP

--------------------------------------------------------------------------------
/common/include/common/CredentialFactory.hpp:
--------------------------------------------------------------------------------
#ifndef CREDENTIAL_FACTORY_HPP
#define CREDENTIAL_FACTORY_HPP
#pragma once

// Local public includes
#include "ICredentials.hpp"
#include "ProtocolTypes.hpp"

// Standard includes
#include <memory>
#include <unordered_map>

namespace SDMS {

class CredentialFactory {
public:
  std::unique_ptr<ICredentials>
  create(const ProtocolType,
         const std::unordered_map<CredentialType, std::string> &options) const;
};

} // namespace SDMS

#endif // CREDENTIAL_FACTORY_HPP

--------------------------------------------------------------------------------
/common/include/common/ErrorCodes.hpp:
--------------------------------------------------------------------------------
#ifndef ERRORCODES_HPP
#define ERRORCODES_HPP

enum {
  EC_BUSY = 1,
  EC_SYNTAX_ERROR,
  EC_INVALID_PATH,
  EC_INVALID_MESSAGE,
  EC_INVALID_OBJECT,
  EC_INVALID_REFERENCE,
  EC_INVALID_OPERATION,
  EC_MISSING_REQUIRED_PARAM,
  EC_INVALID_PARAM,
  EC_OBJECT_ALREADY_EXISTS,
  EC_SCHEMA_MISMATCH,
  EC_PROTO_MISMATCH,
  EC_PROTO_INIT,
  EC_PROTO_SERIALIZE,
  EC_ZMQ_ERROR,
  EC_IO_FAILURE,
  EC_CAPACITY_EXCEEDED,
  EC_INCONSISTENT_STATE,
  EC_INTERNAL_ERROR,
  EC_ACCESS_DENIED
};

#endif // ERRORCODES_HPP

--------------------------------------------------------------------------------
/common/include/common/ICommunicator.hpp:
--------------------------------------------------------------------------------
#ifndef ICOMMUNICATOR_HPP
#define ICOMMUNICATOR_HPP
#pragma once

// Local includes
#include "IMessage.hpp"

// Standard includes
#include <memory>
#include <string>

namespace SDMS {

namespace constants {
namespace communicator {
const size_t MAX_COMMUNICATOR_IDENTITY_SIZE = 256;
}
} // namespace constants

class ICommunicator {
public:
  struct Response {
    int events = 0;
    bool time_out = false;
    bool error = false;
    std::string error_msg = "";
    std::unique_ptr<IMessage> message;
    // Again, this should be changed to IMessage in the future
  };
  /**
   * Poll for incoming messages at the sockets.
   *
   * The returned Response carries the message if one was provided; on a
   * timeout, or if no message arrived, the time_out flag is set instead.
   **/
  virtual Response poll(const MessageType) = 0;

  /**
   * This is technical debt; in the future, get rid of MsgBuf and replace it
   * with IMessage.
   **/
  virtual void send(IMessage &message) = 0;

  /**
   * Ideally, in the future, get rid of MsgBuf and replace it with IMessage.
   **/
  virtual Response receive(const MessageType) = 0;

  virtual const std::string id() const noexcept = 0;
  virtual const std::string address() const noexcept = 0;

  virtual ~ICommunicator(){};
};

} // namespace SDMS

#endif // ICOMMUNICATOR_HPP
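The two factories above are designed to be used together: credentials built by `CredentialFactory` are handed to `CommunicatorFactory::create`, and the resulting `ICommunicator` is polled for messages. Below is a minimal sketch of that wiring. The `SocketOptions` and `LogContext` construction, the include paths, the millisecond interpretation of the two timeouts, and the `MessageType::GOOGLE_PROTOCOL_BUFFER` enumerator (defined in `IMessage.hpp`, which is not part of this listing) are all assumptions; only the factory signatures shown in these headers are taken from the source.

```C++
// Sketch only: include paths, SocketOptions/LogContext setup, and the
// MessageType enumerator are assumptions not confirmed by this listing.
#include "common/CommunicatorFactory.hpp"
#include "common/CredentialFactory.hpp"
#include "common/ICommunicator.hpp"

#include <string>
#include <unordered_map>

using namespace SDMS;

void clientReceive(const SocketOptions &options, const LogContext &log_context) {
  // Build ZeroMQ credentials from a map keyed by CredentialType.
  CredentialFactory cred_factory;
  std::unordered_map<CredentialType, std::string> cred_options;
  cred_options[CredentialType::PUBLIC_KEY] = "...";  // elided key material
  cred_options[CredentialType::PRIVATE_KEY] = "...";
  auto credentials = cred_factory.create(ProtocolType::ZQTP, cred_options);

  // The communicator owns the underlying socket; the two timeouts are
  // assumed to be in milliseconds.
  CommunicatorFactory comm_factory(log_context);
  auto client = comm_factory.create(options, *credentials,
                                    /* timeout_on_receive = */ 1000,
                                    /* timeout_on_poll = */ 100);

  // Block until a message arrives or the receive times out.
  ICommunicator::Response response =
      client->receive(MessageType::GOOGLE_PROTOCOL_BUFFER);
  if (!response.time_out && !response.error) {
    // response.message is a std::unique_ptr<IMessage> ready for processing.
  }
}
```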
--------------------------------------------------------------------------------
/common/include/common/ICredentials.hpp:
--------------------------------------------------------------------------------
#ifndef ICREDENTIALS_HPP
#define ICREDENTIALS_HPP
#pragma once

// Local includes
#include "ProtocolTypes.hpp"

// Standard includes
#include <string>

namespace SDMS {

enum class CredentialType { PUBLIC_KEY, PRIVATE_KEY, SERVER_KEY };

class ICredentials {
public:
  virtual ProtocolType getType() const noexcept = 0;
  virtual std::string get(const CredentialType) const = 0;
  virtual bool has(CredentialType) const noexcept = 0;
};

} // namespace SDMS

#endif // ICREDENTIALS_HPP

--------------------------------------------------------------------------------
/common/include/common/IMessageMapper.hpp:
--------------------------------------------------------------------------------
#ifndef IMESSAGE_MAPPER_HPP
#define IMESSAGE_MAPPER_HPP
#pragma once

// Standard includes
#include <string>

namespace SDMS {

enum class MessageProtocol { GOOGLE_ANONONYMOUS, GOOGLE_AUTHORIZED };

class IMessageMapper {
public:
  enum ErrorCode {
    EC_OK = 0,
    EC_PROTO_INIT,
    EC_INVALID_PARAM,
    EC_INVALID_STATE,
    EC_SERIALIZE,
    EC_UNSERIALIZE
  };

public:
  virtual uint16_t getMessageType(uint8_t a_proto_id,
                                  const std::string &a_message_name) = 0;

  virtual uint8_t getProtocolID(MessageProtocol) const = 0;
};
} // namespace SDMS

#endif // IMESSAGE_MAPPER_HPP
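`IMessageMapper` resolves a protocol and a message name into the numeric identifiers used on the wire. A minimal sketch of a lookup follows, using `ProtoBufMap` (declared further down in this listing) as the concrete mapper; the message name `"VersionRequest"` is hypothetical and used purely for illustration, and the include paths are assumptions.

```C++
// Sketch only: "VersionRequest" is a hypothetical message name.
#include "common/IMessageMapper.hpp"
#include "common/ProtoBufMap.hpp"

using namespace SDMS;

void lookupMessageType() {
  // ProtoBufMap is the concrete IMessageMapper for the generated protobuf
  // messages (SDMS.proto, SDMS_Anon.proto, SDMS_Auth.proto).
  ProtoBufMap proto_map;
  IMessageMapper &mapper = proto_map;

  // First resolve the protocol id, then the message type within it.
  uint8_t proto_id = mapper.getProtocolID(MessageProtocol::GOOGLE_ANONONYMOUS);
  uint16_t msg_type = mapper.getMessageType(proto_id, "VersionRequest");
  (void)msg_type; // would normally be stamped into an outgoing frame
}
```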
--------------------------------------------------------------------------------
/common/include/common/IOperator.hpp:
--------------------------------------------------------------------------------
#ifndef IOPERATOR_HPP
#define IOPERATOR_HPP
#pragma once

// Local includes
#include "OperatorTypes.hpp"

namespace SDMS {

class IMessage;

class IOperator {
public:
  virtual OperatorType type() const noexcept = 0;
  virtual void execute(IMessage &message) = 0;
};

} // namespace SDMS

#endif // IOPERATOR_HPP

--------------------------------------------------------------------------------
/common/include/common/IServer.hpp:
--------------------------------------------------------------------------------
#ifndef SERVER_HPP
#define SERVER_HPP
#pragma once

// Local public includes
#include "ServerFactory.hpp"

// Standard includes
#include <chrono>
#include <string>
#include <unordered_map>

namespace SDMS {

enum class SocketRole;

class IServer {

public:
  virtual ServerType type() const noexcept = 0;
  /**
   * By default the server will run forever; you can instead specify a length
   * of time for it to run:
   *
   * std::chrono::duration<double> duration = std::chrono::seconds(1);
   * setRunDuration(duration)
   **/
  virtual void setRunDuration(std::chrono::duration<double> duration) = 0;

  virtual void run() = 0;

  virtual std::unordered_map<SocketRole, std::string> getAddresses() const = 0;
};

} // namespace SDMS

#endif // SERVER_HPP

--------------------------------------------------------------------------------
/common/include/common/ISocket.hpp:
--------------------------------------------------------------------------------
#ifndef ISOCKET_HPP
#define ISOCKET_HPP
#pragma once

// Local includes
#include "ICredentials.hpp"
#include "ProtocolTypes.hpp"
#include "SocketOptions.hpp"

// Standard includes
#include <string>

namespace SDMS {

class ISocket {

public:
  /*********************************************************
   * Getters
   *********************************************************/
  virtual SocketClassType getSocketClassType() const noexcept = 0;
  virtual SocketCommunicationType
  getSocketCommunicationType() const noexcept = 0;
  virtual SocketDirectionalityType
  getSocketDirectionalityType() const noexcept = 0;
  virtual SocketConnectionLife getSocketConnectionLife() const noexcept = 0;

  virtual ProtocolType getProtocolType() const noexcept = 0;
  virtual std::string getAddress() const noexcept = 0;
  virtual std::string get(const CredentialType credential_type) const = 0;

  virtual bool hasCredentials() const noexcept = 0;
  /**
   * Get an identifier for the local socket host/thread/process. This
   * requires the user to set the local_id in the socket options; otherwise
   * a random identifier will be created.
   **/
  virtual std::string getID() const noexcept = 0;
};

} // namespace SDMS

#endif // ISOCKET_HPP

--------------------------------------------------------------------------------
/common/include/common/KeyGenerator.hpp:
--------------------------------------------------------------------------------
#ifndef KEY_GENERATOR_HPP
#define KEY_GENERATOR_HPP
#pragma once

// Local includes
#include "ICredentials.hpp"
#include "ProtocolTypes.hpp"

// Standard includes
#include <string>
#include <unordered_map>

namespace SDMS {

enum class KeyType { PUBLIC_PRIVATE };

class KeyGenerator {
public:
  std::unordered_map<CredentialType, std::string> generate(const ProtocolType,
                                                           const KeyType);
};

class KeyPairValidator {
public:
  bool validate(const std::string &public_key,
                const std::string &private_key) const;
};

} // namespace SDMS

#endif // KEY_GENERATOR_HPP
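Because `KeyGenerator::generate` returns a map keyed by `CredentialType`, its output can be fed directly into `CredentialFactory::create` or checked with `KeyPairValidator`. A minimal sketch, using only the signatures declared above (the include path is an assumption):

```C++
#include "common/KeyGenerator.hpp"

using namespace SDMS;

void keyExample() {
  // Generate a public/private pair for the ZeroMQ transport.
  KeyGenerator generator;
  auto keys = generator.generate(ProtocolType::ZQTP, KeyType::PUBLIC_PRIVATE);

  // Confirm the two halves of the pair actually belong together before
  // handing them to CredentialFactory::create.
  KeyPairValidator validator;
  bool matched = validator.validate(keys[CredentialType::PUBLIC_KEY],
                                    keys[CredentialType::PRIVATE_KEY]);
  (void)matched;
}
```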
--------------------------------------------------------------------------------
/common/include/common/MessageBuffer.hpp:
--------------------------------------------------------------------------------
#ifndef MESSAGE_BUFFER_HPP
#define MESSAGE_BUFFER_HPP
#pragma once

namespace SDMS {

class MessageBuffer {
  virtual void serialize() = 0;
  virtual void unserialize() = 0;
};

} // namespace SDMS

#endif // MESSAGE_BUFFER_HPP

--------------------------------------------------------------------------------
/common/include/common/MessageFactory.hpp:
--------------------------------------------------------------------------------
#ifndef PROTOCOL_FACTORY_HPP
#define PROTOCOL_FACTORY_HPP
#pragma once

// Local includes
#include "IMessage.hpp"

// Standard includes
#include <memory>

namespace SDMS {

class MessageFactory {
public:
  std::unique_ptr<IMessage> create(const MessageType) const;

  /**
   * Will create a Message envelope without the payload but containing the
   * routes, so we know whom to send the message to. This is meant to be
   * used from the server side of an async request-reply model.
   **/
  std::unique_ptr<IMessage> createResponseEnvelope(const IMessage &) const;
};

} // namespace SDMS

#endif // PROTOCOL_FACTORY_HPP
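The comment on `createResponseEnvelope` describes the server side of the async request-reply flow: a reply reuses the routing frames of the incoming request so it can travel back through any intermediate proxies. A minimal sketch, under the assumption that attaching the payload happens through `IMessage` (whose interface is not part of this listing):

```C++
#include "common/MessageFactory.hpp"

using namespace SDMS;

// Reply to an incoming request using an envelope cloned from it, so the
// reply carries the same routes back through any intermediate proxies.
void replyTo(const IMessage &request) {
  MessageFactory msg_factory;

  // The envelope copies the routes of the request but carries no payload.
  auto response = msg_factory.createResponseEnvelope(request);

  // Setting the payload is elided here; how a payload is attached lives in
  // IMessage.hpp, which is not included in this listing.
  (void)response;
}
```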
28 | * 29 | * ```C++ 30 | * OperatorFactory factory; 31 | * factory.registerOperator<OperatorType::Authenticator, AuthenticationOperator>(); 32 | * ``` 33 | **/ 34 | template <OperatorType oper_type, class T> static bool registerOperator() { 35 | if (m_create_methods.count(oper_type) > 0) { 36 | return false; 37 | } else { 38 | m_create_methods[oper_type] = T::create; 39 | } 40 | return true; 41 | } 42 | 43 | std::unique_ptr<IOperator> create(const OperatorType, 44 | std::any &arguments) const; 45 | }; 46 | 47 | } // namespace SDMS 48 | 49 | #endif // OPERATOR_FACTORY_HPP 50 | -------------------------------------------------------------------------------- /common/include/common/OperatorTypes.hpp: -------------------------------------------------------------------------------- 1 | 2 | #ifndef OPERATOR_TYPES_HPP 3 | #define OPERATOR_TYPES_HPP 4 | #pragma once 5 | 6 | #include <cstdint> 7 | 8 | namespace SDMS { 9 | 10 | enum class OperatorType { Authenticator, RouterBookKeeping }; 11 | 12 | } 13 | 14 | #endif // OPERATOR_TYPES_HPP 15 | -------------------------------------------------------------------------------- /common/include/common/ProtoBufMap.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PROTOBUFMAP_HPP 2 | #define PROTOBUFMAP_HPP 3 | #pragma once 4 | 5 | // Public common includes 6 | #include "IMessageMapper.hpp" 7 | 8 | // Third party includes 9 | #include <google/protobuf/descriptor.h> 10 | #include <google/protobuf/message.h> 11 | 12 | // Standard includes 13 | #include <map> 14 | #include <unordered_map> 15 | 16 | namespace SDMS { 17 | 18 | class ProtoBufMap : public IMessageMapper { 19 | public: 20 | typedef std::map<uint8_t, const ::google::protobuf::FileDescriptor *> 21 | FileDescriptorMap; 22 | typedef std::map<uint16_t, const ::google::protobuf::Descriptor *> 23 | DescriptorMap; 24 | typedef std::map<std::string, uint16_t> MsgTypeMap; 25 | 26 | private: 27 | FileDescriptorMap m_file_descriptor_map; 28 | DescriptorMap m_descriptor_map; 29 | MsgTypeMap m_msg_type_map; 30 | std::unordered_map<MessageProtocol, uint8_t> m_protocol_ids; 31 | 32 | public: 33 | ProtoBufMap(); 34 | 35 | const ::google::protobuf::Descriptor * 36 | getDescriptorType(uint16_t message_type) const; 37 | bool exists(uint16_t message_type) const { 38 | return m_descriptor_map.count(message_type) > 0; 39 | } 40 | uint16_t getMessageType(::google::protobuf::Message &); 41 | std::string toString(uint16_t message_type) const; 42 | virtual uint16_t getMessageType(uint8_t a_proto_id, 43 | const std::string &a_message_name) final; 44 | virtual uint8_t getProtocolID(MessageProtocol) const final; 45 | }; 46 | } // namespace SDMS 47 | 48 | #endif // PROTOBUFMAP_HPP 49 | -------------------------------------------------------------------------------- /common/include/common/ProtocolTypes.hpp: -------------------------------------------------------------------------------- 1 | #ifndef PROTOCOL_TYPES_HPP 2 | #define PROTOCOL_TYPES_HPP 3 | #pragma once 4 | 5 | namespace SDMS { 6 | 7 | enum class ProtocolType { 8 | HTTP, 9 | ZQTP // ZeroMQ Transport Layer Protocol 10 | }; 11 | 12 | } // namespace SDMS 13 | 14 | #endif // PROTOCOL_TYPES_HPP 15 | -------------------------------------------------------------------------------- /common/include/common/ServerFactory.hpp: -------------------------------------------------------------------------------- 1 | #ifndef SERVER_FACTORY_HPP 2 | #define SERVER_FACTORY_HPP 3 | #pragma once 4 | 5 | // Local public includes 6 | #include "DynaLog.hpp" 7 | #include "SocketOptions.hpp" 8 | 9 | // Standard includes 10 | #include <memory> 11 | #include <unordered_map> 12 | #include <vector> 13 | 14 | namespace SDMS { 15 | 16 | class ICredentials; 17 | class IOperator; 18 | class IServer; 19 | 20 | enum class SocketRole { CLIENT, SERVER, MONITOR, CONTROL }; 21 | 22 | /** 23 | * The custom proxy server is not required to use
ZMQ, it can work with 24 | * any handles, hence there is no need to specify the PROTOCOL. The others 25 | * use convenience objects provided by zmq and are thus technology specific. 26 | **/ 27 | enum class ServerType { PROXY_CUSTOM, PROXY_BASIC_ZMQ, ROUTER_ZMQ }; 28 | 29 | class ServerFactory { 30 | LogContext m_log_context; 31 | 32 | public: 33 | explicit ServerFactory(const LogContext &log_context) 34 | : m_log_context(log_context){}; 35 | 36 | std::unique_ptr<IServer> create( 37 | ServerType server_type, 38 | const std::unordered_map<SocketRole, SocketOptions> &socket_options, 39 | const std::unordered_map<SocketRole, ICredentials *> &socket_credentials); 40 | 41 | std::unique_ptr<IServer> create( 42 | ServerType server_type, 43 | const std::unordered_map<SocketRole, SocketOptions> &socket_options, 44 | const std::unordered_map<SocketRole, ICredentials *> &socket_credentials, 45 | std::vector<std::unique_ptr<IOperator>> incoming_operators); 46 | }; 47 | 48 | } // namespace SDMS 49 | 50 | #endif // SERVER_FACTORY_HPP 51 | -------------------------------------------------------------------------------- /common/include/common/SocketFactory.hpp: -------------------------------------------------------------------------------- 1 | #ifndef SOCKET_FACTORY_HPP 2 | #define SOCKET_FACTORY_HPP 3 | #pragma once 4 | 5 | // Local public includes 6 | #include "ICredentials.hpp" 7 | #include "ISocket.hpp" 8 | #include "SocketOptions.hpp" 9 | 10 | // Standard includes 11 | #include <memory> 12 | 13 | namespace SDMS { 14 | 15 | class SocketFactory { 16 | public: 17 | std::unique_ptr<ISocket> create(const SocketOptions &socket_options, 18 | const ICredentials &credentials) const; 19 | }; 20 | 21 | } // namespace SDMS 22 | 23 | #endif // SOCKET_FACTORY_HPP 24 | -------------------------------------------------------------------------------- /common/include/common/Util.hpp: -------------------------------------------------------------------------------- 1 | #ifndef UTIL_HPP 2 | #define UTIL_HPP 3 | 4 | #include <ostream> 5 | #include <string> 6 | 7 | std::string exec(const char *cmd); 8 | 9 | struct curlReadBuffer { 10 | char *ptr; 11 | size_t size; 12 | }; 13 | 14 | size_t curlResponseWriteCB(char *ptr, size_t size, size_t nmemb, 15 | void *userdata); 16 | size_t curlBodyReadCB(char *ptr, size_t size, size_t nitems, void *userdata); 17 | void generateKeys(std::string &a_pub_key, std::string &a_priv_key); 18 | void hexDump(const char *a_buffer, const char *a_buffer_end, 19 | std::ostream &a_out); 20 | std::string escapeCSV(const std::string &a_value); 21 | std::string escapeJSON(const std::string &a_value); 22 | bool to_uint32(const char *a_str, uint32_t &a_out); 23 | 24 | // std::vector<std::string> smartTokenize( const std::string & a_text, const 25 | // std::string & a_delim ); 26 | 27 | // std::string parseQuery( const std::string & a_query, bool & use_client, bool 28 | // & use_shared_users, bool & use_shared_projects ); 29 | 30 | #endif 31 | -------------------------------------------------------------------------------- /common/include/common/Version.hpp.in: -------------------------------------------------------------------------------- 1 | #ifndef COMMON_VERSION_HPP 2 | #define COMMON_VERSION_HPP 3 | #pragma once 4 | 5 | namespace SDMS { 6 | namespace common { 7 | namespace version { 8 | constexpr int MAJOR = @DATAFED_COMMON_LIB_MAJOR@; 9 | constexpr int MINOR = @DATAFED_COMMON_LIB_MINOR@; 10 | constexpr int PATCH = @DATAFED_COMMON_LIB_PATCH@; 11 | } 12 | } 13 | } 14 | 15 | #endif // COMMON_VERSION_HPP 16 | 17 | -------------------------------------------------------------------------------- /common/include/common/fpconv.h:
-------------------------------------------------------------------------------- 1 | #ifndef FPCONV_H 2 | #define FPCONV_H 3 | 4 | /* Fast and accurate double to string conversion based on Florian Loitsch's 5 | * Grisu-algorithm[1]. 6 | * 7 | * Input: 8 | * fp -> the double to convert, dest -> destination buffer. 9 | * The generated string will never be longer than 24 characters. 10 | * Make sure to pass a pointer to at least 24 bytes of memory. 11 | * The emitted string will not be null terminated. 12 | * 13 | * Output: 14 | * The number of written characters. 15 | * 16 | * Example usage: 17 | * 18 | * void print(double d) 19 | * { 20 | * char buf[24 + 1]; // plus null terminator 21 | * int str_len = fpconv_dtoa(d, buf); 22 | * 23 | * buf[str_len] = '\0'; 24 | * printf("%s", buf); 25 | * } 26 | * 27 | */ 28 | 29 | int fpconv_dtoa(double fp, char dest[24]); 30 | 31 | #endif 32 | 33 | /* [1] http://florian.loitsch.com/publications/dtoa-pldi2010.pdf */ 34 | -------------------------------------------------------------------------------- /common/proto/common/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | add_library( protobuf-target ${ProtoFiles} ) 4 | 5 | # Create the .cpp and .hpp files 6 | protobuf_generate( 7 | TARGET protobuf-target 8 | LANGUAGE cpp 9 | IMPORT_DIRS "${DataFed_SOURCE_DIR}/common/proto/common" 10 | OUT_VAR protobuf-generated-files 11 | ) 12 | 13 | # Make sure that datafed-protobuf depends on the generated .cpp files when it compiles 14 | if(BUILD_SHARED_LIBS) 15 | add_library( datafed-protobuf SHARED ${protobuf-generated-files} ) 16 | else() 17 | add_library( datafed-protobuf STATIC ${protobuf-generated-files} ) 18 | endif() 19 | set_target_properties(datafed-protobuf PROPERTIES POSITION_INDEPENDENT_CODE ON SOVERSION ${DATAFED_COMMON_PROTOCOL_API_MAJOR} VERSION ${DATAFED_COMMON_PROTOCOL_API_MAJOR}.${DATAFED_COMMON_PROTOCOL_API_MINOR}.${DATAFED_COMMON_PROTOCOL_API_PATCH} ) 20 | target_link_libraries( datafed-protobuf protobuf::libprotobuf protobuf::libprotoc protobuf::libprotobuf-lite) 21 | target_include_directories( datafed-protobuf INTERFACE ${PROJECT_BINARY_DIR}/common/proto) 22 | -------------------------------------------------------------------------------- /common/source/CommunicatorFactory.cpp: -------------------------------------------------------------------------------- 1 | 2 | // Local private includes 3 | #include "communicators/ZeroMQCommunicator.hpp" 4 | #include "communicators/ZeroMQCommunicatorSecure.hpp" 5 | #include "sockets/ZeroMQSocket.hpp" 6 | 7 | // Local public includes 8 | #include "common/CommunicatorFactory.hpp" 9 | 10 | // Standard includes 11 | #include <memory> 12 | 13 | namespace SDMS { 14 | 15 | std::unique_ptr<ICommunicator> CommunicatorFactory::create( 16 | const SocketOptions &socket_options, const ICredentials &credentials, 17 | uint32_t timeout_on_receive, long timeout_on_poll) const { 18 | 19 | if (socket_options.protocol_type == ProtocolType::ZQTP) { 20 | if (socket_options.connection_security == 21 | SocketConnectionSecurity::INSECURE) { 22 | return std::unique_ptr<ICommunicator>(new ZeroMQCommunicator( 23 | socket_options, credentials, timeout_on_receive, timeout_on_poll, 24 | m_log_context)); 25 | } else { 26 | return std::unique_ptr<ICommunicator>(new ZeroMQCommunicatorSecure( 27 | socket_options, credentials, timeout_on_receive, timeout_on_poll, 28 | m_log_context)); 29 | } 30 | } 31 | return std::unique_ptr<ICommunicator>(); 32 | } 33 | 34 | } // namespace SDMS 35 |
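The factory above makes only two decisions, the protocol and the connection security, and forwards everything else untouched. A minimal usage sketch follows; the `CommunicatorFactory(LogContext)` constructor (mirroring `ServerFactory`) and the exact `SocketOptions` field spellings are assumptions inferred from this file rather than verified API:

```C++
// Usage sketch only: the constructor signature and option fields are assumptions.
#include "common/CommunicatorFactory.hpp"
#include "common/CredentialFactory.hpp"
#include "common/DynaLog.hpp"

#include <memory>

namespace SDMS {

std::unique_ptr<ICommunicator> makeInsecureZmqCommunicator(LogContext log_context) {
  SocketOptions options;
  options.protocol_type = ProtocolType::ZQTP;                       // take the ZeroMQ branch
  options.connection_security = SocketConnectionSecurity::INSECURE; // plain, non-CURVE variant

  // The insecure variant needs no key material, so an empty option map suffices.
  CredentialFactory credential_factory;
  auto credentials = credential_factory.create(ProtocolType::ZQTP, {});

  CommunicatorFactory factory(log_context); // assumed ctor, mirroring ServerFactory
  const uint32_t timeout_on_receive_ms = 1000;
  const long timeout_on_poll_ms = 100;
  return factory.create(options, *credentials, timeout_on_receive_ms,
                        timeout_on_poll_ms);
}

} // namespace SDMS
```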
-------------------------------------------------------------------------------- /common/source/CredentialFactory.cpp: -------------------------------------------------------------------------------- 1 | 2 | // Local private includes 3 | #include "credentials/ZeroMQSocketCredentials.hpp" 4 | 5 | // Local public includes 6 | #include "common/CredentialFactory.hpp" 7 | 8 | // Standard includes 9 | #include <memory> 10 | 11 | namespace SDMS { 12 | 13 | std::unique_ptr<ICredentials> CredentialFactory::create( 14 | const ProtocolType protocol_type, 15 | const std::unordered_map<CredentialType, std::string> &options) const { 16 | 17 | if (protocol_type == ProtocolType::ZQTP) { 18 | return std::unique_ptr<ICredentials>(new ZeroMQSocketCredentials(options)); 19 | } 20 | return std::unique_ptr<ICredentials>(); 21 | } 22 | 23 | } // namespace SDMS 24 | -------------------------------------------------------------------------------- /common/source/OperatorFactory.cpp: -------------------------------------------------------------------------------- 1 | 2 | // Local private includes 3 | #include "operators/AuthenticationOperator.hpp" 4 | #include "operators/RouterBookKeepingOperator.hpp" 5 | 6 | // Local public includes 7 | #include "common/OperatorFactory.hpp" 8 | 9 | // Standard includes 10 | #include <memory> 11 | #include <unordered_map> 12 | 13 | namespace SDMS { 14 | 15 | std::unordered_map<OperatorType, OperatorFactory::OperatorCreateMethod> 16 | OperatorFactory::m_create_methods; 17 | 18 | OperatorFactory::OperatorFactory() { 19 | registerOperator<OperatorType::Authenticator, AuthenticationOperator>(); 20 | registerOperator<OperatorType::RouterBookKeeping, 21 | RouterBookKeepingOperator>(); 22 | } 23 | 24 | std::unique_ptr<IOperator> OperatorFactory::create(const OperatorType type, 25 | std::any &options) const { 26 | if (m_create_methods.count(type)) { 27 | return m_create_methods[type](options); 28 | } 29 | return std::unique_ptr<IOperator>(); 30 | } 31 | 32 | } // namespace SDMS 33 | -------------------------------------------------------------------------------- /common/source/ProtoBufFactory.cpp: -------------------------------------------------------------------------------- 1 | // Local private includes 2 | #include "ProtoBufFactory.hpp" 3 | 4 | // Local public includes 5 | #include "common/SDMS_Anon.pb.h" 6 | #include "common/SDMS_Auth.pb.h" 7 | #include "common/TraceException.hpp" 8 | 9 | // Standard includes 10 | #include <memory> 11 | 12 | namespace SDMS { 13 | 14 | ProtoBufFactory::ProtoBufFactory() { 15 | Anon::Protocol_descriptor(); 16 | Auth::Protocol_descriptor(); 17 | m_factory = ::google::protobuf::MessageFactory::generated_factory(); 18 | } 19 | 20 | std::unique_ptr<::google::protobuf::Message> 21 | ProtoBufFactory::create(uint16_t desc_type) { 22 | const ::google::protobuf::Descriptor *msg_descriptor = 23 | m_proto_map.getDescriptorType(desc_type); 24 | return create(msg_descriptor); 25 | } 26 | 27 | // https://stackoverflow.com/questions/29960871/protobuf-message-object-creation-by-name 28 | std::unique_ptr<::google::protobuf::Message> 29 | ProtoBufFactory::create(const ::google::protobuf::Descriptor *msg_descriptor) { 30 | const ::google::protobuf::Message *prototype_msg = 31 | m_factory->GetPrototype(msg_descriptor); 32 | 33 | if (prototype_msg == nullptr) { 34 | EXCEPT(1, "Cannot create prototype message from message descriptor"); 35 | } 36 | 37 | ::google::protobuf::Message *mutable_msg = prototype_msg->New(); 38 | 39 | if (mutable_msg == nullptr) { 40 | EXCEPT(1, "Failed in prototype_msg->New(); to create mutable message"); 41 | } 42 | 43 | return std::unique_ptr<::google::protobuf::Message>(mutable_msg); 44 | } 45 | 46 | } // namespace SDMS 47 | -------------------------------------------------------------------------------- /common/source/ProtoBufFactory.hpp:
-------------------------------------------------------------------------------- 1 | #ifndef PROTOBUFFACTORY_HPP 2 | #define PROTOBUFFACTORY_HPP 3 | #pragma once 4 | 5 | // Local public includes 6 | #include "common/ProtoBufMap.hpp" 7 | #include "common/TraceException.hpp" 8 | 9 | // Local protobuf includes 10 | #include "common/SDMS_Anon.pb.h" 11 | #include "common/SDMS_Auth.pb.h" 12 | 13 | // Standard includes 14 | #include <memory> 15 | 16 | namespace SDMS { 17 | 18 | class ProtoBufFactory { 19 | ProtoBufMap m_proto_map; 20 | ::google::protobuf::MessageFactory *m_factory; 21 | 22 | public: 23 | ProtoBufFactory(); 24 | std::unique_ptr<::google::protobuf::Message> create(uint16_t desc_type); 25 | std::unique_ptr<::google::protobuf::Message> 26 | create(const ::google::protobuf::Descriptor *msg_descriptor); 27 | }; 28 | 29 | } // namespace SDMS 30 | 31 | #endif // PROTOBUFFACTORY_HPP 32 | -------------------------------------------------------------------------------- /common/source/SocketFactory.cpp: -------------------------------------------------------------------------------- 1 | 2 | // Local private includes 3 | #include "communicators/ZeroMQCommunicator.hpp" 4 | #include "sockets/ZeroMQSocket.hpp" 5 | 6 | // Local public includes 7 | #include "common/SocketFactory.hpp" 8 | #include "common/TraceException.hpp" 9 | 10 | // Standard includes 11 | #include <memory> 12 | 13 | namespace SDMS { 14 | 15 | std::unique_ptr<ISocket> 16 | SocketFactory::create(const SocketOptions &socket_options, 17 | const ICredentials &credentials) const { 18 | 19 | if (socket_options.protocol_type == ProtocolType::ZQTP) { 20 | return std::unique_ptr<ISocket>( 21 | new ZeroMQSocket(socket_options, credentials)); 22 | } 23 | EXCEPT(1, "Unsupported ProtocolType specified in SocketFactory."); 24 | } 25 | 26 | } // namespace SDMS 27 | -------------------------------------------------------------------------------- /common/source/communicators/ZeroMQCommunicatorSecure.hpp: -------------------------------------------------------------------------------- 1 | #ifndef ZEROMQ_COMMUNICATOR_SECURE_HPP 2 | #define ZEROMQ_COMMUNICATOR_SECURE_HPP 3 | #pragma once 4 | 5 | // Local private includes 6 | #include "../Buffer.hpp" 7 | #include "ZeroMQCommunicator.hpp" 8 | 9 | // Local public includes 10 | #include "common/DynaLog.hpp" 11 | #include "common/ICommunicator.hpp" 12 | #include "common/ICredentials.hpp" 13 | #include "common/IMessage.hpp" 14 | #include "common/ISocket.hpp" 15 | #include "common/SocketOptions.hpp" 16 | 17 | namespace SDMS { 18 | 19 | class ZeroMQCommunicatorSecure : public ZeroMQCommunicator { 20 | private: 21 | void zmqCurveSetup(const ICredentials &credentials); 22 | LogContext m_log_context; 23 | 24 | public: 25 | ZeroMQCommunicatorSecure(const SocketOptions &socket_options, 26 | const ICredentials &credentials, 27 | uint32_t timeout_on_receive_milliseconds, 28 | long timeout_on_poll_milliseconds, 29 | LogContext log_context); 30 | }; 31 | 32 | } // namespace SDMS 33 | 34 | #endif // ZEROMQ_COMMUNICATOR_SECURE_HPP 35 | -------------------------------------------------------------------------------- /common/source/operators/AuthenticationOperator.cpp: -------------------------------------------------------------------------------- 1 | 2 | // Local private includes 3 | #include "AuthenticationOperator.hpp" 4 | 5 | // Local public includes 6 | #include "common/TraceException.hpp" 7 | 8 | // Standard includes 9 | #include <any> 10 | #include <iostream> 11 | 12 | namespace SDMS { 13 | 14 | AuthenticationOperator::AuthenticationOperator(std::any &options) {
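// The operator factory forwards construction arguments as a type-erased
// std::any. The try/catch below recovers the expected payload, an
// IAuthenticationManager pointer, and reports a std::bad_cast otherwise.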
15 | try { 16 | m_authentication_manager = std::any_cast<IAuthenticationManager *>(options); 17 | } catch (std::bad_cast &error) { 18 | std::cerr << "Caught bad any cast in AuthenticationOperator constructor." 19 | << error.what() << std::endl; 20 | } 21 | } 22 | 23 | void AuthenticationOperator::execute(IMessage &message) { 24 | 25 | if (!message.exists(MessageAttribute::KEY)) { 26 | EXCEPT(1, "'KEY' attribute not defined."); 27 | } 28 | 29 | m_authentication_manager->purge(); 30 | 31 | std::string key = std::get<std::string>(message.get(MessageAttribute::KEY)); 32 | 33 | std::string uid = "anon"; 34 | if (m_authentication_manager->hasKey(key)) { 35 | m_authentication_manager->incrementKeyAccessCounter(key); 36 | uid = m_authentication_manager->getUID(key); 37 | } 38 | message.set(MessageAttribute::ID, uid); 39 | } 40 | 41 | } // namespace SDMS 42 | -------------------------------------------------------------------------------- /common/source/operators/RouterBookKeepingOperator.cpp: -------------------------------------------------------------------------------- 1 | 2 | // Local private includes 3 | #include "RouterBookKeepingOperator.hpp" 4 | 5 | // Local public includes 6 | #include "common/TraceException.hpp" 7 | 8 | // Standard includes 9 | #include <any> 10 | #include <iostream> 11 | 12 | namespace SDMS { 13 | 14 | RouterBookKeepingOperator::RouterBookKeepingOperator(std::any options) { 15 | try { 16 | m_client_socket_id = std::any_cast<std::string>(options); 17 | if (m_client_socket_id.size() == 0) { 18 | EXCEPT(1, "Cannot use a null identity for RouterBookKeepingOperator"); 19 | } 20 | } catch (std::bad_cast &error) { 21 | std::cerr << "Caught bad any cast in RouterBookKeepingOperator constructor." 22 | << error.what() << std::endl; 23 | } 24 | } 25 | 26 | void RouterBookKeepingOperator::execute(IMessage &message) { 27 | message.getRoutes().push_front(m_client_socket_id); 28 | } 29 | 30 | } // namespace SDMS 31 | -------------------------------------------------------------------------------- /common/source/operators/RouterBookKeepingOperator.hpp: -------------------------------------------------------------------------------- 1 | #ifndef ROUTERBOOKKEEPING_OPERATOR_HPP 2 | #define ROUTERBOOKKEEPING_OPERATOR_HPP 3 | #pragma once 4 | 5 | // Local includes 6 | #include "common/IMessage.hpp" 7 | #include "common/IOperator.hpp" 8 | #include "common/OperatorTypes.hpp" 9 | 10 | // Standard includes 11 | #include <memory> 12 | 13 | namespace SDMS { 14 | 15 | class RouterBookKeepingOperator : public IOperator { 16 | /** 17 | * Because ZMQ handles communication between a ROUTER-DEALER pair 18 | * differently from other socket combinations, this operator is 19 | * needed to add the router identity in cases where the ROUTER-DEALER 20 | * combination is not being used. 21 | **/ 22 | public: 23 | explicit RouterBookKeepingOperator(std::any options); 24 | 25 | static std::unique_ptr<IOperator> create(std::any options); 26 | 27 | private: 28 | std::string m_client_socket_id; 29 | 30 | virtual OperatorType type() const noexcept final { 31 | return OperatorType::RouterBookKeeping; 32 | } 33 | 34 | virtual void execute(IMessage &message) final; 35 | }; 36 | 37 | inline std::unique_ptr<IOperator> 38 | RouterBookKeepingOperator::create(std::any options) { 39 | return std::make_unique<RouterBookKeepingOperator>(options); 40 | } 41 | 42 | } // namespace SDMS 43 | 44 | #endif // ROUTERBOOKKEEPING_OPERATOR_HPP 45 | -------------------------------------------------------------------------------- /common/source/support/zeromq/Context.hpp:
-------------------------------------------------------------------------------- 1 | #ifndef ZMQCONTEXT_HPP 2 | #define ZMQCONTEXT_HPP 3 | #pragma once 4 | 5 | // Third party includes 6 | #include <zmq.h> 7 | 8 | namespace SDMS { 9 | /** 10 | * Singleton pattern for the security context of zmq. 11 | **/ 12 | inline void *getContext() { 13 | static void *context = zmq_ctx_new(); 14 | return context; 15 | } 16 | } // namespace SDMS 17 | 18 | #endif // ZMQCONTEXT_HPP 19 | -------------------------------------------------------------------------------- /common/tests/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | if( ENABLE_UNIT_TESTS ) 2 | add_subdirectory(unit) 3 | endif( ENABLE_UNIT_TESTS ) 4 | add_subdirectory(security) 5 | -------------------------------------------------------------------------------- /common/tests/security/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | add_subdirectory(tcp_secure) 2 | -------------------------------------------------------------------------------- /common/tests/security/tcp_secure/runtests.cmake: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.17.0) 2 | # Why are we running this as a CMake script 3 | # 4 | # The client, server and shell monitoring process need to be run concurrently 5 | 6 | message("Running in folder ${CMAKE_ARGV3}") 7 | 8 | find_program(TCPDUMP_CMD NAMES tcpdump) 9 | find_program(TIMEOUT_CMD NAMES timeout) 10 | 11 | set(BUFFER_TIME_SECONDS 1) 12 | MATH(EXPR MAX_RUN_TIME "${BUFFER_TIME_SECONDS}+1") 13 | 14 | # The insecure test should be run first to make sure the communication is occurring 15 | # as expected 16 | execute_process( 17 | COMMAND ${CMAKE_ARGV3}/test_tcp_secure_client -p "${BUFFER_TIME_SECONDS}" --insecure 18 | COMMAND ${CMAKE_ARGV3}/test_tcp_secure_server -p "${BUFFER_TIME_SECONDS}" --insecure 19 | COMMAND ${CMAKE_ARGV3}/test_tcp_insecure.sh ${TCPDUMP_CMD} ${TIMEOUT_CMD} ${MAX_RUN_TIME} 20 | RESULTS_VARIABLE STATUS1 21 | ) 22 | 23 | if("1" IN_LIST STATUS1) 24 | message(FATAL_ERROR "Insecure tcp test failed, this indicates a network connectivity issue") 25 | endif() 26 | 27 | execute_process( 28 | COMMAND ${CMAKE_ARGV3}/test_tcp_secure_client -p "${BUFFER_TIME_SECONDS}" 29 | COMMAND ${CMAKE_ARGV3}/test_tcp_secure_server -p "${BUFFER_TIME_SECONDS}" 30 | COMMAND ${CMAKE_ARGV3}/test_tcp_secure.sh ${TCPDUMP_CMD} ${TIMEOUT_CMD} ${MAX_RUN_TIME} 31 | RESULTS_VARIABLE STATUS2 32 | ) 33 | 34 | if("1" IN_LIST STATUS2) 35 | message(FATAL_ERROR "Secure tcp test failed, this indicates a problem with the security encryption") 36 | endif() 37 | 38 | 39 | -------------------------------------------------------------------------------- /common/tests/security/tcp_secure/test_tcp_secure.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # If no arguments are provided assume command paths have not been passed in 4 | if [ $# -eq 0 ] 5 | then 6 | TIMEOUT_CMD="timeout" 7 | TCPDUMP_CMD="tcpdump" 8 | MAX_TEST_TIME_SEC=2 9 | else 10 | TCPDUMP_CMD=$1 11 | TIMEOUT_CMD=$2 12 | MAX_TEST_TIME_SEC=$3 13 | fi 14 | 15 | # Check that pcap group exists and the user is part of it 16 | if [ $(getent group pcap) ] 17 | then 18 | if id -nG "$USER" | grep -qw "pcap" 19 | then 20 | echo "CONTINUE" 21 | else 22 | echo "SKIPPING - user does not belong to pcap group, cannot run tcp_secure test" 23 | exit 0 24 | fi 25 | else 26 | echo "SKIPPING - pcap
group does not exist, cannot run tcp_secure test" 27 | exit 0 28 | fi 29 | 30 | echo 31 | echo "Running with:" 32 | echo "TCPDUMP: ${TCPDUMP_CMD}" 33 | echo "TIMEOUT: ${TIMEOUT_CMD}" 34 | echo "MAX_TEST_TIME: ${MAX_TEST_TIME_SEC}" 35 | 36 | # Capture packets sent on the loopback interface (127.0.0.1) and port 7515 for up to MAX_TEST_TIME_SEC seconds 37 | match=$( "${TIMEOUT_CMD}" "${MAX_TEST_TIME_SEC}" "${TCPDUMP_CMD}" -vvv -A port 7515 -i lo | grep token) 38 | 39 | echo "Content of grep ${match}" 40 | # If '.magic_token' is returned from the network sniffer then we know that 41 | # the encryption is not working 42 | if [[ "${match}" == ".magic_token" ]] 43 | then 44 | echo "FAILED - the connection is insecure, we were able to pull out the token" 45 | exit 1 46 | else 47 | echo "SUCCESS - the connection is secure, we were unable to pull out the token" 48 | exit 0 49 | fi 50 | -------------------------------------------------------------------------------- /common/tests/unit/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | # Each test listed in Alphabetical order 2 | foreach(PROG 3 | test_Buffer 4 | test_CommunicatorFactory 5 | test_Frame 6 | test_DynaLog 7 | test_Value 8 | test_MessageFactory 9 | test_OperatorFactory 10 | test_ProtoBufFactory 11 | test_ProtoBufMap 12 | test_Proxy 13 | test_ProxyBasicZMQ 14 | test_SocketFactory 15 | test_SocketOptions 16 | ) 17 | 18 | include_directories(${PROJECT_SOURCE_DIR}/common/source) 19 | file(GLOB ${PROG}_SOURCES ${PROG}.cpp) 20 | add_executable(unit_${PROG} ${${PROG}_SOURCES}) 21 | if(BUILD_SHARED_LIBS) 22 | target_link_libraries(unit_${PROG} PRIVATE ${DATAFED_BOOST_LIBRARIES} 23 | common libzmq protobuf::libprotobuf Threads::Threads) 24 | target_compile_definitions(unit_${PROG} PRIVATE BOOST_TEST_DYN_LINK) 25 | else() 26 | target_link_libraries(unit_${PROG} PRIVATE ${DATAFED_BOOST_LIBRARIES} 27 | common libzmq-static protobuf::libprotobuf Threads::Threads) 28 | endif() 29 | # Only want this if using shared boost libraries 30 | add_test(unit_${PROG} unit_${PROG}) 31 | 32 | endforeach(PROG) 33 | 34 | -------------------------------------------------------------------------------- /common/tests/unit/test_ProtoBufFactory.cpp: -------------------------------------------------------------------------------- 1 | #define BOOST_TEST_MAIN 2 | 3 | #define BOOST_TEST_MODULE protobuffactory 4 | #include <boost/filesystem.hpp> 5 | #include <boost/test/unit_test.hpp> 6 | 7 | // Local private includes 8 | #include "ProtoBufFactory.hpp" 9 | 10 | // Local public includes 11 | #include "common/ProtoBufMap.hpp" 12 | 13 | // Proto file includes 14 | #include "common/SDMS_Anon.pb.h" 15 | #include "common/SDMS_Auth.pb.h" 16 | 17 | // Standard includes 18 | #include <memory> 19 | 20 | using namespace SDMS; 21 | 22 | BOOST_AUTO_TEST_SUITE(ProtoBufFactoryTest) 23 | 24 | BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory) { 25 | 26 | ProtoBufMap proto_map; 27 | ProtoBufFactory proto_factory; 28 | 29 | SDMS::Anon::VersionRequest version_request; 30 | uint16_t msg_type = proto_map.getMessageType(version_request); 31 | auto msg = proto_factory.create(msg_type); 32 | BOOST_CHECK(msg_type == proto_map.getMessageType(*msg)); 33 | } 34 | 35 | BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory2) { 36 | 37 | ProtoBufMap proto_map; 38 | ProtoBufFactory proto_factory; 39 | 40 | SDMS::Anon::NackReply nack_reply; 41 | uint16_t msg_type = proto_map.getMessageType(nack_reply); 42 | auto msg = proto_factory.create(msg_type); 43 | BOOST_CHECK(msg_type == proto_map.getMessageType(*msg)); 44 | 45 | auto nack_reply_new =
dynamic_cast<SDMS::Anon::NackReply &>(*msg); 47 | 48 | nack_reply_new.set_err_msg("This is working"); 49 | } 50 | BOOST_AUTO_TEST_SUITE_END() -------------------------------------------------------------------------------- /common/tests/unit/test_ProtoBufMap.cpp: -------------------------------------------------------------------------------- 1 | #define BOOST_TEST_MAIN 2 | 3 | #define BOOST_TEST_MODULE protobuffactory 4 | #include <boost/filesystem.hpp> 5 | #include <boost/test/unit_test.hpp> 6 | 7 | // Local private includes 8 | #include "ProtoBufFactory.hpp" 9 | 10 | // Local public includes 11 | #include "common/ProtoBufMap.hpp" 12 | 13 | // Standard includes 14 | #include <iostream> 15 | 16 | using namespace SDMS; 17 | 18 | BOOST_AUTO_TEST_SUITE(ProtoBufFactoryTest) 19 | 20 | BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory_ProtocolID) { 21 | ProtoBufMap proto_map; 22 | 23 | uint8_t proto_id = 24 | proto_map.getProtocolID(MessageProtocol::GOOGLE_ANONONYMOUS); 25 | BOOST_CHECK(proto_id == 1); 26 | proto_id = proto_map.getProtocolID(MessageProtocol::GOOGLE_AUTHORIZED); 27 | BOOST_CHECK(proto_id == 2); 28 | } 29 | 30 | BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory) { 31 | ProtoBufMap proto_map; 32 | ProtoBufFactory proto_factory; 33 | 34 | SDMS::Anon::VersionRequest version_request; 35 | uint16_t msg_type = proto_map.getMessageType(version_request); 36 | auto msg = proto_factory.create(msg_type); 37 | std::cout << "VersionRequest msg_type of VersionRequest, " << msg_type 38 | << " and " << proto_map.getMessageType(*msg) << std::endl; 39 | } 40 | 41 | BOOST_AUTO_TEST_CASE(testing_ProtoBufMap_toString) { 42 | ProtoBufMap proto_map; 43 | SDMS::Anon::VersionRequest version_request; 44 | uint16_t msg_type = proto_map.getMessageType(version_request); 45 | auto name = proto_map.toString(msg_type); 46 | BOOST_CHECK(name.compare("VersionRequest") == 0); 47 | } 48 | 49 | BOOST_AUTO_TEST_SUITE_END() 50 | -------------------------------------------------------------------------------- /compose/all/build_images_for_compose.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 8 | 9 | "${PROJECT_ROOT}/scripts/compose_build_images.sh" 10 | -------------------------------------------------------------------------------- /compose/all/cleanup_globus_files.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SCRIPT=$(realpath "$0") 3 | SOURCE=$(dirname "$SCRIPT") 4 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 5 | 6 | "${PROJECT_ROOT}/scripts/compose_cleanup_globus_files.sh" -d "$(pwd)" 7 | 8 | -------------------------------------------------------------------------------- /compose/all/generate_env.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SCRIPT=$(realpath "$0") 3 | SOURCE=$(dirname "$SCRIPT") 4 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 5 | 6 | "${PROJECT_ROOT}/scripts/compose_generate_env.sh" -d "$(pwd)" 7 | 8 | -------------------------------------------------------------------------------- /compose/all/generate_globus_files.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SCRIPT=$(realpath "$0") 3 | SOURCE=$(dirname "$SCRIPT") 4 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 5 | 6 | "${PROJECT_ROOT}/scripts/compose_generate_globus_files.sh" -d "$(pwd)" 7 |
-------------------------------------------------------------------------------- /compose/all/globus-connect-server.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/compose/all/globus-connect-server.log -------------------------------------------------------------------------------- /compose/metadata/build_metadata_images_for_compose.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 8 | 9 | BUILD_ARG_FILE="$SOURCE/.build-args" 10 | if [ $# -gt 0 ]; then 11 | # Do not check if the file already exists because other generate scripts 12 | # from a different repo might have already put their args in the file, 13 | # and calling this script might be the second step, where this step would 14 | # be appending to an existing file. 15 | BUILD_ARG_FILE="$1/.build-args" 16 | fi 17 | 18 | # Generate arg list 19 | 20 | if [ ! -f "$BUILD_ARG_FILE" ] 21 | then 22 | echo "Missing .build-args file, please run generate_build_args.sh first." 23 | fi 24 | 25 | # Load the variables from the build_args 26 | . "$BUILD_ARG_FILE" 27 | 28 | echo "BASE_IMAGE is $BASE_IMAGE" 29 | 30 | "${PROJECT_ROOT}/scripts/compose_build_images.sh" -m -b "$BASE_IMAGE" 31 | -------------------------------------------------------------------------------- /compose/metadata/generate_build_args.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Description 4 | # 5 | # The point of this file is to generate a .build-args file. The .build-args 6 | # file contains variables that are used when building the docker images. 7 | # 8 | # Example 9 | # 10 | # ./generate_build_args.sh 11 | 12 | SCRIPT=$(realpath "$0") 13 | SOURCE=$(dirname "$SCRIPT") 14 | 15 | BUILD_ARG_FILE="$SOURCE/.build-args" 16 | if [ $# -gt 0 ]; then 17 | # Do not check if the file already exists because other generate scripts 18 | # from a different repo might have already put their args in the file, 19 | # and calling this script might be the second step, where this step would 20 | # be appending to an existing file. 21 | BUILD_ARG_FILE="$1/.build-args" 22 | else 23 | 24 | # Force the user to manually rm the file to avoid accidental overwrites. 25 | if [ -f "$BUILD_ARG_FILE" ] 26 | then 27 | echo "$BUILD_ARG_FILE already exists! Will not overwrite!" 28 | exit 0 29 | fi 30 | fi 31 | 32 | # Append to the .build-args file rather than overwriting it.
33 | cat << EOF >> "$BUILD_ARG_FILE" 34 | BASE_IMAGE=ubuntu:focal 35 | EOF 36 | -------------------------------------------------------------------------------- /compose/metadata/generate_env.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SCRIPT=$(realpath "$0") 3 | SOURCE=$(dirname "$SCRIPT") 4 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 5 | 6 | "${PROJECT_ROOT}/scripts/compose_generate_env.sh" -d "$(pwd)" -m 7 | 8 | -------------------------------------------------------------------------------- /compose/repo/build_repo_images_for_compose.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 8 | 9 | "${PROJECT_ROOT}/scripts/compose_build_images.sh" -r 10 | -------------------------------------------------------------------------------- /compose/repo/cleanup_globus_files.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SCRIPT=$(realpath "$0") 3 | SOURCE=$(dirname "$SCRIPT") 4 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 5 | 6 | "${PROJECT_ROOT}/scripts/compose_cleanup_globus_files.sh" -d "$(pwd)" 7 | 8 | -------------------------------------------------------------------------------- /compose/repo/generate_env.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SCRIPT=$(realpath "$0") 3 | SOURCE=$(dirname "$SCRIPT") 4 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 5 | 6 | "${PROJECT_ROOT}/scripts/compose_generate_env.sh" -d "$(pwd)" -r 7 | 8 | -------------------------------------------------------------------------------- /compose/repo/generate_globus_files.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SCRIPT=$(realpath "$0") 3 | SOURCE=$(dirname "$SCRIPT") 4 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 5 | 6 | "${PROJECT_ROOT}/scripts/compose_generate_globus_files.sh" -d "$(pwd)" 7 | -------------------------------------------------------------------------------- /config/gsi-authz.conf: -------------------------------------------------------------------------------- 1 | GLOBUS_GSI_AUTHZ_SYSTEM_INIT /opt/datafed/authz/libdatafed-authz gsi_authz_init 2 | GLOBUS_GSI_AUTHZ_SYSTEM_DESTROY /opt/datafed/authz/libdatafed-authz gsi_authz_destroy 3 | GLOBUS_GSI_AUTHZ_HANDLE_INIT /opt/datafed/authz/libdatafed-authz gsi_authz_handle_init 4 | GLOBUS_GSI_AUTHORIZE_ASYNC /opt/datafed/authz/libdatafed-authz gsi_authz_authorize_async 5 | GLOBUS_GSI_AUTHZ_CANCEL /opt/datafed/authz/libdatafed-authz gsi_authz_cancel 6 | GLOBUS_GSI_AUTHZ_HANDLE_DESTROY /opt/datafed/authz/libdatafed-authz gsi_authz_handle_destroy 7 | GLOBUS_GSI_GET_AUTHORIZATION_IDENTITY /opt/datafed/authz/libdatafed-authz gsi_authz_identify 8 | globus_mapping /opt/datafed/authz/libdatafed-authz gsi_map_user 9 | -------------------------------------------------------------------------------- /core/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | if( BUILD_FOXX ) 4 | add_subdirectory (database) 5 | endif() 6 | 7 | if( BUILD_CORE_SERVER ) 8 | include_directories(${CMAKE_BINARY_DIR}/common/include) 9 | add_subdirectory (server) 10 | endif() 11 | 12 | -------------------------------------------------------------------------------- 
/core/database/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | # 3.7.0 is required to use test fixtures 3 | configure_file( 4 | "${CMAKE_CURRENT_SOURCE_DIR}/foxx/api/version_router.js.in" 5 | "${CMAKE_CURRENT_SOURCE_DIR}/foxx/api/version_router.js" 6 | @ONLY) 7 | 8 | configure_file( 9 | "${CMAKE_CURRENT_SOURCE_DIR}/foxx/manifest.json.in" 10 | "${CMAKE_CURRENT_SOURCE_DIR}/foxx/manifest.json" 11 | @ONLY) 12 | 13 | if( ENABLE_FOXX_TESTS ) 14 | add_test(NAME foxx_setup COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_setup.sh") 15 | add_test(NAME foxx_teardown COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_teardown.sh") 16 | add_test(NAME foxx_version COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "version") 17 | add_test(NAME foxx_support COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "support") 18 | 19 | set_tests_properties(foxx_setup PROPERTIES FIXTURES_SETUP Foxx) 20 | set_tests_properties(foxx_teardown PROPERTIES FIXTURES_CLEANUP Foxx) 21 | set_tests_properties(foxx_version PROPERTIES FIXTURES_REQUIRED Foxx) 22 | set_tests_properties(foxx_support PROPERTIES FIXTURES_REQUIRED Foxx) 23 | endif() 24 | -------------------------------------------------------------------------------- /core/database/backup/datafed-backup-cron: -------------------------------------------------------------------------------- 1 | # crontab -e 2 | SHELL=/bin/bash 3 | MAILTO=stansberrydv@ornl.gov 4 | PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin 5 | 6 | # For details see man 4 crontabs 7 | 8 | # Example of job definition: 9 | # .---------------- minute (0 - 59) 10 | # | .------------- hour (0 - 23) 11 | # | | .---------- day of month (1 - 31) 12 | # | | | .------- month (1 - 12) OR jan,feb,mar,apr ... 13 | # | | | | .---- day of week (0 - 6) (Sunday=0 or 7) OR sun,mon,tue,wed,thu,fri,sat 14 | # | | | | | 15 | # * * * * * user-name command to be executed 16 | 17 | # backup 11:45 pm Sunday night 18 | #45 23 * * 0 /opt/datafed/datafed-backup.sh 19 | 30 15 * * * /opt/datafed/datafed-backup.sh 20 | -------------------------------------------------------------------------------- /core/database/backup/datafed-backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "DataFed - running DB backup" 4 | 5 | # Shut down DataFed services 6 | systemctl stop globus-gridftp-server.service 7 | systemctl stop datafed-ws.service 8 | systemctl stop datafed-repo.service 9 | systemctl stop datafed-core.service 10 | systemctl stop arangodb3.service 11 | 12 | backup_file=DataFed_DB_Backup_$(date +"%Y_%m_%d").tar.gz 13 | 14 | # Tar the contents of the arangodb directory without full paths 15 | tar -C /var/lib/arangodb3 -cvzf "${backup_file}" .
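# Note: -C switches tar into the ArangoDB data directory first, so entries are
# stored with relative paths and the archive can be restored to any location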
16 | 17 | # Move backup file to storage location 18 | mv "${backup_file}" /data/backups 19 | 20 | # Restart DataFed services 21 | systemctl start arangodb3.service 22 | systemctl start datafed-core.service 23 | systemctl start globus-gridftp-server.service 24 | systemctl start datafed-repo.service 25 | systemctl start datafed-ws.service 26 | 27 | echo "DataFed - backup completed" 28 | -------------------------------------------------------------------------------- /core/database/foxx/README.md: -------------------------------------------------------------------------------- 1 | 2 | # Running tests 3 | 4 | Note: the tests in the tests folder should not be run outside of CMake; they are designed to be run after being uploaded to Arango. 5 | -------------------------------------------------------------------------------- /core/database/foxx/api/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "node": true, 4 | "es6":true 5 | }, 6 | "settings":{ 7 | }, 8 | "extends": ["eslint:recommended","plugin:import/errors","plugin:import/warnings"], 9 | "globals": { 10 | "ace":true, 11 | "d3":true, 12 | "Cookies":true 13 | }, 14 | "rules": { 15 | "semi": "error", 16 | "no-unused-vars": ["error", { "args": "none" }], 17 | "import/no-unresolved": [2, {"caseSensitive": true}] 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /core/database/foxx/api/config_router.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const createRouter = require('@arangodb/foxx/router'); 4 | const router = createRouter(); 5 | const g_db = require('@arangodb').db; 6 | const g_lib = require('./support'); 7 | 8 | module.exports = router; 9 | 10 | 11 | router.get('/msg/daily', function(req, res) { 12 | try { 13 | var msg = {}, 14 | key = { 15 | _key: "msg_daily" 16 | }; 17 | 18 | if (g_db.config.exists(key)) { 19 | msg = g_db.config.document(key); 20 | 21 | delete msg._id; 22 | delete msg._key; 23 | delete msg._rev; 24 | } 25 | 26 | res.send(msg); 27 | } catch (e) { 28 | g_lib.handleException(e, res); 29 | } 30 | }) 31 | .summary('Get message of the day.') 32 | .description('Get message of the day.
If not set, an empty document will be returned.'); -------------------------------------------------------------------------------- /core/database/foxx/api/version_router.js.in: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const createRouter = require('@arangodb/foxx/router'); 4 | const router = createRouter(); 5 | const joi = require('joi'); 6 | const g_db = require('@arangodb').db; 7 | const g_lib = require('./support'); 8 | 9 | module.exports = router; 10 | 11 | 12 | //==================== VERSION API FUNCTIONS 13 | 14 | 15 | router.get('/version', function(req, res) { 16 | try { 17 | res.send({ 18 | "release_year": @DATAFED_RELEASE_YEAR@, 19 | "release_month": @DATAFED_RELEASE_MONTH@, 20 | "release_day": @DATAFED_RELEASE_DAY@, 21 | "release_hour": @DATAFED_RELEASE_HOUR@, 22 | "release_minute": @DATAFED_RELEASE_MINUTE@, 23 | "api_major": @DATAFED_FOXX_API_MAJOR@, 24 | "api_minor": @DATAFED_FOXX_API_MINOR@, 25 | "api_patch": @DATAFED_FOXX_API_PATCH@, 26 | "component_major": @DATAFED_FOXX_MAJOR@, 27 | "component_minor": @DATAFED_FOXX_MINOR@, 28 | "component_patch": @DATAFED_FOXX_PATCH@ 29 | }); 30 | } catch (e) { 31 | g_lib.handleException(e, res); 32 | } 33 | }) 34 | .summary('Get version numbers') 35 | .description('Get version number of Foxx service, of foxx API and of release'); 36 | -------------------------------------------------------------------------------- /core/database/foxx/db_clear.js: -------------------------------------------------------------------------------- 1 | db._useDatabase('sdms'); 2 | db._truncate("u"); 3 | db._truncate("accn"); 4 | db._truncate("uuid"); 5 | db._truncate("p"); 6 | db._truncate("g"); 7 | db._truncate("d"); 8 | db._truncate("c"); 9 | db._truncate("t"); 10 | db._truncate("a"); 11 | db._truncate("q"); 12 | db._truncate("globus_token"); 13 | db._truncate("globus_coll"); 14 | db._truncate("owner"); 15 | db._truncate("member"); 16 | db._truncate("item"); 17 | db._truncate("acl"); 18 | db._truncate("top"); 19 | db._truncate("ident"); 20 | db._truncate("admin"); 21 | db._truncate("alias"); 22 | db._truncate("alloc"); 23 | db._truncate("loc"); 24 | db._truncate("dep"); 25 | db._truncate("lock"); 26 | db._truncate("block"); 27 | 28 | db._dropView("textview"); 29 | db._dropView("projview"); 30 | -------------------------------------------------------------------------------- /core/database/foxx/db_migrate_0_10.js: -------------------------------------------------------------------------------- 1 | db._useDatabase('sdms'); 2 | 3 | db._query("for i in alloc update i with { data_limit: i.max_size, data_size: i.tot_size, rec_limit: i.max_count, rec_count: i.tot_count } in alloc"); 4 | db._query("for i in alloc update i with { max_count: null, max_size: null, tot_size: null, tot_count: null } in alloc options { keepNull:false }"); -------------------------------------------------------------------------------- /core/database/foxx/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | require("@arangodb/aql/cache").properties({ 4 | mode: "demand" 5 | }); 6 | 7 | const createRouter = require('@arangodb/foxx/router'); 8 | const router = createRouter(); 9 | 10 | router.use("/usr", require('./api/user_router')); 11 | router.use("/prj", require('./api/proj_router')); 12 | router.use("/grp", require('./api/group_router')); 13 | router.use("/dat", require('./api/data_router')); 14 | router.use("/col", require('./api/coll_router')); 15 |
router.use("/acl", require('./api/acl_router')); 16 | router.use("/qry", require('./api/query_router')); 17 | router.use("/topic", require('./api/topic_router')); 18 | router.use("/tag", require('./api/tag_router')); 19 | router.use("/note", require('./api/note_router')); 20 | router.use("/authz", require('./api/authz_router')); 21 | router.use("/repo", require('./api/repo_router')); 22 | router.use("/task", require('./api/task_router')); 23 | router.use("/schema", require('./api/schema_router')); 24 | router.use("/config", require('./api/config_router')); 25 | router.use("/metrics", require('./api/metrics_router')); 26 | router.use("/admin", require('./api/admin_router')); 27 | router.use("/", require('./api/version_router')); 28 | 29 | module.context.use(router); -------------------------------------------------------------------------------- /core/database/foxx/manifest.json.in: -------------------------------------------------------------------------------- 1 | { 2 | "engines": { 3 | "arangodb": "^3.0.0" 4 | }, 5 | "main": "index.js", 6 | "name": "DataFed", 7 | "description": "DataFed ArangoDB Microservices API", 8 | "version": "@DATAFED_FOXX_API_MAJOR@.@DATAFED_FOXX_API_MINOR@.@DATAFED_FOXX_API_PATCH@", 9 | "tests": "tests/**/*.js" 10 | } 11 | -------------------------------------------------------------------------------- /core/database/foxx/passwd.file: -------------------------------------------------------------------------------- 1 | mastermind 2 | -------------------------------------------------------------------------------- /core/database/foxx/tests/support.test.js: -------------------------------------------------------------------------------- 1 | "use strict" 2 | 3 | // Integration test of API 4 | const chai = require("chai"); 5 | const should = chai.should(); 6 | const expect = chai.expect; 7 | const assert = chai.assert; 8 | const g_lib = require("../api/support") 9 | 10 | describe("the Foxx microservice support module evaluating isUUID.", () => { 11 | it("should return true if string is a UUID.", () => { 12 | var uuid = "XXXXYYYY-XXXX-YYYY-XXXX-YYYYXXXXYYYY" 13 | expect(g_lib.isUUID(uuid)).to.be.true; 14 | }); 15 | }); 16 | 17 | describe("the Foxx microservice support module evaluating isUUIDList.", () => { 18 | it("should return true if string is a UUID List.", () => { 19 | var uuids = "XXXXYYYY-XXXX-YYYY-XXXX-YYYYXXXXYYYY,XXXXYYYY-XXXX-YYYY-XXXX-YYYYXXXXYYYY" 20 | expect(g_lib.isUUIDList(uuids)).to.be.true; 21 | }); 22 | it("should return false because one of the provided items is not a uuid", () => { 23 | var uuids = "XXXXYYYY-XXXX-YYYY-XXXX-YYYYXXXXYYYY,132" 24 | expect(g_lib.isUUIDList(uuids)).to.be.false; 25 | }); 26 | }); -------------------------------------------------------------------------------- /core/database/foxx/tests/version.test.js: -------------------------------------------------------------------------------- 1 | "use strict" 2 | 3 | // Integration test of API 4 | const chai = require("chai"); 5 | const should = chai.should(); 6 | const expect = chai.expect; 7 | const request = require("@arangodb/request"); 8 | const { 9 | baseUrl 10 | } = module.context; 11 | 12 | describe("the Foxx microservice version route.", () => { 13 | it("should return version information about the release and the foxx service and api versions.", () => { 14 | const response = request.get(`${baseUrl}/version`); 15 | expect(response.status).to.equal(200); 16 | var object = JSON.parse(response.body); 17 | object.should.have.property("release_year"); 18 | 
object.should.have.property("release_month"); 19 | object.should.have.property("release_day"); 20 | object.should.have.property("release_hour"); 21 | object.should.have.property("release_minute"); 22 | }); 23 | }); -------------------------------------------------------------------------------- /core/database/tests/test_teardown.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # -e has been removed so that if an error occurs the PASSWORD File is deleted and not left lying around 4 | set -uf -o pipefail 5 | 6 | SCRIPT=$(realpath "$0") 7 | SOURCE=$(dirname "$SCRIPT") 8 | PROJECT_ROOT=$(realpath "${SOURCE}/../../../") 9 | source "${PROJECT_ROOT}/config/datafed.sh" 10 | 11 | PATH_TO_PASSWD_FILE="${SOURCE}/database_temp.password" 12 | rm "${PATH_TO_PASSWD_FILE}" 13 | -------------------------------------------------------------------------------- /core/docker/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | if [ -n "$UID" ]; then 6 | usermod -u $UID datafed 7 | fi 8 | 9 | SCRIPT=$(realpath "$0") 10 | SOURCE=$(dirname "$SCRIPT") 11 | PROJECT_ROOT=$(realpath "${SOURCE}/../..") 12 | 13 | env 14 | 15 | "${PROJECT_ROOT}/scripts/generate_datafed.sh" 16 | "${PROJECT_ROOT}/scripts/generate_core_config.sh" 17 | "${PROJECT_ROOT}/scripts/install_core.sh" 18 | 19 | log_path="$DATAFED_DEFAULT_LOG_PATH" 20 | 21 | if [ ! -d "${log_path}" ] 22 | then 23 | su -c "mkdir -p ${log_path}" datafed 24 | fi 25 | 26 | echo "Number of arguments is $#" 27 | echo "arguments are $@" 28 | 29 | if [ "$#" -eq 0 ]; then 30 | echo "No arguments were passed, running bash" 31 | exec "bash" 32 | exit 0 33 | fi 34 | 35 | datafed_core_exec=$(basename "$1") 36 | if [ "${datafed_core_exec}" = "datafed-core" ] 37 | then 38 | # Send output to log file 39 | # For this to work all commands must be passed in as a single string 40 | su datafed -c '"$@"' -- argv0 "$@" 2>&1 | su datafed -c "tee $log_path/datafed-core.log" 41 | else 42 | echo "Not sending output to datafed-core.log" 43 | # If not do not by default send to log file 44 | su datafed -c '"$@"' -- argv0 "$@" 45 | fi 46 | 47 | echo "Give a few minutes to debug the problem" 48 | sleep 10000 49 | -------------------------------------------------------------------------------- /core/server/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | configure_file( 4 | "${CMAKE_CURRENT_SOURCE_DIR}/Version.hpp.in" 5 | "${CMAKE_CURRENT_SOURCE_DIR}/Version.hpp" 6 | @ONLY) 7 | 8 | file( GLOB Sources "*.cpp" ) 9 | file( GLOB Main "main.cpp") 10 | list(REMOVE_ITEM Sources files ${Main}) 11 | 12 | # Must be public for unit tests to import them 13 | if(BUILD_SHARED_LIBS) 14 | add_library( datafed-core-lib SHARED ${Sources} ) 15 | target_link_libraries( datafed-core-lib PRIVATE protobuf::libprotobuf Threads::Threads 16 | "${DATAFED_CURL_LIBRARIES}" "${OPENSSL_SSL_LIBRARY}" "${OPENSSL_CRYPTO_LIBRARY}" 17 | "${DATAFED_ZLIB_LIBRARIES}" ${DATAFED_BOOST_LIBRARIES} libzmq nlohmann_json_schema_validator -ldl ) 18 | else() 19 | add_library( datafed-core-lib STATIC ${Sources} ) 20 | target_link_libraries( datafed-core-lib PRIVATE protobuf::libprotobuf Threads::Threads 21 | "${DATAFED_CURL_LIBRARIES}" "${OPENSSL_SSL_LIBRARY}" "${OPENSSL_CRYPTO_LIBRARY}" 22 | "${DATAFED_ZLIB_LIBRARIES}" "${DATAFED_BOOST_LIBRARIES}" libzmq-static 23 | 
"${DATAFED_JSON_SCHEMA_LIBRARY_PATH}" -ldl ) 24 | endif() 25 | target_include_directories( datafed-core-lib PUBLIC ${CMAKE_CURRENT_SOURCE_DIR} ) 26 | set_target_properties(datafed-core-lib PROPERTIES POSITION_INDEPENDENT_CODE ON) 27 | target_link_libraries( datafed-core-lib PUBLIC common datafed-protobuf ) 28 | add_executable( datafed-core ${Main} ) 29 | target_link_libraries( datafed-core datafed-core-lib ) 30 | 31 | add_subdirectory(tests) 32 | -------------------------------------------------------------------------------- /core/server/Condition.cpp: -------------------------------------------------------------------------------- 1 | 2 | // Local private includes 3 | #include "Condition.hpp" 4 | 5 | // Standard includes 6 | #include 7 | 8 | namespace SDMS { 9 | namespace Core { 10 | 11 | void Promote::enforce(AuthMap &auth_map, const std::string &public_key) { 12 | if (auth_map.hasKeyType(m_promote_from, public_key)) { 13 | size_t access_count = auth_map.getAccessCount(m_promote_from, public_key); 14 | if (access_count >= m_transient_to_session_count_threshold) { 15 | // Convert transient key to session key if has been accessed more than the 16 | // threshold 17 | std::string uid = auth_map.getUID(m_promote_from, public_key); 18 | auth_map.addKey(m_promote_to, public_key, uid); 19 | } 20 | // Remove expired short lived transient key 21 | auth_map.removeKey(m_promote_from, public_key); 22 | } 23 | } 24 | 25 | void Reset::enforce(AuthMap &auth_map, const std::string &public_key) { 26 | if (auth_map.hasKeyType(m_act_on_key_type, public_key)) { 27 | size_t access_count = 28 | auth_map.getAccessCount(m_act_on_key_type, public_key); 29 | if (access_count >= m_access_attempts) { 30 | // If the session key has been accessed within the threshold then reset 31 | // the active period 32 | auth_map.resetKey(m_act_on_key_type, public_key); 33 | } else { 34 | // If the key has not been used then remove it. 
35 | auth_map.removeKey(m_act_on_key_type, public_key); 36 | } 37 | } 38 | } 39 | 40 | } // namespace Core 41 | } // namespace SDMS 42 | -------------------------------------------------------------------------------- /core/server/ICoreServer.hpp: -------------------------------------------------------------------------------- 1 | #ifndef ICORESERVER_HPP 2 | #define ICORESERVER_HPP 3 | #pragma once 4 | 5 | // Common public libraries 6 | #include "common/DynaLog.hpp" 7 | 8 | // Standard includes 9 | #include <string> 10 | 11 | namespace SDMS { 12 | namespace Core { 13 | 14 | class ICoreServer { 15 | public: 16 | virtual void authenticateClient(const std::string &a_cert_uid, 17 | const std::string &a_key, 18 | const std::string &a_uid, 19 | LogContext log_context) = 0; 20 | virtual void metricsUpdateMsgCount(const std::string &a_uid, 21 | uint16_t a_msg_type) = 0; 22 | }; 23 | 24 | } // namespace Core 25 | } // namespace SDMS 26 | 27 | #endif 28 | -------------------------------------------------------------------------------- /core/server/ITaskMgr.hpp: -------------------------------------------------------------------------------- 1 | #ifndef ITASKMGR_HPP 2 | #define ITASKMGR_HPP 3 | #pragma once 4 | 5 | // Local private includes 6 | #include "ITaskWorker.hpp" 7 | 8 | // Local public includes 9 | #include "common/DynaLog.hpp" 10 | #include "common/libjson.hpp" 11 | 12 | // Standard includes 13 | #include <chrono> 14 | #include <memory> 15 | 16 | namespace SDMS { 17 | namespace Core { 18 | 19 | /** 20 | * @brief Interface used by TaskWorkers to interact with TaskMgr 21 | * 22 | * This interface is "private" for TaskWorkers only, not for external clients 23 | * of the TaskMgr (i.e. ClientWorkers). Provides worker scheduling and work 24 | * assignment methods. Also defines a task control structure for ready 25 | * and running tasks. 26 | */ 27 | class ITaskMgr { 28 | public: 29 | typedef std::chrono::system_clock::time_point timepoint_t; 30 | typedef std::chrono::system_clock::duration duration_t; 31 | 32 | struct Task { 33 | Task(const std::string &a_id) 34 | : task_id(a_id), cancel(false), retry_count(0) {} 35 | 36 | ~Task() {} 37 | 38 | std::string task_id; 39 | bool cancel; 40 | uint32_t retry_count; 41 | timepoint_t retry_time; 42 | timepoint_t retry_fail_time; 43 | }; 44 | 45 | virtual std::unique_ptr<Task> getNextTask(ITaskWorker *a_worker) = 0; 46 | virtual bool retryTask(std::unique_ptr<Task> a_task, 47 | LogContext log_context) = 0; 48 | virtual void newTasks(const libjson::Value &a_tasks, 49 | LogContext log_context) = 0; 50 | }; 51 | 52 | } // namespace Core 53 | } // namespace SDMS 54 | 55 | #endif 56 | -------------------------------------------------------------------------------- /core/server/ITaskWorker.hpp: -------------------------------------------------------------------------------- 1 | #ifndef ITASKWORKER_HPP 2 | #define ITASKWORKER_HPP 3 | #pragma once 4 | 5 | // Standard includes 6 | #include <condition_variable> 7 | #include <cstdint> 8 | 9 | namespace SDMS { 10 | namespace Core { 11 | 12 | /** 13 | * @brief Provides control structure per worker needed by TaskMgr 14 | * 15 | * The next/prev attributes are for the worker pool linked list. The 'run' flag 16 | * tells the manager whether the worker is in the pool (run == true means not in 17 | * the pool; run == false indicates a spurious wake).
-------------------------------------------------------------------------------- /core/server/ITaskWorker.hpp: -------------------------------------------------------------------------------- 1 | #ifndef ITASKWORKER_HPP 2 | #define ITASKWORKER_HPP 3 | #pragma once 4 | 5 | // Standard includes 6 | #include <condition_variable> 7 | #include <cstdint> 8 | 9 | namespace SDMS { 10 | namespace Core { 11 | 12 | /** 13 | * @brief Provides the control structure per worker needed by TaskMgr 14 | * 15 | * Next/prev attribs are for the worker pool linked list. The 'run' flag tells the mgr if 16 | * the worker is in the pool or not (run == true means not in pool, run == 17 | * false means a spurious wake). 18 | */ 19 | class ITaskWorker { 20 | public: 21 | ITaskWorker(uint32_t a_id, LogContext log_context) 22 | : m_id(a_id), m_run(false), m_next(0), m_log_context(log_context) {} 23 | 24 | virtual ~ITaskWorker() {} 25 | 26 | inline uint32_t id() const { return m_id; } 27 | 28 | private: 29 | uint32_t m_id; 30 | bool m_run; 31 | ITaskWorker *m_next; 32 | LogContext m_log_context; 33 | // ITaskWorker * m_prev; 34 | std::condition_variable m_cvar; 35 | 36 | friend class TaskMgr; 37 | }; 38 | 39 | } // namespace Core 40 | } // namespace SDMS 41 | 42 | #endif 43 |
-------------------------------------------------------------------------------- /core/server/PublicKeyTypes.hpp: -------------------------------------------------------------------------------- 1 | 2 | #ifndef PUBLICKEYTYPES_HPP 3 | #define PUBLICKEYTYPES_HPP 4 | #pragma once 5 | 6 | // Standard includes 7 | #include <cstddef> 8 | 9 | namespace SDMS { 10 | namespace Core { 11 | 12 | enum class PublicKeyType { TRANSIENT, SESSION, PERSISTENT }; 13 | 14 | struct PublicKeyTypesClassHash { 15 | template <typename T> std::size_t operator()(T t) const { 16 | return static_cast<std::size_t>(t); 17 | } 18 | }; 19 | 20 | } // namespace Core 21 | } // namespace SDMS 22 | #endif // PUBLICKEYTYPES_HPP 23 |
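PublicKeyTypesClassHash exists because, before the C++14 library fix for LWG defect 2148, std::hash had no specializations for enum classes; supplying a hash explicitly lets PublicKeyType serve as an unordered-container key. A minimal illustrative sketch, not a repository file:

// Illustrative sketch only -- not part of the repository.
#include <string>
#include <unordered_map>

#include "PublicKeyTypes.hpp"

using namespace SDMS::Core;

// The third template argument supplies the hash for the enum-class key.
std::unordered_map<PublicKeyType, std::string, PublicKeyTypesClassHash>
    key_type_names = {{PublicKeyType::TRANSIENT, "transient"},
                      {PublicKeyType::SESSION, "session"},
                      {PublicKeyType::PERSISTENT, "persistent"}};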
"https://db/sdms/blah"; 17 | std::string db_user = "greatestone"; 18 | std::string db_pass = "1234"; 19 | 20 | AuthMap auth_map(active_transient_key_time, active_session_key_time, db_url, 21 | db_user, db_pass); 22 | 23 | BOOST_TEST(auth_map.size(PublicKeyType::TRANSIENT) == 0); 24 | std::string new_pub_key = "ugh"; 25 | std::string user_id = "u/bob"; 26 | auth_map.addKey(PublicKeyType::TRANSIENT, new_pub_key, user_id); 27 | BOOST_TEST(auth_map.size(PublicKeyType::TRANSIENT) == 1); 28 | 29 | BOOST_TEST(auth_map.hasKey(PublicKeyType::TRANSIENT, new_pub_key)); 30 | BOOST_TEST(auth_map.hasKey(PublicKeyType::SESSION, new_pub_key) == false); 31 | BOOST_TEST(auth_map.hasKey(PublicKeyType::PERSISTENT, new_pub_key) == false); 32 | 33 | BOOST_TEST(auth_map.getUID(PublicKeyType::TRANSIENT, new_pub_key) == user_id); 34 | } 35 | 36 | BOOST_AUTO_TEST_SUITE_END() 37 | -------------------------------------------------------------------------------- /doc_source/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | # Copy doc_source/source tree into build directory 4 | 5 | # THis should be done during make stage 6 | file( COPY ${CMAKE_CURRENT_SOURCE_DIR}/source DESTINATION ${CMAKE_CURRENT_BINARY_DIR} ) 7 | file( MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/docs ) 8 | 9 | add_custom_target( docs ) 10 | add_dependencies( docs pydatafed ) 11 | 12 | # Build CLI command reference w/ datafed CLI and place in local doc source directory 13 | # Then run sphinx-build to make doc html 14 | 15 | add_custom_command( TARGET docs POST_BUILD 16 | COMMAND echo "Preparing docs build directories" 17 | COMMAND rm -rf ${CMAKE_SOURCE_DIR}/docs 18 | COMMAND mkdir ${CMAKE_SOURCE_DIR}/docs 19 | COMMAND mkdir -p ${CMAKE_CURRENT_BINARY_DIR}/source/_generated 20 | COMMAND echo "Generating API docs" 21 | COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_BINARY_DIR}/python/datafed_pkg/" ${CMAKE_BINARY_DIR}/python/datafed_pkg/scripts/datafed gendoc > ${CMAKE_CURRENT_BINARY_DIR}/source/_generated/cli_python_cmd_ref.rst 22 | COMMAND echo "Generating docs HTML" 23 | COMMAND sphinx-build -b html ${CMAKE_CURRENT_BINARY_DIR}/source ${CMAKE_SOURCE_DIR}/docs 24 | COMMAND touch ${CMAKE_SOURCE_DIR}/docs/.nojekyll ) 25 | -------------------------------------------------------------------------------- /doc_source/source/_static/css/custom.css: -------------------------------------------------------------------------------- 1 | .table-no-hscroll td{ 2 | white-space: normal!important; 3 | } -------------------------------------------------------------------------------- /doc_source/source/_static/data_lifecycle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/data_lifecycle.png -------------------------------------------------------------------------------- /doc_source/source/_static/globus_endpoints/finding_endpoint_01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/globus_endpoints/finding_endpoint_01.png -------------------------------------------------------------------------------- /doc_source/source/_static/globus_endpoints/finding_endpoint_02.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/globus_endpoints/finding_endpoint_02.png -------------------------------------------------------------------------------- /doc_source/source/_static/globus_endpoints/finding_endpoint_03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/globus_endpoints/finding_endpoint_03.png -------------------------------------------------------------------------------- /doc_source/source/_static/papers_presentations/2019_CSCI.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/papers_presentations/2019_CSCI.pdf -------------------------------------------------------------------------------- /doc_source/source/_static/papers_presentations/2019_CSCI_slides.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/papers_presentations/2019_CSCI_slides.pdf -------------------------------------------------------------------------------- /doc_source/source/_static/papers_presentations/2020_SMC.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/papers_presentations/2020_SMC.pdf -------------------------------------------------------------------------------- /doc_source/source/_static/papers_presentations/2020_SMC_slides.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/papers_presentations/2020_SMC_slides.pdf -------------------------------------------------------------------------------- /doc_source/source/_static/papers_presentations/DataFed_General_Presentation.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/papers_presentations/DataFed_General_Presentation.pptx -------------------------------------------------------------------------------- /doc_source/source/_static/python_high_level/provenance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/python_high_level/provenance.png -------------------------------------------------------------------------------- /doc_source/source/_static/python_high_level/search_01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/python_high_level/search_01.png -------------------------------------------------------------------------------- /doc_source/source/_static/python_high_level/search_02.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/python_high_level/search_02.png
-------------------------------------------------------------------------------- /doc_source/source/_static/python_high_level/search_03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/python_high_level/search_03.png
-------------------------------------------------------------------------------- /doc_source/source/_static/simplified_architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/simplified_architecture.png
-------------------------------------------------------------------------------- /doc_source/source/_static/system_components.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/doc_source/source/_static/system_components.png
-------------------------------------------------------------------------------- /doc_source/source/dev/design.rst: -------------------------------------------------------------------------------- 1 | ===================== 2 | Architecture & Design 3 | ===================== 4 | 5 |
-------------------------------------------------------------------------------- /doc_source/source/dev/project.rst: -------------------------------------------------------------------------------- 1 | ================== 2 | Project Management 3 | ================== 4 | 5 | DataFed is an open-source project hosted on `GitHub <https://github.com/ORNL/DataFed>`_. Development is 6 | ongoing and the development team is based at `Oak Ridge National Laboratory <https://www.ornl.gov>`_ (ORNL) 7 | within the `Advanced Technologies Section `_ (ATS) 8 | of the `Oak Ridge Leadership Computing Facility <https://www.olcf.ornl.gov>`_ (OLCF). 9 | 10 | Personnel 11 | ========= 12 | 13 | - Olga Kuchar, Data Lifecycle and Scalable Workflows group leader within ATS. 14 | - Dale Stansberry, Sr. Software Developer - PI, architect, lead developer of the DataFed Project 15 | - Suhas Somnath, Computer Scientist - Scientific user requirements and Python interface development 16 | - Jessica Breet, Data Scientist - Scientific user requirements and system testing 17 |
-------------------------------------------------------------------------------- /doc_source/source/system/papers.rst: -------------------------------------------------------------------------------- 1 | ======================== 2 | Papers and Presentations 3 | ======================== 4 | 5 | `DataFed Overview Presentation <../_static/papers_presentations/DataFed_General_Presentation.pptx>`_ 6 | 7 | * 2019 - 6th Annual Conf.
on `Computational Science & Computational Intelligence `_ (CSCI'19) - introducing DataFed: 8 | 9 | * `CSCI 2019 Paper <../_static/papers_presentations/2019_CSCI.pdf>`_ 10 | * `CSCI 2019 Presentation slides <../_static/papers_presentations/2019_CSCI_slides.pdf>`_ 11 | * 2020 - `Smoky Mountains Conference `_ - End-to-end data management using DataFed: 12 | 13 | * `SMC 2020 Paper <../_static/papers_presentations/2020_SMC.pdf>`_ 14 | * `SMC 2020 Presentation video `_ 15 | * `SMC 2020 Presentation slides <../_static/papers_presentations/2020_SMC_slides.pdf>`_ 16 | -------------------------------------------------------------------------------- /doc_source/source/system/usecases.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Use Cases 3 | ========= 4 | 5 | Please see `this video `_ to 6 | learn more about how and where DataFed could be used for simulations, 7 | modelling, data analytics, measurements, and observational sciences. 8 | 9 | -------------------------------------------------------------------------------- /doc_source/source/user/cli/header.rst: -------------------------------------------------------------------------------- 1 | The DataFed command-line-interface (CLI) provides access to basic DataFed capabilities for both 2 | interactive use and non-interactive scripting from a command shell, and is the primary means of 3 | utilizing DataFed from within data, compute, and analytics environments. The DataFed 4 | CLI is provided via a Python 3 package and can be used on any operating system where Python 3 5 | is available. 6 | -------------------------------------------------------------------------------- /doc_source/source/user/cli/reference.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | Command Reference 3 | ================= 4 | 5 | .. include:: header.rst 6 | 7 | This document provides detailed information about the commands and sub-commands of the DataFed CLI. 8 | For general CLI usage information, see the CLI :doc:`/user/cli/guide` page. Also, before the DataFed 9 | CLI can be used it must be installed and properly configured - the CLI :doc:`/user/client/install` page 10 | provides details. 11 | 12 | .. 
include:: /_generated/cli_python_cmd_ref.rst 13 | -------------------------------------------------------------------------------- /docker/Dockerfile.runtime: -------------------------------------------------------------------------------- 1 | ARG DATAFED_DIR="/datafed" 2 | ARG DATAFED_INSTALL_PATH="/opt/datafed" 3 | ARG GCS_IMAGE="code.ornl.gov:4567/dlsw/datafed/gcs-ubuntu-focal" 4 | ARG BUILD_DIR="$DATAFED_DIR/source" 5 | ARG LIB_DIR="/usr/local/lib" 6 | ARG BASE_IMAGE="ubuntu:focal" 7 | 8 | FROM ${BASE_IMAGE} AS base 9 | 10 | SHELL ["/bin/bash", "-c"] 11 | ARG DATAFED_DIR 12 | ARG DATAFED_INSTALL_PATH 13 | ARG DATAFED_DEPENDENCIES_INSTALL_PATH 14 | ARG BUILD_DIR 15 | 16 | ENV BUILD_DIR="${BUILD_DIR}" 17 | ENV DATAFED_DIR="${DATAFED_DIR}" 18 | 19 | RUN echo $DATAFED_DIR 20 | 21 | # Create datafed user, prefer more secure login options than password 22 | # Recommended to mount ssh public key on run 23 | RUN adduser --disabled-password --gecos "" datafed 24 | 25 | COPY ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/ 26 | COPY ./scripts/copy_dependency.sh ${BUILD_DIR}/scripts/ 27 | RUN mkdir -p ${DATAFED_DIR} 28 | RUN mkdir -p /opt/datafed 29 | RUN mkdir -p /var/log/datafed 30 | RUN chown -R datafed:root /opt/datafed 31 | RUN chown -R datafed:root /var/log/datafed 32 | RUN chown -R datafed:root ${DATAFED_DIR} 33 | WORKDIR ${DATAFED_DIR} 34 | 35 | RUN apt update 36 | RUN apt install -y grep libcurl4 wget 37 | -------------------------------------------------------------------------------- /docs/_images/data_lifecycle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_images/data_lifecycle.png -------------------------------------------------------------------------------- /docs/_images/finding_endpoint_01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_images/finding_endpoint_01.png -------------------------------------------------------------------------------- /docs/_images/finding_endpoint_02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_images/finding_endpoint_02.png -------------------------------------------------------------------------------- /docs/_images/finding_endpoint_03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_images/finding_endpoint_03.png -------------------------------------------------------------------------------- /docs/_images/provenance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_images/provenance.png -------------------------------------------------------------------------------- /docs/_images/search_01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_images/search_01.png -------------------------------------------------------------------------------- /docs/_images/search_02.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_images/search_02.png -------------------------------------------------------------------------------- /docs/_images/search_03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_images/search_03.png -------------------------------------------------------------------------------- /docs/_images/simplified_architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_images/simplified_architecture.png -------------------------------------------------------------------------------- /docs/_images/system_components.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_images/system_components.png -------------------------------------------------------------------------------- /docs/_sources/autoapi/datafed/Connection/index.rst.txt: -------------------------------------------------------------------------------- 1 | :py:mod:`datafed.Connection` 2 | ============================ 3 | 4 | .. py:module:: datafed.Connection 5 | 6 | 7 | Module Contents 8 | --------------- 9 | 10 | Classes 11 | ~~~~~~~ 12 | 13 | .. autoapisummary:: 14 | 15 | datafed.Connection.Connection 16 | 17 | 18 | 19 | 20 | .. py:class:: Connection(server_host, server_port, server_pub_key, client_pub_key, client_priv_key, zmq_ctxt=None, log_level=logging.INFO) 21 | 22 | 23 | .. py:method:: __del__() 24 | 25 | 26 | .. py:method:: registerProtocol(msg_module) 27 | 28 | 29 | .. py:method:: recv(a_timeout=1000) 30 | 31 | 32 | .. py:method:: send(message, ctxt) 33 | 34 | 35 | .. py:method:: reset() 36 | 37 | 38 | .. py:method:: makeMessage(msg_name) 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /docs/_sources/autoapi/datafed/MessageLib/index.rst.txt: -------------------------------------------------------------------------------- 1 | :py:mod:`datafed.MessageLib` 2 | ============================ 3 | 4 | .. py:module:: datafed.MessageLib 5 | 6 | 7 | Module Contents 8 | --------------- 9 | 10 | Classes 11 | ~~~~~~~ 12 | 13 | .. autoapisummary:: 14 | 15 | datafed.MessageLib.API 16 | 17 | 18 | 19 | Functions 20 | ~~~~~~~~~ 21 | 22 | .. autoapisummary:: 23 | 24 | datafed.MessageLib.get_latest_version 25 | 26 | 27 | 28 | .. py:function:: get_latest_version(package_name) 29 | 30 | 31 | .. py:class:: API(server_host=None, server_port=None, server_pub_key_file=None, server_pub_key=None, client_pub_key_file=None, client_pub_key=None, client_priv_key_file=None, client_priv_key=None, client_token=None, manual_auth=None, **kwargs) 32 | 33 | 34 | .. py:method:: keysLoaded() 35 | 36 | 37 | .. py:method:: keysValid() 38 | 39 | 40 | .. py:method:: getAuthStatus() 41 | 42 | 43 | .. py:method:: manualAuthByPassword(uid, password) 44 | 45 | 46 | .. py:method:: manualAuthByToken(token) 47 | 48 | 49 | .. py:method:: logout() 50 | 51 | 52 | .. py:method:: getNackExceptionEnabled() 53 | 54 | 55 | .. py:method:: setNackExceptionEnabled(enabled) 56 | 57 | 58 | .. py:method:: setDefaultTimeout(timeout) 59 | 60 | 61 | .. py:method:: getDefaultTimeout() 62 | 63 | 64 | .. py:method:: getDailyMessage() 65 | 66 | 67 | .. 
py:method:: sendRecv(msg, timeout=None, nack_except=None) 68 | 69 | 70 | .. py:method:: send(msg) 71 | 72 | 73 | .. py:method:: recv(timeout=None, nack_except=None) 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /docs/_sources/autoapi/datafed/VERSION/index.rst.txt: -------------------------------------------------------------------------------- 1 | :py:mod:`datafed.VERSION` 2 | ========================= 3 | 4 | .. py:module:: datafed.VERSION 5 | 6 | 7 | Module Contents 8 | --------------- 9 | 10 | .. py:data:: __version__ 11 | :value: '2.0.3' 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /docs/_sources/autoapi/datafed/Version_pb2/index.rst.txt: -------------------------------------------------------------------------------- 1 | :py:mod:`datafed.Version_pb2` 2 | ============================= 3 | 4 | .. py:module:: datafed.Version_pb2 5 | 6 | .. autoapi-nested-parse:: 7 | 8 | Generated protocol buffer code. 9 | 10 | 11 | 12 | Module Contents 13 | --------------- 14 | 15 | .. py:data:: _sym_db 16 | 17 | 18 | 19 | .. py:data:: DESCRIPTOR 20 | 21 | 22 | 23 | .. py:data:: _VERSION 24 | 25 | 26 | 27 | .. py:data:: Version 28 | 29 | 30 | 31 | .. py:data:: DATAFED_RELEASE_YEAR 32 | :value: 2023 33 | 34 | 35 | 36 | .. py:data:: DATAFED_RELEASE_MONTH 37 | :value: 8 38 | 39 | 40 | 41 | .. py:data:: DATAFED_RELEASE_DAY 42 | :value: 21 43 | 44 | 45 | 46 | .. py:data:: DATAFED_RELEASE_HOUR 47 | :value: 10 48 | 49 | 50 | 51 | .. py:data:: DATAFED_RELEASE_MINUTE 52 | :value: 40 53 | 54 | 55 | 56 | .. py:data:: DATAFED_COMMON_PROTOCOL_API_MAJOR 57 | :value: 0 58 | 59 | 60 | 61 | .. py:data:: DATAFED_COMMON_PROTOCOL_API_MINOR 62 | :value: 0 63 | 64 | 65 | 66 | .. py:data:: DATAFED_COMMON_PROTOCOL_API_PATCH 67 | :value: 0 68 | 69 | 70 | 71 | -------------------------------------------------------------------------------- /docs/_sources/autoapi/datafed/index.rst.txt: -------------------------------------------------------------------------------- 1 | :py:mod:`datafed` 2 | ================= 3 | 4 | .. py:module:: datafed 5 | 6 | 7 | Submodules 8 | ---------- 9 | .. toctree:: 10 | :titlesonly: 11 | :maxdepth: 1 12 | 13 | CLI/index.rst 14 | CommandLib/index.rst 15 | Config/index.rst 16 | Connection/index.rst 17 | MessageLib/index.rst 18 | SDMS_Anon_pb2/index.rst 19 | SDMS_Auth_pb2/index.rst 20 | SDMS_pb2/index.rst 21 | VERSION/index.rst 22 | Version_pb2/index.rst 23 | 24 | 25 | Package Contents 26 | ---------------- 27 | 28 | .. py:data:: name 29 | :value: 'datafed' 30 | 31 | 32 | 33 | .. py:data:: version 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /docs/_sources/autoapi/index.rst.txt: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | This page contains auto-generated API reference documentation [#f1]_. 5 | 6 | .. toctree:: 7 | :titlesonly: 8 | 9 | /autoapi/datafed/index 10 | 11 | .. 
[#f1] Created with `sphinx-autoapi <https://github.com/readthedocs/sphinx-autoapi>`_
-------------------------------------------------------------------------------- /docs/_sources/dev/design.rst.txt: -------------------------------------------------------------------------------- 1 | ===================== 2 | Architecture & Design 3 | ===================== 4 | 5 |
-------------------------------------------------------------------------------- /docs/_sources/dev/project.rst.txt: -------------------------------------------------------------------------------- 1 | ================== 2 | Project Management 3 | ================== 4 | 5 | DataFed is an open-source project hosted on `GitHub <https://github.com/ORNL/DataFed>`_. Development is 6 | ongoing and the development team is based at `Oak Ridge National Laboratory <https://www.ornl.gov>`_ (ORNL) 7 | within the `Advanced Technologies Section `_ (ATS) 8 | of the `Oak Ridge Leadership Computing Facility <https://www.olcf.ornl.gov>`_ (OLCF). 9 | 10 | Personnel 11 | ========= 12 | 13 | - Olga Kuchar, Data Lifecycle and Scalable Workflows group leader within ATS. 14 | - Dale Stansberry, Sr. Software Developer - PI, architect, lead developer of the DataFed Project 15 | - Suhas Somnath, Computer Scientist - Scientific user requirements and Python interface development 16 | - Jessica Breet, Data Scientist - Scientific user requirements and system testing 17 |
-------------------------------------------------------------------------------- /docs/_sources/system/papers.rst.txt: -------------------------------------------------------------------------------- 1 | ======================== 2 | Papers and Presentations 3 | ======================== 4 | 5 | `DataFed Overview Presentation <../_static/papers_presentations/DataFed_General_Presentation.pptx>`_ 6 | 7 | * 2019 - 6th Annual Conf. on `Computational Science & Computational Intelligence `_ (CSCI'19) - introducing DataFed: 8 | 9 | * `CSCI 2019 Paper <../_static/papers_presentations/2019_CSCI.pdf>`_ 10 | * `CSCI 2019 Presentation slides <../_static/papers_presentations/2019_CSCI_slides.pdf>`_ 11 | * 2020 - `Smoky Mountains Conference `_ - End-to-end data management using DataFed: 12 | 13 | * `SMC 2020 Paper <../_static/papers_presentations/2020_SMC.pdf>`_ 14 | * `SMC 2020 Presentation video `_ 15 | * `SMC 2020 Presentation slides <../_static/papers_presentations/2020_SMC_slides.pdf>`_ 16 |
-------------------------------------------------------------------------------- /docs/_sources/system/usecases.rst.txt: -------------------------------------------------------------------------------- 1 | ========= 2 | Use Cases 3 | ========= 4 | 5 | Please see `this video `_ to 6 | learn more about how and where DataFed could be used for simulations, 7 | modelling, data analytics, measurements, and observational sciences. 8 | 9 |
-------------------------------------------------------------------------------- /docs/_sources/user/cli/header.rst.txt: -------------------------------------------------------------------------------- 1 | The DataFed command-line-interface (CLI) provides access to basic DataFed capabilities for both 2 | interactive use and non-interactive scripting from a command shell, and is the primary means of 3 | utilizing DataFed from within data, compute, and analytics environments. The DataFed 4 | CLI is provided via a Python 3 package and can be used on any operating system where Python 3 5 | is available.
6 | -------------------------------------------------------------------------------- /docs/_sources/user/cli/reference.rst.txt: -------------------------------------------------------------------------------- 1 | ================= 2 | Command Reference 3 | ================= 4 | 5 | .. include:: header.rst 6 | 7 | This document provides detailed information about the commands and sub-commands of the DataFed CLI. 8 | For general CLI usage information, see the CLI :doc:`/user/cli/guide` page. Also, before the DataFed 9 | CLI can be used it must be installed and properly configured - the CLI :doc:`/user/client/install` page 10 | provides details. 11 | 12 | .. include:: /_generated/cli_python_cmd_ref.rst 13 | -------------------------------------------------------------------------------- /docs/_static/css/custom.css: -------------------------------------------------------------------------------- 1 | .table-no-hscroll td{ 2 | white-space: normal!important; 3 | } -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/Roboto-Slab-Bold.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/Roboto-Slab-Regular.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.woff2: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/lato-bold-italic.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/lato-bold-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/lato-bold.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/lato-bold.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/lato-normal-italic.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/lato-normal-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/lato-normal.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/css/fonts/lato-normal.woff2 -------------------------------------------------------------------------------- /docs/_static/data_lifecycle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/data_lifecycle.png -------------------------------------------------------------------------------- /docs/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | var DOCUMENTATION_OPTIONS = { 2 | URL_ROOT: 
document.getElementById("documentation_options").getAttribute('data-url_root'), 3 | VERSION: '1.4', 4 | LANGUAGE: 'en', 5 | COLLAPSE_INDEX: false, 6 | BUILDER: 'html', 7 | FILE_SUFFIX: '.html', 8 | LINK_SUFFIX: '.html', 9 | HAS_SOURCE: true, 10 | SOURCELINK_SUFFIX: '.txt', 11 | NAVIGATION_WITH_KEYS: false, 12 | SHOW_SEARCH_SUMMARY: true, 13 | ENABLE_SEARCH_SHORTCUTS: true, 14 | }; -------------------------------------------------------------------------------- /docs/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/file.png -------------------------------------------------------------------------------- /docs/_static/globus_endpoints/finding_endpoint_01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/globus_endpoints/finding_endpoint_01.png -------------------------------------------------------------------------------- /docs/_static/globus_endpoints/finding_endpoint_02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/globus_endpoints/finding_endpoint_02.png -------------------------------------------------------------------------------- /docs/_static/globus_endpoints/finding_endpoint_03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/globus_endpoints/finding_endpoint_03.png -------------------------------------------------------------------------------- /docs/_static/graphviz.css: -------------------------------------------------------------------------------- 1 | /* 2 | * graphviz.css 3 | * ~~~~~~~~~~~~ 4 | * 5 | * Sphinx stylesheet -- graphviz extension. 6 | * 7 | * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 
9 | * 10 | */ 11 | 12 | img.graphviz { 13 | border: 0; 14 | max-width: 100%; 15 | } 16 | 17 | object.graphviz { 18 | max-width: 100%; 19 | } 20 | -------------------------------------------------------------------------------- /docs/_static/js/badge_only.js: -------------------------------------------------------------------------------- 1 | !function(e){var t={};function r(n){if(t[n])return t[n].exports;var o=t[n]={i:n,l:!1,exports:{}};return e[n].call(o.exports,o,o.exports,r),o.l=!0,o.exports}r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=4)}({4:function(e,t,r){}}); -------------------------------------------------------------------------------- /docs/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/minus.png -------------------------------------------------------------------------------- /docs/_static/papers_presentations/2019_CSCI.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/papers_presentations/2019_CSCI.pdf -------------------------------------------------------------------------------- /docs/_static/papers_presentations/2019_CSCI_slides.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/papers_presentations/2019_CSCI_slides.pdf -------------------------------------------------------------------------------- /docs/_static/papers_presentations/2020_SMC.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/papers_presentations/2020_SMC.pdf -------------------------------------------------------------------------------- /docs/_static/papers_presentations/2020_SMC_slides.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/papers_presentations/2020_SMC_slides.pdf -------------------------------------------------------------------------------- /docs/_static/papers_presentations/DataFed_General_Presentation.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/papers_presentations/DataFed_General_Presentation.pptx -------------------------------------------------------------------------------- /docs/_static/plus.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/plus.png
-------------------------------------------------------------------------------- /docs/_static/python_high_level/provenance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/python_high_level/provenance.png
-------------------------------------------------------------------------------- /docs/_static/python_high_level/search_01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/python_high_level/search_01.png
-------------------------------------------------------------------------------- /docs/_static/python_high_level/search_02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/python_high_level/search_02.png
-------------------------------------------------------------------------------- /docs/_static/python_high_level/search_03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/python_high_level/search_03.png
-------------------------------------------------------------------------------- /docs/_static/simplified_architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/simplified_architecture.png
-------------------------------------------------------------------------------- /docs/_static/system_components.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/_static/system_components.png
-------------------------------------------------------------------------------- /docs/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs/objects.inv
-------------------------------------------------------------------------------- /docs_other/README.md: -------------------------------------------------------------------------------- 1 | This directory contains DataFed-related documentation that is NOT part of the GitHub Pages documentation. Most of this documentation is for internal design/development purposes.
2 | -------------------------------------------------------------------------------- /docs_other/dev/design/Auth.proto Summary.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/Auth.proto Summary.xlsx -------------------------------------------------------------------------------- /docs_other/dev/design/CLI Commands Summary.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/CLI Commands Summary.xlsx -------------------------------------------------------------------------------- /docs_other/dev/design/SDMS Overview and Design.docx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/SDMS Overview and Design.docx -------------------------------------------------------------------------------- /docs_other/dev/design/data_life_cycle.odg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/data_life_cycle.odg -------------------------------------------------------------------------------- /docs_other/dev/design/schema.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/schema.jpg -------------------------------------------------------------------------------- /docs_other/dev/design/schema.odg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/schema.odg -------------------------------------------------------------------------------- /docs_other/dev/design/sdms_architecture_v2.odg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/sdms_architecture_v2.odg -------------------------------------------------------------------------------- /docs_other/dev/design/sdms_architecture_v3.odg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/sdms_architecture_v3.odg -------------------------------------------------------------------------------- /docs_other/dev/design/sdms_spec.odt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/sdms_spec.odt -------------------------------------------------------------------------------- /docs_other/dev/design/sdms_sys_diagram.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/sdms_sys_diagram.jpg -------------------------------------------------------------------------------- /docs_other/dev/design/sdms_sys_diagram.odg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/docs_other/dev/design/sdms_sys_diagram.odg -------------------------------------------------------------------------------- /facility/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | include_directories(${CMAKE_BINARY_DIR}/common) 4 | 5 | #add_subdirectory (server) 6 | add_subdirectory (client) 7 | -------------------------------------------------------------------------------- /facility/README.md: -------------------------------------------------------------------------------- 1 | Note: This directory contains old "facility" server code that is no longer used; however, 2 | it should be retained because the facility server code was written using HTTPS via ASIO and 3 | is the model for how the Core server should be refactored. Once the Core is fixed, this 4 | directory can be removed from this branch. -------------------------------------------------------------------------------- /facility/client/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | #add_subdirectory (cli) 4 | #add_subdirectory (lib) 5 | -------------------------------------------------------------------------------- /facility/client/cli/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | file( GLOB Sources "*.cpp" ) 4 | 5 | add_executable( sdms ${Sources} ) 6 | add_dependencies( sdms fclient ) 7 | 8 | target_link_libraries( sdms fclient -lboost_program_options -lreadline ) 9 | 10 | target_include_directories( sdms PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ) 11 | -------------------------------------------------------------------------------- /facility/client/lib/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | file( GLOB Sources "*.cpp" ) 4 | 5 | add_library( fclient STATIC ${Sources} ) 6 | add_dependencies( fclient common ) 7 | target_link_libraries( fclient common -lprotobuf -lpthread -lzmq -lboost_system -lboost_filesystem ) 8 | 9 | target_include_directories( fclient INTERFACE ${CMAKE_CURRENT_SOURCE_DIR} ) 10 | target_include_directories( fclient PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ) 11 | 12 | add_subdirectory( test ) 13 | -------------------------------------------------------------------------------- /facility/client/lib/test/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | file( GLOB Sources "*.cpp" ) 4 | 5 | add_executable( fctest ${Sources} ) 6 | add_dependencies( fctest fclient ) 7 | 8 | target_link_libraries( fctest fclient ) 9 | 10 | target_include_directories( fctest PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ) 11 | -------------------------------------------------------------------------------- /facility/server/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | file( GLOB Sources "*.cpp" ) 4 | 5 | add_executable( sdmsfd ${Sources} ) 6 | add_dependencies( sdmsfd common ) 7 | target_link_libraries( sdmsfd common -lprotobuf -lpthread -lcrypto -lssl -lcurl -lboost_system 
-lboost_filesystem ) 8 | 9 | target_include_directories( sdmsfd PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ) 10 |
-------------------------------------------------------------------------------- /facility/server/CentralStorage.cpp: -------------------------------------------------------------------------------- 1 | #include "CentralStorage.hpp" 2 | #include <boost/filesystem.hpp> 3 | #include <iostream> 4 | 5 | using namespace std; 6 | 7 | namespace SDMS { 8 | 9 | CentralStorage::CentralStorage() {} 10 | 11 | CentralStorage::~CentralStorage() {} 12 | 13 | void CentralStorage::dataDelete(const std::string &a_filename) { 14 | boost::system::error_code ec; 15 | boost::filesystem::path data_path(a_filename); 16 | 17 | boost::filesystem::remove(data_path, ec); 18 | if (ec) { 19 | cerr << "Delete " << a_filename << " error\n"; 20 | } 21 | 22 | } 23 | 24 | bool CentralStorage::dataGetSize(const std::string &a_filename, 25 | size_t &a_size) { 26 | boost::system::error_code ec; 27 | boost::filesystem::path data_path(a_filename); 28 | 29 | if (boost::filesystem::exists(data_path, ec)) { 30 | a_size = boost::filesystem::file_size(data_path); 31 | 32 | return true; 33 | } 34 | 35 | cout << "dataGetSize: file " << a_filename << " does not exist\n"; 36 | return false; 37 | } 38 | 39 | } // namespace SDMS
-------------------------------------------------------------------------------- /facility/server/CentralStorage.hpp: -------------------------------------------------------------------------------- 1 | #ifndef CENTRALSTORAGE_HPP 2 | #define CENTRALSTORAGE_HPP 3 | 4 | #include <string> 5 | 6 | namespace SDMS { 7 | 8 | class CentralStorage { 9 | public: 10 | CentralStorage(); 11 | ~CentralStorage(); 12 | 13 | void dataDelete(const std::string &a_filename); 14 | bool dataGetSize(const std::string &a_filename, size_t &a_size); 15 | }; 16 | 17 | } // namespace SDMS 18 | 19 | #endif 20 |
-------------------------------------------------------------------------------- /python/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | add_subdirectory( datafed_pkg )
-------------------------------------------------------------------------------- /python/datafed_pkg/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | # Copy README file to python build dir 4 | configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/README ${CMAKE_CURRENT_BINARY_DIR} COPYONLY ) 5 | 6 | # The files should exist in both the source and binary directories; they are needed in the 7 | # source directory to run the tests and in the binary directory to install the package 8 | 9 | # Create Release Version 10 | configure_file( 11 | "${CMAKE_CURRENT_SOURCE_DIR}/datafed/VERSION.py.in" 12 | "${CMAKE_CURRENT_BINARY_DIR}/datafed/VERSION.py" 13 | @ONLY) 14 | 15 | configure_file( 16 | "${CMAKE_CURRENT_SOURCE_DIR}/datafed/VERSION.py.in" 17 | "${CMAKE_CURRENT_SOURCE_DIR}/datafed/VERSION.py" 18 | @ONLY) 19 | 20 | # Copy package files to build dir 21 | file( GLOB PkgFiles ${CMAKE_CURRENT_SOURCE_DIR}/*.py ) 22 | foreach(file ${PkgFiles}) 23 | configure_file(${file} ${CMAKE_CURRENT_BINARY_DIR} COPYONLY ) 24 | endforeach() 25 | 26 | # Make scripts build dir 27 | file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/scripts) 28 | 29 | # Copy scripts to build dir 30 | file( GLOB ScriptFiles ${CMAKE_CURRENT_SOURCE_DIR}/scripts/* ) 31 | foreach(file ${ScriptFiles}) 32 | configure_file(${file} ${CMAKE_CURRENT_BINARY_DIR}/scripts
COPYONLY ) 33 | endforeach() 34 | 35 | add_subdirectory( datafed ) 36 | 37 | add_custom_target( pydatafed ) 38 | add_dependencies( pydatafed pydatafed_src) 39 |
-------------------------------------------------------------------------------- /python/datafed_pkg/README: -------------------------------------------------------------------------------- 1 | 2 | DataFed is a federated scientific data management system developed by Oak Ridge National Laboratory to facilitate FAIR data practices within supported experimental, compute, and analytics environments. The DataFed Python package provides both a command-line-interface (CLI) and a programming API for interacting with DataFed services. 3 |
-------------------------------------------------------------------------------- /python/datafed_pkg/datafed/VERSION.py.in: -------------------------------------------------------------------------------- 1 | __version__="@DATAFED_PYTHON_CLIENT_MAJOR@.@DATAFED_PYTHON_CLIENT_MINOR@.@DATAFED_PYTHON_CLIENT_PATCH@@DATAFED_PYTHON_CLIENT_RELEASE_TYPE@@DATAFED_PYTHON_CLIENT_PRE_RELEASE_IDENTIFER@" 2 |
-------------------------------------------------------------------------------- /python/datafed_pkg/datafed/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # @mainpage DataFed Python Client Package 3 | # 4 | # @section Introduction 5 | # 6 | # This is the source-level documentation for the DataFed Python Client package. 7 | # For DataFed command-line interface (CLI) documentation, please refer to the 8 | # DataFed wiki located on the DataFed project page: 9 | # https://github.com/ORNL/DataFed/wiki 10 | # 11 | # @subsection Package Modules and Use Cases 12 | # 13 | # The DataFed client Python package consists of the DataFed command-line client 14 | # interface script (datafed), a high-level programming interface module 15 | # (CommandLib), a low-level message-oriented programming module (MessageLib), and 16 | # two support modules (Connection and Config). 17 | # 18 | # The "datafed" CLI, by default, supports human-interactive use, but it is also 19 | # applicable to general scripting by utilizing the optional JSON output mode. 20 | # For Python-specific scripting, the "CommandLib" module can be used to access 21 | # the CLI-style text-based command interface, but with results returned directly 22 | # as Python objects instead of JSON text. If greater control or features are 23 | # needed, Python applications may use the "MessageLib" module to access the low- 24 | # level message-oriented programming interface of DataFed. 25 | # 26 | from . import VERSION 27 | 28 | name = "datafed" 29 | 30 | version = VERSION.__version__ 31 |
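As a minimal illustration of the high-level CommandLib interface described in the docstring above (not a repository file; it mirrors the init()/command() usage pattern in the package's own tests, e.g. python/datafed_pkg/test/Test_ObjectReturn.py, and assumes a locally configured client; the "ls" command string is just an example):

# Illustrative sketch only -- not part of the repository.
import datafed.CommandLib

# Authenticate and connect using the locally configured DataFed settings.
datafed.CommandLib.init()

# Issue a CLI-style command; the result comes back as a Python object,
# not JSON text.
reply = datafed.CommandLib.command("ls")
print(reply)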
import VERSION 27 | 28 | name = "datafed" 29 | 30 | version = VERSION.__version__ 31 | -------------------------------------------------------------------------------- /python/datafed_pkg/requirements.txt: -------------------------------------------------------------------------------- 1 | protobuf>=4.21.1 2 | pyzmq>=16 3 | wget>=3 4 | requests>=2 5 | click>=7 6 | prompt_toolkit>=2 7 | -------------------------------------------------------------------------------- /python/datafed_pkg/scripts/datafed: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | DataFed CLI 5 | """ 6 | 7 | import datafed.CLI 8 | 9 | datafed.CLI.run() 10 | -------------------------------------------------------------------------------- /python/datafed_pkg/setup.py: -------------------------------------------------------------------------------- 1 | from datafed.VERSION import __version__ 2 | import setuptools 3 | from os import path 4 | import os 5 | 6 | # read the contents of README file 7 | this_directory = path.abspath(path.dirname(__file__)) 8 | with open(path.join(this_directory, "README"), encoding="utf-8") as f: 9 | long_description = f.read() 10 | 11 | 12 | with open("requirements.txt", "r") as f: 13 | install_requires = [line.strip() for line in f] 14 | 15 | setuptools.setup( 16 | name=os.getenv("DATAFED_PYPI_REPO", "datafed"), 17 | version=__version__, 18 | author="Dale Stansberry, Joshua Brown", 19 | author_email="stansberrydv@ornl.gov, brownjs@ornl.gov", 20 | description="DataFed CLI and API", 21 | long_description=long_description, 22 | long_description_content_type="text/markdown", 23 | url="https://github.com/ORNL/DataFed", 24 | packages=setuptools.find_packages(), 25 | setup_requires=["setuptools"], 26 | install_requires=install_requires, 27 | entry_points={"console_scripts": ["datafed = datafed.CLI:run"]}, 28 | classifiers=[ 29 | "Programming Language :: Python :: 3", 30 | "License :: OSI Approved :: MIT License", 31 | "Operating System :: OS Independent", 32 | ], 33 | ) 34 | -------------------------------------------------------------------------------- /python/datafed_pkg/test/Test_ObjectReturn.py: -------------------------------------------------------------------------------- 1 | import datafed 2 | import datafed.CommandLib 3 | import datafed.Config 4 | 5 | 6 | def main(): 7 | datafed.Config.API() # generate default configs 8 | # Config module will try to find things and send to MessageLib init 9 | datafed.CommandLib.init() 10 | for i in range(10): 11 | returned = datafed.CommandLib.command("data get y4 -fp ../../../URL_gets") 12 | # returned1 = datafed.CommandLib.command('more 2') 13 | print(returned) 14 | # print(returned1) 15 | 16 | 17 | if __name__ == "__main__": 18 | main() 19 | -------------------------------------------------------------------------------- /python/datafed_pkg/test/security.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import getpass 4 | import datafed.CommandLib 5 | import datafed.SDMS_Auth_pb2 as auth 6 | 7 | 8 | opts = {} 9 | 10 | opts["manual_auth"] = True 11 | uid = input("User ID: ") 12 | password = getpass.getpass(prompt="Password: ") 13 | 14 | api = datafed.CommandLib.API(opts) 15 | 16 | api.loginByPassword(uid, password) 17 | 18 | msg = auth.UserCreateRequest() 19 | msg.uid = "newuser" 20 | msg.password = "temptemp" 21 | msg.name = "New User" 22 | msg.email = "NewUser@foo.bar" 23 | msg.secret = "dfgdfg" 24 | 25 | 
print("sending") 26 | 27 | reply, mt = api._mapi.sendRecv(msg) 28 | 29 | print("got", reply) 30 | -------------------------------------------------------------------------------- /python/docker/Dockerfile.python-client-base.ubuntu: -------------------------------------------------------------------------------- 1 | FROM ubuntu:focal as build 2 | 3 | ARG DATAFED_DIR="/datafed" 4 | ARG BUILD_DIR="/datafed/source" 5 | ARG DATAFED_DEPENDENCIES_INSTALL_PATH="/opt/datafed/dependencies" 6 | 7 | ENV DATAFED_DEPENDENCIES_INSTALL_PATH="${DATAFED_DEPENDENCIES_INSTALL_PATH}" 8 | 9 | RUN mkdir -p ${BUILD_DIR} 10 | 11 | WORKDIR ${BUILD_DIR} 12 | 13 | COPY ./scripts/dependency_install_functions.sh ${BUILD_DIR}/scripts/ 14 | COPY ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/ 15 | COPY ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/ 16 | COPY ./scripts/utils.sh ${BUILD_DIR}/scripts/ 17 | COPY ./scripts/install_python_client_dependencies.sh ${BUILD_DIR}/scripts/ 18 | 19 | RUN echo "#!/bin/bash\n\$@" > /usr/bin/sudo && chmod +x /usr/bin/sudo 20 | RUN ${BUILD_DIR}/scripts/generate_datafed.sh 21 | RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC ${BUILD_DIR}/scripts/install_python_client_dependencies.sh 22 | -------------------------------------------------------------------------------- /python/docker/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | 8 | # Entry point file expects that the directory where the DataFed source file 9 | # is passed in as the first argument 10 | 11 | echo "SOURCE BUILD DIR $BUILD_DIR" 12 | 13 | # Here we will rerun datafed configuration script to create a new set of 14 | # default variables that are useful for setting up the DataFed Python client 15 | # ini file 16 | 17 | "${BUILD_DIR}/scripts/generate_datafed.sh" 18 | source "${BUILD_DIR}/config/datafed.sh" 19 | 20 | mkdir -p "/home/datafed/.datafed" 21 | 22 | # At this point we will create an ini file 23 | cat << EOF > "/home/datafed/.datafed/datafed-client.ini" 24 | [server] 25 | host = ${DATAFED_DOMAIN} 26 | port = ${DATAFED_SERVER_PORT} 27 | config_dir = /home/datafed/.datafed 28 | 29 | [client] 30 | config_dir = /home/datafed/.datafed 31 | 32 | EOF 33 | 34 | if [ "$#" -eq 0 ]; then 35 | echo "No arguments were passed, running bash" 36 | exec "/home/datafed/.local/bin/datafed --cfg /home/datafed/.datafed/datafed-client.ini" 37 | fi 38 | 39 | "$@" 40 | 41 | -------------------------------------------------------------------------------- /python/pyproto_add_msg_idx.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | Protobuf processing to generate message ID maps for C++, Python, and JS 5 | """ 6 | 7 | import sys 8 | import re 9 | 10 | print("args", sys.argv) 11 | 12 | pf_in = open(sys.argv[1], "r") 13 | pf_out = open(sys.argv[2], "a") 14 | 15 | while True: 16 | line = pf_in.readline() 17 | if len(line) == 0: 18 | sys.exit(-1) 19 | parts = re.split(r"\W+", line.strip()) 20 | # print( line, parts ) 21 | try: 22 | idx = parts.index("ID") 23 | # print( "ID:", parts[idx+1] ) 24 | msg_type = int(parts[idx + 1]) << 8 25 | break 26 | except BaseException: 27 | pass 28 | 29 | # msg_type = 0 30 | 31 | by_type = [] 32 | idx = 0 33 | 34 | pf_out.write("\n_msg_name_to_type = {\n") 35 | 36 | while True: 37 | line = pf_in.readline() 38 | if len(line) == 0: 39 | break 40 | 41 | if 
line.startswith("message "): 42 | msg_name = line.split()[1] 43 | by_type.append(msg_name) 44 | # print( msg_name, msg_type ) 45 | if idx > 0: 46 | pf_out.write(",\n") 47 | pf_out.write(" '{}' : {}".format(msg_name, msg_type | idx)) 48 | idx += 1 49 | 50 | pf_out.write("\n}\n\n_msg_type_to_name = {\n") 51 | 52 | idx = 0 53 | for name in by_type: 54 | if idx > 0: 55 | pf_out.write(",\n") 56 | pf_out.write(" {} : '{}'".format(msg_type | idx, name)) 57 | idx += 1 58 | 59 | pf_out.write("\n}\n") 60 | 61 | sys.exit(0) 62 | -------------------------------------------------------------------------------- /repository/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | include_directories(${CMAKE_BINARY_DIR}/common) 4 | 5 | if( BUILD_REPO_SERVER ) 6 | add_subdirectory( server ) 7 | endif() 8 | 9 | if( BUILD_AUTHZ ) 10 | add_subdirectory( gridftp ) 11 | endif() 12 | #add_subdirectory( filesys EXCLUDE_FROM_ALL ) 13 | -------------------------------------------------------------------------------- /repository/filesys/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | file( GLOB Sources "*.cpp" ) 4 | 5 | add_executable( datafed-fs ${Sources} ) 6 | add_dependencies( datafed-fs common ) 7 | target_link_libraries( datafed-fs common -lprotobuf -lpthread -lzmq -lfuse -lboost_system -lboost_program_options ) 8 | 9 | target_include_directories( datafed-fs PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ) 10 | -------------------------------------------------------------------------------- /repository/gridftp/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | if(GLOBUS_VERSION EQUAL 5) 3 | message("-- Building Globus 5") 4 | add_subdirectory( globus5 ) 5 | endif() 6 | -------------------------------------------------------------------------------- /repository/gridftp/globus5/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | add_subdirectory( authz ) 2 | -------------------------------------------------------------------------------- /repository/gridftp/globus5/authz/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | add_subdirectory( source ) 2 | -------------------------------------------------------------------------------- /repository/gridftp/globus5/authz/source/AuthzWorker.h: -------------------------------------------------------------------------------- 1 | #ifndef AUTHZWORKER_H 2 | #define AUTHZWORKER_H 3 | 4 | #include "Config.h" 5 | 6 | const char *getVersion(); 7 | const char *getAPIVersion(); 8 | const char *getReleaseVersion(); 9 | int checkAuthorization(char *client_id, char *object, char *action, 10 | struct Config *config); 11 | 12 | #endif 13 | -------------------------------------------------------------------------------- /repository/gridftp/globus5/authz/source/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | configure_file( 4 | "${CMAKE_CURRENT_SOURCE_DIR}/Version.hpp.in" 5 | "${CMAKE_CURRENT_SOURCE_DIR}/Version.hpp" 6 | @ONLY) 7 | file( GLOB Sources "*.cpp" "*.c" ) 8 | add_library( datafed-authz SHARED ${Sources} ) 9 | 10 | target_compile_options(datafed-authz PRIVATE -fPIC) 11 | SET_TARGET_PROPERTIES( datafed-authz PROPERTIES LINKER_LANGUAGE CXX ) 12 | 
set_target_properties(datafed-authz PROPERTIES POSITION_INDEPENDENT_CODE ON SOVERSION ${DATAFED_AUTHZ_MAJOR} VERSION ${DATAFED_AUTHZ_MAJOR}.${DATAFED_AUTHZ_MINOR}.${DATAFED_AUTHZ_PATCH}) 13 | add_dependencies( datafed-authz common) 14 | if ( NOT BUILD_AUTHZ_WITH_SYSLOG ) 15 | target_compile_definitions(datafed-authz PRIVATE DONT_USE_SYSLOG) 16 | endif() 17 | 18 | if(BUILD_SHARED_LIBS) 19 | target_link_libraries( datafed-authz common Threads::Threads libzmq 20 | datafed-protobuf ${DATAFED_BOOST_LIBRARIES} ) 21 | else() 22 | target_link_libraries( datafed-authz common Threads::Threads libzmq-static 23 | datafed-protobuf ${DATAFED_BOOST_LIBRARIES} ) 24 | endif() 25 | target_include_directories( datafed-authz PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ) 26 | -------------------------------------------------------------------------------- /repository/gridftp/globus5/authz/source/Config.h: -------------------------------------------------------------------------------- 1 | #ifndef CONFIG_H 2 | #define CONFIG_H 3 | 4 | #define MAX_ADDR_LEN 200 5 | #define MAX_ID_LEN 80 6 | #define MAX_PATH_LEN 500 7 | #define MAX_KEY_LEN 100 8 | 9 | // Standard includes 10 | #include <stddef.h> 11 | 12 | struct Config { 13 | char repo_id[MAX_ID_LEN]; 14 | char server_addr[MAX_ADDR_LEN]; 15 | char pub_key[MAX_KEY_LEN]; 16 | char priv_key[MAX_KEY_LEN]; 17 | char server_key[MAX_KEY_LEN]; 18 | char user[MAX_ID_LEN]; 19 | char test_path[MAX_PATH_LEN]; 20 | char log_path[MAX_PATH_LEN]; 21 | char globus_collection_path[MAX_PATH_LEN]; 22 | size_t timeout; 23 | }; 24 | 25 | #endif 26 | -------------------------------------------------------------------------------- /repository/gridftp/globus5/authz/source/README.md: -------------------------------------------------------------------------------- 1 | This directory contains source code for a custom GridFTP authorization module 2 | for DataFed. The build generates a shared library, libdatafed_authz.so, 3 | that must be deployed and configured for use in the gsi-authz.conf file under 4 | /etc/grid-security. An example gsi-authz.conf file is provided here. 5 | 6 | GridFTP will delegate authorization to this library, which will utilize DataFed 7 | services to provide fine-grained access control to files stored in central DataFed 8 | storage. This library does not provide authentication - this can be done with 9 | the standard gridmap file or a custom solution.
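For illustration, a minimal sketch of what the gsi-authz.conf entries look like (these mirror the hooks generated by scripts/generate_gsi-authz_config.sh elsewhere in this repository; the $DATAFED_INSTALL_PATH prefix is an install-time setting, not a fixed path):

GLOBUS_GSI_AUTHZ_SYSTEM_INIT $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_authz_init
GLOBUS_GSI_AUTHORIZE_ASYNC $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_authz_authorize_async
globus_mapping $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_map_user

Each line binds a Globus authorization callout to a symbol exported by this library; see the generator script for the complete set of bindings.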
10 | -------------------------------------------------------------------------------- /repository/gridftp/globus5/authz/source/Version.hpp.in: -------------------------------------------------------------------------------- 1 | #ifndef AUTHZ_VERSION_HPP 2 | #define AUTHZ_VERSION_HPP 3 | #pragma once 4 | 5 | namespace SDMS { 6 | namespace authz { 7 | namespace version { 8 | constexpr int MAJOR = @DATAFED_AUTHZ_MAJOR@; 9 | constexpr int MINOR = @DATAFED_AUTHZ_MINOR@; 10 | constexpr int PATCH = @DATAFED_AUTHZ_PATCH@; 11 | } 12 | } 13 | } 14 | 15 | #endif // AUTHZ_VERSION_HPP 16 | 17 | -------------------------------------------------------------------------------- /repository/server/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | file( GLOB Sources "*.cpp" ) 4 | 5 | configure_file( 6 | "${CMAKE_CURRENT_SOURCE_DIR}/Version.hpp.in" 7 | "${CMAKE_CURRENT_SOURCE_DIR}/Version.hpp" 8 | @ONLY) 9 | 10 | add_executable( datafed-repo ${Sources} ) 11 | add_dependencies( datafed-repo common ) 12 | if(BUILD_SHARED_LIBS) 13 | target_link_libraries( datafed-repo common Threads::Threads libzmq datafed-protobuf ${DATAFED_BOOST_LIBRARIES} ) 14 | else() 15 | target_link_libraries( datafed-repo common Threads::Threads libzmq-static datafed-protobuf ${DATAFED_BOOST_LIBRARIES} ) 16 | endif() 17 | 18 | 19 | target_include_directories( datafed-repo PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ) 20 | -------------------------------------------------------------------------------- /repository/server/Config.hpp: -------------------------------------------------------------------------------- 1 | #ifndef CONFIG_HPP 2 | #define CONFIG_HPP 3 | #pragma once 4 | 5 | // Common public includes 6 | #include "common/ICredentials.hpp" 7 | 8 | // Proto includes 9 | #include "common/SDMS.pb.h" 10 | 11 | // Standard includes 12 | #include <cstdint> 13 | #include <memory> 14 | #include <string> 15 | 16 | namespace SDMS { 17 | namespace Repo { 18 | 19 | struct Config { 20 | static Config &getInstance() { 21 | static Config inst; 22 | return inst; 23 | } 24 | 25 | Config() {} 26 | 27 | std::string globus_collection_path; 28 | std::string core_server = "tcp://datafed.ornl.gov:7512"; 29 | std::string cred_dir = "/opt/datafed/keys"; 30 | uint16_t port = 9000; 31 | uint32_t timeout = 5; 32 | uint32_t num_req_worker_threads = 4; 33 | 34 | std::unique_ptr<ICredentials> sec_ctx; 35 | // MsgComm::SecurityContext sec_ctx; 36 | }; 37 | 38 | } // namespace Repo 39 | } // namespace SDMS 40 | 41 | #endif 42 | -------------------------------------------------------------------------------- /repository/server/RepoServer.hpp: -------------------------------------------------------------------------------- 1 | #ifndef REPOSERVER_HPP 2 | #define REPOSERVER_HPP 3 | #pragma once 4 | 5 | // Local private includes 6 | #include "Config.hpp" 7 | #include "RequestWorker.hpp" 8 | 9 | // Local public includes 10 | #include "common/DynaLog.hpp" 11 | 12 | // Standard includes 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | 21 | namespace SDMS { 22 | namespace Repo { 23 | 24 | /** @brief RepoServer connects with CoreServer and starts request processing 25 | * workers 26 | * 27 | * The RepoServer class deals with configuration, setting up the external 28 | * interface, and starting request processing workers. An internal 0MQ proxy 29 | * thread is used to pass received messages to any available worker via an 30 | * in-proc 0MQ queue.
Once the server is started, it will not exit (unless a critical 31 | * error causes an abort). 32 | */ 33 | 34 | class Server { 35 | public: 36 | Server(LogContext log_context); 37 | virtual ~Server(); 38 | 39 | Server &operator=(const Server &) = delete; 40 | 41 | void run(); 42 | 43 | private: 44 | void loadKeys(); 45 | void checkServerVersion(); 46 | void ioSecure(); 47 | 48 | Config &m_config; 49 | std::thread *m_io_thread; 50 | std::string m_pub_key; 51 | std::string m_priv_key; 52 | std::string m_core_key; 53 | std::vector m_req_workers; 54 | LogContext m_log_context; 55 | }; 56 | 57 | } // namespace Repo 58 | } // namespace SDMS 59 | 60 | #endif 61 | -------------------------------------------------------------------------------- /repository/server/Version.hpp.in: -------------------------------------------------------------------------------- 1 | #ifndef REPO_VERSION_HPP 2 | #define REPO_VERSION_HPP 3 | #pragma once 4 | 5 | namespace SDMS { 6 | namespace repository { 7 | namespace version { 8 | constexpr int MAJOR = @DATAFED_REPO_MAJOR@; 9 | constexpr int MINOR = @DATAFED_REPO_MINOR@; 10 | constexpr int PATCH = @DATAFED_REPO_PATCH@; 11 | } 12 | } 13 | } 14 | 15 | #endif // REPO_VERSION_HPP 16 | 17 | -------------------------------------------------------------------------------- /scripts/ci_generate_pypirc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Note: do not include "-u" in the set options; we will be checking for unbound 4 | # variables, and if that option is set this script will throw an error when there is none 5 | set -ef -o pipefail 6 | 7 | SCRIPT=$(realpath "$0") 8 | SOURCE=$(dirname "$SCRIPT") 9 | PROJECT_ROOT=$(realpath ${SOURCE}/..) 10 | 11 | if [ -z "${TWINE_CONFIG_FILE}" ] 12 | then 13 | local_TWINE_CONFIG_FILE=".pypirc" 14 | else 15 | local_TWINE_CONFIG_FILE=$(printenv TWINE_CONFIG_FILE) 16 | fi 17 | 18 | 19 | 20 | if [ -z "${DATAFED_PYPI_REPO_TOKEN}" ] 21 | then 22 | local_DATAFED_PYPI_REPO_TOKEN="" 23 | else 24 | local_DATAFED_PYPI_REPO_TOKEN=$(printenv DATAFED_PYPI_REPO_TOKEN) 25 | fi 26 | 27 | cat << EOF > "$local_TWINE_CONFIG_FILE" 28 | [distutils] 29 | index-servers = pypi 30 | 31 | [pypi] 32 | username = __token__ 33 | password = $local_DATAFED_PYPI_REPO_TOKEN 34 | EOF 35 | -------------------------------------------------------------------------------- /scripts/ci_purge_images.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # The purpose of this script is to prevent the images built on a 4 | # VM from taking up too much storage. Here we set the number of GB that 5 | # we will allow to be stored on the VM, after which we will start deleting 6 | # the oldest ones 7 | 8 | # Max allowed size of all images in GB 9 | if [ -z "${DATAFED_CI_PURGE_THRESHOLD}" ] 10 | then 11 | local_DATAFED_CI_PURGE_THRESHOLD="15" 12 | else 13 | local_DATAFED_CI_PURGE_THRESHOLD=$(printenv DATAFED_CI_PURGE_THRESHOLD) 14 | fi 15 | 16 | 17 | get_size_of_all_images_in_GB() { 18 | declare -g total_image_size_number="0" 19 | docker_size_stats=$(docker system df --format "{{.Type}} {{.Size}}") 20 | echo "${docker_size_stats}" 21 | total_image_size=$(echo "${docker_size_stats}" | head -1 | awk '{print $2}' ) 22 | echo "Image size is $total_image_size" 23 | if [ !
-z "${total_image_size}" ] 24 | then 25 | if [ "${total_image_size: -2}" = "GB" ] 26 | then 27 | total_image_size_number="${total_image_size%??}" 28 | total_image_size_number="${total_image_size%%.*}" 29 | fi 30 | fi 31 | } 32 | 33 | purge_oldest_image() { 34 | oldest_image_id=$(docker image list --format "{{.ID}}" | tail -n1) 35 | docker image rm "$oldest_image_id" -f 36 | } 37 | 38 | get_size_of_all_images_in_GB 39 | 40 | while [ "$total_image_size_number" -gt "$local_DATAFED_CI_PURGE_THRESHOLD" ] 41 | do 42 | purge_oldest_image 43 | get_size_of_all_images_in_GB 44 | done 45 | 46 | -------------------------------------------------------------------------------- /scripts/ci_setup_web_certs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ ! -e "$DATAFED_WEB_CERT_PATH" ] || [ ! -e "$DATAFED_WEB_KEY_PATH" ] 4 | then 5 | if [ -e "$DATAFED_WEB_CERT_PATH" ] 6 | then 7 | rm "${DATAFED_WEB_CERT_PATH}" 8 | fi 9 | if [ -e "$DATAFED_WEB_KEY_PATH" ] 10 | then 11 | rm "${DATAFED_WEB_KEY_PATH}" 12 | fi 13 | if [ -e "$DATAFED_WEB_CSR_PATH" ] 14 | then 15 | rm "${DATAFED_WEB_CSR_PATH}" 16 | fi 17 | openssl genrsa -out "$DATAFED_WEB_KEY_PATH" 2048 18 | openssl req -new -key "$DATAFED_WEB_KEY_PATH" \ 19 | -out "${DATAFED_WEB_CSR_PATH}" \ 20 | -subj "/C=US/ST=TN/L=Oak Ridge/O=ORNL/OU=DLT/CN=${DI_DATAFED_DOMAIN}" 21 | openssl x509 -req -days 3650 \ 22 | -in "${DATAFED_WEB_CSR_PATH}" \ 23 | -signkey "$DATAFED_WEB_KEY_PATH" \ 24 | -out "$DATAFED_WEB_CERT_PATH" 25 | fi 26 | 27 | -------------------------------------------------------------------------------- /scripts/dependency_versions.sh: -------------------------------------------------------------------------------- 1 | 2 | # Versions 3 | DATAFED_CMAKE_VERSION="3.17.5" 4 | DATAFED_GLOBUS_VERSION="6.0.31-1" 5 | DATAFED_JSON_SCHEMA_VALIDATOR_VERSION="2.1.0" 6 | DATAFED_NLOHMANN_JSON_VERSION="3.10.2" 7 | DATAFED_LIBSODIUM_VERSION="1.0.18" 8 | # this version is different from above due to the fact libsodium names its shared library diffrently than the actual api version 9 | DATAFED_LIB_LIBSODIUM_VERSION="23.3.0" 10 | DATAFED_LIBZMQ_VERSION="4.3.4" 11 | # this version is different from above due to the fact libzmq names its shared library diffrently than the actual api version 12 | DATAFED_LIB_LIBZMQ_VERSION="5.2.4" 13 | DATAFED_LIB_ZMQCPP_VERSION="4.10.0" 14 | DATAFED_NVM_VERSION="v0.39.7" 15 | DATAFED_NODE_VERSION="v14.21.3" 16 | DATAFED_NVM_VERSION="v0.39.7" 17 | # Git tag 18 | DATAFED_PROTOBUF_VERSION="25.2" 19 | # Dynamic library extension .so.{DATAFED_FULL_PROTOBUF_VERSION} 20 | DATAFED_DYNAMIC_LIBRARY_PROTOBUF_VERSION="25.2.0" 21 | # Full version 22 | DATAFED_FULL_PROTOBUF_VERSION="4.25.2" 23 | DATAFED_LIBCURL="7.88.1" 24 | DATAFED_LIBCURL_URL="https://github.com/curl/curl/releases/download/curl-7_88_1/curl-7.88.1.tar.gz" 25 | DATAFED_OPENSSL="1.1.1" 26 | DATAFED_OPENSSL_COMMIT="e04bd34" 27 | DATAFED_BOOST="1.71.0" 28 | DATAFED_ZLIB_VERSION="1.3.1" 29 | DATAFED_ZLIB_URL="https://zlib.net/zlib-1.3.1.tar.gz" 30 | DATAFED_GCS_SUBMODULE_VERSION="v2.8.0" 31 | -------------------------------------------------------------------------------- /scripts/export_dependency_version.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Assumes sourcing not running 3 | SCRIPT=$( realpath "${BASH_SOURCE[0]}" ) 4 | SCRIPT_DIR=$( dirname "${SCRIPT}" ) 5 | PROJECT_ROOT=$(realpath "${SCRIPT_DIR}/..") 6 | . 
"${PROJECT_ROOT}/scripts/utils.sh" 7 | # WARNING 8 | # For this script to work it must be called with source 9 | # source export_dependency_version_numbers 10 | export_dependency_version_numbers 11 | -------------------------------------------------------------------------------- /scripts/generate_core_service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Not do not include "-u" in set option, we will be checking for unbound variables 4 | # if that option is set then this script will throw an error when there is none 5 | set -ef -o pipefail 6 | 7 | SCRIPT=$(realpath "$0") 8 | SOURCE=$(dirname "$SCRIPT") 9 | PROJECT_ROOT=$(realpath ${SOURCE}/..) 10 | source ${PROJECT_ROOT}/config/datafed.sh 11 | 12 | PATH_TO_SERVICE_DIR=$(realpath "$SOURCE/../services") 13 | SERVICE_FILE_NAME="datafed-core.service" 14 | 15 | local_DATAFED_LOG_PATH="" 16 | 17 | if [ -z "${DATAFED_DEFAULT_LOG_PATH}" ] 18 | then 19 | local_DATAFED_LOG_PATH="/var/log/datafed" 20 | else 21 | local_DATAFED_LOG_PATH=$(printenv DATAFED_DEFAULT_LOG_PATH) 22 | fi 23 | 24 | DATAFED_CORE_LOG_FILE_PATH="/$local_DATAFED_LOG_PATH/datafed-core.log" 25 | 26 | # Remove double forward slashes 27 | DATAFED_CORE_LOG_FILE_PATH=$( echo "$DATAFED_CORE_LOG_FILE_PATH" | sed 's/\/\//\//g') 28 | 29 | if [ ! -d "$PATH_TO_SERVICE_DIR" ] 30 | then 31 | mkdir -p $PATH_TO_SERVICE_DIR 32 | fi 33 | 34 | cat << EOF > "$PATH_TO_SERVICE_DIR/$SERVICE_FILE_NAME" 35 | [Unit] 36 | Description=DataFed Core Server 37 | Requires=arangodb3.service 38 | After=arangodb3.service 39 | [Service] 40 | PIDFile=/tmp/datafed-core.pid 41 | Restart=always 42 | KillSignal=SIGQUIT 43 | WorkingDirectory=${DATAFED_INSTALL_PATH}/core 44 | ExecStart=${DATAFED_INSTALL_PATH}/core/datafed-core --cfg ${DATAFED_INSTALL_PATH}/core/datafed-core.cfg 45 | StandardOutput=append:${DATAFED_CORE_LOG_FILE_PATH} 46 | StandardError=append:${DATAFED_CORE_LOG_FILE_PATH} 47 | User=${DATAFED_CORE_USER} 48 | [Install] 49 | WantedBy=multi-user.target 50 | EOF 51 | -------------------------------------------------------------------------------- /scripts/generate_gsi-authz_config.sh: -------------------------------------------------------------------------------- 1 | #!/bin/env bash 2 | # Cannot run with -u because we check for unbound variables 3 | # and the script will exit prematurely if '-u' is set 4 | set -ef -o pipefail 5 | 6 | SCRIPT=$(realpath "$0") 7 | SOURCE=$(dirname "$SCRIPT") 8 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 9 | source "${PROJECT_ROOT}/config/datafed.sh" 10 | 11 | PATH_TO_CONFIG_DIR=$(realpath "$SOURCE/../config") 12 | CONFIG_FILE_NAME="gsi-authz.conf" 13 | 14 | cat << EOF > "$PATH_TO_CONFIG_DIR/$CONFIG_FILE_NAME" 15 | GLOBUS_GSI_AUTHZ_SYSTEM_INIT $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_authz_init 16 | GLOBUS_GSI_AUTHZ_SYSTEM_DESTROY $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_authz_destroy 17 | GLOBUS_GSI_AUTHZ_HANDLE_INIT $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_authz_handle_init 18 | GLOBUS_GSI_AUTHORIZE_ASYNC $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_authz_authorize_async 19 | GLOBUS_GSI_AUTHZ_CANCEL $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_authz_cancel 20 | GLOBUS_GSI_AUTHZ_HANDLE_DESTROY $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_authz_handle_destroy 21 | GLOBUS_GSI_GET_AUTHORIZATION_IDENTITY $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_authz_identify 22 | globus_mapping $DATAFED_INSTALL_PATH/authz/libdatafed-authz gsi_map_user 23 | EOF 24 | 25 | echo 26 | echo 
"gsi-conf file is being placed here: $PATH_TO_CONFIG_DIR/${CONFIG_FILE_NAME}" 27 | echo 28 | echo "Contents are:" 29 | echo 30 | cat "$PATH_TO_CONFIG_DIR/${CONFIG_FILE_NAME}" 31 | 32 | 33 | -------------------------------------------------------------------------------- /scripts/generate_repo_service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/env bash 2 | 3 | # Not do not include "-u" in set option, we will be checking for unbound variables 4 | # if that option is set then this script will throw an error when there is none 5 | set -ef -o pipefail 6 | 7 | SCRIPT=$(realpath "$0") 8 | SOURCE=$(dirname "$SCRIPT") 9 | PROJECT_ROOT=$(realpath ${SOURCE}/..) 10 | source ${PROJECT_ROOT}/config/datafed.sh 11 | 12 | PATH_TO_SERVICE_DIR=$(realpath "$SOURCE/../services") 13 | SERVICE_FILE_NAME="datafed-repo.service" 14 | 15 | local_DATAFED_LOG_PATH="" 16 | 17 | if [ -z "${DATAFED_DEFAULT_LOG_PATH}" ] 18 | then 19 | local_DATAFED_LOG_PATH="/var/log/datafed" 20 | else 21 | local_DATAFED_LOG_PATH=$(printenv DATAFED_DEFAULT_LOG_PATH) 22 | fi 23 | 24 | DATAFED_REPO_LOG_FILE_PATH="/$local_DATAFED_LOG_PATH/datafed-repo.log" 25 | 26 | # Remove double forward slashes 27 | DATAFED_REPO_LOG_FILE_PATH=$( echo "$DATAFED_REPO_LOG_FILE_PATH" | sed 's/\/\//\//g') 28 | 29 | if [ ! -d "$PATH_TO_SERVICE_DIR" ] 30 | then 31 | mkdir -p $PATH_TO_SERVICE_DIR 32 | fi 33 | cat << EOF > "$PATH_TO_SERVICE_DIR/$SERVICE_FILE_NAME" 34 | [Unit] 35 | Description=DataFed Repo Server 36 | [Service] 37 | PIDFile=/tmp/datafed-repo.pid 38 | Restart=always 39 | KillSignal=SIGQUIT 40 | WorkingDirectory=${DATAFED_INSTALL_PATH}/repo 41 | ExecStart=${DATAFED_INSTALL_PATH}/repo/datafed-repo --cfg ${DATAFED_INSTALL_PATH}/repo/datafed-repo.cfg tcp://${DATAFED_DOMAIN}:${DATAFED_SERVER_PORT} 42 | User=${DATAFED_GLOBUS_REPO_USER} 43 | StandardOutput=append:${DATAFED_REPO_LOG_FILE_PATH} 44 | StandardError=append:${DATAFED_REPO_LOG_FILE_PATH} 45 | [Install] 46 | WantedBy=multi-user.target 47 | EOF 48 | -------------------------------------------------------------------------------- /scripts/globus/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/scripts/globus/__init__.py -------------------------------------------------------------------------------- /scripts/globus/clean_globus.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SCRIPT=$(realpath "$0") 4 | SOURCE=$(dirname "$SCRIPT") 5 | PROJECT_ROOT=$(realpath "${SOURCE}/../../") 6 | source "${PROJECT_ROOT}/config/datafed.sh" 7 | 8 | if [ -z "$DATAFED_GCS_ROOT_NAME" ] 9 | then 10 | echo "DATAFED_GCS_ROOT_NAME is not defined cannot run $SCRIPT" 11 | exit 1 12 | fi 13 | 14 | GATEWAY_NAME="${DATAFED_GCS_ROOT_NAME} Storage Gateway" 15 | COLLECTION_NAME="${DATAFED_GCS_ROOT_NAME} Collection Mapped" 16 | 17 | # Removing the mapped collection will also remove any guest collections 18 | 19 | collection_line=$( globus-connect-server collection list | grep "$COLLECTION_NAME" ) 20 | if [ ! 
-z "$collection_line" ] 21 | then 22 | uuid_of_collection=$( globus-connect-server collection list | grep "$COLLECTION_NAME" | awk '{ print $1 }') 23 | 24 | 25 | globus-connect-server collection update \ 26 | "$uuid_of_collection" \ 27 | --no-delete-protected 28 | 29 | globus-connect-server collection delete "$uuid_of_collection" 30 | fi 31 | 32 | gateway_line=$(globus-connect-server storage-gateway list | grep "$GATEWAY_NAME" ) 33 | if [ ! -z "$gateway_line" ] 34 | then 35 | 36 | spaces_in_name=$(echo "$GATEWAY_NAME" | awk '{print gsub("[ \t]",""); exit}') 37 | columns=$(( $spaces_in_name + 3 )) 38 | uuid_of_storage_gateway=$( globus-connect-server storage-gateway list | grep "$GATEWAY_NAME" | awk -v col=$columns '{ print $col }') 39 | 40 | # Check if it already exists 41 | globus-connect-server storage-gateway delete "${uuid_of_storage_gateway}" 42 | fi 43 | -------------------------------------------------------------------------------- /scripts/install_authz.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 8 | source "${PROJECT_ROOT}/config/datafed.sh" 9 | 10 | # Make sure paths exist 11 | mkdir -p "${DATAFED_INSTALL_PATH}/keys" 12 | mkdir -p "${DATAFED_DEFAULT_LOG_PATH}" 13 | 14 | # Copy configuration files 15 | cp "$PROJECT_ROOT/config/gsi-authz.conf" /etc/grid-security 16 | cp "$PROJECT_ROOT/config/datafed-authz.cfg" "${DATAFED_INSTALL_PATH}/authz" 17 | 18 | # Ensure permissions are correctly set on authz library 19 | chmod 755 "${DATAFED_INSTALL_PATH}/authz/libdatafed-authz.so" 20 | 21 | # Update GridFTP so it knows about env variable 22 | PATTERN1="(" 23 | PATTERN2=";" 24 | PATH_GRIDFTP_SERVICE=$(sudo systemctl status globus-gridftp-server.service | grep "loaded (" | awk '{print $3}' | sed -e "s/.*$PATTERN1\(.*\)$PATTERN2.*/\1/") 25 | echo "$PATH_GRIDFTP_SERVICE" 26 | -------------------------------------------------------------------------------- /scripts/install_client_dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Exit on error 4 | set -e 5 | 6 | SCRIPT=$(realpath "$0") 7 | SOURCE=$(dirname "$SCRIPT") 8 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 9 | 10 | source "${PROJECT_ROOT}/scripts/utils.sh" 11 | source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh" 12 | 13 | packages=("python3-pkg-resources" "python3-pip" "pkg-config" "python3-venv") 14 | 15 | sudo_command 16 | # This script will install all of the dependencies needed by DataFed 1.0 17 | "$SUDO_CMD" apt-get update 18 | "$SUDO_CMD" dpkg --configure -a 19 | "$SUDO_CMD" apt-get install -y "${packages[@]}" 20 | 21 | init_python 22 | source "${DATAFED_PYTHON_ENV}/bin/activate" 23 | python3 -m pip install -r "${PROJECT_ROOT}/python/datafed_pkg/requirements.txt" 24 | 25 | install_protobuf 26 | cd ~ 27 | 28 | -------------------------------------------------------------------------------- /scripts/install_core.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 8 | source "${PROJECT_ROOT}/config/datafed.sh" 9 | 10 | # Make sure paths exist 11 | mkdir -p "${DATAFED_INSTALL_PATH}/core" 12 | mkdir -p "${DATAFED_INSTALL_PATH}/keys" 13 | mkdir -p "${DATAFED_DEFAULT_LOG_PATH}" 14 | 
15 | # Copy configuration files 16 | cp "$PROJECT_ROOT/config/datafed-core.cfg" "${DATAFED_INSTALL_PATH}/core" 17 | 18 | 19 | # Move keys to Default:/opt/datafed/keys if they do not already exist 20 | if [ ! -f "${DATAFED_INSTALL_PATH}/keys/datafed-core-key.priv" ] 21 | then 22 | # Generate keys 23 | echo "No keys for core server were detected in ${DATAFED_INSTALL_PATH}/keys/ creating them" 24 | "${DATAFED_INSTALL_PATH}/core/datafed-core" --gen-keys 25 | mv datafed-core-key.pub "${DATAFED_INSTALL_PATH}/keys/" 26 | mv datafed-core-key.priv "${DATAFED_INSTALL_PATH}/keys/" 27 | fi 28 | -------------------------------------------------------------------------------- /scripts/install_core_service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 8 | source "${PROJECT_ROOT}/config/datafed.sh" 9 | 10 | # Copy services 11 | cp "$PROJECT_ROOT/services/datafed-core.service" /etc/systemd/system 12 | -------------------------------------------------------------------------------- /scripts/install_docker_dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Exit on error 4 | set -e 5 | 6 | # This script will install all of the dependencies needed by DataFed 1.0 7 | sudo apt-get update 8 | sudo dpkg --configure -a 9 | 10 | sudo apt-get install \ 11 | ca-certificates \ 12 | curl \ 13 | gnupg \ 14 | lsb-release 15 | 16 | sudo mkdir -p /etc/apt/keyrings 17 | curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg 18 | 19 | echo \ 20 | "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \ 21 | $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null 22 | 23 | sudo apt-get update 24 | sudo apt-get install docker-ce docker-ce-cli containerd.io docker-compose-plugin 25 | -------------------------------------------------------------------------------- /scripts/install_docs_dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Exit on error 4 | set -e 5 | 6 | SCRIPT=$(realpath "$0") 7 | SOURCE=$(dirname "$SCRIPT") 8 | PROJECT_ROOT=$(realpath ${SOURCE}/..) 
9 | 10 | source "${PROJECT_ROOT}/scripts/utils.sh" 11 | source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh" 12 | 13 | packages=("g++" "gcc" "make" "python3-pkg-resources" "python3-pip" "pkg-config" "python3-venv") 14 | externals=("cmake" "protobuf") 15 | pip_packages=("setuptools" "sphinx" "sphinx-rtd-theme" "sphinx-autoapi") 16 | 17 | local_UNIFY=false 18 | 19 | if [ $# -eq 1 ]; then 20 | case "$1" in 21 | -h|--help) 22 | # If -h or --help is provided, print help 23 | echo "Usage: $0 [-h|--help] [unify]" 24 | ;; 25 | unify) 26 | # If 'unify' is provided, print the packages 27 | # The extra space is necessary to not conflict with the other install scripts 28 | echo -n "${packages[@]} " >> "$apt_file_path" 29 | echo -n "${pip_packages[@]} " >> "$pip_file_path" 30 | echo -n "${externals[@]} " >> "$ext_file_path" 31 | local_UNIFY=true 32 | ;; 33 | *) 34 | echo "Invalid Argument" 35 | ;; 36 | esac 37 | fi 38 | 39 | 40 | if [[ $local_UNIFY = false ]]; then 41 | sudo_command 42 | "$SUDO_CMD" apt-get update 43 | "$SUDO_CMD" dpkg --configure -a 44 | "$SUDO_CMD" apt-get install -y "${packages[@]}" 45 | init_python 46 | source "${DATAFED_PYTHON_ENV}/bin/activate" 47 | python3 -m pip install --upgrade pip 48 | python3 -m pip install "${pip_packages[@]}" 49 | 50 | for ext in "${externals[@]}"; do 51 | install_dep_by_name "$ext" 52 | done 53 | fi 54 | 55 | -------------------------------------------------------------------------------- /scripts/install_gcs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 8 | source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh" 9 | 10 | install_gcs 11 | -------------------------------------------------------------------------------- /scripts/install_python_client_dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Exit on error 4 | set -e 5 | 6 | SCRIPT=$(realpath "$0") 7 | SOURCE=$(dirname "$SCRIPT") 8 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 9 | 10 | source "${PROJECT_ROOT}/scripts/utils.sh" 11 | source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh" 12 | 13 | sudo_command 14 | 15 | # This script will install all of the dependencies needed by DataFed 1.0 16 | "$SUDO_CMD" apt-get update 17 | "$SUDO_CMD" dpkg --configure -a 18 | "$SUDO_CMD" apt-get install -y libtool build-essential g++ gcc make libboost-all-dev \ 19 | pkg-config autoconf automake unzip libcurl4-openssl-dev wget \ 20 | rapidjson-dev libkrb5-dev git python3-pkg-resources python3-pip python3-venv libssl-dev 21 | 22 | cd ~ 23 | install_cmake 24 | cd ~ 25 | 26 | # Install cmake 3.17 27 | 28 | init_python 29 | source "${DATAFED_PYTHON_ENV}/bin/activate" 30 | python3 -m pip install --upgrade pip 31 | python3 -m pip install setuptools 32 | 33 | install_protobuf 34 | cd ~ 35 | 36 | 37 | -------------------------------------------------------------------------------- /scripts/install_repo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 8 | source "${PROJECT_ROOT}/config/datafed.sh" 9 | 10 | # Make sure paths exist 11 | mkdir -p "${DATAFED_INSTALL_PATH}/repo" 12 | mkdir -p "${DATAFED_INSTALL_PATH}/keys" 13 | mkdir -p 
"${DATAFED_DEFAULT_LOG_PATH}" 14 | 15 | # Copy configuration files 16 | cp "$PROJECT_ROOT/config/datafed-repo.cfg" "${DATAFED_INSTALL_PATH}/repo" 17 | 18 | # Generate keys only if they do not exist 19 | if [ ! -f "${DATAFED_INSTALL_PATH}/keys/datafed-repo-key.priv" ] 20 | then 21 | "${DATAFED_INSTALL_PATH}/repo/datafed-repo" --gen-keys --cred-dir "${DATAFED_INSTALL_PATH}/keys" 22 | fi 23 | -------------------------------------------------------------------------------- /scripts/install_repo_service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 8 | source "${PROJECT_ROOT}/config/datafed.sh" 9 | 10 | # Copy services 11 | cp "$PROJECT_ROOT/services/datafed-repo.service" /etc/systemd/system 12 | -------------------------------------------------------------------------------- /scripts/install_ws_service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 8 | source "${PROJECT_ROOT}/config/datafed.sh" 9 | 10 | # Copy services 11 | cp "$PROJECT_ROOT/services/datafed-ws.service" /etc/systemd/system 12 | -------------------------------------------------------------------------------- /scripts/run_arango_service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath ${SOURCE}/..) 8 | 9 | systemctl_exists=$( which systemctl ) 10 | 11 | if [[ ! -z $systemctl_exists ]] 12 | then 13 | sudo systemctl daemon-reload 14 | 15 | # Turn off exit - on non zero exit code for the below command, we don't want 16 | # it to exit if arangodb is not reported as active. 17 | set +e 18 | arango_status=$(systemctl is-active arangodb3.service) 19 | set -e 20 | if [ ! "active" = "$arango_status" ] 21 | then 22 | sudo systemctl restart arangodb3.service 23 | fi 24 | 25 | arango_status=$(systemctl is-active arangodb3.service) 26 | if [ ! "active" = "$arango_status" ] 27 | then 28 | echo "ERROR something is wrong arangodb3.service is not active" 29 | exit 1 30 | fi 31 | # Enable services on reboot 32 | sudo systemctl enable arangodb3.service 33 | else 34 | echo "Not starting systemctl service because did not find systemctl." 35 | fi 36 | -------------------------------------------------------------------------------- /scripts/run_core_service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 8 | source "${PROJECT_ROOT}/config/datafed.sh" 9 | 10 | systemctl_exists=$( which systemctl ) 11 | 12 | if [[ ! -z $systemctl_exists ]] 13 | then 14 | sudo systemctl daemon-reload 15 | 16 | echo "The ArangoDB service should be up and running before you use this command" 17 | sudo systemctl restart datafed-core.service 18 | sudo systemctl status datafed-core.service 19 | 20 | # Enable services on reboot 21 | sudo systemctl enable datafed-core.service 22 | else 23 | echo "Not starting systemctl service because did not find systemctl." 
24 | fi 25 | -------------------------------------------------------------------------------- /scripts/run_repo_service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath ${SOURCE}/..) 8 | source ${PROJECT_ROOT}/config/datafed.sh 9 | 10 | systemctl_exists=$( which systemctl ) 11 | if [[ ! -z $systemctl_exists ]] 12 | then 13 | sudo systemctl daemon-reload 14 | 15 | echo "The Globus service should be installed before you use this command" 16 | if [ ! -f "${DATAFED_INSTALL_PATH}/keys/datafed-core-key.pub" ] 17 | then 18 | echo "Missing ${DATAFED_INSTALL_PATH}/keys/datafed-core-key.pub you will not be able to run the repo service until the public key is provided" 19 | else 20 | sudo systemctl restart datafed-repo.service 21 | sudo systemctl status datafed-repo.service 22 | fi 23 | 24 | # Enable services on reboot 25 | sudo systemctl enable datafed-repo.service 26 | else 27 | echo "Not starting systemctl service because did not find systemctl." 28 | fi 29 | -------------------------------------------------------------------------------- /scripts/run_ws_service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euf -o pipefail 4 | 5 | SCRIPT=$(realpath "$0") 6 | SOURCE=$(dirname "$SCRIPT") 7 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 8 | source "${PROJECT_ROOT}/config/datafed.sh" 9 | 10 | systemctl_exists=$( which systemctl ) 11 | 12 | if [[ ! -z $systemctl_exists ]] 13 | then 14 | sudo systemctl daemon-reload 15 | 16 | echo "The ArangoDB service and core service should be up and running before you use this command" 17 | sudo systemctl restart datafed-ws.service 18 | sudo systemctl status datafed-ws.service 19 | 20 | # Enable services on reboot 21 | sudo systemctl enable datafed-ws.service 22 | else 23 | echo "Not starting systemctl service because did not find systemctl." 
24 | fi 25 | -------------------------------------------------------------------------------- /scripts/uninstall_all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SCRIPT=$(realpath "$0") 4 | SOURCE=$(dirname "$SCRIPT") 5 | PROJECT_ROOT=$(realpath "${SOURCE}/..") 6 | source "${PROJECT_ROOT}/config/datafed.sh" 7 | 8 | # Will remove datafed components, with the exception of 9 | # the certificates because we can only call lego so many times 10 | rm -rf "${DATAFED_INSTALL_PATH}/core" 11 | rm -rf "${DATAFED_INSTALL_PATH}/web" 12 | rm -rf "${DATAFED_INSTALL_PATH}/repo" 13 | rm -rf "${DATAFED_INSTALL_PATH}/authz" 14 | 15 | rm -f /etc/systemd/system/datafed* 16 | rm -f /etc/grid-security/gsi-authz.conf 17 | # If the path is overwritten and the value that is not found in datafed.sh 18 | # is used to install a particular component then this will not suffice 19 | rm -rf "${DATAFED_DEFAULT_LOG_PATH}" 20 | 21 | "${PROJECT_ROOT}/scripts/clear_db.sh" 22 | -------------------------------------------------------------------------------- /setup: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #export ZMQ_INC_DIR=/usr/include 4 | #export ZMQ_LIB_DIR=/usr/lib/x86_64-linux-gnu 5 | export PYTHONPATH=/home/cades/dvstans/DataFed/active/python/datafed_pkg/ 6 | -------------------------------------------------------------------------------- /setup_condo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | module load PE-gnu/3.0 4 | module load boost 5 | module load protobuf 6 | module load cmake 7 | 8 | export BOOST_LIB=/software/dev_tools/swtree/cs400_centos7.2_pe2016-08/boost/1.67.0/centos7.2_gnu5.3.0/lib 9 | export BOOST_INC=/software/dev_tools/swtree/cs400_centos7.2_pe2016-08/boost/1.67.0/centos7.2_gnu5.3.0/include 10 | export ZMQ_INC_DIR=/software/dev_tools/swtree/cs400_centos7.5_pe2018/zeromq/4.2.3/centos7.5_gnu8.1.0/include 11 | export ZMQ_LIB_DIR=/software/dev_tools/swtree/cs400_centos7.5_pe2018/zeromq/4.2.3/centos7.5_gnu8.1.0/lib 12 | export PBUF_INC_DIR=/software/dev_tools/swtree/cs400_centos7.5_pe2018/protobuf/3.6.1/centos7.5_gnu8.1.0/include 13 | export PBUF_LIB_DIR=/software/dev_tools/swtree/cs400_centos7.5_pe2018/protobuf/3.6.1/centos7.5_gnu8.1.0/lib 14 | -------------------------------------------------------------------------------- /test/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | include_directories(${CMAKE_BINARY_DIR}/common) 4 | 5 | add_subdirectory (libjson) 6 | -------------------------------------------------------------------------------- /test/Test_Cases_CLI.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/test/Test_Cases_CLI.xlsx -------------------------------------------------------------------------------- /test/import/record1_upd.json: -------------------------------------------------------------------------------- 1 | { 2 | "id":"import-rec-1", 3 | "title":"Import Record #1 (updated)", 4 | "desc":"Nominal JSON file for testing record import. 
(updated)", 5 | "keyw":"import,nominal,test,updated", 6 | "md":{ 7 | "update":true 8 | }, 9 | "doi":"doi/fake", 10 | "data_url":"http://ipv4.download.thinkbroadband.com/5MB.zip" 11 | } 12 | -------------------------------------------------------------------------------- /test/import/record2.json: -------------------------------------------------------------------------------- 1 | { 2 | "title":"Import Record #2", 3 | "desc":"Nominal JSON file for testing record import.", 4 | "keyw":"import,nominal,test", 5 | "alias":"import-rec-2", 6 | "doi":"imp/rec2", 7 | "data_url":"https://import/record?id=2", 8 | "md":{ 9 | "rec":2, 10 | "x":21, 11 | "y":22, 12 | "z":23 13 | }, 14 | "parent":"import-coll" 15 | } -------------------------------------------------------------------------------- /test/import/record2_upd.json: -------------------------------------------------------------------------------- 1 | { 2 | "id":"import-rec-2", 3 | "title":"Import Record #2 (updated)", 4 | "desc":"Nominal JSON file for testing record import. (updated)", 5 | "keyw":"import,nominal,test,updated", 6 | "md":{ 7 | "update":true 8 | }, 9 | "doi":"doi/fake2", 10 | "data_url":"http://ipv4.download.thinkbroadband.com/5MB.zip" 11 | } 12 | -------------------------------------------------------------------------------- /test/import/record3.json: -------------------------------------------------------------------------------- 1 | { 2 | "title":"Import Record #3", 3 | "desc":"Nominal JSON file for testing record import.", 4 | "keyw":"import,nominal,test", 5 | "alias":"import-rec-3", 6 | "doi":"imp/rec3", 7 | "data_url":"https://import/record?id=3", 8 | "deps":[ 9 | {"id":"import-rec-1","type":0}, 10 | {"id":"import-rec-2","type":1} 11 | ], 12 | "md":{ 13 | "rec":3, 14 | "x":31, 15 | "y":32, 16 | "z":33 17 | }, 18 | "parent":"import-coll" 19 | } -------------------------------------------------------------------------------- /test/import/record4.json: -------------------------------------------------------------------------------- 1 | { 2 | "title":"Import Record #4", 3 | "desc":"Nominal JSON file for testing record import.", 4 | "keyw":"import,nominal,test", 5 | "alias":"import-rec-4", 6 | "doi":"imp/rec4", 7 | "data_url":"https://import/record?id=4", 8 | "md":{ 9 | "rec":4, 10 | "x":41, 11 | "y":42, 12 | "z":43 13 | }, 14 | "parent":"import-coll" 15 | } -------------------------------------------------------------------------------- /test/import/records_upd.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "id":"import-rec-1", 3 | "title":"Import Record #1 (updated)", 4 | "desc":"Nominal JSON file for testing record import. (updated)", 5 | "keyw":"import,nominal,test,updated", 6 | "md":{ 7 | "update":true 8 | }, 9 | "doi":"doi/fake", 10 | "data_url":"http://ipv4.download.thinkbroadband.com/5MB.zip" 11 | },{ 12 | "id":"import-rec-2", 13 | "title":"Import Record #2 (updated)", 14 | "desc":"Nominal JSON file for testing record import. 
(updated)", 15 | "keyw":"import,nominal,test,updated", 16 | "md":{ 17 | "update":true 18 | }, 19 | "doi":"doi/fake2", 20 | "data_url":"http://ipv4.download.thinkbroadband.com/5MB.zip" 21 | }] 22 | -------------------------------------------------------------------------------- /test/libjson/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | 3 | file( GLOB Sources "*.cpp" ) 4 | 5 | add_executable( libjson-test ${Sources} ) 6 | add_dependencies( libjson-test common ) 7 | target_link_libraries( libjson-test common -lprotobuf -lpthread -lcrypto -lssl -lcurl -lboost_program_options -lzmq ) 8 | 9 | target_include_directories( libjson-test PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ) 10 | -------------------------------------------------------------------------------- /tests/end-to-end/README.md: -------------------------------------------------------------------------------- 1 | # Instructions 2 | 3 | The end to end tests require that two users be created. Because we cannot 4 | automate the authentication steps we will create entries in the database for two 5 | users to be test users, the passwords for these test users should only exist in 6 | the env. 7 | 8 | The python API will be exclusively used to run the end to end tests, in this 9 | folder 10 | 11 | To use the python API you will need to build it 12 | 13 | ```bash 14 | cmake -S. -B build -DBUILD_PYTHON_CLIENT=ON 15 | cmake --build build --target pydatafed 16 | ``` 17 | -------------------------------------------------------------------------------- /tests/end-to-end/web-UI/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required (VERSION 3.17.0) 2 | # 3.7.0 requires to use test fixtures 3 | 4 | message("TESTING: DATAFED_DOMAIN : ${DATAFED_DOMAIN}") 5 | 6 | configure_file( 7 | "${CMAKE_CURRENT_SOURCE_DIR}/auth.setup.js.in" 8 | "${CMAKE_CURRENT_SOURCE_DIR}/auth.setup.js" 9 | @ONLY 10 | ) 11 | 12 | #FIXTHIS 13 | # For E2E web ui test 14 | if(ENABLE_END_TO_END_WEB_TESTS) 15 | add_custom_target(end_to_end_web_tests 16 | COMMAND npx playwright test 17 | WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} 18 | COMMENT "Running Playwright tests" 19 | ) 20 | 21 | # Note because these tests are all using the same database we cannot run most of them concurrently 22 | # They must be run sequentially so that concurrent API calls do not create problems 23 | # set_tests_properties(end_to_end_setup PROPERTIES FIXTURES_SETUP FIX_SETUP) 24 | 25 | 26 | # The following must be run sequentially 27 | # set_tests_properties(end_to_end_repo PROPERTIES FIXTURES_REQUIRED FIX_LOGIN) 28 | 29 | 30 | endif() 31 | -------------------------------------------------------------------------------- /tests/end-to-end/web-UI/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "web-UI", 3 | "version": "1.0.0", 4 | "main": "index.js", 5 | "scripts": {}, 6 | "keywords": [], 7 | "author": "", 8 | "license": "ISC", 9 | "description": "", 10 | "devDependencies": { 11 | "@playwright/test": "^1.45.1", 12 | "@types/node": "^20.14.10" 13 | }, 14 | "dependencies": { 15 | "dotenv": "^16.4.5", 16 | "playwright": "^1.45.1" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /web/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "node": true 4 | }, 5 | "parserOptions": 
{ 6 | "ecmaVersion": 6, 7 | "sourceType": "module" 8 | }, 9 | "settings":{ 10 | }, 11 | "extends": ["eslint:recommended","plugin:import/errors","plugin:import/warnings"], 12 | "globals": { 13 | "require":true 14 | }, 15 | "rules": { 16 | "semi": "error", 17 | "no-unused-vars": ["error", { "args": "none" }], 18 | "import/no-unresolved": [2, {"caseSensitive": true}] 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /web/package.json.in: -------------------------------------------------------------------------------- 1 | { 2 | "//": "WARNING package.json.in is where dependency changes should be made package.json is generated by cmake", 3 | "name": "datafed-ws", 4 | "version": "@DATAFED_WEB_MAJOR@.@DATAFED_WEB_MINOR@.@DATAFED_WEB_PATCH@", 5 | "description": "DataFed web service", 6 | "main": "datafed-ws.js", 7 | "dependencies": { 8 | "client-oauth2": "^4.3.3", 9 | "cookie-parser": "^1.4.5", 10 | "ect": "^0.5.9", 11 | "express": "^4.18.2", 12 | "express-session": "^1.17.2", 13 | "helmet": "^4.6.0", 14 | "ini": "^2.0.0", 15 | "protobufjs": "^6.11.2", 16 | "uuid": "^9.0.0", 17 | "zeromq": "^5.2.0", 18 | "sanitize-html": "^2.11.0" 19 | }, 20 | "scripts": { 21 | "test": "echo \"Error: no test specified\" && exit 1" 22 | }, 23 | "repository": { 24 | "type": "git", 25 | "url": "git+https://github.com/ORNL/DataFed.git" 26 | }, 27 | "keywords": [ 28 | "datafed", 29 | "data", 30 | "management" 31 | ], 32 | "author": "Dale Stansberry", 33 | "license": "ISC", 34 | "bugs": { 35 | "url": "https://github.com/ORNL/DataFed/issues" 36 | }, 37 | "homepage": "https://github.com/ORNL/DataFed#readme" 38 | } 39 | -------------------------------------------------------------------------------- /web/static/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "browser": true, 4 | "es6": true, 5 | "jquery": true 6 | }, 7 | "parserOptions": { 8 | "ecmaVersion": 6, 9 | "sourceType": "module" 10 | }, 11 | "settings":{ 12 | }, 13 | "extends": ["eslint:recommended","plugin:import/errors","plugin:import/warnings"], 14 | "globals": { 15 | "ace":true, 16 | "d3":true, 17 | "Cookies":true 18 | }, 19 | "rules": { 20 | "semi": "error", 21 | "no-unused-vars": ["error", { "args": "none" }], 22 | "import/no-unresolved": [2, {"caseSensitive": true}] 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /web/static/ace/ext-error_marker.js: -------------------------------------------------------------------------------- 1 | ; 2 | (function() { 3 | window.require(["ace/ext/error_marker"], function(m) { 4 | if (typeof module == "object" && typeof exports == "object" && module) { 5 | module.exports = m; 6 | } 7 | }); 8 | })(); 9 | -------------------------------------------------------------------------------- /web/static/ace/ext-linking.js: -------------------------------------------------------------------------------- 1 | define("ace/ext/linking",["require","exports","module","ace/editor","ace/config"],function(e,t,n){function i(e){var n=e.editor,r=e.getAccelKey();if(r){var n=e.editor,i=e.getDocumentPosition(),s=n.session,o=s.getTokenAt(i.row,i.column);t.previousLinkingHover&&t.previousLinkingHover!=o&&n._emit("linkHoverOut"),n._emit("linkHover",{position:i,token:o}),t.previousLinkingHover=o}else t.previousLinkingHover&&(n._emit("linkHoverOut"),t.previousLinkingHover=!1)}function s(e){var t=e.getAccelKey(),n=e.getButton();if(n==0&&t){var 
r=e.editor,i=e.getDocumentPosition(),s=r.session,o=s.getTokenAt(i.row,i.column);r._emit("linkClick",{position:i,token:o})}}var r=e("ace/editor").Editor;e("../config").defineOptions(r.prototype,"editor",{enableLinking:{set:function(e){e?(this.on("click",s),this.on("mousemove",i)):(this.off("click",s),this.off("mousemove",i))},value:!1}}),t.previousLinkingHover=!1}); 2 | (function() { 3 | window.require(["ace/ext/linking"], function(m) { 4 | if (typeof module == "object" && typeof exports == "object" && module) { 5 | module.exports = m; 6 | } 7 | }); 8 | })(); 9 | -------------------------------------------------------------------------------- /web/static/ace/ext-statusbar.js: -------------------------------------------------------------------------------- 1 | define("ace/ext/statusbar",["require","exports","module","ace/lib/dom","ace/lib/lang"],function(e,t,n){"use strict";var r=e("ace/lib/dom"),i=e("ace/lib/lang"),s=function(e,t){this.element=r.createElement("div"),this.element.className="ace_status-indicator",this.element.style.cssText="display: inline-block;",t.appendChild(this.element);var n=i.delayedCall(function(){this.updateStatus(e)}.bind(this)).schedule.bind(null,100);e.on("changeStatus",n),e.on("changeSelection",n),e.on("keyboardActivity",n)};(function(){this.updateStatus=function(e){function n(e,n){e&&t.push(e,n||"|")}var t=[];n(e.keyBinding.getStatusText(e)),e.commands.recording&&n("REC");var r=e.selection,i=r.lead;if(!r.isEmpty()){var s=e.getSelectionRange();n("("+(s.end.row-s.start.row)+":"+(s.end.column-s.start.column)+")"," ")}n(i.row+":"+i.column," "),r.rangeCount&&n("["+r.rangeCount+"]"," "),t.pop(),this.element.textContent=t.join("")}}).call(s.prototype),t.StatusBar=s}); 2 | (function() { 3 | window.require(["ace/ext/statusbar"], function(m) { 4 | if (typeof module == "object" && typeof exports == "object" && module) { 5 | module.exports = m; 6 | } 7 | }); 8 | })(); 9 | -------------------------------------------------------------------------------- /web/static/ace/ext-themelist.js: -------------------------------------------------------------------------------- 1 | define("ace/ext/themelist",["require","exports","module","ace/lib/fixoldbrowsers"],function(e,t,n){"use strict";e("ace/lib/fixoldbrowsers");var r=[["Light"],["Dark","dark"]];t.themesByName={},t.themes=r.map(function(e){var n=e[1]||e[0].replace(/ /g,"_").toLowerCase(),r={caption:e[0],theme:"ace/theme/"+n,isDark:e[2]=="dark",name:n};return t.themesByName[n]=r,r})}); 2 | (function() { 3 | window.require(["ace/ext/themelist"], function(m) { 4 | if (typeof module == "object" && typeof exports == "object" && module) { 5 | module.exports = m; 6 | } 7 | }); 8 | })(); 9 | -------------------------------------------------------------------------------- /web/static/doi_style.css: -------------------------------------------------------------------------------- 1 | .content 2 | { 3 | font-size: 120%; 4 | } 5 | 6 | .doi-grid 7 | { 8 | display:grid; 9 | grid-template-columns:max-content auto; 10 | grid-column-gap:1em; 11 | grid-row-gap:.25em; 12 | overflow-y:hidden; 13 | width:100%; 14 | } 15 | 16 | .doi-grid>div 17 | { 18 | box-sizing: border-box; 19 | padding: 0.2em; 20 | } 21 | 22 | span.fancytree-title 23 | { 24 | font-size: 1.2rem; 25 | } 26 | -------------------------------------------------------------------------------- /web/static/favicon.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/favicon.png -------------------------------------------------------------------------------- /web/static/index.js: -------------------------------------------------------------------------------- 1 | import * as api from "/api.js"; 2 | 3 | $(".btn-help").on( "click", function(){ 4 | window.open('https://ornl.github.io/DataFed/','datafed-docs'); 5 | }); 6 | 7 | $(".btn-login").on( "click", function(){ 8 | location.href = "/ui/login"; 9 | }); 10 | 11 | $(document).ready(function(){ 12 | window.name = 'DataFed Welcome'; 13 | $(".btn").button(); 14 | 15 | api.getDailyMessage( function( ok, reply ){ 16 | if ( ok && reply.message ){ 17 | $("#msg_daily").text( reply.message ); 18 | $("#msg_daily_div").show(); 19 | } 20 | }); 21 | 22 | var tmpl_data = JSON.parse(document.getElementById('template_data').innerHTML); 23 | if ( tmpl_data.test_mode == "true" ){ 24 | $("#devmode").show(); 25 | } 26 | }); 27 | 28 | 29 | -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-bg_glass_40_ffc73d_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-bg_glass_40_ffc73d_1x400.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-bg_highlight-hard_20_0972a5_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-bg_highlight-hard_20_0972a5_1x100.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-bg_highlight-soft_33_003147_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-bg_highlight-soft_33_003147_1x100.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-bg_highlight-soft_35_222222_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-bg_highlight-soft_35_222222_1x100.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-bg_highlight-soft_44_444444_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-bg_highlight-soft_44_444444_1x100.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-bg_highlight-soft_80_eeeeee_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-bg_highlight-soft_80_eeeeee_1x100.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-bg_loop_25_000000_21x21.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-bg_loop_25_000000_21x21.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-icons_222222_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-icons_222222_256x240.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-icons_4b8e0b_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-icons_4b8e0b_256x240.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-icons_a83300_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-icons_a83300_256x240.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-icons_cccccc_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-icons_cccccc_256x240.png -------------------------------------------------------------------------------- /web/static/jquery-ui-dark/images/ui-icons_ffffff_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-dark/images/ui-icons_ffffff_256x240.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-bg_diagonals-small_40_db4865_40x40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-bg_diagonals-small_40_db4865_40x40.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-bg_diagonals-small_50_93c3cd_40x40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-bg_diagonals-small_50_93c3cd_40x40.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-bg_diagonals-small_50_ff3853_40x40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-bg_diagonals-small_50_ff3853_40x40.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-bg_diagonals-small_75_a0e1cb_40x40.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-bg_diagonals-small_75_a0e1cb_40x40.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-bg_diagonals-small_75_ccd232_40x40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-bg_diagonals-small_75_ccd232_40x40.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-bg_dots-medium_80_ffff38_4x4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-bg_dots-medium_80_ffff38_4x4.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-bg_dots-small_35_35414f_2x2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-bg_dots-small_35_35414f_2x2.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-bg_white-lines_85_f7f7ba_40x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-bg_white-lines_85_f7f7ba_40x100.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-icons_454545_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-icons_454545_256x240.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-icons_88a206_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-icons_88a206_256x240.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-icons_c02669_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-icons_c02669_256x240.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-icons_e1e463_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-icons_e1e463_256x240.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-icons_ffeb33_256x240.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-icons_ffeb33_256x240.png -------------------------------------------------------------------------------- /web/static/jquery-ui-light/images/ui-icons_ffffff_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery-ui-light/images/ui-icons_ffffff_256x240.png -------------------------------------------------------------------------------- /web/static/jquery/font/jquery-ui.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery/font/jquery-ui.eot -------------------------------------------------------------------------------- /web/static/jquery/font/jquery-ui.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery/font/jquery-ui.ttf -------------------------------------------------------------------------------- /web/static/jquery/font/jquery-ui.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery/font/jquery-ui.woff -------------------------------------------------------------------------------- /web/static/jquery/font/jquery-ui.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/jquery/font/jquery-ui.woff2 -------------------------------------------------------------------------------- /web/static/project.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ORNL/DataFed/bd5d99fbd7cf1e1fc204598b664951d2e99f8518/web/static/project.gif -------------------------------------------------------------------------------- /web/version.js.in: -------------------------------------------------------------------------------- 1 | 2 | module.exports = { 3 | MAJOR: @DATAFED_WEB_MAJOR@, 4 | MINOR: @DATAFED_WEB_MINOR@, 5 | PATCH: @DATAFED_WEB_PATCH@ 6 | }; 7 | -------------------------------------------------------------------------------- /web/views/error.ect: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | <% include 'head.ect' %> 6 | DataFed Error 7 | 8 | 9 |

An error has occurred.
10 | Back to main
11 | [error.ect: closing markup stripped during extraction; the surviving content is the head.ect include, the "DataFed Error" title, the error message, and a "Back to main" link]
-------------------------------------------------------------------------------- /web/views/head.ect: -------------------------------------------------------------------------------- 1 | <% if @enableGoogleAnalytics : %> [lines 2-10, presumably the Google Analytics snippet, stripped during extraction] 11 | <% end %> [lines 12-19, the shared head markup, stripped during extraction] -------------------------------------------------------------------------------- /web/views/tab_admin.ect: -------------------------------------------------------------------------------- 1 | Admin interface TBD
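The .ect views above are rendered by the web service through the ect package pinned in package.json.in. A hedged sketch of that wiring, assuming a views/ root and an /error route rather than quoting the actual datafed-ws.js:

```javascript
// Register ECT as the Express view engine (API per the "ect" npm package).
const express = require("express");
const ECT = require("ect");

const app = express();
const renderer = ECT({ root: __dirname + "/views", ext: ".ect" });

app.engine("ect", renderer.render);
app.set("view engine", "ect");

// error.ect pulls shared markup in via <% include 'head.ect' %>, so the
// Google Analytics toggle travels with the template data.
app.get("/error", (req, res) => {
    res.render("error", { enableGoogleAnalytics: false });
});
```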
-------------------------------------------------------------------------------- /web/views/tab_repo.ect: -------------------------------------------------------------------------------- [tab_repo.ect: the view's HTML markup was stripped during extraction; the only surviving text is the panel labels "Repositories:" and "Selection Details:"]
--------------------------------------------------------------------------------
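Several of the files above (package.json.in, version.js.in, auth.setup.js.in) are CMake templates whose @VAR@ placeholders configure_file substitutes at build time. As a small illustration of the result, a consumer of the generated version.js might look like this; the relative require path is an assumption:

```javascript
// After CMake substitution, version.js exports plain numbers.
const version = require("./version");

console.log(`datafed-ws v${version.MAJOR}.${version.MINOR}.${version.PATCH}`);
```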