├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── app.py ├── buildspec.yml ├── cdk.json ├── docimage-rstudio ├── Dockerfile ├── rocker_scripts │ ├── add_ubuntugis.sh │ ├── bh-gdal.sh │ ├── bh-proj-gdal_only.sh │ ├── bh-proj.sh │ ├── config_R_cuda.sh │ ├── default_user.sh │ ├── dev_osgeo.sh │ ├── experimental │ │ ├── cuda10.2-tf.sh │ │ ├── install_R_binary.sh │ │ └── install_rl.sh │ ├── install_R.sh │ ├── install_R_ppa.sh │ ├── install_binder.sh │ ├── install_cuda-10.1.sh │ ├── install_cuda-11.1.sh │ ├── install_gdal_source.sh │ ├── install_geospatial.sh │ ├── install_geospatial_unstable.sh │ ├── install_nvtop.sh │ ├── install_pandoc.sh │ ├── install_proj.sh │ ├── install_python.sh │ ├── install_rstudio.sh │ ├── install_s6init.sh │ ├── install_shiny_server.sh │ ├── install_tensorflow.sh │ ├── install_texlive.sh │ ├── install_tidyverse.sh │ ├── install_verse.sh │ ├── install_wgrib2.sh │ ├── pam-helper.sh │ ├── rsession.sh │ └── userconf.sh └── user_scripts │ ├── config.sh │ └── install_packages.R ├── docimage-shiny ├── Dockerfile └── rocker_scripts │ ├── add_ubuntugis.sh │ ├── bh-gdal.sh │ ├── bh-proj-gdal_only.sh │ ├── bh-proj.sh │ ├── config_R_cuda.sh │ ├── default_user.sh │ ├── dev_osgeo.sh │ ├── experimental │ ├── cuda10.2-tf.sh │ ├── install_R_binary.sh │ └── install_rl.sh │ ├── install_R.sh │ ├── install_R_ppa.sh │ ├── install_binder.sh │ ├── install_cuda-10.1.sh │ ├── install_cuda-11.1.sh │ ├── install_gdal_source.sh │ ├── install_geospatial.sh │ ├── install_geospatial_unstable.sh │ ├── install_nvtop.sh │ ├── install_pandoc.sh │ ├── install_proj.sh │ ├── install_python.sh │ ├── install_rstudio.sh │ ├── install_s6init.sh │ ├── install_shiny_server.sh │ ├── install_tensorflow.sh │ ├── install_texlive.sh │ ├── install_tidyverse.sh │ ├── install_verse.sh │ ├── install_wgrib2.sh │ ├── pam-helper.sh │ ├── rsession.sh │ └── userconf.sh ├── dummy └── empty.txt ├── images ├── Rstudio_architecture.png ├── 
Rstudio_deployment_image.png └── datasync_blog.png ├── ml_example ├── breast-cancer-prediction │ └── app.R └── breast_cancer_modeling.r ├── parameters.json ├── requirements.txt ├── rstudio_fargate ├── datalake │ ├── dl_resources.py │ ├── dl_resources_stage.py │ └── rstudio_s3_stack.py ├── datasync_trigger │ ├── datasync_trigger_lambda_stack.py │ ├── datasync_trigger_lambda_stage.py │ └── trigger_datasync_handler.py ├── network │ ├── rstudio_network_account_resources.py │ ├── rstudio_network_account_stage.py │ └── rstudio_route53_stack.py ├── rstudio │ ├── custom │ │ ├── ssm_custom_resource.py │ │ └── ssm_custom_resource_handler.py │ ├── datasync │ │ └── datasync_stack.py │ ├── ecs │ │ └── ecs_cluster_stack.py │ ├── efs │ │ ├── rstudio_efs_stack.py │ │ └── shiny_efs_stack.py │ ├── fargate │ │ ├── rstudio_ec2_stack.py │ │ ├── rstudio_fargate_stack.py │ │ └── shiny_stack.py │ ├── kms │ │ └── kms_stack.py │ ├── route53 │ │ └── rstudio_instance_domain_stack.py │ ├── rstudio_pipeline_stage.py │ ├── ses │ │ ├── rstudio_email_passwords_stack.py │ │ ├── ses_custom_resource.py │ │ └── ses_custom_resource_handler.py │ ├── vpc │ │ └── vpc_stack.py │ └── waf │ │ ├── rstudio_waf_stack.py │ │ └── shiny_waf_stack.py └── rstudio_pipeline_stack.py ├── setup.py └── source.bat /.gitignore: -------------------------------------------------------------------------------- 1 | *.swp 2 | package-lock.json 3 | __pycache__ 4 | .pytest_cache 5 | .env 6 | .venv 7 | *.egg-info 8 | 9 | # CDK asset staging directory 10 | .cdk.staging 11 | cdk.out 12 | 13 | # R 14 | # History files 15 | .Rhistory 16 | .Rapp.history 17 | 18 | # Session Data files 19 | .RData 20 | 21 | # User-specific files 22 | *.rds 23 | .Ruserdata 24 | 25 | # Example code in package build process 26 | *-Ex.R 27 | 28 | # Output files from R CMD build 29 | /*.tar.gz 30 | 31 | # Output files from R CMD check 32 | /*.Rcheck/ 33 | 34 | # RStudio files 35 | .Rproj.user/ 36 | 37 | # produced vignettes 38 | vignettes/*.html 39 | 
vignettes/*.pdf 40 | 41 | # OAuth2 token, see https://github.com/hadley/httr/releases/tag/v0.3 42 | .httr-oauth 43 | 44 | # knitr and R markdown default cache directories 45 | *_cache/ 46 | /cache/ 47 | 48 | # Temporary files created by R markdown 49 | *.utf8.md 50 | *.knit.md 51 | 52 | # R Environment Variables 53 | .Renviron 54 | 55 | # pkgdown site 56 | docs/ 57 | 58 | # translation temp files 59 | po/*~ -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. 
Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *main* branch. 27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 38 | 39 | GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 
49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 10 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 11 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 13 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 14 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
15 | 16 | -------------------------------------------------------------------------------- /buildspec.yml: -------------------------------------------------------------------------------- 1 | ###################################################################################### 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | ###################################################################################### 18 | 19 | version: 0.2 20 | phases: 21 | pre_build: 22 | commands: 23 | - echo Logging in to Amazon ECR... 24 | - aws --version 25 | - $(aws ecr get-login --region $AWS_DEFAULT_REGION --no-include-email) 26 | - RSTUDIO_REPOSITORY_URI=${RSTUDIO_REPO_ECR} 27 | - SHINY_REPOSITORY_URI=${SHINY_REPO_ECR} 28 | - COMMIT_HASH=$(echo $CODEBUILD_RESOLVED_SOURCE_VERSION | cut -c 1-7) 29 | - IMAGE_TAG=${COMMIT_HASH:=latest} 30 | build: 31 | commands: 32 | - echo Rstudio Image Build started on `date` 33 | - echo Building the Rstudio Docker image... 
34 | - docker login -u AWS -p $(aws ecr get-login-password --region $AWS_DEFAULT_REGION) $AWS_DEFAULT_ACCOUNT.dkr.ecr.$AWS_DEFAULT_REGION.amazonaws.com 35 | - docker login -u $DOCKER_HUB_USERNAME -p $DOCKER_HUB_PASSWORD 36 | - docker build -f ./docimage-rstudio/Dockerfile -t $RSTUDIO_REPOSITORY_URI:latest . 37 | - docker tag $RSTUDIO_REPOSITORY_URI:latest $RSTUDIO_REPOSITORY_URI:$IMAGE_TAG 38 | - echo Shiny Image Build started on `date` 39 | - echo Building the Shiny image... 40 | - docker build -f ./docimage-shiny/Dockerfile -t $SHINY_REPOSITORY_URI:latest . 41 | - docker tag $SHINY_REPOSITORY_URI:latest $SHINY_REPOSITORY_URI:$IMAGE_TAG 42 | post_build: 43 | commands: 44 | - echo Build completed on `date` 45 | - echo Pushing the Docker images... 46 | - docker push $RSTUDIO_REPOSITORY_URI:latest 47 | - docker push $RSTUDIO_REPOSITORY_URI:$IMAGE_TAG 48 | - docker push $SHINY_REPOSITORY_URI:latest 49 | - docker push $SHINY_REPOSITORY_URI:$IMAGE_TAG 50 | - echo Push completed on `date` -------------------------------------------------------------------------------- /cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "python3 app.py", 3 | "context": { 4 | "instance": "dev", 5 | "rstudio_account_id": "xxxxxxxxxxxx", 6 | "rstudio_pipeline_account_id": "qqqqqqqqqqqqq", 7 | "network_account_id": "nnnnnnnnnnnn", 8 | "datalake_account_id": "ttttttttttt", 9 | "datalake_aws_region": "eu-west-1", 10 | "code_repo_name": "codecommit_repository_name", 11 | "r53_base_domain": "example.com", 12 | "rstudio_install_type": "ec2", 13 | "rstudio_ec2_instance_type": "t3.xlarge", 14 | "rstudio_container_memory_in_gb": "4", 15 | "number_of_rstudio_containers": "4", 16 | "vpc_cidr_range": "10.5.0.0/16", 17 | "allowed_ips": "", 18 | "sns_email_id": "abc@example.com", 19 | "datalake_source_bucket_name": "S3 bucket name", 20 | "@aws-cdk/core:enableStackNameDuplicates": "true", 21 | "aws-cdk:enableDiffNoFail": "true", 22 | 
"@aws-cdk/core:stackRelativeExports": "true", 23 | "@aws-cdk/aws-ecr-assets:dockerIgnoreSupport": true, 24 | "@aws-cdk/aws-secretsmanager:parseOwnedSecretName": true, 25 | "@aws-cdk/aws-kms:defaultKeyPolicies": true, 26 | "@aws-cdk/core:newStyleStackSynthesis": "true" 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /docimage-rstudio/Dockerfile: -------------------------------------------------------------------------------- 1 | ###################################################################################### 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | ###################################################################################### 18 | 19 | FROM rocker/r-ver:4.1.0 20 | 21 | ENV AWS_S3_BUCKET=${AWS_S3_BUCKET} 22 | ENV AWS_ACCESS_KEY_ID="" 23 | ENV AWS_ACCESS_KEY="" 24 | ENV AWS_ATHENA_WG=${AWS_ATHENA_WG} 25 | ENV ROOT ${ROOT} 26 | ENV ADD ${ADD} 27 | ENV S6_VERSION "v2.1.0.2" 28 | ENV RSTUDIO_VERSION=1.4.1717 29 | ENV PATH /usr/lib/rstudio-server/bin:$PATH 30 | ENV SHINY_SERVER_VERSION latest 31 | ENV PANDOC_VERSION default 32 | 33 | COPY ./docimage-rstudio/rocker_scripts /rocker_scripts 34 | COPY ./docimage-rstudio/user_scripts /usr/local/bin/user_scripts 35 | 36 | RUN chmod 755 /rocker_scripts/* 37 | RUN chmod 755 /usr/local/bin/user_scripts/* 38 | 39 | RUN /rocker_scripts/install_rstudio.sh 40 | RUN /rocker_scripts/install_pandoc.sh 41 | RUN /rocker_scripts/install_shiny_server.sh 42 | RUN /rocker_scripts/install_tidyverse.sh 43 | 44 | RUN Rscript /usr/local/bin/user_scripts/install_packages.R 45 | 46 | EXPOSE 8787 47 | EXPOSE 3838 48 | 49 | CMD ["/init"] 50 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/add_ubuntugis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | UBUNTUGIS_VERSION=${1:-${UBUNTUGIS_VERSION:-stable}} 6 | 7 | ## Force installs from SOURCE if using RStudio Package Manager Repository 8 | CRAN=${CRAN/"__linux__/focal"/""} 9 | echo "options(repos = c(CRAN = '${CRAN}'))" >> ${R_HOME}/etc/Rprofile.site 10 | 11 | 12 | 13 | apt-get update \ 14 | && apt-get install -y --no-install-recommends \ 15 | software-properties-common \ 16 | vim \ 17 | wget \ 18 | ca-certificates \ 19 | && add-apt-repository --enable-source --yes "ppa:ubuntugis/ubuntugis-$UBUNTUGIS_VERSION" 20 | 21 | 22 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/bh-gdal.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -eu 3 | 4 | if [ "${GDAL_VERSION}" = "master" ]; then 5 | GDAL_VERSION=$(curl -Ls https://api.github.com/repos/OSGeo/gdal/commits/HEAD -H "Accept: application/vnd.github.VERSION.sha") 6 | export GDAL_VERSION 7 | GDAL_RELEASE_DATE=$(date "+%Y%m%d") 8 | export GDAL_RELEASE_DATE 9 | fi 10 | 11 | if [ -z "${GDAL_BUILD_IS_RELEASE:-}" ]; then 12 | export GDAL_SHA1SUM=${GDAL_VERSION} 13 | fi 14 | 15 | #mkdir gdal 16 | #wget -q "https://github.com/OSGeo/gdal/archive/${GDAL_VERSION}.tar.gz" \ 17 | # -O - | tar xz -C gdal --strip-components=1 18 | 19 | git clone https://github.com/OSGeo/gdal 20 | cd gdal/gdal 21 | git checkout v${GDAL_VERSION} . 22 | ./configure --prefix=/usr \ 23 | --without-libtool \ 24 | --with-jpeg12 \ 25 | --with-python \ 26 | --with-poppler \ 27 | --with-spatialite \ 28 | --with-mysql \ 29 | --with-liblzma \ 30 | --with-webp \ 31 | --with-epsilon \ 32 | --with-proj="${PROJ_INSTALL_PREFIX-/usr/local}" \ 33 | --with-poppler \ 34 | --with-hdf5 \ 35 | --with-dods-root=/usr \ 36 | --with-sosi \ 37 | --with-libtiff=internal \ 38 | --with-geotiff=internal \ 39 | --with-kea=/usr/bin/kea-config \ 40 | --with-mongocxxv3 \ 41 | --with-tiledb \ 42 | --with-crypto 43 | 44 | make "-j$(nproc)" 45 | make install 46 | 47 | cd / 48 | 49 | rm -rf gdal 50 | 51 | 52 | 53 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/bh-proj-gdal_only.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -eu 3 | 4 | mkdir proj 5 | wget -q "https://github.com/OSGeo/PROJ/archive/${PROJ_VERSION}.tar.gz" \ 6 | -O - | tar xz -C proj --strip-components=1 7 | 8 | ( 9 | cd proj 10 | 11 | ./autogen.sh 12 | 13 | if [ -n "${RSYNC_REMOTE:-}" ]; then 14 | echo "Downloading cache..." 
15 | rsync -ra "${RSYNC_REMOTE}/proj/" "$HOME/" 16 | echo "Finished" 17 | 18 | export CC="ccache gcc" 19 | export CXX="ccache g++" 20 | export PROJ_DB_CACHE_DIR="$HOME/.ccache" 21 | 22 | ccache -M 100M 23 | fi 24 | 25 | export CFLAGS="-DPROJ_RENAME_SYMBOLS -O2 -g" 26 | export CXXFLAGS="-DPROJ_RENAME_SYMBOLS -DPROJ_INTERNAL_CPP_NAMESPACE -O2 -g" 27 | export CFLAGS="-O2 -g" 28 | export CXXFLAGS="-O2 -g" 29 | 30 | 31 | ./configure "--prefix=${PROJ_INSTALL_PREFIX:-/usr/local}" 32 | 33 | make "-j$(nproc)" 34 | make install DESTDIR="/build" 35 | 36 | if [ -n "${RSYNC_REMOTE:-}" ]; then 37 | ccache -s 38 | 39 | echo "Uploading cache..." 40 | rsync -ra --delete "$HOME/.ccache" "${RSYNC_REMOTE}/proj/" 41 | echo "Finished" 42 | 43 | rm -rf "$HOME/.ccache" 44 | unset CC 45 | unset CXX 46 | fi 47 | ) 48 | 49 | rm -rf proj 50 | 51 | PROJ_SO=$(readlink "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so" | sed "s/libproj\.so\.//") 52 | PROJ_SO_FIRST=$(echo "$PROJ_SO" | awk 'BEGIN {FS="."} {print $1}') 53 | PROJ_SO_DEST="/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO}" 54 | 55 | mv "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO}" "${PROJ_SO_DEST}" 56 | 57 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO_FIRST}" 58 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so" 59 | 60 | rm "/build${PROJ_INSTALL_PREFIX}/lib"/libproj.* 61 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO_FIRST}" 62 | 63 | 64 | if [ "${WITH_DEBUG_SYMBOLS}" = "yes" ]; then 65 | # separate debug symbols 66 | mkdir -p "/build${PROJ_INSTALL_PREFIX}/lib/.debug/" "/build${PROJ_INSTALL_PREFIX}/bin/.debug/" 67 | 68 | DEBUG_SO="/build${PROJ_INSTALL_PREFIX}/lib/.debug/libinternalproj.so.${PROJ_SO}.debug" 69 | objcopy -v --only-keep-debug --compress-debug-sections "${PROJ_SO_DEST}" "${DEBUG_SO}" 70 | strip --strip-debug --strip-unneeded "${PROJ_SO_DEST}" 71 | objcopy 
--add-gnu-debuglink="${DEBUG_SO}" "${PROJ_SO_DEST}" 72 | 73 | for P in "/build${PROJ_INSTALL_PREFIX}/bin"/*; do 74 | if file -h "$P" | grep -qi elf; then 75 | F=$(basename "$P") 76 | DEBUG_P="/build${PROJ_INSTALL_PREFIX}/bin/.debug/${F}.debug" 77 | objcopy -v --only-keep-debug --strip-unneeded "$P" "${DEBUG_P}" 78 | strip --strip-debug --strip-unneeded "$P" 79 | objcopy --add-gnu-debuglink="${DEBUG_P}" "$P" 80 | fi 81 | done 82 | else 83 | strip -s "${PROJ_SO_DEST}" 84 | for P in "/build${PROJ_INSTALL_PREFIX}/bin"/*; do 85 | strip -s "$P" 2>/dev/null || /bin/true; 86 | done; 87 | fi 88 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/bh-proj.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -eu 3 | 4 | mkdir proj 5 | wget -q "https://github.com/OSGeo/PROJ/archive/${PROJ_VERSION}.tar.gz" \ 6 | -O - | tar xz -C proj --strip-components=1 7 | 8 | ( 9 | cd proj 10 | 11 | ./autogen.sh 12 | 13 | if [ -n "${RSYNC_REMOTE:-}" ]; then 14 | echo "Downloading cache..." 15 | rsync -ra "${RSYNC_REMOTE}/proj/" "$HOME/" 16 | echo "Finished" 17 | 18 | export CC="ccache gcc" 19 | export CXX="ccache g++" 20 | export PROJ_DB_CACHE_DIR="$HOME/.ccache" 21 | 22 | ccache -M 100M 23 | fi 24 | 25 | export CFLAGS="-DPROJ_RENAME_SYMBOLS -O2 -g" 26 | export CXXFLAGS="-DPROJ_RENAME_SYMBOLS -DPROJ_INTERNAL_CPP_NAMESPACE -O2 -g" 27 | 28 | ./configure "--prefix=${PROJ_INSTALL_PREFIX:-/usr/local}" --disable-static 29 | 30 | make "-j$(nproc)" 31 | make install DESTDIR="/build" 32 | 33 | if [ -n "${RSYNC_REMOTE:-}" ]; then 34 | ccache -s 35 | 36 | echo "Uploading cache..." 
37 | rsync -ra --delete "$HOME/.ccache" "${RSYNC_REMOTE}/proj/" 38 | echo "Finished" 39 | 40 | rm -rf "$HOME/.ccache" 41 | unset CC 42 | unset CXX 43 | fi 44 | ) 45 | 46 | rm -rf proj 47 | 48 | PROJ_SO=$(readlink "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so" | sed "s/libproj\.so\.//") 49 | PROJ_SO_FIRST=$(echo "$PROJ_SO" | awk 'BEGIN {FS="."} {print $1}') 50 | PROJ_SO_DEST="/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO}" 51 | 52 | mv "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO}" "${PROJ_SO_DEST}" 53 | 54 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO_FIRST}" 55 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so" 56 | 57 | rm "/build${PROJ_INSTALL_PREFIX}/lib"/libproj.* 58 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO_FIRST}" 59 | 60 | if [ "${WITH_DEBUG_SYMBOLS}" = "yes" ]; then 61 | # separate debug symbols 62 | mkdir -p "/build${PROJ_INSTALL_PREFIX}/lib/.debug/" "/build${PROJ_INSTALL_PREFIX}/bin/.debug/" 63 | 64 | DEBUG_SO="/build${PROJ_INSTALL_PREFIX}/lib/.debug/libinternalproj.so.${PROJ_SO}.debug" 65 | objcopy -v --only-keep-debug --compress-debug-sections "${PROJ_SO_DEST}" "${DEBUG_SO}" 66 | strip --strip-debug --strip-unneeded "${PROJ_SO_DEST}" 67 | objcopy --add-gnu-debuglink="${DEBUG_SO}" "${PROJ_SO_DEST}" 68 | 69 | for P in "/build${PROJ_INSTALL_PREFIX}/bin"/*; do 70 | if file -h "$P" | grep -qi elf; then 71 | F=$(basename "$P") 72 | DEBUG_P="/build${PROJ_INSTALL_PREFIX}/bin/.debug/${F}.debug" 73 | objcopy -v --only-keep-debug --strip-unneeded "$P" "${DEBUG_P}" 74 | strip --strip-debug --strip-unneeded "$P" 75 | objcopy --add-gnu-debuglink="${DEBUG_P}" "$P" 76 | fi 77 | done 78 | else 79 | strip -s "${PROJ_SO_DEST}" 80 | for P in "/build${PROJ_INSTALL_PREFIX}/bin"/*; do 81 | strip -s "$P" 2>/dev/null || /bin/true; 82 | done; 83 | fi 
-------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/config_R_cuda.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | ## CUDA environmental variables configuration for RStudio 5 | 6 | ## These should be exported as ENV vars too 7 | CUDA_HOME=${CUDA_HOME:-/usr/local/cuda} 8 | PATH={$PATH:-$PATH:$CUDA_HOME/bin} 9 | LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-$LD_LIBRARY_PATH:$CUDA_HOME/lib64:$CUDA_HOME/extras/CUPTI/lib64} 10 | NVBLAS_CONFIG_FILE=${NVBLAS_CONFIG_FILE:-/etc/nvblas.conf} 11 | 12 | ## cli R inherits these, but RStudio needs to have these set in as follows: 13 | ## (From https://tensorflow.rstudio.com/tools/local_gpu.html#environment-variables) 14 | echo "CUDA_HOME=$CUDA_HOME" >> ${R_HOME}/etc/Renviron 15 | echo "PATH=$PATH" >> ${R_HOME}/etc/Renviron 16 | 17 | if test -f /etc/rstudio/rserver.conf; then 18 | sed -i '/^rsession-ld-library-path/d' /etc/rstudio/rserver.conf 19 | echo "rsession-ld-library-path=$LD_LIBRARY_PATH" >> /etc/rstudio/rserver.conf 20 | fi 21 | 22 | 23 | ## nvblas configuration 24 | touch /var/log/nvblas.log && chown :staff /var/log/nvblas.log 25 | chmod a+rw /var/log/nvblas.log 26 | 27 | ## Configure R & RStudio to use drop-in CUDA blas 28 | ## Allow R to use CUDA for BLAS, with fallback on openblas 29 | echo 'NVBLAS_LOGFILE /var/log/nvblas.log 30 | NVBLAS_CPU_BLAS_LIB /usr/lib/x86_64-linux-gnu/openblas/libblas.so.3 31 | NVBLAS_GPU_LIST ALL' > /etc/nvblas.conf 32 | 33 | echo "NVBLAS_CONFIG_FILE=$NVBLAS_CONFIG_FILE" >> ${R_HOME}/etc/Renviron 34 | 35 | 36 | ## We don't want to set LD_PRELOAD globally 37 | ##ENV LD_PRELOAD=/usr/local/cuda/lib64/libnvblas.so 38 | # 39 | ### Instead, we will set it before calling R, Rscript, or RStudio: 40 | #mv /usr/bin/R /usr/bin/R_ 41 | #mv /usr/bin/Rscript /usr/bin/Rscript_ 42 | # 43 | #echo '\#!/bin/sh \ 44 | # \n LD_PRELOAD=/usr/local/cuda/lib64/libnvblas.so /usr/bin/R_ "$@"' \ 45 
| # > /usr/bin/R && \ 46 | # chmod +x /usr/bin/R && \ 47 | # echo '#!/bin/sh \ 48 | # \n LD_PRELOAD=/usr/local/cuda/lib64/libnvblas.so /usr/bin/Rscript_ "$@"' \ 49 | # > /usr/bin/Rscript && \ 50 | # chmod +x /usr/bin/Rscript 51 | # 52 | #echo '#!/usr/bin/with-contenv bash \ 53 | # \n## load /etc/environment vars first: \ 54 | # \n for line in \$( cat /etc/environment ) ; do export $line ; done \ 55 | # \n export LD_PRELOAD=/usr/local/cuda/lib64/libnvblas.so \ 56 | # \n exec /usr/lib/rstudio-server/bin/rserver --server-daemonize 0' \ 57 | # > /etc/services.d/rstudio/run 58 | 59 | 60 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/default_user.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | if id -u "$user" >/dev/null 2>&1; then 5 | echo 'rstudio user already exists' 6 | else 7 | ## Need to configure non-root user for RStudio 8 | DEFAULT_USER=${1:-${DEFAULT_USER:-rstudio}} 9 | useradd $DEFAULT_USER 10 | echo "${DEFAULT_USER}:${DEFAULT_USER}" | chpasswd 11 | mkdir -p /home/${DEFAULT_USER} 12 | chown ${DEFAULT_USER}:${DEFAULT_USER} /home/${DEFAULT_USER} 13 | addgroup ${DEFAULT_USER} staff 14 | 15 | mkdir -p /home/${DEFAULT_USER}/.rstudio/monitored/user-settings 16 | echo "alwaysSaveHistory='0' \ 17 | \nloadRData='0' \ 18 | \nsaveAction='0'" \ 19 | > /home/${DEFAULT_USER}/.rstudio/monitored/user-settings/user-settings 20 | 21 | chown -R ${DEFAULT_USER}:${DEFAULT_USER} /home/${DEFAULT_USER} 22 | 23 | fi 24 | 25 | # If shiny server installed, make the user part of the shiny group 26 | if [ -x "$(command -v shiny-server)" ]; then 27 | adduser ${DEFAULT_USER} shiny 28 | fi 29 | 30 | ## configure git not to request password each time 31 | git config --system credential.helper 'cache --timeout=3600' 32 | git config --system push.default simple 33 | 34 | 35 | -------------------------------------------------------------------------------- 
/docimage-rstudio/rocker_scripts/dev_osgeo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | CRAN=${CRAN_SOURCE:-https://cran.r-project.org} 6 | echo "options(repos = c(CRAN = '${CRAN}'), download.file.method = 'libcurl')" >> ${R_HOME}/etc/Rprofile.site 7 | 8 | 9 | 10 | export DEBIAN_FRONTEND=noninteractive; apt-get -y update \ 11 | && apt-get install -y \ 12 | gdb \ 13 | git \ 14 | libcairo2-dev \ 15 | libcurl4-openssl-dev \ 16 | libexpat1-dev \ 17 | libpq-dev \ 18 | libsqlite3-dev \ 19 | libudunits2-dev \ 20 | make \ 21 | pandoc \ 22 | qpdf \ 23 | sqlite3 \ 24 | subversion \ 25 | valgrind \ 26 | vim \ 27 | tk-dev \ 28 | wget 29 | 30 | apt-get install -y \ 31 | libv8-dev \ 32 | libjq-dev \ 33 | libprotobuf-dev \ 34 | libxml2-dev \ 35 | libprotobuf-dev \ 36 | protobuf-compiler \ 37 | unixodbc-dev \ 38 | libssh2-1-dev \ 39 | libgit2-dev \ 40 | libnetcdf-dev \ 41 | locales \ 42 | libssl-dev \ 43 | libtiff-dev 44 | 45 | locale-gen en_US.UTF-8 46 | 47 | PROJ_VERSION=${PROJ_VERSION:-7.2.0} 48 | LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH 49 | 50 | export DEBIAN_FRONTEND=noninteractive; apt-get -y update \ 51 | && apt-get install -y \ 52 | cmake \ 53 | libtiff5-dev 54 | 55 | #git clone --depth 1 https://github.com/OSGeo/PROJ.git 56 | wget http://download.osgeo.org/proj/proj-$PROJ_VERSION.tar.gz 57 | tar zxvf proj-${PROJ_VERSION}.tar.gz 58 | cd proj-${PROJ_VERSION} \ 59 | && ls -l \ 60 | && mkdir build \ 61 | && cd build \ 62 | && cmake .. \ 63 | && make \ 64 | && make install \ 65 | && cd ../.. 
\ 66 | && ldconfig 67 | 68 | # install proj-data: 69 | #cd /usr/local/share/proj \ 70 | # && wget http://download.osgeo.org/proj/proj-data-1.1RC1.zip \ 71 | # && unzip -o proj-data*zip \ 72 | # && rm proj-data*zip \ 73 | # && cd - 74 | 75 | # GDAL: 76 | 77 | # https://download.osgeo.org/gdal/ 78 | GDAL_VERSION=${GDAL_VERSION:-3.2.0} 79 | GDAL_VERSION_NAME=${GDAL_VERSION} 80 | 81 | wget http://download.osgeo.org/gdal/${GDAL_VERSION}/gdal-${GDAL_VERSION_NAME}.tar.gz \ 82 | && tar -xf gdal-${GDAL_VERSION_NAME}.tar.gz \ 83 | && rm *.tar.gz \ 84 | && cd gdal* \ 85 | && ./configure \ 86 | && make \ 87 | && make install \ 88 | && cd .. \ 89 | && ldconfig 90 | 91 | #git clone --depth 1 https://github.com/OSGeo/gdal.git 92 | #cd gdal/gdal \ 93 | # && ls -l \ 94 | # && ./configure \ 95 | # && make \ 96 | # && make install \ 97 | # && cd .. \ 98 | # && ldconfig 99 | 100 | # GEOS: 101 | GEOS_VERSION=${GEOS_VERSION:-3.8.1} 102 | 103 | wget http://download.osgeo.org/geos/geos-${GEOS_VERSION}.tar.bz2 \ 104 | && bzip2 -d geos-*bz2 \ 105 | && tar xf geos*tar \ 106 | && rm *.tar \ 107 | && cd geos* \ 108 | && ./configure \ 109 | && make \ 110 | && make install \ 111 | && cd .. 
\ 112 | && ldconfig 113 | 114 | # svn checkout svn://scm.r-forge.r-project.org/svnroot/rgdal/ 115 | # R CMD build rgdal/pkg --no-build-vignettes 116 | # R CMD INSTALL rgdal_*.tar.gz 117 | 118 | Rscript -e 'install.packages(c("sp", "rgeos", "rgdal", "RPostgreSQL", "RSQLite", "testthat", "knitr", "tidyr", "geosphere", "maptools", "maps", "microbenchmark", "raster", "dplyr", "tibble", "units", "DBI", "covr", "protolite", "tmap", "mapview", "odbc", "pool", "rmarkdown", "RPostgres","spatstat", "stars"))' 119 | 120 | git clone --depth 10 https://github.com/r-spatial/sf.git 121 | git clone --depth 10 https://github.com/r-spatial/lwgeom.git 122 | git clone --depth 10 https://github.com/r-spatial/stars.git 123 | #git config --global user.email "edzer.pebesma@uni-muenster.de" 124 | 125 | R CMD build --no-build-vignettes --no-manual lwgeom 126 | (cd sf; git pull) 127 | R CMD build --no-build-vignettes --no-manual sf 128 | # pkg-config proj --modversion 129 | R CMD INSTALL sf 130 | R CMD INSTALL lwgeom 131 | R CMD build --no-build-vignettes --no-manual stars 132 | R CMD INSTALL stars 133 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/experimental/cuda10.2-tf.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ## not sure why cuda-cudart-dev-10-1 when this is 10.2 and we already have 10.2... 
4 | 5 | sudo apt update && \ 6 | sudo apt install \ 7 | libnvinfer-dev \ 8 | cuda-cudart-dev-10-1 9 | 10 | 11 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/experimental/install_R_binary.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | UBUNTU_VERSION=${UBUNTU_VERSION:-focal} 5 | CRAN_LINUX_VERSION=${CRAN_LINUX_VERSION:-cran40} 6 | LANG=${LANG:-en_US.UTF-8} 7 | LC_ALL=${LC_ALL:-en_US.UTF-8} 8 | 9 | 10 | DEBIAN_FRONTEND=noninteractive 11 | 12 | # Set up and install R 13 | R_HOME=${R_HOME:-/usr/lib/R} 14 | 15 | #R_VERSION=${R_VERSION} 16 | 17 | 18 | apt-get update 19 | 20 | apt-get -y install --no-install-recommends \ 21 | ca-certificates \ 22 | less \ 23 | libopenblas-base \ 24 | locales \ 25 | vim-tiny \ 26 | wget \ 27 | dirmngr \ 28 | gpg \ 29 | gpg-agent 30 | 31 | echo "deb http://cloud.r-project.org/bin/linux/ubuntu ${UBUNTU_VERSION}-${CRAN_LINUX_VERSION}/" >> /etc/apt/sources.list 32 | 33 | gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys E298A3A825C0D65DFD57CBB651716619E084DAB9 34 | gpg -a --export E298A3A825C0D65DFD57CBB651716619E084DAB9 | apt-key add - 35 | 36 | 37 | # Wildcard * at end of version will grab (latest) patch of requested version 38 | apt-get update && apt-get -y install --no-install-recommends r-base-dev=${R_VERSION}* 39 | 40 | 41 | 42 | rm -rf /var/lib/apt/lists/* 43 | 44 | ## Add PPAs: NOTE this will mean that installing binary R packages won't be version stable. 45 | ## 46 | ## These are required at least for bionic-based images since 3.4 r binaries are 47 | 48 | 49 | echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen 50 | locale-gen en_US.utf8 51 | /usr/sbin/update-locale LANG=${LANG} 52 | 53 | Rscript -e "install.packages(c('littler', 'docopt'))" 54 | 55 | ## By default R_LIBS_SITE is unset, and defaults to this, so this is where `littler` will be. 
56 | ## We set it here for symlinks, but don't make the env var persist (since it's already the default) 57 | R_LIBS_SITE=/usr/local/lib/R/site-library 58 | ln -s ${R_LIBS_SITE}/littler/examples/install.r /usr/local/bin/install.r 59 | ln -s ${R_LIBS_SITE}/littler/examples/install2.r /usr/local/bin/install2.r 60 | ln -s ${R_LIBS_SITE}/littler/examples/installGithub.r /usr/local/bin/installGithub.r 61 | ln -s ${R_LIBS_SITE}/littler/bin/r /usr/local/bin/r 62 | 63 | 64 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/experimental/install_rl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | python -m venv /opt/venv/rl 5 | . /opt/venv/rl/bin/activate 6 | 7 | pip install wheel 8 | pip install gym tensorflow keras keras-rl 9 | 10 | chown -R :staff /opt/venv/rl 11 | chmod g+rx /opt/venv/rl 12 | 13 | 14 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_R.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | apt-get update && apt-get -y install lsb-release 5 | 6 | UBUNTU_VERSION=${UBUNTU_VERSION:-`lsb_release -sc`} 7 | LANG=${LANG:-en_US.UTF-8} 8 | LC_ALL=${LC_ALL:-en_US.UTF-8} 9 | CRAN=${CRAN:-https://cran.r-project.org} 10 | 11 | ## mechanism to force source installs if we're using RSPM 12 | CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION"/""} 13 | 14 | export DEBIAN_FRONTEND=noninteractive 15 | 16 | # Set up and install R 17 | R_HOME=${R_HOME:-/usr/local/lib/R} 18 | 19 | 20 | 21 | READLINE_VERSION=8 22 | OPENBLAS=libopenblas-dev 23 | if [ ${UBUNTU_VERSION} == "bionic" ]; then 24 | READLINE_VERSION=7 25 | OPENBLAS=libopenblas-dev 26 | fi 27 | 28 | apt-get update \ 29 | && apt-get install -y --no-install-recommends \ 30 | bash-completion \ 31 | ca-certificates \ 32 | devscripts \ 33 | file \ 34 | 
fonts-texgyre \ 35 | g++ \ 36 | gfortran \ 37 | gsfonts \ 38 | libblas-dev \ 39 | libbz2-* \ 40 | libcurl4 \ 41 | libicu* \ 42 | libpcre2* \ 43 | libjpeg-turbo* \ 44 | ${OPENBLAS} \ 45 | libpangocairo-* \ 46 | libpng16* \ 47 | libreadline${READLINE_VERSION} \ 48 | libtiff* \ 49 | liblzma* \ 50 | locales \ 51 | make \ 52 | unzip \ 53 | zip \ 54 | zlib1g 55 | 56 | echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen 57 | locale-gen en_US.utf8 58 | /usr/sbin/update-locale LANG=en_US.UTF-8 59 | 60 | BUILDDEPS="curl \ 61 | default-jdk \ 62 | libbz2-dev \ 63 | libcairo2-dev \ 64 | libcurl4-openssl-dev \ 65 | libpango1.0-dev \ 66 | libjpeg-dev \ 67 | libicu-dev \ 68 | libpcre2-dev \ 69 | libpng-dev \ 70 | libreadline-dev \ 71 | libtiff5-dev \ 72 | liblzma-dev \ 73 | libx11-dev \ 74 | libxt-dev \ 75 | perl \ 76 | rsync \ 77 | subversion \ 78 | tcl-dev \ 79 | tk-dev \ 80 | texinfo \ 81 | texlive-extra-utils \ 82 | texlive-fonts-recommended \ 83 | texlive-fonts-extra \ 84 | texlive-latex-recommended \ 85 | texlive-latex-extra \ 86 | x11proto-core-dev \ 87 | xauth \ 88 | xfonts-base \ 89 | xvfb \ 90 | wget \ 91 | zlib1g-dev" 92 | 93 | apt-get install -y --no-install-recommends $BUILDDEPS 94 | 95 | 96 | if [[ "$R_VERSION" == "devel" ]]; then \ 97 | wget https://stat.ethz.ch/R/daily/R-devel.tar.gz; \ 98 | elif [[ "$R_VERSION" == "patched" ]]; then \ 99 | wget https://stat.ethz.ch/R/daily/R-patched.tar.gz; \ 100 | else \ 101 | wget https://cran.r-project.org/src/base/R-3/R-${R_VERSION}.tar.gz || \ 102 | wget https://cran.r-project.org/src/base/R-4/R-${R_VERSION}.tar.gz; \ 103 | fi && \ 104 | tar xzf R-${R_VERSION}.tar.gz && 105 | 106 | cd R-${R_VERSION} 107 | R_PAPERSIZE=letter \ 108 | R_BATCHSAVE="--no-save --no-restore" \ 109 | R_BROWSER=xdg-open \ 110 | PAGER=/usr/bin/pager \ 111 | PERL=/usr/bin/perl \ 112 | R_UNZIPCMD=/usr/bin/unzip \ 113 | R_ZIPCMD=/usr/bin/zip \ 114 | R_PRINTCMD=/usr/bin/lpr \ 115 | LIBnn=lib \ 116 | AWK=/usr/bin/awk \ 117 | CFLAGS="-g -O2 -fstack-protector-strong 
-Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g" \ 118 | CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g" \ 119 | ./configure --enable-R-shlib \ 120 | --enable-memory-profiling \ 121 | --with-readline \ 122 | --with-blas \ 123 | --with-lapack \ 124 | --with-tcltk \ 125 | --disable-nls \ 126 | --with-recommended-packages 127 | make 128 | make install 129 | make clean 130 | 131 | ## Add a default CRAN mirror 132 | echo "options(repos = c(CRAN = '${CRAN}'), download.file.method = 'libcurl')" >> ${R_HOME}/etc/Rprofile.site 133 | 134 | ## Set HTTPUserAgent for RSPM (https://github.com/rocker-org/rocker/issues/400) 135 | echo 'options(HTTPUserAgent = sprintf("R/%s R (%s)", getRversion(), 136 | paste(getRversion(), R.version$platform, 137 | R.version$arch, R.version$os)))' >> ${R_HOME}/etc/Rprofile.site 138 | 139 | 140 | ## Add a library directory (for user-installed packages) 141 | mkdir -p ${R_HOME}/site-library 142 | chown root:staff ${R_HOME}/site-library 143 | chmod g+ws ${R_HOME}/site-library 144 | 145 | ## Fix library path 146 | echo "R_LIBS=\${R_LIBS-'${R_HOME}/site-library:${R_HOME}/library'}" >> ${R_HOME}/etc/Renviron 147 | echo "TZ=${TZ}" >> ${R_HOME}/etc/Renviron 148 | 149 | ## Use littler installation scripts 150 | Rscript -e "install.packages(c('littler', 'docopt'), repos='${CRAN_SOURCE}')" 151 | ln -s ${R_HOME}/site-library/littler/examples/install2.r /usr/local/bin/install2.r 152 | ln -s ${R_HOME}/site-library/littler/examples/installGithub.r /usr/local/bin/installGithub.r 153 | ln -s ${R_HOME}/site-library/littler/bin/r /usr/local/bin/r 154 | 155 | 156 | ## Clean up from R source install 157 | cd / 158 | rm -rf /tmp/* 159 | rm -rf R-${R_VERSION} 160 | rm -rf R-${R_VERSION}.tar.gz 161 | apt-get remove --purge -y $BUILDDEPS 162 | apt-get autoremove -y 163 | apt-get autoclean -y 164 | rm -rf /var/lib/apt/lists/* 165 | 166 | 167 | 
-------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_R_ppa.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | UBUNTU_VERSION=${UBUNTU_VERSION:-focal} 5 | CRAN_LINUX_VERSION=${CRAN_LINUX_VERSION:-cran40} 6 | LANG=${LANG:-en_US.UTF-8} 7 | LC_ALL=${LC_ALL:-en_US.UTF-8} 8 | 9 | 10 | DEBIAN_FRONTEND=noninteractive 11 | 12 | # Set up and install R 13 | R_HOME=${R_HOME:-/usr/lib/R} 14 | 15 | #R_VERSION=${R_VERSION} 16 | 17 | 18 | apt-get update 19 | 20 | apt-get -y install --no-install-recommends \ 21 | ca-certificates \ 22 | less \ 23 | libopenblas-base \ 24 | locales \ 25 | vim-tiny \ 26 | wget \ 27 | dirmngr \ 28 | gpg \ 29 | gpg-agent 30 | 31 | echo "deb http://cloud.r-project.org/bin/linux/ubuntu ${UBUNTU_VERSION}-${CRAN_LINUX_VERSION}/" >> /etc/apt/sources.list 32 | 33 | gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys E298A3A825C0D65DFD57CBB651716619E084DAB9 34 | gpg -a --export E298A3A825C0D65DFD57CBB651716619E084DAB9 | apt-key add - 35 | 36 | 37 | # Wildcard * at end of version will grab (latest) patch of requested version 38 | apt-get update && apt-get -y install --no-install-recommends r-base-dev=${R_VERSION}* 39 | 40 | 41 | 42 | rm -rf /var/lib/apt/lists/* 43 | 44 | ## Add PPAs: NOTE this will mean that installing binary R packages won't be version stable. 45 | ## 46 | ## These are required at least for bionic-based images since 3.4 r binaries are 47 | 48 | 49 | echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen 50 | locale-gen en_US.utf8 51 | /usr/sbin/update-locale LANG=${LANG} 52 | 53 | Rscript -e "install.packages(c('littler', 'docopt'))" 54 | 55 | ## By default R_LIBS_SITE is unset, and defaults to this, so this is where `littler` will be. 
56 | ## We set it here for symlinks, but don't make the env var persist (since it's already the default) 57 | R_LIBS_SITE=/usr/local/lib/R/site-library 58 | ln -s ${R_LIBS_SITE}/littler/examples/install.r /usr/local/bin/install.r 59 | ln -s ${R_LIBS_SITE}/littler/examples/install2.r /usr/local/bin/install2.r 60 | ln -s ${R_LIBS_SITE}/littler/examples/installGithub.r /usr/local/bin/installGithub.r 61 | ln -s ${R_LIBS_SITE}/littler/bin/r /usr/local/bin/r 62 | 63 | 64 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_binder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | RSTUDIO_VERSION=1.3.959 /rocker_scripts/install_rstudio.sh 5 | 6 | ## NOTE: this runs as user NB_USER! 7 | PYTHON_VENV_PATH=${PYTHON_VENV_PATH:-/opt/venv/reticulate} 8 | NB_USER=${NB_USER:-rstudio} 9 | NB_UID=${NB_UID:-1000} 10 | WORKDIR=${WORKDIR:-/home/${NB_USER}} 11 | usermod -l ${NB_USER} rstudio 12 | # Create a venv dir owned by unprivileged user & set up notebook in it 13 | # This allows non-root to install python libraries if required 14 | mkdir -p ${PYTHON_VENV_PATH} && chown -R ${NB_USER} ${PYTHON_VENV_PATH} 15 | 16 | # And set ENV for R! It doesn't read from the environment... 
17 | echo "PATH=${PATH}" >> ${R_HOME}/etc/Renviron 18 | echo "export PATH=${PATH}" >> ${WORKDIR}/.profile 19 | 20 | ## This gets run as user 21 | su ${NB_USER} 22 | cd ${WORKDIR} 23 | python3 -m venv ${PYTHON_VENV_PATH} 24 | pip3 install --no-cache-dir jupyter-rsession-proxy 25 | 26 | R --quiet -e "devtools::install_github('IRkernel/IRkernel')" 27 | R --quiet -e "IRkernel::installspec(prefix='${PYTHON_VENV_PATH}')" 28 | 29 | 30 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_cuda-10.1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | apt-get update && apt-get install -y --no-install-recommends \ 4 | gnupg2 curl ca-certificates && \ 5 | curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/7fa2af80.pub | apt-key add - && \ 6 | echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64 /" > /etc/apt/sources.list.d/cuda.list && \ 7 | echo "deb https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64 /" > /etc/apt/sources.list.d/nvidia-ml.list && \ 8 | apt-get purge --autoremove -y curl && \ 9 | rm -rf /var/lib/apt/lists/* 10 | 11 | CUDA_VERSION=${CUDA_VERSION:-10.1.243} 12 | CUDA_PKG_VERSION=${CUDA_PKG_VERSION:-10-1=$CUDA_VERSION-1} 13 | 14 | # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a 15 | apt-get update && apt-get install -y --no-install-recommends \ 16 | cuda-cudart-$CUDA_PKG_VERSION \ 17 | cuda-compat-10-1 && \ 18 | ln -s cuda-10.1 /usr/local/cuda && \ 19 | rm -rf /var/lib/apt/lists/* 20 | 21 | # Required for nvidia-docker v1 22 | echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \ 23 | echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf 24 | 25 | 26 | ## PATH & LD_LIBRARY_PATH are wrong? 
27 | PATH=${PATH:-/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH}} 28 | LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-/usr/local/nvidia/lib:/usr/local/nvidia/lib64} 29 | 30 | 31 | # nvidia-container-runtime 32 | NVIDIA_VISIBLE_DEVICES=${NVIDIA_VISIBLE_DEVICES:-all} 33 | NVIDIA_DRIVER_CAPABILITIES=${NVIDIA_DRIVER_CAPABILITIES:-"compute,utility"} 34 | NVIDIA_REQUIRE_CUDA=${NVIDIA_REQUIRE_CUDA:-"cuda>=10.1 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=396,driver<397 brand=tesla,driver>=410,driver<411"} 35 | 36 | 37 | ## Tensorflow config for cuda runtime. 38 | ## Adapted from: 39 | 40 | ARCH= 41 | CUDA=10.1 42 | CUDNN=7.6.4.38-1 43 | CUDNN_MAJOR_VERSION=7 44 | LIB_DIR_PREFIX=x86_64 45 | LIBNVINFER=6.0.1-1 46 | LIBNVINFER_MAJOR_VERSION=6 47 | 48 | #SHELL ["/bin/bash", "-c"] 49 | # Pick up some TF dependencies 50 | # There appears to be a regression in libcublas10=10.2.2.89-1 which 51 | # prevents cublas from initializing in TF. See 52 | # https://github.com/tensorflow/tensorflow/issues/9489#issuecomment-562394257 53 | apt-get update && apt-get install -y --no-install-recommends --allow-downgrades \ 54 | build-essential \ 55 | cuda-command-line-tools-10-1 \ 56 | libcublas10=10.2.1.243-1 \ 57 | cuda-nvrtc-10-1 \ 58 | cuda-cufft-10-1 \ 59 | cuda-curand-10-1 \ 60 | cuda-cusolver-10-1 \ 61 | cuda-cusparse-10-1 \ 62 | curl \ 63 | libcudnn7=${CUDNN}+cuda10.1 \ 64 | libfreetype6-dev \ 65 | libhdf5-serial-dev \ 66 | libzmq3-dev \ 67 | pkg-config \ 68 | software-properties-common \ 69 | unzip 70 | 71 | # Install TensorRT if not building for PowerPC 72 | apt-get update && \ 73 | apt-get install -y --no-install-recommends libnvinfer${LIBNVINFER_MAJOR_VERSION}=${LIBNVINFER}+cuda${CUDA} \ 74 | libnvinfer-plugin${LIBNVINFER_MAJOR_VERSION}=${LIBNVINFER}+cuda${CUDA} \ 75 | && apt-get clean \ 76 | && rm -rf /var/lib/apt/lists/* 77 | 78 | # For CUDA profiling, TensorFlow requires CUPTI. 
79 | LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-/usr/local/cuda/extras/CUPTI/lib64:/usr/local/cuda/lib64:$LD_LIBRARY_PATH} 80 | 81 | # Link the libcuda stub to the location where tensorflow is searching for it and reconfigure 82 | # dynamic linker run-time bindings 83 | ln -s /usr/local/cuda/lib64/stubs/libcuda.so /usr/local/cuda/lib64/stubs/libcuda.so.1 84 | echo "/usr/local/cuda/lib64/stubs" > /etc/ld.so.conf.d/z-cuda-stubs.conf 85 | ldconfig 86 | 87 | ## Add nvtop 88 | #/rocker_scripts/install_nvtop.sh 89 | 90 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_cuda-11.1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | apt-get update && apt-get install -y --no-install-recommends \ 4 | gnupg2 curl ca-certificates && \ 5 | curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/7fa2af80.pub | apt-key add - && \ 6 | echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/cuda.list && \ 7 | echo "deb https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/nvidia-ml.list && \ 8 | apt-get purge --autoremove -y curl \ 9 | && rm -rf /var/lib/apt/lists/* 10 | 11 | CUDA_VERSION=${CUDA_VERSION:-11.1.1} 12 | 13 | # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a 14 | apt-get update && apt-get install -y --no-install-recommends \ 15 | cuda-cudart-11-1=11.1.74-1 \ 16 | cuda-compat-11-1 \ 17 | && ln -s cuda-11.1 /usr/local/cuda && \ 18 | rm -rf /var/lib/apt/lists/* 19 | 20 | # Required for nvidia-docker v1 21 | echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \ 22 | echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf 23 | 24 | 25 | ## Set all of these as global ENV 26 | # PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH} 27 
| # LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 28 | 29 | # NVIDIA_VISIBLE_DEVICES=all 30 | #NVIDIA_DRIVER_CAPABILITIES="compute,utility" 31 | # NVIDIA_REQUIRE_CUDA="cuda>=11.1 brand=tesla,driver>=418,driver<419 brand=tesla,driver>=440,driver<441 brand=tesla,driver>=450,driver<451" 32 | 33 | 34 | ## runtime #################################################### 35 | ##FROM ${IMAGE_NAME}:11.1-base-ubuntu20.04 36 | 37 | NCCL_VERSION=${NCCL_VERSION:-2.7.8} 38 | 39 | apt-get update && apt-get install -y --no-install-recommends \ 40 | cuda-libraries-11-1=11.1.1-1 \ 41 | libnpp-11-1=11.1.2.301-1 \ 42 | cuda-nvtx-11-1=11.1.74-1 \ 43 | libcublas-11-1=11.3.0.106-1 \ 44 | libnccl2=$NCCL_VERSION-1+cuda11.1 \ 45 | && apt-mark hold libnccl2 \ 46 | && rm -rf /var/lib/apt/lists/* 47 | 48 | 49 | ## devel ####################################################### 50 | 51 | apt-get update && apt-get install -y --no-install-recommends \ 52 | cuda-nvml-dev-11-1=11.1.74-1 \ 53 | cuda-command-line-tools-11-1=11.1.1-1 \ 54 | cuda-nvprof-11-1=11.1.105-1 \ 55 | libnpp-dev-11-1=11.1.2.301-1 \ 56 | cuda-libraries-dev-11-1=11.1.1-1 \ 57 | cuda-minimal-build-11-1=11.1.1-1 \ 58 | libnccl-dev=2.7.8-1+cuda11.1 \ 59 | libcublas-dev-11-1=11.3.0.106-1 \ 60 | libcusparse-11-1=11.3.0.10-1 \ 61 | libcusparse-dev-11-1=11.3.0.10-1 \ 62 | && apt-mark hold libnccl-dev \ 63 | && rm -rf /var/lib/apt/lists/* 64 | 65 | LIBRARY_PATH=/usr/local/cuda/lib64/stubs 66 | 67 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_geospatial.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # always set this for scripts but don't declare as ENV.. 
6 | export DEBIAN_FRONTEND=noninteractive 7 | 8 | apt-get update -qq \ 9 | && apt-get install -y --no-install-recommends \ 10 | gdal-bin \ 11 | lbzip2 \ 12 | libfftw3-dev \ 13 | libgdal-dev \ 14 | libgeos-dev \ 15 | libgsl0-dev \ 16 | libgl1-mesa-dev \ 17 | libglu1-mesa-dev \ 18 | libhdf4-alt-dev \ 19 | libhdf5-dev \ 20 | libjq-dev \ 21 | libpq-dev \ 22 | libproj-dev \ 23 | libprotobuf-dev \ 24 | libnetcdf-dev \ 25 | libsqlite3-dev \ 26 | libssl-dev \ 27 | libudunits2-dev \ 28 | lsb-release \ 29 | netcdf-bin \ 30 | postgis \ 31 | protobuf-compiler \ 32 | sqlite3 \ 33 | tk-dev \ 34 | unixodbc-dev 35 | 36 | # lwgeom 0.2-2 and 0.2-3 have a regression which prevents install on ubuntu:bionic 37 | ## permissionless PAT for builds 38 | UBUNTU_VERSION=${UBUNTU_VERSION:-`lsb_release -sc`} 39 | 40 | if [ ${UBUNTU_VERSION} == "bionic" ]; then 41 | R -e "Sys.setenv(GITHUB_PAT='0e7777db4b3bb48acb542b8912a989b8047f6351'); remotes::install_github('r-spatial/lwgeom')" 42 | fi 43 | 44 | 45 | ## Somehow foreign is messed up on CRAN between 2020-04-25 -- 2020-05-0? 
46 | ##install2.r --error --skipinstalled --repo https://mran.microsoft.com/snapshot/2020-04-24 foreign 47 | 48 | install2.r --error --skipinstalled \ 49 | RColorBrewer \ 50 | RandomFields \ 51 | RNetCDF \ 52 | classInt \ 53 | deldir \ 54 | gstat \ 55 | hdf5r \ 56 | lidR \ 57 | mapdata \ 58 | maptools \ 59 | mapview \ 60 | ncdf4 \ 61 | proj4 \ 62 | raster \ 63 | rgdal \ 64 | rgeos \ 65 | rlas \ 66 | sf \ 67 | sp \ 68 | spacetime \ 69 | spatstat \ 70 | spatialreg \ 71 | spdep \ 72 | stars \ 73 | tidync \ 74 | tmap \ 75 | geoR \ 76 | geosphere 77 | 78 | R -e "BiocManager::install('rhdf5')" 79 | 80 | ## install wgrib2 for NOAA's NOMADS / rNOMADS forecast files 81 | /rocker_scripts/install_wgrib2.sh 82 | 83 | rm -r /tmp/downloaded_packages 84 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_geospatial_unstable.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # always set this for scripts but don't declare as ENV.. 
6 | export DEBIAN_FRONTEND=noninteractive 7 | apt-get update && apt-get install -y --no-install-recommends \ 8 | gnupg2 curl ca-certificates 9 | 10 | apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 6B827C12C2D425E227EDCA75089EBE08314DF160 11 | echo "deb http://ppa.launchpad.net/ubuntugis/ubuntugis-unstable/ubuntu focal main" >> /etc/apt/sources.list.d/ubuntugis.list 12 | echo "deb-src http://ppa.launchpad.net/ubuntugis/ubuntugis-unstable/ubuntu focal main" >> /etc/apt/sources.list.d/ubuntugis.list 13 | rm -rf /var/lib/apt/lists/* 14 | 15 | 16 | 17 | ## in UNSTABLE, we will install everything from source by default: 18 | CRAN=https://cran.r-project.org 19 | ## Add a default CRAN mirror 20 | echo "options(repos = c(CRAN = '${CRAN}'), download.file.method = 'libcurl')" >> ${R_HOME}/etc/Rprofile.site 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_nvtop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | apt-get update && apt-get -y install cmake libncurses5-dev libncursesw5-dev git 5 | git clone https://github.com/Syllo/nvtop.git 6 | mkdir -p nvtop/build && cd nvtop/build 7 | cmake .. 
-DNVML_RETRIEVE_HEADER_ONLINE=True 8 | make 9 | make install 10 | 11 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_pandoc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | # Note that 'default' pandoc version means the version bundled with RStudio 5 | # if RStudio is installed , but latest otherwise 6 | 7 | PANDOC_VERSION=${1:-${PANDOC_VERSION:-default}} 8 | 9 | apt-get update && apt-get -y install wget 10 | 11 | if [ -x "$(command -v pandoc)" ]; then 12 | INSTALLED_PANDOC=$(pandoc --version 2>/dev/null | head -n 1 | grep -oP '[\d\.]+$') 13 | fi 14 | 15 | if [ "$INSTALLED_PANDOC" != "$PANDOC_VERSION" ]; then 16 | 17 | if [ -f "/usr/lib/rstudio-server/bin/pandoc/pandoc" ] && 18 | { [ "$PANDOC_VERSION" = "$(/usr/lib/rstudio-server/bin/pandoc/pandoc --version | head -n 1 | grep -oP '[\d\.]+$')" ] || 19 | [ "$PANDOC_VERSION" = "default" ]; }; then 20 | ln -fs /usr/lib/rstudio-server/bin/pandoc/pandoc /usr/local/bin 21 | ln -fs /usr/lib/rstudio-server/bin/pandoc/pandoc-citeproc /usr/local/bin 22 | else 23 | if [ "$PANDOC_VERSION" = "default" ]; then 24 | PANDOC_DL_URL=$(wget -qO- https://api.github.com/repos/jgm/pandoc/releases/latest | grep -oP "(?<=\"browser_download_url\":\s\")https.*amd64\.deb") 25 | else 26 | PANDOC_DL_URL=https://github.com/jgm/pandoc/releases/download/${PANDOC_VERSION}/pandoc-${PANDOC_VERSION}-amd64.deb 27 | fi 28 | wget ${PANDOC_DL_URL} -O pandoc-amd64.deb 29 | dpkg -i pandoc-amd64.deb 30 | rm pandoc-amd64.deb 31 | fi 32 | 33 | ## Symlink pandoc & standard pandoc templates for use system-wide 34 | PANDOC_TEMPLATES_VERSION=`pandoc -v | grep -oP "(?<=pandoc\s)[0-9\.]+$"` 35 | wget https://github.com/jgm/pandoc-templates/archive/${PANDOC_TEMPLATES_VERSION}.tar.gz -O pandoc-templates.tar.gz 36 | rm -fr /opt/pandoc/templates 37 | mkdir -p /opt/pandoc/templates 38 | tar xvf pandoc-templates.tar.gz 39 | cp -r 
pandoc-templates*/* /opt/pandoc/templates && rm -rf pandoc-templates* 40 | rm -fr /root/.pandoc 41 | mkdir /root/.pandoc && ln -s /opt/pandoc/templates /root/.pandoc/templates 42 | 43 | fi 44 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_proj.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -eu 3 | 4 | git clone https://github.com/OSGeo/PROJ 5 | 6 | PROJ_VERSION=${PROJ_VERSION:-master} 7 | cd PROJ 8 | 9 | git checkout ${PROJ_VERSION} . 10 | ./autogen.sh 11 | ./configure --prefix=/usr/local 12 | make 13 | make install 14 | 15 | 16 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_python.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | WORKON_HOME=${WORKON_HOME:-/opt/venv} 5 | PYTHON_VENV_PATH=${PYTHON_VENV_PATH:-${WORKON_HOME}/reticulate} 6 | RETICULATE_MINICONDA_ENABLED=${RETICULATE_MINICONDA_ENABLED:-FALSE} 7 | 8 | apt-get update && apt-get install -y --no-install-recommends \ 9 | libpython3-dev \ 10 | python3-dev \ 11 | python3-pip \ 12 | python3-virtualenv \ 13 | python3-venv && \ 14 | rm -rf /var/lib/apt/lists/* 15 | 16 | python3 -m pip --no-cache-dir install --upgrade \ 17 | pip \ 18 | setuptools \ 19 | virtualenv 20 | 21 | # Some TF tools expect a "python" binary 22 | if [ ! 
-e /usr/local/bin/python ]; then 23 | ln -s $(which python3) /usr/local/bin/python 24 | fi 25 | 26 | mkdir -p ${WORKON_HOME} 27 | python3 -m venv ${PYTHON_VENV_PATH} 28 | 29 | install2.r --skipinstalled --error reticulate 30 | 31 | ## Ensure RStudio inherits this env var 32 | echo "" >> ${R_HOME}/etc/Renviron 33 | echo "WORKON_HOME=${WORKON_HOME}" >> ${R_HOME}/etc/Renviron 34 | echo "RETICULATE_MINICONDA_ENABLED=${RETICULATE_MINICONDA_ENABLED}" >> ${R_HOME}/etc/Renviron 35 | 36 | 37 | ## symlink these so that these are available when switching to a new venv 38 | ## -f check for file, -L for link, -e for either 39 | if [ ! -e /usr/local/bin/python ]; then 40 | ln -s $(which python3) /usr/local/bin/python 41 | fi 42 | 43 | if [ ! -e /usr/local/bin/pip ]; then 44 | ln -s ${PYTHON_VENV_PATH}/bin/pip /usr/local/bin/pip 45 | fi 46 | 47 | if [ ! -e /usr/local/bin/virtualenv ]; then 48 | ln -s ${PYTHON_VENV_PATH}/bin/virtualenv /usr/local/bin/virtualenv 49 | fi 50 | 51 | ## Allow staff-level users to modify the shared environment 52 | chown -R :staff ${WORKON_HOME} 53 | chmod g+wx ${WORKON_HOME} 54 | chown :staff ${PYTHON_VENV_PATH} 55 | 56 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_s6init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | ### Sets up S6 supervisor. 
5 | 6 | S6_VERSION=${1:-${S6_VERSION:-v1.21.7.0}} 7 | S6_BEHAVIOUR_IF_STAGE2_FAILS=2 8 | 9 | apt-get update && apt-get -y install wget 10 | 11 | ## Set up S6 init system 12 | if [ -f "/rocker_scripts/.s6_version" ] && [ "$S6_VERSION" = "$(cat /rocker_scripts/.s6_version)" ]; then 13 | echo "S6 already installed" 14 | else 15 | wget -P /tmp/ https://github.com/just-containers/s6-overlay/releases/download/${S6_VERSION}/s6-overlay-amd64.tar.gz 16 | 17 | ## need the modified double tar now, see https://github.com/just-containers/s6-overlay/issues/288 18 | tar hzxf /tmp/s6-overlay-amd64.tar.gz -C / --exclude=usr/bin/execlineb 19 | tar hzxf /tmp/s6-overlay-amd64.tar.gz -C /usr ./bin/execlineb && $_clean 20 | 21 | echo "$S6_VERSION" > /rocker_scripts/.s6_version 22 | fi 23 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_shiny_server.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | SHINY_SERVER_VERSION=${1:-${SHINY_SERVER_VERSION:-latest}} 5 | 6 | # Run dependency scripts 7 | . /rocker_scripts/install_s6init.sh 8 | . 
/rocker_scripts/install_pandoc.sh 9 | 10 | if [ "$SHINY_SERVER_VERSION" = "latest" ]; then 11 | SHINY_SERVER_VERSION=$(wget -qO- https://download3.rstudio.org/ubuntu-14.04/x86_64/VERSION) 12 | fi 13 | 14 | # Get apt packages 15 | apt-get update 16 | apt-get install -y --no-install-recommends \ 17 | sudo \ 18 | gdebi-core \ 19 | libcurl4-gnutls-dev \ 20 | libcairo2-dev \ 21 | libxt-dev \ 22 | xtail \ 23 | wget 24 | 25 | # Install Shiny server 26 | wget --no-verbose "https://download3.rstudio.org/ubuntu-14.04/x86_64/shiny-server-${SHINY_SERVER_VERSION}-amd64.deb" -O ss-latest.deb 27 | gdebi -n ss-latest.deb 28 | rm ss-latest.deb 29 | 30 | # Get R packages 31 | install2.r --error --skipinstalled shiny rmarkdown 32 | 33 | # Set up directories and permissions 34 | if [ -x "$(command -v rstudio-server)" ]; then 35 | DEFAULT_USER=${DEFAULT_USER:-rstudio} 36 | adduser ${DEFAULT_USER} shiny 37 | fi 38 | 39 | cp -R /usr/local/lib/R/site-library/shiny/examples/* /srv/shiny-server/ 40 | chown shiny:shiny /var/lib/shiny-server 41 | mkdir -p /var/log/shiny-server 42 | chown shiny:shiny /var/log/shiny-server 43 | 44 | # create init scripts 45 | mkdir -p /etc/services.d/shiny-server 46 | cat > /etc/services.d/shiny-server/run << 'EOF' 47 | #!/usr/bin/with-contenv bash 48 | ## load /etc/environment vars first: 49 | for line in $( cat /etc/environment ) ; do export $line > /dev/null; done 50 | if [ "$APPLICATION_LOGS_TO_STDOUT" != "false" ]; then 51 | exec xtail /var/log/shiny-server/ & 52 | fi 53 | exec shiny-server 2>&1 54 | EOF 55 | chmod +x /etc/services.d/shiny-server/run 56 | 57 | # Clean up 58 | rm -rf /var/lib/apt/lists/* 59 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_tensorflow.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | TENSORFLOW_VERSION=${1:-${TENSORFLOW_VERSION:-default}} 5 | 
KERAS_VERSION=${2:-${KERAS_VERSION:-default}} 6 | 7 | ## Install python dependency 8 | /rocker_scripts/install_python.sh 9 | 10 | ## To support different version of TF, install to different virtualenvs 11 | TENSORFLOW_VENV=$PYTHON_VENV_PATH 12 | install2.r --error --skipinstalled keras 13 | Rscript -e "keras::install_keras(version = \"$KERAS_VERSION\", \ 14 | tensorflow = \"$TENSORFLOW_VERSION\", \ 15 | envname =\"$TENSORFLOW_VENV\")" 16 | 17 | rm -r /tmp/downloaded_packages 18 | 19 | chown -R 1000:1000 /opt/venv 20 | chmod -R 777 /opt/venv 21 | 22 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_texlive.sh: -------------------------------------------------------------------------------- 1 | echo 'selected_scheme scheme-infraonly 2 | TEXDIR /usr/local/texlive 3 | TEXMFCONFIG /opt/texlive/texmf-config 4 | TEXMFHOME /opt/texlive/texmf 5 | TEXMFLOCAL /opt/texlive/texmf-local 6 | TEXMFSYSCONFIG /opt/texlive/texmf-config 7 | TEXMFSYSVAR /opt/texlive/texmf-var 8 | TEXMFVAR /opt/texlive/texmf-var 9 | option_doc 0 10 | option_src 0' > /tmp/texlive-profile.txt 11 | 12 | CTAN_REPO=${CTAN_REPO:-http://mirror.ctan.org/systems/texlive/tlnet} 13 | export PATH=$PATH:/usr/local/texlive/bin/x86_64-linux/ 14 | 15 | mkdir -p /opt/texlive 16 | # set up packages 17 | apt-get update && apt-get -y install wget perl xzdec 18 | wget ${CTAN_REPO}/install-tl-unx.tar.gz 19 | tar -xzf install-tl-unx.tar.gz 20 | install-tl-20*/install-tl --profile=/tmp/texlive-profile.txt && \ 21 | rm -rf install-tl-* 22 | 23 | 24 | tlmgr update --self 25 | tlmgr install latex-bin luatex xetex 26 | tlmgr install ae bibtex context inconsolata listings makeindex metafont mfware parskip pdfcrop tex tools url xkeyval 27 | 28 | ## do not add to /usr/local/bin 29 | # tlmgr path add 30 | # instead, we keep binaries separate and add to PATH 31 | echo "PATH=${PATH}" >> ${R_HOME}/etc/Renviron 32 | 33 | ## open permissions to avoid needless 
warnings 34 | chown -R rstudio:staff /opt/texlive 35 | chown -R rstudio:staff /usr/local/texlive 36 | chmod -R 777 /opt/texlive 37 | chmod -R 777 /usr/local/texlive 38 | 39 | 40 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_tidyverse.sh: -------------------------------------------------------------------------------- 1 | 2 | #!/bin/bash 3 | 4 | ## build ARGs 5 | NCPUS=${NCPUS:-1} 6 | 7 | set -e 8 | apt-get update -qq && apt-get -y --no-install-recommends install \ 9 | libxml2-dev \ 10 | libcairo2-dev \ 11 | libgit2-dev \ 12 | default-libmysqlclient-dev \ 13 | libpq-dev \ 14 | libsasl2-dev \ 15 | libsqlite3-dev \ 16 | libssh2-1-dev \ 17 | unixodbc-dev && \ 18 | rm -rf /var/lib/apt/lists/* 19 | 20 | 21 | install2.r --error --skipinstalled -r $CRAN -n $NCPUS \ 22 | tidyverse \ 23 | devtools \ 24 | rmarkdown \ 25 | BiocManager \ 26 | vroom \ 27 | gert 28 | 29 | ## dplyr database backends 30 | install2.r --error --skipinstalled -r $CRAN -n $NCPUS \ 31 | arrow \ 32 | dbplyr \ 33 | DBI \ 34 | dtplyr \ 35 | nycflights13 \ 36 | Lahman \ 37 | RMariaDB \ 38 | RPostgres \ 39 | RSQLite \ 40 | fst 41 | 42 | ## a bridge to far? -- brings in another 60 packages 43 | # install2.r --error --skipinstalled -r $CRAN -n $NCPUS tidymodels 44 | 45 | rm -rf /tmp/downloaded_packages 46 | 47 | 48 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_verse.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # always set this for scripts but don't declare as ENV.. 
6 | export DEBIAN_FRONTEND=noninteractive 7 | 8 | 9 | export PATH=$PATH:/usr/local/texlive/bin/x86_64-linux/ 10 | 11 | apt-get update -qq \ 12 | && apt-get install -y --no-install-recommends \ 13 | cmake \ 14 | curl \ 15 | default-jdk \ 16 | fonts-roboto \ 17 | ghostscript \ 18 | hugo \ 19 | less \ 20 | libbz2-dev \ 21 | libglpk-dev \ 22 | libgmp3-dev \ 23 | libfribidi-dev \ 24 | libharfbuzz-dev \ 25 | libhunspell-dev \ 26 | libicu-dev \ 27 | liblzma-dev \ 28 | libmagick++-dev \ 29 | libopenmpi-dev \ 30 | libpcre2-dev \ 31 | libssl-dev \ 32 | libv8-dev \ 33 | libxml2-dev\ 34 | libxslt1-dev \ 35 | libzmq3-dev \ 36 | lsb-release \ 37 | qpdf \ 38 | texinfo \ 39 | software-properties-common \ 40 | vim \ 41 | wget 42 | 43 | # libgit2-dev also depends on the libcurl4-gnutils in bionic but not on focal 44 | # cran PPA is a super-stable solution to this 45 | UBUNTU_VERSION=${UBUNTU_VERSION:-`lsb_release -sc`} 46 | if [ ${UBUNTU_VERSION} == "bionic" ]; then 47 | add-apt-repository -y ppa:cran/travis 48 | fi 49 | 50 | 51 | # 52 | # librdf0-dev depends on libcurl4-gnutils-dev instead of libcurl4-openssl-dev... 53 | # So: we can build the redland package bindings and then swap back to libcurl-openssl-dev... (ick) 54 | # explicitly install runtime library sub-deps of librdf0-dev so they are not auto-removed. 
55 | apt-get install -y librdf0-dev 56 | install2.r --error --skipinstalled -r $CRAN redland 57 | apt-get install -y \ 58 | libcurl4-openssl-dev \ 59 | libxslt-dev \ 60 | librdf0 \ 61 | redland-utils \ 62 | rasqal-utils \ 63 | raptor2-utils \ 64 | && apt-get remove -y systemd \ 65 | && apt-get -y autoremove 66 | 67 | apt-get install -y libgit2-dev libcurl4-openssl-dev 68 | 69 | 70 | 71 | ## Add LaTeX, rticles and bookdown support 72 | wget "https://travis-bin.yihui.name/texlive-local.deb" \ 73 | && dpkg -i texlive-local.deb \ 74 | && rm texlive-local.deb 75 | 76 | 77 | ## Install texlive 78 | /rocker_scripts/install_texlive.sh 79 | 80 | install2.r --error -r $CRAN --skipinstalled tinytex 81 | install2.r --error --deps TRUE -r $CRAN --skipinstalled \ 82 | blogdown bookdown rticles rmdshower rJava xaringan 83 | 84 | rm -rf /tmp/downloaded_packages 85 | rm -rf /var/lib/apt/lists/* 86 | 87 | 88 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/install_wgrib2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ## https://www.cpc.ncep.noaa.gov/products/wesley/wgrib2/ 4 | 5 | apt-get update && apt-get -y install wget 6 | cd /opt 7 | wget https://www.ftp.cpc.ncep.noaa.gov/wd51we/wgrib2/wgrib2.tgz 8 | tar -xvf wgrib2.tgz 9 | rm -rf wgrib2.tgz 10 | cd grib2 11 | 12 | ## really someone needs to learn proper packaging conventions, but whatever 13 | CC=gcc FC=gfortran make 14 | ln -s /opt/grib2/wgrib2/wgrib2 /usr/local/bin/wgrib2 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/pam-helper.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | ## Enforces the custom password specified in the PASSWORD environment variable 4 | ## The accepted RStudio username is the same as the USER environment variable 
(i.e., local user name). 5 | 6 | set -o nounset 7 | 8 | IFS='' read -r password 9 | 10 | [ "${USER}" = "${1}" ] && [ "${PASSWORD}" = "${password}" ] 11 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/rsession.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | R_DOC_DIR=$R_HOME/doc 4 | R_INCLUDE_DIR=$R_HOME/include 5 | R_SHARE_DIR=$R_HOME/share 6 | RSTUDIO_DEFAULT_R_VERSION_HOME=$R_HOME 7 | RSTUDIO_DEFAULT_R_VERSION=$R_VERSION 8 | PATH=$PATH:/usr/lib/rstudio-server/bin 9 | rsession --standalone=1 \ 10 | --program-mode=server \ 11 | --log-stderr=1 \ 12 | --session-timeout-minutes=0 \ 13 | --user-identity=rstudio \ 14 | --www-port=8787 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /docimage-rstudio/rocker_scripts/userconf.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/with-contenv bash 2 | 3 | ###################################################################################### 4 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 5 | # SPDX-License-Identifier: MIT-0 6 | 7 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 8 | # software and associated documentation files (the "Software"), to deal in the Software 9 | # without restriction, including without limitation the rights to use, copy, modify, 10 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 11 | # permit persons to whom the Software is furnished to do so. 12 | 13 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 14 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 15 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 16 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 18 | # OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 19 | ###################################################################################### 20 | 21 | ## Set defaults for environmental variables in case they are undefined 22 | USER=${USER:=rstudio} 23 | 24 | PASSWORD=${RSTUDIO_PASS:=rstudio} 25 | USERID=${USERID:=1000} 26 | GROUPID=${GROUPID:=1000} 27 | ROOT=${ROOT:=FALSE} 28 | UMASK=${UMASK:=022} 29 | LANG=${LANG:=en_US.UTF-8} 30 | TZ=${TZ:=Etc/UTC} 31 | 32 | bold=$(tput bold) 33 | normal=$(tput sgr0) 34 | 35 | if [[ ${DISABLE_AUTH,,} == "true" ]] 36 | 37 | then 38 | mv /etc/rstudio/disable_auth_rserver.conf /etc/rstudio/rserver.conf 39 | echo "USER=$USER" >> /etc/environment 40 | fi 41 | 42 | if grep --quiet "auth-none=1" /etc/rstudio/rserver.conf 43 | then 44 | echo "Skipping authentication as requested" 45 | elif [ "$PASSWORD" == "rstudio" ] 46 | then 47 | printf "\n\n" 48 | tput bold 49 | printf "\e[31mERROR\e[39m: You must set a unique PASSWORD (not 'rstudio') first! e.g. run with:\n" 50 | printf "docker run -e PASSWORD=\e[92m\e[39m -p 8787:8787 rocker/rstudio\n" 51 | tput sgr0 52 | printf "\n\n" 53 | exit 1 54 | fi 55 | 56 | if [ "$USERID" -lt 1000 ] 57 | # Probably a macOS user, https://github.com/rocker-org/rocker/issues/205 58 | then 59 | echo "$USERID is less than 1000" 60 | check_user_id=$(grep -F "auth-minimum-user-id" /etc/rstudio/rserver.conf) 61 | if [[ ! -z $check_user_id ]] 62 | then 63 | echo "minumum authorised user already exists in /etc/rstudio/rserver.conf: $check_user_id" 64 | else 65 | echo "setting minumum authorised user to 499" 66 | echo auth-minimum-user-id=499 >> /etc/rstudio/rserver.conf 67 | fi 68 | fi 69 | 70 | if [ "$USERID" -ne 1000 ] 71 | ## Configure user with a different USERID if requested. 
72 | then 73 | echo "deleting user rstudio" 74 | userdel rstudio 75 | echo "creating new $USER with UID $USERID" 76 | useradd -m $USER -u $USERID 77 | mkdir -p /home/$USER 78 | chown -R $USER /home/$USER 79 | usermod -a -G staff $USER 80 | elif [ "$USER" != "rstudio" ] 81 | then 82 | ## cannot move home folder when it's a shared volume, have to copy and change permissions instead 83 | cp -r /home/rstudio /home/$USER 84 | ## RENAME the user 85 | usermod -l $USER -d /home/$USER rstudio 86 | groupmod -n $USER rstudio 87 | usermod -a -G staff $USER 88 | chown -R $USER:$USER /home/$USER 89 | echo "USER is now $USER" 90 | fi 91 | 92 | if [ "$GROUPID" -ne 1000 ] 93 | ## Configure the primary GID (whether rstudio or $USER) with a different GROUPID if requested. 94 | then 95 | echo "Modifying primary group $(id $USER -g -n)" 96 | groupmod -g $GROUPID $(id $USER -g -n) 97 | echo "Primary group ID is now custom_group $GROUPID" 98 | fi 99 | 100 | ## Add a password to user 101 | echo "$USER:$PASSWORD" | chpasswd 102 | 103 | # Use Env flag to know if user should be added to sudoers 104 | if [[ ${ROOT,,} == "true" ]] 105 | then 106 | adduser $USER sudo && echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers 107 | echo "$USER added to sudoers" 108 | fi 109 | 110 | ## Change Umask value if desired 111 | if [ "$UMASK" -ne 022 ] 112 | then 113 | echo "server-set-umask=false" >> /etc/rstudio/rserver.conf 114 | echo "Sys.umask(mode=$UMASK)" >> /home/$USER/.Rprofile 115 | fi 116 | 117 | ## Next one for timezone setup 118 | if [ "$TZ" != "Etc/UTC" ] 119 | then 120 | ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone 121 | fi 122 | 123 | ## Set our dynamic variables in Renviron.site to be reflected by RStudio 124 | exclude_vars="HOME PASSWORD RSTUDIO_PASS RSTUDIO_VERSION" 125 | for file in /var/run/s6/container_environment/* 126 | do 127 | sed -i "/^${file##*/}=/d" ${R_HOME}/etc/Renviron.site 128 | regex="(^| )${file##*/}($| )" 129 | [[ ! 
$exclude_vars =~ $regex ]] && echo "${file##*/}=$(cat $file)" >> ${R_HOME}/etc/Renviron.site || echo "skipping $file" 130 | done 131 | 132 | ## Update Locale if needed 133 | if [ "$LANG" != "en_US.UTF-8" ] 134 | then 135 | /usr/sbin/locale-gen --lang $LANG 136 | /usr/sbin/update-locale --reset LANG=$LANG 137 | fi 138 | 139 | ## only file-owner (root) should read container_environment files: 140 | chmod 600 /var/run/s6/container_environment/* 141 | 142 | chmod 775 /srv/shiny-server 143 | 144 | -------------------------------------------------------------------------------- /docimage-rstudio/user_scripts/config.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/with-contenv bash 2 | 3 | ###################################################################################### 4 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 5 | # SPDX-License-Identifier: MIT-0 6 | 7 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 8 | # software and associated documentation files (the "Software"), to deal in the Software 9 | # without restriction, including without limitation the rights to use, copy, modify, 10 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 11 | # permit persons to whom the Software is furnished to do so. 12 | 13 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 14 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 15 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 16 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 18 | # OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
19 | ###################################################################################### 20 | 21 | echo "Adding environment variables to R env for Athena integration..." 22 | 23 | echo "ATHENA_USER=${AWS_ACCESS_KEY_ID}" >> /usr/local/lib/R/etc/Renviron 24 | echo "ATHENA_PASSWORD=${AWS_ACCESS_KEY}" >> /usr/local/lib/R/etc/Renviron 25 | echo "S3_BUCKET=${AWS_S3_BUCKET}" >> /usr/local/lib/R/etc/Renviron 26 | echo "ATHENA_WG=${AWS_ATHENA_WG}" >> /usr/local/lib/R/etc/Renviron 27 | echo "JDBC_URL='jdbc:awsathena://athena.${AWS_REGION}.amazonaws.com:443/'" >> /usr/local/lib/R/etc/Renviron 28 | 29 | -------------------------------------------------------------------------------- /docimage-rstudio/user_scripts/install_packages.R: -------------------------------------------------------------------------------- 1 | ###################################################################################### 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | ###################################################################################### 18 | 19 | install.packages("rJava") 20 | install.packages("RJDBC") 21 | library(rJava) 22 | library(RJDBC) -------------------------------------------------------------------------------- /docimage-shiny/Dockerfile: -------------------------------------------------------------------------------- 1 | ###################################################################################### 2 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # SPDX-License-Identifier: MIT-0 4 | 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | # software and associated documentation files (the "Software"), to deal in the Software 7 | # without restriction, including without limitation the rights to use, copy, modify, 8 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so. 10 | 11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | # OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | ###################################################################################### 18 | 19 | FROM rocker/r-ver:4.1.0 20 | 21 | ENV S6_VERSION=v2.1.0.2 22 | ENV PATH=/usr/lib/rstudio-server/bin:$PATH 23 | ENV SHINY_SERVER_VERSION=latest 24 | ENV PANDOC_VERSION=default 25 | ENV ROOT=TRUE 26 | ENV AWS_ACCOUNT=${AWS_ACCOUNT} 27 | ENV AWS_REGION=${AWS_REGION} 28 | 29 | COPY ./docimage-shiny/rocker_scripts /rocker_scripts 30 | 31 | RUN chmod 755 /rocker_scripts/* 32 | 33 | RUN /rocker_scripts/install_pandoc.sh 34 | RUN /rocker_scripts/install_shiny_server.sh 35 | RUN /rocker_scripts/install_tidyverse.sh 36 | 37 | EXPOSE 3838 38 | 39 | CMD ["/init"] 40 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/add_ubuntugis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | UBUNTUGIS_VERSION=${1:-${UBUNTUGIS_VERSION:-stable}} 6 | 7 | ## Force installs from SOURCE if using RStudio Package Manager Repository 8 | CRAN=${CRAN/"__linux__/focal"/""} 9 | echo "options(repos = c(CRAN = '${CRAN}'))" >> ${R_HOME}/etc/Rprofile.site 10 | 11 | 12 | 13 | apt-get update \ 14 | && apt-get install -y --no-install-recommends \ 15 | software-properties-common \ 16 | vim \ 17 | wget \ 18 | ca-certificates \ 19 | && add-apt-repository --enable-source --yes "ppa:ubuntugis/ubuntugis-$UBUNTUGIS_VERSION" 20 | 21 | 22 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/bh-gdal.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -eu 3 | 4 | if [ "${GDAL_VERSION}" = "master" ]; then 5 | GDAL_VERSION=$(curl -Ls https://api.github.com/repos/OSGeo/gdal/commits/HEAD -H "Accept: application/vnd.github.VERSION.sha") 6 | export GDAL_VERSION 7 | GDAL_RELEASE_DATE=$(date "+%Y%m%d") 8 | export GDAL_RELEASE_DATE 9 | fi 10 | 11 | if [ -z 
"${GDAL_BUILD_IS_RELEASE:-}" ]; then 12 | export GDAL_SHA1SUM=${GDAL_VERSION} 13 | fi 14 | 15 | #mkdir gdal 16 | #wget -q "https://github.com/OSGeo/gdal/archive/${GDAL_VERSION}.tar.gz" \ 17 | # -O - | tar xz -C gdal --strip-components=1 18 | 19 | git clone https://github.com/OSGeo/gdal 20 | cd gdal/gdal 21 | git checkout v${GDAL_VERSION} . 22 | ./configure --prefix=/usr \ 23 | --without-libtool \ 24 | --with-jpeg12 \ 25 | --with-python \ 26 | --with-poppler \ 27 | --with-spatialite \ 28 | --with-mysql \ 29 | --with-liblzma \ 30 | --with-webp \ 31 | --with-epsilon \ 32 | --with-proj="${PROJ_INSTALL_PREFIX-/usr/local}" \ 33 | --with-poppler \ 34 | --with-hdf5 \ 35 | --with-dods-root=/usr \ 36 | --with-sosi \ 37 | --with-libtiff=internal \ 38 | --with-geotiff=internal \ 39 | --with-kea=/usr/bin/kea-config \ 40 | --with-mongocxxv3 \ 41 | --with-tiledb \ 42 | --with-crypto 43 | 44 | make "-j$(nproc)" 45 | make install 46 | 47 | cd / 48 | 49 | rm -rf gdal 50 | 51 | 52 | 53 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/bh-proj-gdal_only.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -eu 3 | 4 | mkdir proj 5 | wget -q "https://github.com/OSGeo/PROJ/archive/${PROJ_VERSION}.tar.gz" \ 6 | -O - | tar xz -C proj --strip-components=1 7 | 8 | ( 9 | cd proj 10 | 11 | ./autogen.sh 12 | 13 | if [ -n "${RSYNC_REMOTE:-}" ]; then 14 | echo "Downloading cache..." 
15 | rsync -ra "${RSYNC_REMOTE}/proj/" "$HOME/" 16 | echo "Finished" 17 | 18 | export CC="ccache gcc" 19 | export CXX="ccache g++" 20 | export PROJ_DB_CACHE_DIR="$HOME/.ccache" 21 | 22 | ccache -M 100M 23 | fi 24 | 25 | export CFLAGS="-DPROJ_RENAME_SYMBOLS -O2 -g" 26 | export CXXFLAGS="-DPROJ_RENAME_SYMBOLS -DPROJ_INTERNAL_CPP_NAMESPACE -O2 -g" 27 | export CFLAGS="-O2 -g" 28 | export CXXFLAGS="-O2 -g" 29 | 30 | 31 | ./configure "--prefix=${PROJ_INSTALL_PREFIX:-/usr/local}" 32 | 33 | make "-j$(nproc)" 34 | make install DESTDIR="/build" 35 | 36 | if [ -n "${RSYNC_REMOTE:-}" ]; then 37 | ccache -s 38 | 39 | echo "Uploading cache..." 40 | rsync -ra --delete "$HOME/.ccache" "${RSYNC_REMOTE}/proj/" 41 | echo "Finished" 42 | 43 | rm -rf "$HOME/.ccache" 44 | unset CC 45 | unset CXX 46 | fi 47 | ) 48 | 49 | rm -rf proj 50 | 51 | PROJ_SO=$(readlink "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so" | sed "s/libproj\.so\.//") 52 | PROJ_SO_FIRST=$(echo "$PROJ_SO" | awk 'BEGIN {FS="."} {print $1}') 53 | PROJ_SO_DEST="/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO}" 54 | 55 | mv "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO}" "${PROJ_SO_DEST}" 56 | 57 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO_FIRST}" 58 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so" 59 | 60 | rm "/build${PROJ_INSTALL_PREFIX}/lib"/libproj.* 61 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO_FIRST}" 62 | 63 | 64 | if [ "${WITH_DEBUG_SYMBOLS}" = "yes" ]; then 65 | # separate debug symbols 66 | mkdir -p "/build${PROJ_INSTALL_PREFIX}/lib/.debug/" "/build${PROJ_INSTALL_PREFIX}/bin/.debug/" 67 | 68 | DEBUG_SO="/build${PROJ_INSTALL_PREFIX}/lib/.debug/libinternalproj.so.${PROJ_SO}.debug" 69 | objcopy -v --only-keep-debug --compress-debug-sections "${PROJ_SO_DEST}" "${DEBUG_SO}" 70 | strip --strip-debug --strip-unneeded "${PROJ_SO_DEST}" 71 | objcopy 
--add-gnu-debuglink="${DEBUG_SO}" "${PROJ_SO_DEST}" 72 | 73 | for P in "/build${PROJ_INSTALL_PREFIX}/bin"/*; do 74 | if file -h "$P" | grep -qi elf; then 75 | F=$(basename "$P") 76 | DEBUG_P="/build${PROJ_INSTALL_PREFIX}/bin/.debug/${F}.debug" 77 | objcopy -v --only-keep-debug --strip-unneeded "$P" "${DEBUG_P}" 78 | strip --strip-debug --strip-unneeded "$P" 79 | objcopy --add-gnu-debuglink="${DEBUG_P}" "$P" 80 | fi 81 | done 82 | else 83 | strip -s "${PROJ_SO_DEST}" 84 | for P in "/build${PROJ_INSTALL_PREFIX}/bin"/*; do 85 | strip -s "$P" 2>/dev/null || /bin/true; 86 | done; 87 | fi 88 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/bh-proj.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -eu 3 | 4 | mkdir proj 5 | wget -q "https://github.com/OSGeo/PROJ/archive/${PROJ_VERSION}.tar.gz" \ 6 | -O - | tar xz -C proj --strip-components=1 7 | 8 | ( 9 | cd proj 10 | 11 | ./autogen.sh 12 | 13 | if [ -n "${RSYNC_REMOTE:-}" ]; then 14 | echo "Downloading cache..." 15 | rsync -ra "${RSYNC_REMOTE}/proj/" "$HOME/" 16 | echo "Finished" 17 | 18 | export CC="ccache gcc" 19 | export CXX="ccache g++" 20 | export PROJ_DB_CACHE_DIR="$HOME/.ccache" 21 | 22 | ccache -M 100M 23 | fi 24 | 25 | export CFLAGS="-DPROJ_RENAME_SYMBOLS -O2 -g" 26 | export CXXFLAGS="-DPROJ_RENAME_SYMBOLS -DPROJ_INTERNAL_CPP_NAMESPACE -O2 -g" 27 | 28 | ./configure "--prefix=${PROJ_INSTALL_PREFIX:-/usr/local}" --disable-static 29 | 30 | make "-j$(nproc)" 31 | make install DESTDIR="/build" 32 | 33 | if [ -n "${RSYNC_REMOTE:-}" ]; then 34 | ccache -s 35 | 36 | echo "Uploading cache..." 
37 | rsync -ra --delete "$HOME/.ccache" "${RSYNC_REMOTE}/proj/" 38 | echo "Finished" 39 | 40 | rm -rf "$HOME/.ccache" 41 | unset CC 42 | unset CXX 43 | fi 44 | ) 45 | 46 | rm -rf proj 47 | 48 | PROJ_SO=$(readlink "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so" | sed "s/libproj\.so\.//") 49 | PROJ_SO_FIRST=$(echo "$PROJ_SO" | awk 'BEGIN {FS="."} {print $1}') 50 | PROJ_SO_DEST="/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO}" 51 | 52 | mv "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO}" "${PROJ_SO_DEST}" 53 | 54 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so.${PROJ_SO_FIRST}" 55 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libinternalproj.so" 56 | 57 | rm "/build${PROJ_INSTALL_PREFIX}/lib"/libproj.* 58 | ln -s "libinternalproj.so.${PROJ_SO}" "/build${PROJ_INSTALL_PREFIX}/lib/libproj.so.${PROJ_SO_FIRST}" 59 | 60 | if [ "${WITH_DEBUG_SYMBOLS}" = "yes" ]; then 61 | # separate debug symbols 62 | mkdir -p "/build${PROJ_INSTALL_PREFIX}/lib/.debug/" "/build${PROJ_INSTALL_PREFIX}/bin/.debug/" 63 | 64 | DEBUG_SO="/build${PROJ_INSTALL_PREFIX}/lib/.debug/libinternalproj.so.${PROJ_SO}.debug" 65 | objcopy -v --only-keep-debug --compress-debug-sections "${PROJ_SO_DEST}" "${DEBUG_SO}" 66 | strip --strip-debug --strip-unneeded "${PROJ_SO_DEST}" 67 | objcopy --add-gnu-debuglink="${DEBUG_SO}" "${PROJ_SO_DEST}" 68 | 69 | for P in "/build${PROJ_INSTALL_PREFIX}/bin"/*; do 70 | if file -h "$P" | grep -qi elf; then 71 | F=$(basename "$P") 72 | DEBUG_P="/build${PROJ_INSTALL_PREFIX}/bin/.debug/${F}.debug" 73 | objcopy -v --only-keep-debug --strip-unneeded "$P" "${DEBUG_P}" 74 | strip --strip-debug --strip-unneeded "$P" 75 | objcopy --add-gnu-debuglink="${DEBUG_P}" "$P" 76 | fi 77 | done 78 | else 79 | strip -s "${PROJ_SO_DEST}" 80 | for P in "/build${PROJ_INSTALL_PREFIX}/bin"/*; do 81 | strip -s "$P" 2>/dev/null || /bin/true; 82 | done; 83 | fi 
-------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/config_R_cuda.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | ## CUDA environmental variables configuration for RStudio 5 | 6 | ## These should be exported as ENV vars too 7 | CUDA_HOME=${CUDA_HOME:-/usr/local/cuda} 8 | PATH={$PATH:-$PATH:$CUDA_HOME/bin} 9 | LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-$LD_LIBRARY_PATH:$CUDA_HOME/lib64:$CUDA_HOME/extras/CUPTI/lib64} 10 | NVBLAS_CONFIG_FILE=${NVBLAS_CONFIG_FILE:-/etc/nvblas.conf} 11 | 12 | ## cli R inherits these, but RStudio needs to have these set in as follows: 13 | ## (From https://tensorflow.rstudio.com/tools/local_gpu.html#environment-variables) 14 | echo "CUDA_HOME=$CUDA_HOME" >> ${R_HOME}/etc/Renviron 15 | echo "PATH=$PATH" >> ${R_HOME}/etc/Renviron 16 | 17 | if test -f /etc/rstudio/rserver.conf; then 18 | sed -i '/^rsession-ld-library-path/d' /etc/rstudio/rserver.conf 19 | echo "rsession-ld-library-path=$LD_LIBRARY_PATH" >> /etc/rstudio/rserver.conf 20 | fi 21 | 22 | 23 | ## nvblas configuration 24 | touch /var/log/nvblas.log && chown :staff /var/log/nvblas.log 25 | chmod a+rw /var/log/nvblas.log 26 | 27 | ## Configure R & RStudio to use drop-in CUDA blas 28 | ## Allow R to use CUDA for BLAS, with fallback on openblas 29 | echo 'NVBLAS_LOGFILE /var/log/nvblas.log 30 | NVBLAS_CPU_BLAS_LIB /usr/lib/x86_64-linux-gnu/openblas/libblas.so.3 31 | NVBLAS_GPU_LIST ALL' > /etc/nvblas.conf 32 | 33 | echo "NVBLAS_CONFIG_FILE=$NVBLAS_CONFIG_FILE" >> ${R_HOME}/etc/Renviron 34 | 35 | 36 | ## We don't want to set LD_PRELOAD globally 37 | ##ENV LD_PRELOAD=/usr/local/cuda/lib64/libnvblas.so 38 | # 39 | ### Instead, we will set it before calling R, Rscript, or RStudio: 40 | #mv /usr/bin/R /usr/bin/R_ 41 | #mv /usr/bin/Rscript /usr/bin/Rscript_ 42 | # 43 | #echo '\#!/bin/sh \ 44 | # \n LD_PRELOAD=/usr/local/cuda/lib64/libnvblas.so /usr/bin/R_ "$@"' \ 45 | 
# > /usr/bin/R && \ 46 | # chmod +x /usr/bin/R && \ 47 | # echo '#!/bin/sh \ 48 | # \n LD_PRELOAD=/usr/local/cuda/lib64/libnvblas.so /usr/bin/Rscript_ "$@"' \ 49 | # > /usr/bin/Rscript && \ 50 | # chmod +x /usr/bin/Rscript 51 | # 52 | #echo '#!/usr/bin/with-contenv bash \ 53 | # \n## load /etc/environment vars first: \ 54 | # \n for line in \$( cat /etc/environment ) ; do export $line ; done \ 55 | # \n export LD_PRELOAD=/usr/local/cuda/lib64/libnvblas.so \ 56 | # \n exec /usr/lib/rstudio-server/bin/rserver --server-daemonize 0' \ 57 | # > /etc/services.d/rstudio/run 58 | 59 | 60 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/default_user.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | if id -u "$user" >/dev/null 2>&1; then 5 | echo 'rstudio user already exists' 6 | else 7 | ## Need to configure non-root user for RStudio 8 | DEFAULT_USER=${1:-${DEFAULT_USER:-rstudio}} 9 | useradd $DEFAULT_USER 10 | echo "${DEFAULT_USER}:${DEFAULT_USER}" | chpasswd 11 | mkdir -p /home/${DEFAULT_USER} 12 | chown ${DEFAULT_USER}:${DEFAULT_USER} /home/${DEFAULT_USER} 13 | addgroup ${DEFAULT_USER} staff 14 | 15 | mkdir -p /home/${DEFAULT_USER}/.rstudio/monitored/user-settings 16 | echo "alwaysSaveHistory='0' \ 17 | \nloadRData='0' \ 18 | \nsaveAction='0'" \ 19 | > /home/${DEFAULT_USER}/.rstudio/monitored/user-settings/user-settings 20 | 21 | chown -R ${DEFAULT_USER}:${DEFAULT_USER} /home/${DEFAULT_USER} 22 | 23 | fi 24 | 25 | # If shiny server installed, make the user part of the shiny group 26 | if [ -x "$(command -v shiny-server)" ]; then 27 | adduser ${DEFAULT_USER} shiny 28 | fi 29 | 30 | ## configure git not to request password each time 31 | git config --system credential.helper 'cache --timeout=3600' 32 | git config --system push.default simple 33 | 34 | 35 | -------------------------------------------------------------------------------- 
/docimage-shiny/rocker_scripts/dev_osgeo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | CRAN=${CRAN_SOURCE:-https://cran.r-project.org} 6 | echo "options(repos = c(CRAN = '${CRAN}'), download.file.method = 'libcurl')" >> ${R_HOME}/etc/Rprofile.site 7 | 8 | 9 | 10 | export DEBIAN_FRONTEND=noninteractive; apt-get -y update \ 11 | && apt-get install -y \ 12 | gdb \ 13 | git \ 14 | libcairo2-dev \ 15 | libcurl4-openssl-dev \ 16 | libexpat1-dev \ 17 | libpq-dev \ 18 | libsqlite3-dev \ 19 | libudunits2-dev \ 20 | make \ 21 | pandoc \ 22 | qpdf \ 23 | sqlite3 \ 24 | subversion \ 25 | valgrind \ 26 | vim \ 27 | tk-dev \ 28 | wget 29 | 30 | apt-get install -y \ 31 | libv8-dev \ 32 | libjq-dev \ 33 | libprotobuf-dev \ 34 | libxml2-dev \ 35 | libprotobuf-dev \ 36 | protobuf-compiler \ 37 | unixodbc-dev \ 38 | libssh2-1-dev \ 39 | libgit2-dev \ 40 | libnetcdf-dev \ 41 | locales \ 42 | libssl-dev \ 43 | libtiff-dev 44 | 45 | locale-gen en_US.UTF-8 46 | 47 | PROJ_VERSION=${PROJ_VERSION:-7.2.0} 48 | LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH 49 | 50 | export DEBIAN_FRONTEND=noninteractive; apt-get -y update \ 51 | && apt-get install -y \ 52 | cmake \ 53 | libtiff5-dev 54 | 55 | #git clone --depth 1 https://github.com/OSGeo/PROJ.git 56 | wget http://download.osgeo.org/proj/proj-$PROJ_VERSION.tar.gz 57 | tar zxvf proj-${PROJ_VERSION}.tar.gz 58 | cd proj-${PROJ_VERSION} \ 59 | && ls -l \ 60 | && mkdir build \ 61 | && cd build \ 62 | && cmake .. \ 63 | && make \ 64 | && make install \ 65 | && cd ../.. 
\ 66 | && ldconfig 67 | 68 | # install proj-data: 69 | #cd /usr/local/share/proj \ 70 | # && wget http://download.osgeo.org/proj/proj-data-1.1RC1.zip \ 71 | # && unzip -o proj-data*zip \ 72 | # && rm proj-data*zip \ 73 | # && cd - 74 | 75 | # GDAL: 76 | 77 | # https://download.osgeo.org/gdal/ 78 | GDAL_VERSION=${GDAL_VERSION:-3.2.0} 79 | GDAL_VERSION_NAME=${GDAL_VERSION} 80 | 81 | wget http://download.osgeo.org/gdal/${GDAL_VERSION}/gdal-${GDAL_VERSION_NAME}.tar.gz \ 82 | && tar -xf gdal-${GDAL_VERSION_NAME}.tar.gz \ 83 | && rm *.tar.gz \ 84 | && cd gdal* \ 85 | && ./configure \ 86 | && make \ 87 | && make install \ 88 | && cd .. \ 89 | && ldconfig 90 | 91 | #git clone --depth 1 https://github.com/OSGeo/gdal.git 92 | #cd gdal/gdal \ 93 | # && ls -l \ 94 | # && ./configure \ 95 | # && make \ 96 | # && make install \ 97 | # && cd .. \ 98 | # && ldconfig 99 | 100 | # GEOS: 101 | GEOS_VERSION=${GEOS_VERSION:-3.8.1} 102 | 103 | wget http://download.osgeo.org/geos/geos-${GEOS_VERSION}.tar.bz2 \ 104 | && bzip2 -d geos-*bz2 \ 105 | && tar xf geos*tar \ 106 | && rm *.tar \ 107 | && cd geos* \ 108 | && ./configure \ 109 | && make \ 110 | && make install \ 111 | && cd .. 
\ 112 | && ldconfig 113 | 114 | # svn checkout svn://scm.r-forge.r-project.org/svnroot/rgdal/ 115 | # R CMD build rgdal/pkg --no-build-vignettes 116 | # R CMD INSTALL rgdal_*.tar.gz 117 | 118 | Rscript -e 'install.packages(c("sp", "rgeos", "rgdal", "RPostgreSQL", "RSQLite", "testthat", "knitr", "tidyr", "geosphere", "maptools", "maps", "microbenchmark", "raster", "dplyr", "tibble", "units", "DBI", "covr", "protolite", "tmap", "mapview", "odbc", "pool", "rmarkdown", "RPostgres","spatstat", "stars"))' 119 | 120 | git clone --depth 10 https://github.com/r-spatial/sf.git 121 | git clone --depth 10 https://github.com/r-spatial/lwgeom.git 122 | git clone --depth 10 https://github.com/r-spatial/stars.git 123 | #git config --global user.email "edzer.pebesma@uni-muenster.de" 124 | 125 | R CMD build --no-build-vignettes --no-manual lwgeom 126 | (cd sf; git pull) 127 | R CMD build --no-build-vignettes --no-manual sf 128 | # pkg-config proj --modversion 129 | R CMD INSTALL sf 130 | R CMD INSTALL lwgeom 131 | R CMD build --no-build-vignettes --no-manual stars 132 | R CMD INSTALL stars 133 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/experimental/cuda10.2-tf.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ## not sure why cuda-cudart-dev-10-1 when this is 10.2 and we already have 10.2... 
4 | 5 | sudo apt update && \ 6 | sudo apt install \ 7 | libnvinfer-dev \ 8 | cuda-cudart-dev-10-1 9 | 10 | 11 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/experimental/install_R_binary.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | UBUNTU_VERSION=${UBUNTU_VERSION:-focal} 5 | CRAN_LINUX_VERSION=${CRAN_LINUX_VERSION:-cran40} 6 | LANG=${LANG:-en_US.UTF-8} 7 | LC_ALL=${LC_ALL:-en_US.UTF-8} 8 | 9 | 10 | DEBIAN_FRONTEND=noninteractive 11 | 12 | # Set up and install R 13 | R_HOME=${R_HOME:-/usr/lib/R} 14 | 15 | #R_VERSION=${R_VERSION} 16 | 17 | 18 | apt-get update 19 | 20 | apt-get -y install --no-install-recommends \ 21 | ca-certificates \ 22 | less \ 23 | libopenblas-base \ 24 | locales \ 25 | vim-tiny \ 26 | wget \ 27 | dirmngr \ 28 | gpg \ 29 | gpg-agent 30 | 31 | echo "deb http://cloud.r-project.org/bin/linux/ubuntu ${UBUNTU_VERSION}-${CRAN_LINUX_VERSION}/" >> /etc/apt/sources.list 32 | 33 | gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys E298A3A825C0D65DFD57CBB651716619E084DAB9 34 | gpg -a --export E298A3A825C0D65DFD57CBB651716619E084DAB9 | apt-key add - 35 | 36 | 37 | # Wildcard * at end of version will grab (latest) patch of requested version 38 | apt-get update && apt-get -y install --no-install-recommends r-base-dev=${R_VERSION}* 39 | 40 | 41 | 42 | rm -rf /var/lib/apt/lists/* 43 | 44 | ## Add PPAs: NOTE this will mean that installing binary R packages won't be version stable. 45 | ## 46 | ## These are required at least for bionic-based images since 3.4 r binaries are 47 | 48 | 49 | echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen 50 | locale-gen en_US.utf8 51 | /usr/sbin/update-locale LANG=${LANG} 52 | 53 | Rscript -e "install.packages(c('littler', 'docopt'))" 54 | 55 | ## By default R_LIBS_SITE is unset, and defaults to this, so this is where `littler` will be. 
56 | ## We set it here for symlinks, but don't make the env var persist (since it's already the default) 57 | R_LIBS_SITE=/usr/local/lib/R/site-library 58 | ln -s ${R_LIBS_SITE}/littler/examples/install.r /usr/local/bin/install.r 59 | ln -s ${R_LIBS_SITE}/littler/examples/install2.r /usr/local/bin/install2.r 60 | ln -s ${R_LIBS_SITE}/littler/examples/installGithub.r /usr/local/bin/installGithub.r 61 | ln -s ${R_LIBS_SITE}/littler/bin/r /usr/local/bin/r 62 | 63 | 64 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/experimental/install_rl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | python -m venv /opt/venv/rl 5 | . /opt/venv/rl/bin/activate 6 | 7 | pip install wheel 8 | pip install gym tensorflow keras keras-rl 9 | 10 | chown -R :staff /opt/venv/rl 11 | chmod g+rx /opt/venv/rl 12 | 13 | 14 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_R.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | apt-get update && apt-get -y install lsb-release 5 | 6 | UBUNTU_VERSION=${UBUNTU_VERSION:-`lsb_release -sc`} 7 | LANG=${LANG:-en_US.UTF-8} 8 | LC_ALL=${LC_ALL:-en_US.UTF-8} 9 | CRAN=${CRAN:-https://cran.r-project.org} 10 | 11 | ## mechanism to force source installs if we're using RSPM 12 | CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION"/""} 13 | 14 | export DEBIAN_FRONTEND=noninteractive 15 | 16 | # Set up and install R 17 | R_HOME=${R_HOME:-/usr/local/lib/R} 18 | 19 | 20 | 21 | READLINE_VERSION=8 22 | OPENBLAS=libopenblas-dev 23 | if [ ${UBUNTU_VERSION} == "bionic" ]; then 24 | READLINE_VERSION=7 25 | OPENBLAS=libopenblas-dev 26 | fi 27 | 28 | apt-get update \ 29 | && apt-get install -y --no-install-recommends \ 30 | bash-completion \ 31 | ca-certificates \ 32 | devscripts \ 33 | file \ 34 | fonts-texgyre 
\ 35 | g++ \ 36 | gfortran \ 37 | gsfonts \ 38 | libblas-dev \ 39 | libbz2-* \ 40 | libcurl4 \ 41 | libicu* \ 42 | libpcre2* \ 43 | libjpeg-turbo* \ 44 | ${OPENBLAS} \ 45 | libpangocairo-* \ 46 | libpng16* \ 47 | libreadline${READLINE_VERSION} \ 48 | libtiff* \ 49 | liblzma* \ 50 | locales \ 51 | make \ 52 | unzip \ 53 | zip \ 54 | zlib1g 55 | 56 | echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen 57 | locale-gen en_US.utf8 58 | /usr/sbin/update-locale LANG=en_US.UTF-8 59 | 60 | BUILDDEPS="curl \ 61 | default-jdk \ 62 | libbz2-dev \ 63 | libcairo2-dev \ 64 | libcurl4-openssl-dev \ 65 | libpango1.0-dev \ 66 | libjpeg-dev \ 67 | libicu-dev \ 68 | libpcre2-dev \ 69 | libpng-dev \ 70 | libreadline-dev \ 71 | libtiff5-dev \ 72 | liblzma-dev \ 73 | libx11-dev \ 74 | libxt-dev \ 75 | perl \ 76 | rsync \ 77 | subversion \ 78 | tcl-dev \ 79 | tk-dev \ 80 | texinfo \ 81 | texlive-extra-utils \ 82 | texlive-fonts-recommended \ 83 | texlive-fonts-extra \ 84 | texlive-latex-recommended \ 85 | texlive-latex-extra \ 86 | x11proto-core-dev \ 87 | xauth \ 88 | xfonts-base \ 89 | xvfb \ 90 | wget \ 91 | zlib1g-dev" 92 | 93 | apt-get install -y --no-install-recommends $BUILDDEPS 94 | 95 | 96 | if [[ "$R_VERSION" == "devel" ]]; then \ 97 | wget https://stat.ethz.ch/R/daily/R-devel.tar.gz; \ 98 | elif [[ "$R_VERSION" == "patched" ]]; then \ 99 | wget https://stat.ethz.ch/R/daily/R-patched.tar.gz; \ 100 | else \ 101 | wget https://cran.r-project.org/src/base/R-3/R-${R_VERSION}.tar.gz || \ 102 | wget https://cran.r-project.org/src/base/R-4/R-${R_VERSION}.tar.gz; \ 103 | fi && \ 104 | tar xzf R-${R_VERSION}.tar.gz && 105 | 106 | cd R-${R_VERSION} 107 | R_PAPERSIZE=letter \ 108 | R_BATCHSAVE="--no-save --no-restore" \ 109 | R_BROWSER=xdg-open \ 110 | PAGER=/usr/bin/pager \ 111 | PERL=/usr/bin/perl \ 112 | R_UNZIPCMD=/usr/bin/unzip \ 113 | R_ZIPCMD=/usr/bin/zip \ 114 | R_PRINTCMD=/usr/bin/lpr \ 115 | LIBnn=lib \ 116 | AWK=/usr/bin/awk \ 117 | CFLAGS="-g -O2 -fstack-protector-strong -Wformat 
-Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g" \ 118 | CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g" \ 119 | ./configure --enable-R-shlib \ 120 | --enable-memory-profiling \ 121 | --with-readline \ 122 | --with-blas \ 123 | --with-lapack \ 124 | --with-tcltk \ 125 | --disable-nls \ 126 | --with-recommended-packages 127 | make 128 | make install 129 | make clean 130 | 131 | ## Add a default CRAN mirror 132 | echo "options(repos = c(CRAN = '${CRAN}'), download.file.method = 'libcurl')" >> ${R_HOME}/etc/Rprofile.site 133 | 134 | ## Set HTTPUserAgent for RSPM (https://github.com/rocker-org/rocker/issues/400) 135 | echo 'options(HTTPUserAgent = sprintf("R/%s R (%s)", getRversion(), 136 | paste(getRversion(), R.version$platform, 137 | R.version$arch, R.version$os)))' >> ${R_HOME}/etc/Rprofile.site 138 | 139 | 140 | ## Add a library directory (for user-installed packages) 141 | mkdir -p ${R_HOME}/site-library 142 | chown root:staff ${R_HOME}/site-library 143 | chmod g+ws ${R_HOME}/site-library 144 | 145 | ## Fix library path 146 | echo "R_LIBS=\${R_LIBS-'${R_HOME}/site-library:${R_HOME}/library'}" >> ${R_HOME}/etc/Renviron 147 | echo "TZ=${TZ}" >> ${R_HOME}/etc/Renviron 148 | 149 | ## Use littler installation scripts 150 | Rscript -e "install.packages(c('littler', 'docopt'), repos='${CRAN_SOURCE}')" 151 | ln -s ${R_HOME}/site-library/littler/examples/install2.r /usr/local/bin/install2.r 152 | ln -s ${R_HOME}/site-library/littler/examples/installGithub.r /usr/local/bin/installGithub.r 153 | ln -s ${R_HOME}/site-library/littler/bin/r /usr/local/bin/r 154 | 155 | 156 | ## Clean up from R source install 157 | cd / 158 | rm -rf /tmp/* 159 | rm -rf R-${R_VERSION} 160 | rm -rf R-${R_VERSION}.tar.gz 161 | apt-get remove --purge -y $BUILDDEPS 162 | apt-get autoremove -y 163 | apt-get autoclean -y 164 | rm -rf /var/lib/apt/lists/* 165 | 166 | 167 | 
-------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_R_ppa.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | UBUNTU_VERSION=${UBUNTU_VERSION:-focal} 5 | CRAN_LINUX_VERSION=${CRAN_LINUX_VERSION:-cran40} 6 | LANG=${LANG:-en_US.UTF-8} 7 | LC_ALL=${LC_ALL:-en_US.UTF-8} 8 | 9 | 10 | DEBIAN_FRONTEND=noninteractive 11 | 12 | # Set up and install R 13 | R_HOME=${R_HOME:-/usr/lib/R} 14 | 15 | #R_VERSION=${R_VERSION} 16 | 17 | 18 | apt-get update 19 | 20 | apt-get -y install --no-install-recommends \ 21 | ca-certificates \ 22 | less \ 23 | libopenblas-base \ 24 | locales \ 25 | vim-tiny \ 26 | wget \ 27 | dirmngr \ 28 | gpg \ 29 | gpg-agent 30 | 31 | echo "deb http://cloud.r-project.org/bin/linux/ubuntu ${UBUNTU_VERSION}-${CRAN_LINUX_VERSION}/" >> /etc/apt/sources.list 32 | 33 | gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys E298A3A825C0D65DFD57CBB651716619E084DAB9 34 | gpg -a --export E298A3A825C0D65DFD57CBB651716619E084DAB9 | apt-key add - 35 | 36 | 37 | # Wildcard * at end of version will grab (latest) patch of requested version 38 | apt-get update && apt-get -y install --no-install-recommends r-base-dev=${R_VERSION}* 39 | 40 | 41 | 42 | rm -rf /var/lib/apt/lists/* 43 | 44 | ## Add PPAs: NOTE this will mean that installing binary R packages won't be version stable. 45 | ## 46 | ## These are required at least for bionic-based images since 3.4 r binaries are 47 | 48 | 49 | echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen 50 | locale-gen en_US.utf8 51 | /usr/sbin/update-locale LANG=${LANG} 52 | 53 | Rscript -e "install.packages(c('littler', 'docopt'))" 54 | 55 | ## By default R_LIBS_SITE is unset, and defaults to this, so this is where `littler` will be. 
56 | ## We set it here for symlinks, but don't make the env var persist (since it's already the default) 57 | R_LIBS_SITE=/usr/local/lib/R/site-library 58 | ln -s ${R_LIBS_SITE}/littler/examples/install.r /usr/local/bin/install.r 59 | ln -s ${R_LIBS_SITE}/littler/examples/install2.r /usr/local/bin/install2.r 60 | ln -s ${R_LIBS_SITE}/littler/examples/installGithub.r /usr/local/bin/installGithub.r 61 | ln -s ${R_LIBS_SITE}/littler/bin/r /usr/local/bin/r 62 | 63 | 64 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_binder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | RSTUDIO_VERSION=1.3.959 /rocker_scripts/install_rstudio.sh 5 | 6 | ## NOTE: this runs as user NB_USER! 7 | PYTHON_VENV_PATH=${PYTHON_VENV_PATH:-/opt/venv/reticulate} 8 | NB_USER=${NB_USER:-rstudio} 9 | NB_UID=${NB_UID:-1000} 10 | WORKDIR=${WORKDIR:-/home/${NB_USER}} 11 | usermod -l ${NB_USER} rstudio 12 | # Create a venv dir owned by unprivileged user & set up notebook in it 13 | # This allows non-root to install python libraries if required 14 | mkdir -p ${PYTHON_VENV_PATH} && chown -R ${NB_USER} ${PYTHON_VENV_PATH} 15 | 16 | # And set ENV for R! It doesn't read from the environment... 
17 | echo "PATH=${PATH}" >> ${R_HOME}/etc/Renviron 18 | echo "export PATH=${PATH}" >> ${WORKDIR}/.profile 19 | 20 | ## This gets run as user 21 | su ${NB_USER} 22 | cd ${WORKDIR} 23 | python3 -m venv ${PYTHON_VENV_PATH} 24 | pip3 install --no-cache-dir jupyter-rsession-proxy 25 | 26 | R --quiet -e "devtools::install_github('IRkernel/IRkernel')" 27 | R --quiet -e "IRkernel::installspec(prefix='${PYTHON_VENV_PATH}')" 28 | 29 | 30 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_cuda-10.1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | apt-get update && apt-get install -y --no-install-recommends \ 4 | gnupg2 curl ca-certificates && \ 5 | curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/7fa2af80.pub | apt-key add - && \ 6 | echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64 /" > /etc/apt/sources.list.d/cuda.list && \ 7 | echo "deb https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64 /" > /etc/apt/sources.list.d/nvidia-ml.list && \ 8 | apt-get purge --autoremove -y curl && \ 9 | rm -rf /var/lib/apt/lists/* 10 | 11 | CUDA_VERSION=${CUDA_VERSION:-10.1.243} 12 | CUDA_PKG_VERSION=${CUDA_PKG_VERSION:-10-1=$CUDA_VERSION-1} 13 | 14 | # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a 15 | apt-get update && apt-get install -y --no-install-recommends \ 16 | cuda-cudart-$CUDA_PKG_VERSION \ 17 | cuda-compat-10-1 && \ 18 | ln -s cuda-10.1 /usr/local/cuda && \ 19 | rm -rf /var/lib/apt/lists/* 20 | 21 | # Required for nvidia-docker v1 22 | echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \ 23 | echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf 24 | 25 | 26 | ## PATH & LD_LIBRARY_PATH are wrong? 
27 | PATH=${PATH:-/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH}} 28 | LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-/usr/local/nvidia/lib:/usr/local/nvidia/lib64} 29 | 30 | 31 | # nvidia-container-runtime 32 | NVIDIA_VISIBLE_DEVICES=${NVIDIA_VISIBLE_DEVICES:-all} 33 | NVIDIA_DRIVER_CAPABILITIES=${NVIDIA_DRIVER_CAPABILITIES:-"compute,utility"} 34 | NVIDIA_REQUIRE_CUDA=${NVIDIA_REQUIRE_CUDA:-"cuda>=10.1 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=396,driver<397 brand=tesla,driver>=410,driver<411"} 35 | 36 | 37 | ## Tensorflow config for cuda runtime. 38 | ## Adapted from: 39 | 40 | ARCH= 41 | CUDA=10.1 42 | CUDNN=7.6.4.38-1 43 | CUDNN_MAJOR_VERSION=7 44 | LIB_DIR_PREFIX=x86_64 45 | LIBNVINFER=6.0.1-1 46 | LIBNVINFER_MAJOR_VERSION=6 47 | 48 | #SHELL ["/bin/bash", "-c"] 49 | # Pick up some TF dependencies 50 | # There appears to be a regression in libcublas10=10.2.2.89-1 which 51 | # prevents cublas from initializing in TF. See 52 | # https://github.com/tensorflow/tensorflow/issues/9489#issuecomment-562394257 53 | apt-get update && apt-get install -y --no-install-recommends --allow-downgrades \ 54 | build-essential \ 55 | cuda-command-line-tools-10-1 \ 56 | libcublas10=10.2.1.243-1 \ 57 | cuda-nvrtc-10-1 \ 58 | cuda-cufft-10-1 \ 59 | cuda-curand-10-1 \ 60 | cuda-cusolver-10-1 \ 61 | cuda-cusparse-10-1 \ 62 | curl \ 63 | libcudnn7=${CUDNN}+cuda10.1 \ 64 | libfreetype6-dev \ 65 | libhdf5-serial-dev \ 66 | libzmq3-dev \ 67 | pkg-config \ 68 | software-properties-common \ 69 | unzip 70 | 71 | # Install TensorRT if not building for PowerPC 72 | apt-get update && \ 73 | apt-get install -y --no-install-recommends libnvinfer${LIBNVINFER_MAJOR_VERSION}=${LIBNVINFER}+cuda${CUDA} \ 74 | libnvinfer-plugin${LIBNVINFER_MAJOR_VERSION}=${LIBNVINFER}+cuda${CUDA} \ 75 | && apt-get clean \ 76 | && rm -rf /var/lib/apt/lists/* 77 | 78 | # For CUDA profiling, TensorFlow requires CUPTI. 
79 | LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-/usr/local/cuda/extras/CUPTI/lib64:/usr/local/cuda/lib64:$LD_LIBRARY_PATH} 80 | 81 | # Link the libcuda stub to the location where tensorflow is searching for it and reconfigure 82 | # dynamic linker run-time bindings 83 | ln -s /usr/local/cuda/lib64/stubs/libcuda.so /usr/local/cuda/lib64/stubs/libcuda.so.1 84 | echo "/usr/local/cuda/lib64/stubs" > /etc/ld.so.conf.d/z-cuda-stubs.conf 85 | ldconfig 86 | 87 | ## Add nvtop 88 | #/rocker_scripts/install_nvtop.sh 89 | 90 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_cuda-11.1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | apt-get update && apt-get install -y --no-install-recommends \ 4 | gnupg2 curl ca-certificates && \ 5 | curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/7fa2af80.pub | apt-key add - && \ 6 | echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/cuda.list && \ 7 | echo "deb https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/nvidia-ml.list && \ 8 | apt-get purge --autoremove -y curl \ 9 | && rm -rf /var/lib/apt/lists/* 10 | 11 | CUDA_VERSION=${CUDA_VERSION:-11.1.1} 12 | 13 | # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a 14 | apt-get update && apt-get install -y --no-install-recommends \ 15 | cuda-cudart-11-1=11.1.74-1 \ 16 | cuda-compat-11-1 \ 17 | && ln -s cuda-11.1 /usr/local/cuda && \ 18 | rm -rf /var/lib/apt/lists/* 19 | 20 | # Required for nvidia-docker v1 21 | echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \ 22 | echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf 23 | 24 | 25 | ## Set all of these as global ENV 26 | # PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH} 27 | 
# LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 28 | 29 | # NVIDIA_VISIBLE_DEVICES=all 30 | #NVIDIA_DRIVER_CAPABILITIES="compute,utility" 31 | # NVIDIA_REQUIRE_CUDA="cuda>=11.1 brand=tesla,driver>=418,driver<419 brand=tesla,driver>=440,driver<441 brand=tesla,driver>=450,driver<451" 32 | 33 | 34 | ## runtime #################################################### 35 | ##FROM ${IMAGE_NAME}:11.1-base-ubuntu20.04 36 | 37 | NCCL_VERSION=${NCCL_VERSION:-2.7.8} 38 | 39 | apt-get update && apt-get install -y --no-install-recommends \ 40 | cuda-libraries-11-1=11.1.1-1 \ 41 | libnpp-11-1=11.1.2.301-1 \ 42 | cuda-nvtx-11-1=11.1.74-1 \ 43 | libcublas-11-1=11.3.0.106-1 \ 44 | libnccl2=$NCCL_VERSION-1+cuda11.1 \ 45 | && apt-mark hold libnccl2 \ 46 | && rm -rf /var/lib/apt/lists/* 47 | 48 | 49 | ## devel ####################################################### 50 | 51 | apt-get update && apt-get install -y --no-install-recommends \ 52 | cuda-nvml-dev-11-1=11.1.74-1 \ 53 | cuda-command-line-tools-11-1=11.1.1-1 \ 54 | cuda-nvprof-11-1=11.1.105-1 \ 55 | libnpp-dev-11-1=11.1.2.301-1 \ 56 | cuda-libraries-dev-11-1=11.1.1-1 \ 57 | cuda-minimal-build-11-1=11.1.1-1 \ 58 | libnccl-dev=2.7.8-1+cuda11.1 \ 59 | libcublas-dev-11-1=11.3.0.106-1 \ 60 | libcusparse-11-1=11.3.0.10-1 \ 61 | libcusparse-dev-11-1=11.3.0.10-1 \ 62 | && apt-mark hold libnccl-dev \ 63 | && rm -rf /var/lib/apt/lists/* 64 | 65 | LIBRARY_PATH=/usr/local/cuda/lib64/stubs 66 | 67 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_geospatial.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # always set this for scripts but don't declare as ENV.. 
6 | export DEBIAN_FRONTEND=noninteractive 7 | 8 | apt-get update -qq \ 9 | && apt-get install -y --no-install-recommends \ 10 | gdal-bin \ 11 | lbzip2 \ 12 | libfftw3-dev \ 13 | libgdal-dev \ 14 | libgeos-dev \ 15 | libgsl0-dev \ 16 | libgl1-mesa-dev \ 17 | libglu1-mesa-dev \ 18 | libhdf4-alt-dev \ 19 | libhdf5-dev \ 20 | libjq-dev \ 21 | libpq-dev \ 22 | libproj-dev \ 23 | libprotobuf-dev \ 24 | libnetcdf-dev \ 25 | libsqlite3-dev \ 26 | libssl-dev \ 27 | libudunits2-dev \ 28 | lsb-release \ 29 | netcdf-bin \ 30 | postgis \ 31 | protobuf-compiler \ 32 | sqlite3 \ 33 | tk-dev \ 34 | unixodbc-dev 35 | 36 | # lwgeom 0.2-2 and 0.2-3 have a regression which prevents install on ubuntu:bionic 37 | ## permissionless PAT for builds 38 | UBUNTU_VERSION=${UBUNTU_VERSION:-`lsb_release -sc`} 39 | 40 | if [ ${UBUNTU_VERSION} == "bionic" ]; then 41 | R -e "Sys.setenv(GITHUB_PAT='0e7777db4b3bb48acb542b8912a989b8047f6351'); remotes::install_github('r-spatial/lwgeom')" 42 | fi 43 | 44 | 45 | ## Somehow foreign is messed up on CRAN between 2020-04-25 -- 2020-05-0? 
46 | ##install2.r --error --skipinstalled --repo https://mran.microsoft.com/snapshot/2020-04-24 foreign 47 | 48 | install2.r --error --skipinstalled \ 49 | RColorBrewer \ 50 | RandomFields \ 51 | RNetCDF \ 52 | classInt \ 53 | deldir \ 54 | gstat \ 55 | hdf5r \ 56 | lidR \ 57 | mapdata \ 58 | maptools \ 59 | mapview \ 60 | ncdf4 \ 61 | proj4 \ 62 | raster \ 63 | rgdal \ 64 | rgeos \ 65 | rlas \ 66 | sf \ 67 | sp \ 68 | spacetime \ 69 | spatstat \ 70 | spatialreg \ 71 | spdep \ 72 | stars \ 73 | tidync \ 74 | tmap \ 75 | geoR \ 76 | geosphere 77 | 78 | R -e "BiocManager::install('rhdf5')" 79 | 80 | ## install wgrib2 for NOAA's NOMADS / rNOMADS forecast files 81 | /rocker_scripts/install_wgrib2.sh 82 | 83 | rm -r /tmp/downloaded_packages 84 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_geospatial_unstable.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # always set this for scripts but don't declare as ENV.. 
6 | export DEBIAN_FRONTEND=noninteractive 7 | apt-get update && apt-get install -y --no-install-recommends \ 8 | gnupg2 curl ca-certificates 9 | 10 | apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 6B827C12C2D425E227EDCA75089EBE08314DF160 11 | echo "deb http://ppa.launchpad.net/ubuntugis/ubuntugis-unstable/ubuntu focal main" >> /etc/apt/sources.list.d/ubuntugis.list 12 | echo "deb-src http://ppa.launchpad.net/ubuntugis/ubuntugis-unstable/ubuntu focal main" >> /etc/apt/sources.list.d/ubuntugis.list 13 | rm -rf /var/lib/apt/lists/* 14 | 15 | 16 | 17 | ## in UNSTABLE, we will install everything from source by default: 18 | CRAN=https://cran.r-project.org 19 | ## Add a default CRAN mirror 20 | echo "options(repos = c(CRAN = '${CRAN}'), download.file.method = 'libcurl')" >> ${R_HOME}/etc/Rprofile.site 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_nvtop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | apt-get update && apt-get -y install cmake libncurses5-dev libncursesw5-dev git 5 | git clone https://github.com/Syllo/nvtop.git 6 | mkdir -p nvtop/build && cd nvtop/build 7 | cmake .. 
-DNVML_RETRIEVE_HEADER_ONLINE=True 8 | make 9 | make install 10 | 11 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_pandoc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | # Note that 'default' pandoc version means the version bundled with RStudio 5 | # if RStudio is installed , but latest otherwise 6 | 7 | PANDOC_VERSION=${1:-${PANDOC_VERSION:-default}} 8 | 9 | apt-get update && apt-get -y install wget 10 | 11 | if [ -x "$(command -v pandoc)" ]; then 12 | INSTALLED_PANDOC=$(pandoc --version 2>/dev/null | head -n 1 | grep -oP '[\d\.]+$') 13 | fi 14 | 15 | if [ "$INSTALLED_PANDOC" != "$PANDOC_VERSION" ]; then 16 | 17 | if [ -f "/usr/lib/rstudio-server/bin/pandoc/pandoc" ] && 18 | { [ "$PANDOC_VERSION" = "$(/usr/lib/rstudio-server/bin/pandoc/pandoc --version | head -n 1 | grep -oP '[\d\.]+$')" ] || 19 | [ "$PANDOC_VERSION" = "default" ]; }; then 20 | ln -fs /usr/lib/rstudio-server/bin/pandoc/pandoc /usr/local/bin 21 | ln -fs /usr/lib/rstudio-server/bin/pandoc/pandoc-citeproc /usr/local/bin 22 | else 23 | if [ "$PANDOC_VERSION" = "default" ]; then 24 | PANDOC_DL_URL=$(wget -qO- https://api.github.com/repos/jgm/pandoc/releases/latest | grep -oP "(?<=\"browser_download_url\":\s\")https.*amd64\.deb") 25 | else 26 | PANDOC_DL_URL=https://github.com/jgm/pandoc/releases/download/${PANDOC_VERSION}/pandoc-${PANDOC_VERSION}-amd64.deb 27 | fi 28 | wget ${PANDOC_DL_URL} -O pandoc-amd64.deb 29 | dpkg -i pandoc-amd64.deb 30 | rm pandoc-amd64.deb 31 | fi 32 | 33 | ## Symlink pandoc & standard pandoc templates for use system-wide 34 | PANDOC_TEMPLATES_VERSION=`pandoc -v | grep -oP "(?<=pandoc\s)[0-9\.]+$"` 35 | wget https://github.com/jgm/pandoc-templates/archive/${PANDOC_TEMPLATES_VERSION}.tar.gz -O pandoc-templates.tar.gz 36 | rm -fr /opt/pandoc/templates 37 | mkdir -p /opt/pandoc/templates 38 | tar xvf pandoc-templates.tar.gz 39 | cp -r 
pandoc-templates*/* /opt/pandoc/templates && rm -rf pandoc-templates* 40 | rm -fr /root/.pandoc 41 | mkdir /root/.pandoc && ln -s /opt/pandoc/templates /root/.pandoc/templates 42 | 43 | fi 44 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_proj.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -eu 3 | 4 | git clone https://github.com/OSGeo/PROJ 5 | 6 | PROJ_VERSION=${PROJ_VERSION:-master} 7 | cd PROJ 8 | 9 | git checkout ${PROJ_VERSION} . 10 | ./autogen.sh 11 | ./configure --prefix=/usr/local 12 | make 13 | make install 14 | 15 | 16 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_python.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | WORKON_HOME=${WORKON_HOME:-/opt/venv} 5 | PYTHON_VENV_PATH=${PYTHON_VENV_PATH:-${WORKON_HOME}/reticulate} 6 | RETICULATE_MINICONDA_ENABLED=${RETICULATE_MINICONDA_ENABLED:-FALSE} 7 | 8 | apt-get update && apt-get install -y --no-install-recommends \ 9 | libpython3-dev \ 10 | python3-dev \ 11 | python3-pip \ 12 | python3-virtualenv \ 13 | python3-venv && \ 14 | rm -rf /var/lib/apt/lists/* 15 | 16 | python3 -m pip --no-cache-dir install --upgrade \ 17 | pip \ 18 | setuptools \ 19 | virtualenv 20 | 21 | # Some TF tools expect a "python" binary 22 | if [ ! 
-e /usr/local/bin/python ]; then 23 | ln -s $(which python3) /usr/local/bin/python 24 | fi 25 | 26 | mkdir -p ${WORKON_HOME} 27 | python3 -m venv ${PYTHON_VENV_PATH} 28 | 29 | install2.r --skipinstalled --error reticulate 30 | 31 | ## Ensure RStudio inherits this env var 32 | echo "" >> ${R_HOME}/etc/Renviron 33 | echo "WORKON_HOME=${WORKON_HOME}" >> ${R_HOME}/etc/Renviron 34 | echo "RETICULATE_MINICONDA_ENABLED=${RETICULATE_MINICONDA_ENABLED}" >> ${R_HOME}/etc/Renviron 35 | 36 | 37 | ## symlink these so that these are available when switching to a new venv 38 | ## -f check for file, -L for link, -e for either 39 | if [ ! -e /usr/local/bin/python ]; then 40 | ln -s $(which python3) /usr/local/bin/python 41 | fi 42 | 43 | if [ ! -e /usr/local/bin/pip ]; then 44 | ln -s ${PYTHON_VENV_PATH}/bin/pip /usr/local/bin/pip 45 | fi 46 | 47 | if [ ! -e /usr/local/bin/virtualenv ]; then 48 | ln -s ${PYTHON_VENV_PATH}/bin/virtualenv /usr/local/bin/virtualenv 49 | fi 50 | 51 | ## Allow staff-level users to modify the shared environment 52 | chown -R :staff ${WORKON_HOME} 53 | chmod g+wx ${WORKON_HOME} 54 | chown :staff ${PYTHON_VENV_PATH} 55 | 56 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_s6init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | ### Sets up S6 supervisor. 
5 | 6 | S6_VERSION=${1:-${S6_VERSION:-v1.21.7.0}} 7 | S6_BEHAVIOUR_IF_STAGE2_FAILS=2 8 | 9 | apt-get update && apt-get -y install wget 10 | 11 | ## Set up S6 init system 12 | if [ -f "/rocker_scripts/.s6_version" ] && [ "$S6_VERSION" = "$(cat /rocker_scripts/.s6_version)" ]; then 13 | echo "S6 already installed" 14 | else 15 | wget -P /tmp/ https://github.com/just-containers/s6-overlay/releases/download/${S6_VERSION}/s6-overlay-amd64.tar.gz 16 | 17 | ## need the modified double tar now, see https://github.com/just-containers/s6-overlay/issues/288 18 | tar hzxf /tmp/s6-overlay-amd64.tar.gz -C / --exclude=usr/bin/execlineb 19 | tar hzxf /tmp/s6-overlay-amd64.tar.gz -C /usr ./bin/execlineb && $_clean 20 | 21 | echo "$S6_VERSION" > /rocker_scripts/.s6_version 22 | fi 23 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_shiny_server.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | SHINY_SERVER_VERSION=${1:-${SHINY_SERVER_VERSION:-latest}} 5 | 6 | # Run dependency scripts 7 | . /rocker_scripts/install_s6init.sh 8 | . 
/rocker_scripts/install_pandoc.sh 9 | 10 | if [ "$SHINY_SERVER_VERSION" = "latest" ]; then 11 | SHINY_SERVER_VERSION=$(wget -qO- https://download3.rstudio.org/ubuntu-14.04/x86_64/VERSION) 12 | fi 13 | 14 | # Get apt packages 15 | apt-get update 16 | apt-get install -y --no-install-recommends \ 17 | sudo \ 18 | gdebi-core \ 19 | libcurl4-gnutls-dev \ 20 | libcairo2-dev \ 21 | libxt-dev \ 22 | xtail \ 23 | wget 24 | 25 | # Install Shiny server 26 | wget --no-verbose "https://download3.rstudio.org/ubuntu-14.04/x86_64/shiny-server-${SHINY_SERVER_VERSION}-amd64.deb" -O ss-latest.deb 27 | gdebi -n ss-latest.deb 28 | rm ss-latest.deb 29 | 30 | # Get R packages 31 | install2.r --error --skipinstalled shiny rmarkdown 32 | 33 | # Set up directories and permissions 34 | if [ -x "$(command -v rstudio-server)" ]; then 35 | DEFAULT_USER=${DEFAULT_USER:-rstudio} 36 | adduser ${DEFAULT_USER} shiny 37 | fi 38 | 39 | cp -R /usr/local/lib/R/site-library/shiny/examples/* /srv/shiny-server/ 40 | chown shiny:shiny /var/lib/shiny-server 41 | mkdir -p /var/log/shiny-server 42 | chown shiny:shiny /var/log/shiny-server 43 | 44 | # create init scripts 45 | mkdir -p /etc/services.d/shiny-server 46 | cat > /etc/services.d/shiny-server/run << 'EOF' 47 | #!/usr/bin/with-contenv bash 48 | ## load /etc/environment vars first: 49 | for line in $( cat /etc/environment ) ; do export $line > /dev/null; done 50 | if [ "$APPLICATION_LOGS_TO_STDOUT" != "false" ]; then 51 | exec xtail /var/log/shiny-server/ & 52 | fi 53 | exec shiny-server 2>&1 54 | EOF 55 | chmod +x /etc/services.d/shiny-server/run 56 | 57 | # Clean up 58 | rm -rf /var/lib/apt/lists/* 59 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_tensorflow.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | TENSORFLOW_VERSION=${1:-${TENSORFLOW_VERSION:-default}} 5 | KERAS_VERSION=${2:-${KERAS_VERSION:-default}} 
6 | 7 | ## Install python dependency 8 | /rocker_scripts/install_python.sh 9 | 10 | ## To support different version of TF, install to different virtualenvs 11 | TENSORFLOW_VENV=$PYTHON_VENV_PATH 12 | install2.r --error --skipinstalled keras 13 | Rscript -e "keras::install_keras(version = \"$KERAS_VERSION\", \ 14 | tensorflow = \"$TENSORFLOW_VERSION\", \ 15 | envname =\"$TENSORFLOW_VENV\")" 16 | 17 | rm -r /tmp/downloaded_packages 18 | 19 | chown -R 1000:1000 /opt/venv 20 | chmod -R 777 /opt/venv 21 | 22 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_texlive.sh: -------------------------------------------------------------------------------- 1 | echo 'selected_scheme scheme-infraonly 2 | TEXDIR /usr/local/texlive 3 | TEXMFCONFIG /opt/texlive/texmf-config 4 | TEXMFHOME /opt/texlive/texmf 5 | TEXMFLOCAL /opt/texlive/texmf-local 6 | TEXMFSYSCONFIG /opt/texlive/texmf-config 7 | TEXMFSYSVAR /opt/texlive/texmf-var 8 | TEXMFVAR /opt/texlive/texmf-var 9 | option_doc 0 10 | option_src 0' > /tmp/texlive-profile.txt 11 | 12 | CTAN_REPO=${CTAN_REPO:-http://mirror.ctan.org/systems/texlive/tlnet} 13 | export PATH=$PATH:/usr/local/texlive/bin/x86_64-linux/ 14 | 15 | mkdir -p /opt/texlive 16 | # set up packages 17 | apt-get update && apt-get -y install wget perl xzdec 18 | wget ${CTAN_REPO}/install-tl-unx.tar.gz 19 | tar -xzf install-tl-unx.tar.gz 20 | install-tl-20*/install-tl --profile=/tmp/texlive-profile.txt && \ 21 | rm -rf install-tl-* 22 | 23 | 24 | tlmgr update --self 25 | tlmgr install latex-bin luatex xetex 26 | tlmgr install ae bibtex context inconsolata listings makeindex metafont mfware parskip pdfcrop tex tools url xkeyval 27 | 28 | ## do not add to /usr/local/bin 29 | # tlmgr path add 30 | # instead, we keep binaries separate and add to PATH 31 | echo "PATH=${PATH}" >> ${R_HOME}/etc/Renviron 32 | 33 | ## open permissions to avoid needless warnings 34 | chown -R rstudio:staff /opt/texlive 35 | 
chown -R rstudio:staff /usr/local/texlive 36 | chmod -R 777 /opt/texlive 37 | chmod -R 777 /usr/local/texlive 38 | 39 | 40 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_tidyverse.sh: -------------------------------------------------------------------------------- 1 | 2 | #!/bin/bash 3 | 4 | ## build ARGs 5 | NCPUS=${NCPUS:-1} 6 | 7 | set -e 8 | apt-get update -qq && apt-get -y --no-install-recommends install \ 9 | libxml2-dev \ 10 | libcairo2-dev \ 11 | libgit2-dev \ 12 | default-libmysqlclient-dev \ 13 | libpq-dev \ 14 | libsasl2-dev \ 15 | libsqlite3-dev \ 16 | libssh2-1-dev \ 17 | unixodbc-dev && \ 18 | rm -rf /var/lib/apt/lists/* 19 | 20 | 21 | install2.r --error --skipinstalled -r $CRAN -n $NCPUS \ 22 | tidyverse \ 23 | devtools \ 24 | rmarkdown \ 25 | BiocManager \ 26 | vroom \ 27 | gert 28 | 29 | ## dplyr database backends 30 | install2.r --error --skipinstalled -r $CRAN -n $NCPUS \ 31 | arrow \ 32 | dbplyr \ 33 | DBI \ 34 | dtplyr \ 35 | nycflights13 \ 36 | Lahman \ 37 | RMariaDB \ 38 | RPostgres \ 39 | RSQLite \ 40 | fst 41 | 42 | ## a bridge to far? -- brings in another 60 packages 43 | # install2.r --error --skipinstalled -r $CRAN -n $NCPUS tidymodels 44 | 45 | rm -rf /tmp/downloaded_packages 46 | 47 | 48 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_verse.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # always set this for scripts but don't declare as ENV.. 
6 | export DEBIAN_FRONTEND=noninteractive 7 | 8 | 9 | export PATH=$PATH:/usr/local/texlive/bin/x86_64-linux/ 10 | 11 | apt-get update -qq \ 12 | && apt-get install -y --no-install-recommends \ 13 | cmake \ 14 | curl \ 15 | default-jdk \ 16 | fonts-roboto \ 17 | ghostscript \ 18 | hugo \ 19 | less \ 20 | libbz2-dev \ 21 | libglpk-dev \ 22 | libgmp3-dev \ 23 | libfribidi-dev \ 24 | libharfbuzz-dev \ 25 | libhunspell-dev \ 26 | libicu-dev \ 27 | liblzma-dev \ 28 | libmagick++-dev \ 29 | libopenmpi-dev \ 30 | libpcre2-dev \ 31 | libssl-dev \ 32 | libv8-dev \ 33 | libxml2-dev\ 34 | libxslt1-dev \ 35 | libzmq3-dev \ 36 | lsb-release \ 37 | qpdf \ 38 | texinfo \ 39 | software-properties-common \ 40 | vim \ 41 | wget 42 | 43 | # libgit2-dev also depends on the libcurl4-gnutils in bionic but not on focal 44 | # cran PPA is a super-stable solution to this 45 | UBUNTU_VERSION=${UBUNTU_VERSION:-`lsb_release -sc`} 46 | if [ ${UBUNTU_VERSION} == "bionic" ]; then 47 | add-apt-repository -y ppa:cran/travis 48 | fi 49 | 50 | 51 | # 52 | # librdf0-dev depends on libcurl4-gnutils-dev instead of libcurl4-openssl-dev... 53 | # So: we can build the redland package bindings and then swap back to libcurl-openssl-dev... (ick) 54 | # explicitly install runtime library sub-deps of librdf0-dev so they are not auto-removed. 
55 | apt-get install -y librdf0-dev 56 | install2.r --error --skipinstalled -r $CRAN redland 57 | apt-get install -y \ 58 | libcurl4-openssl-dev \ 59 | libxslt-dev \ 60 | librdf0 \ 61 | redland-utils \ 62 | rasqal-utils \ 63 | raptor2-utils \ 64 | && apt-get remove -y systemd \ 65 | && apt-get -y autoremove 66 | 67 | apt-get install -y libgit2-dev libcurl4-openssl-dev 68 | 69 | 70 | 71 | ## Add LaTeX, rticles and bookdown support 72 | wget "https://travis-bin.yihui.name/texlive-local.deb" \ 73 | && dpkg -i texlive-local.deb \ 74 | && rm texlive-local.deb 75 | 76 | 77 | ## Install texlive 78 | /rocker_scripts/install_texlive.sh 79 | 80 | install2.r --error -r $CRAN --skipinstalled tinytex 81 | install2.r --error --deps TRUE -r $CRAN --skipinstalled \ 82 | blogdown bookdown rticles rmdshower rJava xaringan 83 | 84 | rm -rf /tmp/downloaded_packages 85 | rm -rf /var/lib/apt/lists/* 86 | 87 | 88 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/install_wgrib2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ## https://www.cpc.ncep.noaa.gov/products/wesley/wgrib2/ 4 | 5 | apt-get update && apt-get -y install wget 6 | cd /opt 7 | wget https://www.ftp.cpc.ncep.noaa.gov/wd51we/wgrib2/wgrib2.tgz 8 | tar -xvf wgrib2.tgz 9 | rm -rf wgrib2.tgz 10 | cd grib2 11 | 12 | ## really someone needs to learn proper packaging conventions, but whatever 13 | CC=gcc FC=gfortran make 14 | ln -s /opt/grib2/wgrib2/wgrib2 /usr/local/bin/wgrib2 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/pam-helper.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | ## Enforces the custom password specified in the PASSWORD environment variable 4 | ## The accepted RStudio username is the same as the USER environment variable (i.e., 
local user name). 5 | 6 | set -o nounset 7 | 8 | IFS='' read -r password 9 | 10 | [ "${USER}" = "${1}" ] && [ "${PASSWORD}" = "${password}" ] 11 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/rsession.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | R_DOC_DIR=$R_HOME/doc 4 | R_INCLUDE_DIR=$R_HOME/include 5 | R_SHARE_DIR=$R_HOME/share 6 | RSTUDIO_DEFAULT_R_VERSION_HOME=$R_HOME 7 | RSTUDIO_DEFAULT_R_VERSION=$R_VERSION 8 | PATH=$PATH:/usr/lib/rstudio-server/bin 9 | rsession --standalone=1 \ 10 | --program-mode=server \ 11 | --log-stderr=1 \ 12 | --session-timeout-minutes=0 \ 13 | --user-identity=rstudio \ 14 | --www-port=8787 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /docimage-shiny/rocker_scripts/userconf.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/with-contenv bash 2 | 3 | ###################################################################################### 4 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 5 | # SPDX-License-Identifier: MIT-0 6 | 7 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this 8 | # software and associated documentation files (the "Software"), to deal in the Software 9 | # without restriction, including without limitation the rights to use, copy, modify, 10 | # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 11 | # permit persons to whom the Software is furnished to do so. 12 | 13 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 14 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 15 | # PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 16 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 18 | # OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 19 | ###################################################################################### 20 | 21 | ## Set defaults for environmental variables in case they are undefined 22 | USER=${USER:=rstudio} 23 | 24 | if [ "$INDIVIDUAL_CONT" = "YES" ]; then 25 | mod_user=`echo "${RSTUDIO_USERS}"|tr "@" "_"|tr "." "_"` 26 | RSTUDIO_PASS_VAR="rstudio_${INSTANCE_NAME}_${mod_user}_container_pass_arn" 27 | RSTUDIO_PASS="${!RSTUDIO_PASS_VAR}" 28 | fi 29 | 30 | PASSWORD=${RSTUDIO_PASS:=rstudio} 31 | USERID=${USERID:=1000} 32 | GROUPID=${GROUPID:=1000} 33 | ROOT=${ROOT:=FALSE} 34 | UMASK=${UMASK:=022} 35 | LANG=${LANG:=en_US.UTF-8} 36 | TZ=${TZ:=Etc/UTC} 37 | 38 | bold=$(tput bold) 39 | normal=$(tput sgr0) 40 | 41 | if [[ ${DISABLE_AUTH,,} == "true" ]] 42 | 43 | then 44 | mv /etc/rstudio/disable_auth_rserver.conf /etc/rstudio/rserver.conf 45 | echo "USER=$USER" >> /etc/environment 46 | fi 47 | 48 | if grep --quiet "auth-none=1" /etc/rstudio/rserver.conf 49 | then 50 | echo "Skipping authentication as requested" 51 | elif [ "$PASSWORD" == "rstudio" ] 52 | then 53 | printf "\n\n" 54 | tput bold 55 | printf "\e[31mERROR\e[39m: You must set a unique PASSWORD (not 'rstudio') first! e.g. run with:\n" 56 | printf "docker run -e PASSWORD=\e[92m\e[39m -p 8787:8787 rocker/rstudio\n" 57 | tput sgr0 58 | printf "\n\n" 59 | exit 1 60 | fi 61 | 62 | if [ "$USERID" -lt 1000 ] 63 | # Probably a macOS user, https://github.com/rocker-org/rocker/issues/205 64 | then 65 | echo "$USERID is less than 1000" 66 | check_user_id=$(grep -F "auth-minimum-user-id" /etc/rstudio/rserver.conf) 67 | if [[ ! 
-z $check_user_id ]] 68 | then 69 | echo "minumum authorised user already exists in /etc/rstudio/rserver.conf: $check_user_id" 70 | else 71 | echo "setting minumum authorised user to 499" 72 | echo auth-minimum-user-id=499 >> /etc/rstudio/rserver.conf 73 | fi 74 | fi 75 | 76 | if [ "$USERID" -ne 1000 ] 77 | ## Configure user with a different USERID if requested. 78 | then 79 | echo "deleting user rstudio" 80 | userdel rstudio 81 | echo "creating new $USER with UID $USERID" 82 | useradd -m $USER -u $USERID 83 | mkdir -p /home/$USER 84 | chown -R $USER /home/$USER 85 | usermod -a -G staff $USER 86 | elif [ "$USER" != "rstudio" ] 87 | then 88 | ## cannot move home folder when it's a shared volume, have to copy and change permissions instead 89 | cp -r /home/rstudio /home/$USER 90 | ## RENAME the user 91 | usermod -l $USER -d /home/$USER rstudio 92 | groupmod -n $USER rstudio 93 | usermod -a -G staff $USER 94 | chown -R $USER:$USER /home/$USER 95 | echo "USER is now $USER" 96 | fi 97 | 98 | if [ "$GROUPID" -ne 1000 ] 99 | ## Configure the primary GID (whether rstudio or $USER) with a different GROUPID if requested. 
100 | then 101 | echo "Modifying primary group $(id $USER -g -n)" 102 | groupmod -g $GROUPID $(id $USER -g -n) 103 | echo "Primary group ID is now custom_group $GROUPID" 104 | fi 105 | 106 | ## Add a password to user 107 | echo "$USER:$PASSWORD" | chpasswd 108 | 109 | # Use Env flag to know if user should be added to sudoers 110 | if [[ ${ROOT,,} == "true" ]] 111 | then 112 | adduser $USER sudo && echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers 113 | echo "$USER added to sudoers" 114 | fi 115 | 116 | ## Change Umask value if desired 117 | if [ "$UMASK" -ne 022 ] 118 | then 119 | echo "server-set-umask=false" >> /etc/rstudio/rserver.conf 120 | echo "Sys.umask(mode=$UMASK)" >> /home/$USER/.Rprofile 121 | fi 122 | 123 | ## Next one for timezone setup 124 | if [ "$TZ" != "Etc/UTC" ] 125 | then 126 | ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone 127 | fi 128 | 129 | ## Set our dynamic variables in Renviron.site to be reflected by RStudio 130 | exclude_vars="HOME PASSWORD" 131 | for file in /var/run/s6/container_environment/* 132 | do 133 | sed -i "/^${file##*/}=/d" ${R_HOME}/etc/Renviron.site 134 | regex="(^| )${file##*/}($| )" 135 | [[ ! 
$exclude_vars =~ $regex ]] && echo "${file##*/}=$(cat $file)" >> ${R_HOME}/etc/Renviron.site || echo "skipping $file" 136 | done 137 | 138 | ## Update Locale if needed 139 | if [ "$LANG" != "en_US.UTF-8" ] 140 | then 141 | /usr/sbin/locale-gen --lang $LANG 142 | /usr/sbin/update-locale --reset LANG=$LANG 143 | fi 144 | 145 | ## only file-owner (root) should read container_environment files: 146 | chmod 600 /var/run/s6/container_environment/* 147 | 148 | -------------------------------------------------------------------------------- /dummy/empty.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-fargate-with-rstudio-open-source/67029f300b5fbf011e78e3f245f6332afa29dbc6/dummy/empty.txt -------------------------------------------------------------------------------- /images/Rstudio_architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-fargate-with-rstudio-open-source/67029f300b5fbf011e78e3f245f6332afa29dbc6/images/Rstudio_architecture.png -------------------------------------------------------------------------------- /images/Rstudio_deployment_image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-fargate-with-rstudio-open-source/67029f300b5fbf011e78e3f245f6332afa29dbc6/images/Rstudio_deployment_image.png -------------------------------------------------------------------------------- /images/datasync_blog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/aws-fargate-with-rstudio-open-source/67029f300b5fbf011e78e3f245f6332afa29dbc6/images/datasync_blog.png -------------------------------------------------------------------------------- /ml_example/breast-cancer-prediction/app.R: 
-------------------------------------------------------------------------------- 1 | library(shiny) 2 | library(caret) 3 | 4 | test_data <- readRDS('./breast_cancer_test_data.rds') 5 | gbmFit <- readRDS('./gbm_model.rds') 6 | preProcessor <- readRDS('./preProcessor.rds') 7 | test_data_transformed <- predict(preProcessor, test_data) 8 | prediction <- predict(gbmFit, newdata = test_data_transformed[,2:10], type = "prob") 9 | 10 | inputs1 <- c("Clump Thickness" = "Cl.thickness", 11 | "Uniformity of Cell Size" = "Cell.size", 12 | "Uniformity of Cell Shape" = "Cell.shape", 13 | "Marginal Adhesion" = "Marg.adhesion", 14 | "Single Epithelial Cell Size" = "Epith.c.size", 15 | "Bare Nuclei" = "Bare.nuclei", 16 | "Bland Chromatin" = "Bl.cromatin", 17 | "Normal Nucleoli" = "Normal.nucleoli", 18 | "Mitoses" = "Mitoses") 19 | 20 | inputs2 <- c("Uniformity of Cell Size" = "Cell.size", 21 | "Clump Thickness" = "Cl.thickness", 22 | "Uniformity of Cell Shape" = "Cell.shape", 23 | "Marginal Adhesion" = "Marg.adhesion", 24 | "Single Epithelial Cell Size" = "Epith.c.size", 25 | "Bare Nuclei" = "Bare.nuclei", 26 | "Bland Chromatin" = "Bl.cromatin", 27 | "Normal Nucleoli" = "Normal.nucleoli", 28 | "Mitoses" = "Mitoses") 29 | 30 | 31 | # Define UI for miles per gallon app ---- 32 | ui <- fluidPage( 33 | 34 | # App title ---- 35 | titlePanel("Breast Cancer"), 36 | 37 | # Sidebar layout with input and output definitions ---- 38 | sidebarLayout( 39 | 40 | # Sidebar panel for inputs ---- 41 | sidebarPanel( 42 | # Input: Decimal interval with step value ---- 43 | sliderInput("threshold", "Probability Threshold:", 44 | min = 0, max = 1, 45 | value = 0.5, step = 0.01), 46 | 47 | # Input: Selector for variable to plot on x axis ---- 48 | selectInput("variable_x", "Variable on X:", 49 | inputs1), 50 | 51 | # Input: Selector for variable to plot on y axis ---- 52 | selectInput("variable_y", "Variable on Y:", 53 | inputs2), 54 | ), 55 | 56 | # Main panel for displaying outputs ---- 57 | mainPanel( 
58 | 59 | # Output: Formatted text for caption ---- 60 | h3(textOutput("caption")), 61 | 62 | # Output: prediction outcome 63 | tableOutput("predictions"), 64 | 65 | # Output: Verbatim text for data summary ---- 66 | verbatimTextOutput("summary"), 67 | 68 | # Output: Formatted text for formula ---- 69 | h3(textOutput("formula")), 70 | 71 | # Output: Plot of the data ---- 72 | # was click = "plot_click" 73 | plotOutput("scatterPlot", brush = "plot_brush"), 74 | 75 | # Output: present click info 76 | tableOutput("info") 77 | 78 | ) 79 | ) 80 | ) 81 | 82 | # Define server logic to plot various variables against mpg ---- 83 | server <- function(input, output) { 84 | 85 | # Compute the formula text ---- 86 | # This is in a reactive expression since it is shared by the 87 | # output$caption and output$mpgPlot functions 88 | formulaText <- reactive({ 89 | paste(input$variable_y, "~", input$variable_x) 90 | }) 91 | 92 | # Compute the formula text ---- 93 | # This is in a reactive expression since it is shared by the 94 | # output$caption and output$mpgPlot functions 95 | total_count <- reactive({ 96 | data.frame(Class = colnames(prediction), 97 | Count = c(sum(prediction$malignant=input$threshold))) 99 | }) 100 | 101 | # Compute the formula text ---- 102 | # This is in a reactive expression since it is shared by the 103 | # output$caption and output$mpgPlot functions 104 | threshold_proba <- reactive({ 105 | cbind(Prediction = ifelse(prediction$malignant>=input$threshold, 106 | "malignant", "benign"), 107 | test_data) 108 | }) 109 | 110 | # return prediction summary 111 | output$predictions <- renderTable({ 112 | total_count() 113 | }) 114 | 115 | # Return the formula text for printing as a caption ---- 116 | output$caption <- renderText({ 117 | "Breast cancer test data summary" 118 | }) 119 | 120 | # Generate a summary of the dataset ---- 121 | # The output$summary depends on the datasetInput reactive 122 | # expression, so will be re-executed whenever datasetInput is 123 
| # invalidated, i.e. whenever the input$dataset changes 124 | output$summary <- renderPrint({ 125 | summary(test_data) 126 | }) 127 | 128 | # Return the formula text for printing as a caption ---- 129 | output$formula <- renderText({ 130 | formulaText() 131 | }) 132 | 133 | # Generate a plot of the requested variable against mpg ---- 134 | # and only exclude outliers if requested 135 | output$scatterPlot <- renderPlot({ 136 | plot(as.formula(formulaText()), data = threshold_proba()) 137 | #ggplot(test_data, aes(x=input$variable_x, y=input$variable_y)) + geom_point() 138 | }) 139 | 140 | output$info <- renderTable({ 141 | # With base graphics, need to tell it what the x and y variables are. 142 | #nearPoints(test_data, input$plot_click, 143 | # xvar = "Cl.thickness", yvar = "Epith.c.size") 144 | # nearPoints() also works with hover and dblclick events 145 | brushedPoints(threshold_proba(), input$plot_brush, 146 | xvar = input$variable_x, yvar = input$variable_y) 147 | }) 148 | 149 | } 150 | 151 | # Create Shiny app ---- 152 | shinyApp(ui, server) -------------------------------------------------------------------------------- /ml_example/breast_cancer_modeling.r: -------------------------------------------------------------------------------- 1 | install.packages(c('caret', 'mlbench', 'gbm', 'e1071')) 2 | setwd("~/aws-fargate-with-rstudio-open-source/ml_example") 3 | library(caret) 4 | library(mlbench) 5 | 6 | data(BreastCancer) 7 | summary(BreastCancer) #Summary of Dataset 8 | 9 | df <- BreastCancer 10 | # convert input values to numeric 11 | for(i in 2:10) { 12 | df[,i] <- as.numeric(as.character(df[,i])) 13 | } 14 | 15 | # split the data into train and test and perform preprocessing 16 | trainIndex <- createDataPartition(df$Class, p = .8, 17 | list = FALSE, 18 | times = 1) 19 | df_train <- df[ trainIndex,] 20 | df_test <- df[-trainIndex,] 21 | preProcValues <- preProcess(df_train, method = c("center", "scale", "medianImpute")) 22 | df_train_transformed <- 
predict(preProcValues, df_train) 23 | 24 | # train a model on df_train 25 | fitControl <- trainControl(## 10-fold CV 26 | method = "repeatedcv", 27 | number = 10, 28 | ## repeated ten times 29 | repeats = 10, 30 | ## Estimate class probabilities 31 | classProbs = TRUE, 32 | ## Evaluate performance using 33 | ## the following function 34 | summaryFunction = twoClassSummary) 35 | 36 | set.seed(825) 37 | gbmFit <- train(Class ~ ., data = df_train_transformed[,2:11], 38 | method = "gbm", 39 | trControl = fitControl, 40 | ## This last option is actually one 41 | ## for gbm() that passes through 42 | verbose = FALSE, 43 | metric = "ROC") 44 | gbmFit 45 | 46 | saveRDS(preProcValues, file = './breast-cancer-prediction/preProcessor.rds') 47 | saveRDS(gbmFit, file = './breast-cancer-prediction/gbm_model.rds') 48 | saveRDS(df_test[,1:10], file = './breast-cancer-prediction/breast_cancer_test_data.rds') 49 | 50 | 51 | -------------------------------------------------------------------------------- /parameters.json: -------------------------------------------------------------------------------- 1 | { 2 | "Parameters": { 3 | "docker_secret_name": "dockerid", 4 | "asg_min_capacity": "1", 5 | "asg_desired_capacity": "2", 6 | "asg_max_capacity": "10", 7 | "shiny_min_capacity": "2", 8 | "shiny_desired_capacity": "2", 9 | "shiny_max_capacity": "4", 10 | "shiny_container_memory_in_gb": "4", 11 | "rstudio_container_memory_reserved": "2048", 12 | "rstudio_health_check_grace_period": "900", 13 | "shiny_health_check_grace_period": "900", 14 | "shiny_cookie_stickiness_duration": "24", 15 | "shiny_scale_in_cooldown": "60", 16 | "shiny_scale_out_cooldown": "30", 17 | "shiny_cpu_target_utilization_percent": "70", 18 | "shiny_memory_target_utilization_percent": "70", 19 | "shiny_requests_per_target": "50", 20 | "datalake_source_bucket_key_hourly": "hourly_sync", 21 | "access_point_path_hourly": "/rstudio-path-hourly", 22 | "datalake_source_bucket_key_instant": "instant_sync", 23 | 
"access_point_path_instant": "/rstudio-path-instant", 24 | "athena_output_bucket_key": "Athena-Query", 25 | "s3_lifecycle_expiration_duration": "365", 26 | "s3_trasnition_duration_infrequent_access": "30", 27 | "s3_trasnition_duration_glacier": "90", 28 | "home_container_path": "/home", 29 | "shiny_share_container_path": "/srv/shiny-server", 30 | "hourly_sync_container_path": "/s3_data_sync/hourly_sync", 31 | "instant_sync_container_path": "/s3_data_sync/instant_upload" 32 | } 33 | } -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | -e . 2 | -------------------------------------------------------------------------------- /rstudio_fargate/datalake/dl_resources.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | This stack creates the S3 bucket for user data upload and configues the bucket for 19 | cross-account access. This stack also creates the instant and hourly data upload folders 20 | and event notification for instant upload. 21 | 22 | """ 23 | 24 | import os 25 | 26 | from aws_cdk import ( 27 | core as cdk, 28 | aws_s3 as s3, 29 | aws_ssm as ssm, 30 | aws_iam as iam, 31 | aws_s3_deployment as s3_deploy, 32 | aws_s3_notifications as s3_notifications, 33 | aws_lambda as _lambda, 34 | ) 35 | from aws_cdk.core import RemovalPolicy 36 | 37 | 38 | class DataLakeResourcesStack(cdk.Stack): 39 | def __init__( 40 | self, 41 | scope: cdk.Construct, 42 | id: str, 43 | instance: str, 44 | rstudio_account_id: str, 45 | datalake_source_bucket_name: str, 46 | datalake_source_bucket_key_hourly: str, 47 | datalake_source_bucket_key_instant: str, 48 | lambda_datasync_trigger_function_arn: str, 49 | **kwargs, 50 | ): 51 | cdk.Stack.__init__(self, scope, id, **kwargs) 52 | 53 | """ 54 | # set removal policy objects 55 | self.removal_policy = ( 56 | core.RemovalPolicy.DESTROY 57 | if os.getenv("AWS_REMOVAL_POLICY", "FALSE") == "TRUE" 58 | else core.RemovalPolicy.RETAIN 59 | ) 60 | """ 61 | 62 | source_bucket = s3.Bucket( 63 | self, 64 | id=f"rstudio-user-data-{instance}", 65 | bucket_name=datalake_source_bucket_name, 66 | # removal_policy=self.removal_policy, 67 | removal_policy=RemovalPolicy.DESTROY, 68 | versioned=True, 69 | ) 70 | 71 | source_bucket.add_to_resource_policy( 72 | permission=iam.PolicyStatement( 73 | principals=[iam.AccountPrincipal(rstudio_account_id)], 74 | effect=iam.Effect.ALLOW, 75 | actions=[ 76 | "s3:GetBucketNotification", 77 | "s3:AbortMultipartUpload", 78 | "s3:DeleteObject", 79 | "s3:GetObject", 80 | "s3:ListMultipartUploadParts", 81 | "s3:PutObjectTagging", 82 | "s3:GetObjectTagging", 83 | "s3:PutObject", 84 | "s3:ListBucket", 85 | "s3:GetBucketLocation", 86 | "s3:ListBucketMultipartUploads", 87 | ], 88 | resources=[ 89 | source_bucket.bucket_arn, 
90 | f"{source_bucket.bucket_arn}/*", 91 | ], 92 | ) 93 | ) 94 | 95 | s3_prefix_creation_hourly = s3_deploy.BucketDeployment( 96 | self, 97 | id=f"s3-prefix-deployment-hourly-{instance}", 98 | sources=[s3_deploy.Source.asset("./dummy")], 99 | destination_bucket=source_bucket, 100 | destination_key_prefix=datalake_source_bucket_key_hourly, 101 | retain_on_delete=False, 102 | ) 103 | 104 | s3_prefix_creation_instant = s3_deploy.BucketDeployment( 105 | self, 106 | id=f"s3-prefix-deployment-instant-{instance}", 107 | sources=[s3_deploy.Source.asset("./dummy")], 108 | destination_bucket=source_bucket, 109 | destination_key_prefix=datalake_source_bucket_key_instant, 110 | retain_on_delete=False, 111 | ) 112 | 113 | # Setup bucket notification to trigger lambda (in destination account) whenever a file is uploaded into the bucket 114 | lambda_destination = s3_notifications.LambdaDestination( 115 | _lambda.Function.from_function_arn( 116 | self, 117 | id=f"datasync-lambda-{instance}", 118 | function_arn=lambda_datasync_trigger_function_arn, 119 | ) 120 | ) 121 | 122 | source_bucket.add_event_notification( 123 | s3.EventType.OBJECT_CREATED, 124 | lambda_destination, 125 | s3.NotificationKeyFilter(prefix=f"{datalake_source_bucket_key_instant}/"), 126 | ) 127 | -------------------------------------------------------------------------------- /rstudio_fargate/datalake/dl_resources_stage.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | This stack creates the pipeline stage for deployment into central data account 19 | 20 | """ 21 | 22 | from aws_cdk import core 23 | from aws_cdk import core as cdk 24 | 25 | from .dl_resources import DataLakeResourcesStack 26 | from .rstudio_s3_stack import RstudioS3Stack 27 | 28 | 29 | class DataLakeResourcesStage(core.Stage): 30 | def __init__( 31 | self, 32 | scope: core.Construct, 33 | id: str, 34 | instance: str, 35 | rstudio_account_id: str, 36 | datalake_source_bucket_name: str, 37 | athena_output_bucket_name: str, 38 | athena_output_bucket_key: str, 39 | athena_workgroup_name: str, 40 | s3_lifecycle_expiration_duration: int, 41 | s3_trasnition_duration_infrequent_access: int, 42 | s3_trasnition_duration_glacier: int, 43 | ssm_cross_account_role_name: str, 44 | datalake_source_bucket_key_hourly: str, 45 | datalake_source_bucket_key_instant: str, 46 | lambda_datasync_trigger_function_arn: str, 47 | **kwargs, 48 | ): 49 | super().__init__(scope, id, **kwargs) 50 | 51 | env_dict = { 52 | "account": self.account, 53 | 
"region": self.region, 54 | } 55 | 56 | dl_s3_bucket_build = DataLakeResourcesStack( 57 | self, 58 | id=f"Dl-Resources-{instance}", 59 | instance=instance, 60 | rstudio_account_id=rstudio_account_id, 61 | datalake_source_bucket_name=datalake_source_bucket_name, 62 | datalake_source_bucket_key_hourly=datalake_source_bucket_key_hourly, 63 | datalake_source_bucket_key_instant=datalake_source_bucket_key_instant, 64 | lambda_datasync_trigger_function_arn=lambda_datasync_trigger_function_arn, 65 | env=env_dict, 66 | ) 67 | 68 | s3_stack_build = RstudioS3Stack( 69 | self, 70 | id=f"S3-RstudioStack-{instance}", 71 | instance=instance, 72 | rstudio_account_id=rstudio_account_id, 73 | athena_output_bucket_name=athena_output_bucket_name, 74 | athena_workgroup_name=athena_workgroup_name, 75 | athena_output_bucket_key=athena_output_bucket_key, 76 | ssm_cross_account_role_name=ssm_cross_account_role_name, 77 | s3_lifecycle_expiration_duration=s3_lifecycle_expiration_duration, 78 | s3_trasnition_duration_infrequent_access=s3_trasnition_duration_infrequent_access, 79 | s3_trasnition_duration_glacier=s3_trasnition_duration_glacier, 80 | env=env_dict, 81 | ) 82 | -------------------------------------------------------------------------------- /rstudio_fargate/datalake/rstudio_s3_stack.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 
10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | This stack creates the S3 bucket for athena query output and the athena workgroup 19 | for using athena from rstudio. The stack also configures the bucket for cross-account 20 | access. 21 | 22 | """ 23 | 24 | from aws_cdk import ( 25 | core as cdk, 26 | aws_s3 as s3, 27 | aws_iam as iam, 28 | aws_s3_deployment as s3_deploy, 29 | aws_athena as athena, 30 | aws_ssm as ssm, 31 | ) 32 | from aws_cdk.core import ( 33 | RemovalPolicy, 34 | Duration, 35 | ) 36 | 37 | 38 | class RstudioS3Stack(cdk.Stack): 39 | def __init__( 40 | self, 41 | scope: cdk.Construct, 42 | id: str, 43 | instance: str, 44 | rstudio_account_id: str, 45 | athena_output_bucket_name: str, 46 | athena_output_bucket_key: str, 47 | athena_workgroup_name: str, 48 | s3_lifecycle_expiration_duration: int, 49 | s3_trasnition_duration_infrequent_access: int, 50 | s3_trasnition_duration_glacier: int, 51 | ssm_cross_account_role_name: str, 52 | **kwargs, 53 | ) -> None: 54 | super().__init__(scope, id, **kwargs) 55 | 56 | # Create S3 bucket for athena queries 57 | athena_s3_bucket = s3.Bucket( 58 | self, 59 | id=f"r-bucket-for-athena-{instance}", 60 | versioned=False, 61 | bucket_name=athena_output_bucket_name, 62 | block_public_access=s3.BlockPublicAccess.BLOCK_ALL, 63 | encryption=s3.BucketEncryption.S3_MANAGED, 64 | removal_policy=RemovalPolicy.DESTROY, 65 | lifecycle_rules=[ 66 | s3.LifecycleRule( 67 | expiration=Duration.days(s3_lifecycle_expiration_duration), 68 | transitions=[ 69 | s3.Transition( 70 | 
storage_class=s3.StorageClass.INFREQUENT_ACCESS, 71 | transition_after=Duration.days( 72 | s3_trasnition_duration_infrequent_access 73 | ), 74 | ), 75 | s3.Transition( 76 | storage_class=s3.StorageClass.GLACIER, 77 | transition_after=Duration.days( 78 | s3_trasnition_duration_glacier 79 | ), 80 | ), 81 | ], 82 | ) 83 | ], 84 | ) 85 | 86 | athena_s3_bucket.add_to_resource_policy( 87 | permission=iam.PolicyStatement( 88 | principals=[iam.AccountPrincipal(rstudio_account_id)], 89 | effect=iam.Effect.ALLOW, 90 | actions=[ 91 | "s3:AbortMultipartUpload", 92 | "s3:DeleteObject", 93 | "s3:GetObject", 94 | "s3:ListMultipartUploadParts", 95 | "s3:PutObjectTagging", 96 | "s3:GetObjectTagging", 97 | "s3:PutObject", 98 | "s3:ListBucket", 99 | "s3:GetBucketLocation", 100 | "s3:ListBucketMultipartUploads", 101 | ], 102 | resources=[ 103 | athena_s3_bucket.bucket_arn, 104 | f"{athena_s3_bucket.bucket_arn}/*", 105 | ], 106 | ) 107 | ) 108 | 109 | s3_prefix_creation = s3_deploy.BucketDeployment( 110 | self, 111 | id=f"s3-prefix-deployment", 112 | sources=[s3_deploy.Source.asset("./dummy")], 113 | destination_bucket=athena_s3_bucket, 114 | destination_key_prefix=f"{athena_output_bucket_key}/", 115 | retain_on_delete=False, 116 | ) 117 | 118 | athena_wg = athena.CfnWorkGroup( 119 | self, 120 | id=f"r-wg-for-athena-{instance}", 121 | name=athena_workgroup_name, 122 | description="Rstudio Workgroup for Athena", 123 | work_group_configuration=athena.CfnWorkGroup.WorkGroupConfigurationProperty( 124 | publish_cloud_watch_metrics_enabled=True, 125 | enforce_work_group_configuration=True, 126 | requester_pays_enabled=True, 127 | # bytesScannedCutoffPerQuery=TEN_GIGABYTES_IN_BYTES, 128 | result_configuration=athena.CfnWorkGroup.ResultConfigurationProperty( 129 | output_location=f"s3://{athena_s3_bucket.bucket_name}/{athena_output_bucket_key}/", 130 | encryption_configuration=athena.CfnWorkGroup.EncryptionConfigurationProperty( 131 | encryption_option="SSE_S3", 132 | ), 133 | ), 134 | ), 135 
| ) 136 | 137 | -------------------------------------------------------------------------------- /rstudio_fargate/datasync_trigger/datasync_trigger_lambda_stack.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 5 | SPDX-License-Identifier: MIT-0 6 | 7 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 8 | software and associated documentation files (the "Software"), to deal in the Software 9 | without restriction, including without limitation the rights to use, copy, modify, 10 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 11 | permit persons to whom the Software is furnished to do so. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 14 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 15 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 16 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 17 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 18 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
19 | 20 | """ 21 | 22 | from os import getenv 23 | from pathlib import Path 24 | 25 | from aws_cdk import ( 26 | core as cdk, 27 | aws_lambda as _lambda, 28 | aws_iam as iam, 29 | ) 30 | from aws_cdk.core import ( 31 | RemovalPolicy, 32 | Duration, 33 | Construct, 34 | Stack, 35 | ) 36 | 37 | LAMBDA_DURATION = Duration.minutes(3) 38 | LAMBDA_MEMORY = 1024 39 | LAMBDA_RUNTIME = _lambda.Runtime.PYTHON_3_7 40 | 41 | 42 | class DatasyncTriggerLambdaStack(Stack): 43 | def __init__( 44 | self, 45 | scope: Construct, 46 | id: str, 47 | instance: str, 48 | datalake_account_id: str, 49 | datalake_source_bucket_name: str, 50 | datasync_task_arn_ssm_param_name: str, 51 | datasync_function_name: str, 52 | ssm_cross_account_lambda_role_name: str, 53 | **kwargs, 54 | ) -> None: 55 | super().__init__(scope, id, **kwargs) 56 | 57 | trigger_datasync_function = _lambda.Function( 58 | self, 59 | id=f"trigger-datasync-function-{instance}", 60 | code=_lambda.Code.asset("rstudio_fargate/datasync_trigger/"), 61 | function_name=datasync_function_name, 62 | handler="trigger_datasync_handler.lambda_handler", 63 | layers=[], 64 | runtime=LAMBDA_RUNTIME, 65 | timeout=LAMBDA_DURATION, 66 | memory_size=LAMBDA_MEMORY, 67 | environment={ 68 | "DATASYNC_TASK_ARN_SSM_PARAM_NAME": datasync_task_arn_ssm_param_name 69 | }, 70 | ) 71 | 72 | # Retrieve the datasync task arn parameter 73 | trigger_datasync_function.add_to_role_policy( 74 | statement=iam.PolicyStatement( 75 | actions=["datasync:StartTaskExecution"], 76 | effect=iam.Effect.ALLOW, 77 | resources=[f"arn:aws:datasync:{self.region}:{self.account}:task/*"], 78 | ) 79 | ) 80 | 81 | trigger_datasync_function.add_to_role_policy( 82 | statement=iam.PolicyStatement( 83 | actions=["ssm:GetParameter", "ssm:GetParameters"], 84 | effect=iam.Effect.ALLOW, 85 | resources=[f"arn:aws:ssm:{self.region}:{self.account}:parameter/*"], 86 | ) 87 | ) 88 | 89 | trigger_datasync_function.add_to_role_policy( 90 | statement=iam.PolicyStatement( 91 | 
actions=["ssm:DescribeParameters"], 92 | effect=iam.Effect.ALLOW, 93 | resources=[f"arn:aws:ssm:{self.region}:{self.account}:*"], 94 | ) 95 | ) 96 | 97 | trigger_datasync_function.add_to_role_policy( 98 | statement=iam.PolicyStatement( 99 | actions=["ec2:DescribeNetworkInterfaces"], 100 | effect=iam.Effect.ALLOW, 101 | resources=["*"], 102 | ) 103 | ) 104 | 105 | # To allow the remote S3 bucket to invoke this lambda: 106 | trigger_datasync_function.add_permission( 107 | id=f"AllowDLToInvokeMe-{instance}", 108 | principal=iam.ServicePrincipal("s3.amazonaws.com"), 109 | action="lambda:InvokeFunction", 110 | source_account=datalake_account_id, 111 | source_arn=f"arn:aws:s3:::{datalake_source_bucket_name}", 112 | ) 113 | 114 | trigger_datasync_function.add_permission( 115 | id=f"AllowDLToAddPermissionsOnMe-{instance}", 116 | principal=iam.AccountPrincipal(datalake_account_id), 117 | action="lambda:AddPermission", 118 | source_account=datalake_account_id, 119 | ) 120 | 121 | # The following role will be used as an execution role for the lambda function that retrieves cross-account SSM parameters 122 | ssm_lambda_execution_role = iam.Role( 123 | role_name=ssm_cross_account_lambda_role_name, 124 | scope=self, 125 | id=f"SSM-Lambda-ExecutionRole-{instance}", 126 | assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"), 127 | managed_policies=[ 128 | iam.ManagedPolicy.from_aws_managed_policy_name( 129 | "service-role/AWSLambdaBasicExecutionRole" 130 | ) 131 | ], 132 | ) 133 | 134 | @property 135 | def _function(self) -> _lambda.IFunction: 136 | return self.trigger_function 137 | -------------------------------------------------------------------------------- /rstudio_fargate/datasync_trigger/datasync_trigger_lambda_stage.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | """ 19 | 20 | from aws_cdk import core as cdk 21 | 22 | from .datasync_trigger_lambda_stack import DatasyncTriggerLambdaStack 23 | 24 | 25 | class DataSyncTriggerLambdaStage(cdk.Stage): 26 | def __init__( 27 | self, 28 | scope: cdk.Construct, 29 | id: str, 30 | instance: str, 31 | rstudio_account_id: str, 32 | datalake_account_id: str, 33 | datalake_source_bucket_name: str, 34 | ssm_cross_account_lambda_role_name: str, 35 | datasync_task_arn_ssm_param_name: str, 36 | datasync_function_name: str, 37 | **kwargs, 38 | ): 39 | super().__init__(scope, id, **kwargs) 40 | 41 | trigger_lambda_stack_build = DatasyncTriggerLambdaStack( 42 | self, 43 | id=f"Rstudio-Trigger-Lambda-DataSync-and-SSMRole-{instance}", 44 | instance=instance, 45 | datalake_account_id=datalake_account_id, 46 | datalake_source_bucket_name=datalake_source_bucket_name, 47 | ssm_cross_account_lambda_role_name=ssm_cross_account_lambda_role_name, 48 | datasync_task_arn_ssm_param_name=datasync_task_arn_ssm_param_name, 49 | 
datasync_function_name=datasync_function_name, 50 | env={ 51 | "account": rstudio_account_id, 52 | "region": self.region, 53 | }, 54 | ) 55 | -------------------------------------------------------------------------------- /rstudio_fargate/datasync_trigger/trigger_datasync_handler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | """ 19 | 20 | import json 21 | import boto3 22 | import os 23 | 24 | datasync_task_arn_param_name = os.environ["DATASYNC_TASK_ARN_SSM_PARAM_NAME"] 25 | 26 | # Let's use Amazon Datasync 27 | datasync = boto3.client("datasync") 28 | ssm = boto3.client("ssm") 29 | 30 | 31 | def lambda_handler(event, context): 32 | objectKey = "" 33 | datasync_task_arn = "" 34 | 35 | print(event) 36 | 37 | try: 38 | objectKey = event["Records"][0]["s3"]["object"]["key"] 39 | except KeyError: 40 | raise KeyError( 41 | "Received invalid event - unable to locate Object key to upload.", event 42 | ) 43 | 44 | try: 45 | parameter = ssm.get_parameter( 46 | Name=datasync_task_arn_param_name, WithDecryption=True 47 | ) 48 | print(parameter) 49 | datasync_task_arn = parameter["Parameter"]["Value"] 50 | except ValueError: 51 | raise ValueError( 52 | f"Unable to locate value for parameter {datasync_task_arn_param_name}.", 53 | event, 54 | ) 55 | 56 | response = datasync.start_task_execution( 57 | TaskArn=datasync_task_arn, 58 | OverrideOptions={}, 59 | Includes=[ 60 | {"FilterType": "SIMPLE_PATTERN", "Value": "/" + os.path.basename(objectKey)} 61 | ], 62 | ) 63 | 64 | return {"response": response} 65 | -------------------------------------------------------------------------------- /rstudio_fargate/network/rstudio_network_account_resources.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 
10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | This stack creates the SSM parameter cross-account riole and permissions for the lambda 19 | from rstudio deployment account to retrieve hosted zone delegation 20 | 21 | """ 22 | 23 | import os 24 | import calendar 25 | import time 26 | 27 | from aws_cdk import ( 28 | core as cdk, 29 | aws_iam as iam, 30 | ) 31 | 32 | 33 | class NetworkAccountResources(cdk.Stack): 34 | def __init__( 35 | self, 36 | scope: cdk.Construct, 37 | id: str, 38 | instance: str, 39 | rstudio_account_id: str, 40 | ssm_cross_account_role_name: str, 41 | **kwargs, 42 | ): 43 | cdk.Stack.__init__(self, scope, id, **kwargs) 44 | 45 | # Create a policy and a role that will be assumed by the cross account lambda in the rstudio account to retrieve SSM parameters 46 | policy = iam.ManagedPolicy( 47 | self, 48 | id=f"SSM-Cross-Account-Policy-{instance}", 49 | statements=[ 50 | iam.PolicyStatement( 51 | effect=iam.Effect.ALLOW, 52 | actions=[ 53 | "ssm:GetParameter", 54 | "ssm:GetParameters", 55 | "ssm:DescribeParameters", 56 | ], 57 | resources=[ 58 | f"arn:aws:ssm:{self.region}:{self.account}:parameter/{instance}/*" 59 | ], 60 | ), 61 | ], 62 | ) 63 | 64 | principal = [] 65 | 66 | principal.append(iam.AccountPrincipal(rstudio_account_id)) 67 | 68 | composite_principal = iam.CompositePrincipal(*principal) 69 | 70 | ssm_cross_account_role = iam.Role( 71 | self, 72 | id=f"SSM-Cross-Account-Role-{instance}", 73 | role_name=ssm_cross_account_role_name, 74 | assumed_by=composite_principal, 75 | ) 76 | 77 | 
policy.attach_to_role(ssm_cross_account_role) 78 | -------------------------------------------------------------------------------- /rstudio_fargate/network/rstudio_network_account_stage.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | This stack creates the pipeline stage for deployment into the central network account. 
19 | 20 | """ 21 | 22 | from aws_cdk import core as cdk 23 | 24 | from .rstudio_route53_stack import RstudioRoute53Stack 25 | from .rstudio_network_account_resources import NetworkAccountResources 26 | 27 | 28 | class NetworkAccountStage(cdk.Stage): 29 | def __init__( 30 | self, 31 | scope: cdk.Construct, 32 | id: str, 33 | instance: str, 34 | rstudio_account_id: str, 35 | r53_base_domain: str, 36 | ssm_route53_delegation_name: str, 37 | ssm_route53_delegation_id: str, 38 | r53_delegation_role_name: str, 39 | ssm_cross_account_role_name: str, 40 | **kwargs, 41 | ): 42 | super().__init__(scope, id, **kwargs) 43 | 44 | r53_build = RstudioRoute53Stack( 45 | self, 46 | id=f"RstudioRoute53Stack-{instance}", 47 | instance=instance, 48 | rstudio_account_id=rstudio_account_id, 49 | r53_base_domain=r53_base_domain, 50 | ssm_route53_delegation_name=ssm_route53_delegation_name, 51 | ssm_route53_delegation_id=ssm_route53_delegation_id, 52 | r53_delegation_role_name=r53_delegation_role_name, 53 | ssm_cross_account_role_name=ssm_cross_account_role_name, 54 | env={ 55 | "account": self.account, 56 | "region": self.region, 57 | }, 58 | ) 59 | 60 | cross_account_ssm_role_build = NetworkAccountResources( 61 | self, 62 | id=f"Network-Account-Resources-{instance}", 63 | instance=instance, 64 | rstudio_account_id=rstudio_account_id, 65 | ssm_cross_account_role_name=ssm_cross_account_role_name, 66 | env={ 67 | "account": self.account, 68 | "region": self.region, 69 | }, 70 | ) 71 | -------------------------------------------------------------------------------- /rstudio_fargate/network/rstudio_route53_stack.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | This stack creates the hosted zone to be delegated to rstudio deployment accounts. 19 | The hosted zone is created from a publicly resolvable domain which must pre-exist 20 | in route 53 before running this stack.
21 | 22 | """ 23 | 24 | from aws_cdk import ( 25 | core as cdk, 26 | aws_iam as iam, 27 | aws_ssm as ssm, 28 | aws_route53 as r53, 29 | aws_certificatemanager as acm, 30 | ) 31 | from aws_cdk.core import CfnOutput 32 | from aws_cdk.aws_route53 import RecordType, RecordTarget 33 | 34 | 35 | class RstudioRoute53Stack(cdk.Stack): 36 | def __init__( 37 | self, 38 | scope: cdk.Construct, 39 | id: str, 40 | instance: str, 41 | rstudio_account_id: str, 42 | r53_base_domain: str, 43 | ssm_route53_delegation_name: str, 44 | ssm_route53_delegation_id: str, 45 | r53_delegation_role_name: str, 46 | ssm_cross_account_role_name: str, 47 | **kwargs, 48 | ) -> None: 49 | super().__init__(scope, id, **kwargs) 50 | 51 | principal = [] 52 | 53 | principal.append(iam.AccountPrincipal(rstudio_account_id)) 54 | 55 | composite_principal = iam.CompositePrincipal(*principal) 56 | 57 | imported_base = r53.HostedZone.from_lookup( 58 | self, 59 | id=f"base-hosted-zone-{instance}", 60 | domain_name=r53_base_domain, 61 | ) 62 | 63 | build_domain = f"build{instance}.{r53_base_domain}" 64 | 65 | build_zone = r53.PublicHostedZone( 66 | self, 67 | id=f"route53-build-zone-{instance}", 68 | zone_name=build_domain, 69 | cross_account_zone_delegation_principal=composite_principal, 70 | ) 71 | 72 | recordset_base = r53.RecordSet( 73 | self, 74 | id=f"ns-record-set-base-{instance}", 75 | zone=imported_base, 76 | record_type=RecordType.NS, 77 | target=RecordTarget.from_values(*build_zone.hosted_zone_name_servers), 78 | record_name=build_domain, 79 | ) 80 | 81 | # This creates policy to allow sub-account make changes in parent domain 82 | dns_policy = iam.ManagedPolicy( 83 | self, 84 | id=f"DnsPolicy-{instance}", 85 | statements=[ 86 | iam.PolicyStatement( 87 | effect=iam.Effect.ALLOW, 88 | actions=["route53:ChangeResourceRecordSets"], 89 | resources=[ 90 | f"arn:aws:route53:::hostedzone/{build_zone.hosted_zone_id}" 91 | ], 92 | ), 93 | ], 94 | ) 95 | 96 | delegation_role = iam.Role( 97 | self, 98 | 
id=f"DelegationRole-{instance}", 99 | role_name=r53_delegation_role_name, 100 | assumed_by=composite_principal, 101 | ) 102 | 103 | dns_policy.attach_to_role(delegation_role) 104 | 105 | # Retrieve the role so we can grant permissions to it: 106 | cross_account_role = iam.Role.from_role_arn( 107 | self, 108 | id=f"Cross-Account-Role-{instance}", 109 | role_arn=f"arn:aws:iam::{self.account}:role/{ssm_cross_account_role_name}", 110 | ) 111 | 112 | build_hosted_zone_id = ssm.StringParameter( 113 | self, 114 | id=f"build-hosted-zone-id-{instance}", 115 | allowed_pattern=".*", 116 | description=f"Rstudio sub domain hosted zone id", 117 | parameter_name=ssm_route53_delegation_id, 118 | string_value=build_zone.hosted_zone_id, 119 | tier=ssm.ParameterTier.ADVANCED, 120 | ) 121 | 122 | build_hosted_zone_id.grant_read(cross_account_role) 123 | 124 | build_hosted_zone_name = ssm.StringParameter( 125 | self, 126 | id=f"build-hosted-zone-name-{instance}", 127 | allowed_pattern=".*", 128 | description="Rstudio sub domain hosted zone name", 129 | parameter_name=ssm_route53_delegation_name, 130 | string_value=build_zone.zone_name, 131 | tier=ssm.ParameterTier.ADVANCED, 132 | ) 133 | 134 | build_hosted_zone_name.grant_read(cross_account_role) 135 | -------------------------------------------------------------------------------- /rstudio_fargate/rstudio/custom/ssm_custom_resource.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | This script creates the lambda function to read the cross account SSM parameter store 19 | for obtaining route53 hsited zone delegation 20 | 21 | """ 22 | 23 | from typing import Any 24 | from aws_cdk import ( 25 | core, 26 | aws_iam as iam, 27 | aws_s3 as s3, 28 | ) 29 | 30 | from aws_cdk import aws_cloudformation as cfn, aws_lambda as lambda_, core 31 | 32 | from aws_cdk.custom_resources import ( 33 | AwsCustomResourcePolicy, 34 | PhysicalResourceId, 35 | ) 36 | 37 | from datetime import datetime 38 | 39 | 40 | class SSMParameterReader(core.Construct): 41 | """SSM Parameter constructs that retrieves the parameter value form an environment 42 | Arguments: 43 | :param parameter_name -- The name of the SSM parameter to retrieve its value 44 | """ 45 | 46 | def __init__( 47 | self, 48 | scope: core.Construct, 49 | id: str, 50 | parameter_name: str, 51 | region: str, 52 | instance: str, 53 | rstudio_account_id: str, 54 | rstudio_pipeline_account_id: str, 55 | network_account_id: str, 56 | cross_account_id: str, 57 | 
ssm_cross_account_role_name: str, 58 | ssm_cross_account_lambda_role_name: str, 59 | **kwargs, 60 | ) -> None: 61 | super().__init__(scope, id) 62 | 63 | with open( 64 | "rstudio_fargate/rstudio/custom/ssm_custom_resource_handler.py", 65 | encoding="utf-8", 66 | ) as fp: 67 | code_body = fp.read() 68 | 69 | cross_account_role_arn_network = ( 70 | f"arn:aws:iam::{network_account_id}:role/{ssm_cross_account_role_name}" 71 | ) 72 | 73 | cross_account_role_arn_pipeline = f"arn:aws:iam::{rstudio_pipeline_account_id}:role/{ssm_cross_account_role_name}" 74 | 75 | cross_account_role_arn = ( 76 | f"arn:aws:iam::{cross_account_id}:role/{ssm_cross_account_role_name}" 77 | ) 78 | 79 | policy = [ 80 | iam.PolicyStatement( 81 | effect=iam.Effect.ALLOW, 82 | actions=[ 83 | "sts:AssumeRole", 84 | ], 85 | resources=[ 86 | cross_account_role_arn_network, 87 | cross_account_role_arn_pipeline, 88 | ], 89 | ), 90 | ] 91 | 92 | params = {"ParameterName": parameter_name, "AssumeRole": cross_account_role_arn} 93 | 94 | role = self.get_provisioning_lambda_role( 95 | construct_id=id, 96 | instance=instance, 97 | role_name=ssm_cross_account_lambda_role_name, 98 | rstudio_account_id=rstudio_account_id, 99 | ssm_cross_account_lambda_role_name=ssm_cross_account_lambda_role_name, 100 | ) 101 | 102 | self.resource = cfn.CustomResource( 103 | self, 104 | id=f"Resource", 105 | provider=cfn.CustomResourceProvider.lambda_( 106 | lambda_.SingletonFunction( 107 | self, 108 | "Singleton", 109 | uuid="f7d4f730-4ee1-11e8-9c2d-fa7ae01bbebc", 110 | code=lambda_.InlineCode(code_body), 111 | handler="index.main", 112 | timeout=core.Duration.seconds(300), 113 | runtime=lambda_.Runtime.PYTHON_3_7, 114 | role=role, 115 | initial_policy=policy, 116 | ) 117 | ), 118 | properties=params, 119 | ) 120 | 121 | def get_parameter_value(self): 122 | return self.resource.get_att("Response").to_string() 123 | 124 | def get_provisioning_lambda_role( 125 | self, 126 | construct_id: str, 127 | instance: str, 128 | role_name: 
str, 129 | rstudio_account_id: str, 130 | ssm_cross_account_lambda_role_name: str, 131 | ): 132 | return iam.Role.from_role_arn( 133 | self, 134 | id=f"LambdaExecutionRole-{instance}", 135 | role_arn=f"arn:aws:iam::{rstudio_account_id}:role/{ssm_cross_account_lambda_role_name}", 136 | mutable=True, 137 | ) 138 | -------------------------------------------------------------------------------- /rstudio_fargate/rstudio/custom/ssm_custom_resource_handler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | This script creates the lambda handler for the cross account hosted zone delegation 19 | 20 | """ 21 | 22 | import boto3 23 | import logging as log 24 | import random 25 | import string 26 | import cfnresponse 27 | 28 | log.getLogger().setLevel(log.INFO) 29 | 30 | 31 | def id_generator(size, chars=string.ascii_lowercase + string.digits): 32 | return "".join(random.choice(chars) for _ in range(size)) 33 | 34 | 35 | def main(event, context): 36 | 37 | # This needs to change if there are to be multiple resources 38 | # in the same stack 39 | physical_id = "%s.%s" % (id_generator(6), id_generator(16)) 40 | 41 | print(event) 42 | 43 | try: 44 | log.info("Input event: %s", event) 45 | 46 | # Check if this is a Create and we're failing Creates 47 | if event["RequestType"] == "Create" and event["ResourceProperties"].get( 48 | "FailCreate", False 49 | ): 50 | raise RuntimeError("Create failure requested") 51 | if event["RequestType"] in ["Create", "Update"]: 52 | sts_connection = boto3.client("sts") 53 | role = event["ResourceProperties"]["AssumeRole"] 54 | acct_b = sts_connection.assume_role( 55 | RoleArn=role, RoleSessionName="cross_acct_lambda" 56 | ) 57 | 58 | ACCESS_KEY = acct_b["Credentials"]["AccessKeyId"] 59 | SECRET_KEY = acct_b["Credentials"]["SecretAccessKey"] 60 | SESSION_TOKEN = acct_b["Credentials"]["SessionToken"] 61 | 62 | # create service client using the assumed role credentials 63 | client = boto3.client( 64 | "ssm", 65 | aws_access_key_id=ACCESS_KEY, 66 | aws_secret_access_key=SECRET_KEY, 67 | aws_session_token=SESSION_TOKEN, 68 | ) 69 | parameter_name = event["ResourceProperties"]["ParameterName"] 70 | 71 | parameter = client.get_parameter(Name=parameter_name, WithDecryption=True) 72 | print(parameter) 73 | 74 | attributes = {"Response": parameter["Parameter"]["Value"]} 75 | 76 | cfnresponse.send( 77 | event, context, cfnresponse.SUCCESS, attributes, physical_id 78 | ) 79 | 80 | # Do not call into STS and SSM when the resource is being deleted 
by CloudFormation 81 | if event["RequestType"] == "Delete": 82 | attributes = {"Response": "Delete performed"} 83 | cfnresponse.send( 84 | event, context, cfnresponse.SUCCESS, attributes, physical_id 85 | ) 86 | except Exception as e: 87 | log.exception(e) 88 | # cfnresponse's error message is always "see CloudWatch" 89 | cfnresponse.send(event, context, cfnresponse.FAILED, {}, physical_id) 90 | -------------------------------------------------------------------------------- /rstudio_fargate/rstudio/ecs/ecs_cluster_stack.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | """ 19 | 20 | from aws_cdk import ( 21 | core as cdk, 22 | aws_ec2 as ec2, 23 | aws_ecs as ecs, 24 | aws_autoscaling as autoscaling, 25 | aws_kms as kms, 26 | ) 27 | 28 | 29 | class EcsClusterStack(cdk.Stack): 30 | def __init__( 31 | self, 32 | scope: cdk.Construct, 33 | id: str, 34 | vpc: ec2.Vpc, 35 | instance: str, 36 | rstudio_install_type: str, 37 | rstudio_ec2_instance_type: str, 38 | ecs_cluster_name: str, 39 | asg_min_capacity: int, 40 | asg_desired_capacity: int, 41 | asg_max_capacity: int, 42 | **kwargs, 43 | ) -> None: 44 | super().__init__(scope, id, **kwargs) 45 | 46 | cluster_security_group = ec2.SecurityGroup( 47 | self, 48 | id=f"Rstudio-SecurityGroup-{instance}", 49 | vpc=vpc, 50 | description=f"Rstudio Security Group - {instance}", 51 | ) 52 | 53 | cluster = ecs.Cluster( 54 | self, 55 | id=f"Rstudio-Shiny-ecs-cluster-{instance}", 56 | cluster_name=ecs_cluster_name, 57 | vpc=vpc, 58 | container_insights=True, 59 | ) 60 | 61 | if rstudio_install_type == "ec2": 62 | 63 | auto_scaling_group = autoscaling.AutoScalingGroup( 64 | self, 65 | id="Rstudio-ec2-ASG-" + instance, 66 | vpc=vpc, 67 | instance_type=ec2.InstanceType(rstudio_ec2_instance_type), 68 | machine_image=ecs.EcsOptimizedImage.amazon_linux2(), 69 | min_capacity=asg_min_capacity, 70 | desired_capacity=asg_desired_capacity, 71 | max_capacity=asg_max_capacity, 72 | security_group=cluster_security_group, 73 | ) 74 | 75 | cluster_capacity_provider = ecs.AsgCapacityProvider( 76 | self, 77 | id=f"RstudioCapacityProvider-{instance}", 78 | capacity_provider_name=f"Rstudio-Capacity-Provider-{instance}", 79 | auto_scaling_group=auto_scaling_group, 80 | target_capacity_percent=100, 81 | enable_managed_termination_protection=False, 82 | ) 83 | 84 | cluster.add_asg_capacity_provider( 85 | cluster_capacity_provider 86 | ) 87 | 88 | self.ecs_cluster_security_group_id = cluster_security_group.security_group_id 89 | self.ecs_cluster_name = cluster.cluster_name 90 | 
--------------------------------------------------------------------------------
/rstudio_fargate/rstudio/efs/rstudio_efs_stack.py:
--------------------------------------------------------------------------------
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0

Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
17 | 18 | """ 19 | 20 | from aws_cdk import ( 21 | core as cdk, 22 | aws_efs as efs, 23 | aws_kms as kms, 24 | aws_ec2 as ec2, 25 | ) 26 | from aws_cdk.core import RemovalPolicy 27 | 28 | 29 | class RstudioEfsStack(cdk.Stack): 30 | def __init__( 31 | self, 32 | scope: cdk.Construct, 33 | id: str, 34 | vpc: ec2.Vpc, 35 | instance: str, 36 | rstudio_efs_key_alias: str, 37 | access_point_path_hourly: str, 38 | access_point_path_instant: str, 39 | **kwargs, 40 | ) -> None: 41 | super().__init__(scope, id, **kwargs) 42 | 43 | rstudio_efs_kms_key_alias = kms.Alias.from_alias_name( 44 | self, 45 | id=f"Rstudio-Efs-{instance}", 46 | alias_name=rstudio_efs_key_alias, 47 | ) 48 | 49 | # File system for sharing data between Shiny and RStudio instances 50 | file_system_rstudio_shiny_share = efs.FileSystem( 51 | self, 52 | id=f"Rstudio-shiny-user-data-share-{instance}", 53 | file_system_name=f"Rstudio-shiny-share-cont-fs-{instance}", 54 | vpc=vpc, 55 | encrypted=True, 56 | kms_key=rstudio_efs_kms_key_alias, 57 | performance_mode=efs.PerformanceMode.GENERAL_PURPOSE, 58 | throughput_mode=efs.ThroughputMode.BURSTING, 59 | enable_automatic_backups=True, 60 | removal_policy=RemovalPolicy.DESTROY, 61 | ) 62 | 63 | access_point_rstudio_shiny_share = efs.AccessPoint( 64 | self, 65 | id=f"Rstudio-shiny-share-access-point-{instance}", 66 | file_system=file_system_rstudio_shiny_share, 67 | path="/rstudio-shiny-share-path", 68 | create_acl=efs.Acl(owner_uid="1000", owner_gid="1000", permissions="755"), 69 | ) 70 | 71 | # Hourly sync file filesystem 72 | file_system_rstudio_hourly = efs.FileSystem( 73 | self, 74 | id=f"Rstudio-cont-user-data-hourly-{instance}", 75 | file_system_name=f"Rstudio-cont-fs-hourly-{instance}", 76 | vpc=vpc, 77 | encrypted=True, 78 | kms_key=rstudio_efs_kms_key_alias, 79 | performance_mode=efs.PerformanceMode.GENERAL_PURPOSE, 80 | throughput_mode=efs.ThroughputMode.BURSTING, 81 | enable_automatic_backups=True, 82 | removal_policy=RemovalPolicy.DESTROY, 83 | ) 84 | 
85 | access_point_rstudio_hourly = efs.AccessPoint( 86 | self, 87 | id=f"Rstudio-access-point-hourly-{instance}", 88 | file_system=file_system_rstudio_hourly, 89 | path=access_point_path_hourly, 90 | create_acl=efs.Acl(owner_uid="1000", owner_gid="1000", permissions="755"), 91 | ) 92 | 93 | # Instant sync file system 94 | file_system_rstudio_instant = efs.FileSystem( 95 | self, 96 | id=f"Rstudio-cont-user-data-instant-{instance}", 97 | file_system_name=f"Rstudio-cont-fs-instant-{instance}", 98 | vpc=vpc, 99 | encrypted=True, 100 | kms_key=rstudio_efs_kms_key_alias, 101 | performance_mode=efs.PerformanceMode.GENERAL_PURPOSE, 102 | throughput_mode=efs.ThroughputMode.BURSTING, 103 | enable_automatic_backups=True, 104 | removal_policy=RemovalPolicy.DESTROY, 105 | ) 106 | 107 | access_point_rstudio_instant = efs.AccessPoint( 108 | self, 109 | id=f"Rstudio-access-point-instant-{instance}", 110 | file_system=file_system_rstudio_instant, 111 | path=access_point_path_instant, 112 | create_acl=efs.Acl(owner_uid="1000", owner_gid="1000", permissions="755"), 113 | ) 114 | 115 | # Shiny Shared file system to pass to other stacks 116 | self.file_system_rstudio_shiny_share_file_system_id = ( 117 | file_system_rstudio_shiny_share.file_system_id 118 | ) 119 | self.file_system_rstudio_shiny_share_security_group_id = ( 120 | file_system_rstudio_shiny_share.connections.security_groups[ 121 | 0 122 | ].security_group_id 123 | ) 124 | self.access_point_id_rstudio_shiny_share = ( 125 | access_point_rstudio_shiny_share.access_point_id 126 | ) 127 | 128 | # Hourly sync file system to pass to other stacks 129 | self.file_system_rstudio_hourly_file_system_id = ( 130 | file_system_rstudio_hourly.file_system_id 131 | ) 132 | self.file_system_rstudio_hourly_security_group_id = ( 133 | file_system_rstudio_hourly.connections.security_groups[0].security_group_id 134 | ) 135 | self.access_point_id_rstudio_hourly = ( 136 | access_point_rstudio_hourly.access_point_id 137 | ) 138 | 139 | # Instanct 
sync file system to pass to other stacks 140 | self.file_system_rstudio_instant_file_system_id = ( 141 | file_system_rstudio_instant.file_system_id 142 | ) 143 | self.file_system_rstudio_instant_security_group_id = ( 144 | file_system_rstudio_instant.connections.security_groups[0].security_group_id 145 | ) 146 | self.access_point_id_rstudio_instant = ( 147 | access_point_rstudio_instant.access_point_id 148 | ) 149 | -------------------------------------------------------------------------------- /rstudio_fargate/rstudio/efs/shiny_efs_stack.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | """ 19 | 20 | from aws_cdk import ( 21 | core as cdk, 22 | aws_efs as efs, 23 | aws_kms as kms, 24 | aws_ec2 as ec2, 25 | ) 26 | from aws_cdk.core import RemovalPolicy 27 | 28 | 29 | class ShinyEfsStack(cdk.Stack): 30 | def __init__( 31 | self, 32 | scope: cdk.Construct, 33 | id: str, 34 | vpc: ec2.Vpc, 35 | instance: str, 36 | shiny_efs_key_alias: str, 37 | **kwargs, 38 | ) -> None: 39 | super().__init__(scope, id, **kwargs) 40 | 41 | rstudio_efs_kms_key_alias = kms.Alias.from_alias_name( 42 | self, 43 | id="Rstudio-Efs-{instance}", 44 | alias_name=shiny_efs_key_alias, 45 | ) 46 | 47 | # Shiny home file system 48 | file_system_shiny_home = efs.FileSystem( 49 | self, 50 | id=f"Rstudio-shiny-cont-user-data-home-{instance}", 51 | file_system_name=f"Rstudio-shiny-cont-fs-home-{instance}", 52 | vpc=vpc, 53 | encrypted=True, 54 | kms_key=rstudio_efs_kms_key_alias, 55 | performance_mode=efs.PerformanceMode.GENERAL_PURPOSE, 56 | throughput_mode=efs.ThroughputMode.BURSTING, 57 | enable_automatic_backups=True, 58 | removal_policy=RemovalPolicy.DESTROY, 59 | ) 60 | 61 | access_point_shiny_home = efs.AccessPoint( 62 | self, 63 | id=f"Shiny-access-point-home-{instance}", 64 | file_system=file_system_shiny_home, 65 | path="/shiny-path-home", 66 | create_acl=efs.Acl(owner_uid="1000", owner_gid="1000", permissions="755"), 67 | ) 68 | 69 | # Instanct sync file system to pass to other stacks 70 | self.file_system_shiny_home_file_system_id = ( 71 | file_system_shiny_home.file_system_id 72 | ) 73 | self.file_system_shiny_home_security_group_id = ( 74 | file_system_shiny_home.connections.security_groups[0].security_group_id 75 | ) 76 | self.access_point_id_shiny_home = access_point_shiny_home.access_point_id 77 | -------------------------------------------------------------------------------- /rstudio_fargate/rstudio/kms/kms_stack.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | """ 19 | 20 | from aws_cdk import ( 21 | core as cdk, 22 | aws_kms as kms, 23 | aws_iam as iam, 24 | ) 25 | from aws_cdk.core import RemovalPolicy 26 | 27 | 28 | class KmsStack(cdk.Stack): 29 | def __init__( 30 | self, 31 | scope: cdk.Construct, 32 | id: str, 33 | instance: str, 34 | rstudio_cwlogs_key_alias: str, 35 | shiny_cwlogs_key_alias: str, 36 | rstudio_efs_key_alias: str, 37 | shiny_efs_key_alias: str, 38 | rstudio_user_key_alias: str, 39 | **kwargs, 40 | ) -> None: 41 | super().__init__(scope, id, **kwargs) 42 | 43 | rstudio_kms_key = kms.Key( 44 | self, 45 | id=f"Rstudio-Kms-Key-{instance}", 46 | enable_key_rotation=True, 47 | alias=rstudio_cwlogs_key_alias, 48 | removal_policy=RemovalPolicy.DESTROY, 49 | ) 50 | 51 | rstudio_kms_key.add_alias(shiny_cwlogs_key_alias) 52 | rstudio_kms_key.add_alias(rstudio_efs_key_alias) 53 | rstudio_kms_key.add_alias(shiny_efs_key_alias) 54 | rstudio_kms_key.add_alias(rstudio_user_key_alias) 55 | 56 | rstudio_kms_key.add_to_resource_policy( 57 | 
statement=iam.PolicyStatement( 58 | actions=[ 59 | "kms:Decrypt", 60 | "kms:DescribeKey", 61 | "kms:Encrypt", 62 | "kms:ReEncrypt*", 63 | "kms:GenerateDataKey*", 64 | ], 65 | effect=iam.Effect.ALLOW, 66 | resources=["*"], 67 | principals=[ 68 | iam.ServicePrincipal(f"logs.{self.region}.amazonaws.com"), 69 | iam.ServicePrincipal("sns.amazonaws.com"), 70 | ], 71 | ) 72 | ) 73 | -------------------------------------------------------------------------------- /rstudio_fargate/rstudio/ses/rstudio_email_passwords_stack.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | """ 19 | from .ses_custom_resource import SESSendEmail 20 | 21 | from aws_cdk import ( 22 | core as cdk, 23 | aws_secretsmanager as sm, 24 | aws_kms as kms, 25 | aws_route53 as r53, 26 | ) 27 | 28 | 29 | class RstudioEmailPasswordsStack(cdk.Stack): 30 | def __init__( 31 | self, 32 | scope: cdk.Construct, 33 | id: str, 34 | instance: str, 35 | rstudio_hosted_zone_id: str, 36 | rstudio_hosted_zone_name: str, 37 | shiny_hosted_zone_id: str, 38 | shiny_hosted_zone_name: str, 39 | sns_email: str, 40 | secretpass_arn: list, 41 | number_of_rstudio_containers: int, 42 | rstudio_user_key_alias: str, 43 | **kwargs, 44 | ) -> None: 45 | super().__init__(scope, id, **kwargs) 46 | 47 | sns_email = self.node.try_get_context("sns_email_id") 48 | 49 | rstudio_zone = r53.PublicHostedZone.from_hosted_zone_attributes( 50 | self, 51 | id=f"RStudio-zone-{instance}", 52 | hosted_zone_id=rstudio_hosted_zone_id, 53 | zone_name=rstudio_hosted_zone_name, 54 | ) 55 | 56 | rstudio_url = f"https://{rstudio_zone.zone_name}" 57 | 58 | shiny_zone = r53.PublicHostedZone.from_hosted_zone_attributes( 59 | self, 60 | id=f"Shiny-zone-{instance}", 61 | hosted_zone_id=shiny_hosted_zone_id, 62 | zone_name=shiny_hosted_zone_name, 63 | ) 64 | 65 | shiny_url = f"https://{shiny_zone.zone_name}" 66 | 67 | # Send email here 68 | for i in range(1, number_of_rstudio_containers + 1): 69 | rstudio_url = f"https://container{i}.{rstudio_zone.zone_name}" 70 | ses_send_email = SESSendEmail( 71 | self, 72 | id=f"SES-Send-container{i}-{instance}", 73 | email_from=sns_email, 74 | email_to=sns_email, 75 | secret_arn=secretpass_arn[i - 1], 76 | subject="Welcome to RStudio", 77 | message=f"""Hello rstudio@container{i},

Your username is: rstudio
78 | Your password is:

79 | To acess rstudio click {rstudio_url}

80 | To acess shiny click {shiny_url}

81 | In RStudio, save shiny app files in: /srv/shiny-server to deploy shiny apps.

82 | Regards, 83 |
Rstudio@container{i}.{instance}""", 84 | region=self.region, 85 | account_id=self.account, 86 | counter=i, 87 | instance=instance, 88 | rstudio_user_key_alias=rstudio_user_key_alias, 89 | ) 90 | -------------------------------------------------------------------------------- /rstudio_fargate/rstudio/ses/ses_custom_resource.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | This script creates the lambda function to send SES emails to users 19 | 20 | """ 21 | 22 | from typing import Any 23 | from aws_cdk import ( 24 | core, 25 | aws_iam as iam, 26 | aws_s3 as s3, 27 | aws_kms as kms, 28 | aws_cloudformation as cfn, 29 | aws_lambda as lambda_, 30 | ) 31 | 32 | from aws_cdk.custom_resources import ( 33 | AwsCustomResourcePolicy, 34 | PhysicalResourceId, 35 | ) 36 | 37 | from datetime import datetime 38 | 39 | 40 | class SESSendEmail(core.Construct): 41 | """SSM Parameter constructs that retrieves the parameter value form an environment 42 | Arguments: 43 | :param parameter_name -- The name of the SSM parameter to retrieve its value 44 | """ 45 | 46 | def __init__( 47 | self, 48 | scope: core.Construct, 49 | id: str, 50 | email_from: str, 51 | email_to: str, 52 | secret_arn: str, 53 | subject: str, 54 | message: str, 55 | region: str, 56 | account_id: str, 57 | counter: int, 58 | instance: str, 59 | rstudio_user_key_alias: str, 60 | **kwargs, 61 | ) -> None: 62 | super().__init__(scope, id) 63 | 64 | encryption_key = kms.Alias.from_alias_name( 65 | self, 66 | id=f"Encryption-Key-{instance}", 67 | alias_name=rstudio_user_key_alias, 68 | ) 69 | 70 | with open( 71 | "rstudio_fargate/rstudio/ses/ses_custom_resource_handler.py", 72 | encoding="utf-8", 73 | ) as fp: 74 | code_body = fp.read() 75 | 76 | function_name = f"rstudio_send_email_{instance}_" + str(counter) 77 | # function_name = f"rstudio_send_email" 78 | policy = [ 79 | iam.PolicyStatement( 80 | effect=iam.Effect.ALLOW, 81 | actions=["logs:CreateLogGroup"], 82 | resources=[f"arn:aws:logs:{region}:{account_id}:*"], 83 | ), 84 | iam.PolicyStatement( 85 | effect=iam.Effect.ALLOW, 86 | actions=["logs:CreateLogStream", "logs:PutLogEvents"], 87 | resources=[ 88 | f"arn:aws:logs:{region}:{account_id}:log-group:/aws/lambda/{function_name}:*" 89 | ], 90 | ), 91 | iam.PolicyStatement( 92 | effect=iam.Effect.ALLOW, 93 | actions=["ses:SendEmail", "ses:SendRawEmail", 
"ses:SendTemplatedEmail"], 94 | resources=[f"arn:aws:ses:{region}:{account_id}:identity/*"], 95 | ), 96 | iam.PolicyStatement( 97 | effect=iam.Effect.ALLOW, 98 | actions=["secretsmanager:GetSecretValue"], 99 | resources=[ 100 | f"arn:aws:secretsmanager:{region}:{account_id}:secret:*rstudio*" 101 | ], 102 | ), 103 | iam.PolicyStatement( 104 | effect=iam.Effect.ALLOW, 105 | actions=[ 106 | "kms:Decrypt", 107 | "kms:DescribeKey", 108 | "kms:Encrypt", 109 | "kms:ReEncrypt*", 110 | "kms:GenerateDataKey*", 111 | ], 112 | resources=["*"], 113 | ), 114 | ] 115 | 116 | params = { 117 | "EmailFrom": email_from, 118 | "EmailTo": email_to, 119 | "SecretArn": secret_arn, 120 | "Subject": subject, 121 | "Message": message, 122 | } 123 | 124 | func = lambda_.SingletonFunction( 125 | self, 126 | "SesSingleton", 127 | lambda_purpose="SesSingleton-Lambda", 128 | function_name=function_name, 129 | uuid="f3d4f730-4ee1-11e8-9c2d-fd7ae01bbebc", 130 | code=lambda_.InlineCode(code_body), 131 | handler="index.main", 132 | timeout=core.Duration.seconds(300), 133 | runtime=lambda_.Runtime.PYTHON_3_8, 134 | initial_policy=policy, 135 | ) 136 | 137 | self.resource = cfn.CustomResource( 138 | self, 139 | "Resource", 140 | provider=cfn.CustomResourceProvider.lambda_(func), 141 | properties=params, 142 | ) 143 | 144 | encryption_key.grant_decrypt(func) 145 | -------------------------------------------------------------------------------- /rstudio_fargate/rstudio/ses/ses_custom_resource_handler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | This script creates the lambda function handler for the lambda function sending emails 19 | to users via SES 20 | 21 | """ 22 | 23 | import boto3 24 | import logging as log 25 | import random 26 | import string 27 | import cfnresponse 28 | from html import escape 29 | 30 | log.getLogger().setLevel(log.INFO) 31 | 32 | 33 | def id_generator(size, chars=string.ascii_lowercase + string.digits): 34 | return "".join(random.choice(chars) for _ in range(size)) 35 | 36 | 37 | def main(event, context): 38 | physical_id = "%s.%s" % (id_generator(6), id_generator(16)) 39 | 40 | print(event) 41 | 42 | try: 43 | log.info("Input event: %s", event) 44 | 45 | # Check if this is a Create and we're failing Creates 46 | if event["RequestType"] == "Create" and event["ResourceProperties"].get( 47 | "FailCreate", False 48 | ): 49 | raise RuntimeError("Create failure requested") 50 | if event["RequestType"] in ["Create"]: 51 | client = boto3.client("ses") 52 | sm_client = boto3.client("secretsmanager") 53 | email_from = 
event["ResourceProperties"]["EmailFrom"] 54 | email_to = event["ResourceProperties"]["EmailTo"] 55 | subject = event["ResourceProperties"]["Subject"] 56 | message = event["ResourceProperties"]["Message"] 57 | secret_arn = event["ResourceProperties"]["SecretArn"] 58 | sresponse = sm_client.get_secret_value(SecretId=secret_arn) 59 | message = message.replace("", escape(sresponse["SecretString"])) 60 | response = send_email(email_from, email_to, subject, message) 61 | 62 | attributes = {"Response": response} 63 | cfnresponse.send( 64 | event, context, cfnresponse.SUCCESS, attributes, physical_id 65 | ) 66 | 67 | if event["RequestType"] in ["Delete", "Update"]: 68 | attributes = {"Response": "Delete/update performed"} 69 | cfnresponse.send( 70 | event, context, cfnresponse.SUCCESS, attributes, physical_id 71 | ) 72 | except Exception as e: 73 | log.exception(e) 74 | cfnresponse.send(event, context, cfnresponse.FAILED, {}, physical_id) 75 | 76 | 77 | def send_email(email_from, email_to, subject, message): 78 | client = boto3.client("ses") 79 | 80 | return client.send_email( 81 | Source=email_from, 82 | Destination={"ToAddresses": [email_to]}, 83 | Message={ 84 | "Subject": {"Data": subject, "Charset": "UTF-8"}, 85 | "Body": { 86 | "Text": {"Data": get_text_content(message), "Charset": "UTF-8"}, 87 | "Html": {"Data": get_html_content(message), "Charset": "UTF-8"}, 88 | }, 89 | }, 90 | ReplyToAddresses=[ 91 | "no-reply@test.com", 92 | ], 93 | ) 94 | 95 | 96 | def get_html_content(message): 97 | return f""" 98 | 99 | 100 |

Good day,

101 |

{message}

102 | 103 | 104 | """ 105 | 106 | 107 | def get_text_content(message): 108 | return message 109 | -------------------------------------------------------------------------------- /rstudio_fargate/rstudio/vpc/vpc_stack.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | """ 19 | 20 | from aws_cdk import ( 21 | core as cdk, 22 | aws_ec2 as ec2, 23 | ) 24 | 25 | 26 | class VpcStack(cdk.Stack): 27 | def __init__( 28 | self, 29 | scope: cdk.Construct, 30 | id: str, 31 | instance: str, 32 | vpc_cidr_range: str, 33 | **kwargs, 34 | ) -> None: 35 | super().__init__(scope, id, **kwargs) 36 | 37 | self.vpc = ec2.Vpc( 38 | self, 39 | id=f"rstudio-vpc-{instance}", 40 | cidr=vpc_cidr_range, 41 | max_azs=2, 42 | ) 43 | 44 | cdk.Tags.of(self.vpc).add("source-ag:environment-type", instance) 45 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: MIT-0 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 12 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 13 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 14 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 16 | OFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | """ 19 | 20 | import setuptools 21 | 22 | 23 | with open("README.md") as fp: 24 | long_description = fp.read() 25 | 26 | 27 | setuptools.setup( 28 | name="rstudio_fargate", 29 | version="0.0.1", 30 | description="RStudio/Shiny Open Source Project", 31 | long_description=long_description, 32 | long_description_content_type="text/markdown", 33 | author="author", 34 | package_dir={"": "rstudio_fargate"}, 35 | packages=setuptools.find_packages(where="rstudio_fargate"), 36 | install_requires=[ 37 | "aws_cdk.core", 38 | "aws_cdk.aws_s3", 39 | "aws_cdk.aws_s3_notifications", 40 | "aws_cdk.aws_s3_deployment", 41 | "aws_cdk.aws_athena", 42 | "aws_cdk.aws_ec2", 43 | "aws_cdk.aws_ecs", 44 | "aws_cdk.aws_eks", 45 | "aws_cdk.aws_ecs-patterns", 46 | "aws_cdk.aws_certificatemanager", 47 | "aws_cdk.aws_route53", 48 | "aws_cdk.aws_route53_targets", 49 | "aws_cdk.aws_efs", 50 | "aws_cdk.aws_logs", 51 | "aws_cdk.aws_kms", 52 | "aws_cdk.aws_sns", 53 | "aws_cdk.aws_sns_subscriptions", 54 | "aws_cdk.aws_events", 55 | "aws_cdk.aws_events_targets", 56 | "aws_cdk.aws_elasticloadbalancingv2", 57 | "aws_cdk.aws_secretsmanager", 58 | "aws_cdk.aws_ecr_assets", 59 | "aws_cdk.aws_datasync", 60 | "aws_cdk.pipelines", 61 | "aws_cdk.aws_codepipeline", 62 | "aws_cdk.aws_codepipeline_actions", 63 | "aws_cdk.aws_codecommit", 64 | "aws_cdk.aws_codebuild", 65 | "aws_cdk.aws_wafv2", 66 | "cdk_nag", 67 | ], 68 | python_requires=">=3.6", 69 | classifiers=[ 70 | "Development Status :: 4 - Beta", 71 | "Intended Audience :: Developers", 72 | "License :: OSI Approved :: Apache Software License", 73 | "Programming Language :: JavaScript", 74 | "Programming Language :: Python :: 3 :: Only", 75 | "Programming Language :: Python :: 3.6", 76 | "Programming Language :: Python :: 3.7", 77 | "Programming Language :: Python :: 3.8", 78 | "Topic :: Software Development :: Code Generators", 79 | "Topic :: Utilities", 80 | "Typing :: Typed", 81 | ], 82 | ) 83 | 
--------------------------------------------------------------------------------
/source.bat:
--------------------------------------------------------------------------------
@echo off

rem The sole purpose of this script is to make the command
rem
rem     source .venv/bin/activate
rem
rem (which activates a Python virtualenv on Linux or Mac OS X) work on Windows.
rem On Windows, this command just runs this batch file (the argument is ignored).
rem
rem Now we don't need to document a Windows command for activating a virtualenv.

echo Executing .venv\Scripts\activate.bat for you
rem NOTE(review): invoked without CALL, so control transfers to activate.bat and
rem does not return here. Harmless as the final line, but confirm the activated
rem environment persists in the caller's shell as intended.
.venv\Scripts\activate.bat
--------------------------------------------------------------------------------