├── .dockerignore
├── .github
└── workflows
│ └── docker-image.yml
├── .gitignore
├── README.md
├── connectorx
├── Dockerfile
└── Dockerfile-3.2
├── files
├── .Rprofile
├── ccenv.sh
├── ccrenv.sh
├── ce.sh
├── ci.sh
├── cl.sh
├── clean.sh
├── condarc
├── connectorx
│ ├── connectorx-0.3.2-cp311-cp311-manylinux_2_34_aarch64.whl
│ ├── connectorx-0.3.3-cp311-cp311-manylinux_2_35_aarch64.whl
│ └── connectorx-0.3.3-cp312-cp312-manylinux_2_35_aarch64.whl
├── cr.sh
├── cuda.sh
├── install-R.sh
├── install-rstudio.sh
├── jupyter_config.json
├── jupyterhub_config.py
├── p10k.zsh
├── pg_hba.conf
├── postgresql.conf
├── safari-nerdfont.css
├── scalable_analytics
│ ├── core-site.xml
│ ├── hdfs-site.xml
│ ├── init-dfs.sh
│ ├── start-dfs.sh
│ └── stop-dfs.sh
├── setup-arrow.sh
├── setup-bml.sh
├── setup-extra.sh
├── setup-hadoop.sh
├── setup-jupyterhub.sh
├── setup-ml-frameworks.sh
├── setup-quarto.sh
├── setup-radiant.sh
├── setup-tidyverse.sh
├── setup.sh
├── start-notebook.sh
├── start-services-simplified.sh
├── start-services.sh
├── start-singleuser.sh
├── start.sh
├── supervisord copy.conf
├── supervisord.conf
├── usethis
└── zshrc
├── images
└── docker-icon.png
├── install
├── figures
│ ├── docker-general-wsl2-based-engine.png
│ ├── docker-help.png
│ ├── docker-icon.png
│ ├── docker-resources-mac.png
│ ├── docker-resources-wsl2-integration.png
│ ├── docker-system-mac.png
│ ├── ohmyzsh-icons.png
│ ├── ohmyzsh-powerlevel10k-iterm.png
│ ├── ohmyzsh-powerlevel10k.png
│ ├── postgresql-pgweb.png
│ ├── ppt-image-editing.pptx
│ ├── reset-credentials.png
│ ├── rsm-jupyter.png
│ ├── rsm-launch-menu-macos-arm.png
│ ├── rsm-launch-menu-macos.png
│ ├── rsm-launch-menu-wsl2-arm.png
│ ├── rsm-launch-menu-wsl2.png
│ ├── rsm-msba-menu-linux.png
│ ├── rsm-radiant.jpeg
│ ├── rsm-rstudio.png
│ ├── settings-resources.png
│ ├── start-ubuntu-terminal.png
│ ├── symlinks.png
│ ├── ubuntu-root.png
│ ├── win-protected.png
│ ├── win-update-message.png
│ ├── windows-shared-drives.png
│ ├── windows-version.png
│ └── wsl2-windows-docker-install.png
├── install-docker-chromeos.sh
├── install-docker.sh
├── rsm-msba-chromeos.md
├── rsm-msba-linux.md
├── rsm-msba-macos-arm.md
├── rsm-msba-macos-m1.md
├── rsm-msba-macos.md
├── rsm-msba-windows-arm.md
├── rsm-msba-windows.md
└── setup-ohmyzsh.md
├── launch-rsm-msba-arm.sh
├── launch-rsm-msba-intel-chromeos.sh
├── launch-rsm-msba-intel-jupyterhub.sh
├── launch-rsm-msba-intel.sh
├── postgres
├── .gitignore
├── postgres-connect-vscode.pgsql
├── postgres-connect.R
├── postgres-connect.Rmd
├── postgres-connect.ipynb
├── postgres-connect.md
├── postgres-createdb.sh
├── postgres-radiant.nb.html
└── postgres-radiant.state.rda
├── rsm-code-interpreter
└── Dockerfile
├── rsm-msba-arm
├── Dockerfile
└── docker-compose.yml
├── rsm-msba-intel-jupyterhub
├── Dockerfile
└── docker-compose.yml
├── rsm-msba-intel
├── Dockerfile
└── docker-compose.yml
├── rsm-simple-arm
└── Dockerfile
├── rsm-simple-intel
└── Dockerfile
├── scripts
├── build-images.sh
├── dclean.sh
├── dprune.sh
└── pull-containers.sh
└── vscode
├── extension-install.sh
├── extensions.txt
├── settings-vscode.json
└── settings-windows-terminal.json
/.dockerignore:
--------------------------------------------------------------------------------
1 | # Ignore all files and dirs starting with a dot, e.g. ".git", ".idea", etc.
2 | .*
--------------------------------------------------------------------------------
/.github/workflows/docker-image.yml:
--------------------------------------------------------------------------------
1 | name: rsm-msba-arm docker ci
2 |
3 | on:
4 | push:
5 | branches:
6 | - 'master'
7 | paths:
8 | - 'rsm-msba-arm/**'
9 | workflow_dispatch:
10 |
11 | jobs:
12 | docker:
13 | runs-on: self-hosted
14 | steps:
15 | -
16 | name: Checkout
17 | uses: actions/checkout@v2
18 | -
19 | name: Set up QEMU
20 | uses: docker/setup-qemu-action@v1
21 | -
22 | name: Set up Docker Buildx
23 | uses: docker/setup-buildx-action@v1
24 | with:
25 | buildkitd-flags: --debug
26 | -
27 | name: Login to DockerHub
28 | uses: docker/login-action@v1
29 | with:
30 | username: ${{ secrets.DOCKERHUB_USERNAME }}
31 | password: ${{ secrets.DOCKERHUB_TOKEN }}
32 | # -
33 | # name: Prepare tags
34 | # id: tag_prep
35 | # run: |
36 | # TAGS="latest,$(git rev-parse --short '$GITHUB_SHA')"
37 | # if [[ $(head -n 1 rsm-msba-arm/Dockerfile) == "# __version__"* ]]; then
38 | # TAGS="latest,$(head -n 1 rsm-msba-arm/Dockerfile | sed 's/^# __version__ //')"
39 | # fi
40 | # echo ::set-output name=tags::${TAGS}
41 | -
42 | name: Build and push
43 | uses: docker/build-push-action@v2
44 | with:
45 | context: .
46 | file: ./rsm-msba-arm/Dockerfile
47 | platforms: linux/arm64
48 | push: true
49 | tags: ${{ secrets.DOCKERHUB_USERNAME }}/rsm-msba-arm:testtag
50 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .Rproj.user
2 | .Rhistory
3 | .RData
4 | .Ruserdata
5 | .DS_Store
6 | .ipynb_checkpoints
7 | .mypy_cache
8 | .vscode
9 | files/ca.sh
10 | files/ccr.sh
11 | build-images.out
12 | rsm-code-interpreter/
13 | rsm-code-interpreter-working/
14 | rsm-shiny/
15 | rsm-test/
16 | rsm-mgta495/
17 | rsm-mgta453/
18 | files/chatgpt_requirements.txt
19 | rsm-code-interpreter/Dockerfile
20 | .bash_history
21 | build.log
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Dockerized Business Analytics
2 | ==================================
3 |
4 | This repo contains information to set up a docker image with R, Rstudio, Shiny, [Radiant](https://github.com/radiant-rstats/radiant), Python, Postgres, JupyterLab, and Spark
5 |
6 | ## Install Docker
7 |
8 | To use the docker images you first need to install Docker
9 |
10 | * For Mac (ARM): https://desktop.docker.com/mac/stable/arm64/Docker.dmg
11 | * For Mac (Intel): https://desktop.docker.com/mac/stable/amd64/Docker.dmg
12 | * For Windows (Intel): https://desktop.docker.com/win/main/amd64/Docker%20Desktop%20Installer.exe
13 | * For Windows (ARM): https://desktop.docker.com/win/main/arm64/Docker%20Desktop%20Installer.exe
14 |
15 | After installing Docker, check that it is running by typing `docker --version` in a terminal. This should return something like the below:
16 |
17 | ```bash
18 | docker --version
19 | Docker version 24.0.7, build afdd53b
20 | ```
21 |
22 | * For detailed install instructions on Windows (Intel) see [install/rsm-msba-windows.md](install/rsm-msba-windows.md)
23 | * For detailed install instructions on Windows (ARM) see [install/rsm-msba-windows-arm.md](install/rsm-msba-windows-arm.md)
24 | * For detailed install instructions on macOS (ARM) see [install/rsm-msba-macos-arm.md](install/rsm-msba-macos-arm.md)
25 | * For detailed install instructions on macOS (Intel) see [install/rsm-msba-macos.md](install/rsm-msba-macos.md)
26 | * For detailed install instructions on Linux see [install/rsm-msba-linux.md](install/rsm-msba-linux.md)
27 | * For detailed install instructions on ChromeOS see [install/rsm-msba-chromeos.md](install/rsm-msba-chromeos.md)
28 |
29 | ## rsm-msba-arm and rsm-msba-intel
30 |
31 | `rsm-msba-arm` is built for ARM based macOS computers (e.g., M3). `rsm-msba-intel` is built for Intel and AMD based computers. To build a new image based on `rsm-msba-intel` add the following at the top of your Dockerfile
32 |
33 | ```
34 | FROM vnijs/rsm-msba-intel:latest
35 | ```
36 |
37 | ## rsm-msba-intel-jupyterhub
38 |
39 | This image builds on rsm-msba-intel and is set up to be accessible from a server running JupyterHub.
40 |
41 | ## Troubleshooting
42 |
43 | To stop (all) running containers use:
44 |
45 | ```bash
46 | docker kill $(docker ps -q)
47 | ```
48 |
49 | If the build fails for some reason you can access the container through a bash shell to investigate what went wrong:
50 |
51 | ```sh
52 | docker run -t -i $USER/rsm-msba-intel /bin/bash
53 | docker run -t -i $USER/rsm-msba-arm /bin/bash
54 | ```
55 |
56 | To remove an existing image use:
57 |
58 | ```sh
59 | docker rmi --force $USER/rsm-msba-intel
60 | docker rmi --force $USER/rsm-msba-arm
61 | ```
62 |
63 | To stop all running containers, remove unused images, and clean up errant docker processes, use the `dclean.sh` script
64 |
65 | ```sh
66 | ./scripts/dclean.sh
67 | ```
68 |
69 | ## General docker related commands
70 |
71 | Check the disk space used by docker images
72 |
73 | ```bash
74 | docker ps -s
75 | ```
76 |
77 | ```bash
78 | docker system df
79 | ```
80 |
81 | ## Previous versions of the RSM computing environment
82 |
83 | To see the documentation and configuration files for versions prior to 2.0 see docker1.0
84 |
85 | ## Trademarks
86 |
87 | Shiny is a registered trademark of RStudio, Inc. The use of the trademarked term Shiny through the images hosted on hub.docker.com has been granted by explicit permission of RStudio. Please review RStudio's trademark use policy and address inquiries about further distribution or other questions to permissions@rstudio.com.
88 |
89 | Jupyter is distributed under the BSD 3-Clause license (Copyright (c) 2017, Project Jupyter Contributors)
--------------------------------------------------------------------------------
/connectorx/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12-slim AS builder
2 |
3 | RUN apt-get update
4 | RUN apt-get install -y curl
5 |
6 | RUN mkdir /wheeler
7 | WORKDIR /wheeler
8 |
9 | RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
10 | ENV PATH="$PATH:/root/.cargo/bin"
11 |
12 | RUN rustup install 1.78.0
13 | RUN rustup override set 1.78.0
14 |
15 | RUN apt-get install -y git
16 |
17 | RUN git clone https://github.com/sfu-db/connector-x.git
18 | WORKDIR /wheeler/connector-x
19 | RUN git checkout tags/v0.3.3
20 |
21 | RUN pip install maturin[patchelf]==0.14.15
22 |
23 | # Install the dependencies
24 | RUN apt-get install -y clang build-essential libkrb5-dev
25 |
26 | RUN maturin build -m connectorx-python/Cargo.toml -i python3.12 --release
27 |
28 | FROM builder AS base
29 |
30 | COPY --from=builder /wheeler/connector-x/connectorx-python/target/wheels/connectorx-0.3.3-*.whl ./
31 | # RUN pip install connectorx-0.3.3-*.whl
32 |
33 | # Copying the wheel into the host system
34 | # the below didn't work for me
35 | # COPY /wheeler/connector-x/connectorx-python/target/wheels/connectorx-* .
36 |
37 | # use the below to access the wheel in /wheeler/connector-x/connectorx-python/target/wheels/
38 | # docker run -it -v ./:/root your_user_name/connectorx /bin/bash
39 | # docker run -it -v ./:/root vnijs/connectorx /bin/bash
40 |
41 | # then navigate to the directory below and copy the wheel to the home directory
42 | # which is mounted to your current directory on your host OS
43 | # /wheeler/connector-x/connectorx-python/target/wheels
--------------------------------------------------------------------------------
/connectorx/Dockerfile-3.2:
--------------------------------------------------------------------------------
1 | FROM arm64v8/ubuntu:22.04
2 |
3 | # Installing devel dependencies
4 | RUN apt-get update
5 | RUN apt-get install -y \
6 | libmysqlclient-dev \
7 | freetds-dev \
8 | libpq-dev \
9 | wget \
10 | curl \
11 | build-essential \
12 | libkrb5-dev \
13 | clang \
14 | git
15 |
16 | # Creating and changing to a new directory
17 | RUN mkdir /wheeler
18 | WORKDIR /wheeler
19 |
20 | # Installing and setting up rust
21 | RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
22 | ENV PATH="$PATH:/root/.cargo/bin"
23 |
24 | # Installing just through cargo
25 | RUN cargo install just
26 |
27 | # Installing python3.11.7 from source
28 | RUN wget https://www.python.org/ftp/python/3.11.7/Python-3.11.7.tgz
29 | RUN tar -xvf Python-3.11.7.tgz
30 | RUN cd Python-3.11.7 && ./configure --enable-optimizations
31 | RUN cd Python-3.11.7 && make install
32 | RUN pip3.11 install poetry
33 |
34 | # Cloning the connectorx repo and switching to the v0.3.3 tag
35 | RUN git clone https://github.com/sfu-db/connector-x.git
36 | WORKDIR /wheeler/connector-x
37 | RUN git checkout tags/v0.3.3
38 |
39 | # Installing maturin
40 | RUN pip3.11 install maturin[patchelf]==0.14.15
41 |
42 | # Building the python wheel through maturin
43 | RUN maturin build -m connectorx-python/Cargo.toml -i python3.11 --release
44 |
45 | # Copying the wheel into the host system
46 | # the below didn't work for me
47 | # COPY /wheeler/connector-x/connectorx-python/target/wheels/connectorx-* .
48 |
49 | # use the below to access the wheel in /wheeler/connector-x/connectorx-python/target/wheels/
50 | # docker run -it -v ./:/root your_user_name/connectorx /bin/bash
51 |
52 | # then navigate to the directory below and copy the wheel to the home directory
53 | # which is mounted to your current directory on your host OS
54 | # /wheeler/connector-x/connectorx-python/target/wheels
--------------------------------------------------------------------------------
/files/.Rprofile:
--------------------------------------------------------------------------------
1 | options(radiant.ace_vim.keys = FALSE)
2 | options(radiant.maxRequestSize = -1)
3 | # options(radiant.maxRequestSize = 10 * 1024^2)
4 | options(radiant.report = TRUE)
5 | # options(radiant.ace_theme = "cobalt")
6 | options(radiant.ace_theme = "tomorrow")
7 | # options(radiant.ace_showInvisibles = TRUE)
8 | # options(radiant.sf_volumes = c(Home = "~"))
9 |
--------------------------------------------------------------------------------
/files/ccenv.sh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh -i
2 |
3 | set -e
4 |
5 | function conda_create_kernel() {
6 | if { conda env list | grep "$1"; } >/dev/null 2>&1; then
7 | echo "Conda environment $1 already exists"
8 | echo "Adding packages to $1"
9 | else
10 | conda create -y -n $1
11 | fi
12 |
13 | conda activate $1
14 | conda install -y -c conda-forge ipykernel "${@:2}"
15 | ipython kernel install --user --name=$1
16 | conda deactivate
17 | }
18 |
19 | if [ "$1" != "" ]; then
20 | conda_create_kernel $1 ${@:2}
21 | echo "The code run in this function is:"
22 | declare -f conda_create_kernel
23 | echo "You may need to refresh your browser to see the new kernel icon for environment '$1'"
24 | else
25 | echo "This function is used to create a conda environment kernel and requires the name of a conda envivronment to create and the names of any packages you want to install. For example:"
26 | echo "ccenv myenv pyasn1"
27 | fi
28 |
29 | echo "\nSee the link below for additional information about conda"
30 | echo "https://docs.conda.io/projects/conda/en/latest/user-guide/index.html"
31 |
--------------------------------------------------------------------------------
/files/ccrenv.sh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh -i
2 |
3 | ## not ready for use
4 |
5 | function conda_create_r_kernel() {
6 | if { conda env list | grep "$1"; } >/dev/null 2>&1; then
7 | echo "Conda environment $1 already exists"
8 | echo "Adding packages to $1"
9 | else
10 | conda create -y -n $1
11 | fi
12 |
13 | conda activate $1
14 | conda install -y -c conda-forge r-irkernel "${@:2}"
15 | ipython kernel install --user --name=$1
16 | conda deactivate
17 | }
18 |
19 | if [ "$1" != "" ]; then
20 | conda_create_r_kernel $1 ${@:2}
21 | echo "The code run in this function is:"
22 | declare -f conda_create_r_kernel
23 | echo "You may need to refresh your browser to see the new R-kernel icon for environment '$1'"
24 | else
25 | echo "This function is used to create a conda environment kernel and requires the name of a conda envivronment to create and the names of any packages you want to install. For example:"
26 | echo "cc myenv pyasn1"
27 | fi
28 |
29 | echo "\nSee the link below for additional information about conda"
30 | echo "https://docs.conda.io/projects/conda/en/latest/user-guide/index.html"
31 |
--------------------------------------------------------------------------------
/files/ce.sh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh -i
2 |
3 | set -e
4 |
5 | if [ "$1" != "" ]; then
6 | if [ "$2" != "" ]; then
7 | fn="$2"
8 | else
9 | fn="$PWD/$1.yaml"
10 | fi
11 | fprn="conda env export --name $1 > '${fn}'"
12 | eval $fprn
13 | echo "\nEnvironment file saved to ${fn}"
14 | echo "\nThe code run in this function is:\n"
15 | echo $fprn
16 | else
17 | echo "\nThe conda export function requires the name of a conda environment to export. You can add a 2nd argument to indicate the file name you want to use for the yaml file. If no 2nd argument is provided the yaml file name will be extracted from the environment name (e.g., 'ce myenv' would generate the file 'myenv.yaml')"
18 | fi
19 |
20 | echo "\nSee the link below for additional information about conda"
21 | echo "https://docs.conda.io/projects/conda/en/latest/user-guide/index.html"
22 |
--------------------------------------------------------------------------------
/files/ci.sh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh -i
2 |
3 | set -e
4 |
5 | function conda_import_environment() {
6 | if [ "$2" != "" ]; then
7 | env_name="$2"
8 | else
9 | env_name=$(basename -- "$1")
10 | env_name="${env_name%.*}"
11 | fi
12 | conda env create --file "$1" --name "${env_name}"
13 | conda activate "${env_name}"
14 | ipython kernel install --user --name="${env_name}"
15 | conda deactivate
16 | echo "You may need to refresh your browser to see the new kernel icon for environment '${env}'\n"
17 | }
18 |
19 | if [ "$1" != "" ]; then
20 | conda_import_environment $1 $2
21 | echo "You may need to refresh your browser to see the new kernel icon for environment '$1' \n"
22 | echo "The code run in this function is:"
23 | declare -f conda_import_environment
24 | else
25 | echo "This function to import a conda environment requires the path to a yml file with conda envivronment details to be used. You can add a 2nd argument to indicate the name you want to use for the new environment. If no 2nd argument is provided the environment name will be extracted from the yaml file name"
26 | fi
27 |
28 | echo "\nSee the link below for additional information about conda"
29 | echo "https://docs.conda.io/projects/conda/en/latest/user-guide/index.html"
30 |
--------------------------------------------------------------------------------
/files/cl.sh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh -i
2 |
3 | set -e
4 |
5 | CMD="conda info --envs"
6 | echo "$CMD\n"
7 | eval $CMD
8 |
9 | CMD="jupyter kernelspec list"
10 | echo "$CMD\n"
11 | eval $CMD
12 |
13 | echo "\nSee the link below for additional information about conda"
14 | echo "https://docs.conda.io/projects/conda/en/latest/user-guide/index.html"
15 |
--------------------------------------------------------------------------------
/files/clean.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ## script to run from jupyter lab to clean up settings
4 | ## and remove locally installed R and Python packages
5 | ## cleaning up settings is a common requirement for Rstudio
6 |
7 | HOMEDIR="/home/$(whoami)"
8 |
9 | if [ ! -d "${HOMEDIR}/.rsm-msba" ]; then
10 | echo "-----------------------------------------------------"
11 | echo "Directory ${HOMEDIR}/.rsm-msba not found"
12 | echo "No cleanup done"
13 | echo "-----------------------------------------------------"
14 | else
15 | echo "-----------------------------------------------------"
16 | echo "Clean up Rstudio sessions and settings (y/n)?"
17 | echo "-----------------------------------------------------"
18 | read cleanup
19 |
20 | if [ "${cleanup}" == "y" ]; then
21 | echo "Cleaning up Rstudio sessions and settings"
22 | rm -rf "${HOMEDIR}/.rstudio/sessions"
23 | rm -rf "${HOMEDIR}/.rstudio/projects"
24 | rm -rf "${HOMEDIR}/.rstudio/projects_settings"
25 |
26 | ## make sure abend is set correctly
27 | ## https://community.rstudio.com/t/restarting-rstudio-server-in-docker-avoid-error-message/10349/2
28 | rstudio_abend () {
29 | if [ -d "${HOMEDIR}/.rstudio/monitored/user-settings" ]; then
30 | touch "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
31 | sed -i '/^alwaysSaveHistory="[0-1]"/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
32 | sed -i '/^loadRData="[0-1]"/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
33 | sed -i '/^saveAction=/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
34 | echo 'alwaysSaveHistory="1"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
35 | echo 'loadRData="0"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
36 | echo 'saveAction="0"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
37 | sed -i '/^$/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
38 | fi
39 | }
40 | rstudio_abend
41 | fi
42 |
43 | echo "-----------------------------------------------------"
44 | echo "Remove locally installed R packages (y/n)?"
45 | echo "-----------------------------------------------------"
46 | read cleanup
47 |
48 | if [ "${cleanup}" == "y" ]; then
49 | echo "Removing locally installed R packages"
50 | rm_list=$(ls -d "${HOMEDIR}"/.rsm-msba/R/[0-9]\.[0-9]\.[0-9] 2>/dev/null)
51 | for i in ${rm_list}; do
52 | rm -rf "${i}"
53 | mkdir "${i}"
54 | done
55 | fi
56 |
57 | echo "-----------------------------------------------------"
58 | echo "Remove locally installed Python packages (y/n)?"
59 | echo "-----------------------------------------------------"
60 | read cleanup
61 |
62 | if [ "${cleanup}" == "y" ]; then
63 | echo "Removing locally installed Python packages"
64 | rm -rf "${HOMEDIR}/.rsm-msba/bin"
65 | rm -rf "${HOMEDIR}/.rsm-msba/lib"
66 | rm_list=$(ls "${HOMEDIR}/.rsm-msba/share" | grep -v jupyter)
67 | for i in ${rm_list}; do
68 | rm -rf "${HOMEDIR}/.rsm-msba/share/${i}"
69 | done
70 | fi
71 |
72 | echo "-----------------------------------------------------"
73 | echo "Cleanup complete"
74 | echo "-----------------------------------------------------"
75 | fi
76 |
--------------------------------------------------------------------------------
/files/condarc:
--------------------------------------------------------------------------------
1 | channels:
2 | - conda-forge
3 | - defaults
4 | channel_priority: disabled
5 | auto_update_conda: false
6 | show_channel_urls: true
7 | envs_dirs:
8 | - /home/jovyan/.rsm-msba/conda/envs
9 | - /opt/conda/envs
10 | pkg_dirs:
11 | - /home/jovyan/.rsm-msba/conda/pkg_dir
12 |
--------------------------------------------------------------------------------
/files/connectorx/connectorx-0.3.2-cp311-cp311-manylinux_2_34_aarch64.whl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/files/connectorx/connectorx-0.3.2-cp311-cp311-manylinux_2_34_aarch64.whl
--------------------------------------------------------------------------------
/files/connectorx/connectorx-0.3.3-cp311-cp311-manylinux_2_35_aarch64.whl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/files/connectorx/connectorx-0.3.3-cp311-cp311-manylinux_2_35_aarch64.whl
--------------------------------------------------------------------------------
/files/connectorx/connectorx-0.3.3-cp312-cp312-manylinux_2_35_aarch64.whl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/files/connectorx/connectorx-0.3.3-cp312-cp312-manylinux_2_35_aarch64.whl
--------------------------------------------------------------------------------
/files/cr.sh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh -i
2 |
3 | set -e
4 |
5 | if [ "$1" != "" ]; then
6 | echo "Please confirm that you want to remove the conda environment '$1'"
7 | echo "Press y or n, followed by [ENTER]:"
8 | read cont
9 | if [ "${cont}" != "y" ]; then
10 | echo "The cr (conda remove) function requires the name of a conda envivronment to remove"
11 | echo "Note that if a jupyter kernel with the same name exists, it will be removed as well"
12 | else
13 | echo "\nThe code run in this function is:\n"
14 |
15 | CMD="conda remove -y -n $1 --all"
16 | echo "$CMD\n"
17 | eval $CMD
18 |
19 | CMD="jupyter kernelspec remove -y $1"
20 | echo "$CMD\n"
21 | eval $CMD
22 |
23 | echo "\nSee the link below for additional information about conda"
24 | echo "https://docs.conda.io/projects/conda/en/latest/user-guide/index.html"
25 | fi
26 | else
27 | echo "The conda_remove function requires the name of a conda envivronment to remove"
28 | echo "Note that if a jupyter kernel with the same name exists, it will be removed as well"
29 | fi
30 |
31 |
--------------------------------------------------------------------------------
/files/cuda.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # From:
4 | # https://raw.githubusercontent.com/rocker-org/rocker-versioned2/master/scripts/install_cuda-11.1.sh
5 |
6 | apt-get update && apt-get install -y --no-install-recommends \
7 | gnupg2 curl ca-certificates && \
8 | curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/7fa2af80.pub | apt-key add - && \
9 | echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/cuda.list && \
10 | echo "deb https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/nvidia-ml.list && \
11 | apt-get purge --autoremove -y curl \
12 | && rm -rf /var/lib/apt/lists/*
13 |
14 | CUDA_VERSION=${CUDA_VERSION:-11.1.1}
15 |
16 | # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a
17 | apt-get update && apt-get install -y --no-install-recommends \
18 | cuda-cudart-11-1=11.1.74-1 \
19 | cuda-compat-11-1 \
20 | && ln -s cuda-11.1 /usr/local/cuda && \
21 | rm -rf /var/lib/apt/lists/*
22 |
23 | # Required for nvidia-docker v1
24 | echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \
25 | echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf && \
26 | echo "export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda-11.1/targets/x86_64-linux/lib" >> /etc/profile
27 |
28 | ## Set all of these as global ENV
29 | # PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH}
30 | # LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64
31 |
32 | # NVIDIA_VISIBLE_DEVICES=all
33 | #NVIDIA_DRIVER_CAPABILITIES="compute,utility"
34 | # NVIDIA_REQUIRE_CUDA="cuda>=11.1 brand=tesla,driver>=418,driver<419 brand=tesla,driver>=440,driver<441 brand=tesla,driver>=450,driver<451"
35 |
36 | ## runtime ####################################################
37 | ##FROM ${IMAGE_NAME}:11.1-base-ubuntu20.04
38 |
39 | NCCL_VERSION=${NCCL_VERSION:-2.7.8}
40 |
41 | apt-get update && apt-get install -y --no-install-recommends \
42 | cuda-libraries-11-1=11.1.1-1 \
43 | libnpp-11-1=11.1.2.301-1 \
44 | cuda-nvtx-11-1=11.1.74-1 \
45 | libcublas-11-1=11.3.0.106-1 \
46 | libnccl2=$NCCL_VERSION-1+cuda11.1 \
47 | && apt-mark hold libnccl2 \
48 | && rm -rf /var/lib/apt/lists/*
49 |
50 | ## devel #######################################################
51 |
52 | apt-get update && apt-get install -y --no-install-recommends \
53 | cuda-nvml-dev-11-1=11.1.74-1 \
54 | cuda-command-line-tools-11-1=11.1.1-1 \
55 | cuda-nvprof-11-1=11.1.105-1 \
56 | libnpp-dev-11-1=11.1.2.301-1 \
57 | cuda-libraries-dev-11-1=11.1.1-1 \
58 | cuda-minimal-build-11-1=11.1.1-1 \
59 | libnccl-dev=2.7.8-1+cuda11.1 \
60 | libcublas-dev-11-1=11.3.0.106-1 \
61 | libcusparse-11-1=11.3.0.10-1 \
62 | libcusparse-dev-11-1=11.3.0.10-1 \
63 | && apt-mark hold libnccl-dev \
64 | && rm -rf /var/lib/apt/lists/*
65 |
66 | LIBRARY_PATH=/usr/local/cuda/lib64/stubs
--------------------------------------------------------------------------------
/files/install-R.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## Install R from source.
4 | ##
5 | ## In order of preference, first argument of the script, the R_VERSION variable.
6 | ## ex. latest, devel, patched, 4.0.0
7 | ##
8 | ## 'devel' means the prerelease development version (Latest daily snapshot of development version).
9 | ## 'patched' means the prerelease patched version (Latest daily snapshot of patched version).
10 |
11 | set -e
12 |
13 | R_VERSION=${1:-${R_VERSION:-"latest"}}
14 |
15 | apt-get update
16 | apt-get -y install locales lsb-release
17 |
18 | ## Configure default locale
19 | LANG=${LANG:-"en_US.UTF-8"}
20 | /usr/sbin/locale-gen --lang "${LANG}"
21 | /usr/sbin/update-locale --reset LANG="${LANG}"
22 |
23 | UBUNTU_VERSION=$(lsb_release -sc)
24 |
25 | export DEBIAN_FRONTEND=noninteractive
26 |
27 | R_HOME=${R_HOME:-"/usr/local/lib/R"}
28 |
29 | READLINE_VERSION=8
30 | OPENBLAS=libopenblas-dev
31 | if [ ${UBUNTU_VERSION} == "bionic" ]; then
32 | READLINE_VERSION=7
33 | OPENBLAS=libopenblas-dev
34 | fi
35 |
36 | apt-get install -y --no-install-recommends \
37 | bash-completion \
38 | ca-certificates \
39 | devscripts \
40 | file \
41 | fonts-texgyre \
42 | g++ \
43 | gfortran \
44 | gsfonts \
45 | libblas-dev \
46 | libbz2-* \
47 | libcurl4 \
48 | libicu* \
49 | liblapack-dev \
50 | libpcre2* \
51 | libjpeg-turbo* \
52 | ${OPENBLAS} \
53 | libpangocairo-* \
54 | libpng16* \
55 | "libreadline${READLINE_VERSION}" \
56 | libtiff* \
57 | liblzma* \
58 | make \
59 | unzip \
60 | zip \
61 | zlib1g
62 |
63 | BUILDDEPS="curl \
64 | default-jdk \
65 | libbz2-dev \
66 | libcairo2-dev \
67 | libcurl4-openssl-dev \
68 | libpango1.0-dev \
69 | libjpeg-dev \
70 | libicu-dev \
71 | libpcre2-dev \
72 | libpng-dev \
73 | libreadline-dev \
74 | libtiff5-dev \
75 | liblzma-dev \
76 | libx11-dev \
77 | libxt-dev \
78 | perl \
79 | rsync \
80 | subversion \
81 | tcl-dev \
82 | tk-dev \
83 | texinfo \
84 | texlive-extra-utils \
85 | texlive-fonts-recommended \
86 | texlive-fonts-extra \
87 | texlive-latex-recommended \
88 | texlive-latex-extra \
89 | x11proto-core-dev \
90 | xauth \
91 | xfonts-base \
92 | xvfb \
93 | wget \
94 | zlib1g-dev"
95 |
96 | # shellcheck disable=SC2086
97 | apt-get install -y --no-install-recommends ${BUILDDEPS}
98 |
99 | wget -O icu4c.tgz https://github.com/unicode-org/icu/releases/download/release-70-1/icu4c-70_1-src.tgz \
100 | && tar -xf icu4c.tgz \
101 | && cd icu/source \
102 | && chmod +x runConfigureICU configure install-sh \
103 | && ./runConfigureICU Linux/gcc --prefix=/usr/local \
104 | && make \
105 | && make install \
106 | && cd \
107 | && rm icu4c.tgz
108 |
109 | wget https://ftp.gnu.org/pub/gnu/libiconv/libiconv-1.16.tar.gz \
110 | && tar -xf libiconv-1.16.tar.gz \
111 | && cd libiconv-1.16 \
112 | && chmod +x configure \
113 | && ./configure --prefix /usr/local \
114 | && make \
115 | && make install \
116 | && cd \
117 | && rm libiconv-1.16.tar.gz
118 |
119 | ## Download R from 0-Cloud CRAN mirror or CRAN
120 | function download_r_src() {
121 | wget "https://cloud.r-project.org/src/$1" -O "R.tar.gz" ||
122 | wget "https://cran.r-project.org/src/$1" -O "R.tar.gz"
123 | }
124 |
## Map R_VERSION to the matching source tarball: the keywords devel,
## patched, and latest select the pre-release/stable channels; anything
## else is treated as an explicit version number (e.g. 4.3.2 lives under
## base/R-4/).
case "$R_VERSION" in
devel)
    download_r_src "base-prerelease/R-devel.tar.gz"
    ;;
patched)
    download_r_src "base-prerelease/R-latest.tar.gz"
    ;;
latest)
    download_r_src "base/R-latest.tar.gz"
    ;;
*)
    download_r_src "base/R-${R_VERSION%%.*}/R-${R_VERSION}.tar.gz"
    ;;
esac
134 |
## Unpack the sources and enter the R-<version> build directory
tar xzf "R.tar.gz"
cd R-*/

## Configure R. The leading VAR=... assignments are configure-time
## defaults baked into the build (paper size, pager, zip/unzip/print
## helpers); LIBnn=lib installs under lib rather than lib64; the
## CFLAGS/CXXFLAGS add stack-protector/FORTIFY hardening.
## --enable-R-shlib builds libR as a shared library (required by RStudio).
R_PAPERSIZE=letter \
    R_BATCHSAVE="--no-save --no-restore" \
    R_BROWSER=xdg-open \
    PAGER=/usr/bin/pager \
    PERL=/usr/bin/perl \
    R_UNZIPCMD=/usr/bin/unzip \
    R_ZIPCMD=/usr/bin/zip \
    R_PRINTCMD=/usr/bin/lpr \
    LIBnn=lib \
    AWK=/usr/bin/awk \
    CFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g" \
    CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g" \
    ./configure --enable-R-shlib \
    --enable-memory-profiling \
    --with-readline \
    --with-blas \
    --with-lapack \
    --with-tcltk \
    --with-recommended-packages

## Build, install, then drop the intermediate build artifacts
make
make install
make clean
161 |
## Add a library directory (for user-installed packages); group "staff"
## gets setgid write access so non-root users can install packages there.
mkdir -p "${R_HOME}/site-library"
chown root:staff "${R_HOME}/site-library"
chmod g+ws "${R_HOME}/site-library"

## Fix library path: default R_LIBS to site-library:library unless the
## environment already provides one (the escaped \${R_LIBS-...} is
## expanded by R at session start, not by this script).
echo "R_LIBS=\${R_LIBS-'${R_HOME}/site-library:${R_HOME}/library'}" >>"${R_HOME}/etc/Renviron.site"

## Clean up from R source install
cd ..
rm -rf /tmp/*
rm -rf R-*/
rm -rf "R.tar.gz"

## Purge the build-only packages again to keep the image small;
## BUILDDEPS is intentionally unquoted so it word-splits into names.
# shellcheck disable=SC2086
apt-get remove --purge -y ${BUILDDEPS}
apt-get autoremove -y
apt-get autoclean -y
rm -rf /var/lib/apt/lists/*

# Check the R info
echo -e "Check the R info...\n"

R -q -e "sessionInfo()"

echo -e "\nInstall R from source, done!"
188 |
--------------------------------------------------------------------------------
/files/install-rstudio.sh:
--------------------------------------------------------------------------------
#!/bin/bash

## Download and install RStudio server & dependencies uses.
## Also symlinks pandoc, pandoc-citeproc so they are available system-wide.
##
## In order of preference, first argument of the script, the RSTUDIO_VERSION variable.
## ex. stable, preview, daily, 1.3.959, 2021.09.1+372, 2021.09.1-372, 2022.06.0-daily+11

## adapted from
## https://raw.githubusercontent.com/rocker-org/rocker-versioned2/master/scripts/install_rstudio.sh

set -e

## $(...) instead of legacy backticks for command substitution
UBUNTU_VERSION=${UBUNTU_VERSION:-$(lsb_release -sc)}

DEFAULT_USER=${DEFAULT_USER:-jovyan}
ARCH=$(dpkg --print-architecture)

apt-get update
## Runtime dependencies for RStudio Server. 'liblzma*' is quoted so the
## shell cannot glob-expand the pattern against files in the current
## directory before apt-get sees it.
apt-get install -y --no-install-recommends \
    file \
    git \
    libapparmor1 \
    libgc1 \
    libclang-dev \
    libcurl4-openssl-dev \
    libedit2 \
    libobjc4 \
    libssl-dev \
    libpq5 \
    psmisc \
    procps \
    python-is-python3 \
    python3-setuptools \
    pwgen \
    sudo \
    wget \
    'liblzma*' \
    liblzma-dev

rm -rf /var/lib/apt/lists/*
42 |
## Make the tools bundled with RStudio Server (pandoc, etc.) resolvable
export PATH=/usr/lib/rstudio-server/bin:$PATH

## Download RStudio Server for Ubuntu 18+
DOWNLOAD_FILE=rstudio-server.deb

## "latest" is treated as an alias for the "stable" channel
if [ "$RSTUDIO_VERSION" = "latest" ]; then
    RSTUDIO_VERSION="stable"
fi
51 |
## Download the RStudio Server build matching the CPU architecture.
## ARCH (from `dpkg --print-architecture` above, previously unused) is
## amd64 or arm64, which matches both the directory and the file suffix
## on the RStudio build server — one URL template replaces the two
## duplicated uname-based branches.
## Pinned build: 2024.11.0-daily-302 (bump the version here when updating).
wget "https://s3.amazonaws.com/rstudio-ide-build/server/jammy/${ARCH}/rstudio-server-2024.11.0-daily-302-${ARCH}.deb" -O "$DOWNLOAD_FILE"
63 |
## Install the package (dependencies were installed above)
dpkg -i "$DOWNLOAD_FILE"
# env PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin LD_LIBRARY_PATH= dpkg -i "$DOWNLOAD_FILE"
rm "$DOWNLOAD_FILE"

## RStudio wants an /etc/R, will populate from $R_HOME/etc
mkdir -p /etc/R
## Persist the current PATH so R sessions started by RStudio inherit it
echo "PATH=${PATH}" >> ${R_HOME}/etc/Renviron.site

## Make RStudio compatible with case when R is built from source
## (and thus is at /usr/local/bin/R), because RStudio doesn't obey
## path if a user apt-get installs a package
R_BIN=$(which R)
echo "rsession-which-r=${R_BIN}" > /etc/rstudio/rserver.conf
## use more robust file locking to avoid errors when using shared volumes:
# echo "lock-type=advisory" > /etc/rstudio/file-locks

## Prepare optional configuration file to disable authentication
## To de-activate authentication, `disable_auth_rserver.conf` script
## will just need to be overwrite /etc/rstudio/rserver.conf.
## This is triggered by an env var in the user config
cp /etc/rstudio/rserver.conf /etc/rstudio/disable_auth_rserver.conf
echo "auth-none=1" >> /etc/rstudio/disable_auth_rserver.conf
86 |
## Set up RStudio init scripts (s6-style service dir: `run` starts the
## server in the foreground, `finish` stops it on shutdown)
mkdir -p /etc/services.d/rstudio
# shellcheck disable=SC2016
echo '#!/usr/bin/with-contenv bash
## load /etc/environment vars first:
for line in $( cat /etc/environment ) ; do export $line > /dev/null; done
exec /usr/lib/rstudio-server/bin/rserver --server-daemonize 0' \
    > /etc/services.d/rstudio/run

echo '#!/bin/bash
rstudio-server stop' \
    > /etc/services.d/rstudio/finish

# If CUDA enabled, make sure RStudio knows (config_cuda_R.sh handles this anyway)
if [ ! -z "$CUDA_HOME" ]; then
    sed -i '/^rsession-ld-library-path/d' /etc/rstudio/rserver.conf
    echo "rsession-ld-library-path=$LD_LIBRARY_PATH" >> /etc/rstudio/rserver.conf
fi

# Configure RStudio logging: warn level and above, routed to syslog.
# (NOTE(review): an earlier comment said "Log to stderr", but
# logger-type=syslog sends messages to syslog, not stderr.)
LOGGING="[*]
log-level=warn
logger-type=syslog
"

printf "%s" "$LOGGING" > /etc/rstudio/logging.conf
113 |
--------------------------------------------------------------------------------
/files/jupyter_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "CondaKernelSpecManager": {
3 | "conda_only": true,
4 | "kernelspec_path": "--user",
5 | "name_format": "{language} [{environment}]"
6 | }
7 | }
--------------------------------------------------------------------------------
/files/jupyterhub_config.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Jupyter Development Team.
2 | # Distributed under the terms of the Modified BSD License.
3 |
4 | # Configuration file for JupyterHub
5 | import os
6 | from pwd import getpwnam
7 | from grp import getgrnam
8 | from dockerspawner import DockerSpawner
9 | import docker
10 |
11 |
12 | c = get_config()
13 |
14 | max_num_cpu = 6
15 |
16 |
class MyDockerSpawner(DockerSpawner):
    """DockerSpawner that runs each single-user container under the hub
    user's host UID/GID and injects deployment-specific settings into the
    container environment."""

    def uid_for_user(self, user):
        """Host UID for the given hub user, looked up via the passwd db."""
        return getpwnam(user.name).pw_uid

    def gid_for_user(self, user):
        """Host GID for the given hub user, looked up via the group db."""
        return getgrnam(user.name).gr_gid

    def get_env(self):
        """Extend the default spawner environment with the UID/GID
        mapping, CPU-thread caps, and hub/SSL file locations."""
        env = super().get_env()
        thread_cap = str(max_num_cpu)
        env.update(
            {
                "NB_UID": self.uid_for_user(self.user),
                "NB_GID": self.gid_for_user(self.user),
                "PYTHONUSERBASE": "~/.rsm-msba",
                # cap BLAS/OpenMP threading to the per-user CPU budget
                "OPENBLAS_NUM_THREADS": thread_cap,
                "OMP_NUM_THREADS": thread_cap,
                # new stuff starts here
                "JUPYTERHUB_VERSION": "2.2.0",
                "DOCKER_MACHINE_NAME": "jupyterhub",
                "DOCKER_NETWORK_NAME": "jupyterhub-network",
                "DOCKER_NOTEBOOK_IMAGE": "jupyterhub-user",
                "LOCAL_NOTEBOOK_IMAGE": "jupyterhub-user",
                "DOCKER_NOTEBOOK_DIR": "/home/jovyan/",
                "DOCKER_SPAWN_CMD": "start-singleuser.sh",
                "SSL_KEY": "/etc/jupyterhub/secrets/rsm-compute-01.ucsd.edu.key",
                "SSL_CERT": "/etc/jupyterhub/secrets/chained.crt",
                "COOKIE_SECRET_FILE_PATH": "/etc/jupyterhub/secrets/jupyterhub_cookie_secret",
                "SQLITE_FILE_PATH": "/etc/jupyterhub/secrets/jupyterhub.sqlite",
                "PROXY_PID_FILE_PATH": "/etc/jupyterhub/secrets/jupyterhub-proxy.pid",
            }
        )
        return env
47 |
48 |
49 | c.DockerSpawner.extra_create_kwargs = {"user": "root"}
50 | c.Authenticator.delete_invalid_users = True
51 |
52 | # We rely on environment variables to configure JupyterHub so that we
53 | # avoid having to rebuild the JupyterHub container every time we change a
54 | # configuration parameter.
55 |
56 | # Spawn single-user servers as Docker containers
57 | # c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
58 | c.JupyterHub.spawner_class = MyDockerSpawner
59 | # Spawn containers from this image
60 | # c.DockerSpawner.image = os.environ["DOCKER_NOTEBOOK_IMAGE"]
61 | c.DockerSpawner.image = "vnijs/rsm-msba-intel-jupyterhub" # this line may be omitted
62 | # JupyterHub requires a single-user instance of the Notebook server, so we
63 | # default to using the `start-singleuser.sh` script included in the
64 | # jupyter/docker-stacks *-notebook images as the Docker run command when
65 | # spawning containers. Optionally, you can override the Docker run command
66 | # using the DOCKER_SPAWN_CMD environment variable.
67 | spawn_cmd = os.environ.get("DOCKER_SPAWN_CMD", "start-singleuser.sh")
68 | c.DockerSpawner.extra_create_kwargs.update({"command": spawn_cmd})
69 | # Connect containers to this Docker network
70 | # network_name = os.environ["DOCKER_NETWORK_NAME"]
71 | network_name = "jupyterhub-network"
72 | c.DockerSpawner.use_internal_ip = True
73 | c.DockerSpawner.network_name = network_name
74 | # Pass the network name as argument to spawned containers
75 | c.DockerSpawner.extra_host_config = {
76 | "network_mode": network_name,
77 | "cpu_period": 100000,
78 | "cpu_quota": max_num_cpu * 100000,
79 | # "device_requests": [docker.types.DeviceRequest(count=-1, capabilities=[["gpu"]])]
80 | }
81 | c.Spawner.mem_limit = (
82 | "16G" # cpu limit set above using c.DockerSpawner.extra_host_config
83 | # "64G" # cpu limit set above using c.DockerSpawner.extra_host_config
84 | )
85 | c.DockerSpawner.cpu_limit = 0.5
86 | c.Spawner.cpu_limit = 0.5
87 |
88 | # Explicitly set notebook directory because we'll be mounting a host volume to
89 | # it. Most jupyter/docker-stacks *-notebook images run the Notebook server as
90 | # user `jovyan`, and set the notebook directory to `/home/jovyan/work`.
91 | # We follow the same convention.
92 | notebook_dir = os.environ.get("DOCKER_NOTEBOOK_DIR") or "/home/jovyan/work"
93 | c.DockerSpawner.notebook_dir = notebook_dir
94 |
95 |
96 | c.DockerSpawner.environment = {"JUPYTER_ENABLE_LAB": "yes"}
97 |
98 |
99 | # Mount the real user's Docker volume on the host to the notebook user's
100 | # notebook directory in the container
101 | c.DockerSpawner.volumes = {
102 | "/home/{username}": notebook_dir,
103 | "pg_data_{username}": "/var/lib/postgresql/14/main",
104 | "/srv/jupyterhub/resources": "/srv/jupyterhub/resources",
105 | "/srv/jupyterhub/capstone_data": "/srv/jupyterhub/capstone_data",
106 | }
107 |
108 | c.DockerSpawner.read_only_volumes = {
109 | "/srv/jupyterhub/read-only": "/srv/jupyterhub/read-only",
110 | "/home/vnijs/Dropbox/": "/home/vnijs/Dropbox/",
111 | }
112 |
113 | # Remove containers once they are stopped
114 | c.DockerSpawner.remove_containers = True
115 | c.JupyterHub.cleanup_servers = False
116 |
117 | # User containers will access hub by container name on the Docker network
118 | c.JupyterHub.hub_ip = "172.17.0.1"
119 | c.JupyterHub.hub_port = 8080
120 |
121 | # TLS config
122 | # c.JupyterHub.port = 443
123 | # c.JupyterHub.ssl_key = os.environ["SSL_KEY"]
124 | c.JupyterHub.ssl_key = "/etc/jupyterhub/secrets/rsm-compute-01.ucsd.edu.key"
125 | # c.JupyterHub.ssl_cert = os.environ["SSL_CERT"]
126 | c.JupyterHub.ssl_cert = "/etc/jupyterhub/secrets/chained.crt"
127 |
128 | # Reverse proxy config
129 | # c.JupyterHub.port = 8000
130 | c.JupyterHub.port = 8000
131 | c.JupyterHub.bind_url = "http://127.0.0.1:8000"
132 | # c.JupyterHub.bind_url = 'https://rsm-compute-01.ucsd.edu'
133 |
134 |
135 | # New
136 | # c.JupyterHub.ip = '127.0.0.1'
137 | c.JupyterHub.ip = "0.0.0.0"
138 |
139 | c.JupyterHub.concurrent_spawn_limit = 80
140 | c.Spawner.start_timeout = 120
141 |
142 | c.PAMAuthenticator.open_sessions = False
143 |
144 | from jupyterhub.auth import PAMAuthenticator
145 | import pamela
146 | from tornado import gen
147 |
148 |
class KerberosPAMAuthenticator(PAMAuthenticator):
    # PAM authenticator variant that keeps credentials established during
    # authentication (see docstring). Uses tornado's legacy coroutine
    # decorator rather than async def.
    @gen.coroutine
    def authenticate(self, handler, data):
        """Authenticate with PAM, and return the username if login is successful.
        Return None otherwise.
        Establish credentials when authenticating instead of reinitializing them
        so that a Kerberos cred cache has the proper UID in it.
        """
        username = data["username"]
        try:
            # PAM_ESTABLISH_CRED establishes credentials during the PAM
            # conversation so a Kerberos ticket cache ends up with the
            # authenticated user's UID.
            pamela.authenticate(
                username,
                data["password"],
                service=self.service,
                resetcred=pamela.PAM_ESTABLISH_CRED,
            )
        except pamela.PAMError as e:
            # Log the failure (with client IP when a handler is present);
            # falling through returns None, which JupyterHub treats as
            # "authentication failed".
            if handler is not None:
                self.log.warning(
                    "PAM Authentication failed (%s@%s): %s",
                    username,
                    handler.request.remote_ip,
                    e,
                )
            else:
                self.log.warning("PAM Authentication failed: %s", e)
        else:
            return username
177 |
178 |
179 | c.JupyterHub.authenticator_class = KerberosPAMAuthenticator
180 |
181 |
182 | # c.JupyterHub.cookie_secret_file = os.environ["COOKIE_SECRET_FILE_PATH"]
183 | c.JupyterHub.cookie_secret_file = "/etc/jupyterhub/secrets/jupyterhub_cookie_secret"
184 | # c.JupyterHub.db_url = os.environ["SQLITE_FILE_PATH"]
185 | c.JupyterHub.db_url = "/etc/jupyterhub/secrets/jupyterhub.sqlite"
186 | # c.ConfigurableHTTPProxy.pid_file = os.environ["PROXY_PID_FILE_PATH"]
187 | c.ConfigurableHTTPProxy.pid_file = "/etc/jupyterhub/secrets/jupyterhub-proxy.pid"
188 |
189 |
190 | # c.JupyterHub.log_level = 'DEBUG'
191 | # c.Spawner.debug = True
192 | # c.DockerSpawner.debug = True
193 |
# Whitelist users and admins
c.Authenticator.whitelist = whitelist = set()
c.Authenticator.admin_users = admin = set()
c.JupyterHub.admin_access = True
pwd = os.path.dirname(__file__)
# Read the "userlist" file sitting next to this config; each line is a
# username optionally followed by the word "admin".
with open(os.path.join(pwd, "userlist")) as f:
    for line in f:
        # NOTE(review): `if line:` is always true for lines from a file
        # iterator (they keep their trailing newline); the len(parts)
        # check below is what actually skips blank lines.
        if line:
            # continue
            parts = line.split()
            # in case of newline at the end of userlist file
            if len(parts) >= 1:
                name = parts[0]
                # whitelist.add(name) is left disabled below — presumably
                # any PAM user may log in and only admins are collected
                # here; verify this is intended.
                # whitelist.add(name)
                if len(parts) > 1 and parts[1] == "admin":
                    admin.add(name)
210 |
--------------------------------------------------------------------------------
/files/pg_hba.conf:
--------------------------------------------------------------------------------
1 | # PostgreSQL Client Authentication Configuration File
2 | # ===================================================
3 | #
4 | # Refer to the "Client Authentication" section in the PostgreSQL
5 | # documentation for a complete description of this file. A short
6 | # synopsis follows.
7 | #
8 | # This file controls: which hosts are allowed to connect, how clients
9 | # are authenticated, which PostgreSQL user names they can use, which
10 | # databases they can access. Records take one of these forms:
11 | #
12 | # local DATABASE USER METHOD [OPTIONS]
13 | # host DATABASE USER ADDRESS METHOD [OPTIONS]
14 | # hostssl DATABASE USER ADDRESS METHOD [OPTIONS]
15 | # hostnossl DATABASE USER ADDRESS METHOD [OPTIONS]
16 | #
17 | # (The uppercase items must be replaced by actual values.)
18 | #
19 | # The first field is the connection type: "local" is a Unix-domain
20 | # socket, "host" is either a plain or SSL-encrypted TCP/IP socket,
21 | # "hostssl" is an SSL-encrypted TCP/IP socket, and "hostnossl" is a
22 | # plain TCP/IP socket.
23 | #
24 | # DATABASE can be "all", "sameuser", "samerole", "replication", a
25 | # database name, or a comma-separated list thereof. The "all"
26 | # keyword does not match "replication". Access to replication
27 | # must be enabled in a separate record (see example below).
28 | #
29 | # USER can be "all", a user name, a group name prefixed with "+", or a
30 | # comma-separated list thereof. In both the DATABASE and USER fields
31 | # you can also write a file name prefixed with "@" to include names
32 | # from a separate file.
33 | #
34 | # ADDRESS specifies the set of hosts the record matches. It can be a
35 | # host name, or it is made up of an IP address and a CIDR mask that is
36 | # an integer (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that
37 | # specifies the number of significant bits in the mask. A host name
38 | # that starts with a dot (.) matches a suffix of the actual host name.
39 | # Alternatively, you can write an IP address and netmask in separate
40 | # columns to specify the set of hosts. Instead of a CIDR-address, you
41 | # can write "samehost" to match any of the server's own IP addresses,
42 | # or "samenet" to match any address in any subnet that the server is
43 | # directly connected to.
44 | #
45 | # METHOD can be "trust", "reject", "md5", "password", "scram-sha-256",
46 | # "gss", "sspi", "ident", "peer", "pam", "ldap", "radius" or "cert".
47 | # Note that "password" sends passwords in clear text; "md5" or
48 | # "scram-sha-256" are preferred since they send encrypted passwords.
49 | #
50 | # OPTIONS are a set of options for the authentication in the format
51 | # NAME=VALUE. The available options depend on the different
52 | # authentication methods -- refer to the "Client Authentication"
53 | # section in the documentation for a list of which options are
54 | # available for which authentication methods.
55 | #
56 | # Database and user names containing spaces, commas, quotes and other
57 | # special characters must be quoted. Quoting one of the keywords
58 | # "all", "sameuser", "samerole" or "replication" makes the name lose
59 | # its special character, and just match a database or username with
60 | # that name.
61 | #
62 | # This file is read on server startup and when the server receives a
63 | # SIGHUP signal. If you edit the file on a running system, you have to
64 | # SIGHUP the server for the changes to take effect, run "pg_ctl reload",
65 | # or execute "SELECT pg_reload_conf()".
66 | #
67 | # Put your actual configuration here
68 | # ----------------------------------
69 | #
70 | # If you want to allow non-local connections, you need to add more
71 | # "host" records. In that case you will also need to make PostgreSQL
72 | # listen on a non-local interface via the listen_addresses
73 | # configuration parameter, or via the -i or -h command line switches.
74 |
75 |
76 |
77 |
78 | # DO NOT DISABLE!
79 | # If you change this first entry you will need to make sure that the
80 | # database superuser can access the database using some other method.
81 | # Noninteractive access to all databases is required during automatic
82 | # maintenance (custom daily cronjobs, replication, and similar tasks).
83 | #
84 | # Database administrative login by Unix domain socket
85 | local all postgres peer
86 |
87 | # TYPE DATABASE USER ADDRESS METHOD
88 |
89 | # "local" is for Unix domain socket connections only
90 | local all all peer
91 | # IPv4 local connections:
92 | host all all 127.0.0.1/32 md5
93 | # IPv6 local connections:
94 | host all all ::1/128 md5
95 | # Allow replication connections from localhost, by a user with the
96 | # replication privilege.
97 | local replication all peer
98 | host replication all 127.0.0.1/32 md5
99 | host replication all ::1/128 md5
100 |
101 | ## required to connect from outside of docker container
102 | host all all 0.0.0.0/0 md5
103 |
--------------------------------------------------------------------------------
/files/safari-nerdfont.css:
--------------------------------------------------------------------------------
/*
 * Register the four MesloLGS NF (Nerd Font) faces — regular, bold,
 * italic, and bold-italic — fetched from the powerlevel10k media repo,
 * so Safari can render the glyphs used by the powerlevel10k zsh prompt.
 */
@font-face {
  font-family: "MesloLGS NF";
  src: url("https://raw.githubusercontent.com/romkatv/powerlevel10k-media/master/MesloLGS%20NF%20Regular.ttf");
  font-weight: normal;
  font-style: normal;
}

@font-face {
  font-family: "MesloLGS NF";
  src: url("https://raw.githubusercontent.com/romkatv/powerlevel10k-media/master/MesloLGS%20NF%20Bold.ttf");
  font-weight: bold;
  font-style: normal;
}

@font-face {
  font-family: "MesloLGS NF";
  src: url("https://raw.githubusercontent.com/romkatv/powerlevel10k-media/master/MesloLGS%20NF%20Italic.ttf");
  font-weight: normal;
  font-style: italic;
}

@font-face {
  font-family: "MesloLGS NF";
  src: url("https://raw.githubusercontent.com/romkatv/powerlevel10k-media/master/MesloLGS%20NF%20Bold%20Italic.ttf");
  font-weight: bold;
  font-style: italic;
}
--------------------------------------------------------------------------------
/files/scalable_analytics/core-site.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
16 |
17 |
18 |
19 |
20 |
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9100</value>
    </property>
23 |
24 |
25 |
--------------------------------------------------------------------------------
/files/scalable_analytics/hdfs-site.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
16 |
17 |
18 |
19 |
20 |
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
23 |
24 |
25 |
--------------------------------------------------------------------------------
/files/scalable_analytics/init-dfs.sh:
--------------------------------------------------------------------------------
#!/bin/bash

## Initialize a fresh single-node HDFS: create the NameNode/DataNode
## storage directories, silence the NativeCodeLoader warning, format the
## NameNode, and remove any stale DataNode block storage so the newly
## formatted NameNode and the DataNode agree on cluster IDs.

mkdir -p /tmp/hadoop-root/dfs/name
mkdir -p /tmp/hadoop-jovyan/dfs/data

## Append a log4j override suppressing the (harmless) warning about the
## missing native Hadoop library. ${HADOOP_HOME} is now quoted and the
## stray leading "/" dropped, so the path survives unusual values.
sed -i '$a\# Add the line for suppressing the NativeCodeLoader warning \nlog4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR,console' "${HADOOP_HOME}/etc/hadoop/log4j.properties"

"${HADOOP_HOME}/bin/hdfs" namenode -format -force

## dfs.datanode.data.dir is reported as a file:// URI; cut -c8- strips
## the 7-character "file://" scheme. Direct command substitution plus
## rm -rf replaces the old `echo | xargs rm -r` pipeline, which broke on
## paths with spaces and failed when the directory did not exist yet.
data_dir=$("${HADOOP_HOME}/bin/hdfs" getconf -confKey dfs.datanode.data.dir | cut -c8-)
rm -rf "${data_dir}"
--------------------------------------------------------------------------------
/files/scalable_analytics/start-dfs.sh:
--------------------------------------------------------------------------------
#!/bin/bash

## Bring up the three HDFS daemons for a single-node deployment,
## in the same order as before: namenode, datanode, secondarynamenode.

echo "Starting HDFS ..."

for daemon in namenode datanode secondarynamenode; do
    hdfs --daemon start "${daemon}"
done
--------------------------------------------------------------------------------
/files/scalable_analytics/stop-dfs.sh:
--------------------------------------------------------------------------------
#!/bin/bash

## Shut down the three HDFS daemons of the single-node deployment,
## in the same order as before: namenode, datanode, secondarynamenode.

echo "Stopping HDFS ..."

for daemon in namenode datanode secondarynamenode; do
    hdfs --daemon stop "${daemon}"
done
8 |
9 |
10 |
--------------------------------------------------------------------------------
/files/setup-arrow.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e

UBUNTU_VERSION=${UBUNTU_VERSION:-`lsb_release -sc`}
CRAN=${CRAN:-https://cran.r-project.org}

## mechanism to force source installs if we're using RSPM
## (strips the "__linux__/<release>/" binary-repo segment from the URL)
CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION/"/""}

## source install if using RSPM and arm64 image
## (fall back to plain CRAN so packages are built from source on arm64)
if [ "$(uname -m)" = "aarch64" ]; then
    CRAN=https://cran.r-project.org
    CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION/"/""}
    CRAN=$CRAN_SOURCE
fi

## parallelism passed to install helpers as Ncpus (default -1)
NCPUS=${NCPUS:--1}

## Install the R arrow package pinned to ${PYARROW_VERSION} — presumably
## to keep the R and Python arrow versions in sync; the ARROW_* env vars
## switch on parquet/snappy support for source builds.
R -e "Sys.setenv(ARROW_PARQUET = 'ON', ARROW_WITH_SNAPPY = 'ON', ARROW_R_DEV = TRUE); remotes::install_version('arrow', version='${PYARROW_VERSION}', repos='${CRAN}', Ncpus=${NCPUS})"
# -e "install.packages(c('systemfonts', 'textshaping', 'ragg', 'httpgd', 'svglite'), repos='${CRAN}', Ncpus=${NCPUS})"

# these run into issues earlier in the install process so we install them here
# R -e "install.packages(c('systemfonts', 'textshaping', 'ragg', 'httpgd', 'svglite'), repos='${CRAN}', Ncpus=${NCPUS})"

# for some reason this part needs to be at the end and does not work when combined with setup-radiant
# library(arrow)
# arrow_info()
--------------------------------------------------------------------------------
/files/setup-bml.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e

UBUNTU_VERSION=${UBUNTU_VERSION:-$(lsb_release -sc)}
CRAN=${CRAN:-https://cran.r-project.org}

## mechanism to force source installs if we're using RSPM
CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION/"/""}

## source install if using RSPM and arm64 image
if [ "$(uname -m)" = "aarch64" ]; then
    CRAN=$CRAN_SOURCE
fi

NCPUS=${NCPUS:--1}

## Install Bayesian modeling packages. The stray trailing "\" after this
## command has been removed: it continued the command onto the following
## blank line, and would have silently swallowed the next statement had
## that blank line ever been deleted.
R -e "install.packages(c('cmdstanr', 'posterior', 'bayesplot'), repo='${CRAN}', Ncpus=${NCPUS})"

## Drop the package download cache to keep the image small
rm -rf /tmp/downloaded_packages
--------------------------------------------------------------------------------
/files/setup-extra.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | UBUNTU_VERSION=${UBUNTU_VERSION:-`lsb_release -sc`}
5 | CRAN=${CRAN:-https://cran.r-project.org}
6 |
7 | ## mechanism to force source installs if we're using RSPM
8 | CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION/"/""}
9 |
10 | ## source install if using RSPM and arm64 image
11 | if [ "$(uname -m)" = "aarch64" ]; then
12 | CRAN=https://cran.r-project.org
13 | CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION/"/""}
14 | CRAN=$CRAN_SOURCE
15 | fi
16 |
17 | NCPUS=${NCPUS:--1}
18 |
19 | # if [ -f "/opt/conda/bin/R" ]; then
20 | # if [ "$(uname -m)" == "aarch64" ]; then
21 | # mamba install --quiet --yes -c conda-forge \
22 | # r-raster \
23 | # imagemagick \
24 | # libgdal
25 |
26 | # # R -e "install.packages('rgdal', repo='${CRAN}', Ncpus=${NCPUS})"
27 | # else
28 | # mamba install --quiet --yes -c conda-forge \
29 | # r-raster \
30 | # imagemagick \
31 | # libgdal \
32 | # r-rgdal
33 | # fi
34 | # else
35 | # export DEBIAN_FRONTEND=noninteractive
36 | # apt-get update -qq && apt-get -y install \
37 | # libcurl4-openssl-dev \
38 | # libssl-dev \
39 | # imagemagick \
40 | # libmagick++-dev \
41 | # gsfonts \
42 | # libpng-dev \
43 | # libgdal-dev \
44 | # gdal-bin \
45 | # libgeos-dev \
46 | # libproj-dev \
47 | # libicu-dev \
48 | # && rm -rf /var/lib/apt/lists/*
49 |
50 | # # R -e "install.packages('rgdal', repo='${CRAN}', Ncpus=${NCPUS})"
51 | # fi
52 |
53 | # R -e "install.packages(c('magick', 'leaflet', 'stringi', 'profvis'), repo='${CRAN}', Ncpus=${NCPUS})"
54 | # R -e "install.packages('devtools', repo='${CRAN}', Ncpus=${NCPUS})" \
55 | R -e "remotes::install_github('vnijs/webshot', upgrade = 'never')" \
56 | -e "webshot::install_phantomjs()"
57 | # -e "install.packages(c('tidytext', 'wordcloud'), repo='${CRAN}', Ncpus=${NCPUS})"
58 |
59 | # for Machine Learning with PyTorch and Scikit-Learn
60 | mamba install --quiet --yes -c conda-forge pytorch-lightning tensorboard zip
61 |
62 | rm -rf /tmp/downloaded_packages
--------------------------------------------------------------------------------
/files/setup-hadoop.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e

## Download and unpack Apache Hadoop into ${HADOOP_HOME}, drop the bundled
## docs, expose the `hadoop` binary system-wide, and hand ownership to the
## notebook user.

if [ ! -d "${HADOOP_HOME}" ]; then
    ## -p tolerates missing parent directories
    mkdir -p "${HADOOP_HOME}"
fi

## Use https instead of plain http (archive.apache.org supports TLS) and
## quote all variable expansions; the tarball is streamed straight into
## tar to avoid a temporary file.
curl -sL --retry 3 \
    "https://archive.apache.org/dist/hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz" \
    | gunzip \
    | tar -x --strip-components=1 -C "${HADOOP_HOME}" \
    && rm -rf "${HADOOP_HOME}/share/doc" \
    && ln -s /opt/hadoop/bin/hadoop /usr/bin/hadoop \
    && chown -R "${NB_USER}" "${HADOOP_HOME}" \
    && mkdir -p "${HADOOP_HOME}/logs"
16 |
--------------------------------------------------------------------------------
/files/setup-jupyterhub.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
## Print usage information and abort; called when an unknown argument is
## passed on the command line.
function docker_setup() {
    echo "Usage: $0 [-d]"
    echo "  -d, --dev    Setup using development repo"
    echo ""
    exit 1
}
9 |
## parse command-line arguments
## Fixed: use an arithmetic comparison (-gt) instead of the lexicographic
## string comparison (>), and shift only once per flag — -d takes no
## value, so the original double shift could swallow a following argument
## (or error when -d was the last one). The unknown-argument branch ends
## in docker_setup, which exits, so no shift is needed there.
while [[ "$#" -gt 0 ]]; do case $1 in
    -d | --dev) ARG_TAG="$1"; shift ;;
    *) echo "Unknown parameter passed: $1"; echo ""; docker_setup ;;
esac; done
15 |
## Detect the host platform, pick the user's home directory, and define a
## matching in-place `sed` wrapper (BSD sed on macOS needs the extra ''
## argument after -i).
ostype=`uname`
if [[ "$ostype" == "Linux" ]]; then
    HOMEDIR=~
    sed_fun () {
        sed -i $1 "$2"
    }
    ## WSL2 also reports "Linux"; the presence of explorer.exe on the
    ## PATH distinguishes it from native Linux
    is_wsl=$(which explorer.exe)
    if [[ "$is_wsl" != "" ]]; then
        ostype="WSL2"
        HOMEDIR=~
    fi
elif [[ "$ostype" == "Darwin" ]]; then
    ostype="macOS"
    HOMEDIR=~
    sed_fun () {
        sed -i '' -e $1 "$2"
    }
else
    ostype="Windows"
    HOMEDIR="C:/Users/$USERNAME"
    sed_fun () {
        sed -i $1 "$2"
    }
fi
40 |
## make sure abend is set correctly
## https://community.rstudio.com/t/restarting-rstudio-server-in-docker-avoid-error-message/10349/2
rstudio_abend () {
    if [ -d "${HOMEDIR}/.rstudio/sessions/active" ]; then
        ## Fixed: the original quoted the glob, so find received the
        ## literal pattern ".../*/session-persistent-state" as a path,
        ## never matched anything (error hidden by 2>/dev/null), and the
        ## abend flag was never reset. Use find's own -name test instead.
        RSTUDIO_STATE_FILES=$(find "${HOMEDIR}/.rstudio/sessions/active" -name "session-persistent-state" -type f 2>/dev/null)
        if [ "${RSTUDIO_STATE_FILES}" != "" ]; then
            ## NOTE(review): sed_fun only uses its first two arguments,
            ## so with several state files only the first is edited —
            ## behavior preserved from the original; confirm if all
            ## sessions should be reset.
            sed_fun 's/abend="1"/abend="0"/' ${RSTUDIO_STATE_FILES}
        fi
    fi
    if [ -d "${HOMEDIR}/.rstudio/monitored/user-settings" ]; then
        ## Rewrite the persistent user settings: drop any existing
        ## history/load/save options, then append the desired defaults.
        touch "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
        sed_fun '/^alwaysSaveHistory="[0-1]"/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
        sed_fun '/^loadRData="[0-1]"/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
        sed_fun '/^saveAction=/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
        echo 'alwaysSaveHistory="1"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
        echo 'loadRData="0"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
        echo 'saveAction="0"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
        sed_fun '/^$/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    fi
}
61 |
echo "-----------------------------------------------------------------------"
echo "Set appropriate default settings for Rstudio"
echo "-----------------------------------------------------------------------"

## rstudio_abend and sed_fun are defined earlier in this script
rstudio_abend

echo "-----------------------------------------------------------------------"
echo "Set report generation options for Radiant"
echo "-----------------------------------------------------------------------"

## rewrite the radiant-related options in ~/.Rprofile: delete any previous
## values first, then append the recommended settings
RPROF="${HOMEDIR}/.Rprofile"
touch "${RPROF}"

sed_fun '/^options(radiant.maxRequestSize/d' "${RPROF}"
sed_fun '/^options(radiant.report/d' "${RPROF}"
sed_fun '/^options(radiant.shinyFiles/d' "${RPROF}"
sed_fun '/^options(radiant.ace_autoComplete/d' "${RPROF}"
sed_fun '/^options(radiant.ace_theme/d' "${RPROF}"
sed_fun '/^#.*List.*specific.*directories.*you.*want.*to.*use.*with.*radiant/d' "${RPROF}"
sed_fun '/^#.*options(radiant\.sf_volumes.*=.*c(Git.*=.*"\/home\/jovyan\/git"))/d' "${RPROF}"
## NOTE(review): this echo has no >> "${RPROF}" redirect, so it only prints a
## blank line to the terminal — confirm whether appending was intended
echo ''
echo 'options(radiant.maxRequestSize = -1)' >> "${RPROF}"
echo 'options(radiant.report = TRUE)' >> "${RPROF}"
echo 'options(radiant.shinyFiles = TRUE)' >> "${RPROF}"
echo 'options(radiant.ace_autoComplete = "live")' >> "${RPROF}"
echo 'options(radiant.ace_theme = "tomorrow")' >> "${RPROF}"
## add a default repos option (RSPM binaries on Linux, minicran + CRAN
## elsewhere) unless the user has already set one
if ! grep -q 'options(\s*repos\s*' ${RPROF}; then
  echo '
if (Sys.info()["sysname"] == "Linux") {
  options(repos = c(
    RSPM = "https://packagemanager.rstudio.com/all/__linux__/jammy/latest",
    CRAN = "https://cloud.r-project.org"
  ))
} else {
  options(repos = c(
    RSM = "https://radiant-rstats.github.io/minicran",
    CRAN = "https://cloud.r-project.org"
  ))
}
' >> "${RPROF}"
fi
echo '# List specific directories you want to use with radiant' >> "${RPROF}"
echo '# options(radiant.sf_volumes = c(Git = "/home/jovyan/git"))' >> "${RPROF}"
echo '' >> "${RPROF}"
## NOTE(review): in a POSIX/BSD sed bracket expression [\s] matches the
## literal characters "\" and "s", not whitespace — confirm [[:space:]]
## was intended for this blank-line cleanup
sed_fun '/^[\s]*$/d' "${RPROF}"

echo "-----------------------------------------------------------------------"
echo "Setting up oh-my-zsh shell"
echo "-----------------------------------------------------------------------"

## adding a dir for zsh to use
if [ ! -d "${HOMEDIR}/.rsm-msba/zsh" ]; then
  mkdir -p "${HOMEDIR}/.rsm-msba/zsh"
fi

## copy the recommended powerlevel10k config; ask before overwriting
if [ ! -f "${HOMEDIR}/.rsm-msba/zsh/.p10k.zsh" ]; then
  cp /etc/skel/.p10k.zsh "${HOMEDIR}/.rsm-msba/zsh/.p10k.zsh"
else
  echo "-----------------------------------------------------"
  echo "You have an existing .p10k.zsh file. Do you want to"
  echo "replace it with the recommended version for this"
  echo "docker container (y/n)?"
  echo "-----------------------------------------------------"
  read overwrite
  if [ "${overwrite}" == "y" ]; then
    cp /etc/skel/.p10k.zsh "${HOMEDIR}/.rsm-msba/zsh/.p10k.zsh"
  fi
fi

## copy the oh-my-zsh installation; ask before overwriting
if [ ! -d "${HOMEDIR}/.rsm-msba/zsh/.oh-my-zsh" ]; then
  cp -r /etc/skel/.oh-my-zsh "${HOMEDIR}/.rsm-msba/zsh/"
else
  echo "-----------------------------------------------------"
  echo "You have an existing .oh-my-zsh directory. Do you"
  echo "want to replace it with the recommended version for"
  echo "this docker container (y/n)?"
  echo "-----------------------------------------------------"
  read overwrite
  if [ "${overwrite}" == "y" ]; then
    cp -r /etc/skel/.oh-my-zsh "${HOMEDIR}/.rsm-msba/zsh/"
  fi
fi

## copy the recommended .zshrc; ask before overwriting
if [ ! -f "${HOMEDIR}/.zshrc" ]; then
  cp /etc/skel/.zshrc "${HOMEDIR}/.zshrc"
  source "${HOMEDIR}/.zshrc" 2>/dev/null
else
  echo "---------------------------------------------------"
  echo "You have an existing .zshrc file. Do you want to"
  echo "replace it with the recommended version for this"
  echo "docker container (y/n)?"
  echo "---------------------------------------------------"
  read overwrite
  if [ "${overwrite}" == "y" ]; then
    cp /etc/skel/.zshrc "${HOMEDIR}/.zshrc"
    source "${HOMEDIR}/.zshrc" 2>/dev/null
  fi
fi

## offer to install TinyTeX (needed for PDF output) into the persistent
## .rsm-msba directory
if [ ! -d "${HOMEDIR}/.rsm-msba/TinyTex" ]; then
  echo "---------------------------------------------------"
  echo "To create PDFs you will need to install a recent"
  echo "distribution of TeX. We recommend using TinyTeX"
  echo "Do you want to install TinyTex now (y/n)?"
  echo "---------------------------------------------------"
  read tinytex
  if [ "${tinytex}" == "y" ]; then
    /usr/local/bin/R -e "tinytex::install_tinytex(dir = '${HOMEDIR}/.rsm-msba/TinyTex')"
  fi
fi

echo "-----------------------------------------------------------------------"
echo "Setup complete"
echo "-----------------------------------------------------------------------"
176 |
--------------------------------------------------------------------------------
/files/setup-ml-frameworks.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e

## Install PyTorch and torchvision from the official pytorch conda channel.
## Earlier revisions of this script used pip installs, pinned pillow builds,
## cpuonly variants, and full source builds of pytorch/vision on aarch64;
## both architectures now install the same conda packages (mamba/conda is
## required here — pip installs did not work for this image).
case "$(uname -m)" in
aarch64)
  mamba install -y pytorch torchvision -c pytorch
  ;;
*)
  # has to be conda for some reason
  mamba install -y pytorch torchvision -c pytorch
  ;;
esac
52 |
--------------------------------------------------------------------------------
/files/setup-quarto.sh:
--------------------------------------------------------------------------------
#!/bin/bash

## Adapted from https://github.com/rocker-org/rocker-versioned2/blob/master/scripts/install_quarto.sh
## Installs the Quarto CLI (.deb release matching the CPU architecture) and
## the R 'quarto' package.

set -e

## build ARGs
NCPUS=${NCPUS:--1}

## mechanism to force source installs if we're using RSPM
UBUNTU_VERSION=${UBUNTU_VERSION:-$(lsb_release -sc)}
CRAN=${CRAN:-https://cran.r-project.org}
CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION/"/""}

## pick the .deb architecture; on arm64 also force source installs of the
## R package (no RSPM binaries for arm64)
if [ "$(uname -m)" = "aarch64" ]; then
  DEB_ARCH=arm64
  CRAN=$CRAN_SOURCE
else
  DEB_ARCH=amd64
fi

## need pre-release for inline python
## BUG FIX: the install step previously ran without sudo while the update
## step used it, so it failed when the script was run as a non-root user
sudo apt-get update -qq && sudo apt-get -y install gdebi-core
wget "https://github.com/quarto-dev/quarto-cli/releases/download/v${QUARTO_VERSION}/quarto-${QUARTO_VERSION}-linux-${DEB_ARCH}.deb" -O quarto.deb
sudo gdebi -n quarto.deb # adding -n to run non-interactively

# Get R packages
R -e "install.packages('quarto', repo='${CRAN}', Ncpus=${NCPUS})"

# Clean up
rm -rf /var/lib/apt/lists/*
rm -rf /tmp/downloaded_packages
--------------------------------------------------------------------------------
/files/setup-radiant.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e

## Install radiant and related R packages, forcing source installs where
## RSPM binary packages cannot be used (arm64).

UBUNTU_VERSION=${UBUNTU_VERSION:-$(lsb_release -sc)}
CRAN=${CRAN:-https://cran.r-project.org}

## mechanism to force source installs if we're using RSPM
CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION/"/""}

## source install if using RSPM and arm64 image
## BUG FIX (dead code removed): the branch previously re-derived CRAN_SOURCE
## from the plain cran.r-project.org URL — a no-op, since that URL contains
## no "__linux__" segment; the net effect was simply CRAN=https://cran.r-project.org
if [ "$(uname -m)" = "aarch64" ]; then
  CRAN=https://cran.r-project.org
fi

NCPUS=${NCPUS:--1}

## build dependencies: conda-based images get them from conda-forge,
## otherwise install the system -dev libraries via apt
if [ -f "/opt/conda/bin/R" ]; then
  mamba install --quiet --yes -c conda-forge snappy cmake
else
  export DEBIAN_FRONTEND=noninteractive
  apt-get update -qq && apt-get -y --no-install-recommends install \
    libicu-dev \
    zlib1g-dev \
    libglpk-dev \
    libgmp3-dev \
    libxml2-dev \
    cmake \
    git \
    libharfbuzz-dev \
    libfribidi-dev \
    libfreetype6-dev \
    libpng-dev \
    libcairo2-dev \
    libtiff-dev \
    libjpeg-dev \
    libcurl4-openssl-dev \
    && rm -rf /var/lib/apt/lists/*
fi

# removed reticulate due to issue compiling RcppTOML
R -e "install.packages('igraph', repo='${CRAN}', Ncpus=${NCPUS})" \
  -e "install.packages(c('radiant', 'png', 'bslib', 'gitgadget', 'miniUI', 'webshot', 'tinytex', 'svglite'), repo='${CRAN}', Ncpus=${NCPUS})" \
  -e "install.packages(c('remotes', 'formatR', 'styler', 'renv'), repo='${CRAN}', Ncpus=${NCPUS})" \
  -e "install.packages(c('fs', 'janitor', 'dm', 'stringr'), repo='${CRAN}', Ncpus=${NCPUS})" \
  -e "install.packages(c('httpgd', 'languageserver'), repo='${CRAN}', Ncpus=${NCPUS})" \
  -e "remotes::install_github('radiant-rstats/radiant.update', upgrade = 'never')" \
  -e "remotes::install_github('vnijs/gitgadget', upgrade = 'never')" \
  -e "remotes::install_github('radiant-rstats/radiant.data', upgrade = 'never')" \
  -e "remotes::install_github('radiant-rstats/radiant.design', upgrade = 'never')" \
  -e "remotes::install_github('radiant-rstats/radiant.basics', upgrade = 'never')" \
  -e "remotes::install_github('radiant-rstats/radiant.model', upgrade = 'never')" \
  -e "remotes::install_github('radiant-rstats/radiant.multivariate', upgrade = 'never')" \
  -e "remotes::install_github('radiant-rstats/radiant', upgrade = 'never')" \
  -e "install.packages(c('dbplyr', 'DBI', 'RPostgres', 'RSQLite', 'pool', 'usethis'), repo='${CRAN}', Ncpus=${NCPUS})"

rm -rf /tmp/downloaded_packages
59 |
--------------------------------------------------------------------------------
/files/setup-tidyverse.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e

## adapted from
# https://github.com/rocker-org/rocker-versioned2/blob/master/scripts/install_tidyverse.sh
##
## NOTE: every package-install step in this script is currently disabled;
## only the repo-variable setup and the cache cleanup below remain active.
## (The apt -dev library installs and the tidyverse/remotes/DBI R package
## installs were commented out — digest 0.6.32, released 2023-06-26, was
## causing install issues as of 6/7/2023.)

UBUNTU_VERSION=${UBUNTU_VERSION:-$(lsb_release -sc)}
CRAN=${CRAN:-https://cran.r-project.org}

## mechanism to force source installs if we're using RSPM
CRAN_SOURCE=${CRAN/"__linux__/$UBUNTU_VERSION/"/""}

## source install if using RSPM and arm64 image
case "$(uname -m)" in
aarch64) CRAN=$CRAN_SOURCE ;;
esac

NCPUS=${NCPUS:--1}

rm -rf /tmp/downloaded_packages
--------------------------------------------------------------------------------
/files/setup.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
## print usage information for this setup script and exit with an error status
function docker_setup() {
  echo "Usage: $0 [-d]"
  echo " -d, --dev Setup using development repo"
  echo ""
  exit 1
}
9 |
## parse command-line arguments
## BUG FIX: -d/--dev takes no value, so only one shift is needed — the
## previous double shift silently consumed the following argument; also use
## numeric -gt instead of [[ ... > ... ]], which compares lexicographically
while [ "$#" -gt 0 ]; do case $1 in
  -d|--dev) ARG_TAG="$1"; shift;;
  *) echo "Unknown parameter passed: $1"; echo ""; docker_setup;;
esac; done
15 |
## detect the host OS and define a portable in-place sed wrapper
## (BSD sed on macOS requires `-i ''`; GNU sed takes `-i` with no argument)
ostype=`uname`
if [[ "$ostype" == "Linux" ]]; then
  HOMEDIR=~
  sed_fun () {
    sed -i $1 "$2"
  }
  ## explorer.exe on PATH indicates we are inside WSL2, not native Linux
  is_wsl=$(which explorer.exe)
  if [[ "$is_wsl" != "" ]]; then
    ostype="WSL2"
    HOMEDIR=~
  fi
elif [[ "$ostype" == "Darwin" ]]; then
  ostype="macOS"
  HOMEDIR=~
  sed_fun () {
    sed -i '' -e $1 "$2"
  }
else
  ## anything else is assumed to be Windows (e.g. git-bash)
  ostype="Windows"
  HOMEDIR="C:/Users/$USERNAME"
  sed_fun () {
    sed -i $1 "$2"
  }
fi
40 |
## make sure abend is set correctly
## https://community.rstudio.com/t/restarting-rstudio-server-in-docker-avoid-error-message/10349/2
## Resets RStudio's abnormal-end flag and pins history/save-workspace defaults
## in the monitored user-settings file. Relies on sed_fun and HOMEDIR defined above.
rstudio_abend () {
  if [ -d "${HOMEDIR}/.rstudio/sessions/active" ]; then
    ## BUG FIX: the wildcard was previously quoted and passed to find as the
    ## literal path "active/*/session-persistent-state", which never matches —
    ## find does not expand globs itself; search by -name instead
    RSTUDIO_STATE_FILES=$(find "${HOMEDIR}/.rstudio/sessions/active" -name session-persistent-state -type f 2>/dev/null)
    if [ "${RSTUDIO_STATE_FILES}" != "" ]; then
      sed_fun 's/abend="1"/abend="0"/' ${RSTUDIO_STATE_FILES}
    fi
  fi
  if [ -d "${HOMEDIR}/.rstudio/monitored/user-settings" ]; then
    ## drop any previous values, then append the desired defaults
    touch "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    sed_fun '/^alwaysSaveHistory="[0-1]"/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    sed_fun '/^loadRData="[0-1]"/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    sed_fun '/^saveAction=/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    echo 'alwaysSaveHistory="1"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    echo 'loadRData="0"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    echo 'saveAction="0"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    ## remove empty lines left behind by the deletions above
    sed_fun '/^$/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
  fi
}
61 |
echo "-----------------------------------------------------------------------"
echo "Set appropriate default settings for Rstudio"
echo "-----------------------------------------------------------------------"

## rstudio_abend and sed_fun are defined earlier in this script
rstudio_abend

echo "-----------------------------------------------------------------------"
echo "Set report generation options for Radiant"
echo "-----------------------------------------------------------------------"

## rewrite the radiant-related options in ~/.Rprofile: delete any previous
## values first, then append the recommended settings
RPROF="${HOMEDIR}/.Rprofile"
touch "${RPROF}"

sed_fun '/^options(radiant.maxRequestSize/d' "${RPROF}"
sed_fun '/^options(radiant.report/d' "${RPROF}"
sed_fun '/^options(radiant.shinyFiles/d' "${RPROF}"
sed_fun '/^options(radiant.ace_autoComplete/d' "${RPROF}"
sed_fun '/^options(radiant.ace_theme/d' "${RPROF}"
sed_fun '/^#.*List.*specific.*directories.*you.*want.*to.*use.*with.*radiant/d' "${RPROF}"
sed_fun '/^#.*options(radiant\.sf_volumes.*=.*c(Git.*=.*"\/home\/jovyan\/git"))/d' "${RPROF}"
## BUG FIX: this echo previously had no redirect and only printed a blank
## line to the terminal; append the separator blank line to the file instead
echo '' >> "${RPROF}"
echo 'options(radiant.maxRequestSize = -1)' >> "${RPROF}"
echo 'options(radiant.report = TRUE)' >> "${RPROF}"
echo 'options(radiant.shinyFiles = TRUE)' >> "${RPROF}"
echo 'options(radiant.ace_autoComplete = "live")' >> "${RPROF}"
echo 'options(radiant.ace_theme = "tomorrow")' >> "${RPROF}"
## add a default repos option (RSPM binaries on Linux, plain CRAN elsewhere)
## unless the user has already set one
if ! grep -q 'options(\s*repos\s*' "${RPROF}"; then
  echo '
if (Sys.info()["sysname"] == "Linux") {
  options(repos = c(
    RSPM = "https://packagemanager.posit.co/cran/__linux__/jammy/latest",
    CRAN = "https://cloud.r-project.org"
  ))
} else {
  options(repos = c(
    CRAN = "https://cloud.r-project.org"
  ))
}
' >> "${RPROF}"
fi
echo '# List specific directories you want to use with radiant' >> "${RPROF}"
echo '# options(radiant.sf_volumes = c(Git = "/home/jovyan/git"))' >> "${RPROF}"
echo '' >> "${RPROF}"
## BUG FIX: [\s] in a POSIX sed bracket expression matches the literal
## characters "\" and "s", not whitespace; use the [[:space:]] class to
## delete blank (or whitespace-only) lines
sed_fun '/^[[:space:]]*$/d' "${RPROF}"
106 |
echo "-----------------------------------------------------------------------"
echo "Setting up oh-my-zsh shell"
echo "-----------------------------------------------------------------------"

## adding a dir for zsh to use
if [ ! -d "${HOMEDIR}/.rsm-msba/zsh" ]; then
  mkdir -p "${HOMEDIR}/.rsm-msba/zsh"
fi

## copy the recommended powerlevel10k config; ask before overwriting
if [ ! -f "${HOMEDIR}/.rsm-msba/zsh/.p10k.zsh" ]; then
  cp /etc/skel/.p10k.zsh "${HOMEDIR}/.rsm-msba/zsh/.p10k.zsh"
else
  echo "-----------------------------------------------------"
  echo "You have an existing .p10k.zsh file. Do you want to"
  echo "replace it with the recommended version for this"
  echo "docker container (y/n)?"
  echo "-----------------------------------------------------"
  read overwrite
  if [ "${overwrite}" == "y" ]; then
    cp /etc/skel/.p10k.zsh "${HOMEDIR}/.rsm-msba/zsh/.p10k.zsh"
  fi
fi

## copy the oh-my-zsh installation; ask before overwriting
if [ ! -d "${HOMEDIR}/.rsm-msba/zsh/.oh-my-zsh" ]; then
  cp -r /etc/skel/.oh-my-zsh "${HOMEDIR}/.rsm-msba/zsh/"
else
  echo "-----------------------------------------------------"
  echo "You have an existing .oh-my-zsh directory. Do you"
  echo "want to replace it with the recommended version for"
  echo "this docker container (y/n)?"
  echo "-----------------------------------------------------"
  read overwrite
  if [ "${overwrite}" == "y" ]; then
    cp -r /etc/skel/.oh-my-zsh "${HOMEDIR}/.rsm-msba/zsh/"
  fi
fi
143 |
## install poetry (via pipx) if it is not already on the PATH
if command -v poetry &> /dev/null; then
  echo "Poetry is already installed"
else
  echo "Installing Poetry $POETRY_VERSION"
  pipx install poetry==$POETRY_VERSION
fi

## set up zsh completions for poetry
## BUG FIX: the mkdir line had a stray trailing "]" argument (creating a
## directory literally named "]"), and the completion file was written to
## $ZSH_CUSTOM — which may be unset — instead of the directory created here;
## -p also avoids a failure if a parent directory is missing
if [ ! -d "${HOMEDIR}/.rsm-msba/zsh/.oh-my-zsh/plugins/poetry" ]; then
  mkdir -p "${HOMEDIR}/.rsm-msba/zsh/.oh-my-zsh/plugins/poetry"
  poetry completions zsh > "${HOMEDIR}/.rsm-msba/zsh/.oh-my-zsh/plugins/poetry/_poetry"
fi
155 |
## copy the recommended .zshrc into the persistent zsh dir; ask before overwriting
if [ ! -f "${HOMEDIR}/.rsm-msba/zsh/.zshrc" ]; then
  cp /etc/skel/.zshrc "${HOMEDIR}/.rsm-msba/zsh/.zshrc"
  source ~/.rsm-msba/zsh/.zshrc 2>/dev/null
else
  echo "---------------------------------------------------"
  echo "You have an existing .zshrc file. Do you want to"
  echo "replace it with the recommended version for this"
  echo "docker container (y/n)?"
  echo "---------------------------------------------------"
  read overwrite
  if [ "${overwrite}" == "y" ]; then
    cp /etc/skel/.zshrc "${HOMEDIR}/.rsm-msba/zsh/.zshrc"
    source ~/.rsm-msba/zsh/.zshrc 2>/dev/null
  fi
fi

## write a default .lintr so R linting in VS Code uses relaxed rules
if [ ! -f "${HOMEDIR}/.lintr" ]; then
  echo "---------------------------------------------------"
  echo "Adding a .lintr file to set linting preferences for"
  echo "R in VS Code"
  echo "---------------------------------------------------"
  echo 'linters: linters_with_defaults(
  object_name_linter = NULL,
  commented_code_linter = NULL,
  line_length_linter(120))
' > "${HOMEDIR}/.lintr"
fi

## offer to install TinyTeX (needed for PDF output) into the persistent
## .rsm-msba directory
if [ ! -d "${HOMEDIR}/.rsm-msba/TinyTex" ]; then
  echo "---------------------------------------------------"
  echo "To create PDFs you will need to install a recent"
  echo "distribution of TeX. We recommend using TinyTeX"
  echo "Do you want to install TinyTex now (y/n)?"
  echo "---------------------------------------------------"
  read tinytex
  if [ "${tinytex}" == "y" ]; then
    R -e "tinytex::install_tinytex(dir = '${HOMEDIR}/.rsm-msba/TinyTex')"
  fi
fi

echo "-----------------------------------------------------------------------"
echo "Setup complete"
echo "-----------------------------------------------------------------------"
199 |
--------------------------------------------------------------------------------
/files/start-notebook.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.

set -e

## start postgres and sshd in the background
## (the previous "exec cmd &" form only replaced the forked subshell, which
## is equivalent to plain backgrounding and misleading, so exec was dropped)
sudo -u postgres /usr/lib/postgresql/${POSTGRES_VERSION}/bin/postgres -c config_file=/etc/postgresql/${POSTGRES_VERSION}/main/postgresql.conf &
sudo /usr/sbin/sshd -D &

if [[ ! -z "${JUPYTERHUB_API_TOKEN}" ]]; then
    # launched by JupyterHub, use single-user entrypoint
    # BUG FIX: "$@" (not $*) preserves arguments that contain spaces
    exec /usr/local/bin/start-singleuser.sh "$@"
else
    if [[ ! -z "${JUPYTER_ENABLE_LAB}" ]]; then
        . /usr/local/bin/start.sh jupyter lab "$@"
    else
        . /usr/local/bin/start.sh jupyter notebook "$@"
    fi
fi
20 |
21 |
--------------------------------------------------------------------------------
/files/start-services-simplified.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | # Start sshd
5 | /usr/sbin/sshd -D &
--------------------------------------------------------------------------------
/files/start-services.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e

## NOTE(review): every service except sshd is currently commented out below;
## this script exits right after forking sshd — confirm a foreground process
## (e.g. the jupyter lab line) should be re-enabled for container use

# Setup PostgreSQL data directory with current user permissions
# mkdir -p /var/lib/postgresql/${POSTGRES_VERSION}/main
# chown -R $(id -u):$(id -g) /var/lib/postgresql/${POSTGRES_VERSION}/main

# # Add dynamic user entry to /etc/passwd if it doesn't exist
# if ! grep -q "^${USER}:" /etc/passwd; then
#   echo "${USER}:x:$(id -u):$(id -g)::/home/${USER}:/bin/bash" >> /etc/passwd
# fi

# Start PostgreSQL
# /usr/lib/postgresql/${POSTGRES_VERSION}/bin/postgres -c config_file=/etc/postgresql/${POSTGRES_VERSION}/main/postgresql.conf &

# Start sshd
/usr/sbin/sshd -D &

# Start JupyterLab (this will be our foreground process)
# /opt/conda/bin/jupyter lab --ip=0.0.0.0 --port=8989 --allow-root --NotebookApp.token=''
--------------------------------------------------------------------------------
/files/start-singleuser.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.

set -e

## starting up postgres
## NOTE(review): "exec cmd &" runs exec in a forked subshell, so these behave
## like plain background jobs — confirm the exec prefix is intentional
exec sudo -u postgres /usr/lib/postgresql/${POSTGRES_VERSION}/bin/postgres -c config_file=/etc/postgresql/${POSTGRES_VERSION}/main/postgresql.conf &
exec sudo /usr/sbin/sshd -D &

# set default ip to 0.0.0.0 unless the caller already passed --ip=
if [[ "$NOTEBOOK_ARGS $@" != *"--ip="* ]]; then
  NOTEBOOK_ARGS="--ip=0.0.0.0 $NOTEBOOK_ARGS"
fi

# handle some deprecated environment variables
# from DockerSpawner < 0.8.
# These won't be passed from DockerSpawner 0.9,
# so avoid specifying --arg=empty-string
if [ ! -z "$NOTEBOOK_DIR" ]; then
  NOTEBOOK_ARGS="--notebook-dir='$NOTEBOOK_DIR' $NOTEBOOK_ARGS"
fi
if [ ! -z "$JPY_PORT" ]; then
  NOTEBOOK_ARGS="--port=$JPY_PORT $NOTEBOOK_ARGS"
fi
if [ ! -z "$JPY_USER" ]; then
  NOTEBOOK_ARGS="--user=$JPY_USER $NOTEBOOK_ARGS"
fi
if [ ! -z "$JPY_COOKIE_NAME" ]; then
  NOTEBOOK_ARGS="--cookie-name=$JPY_COOKIE_NAME $NOTEBOOK_ARGS"
fi
if [ ! -z "$JPY_BASE_URL" ]; then
  NOTEBOOK_ARGS="--base-url=$JPY_BASE_URL $NOTEBOOK_ARGS"
fi
if [ ! -z "$JPY_HUB_PREFIX" ]; then
  NOTEBOOK_ARGS="--hub-prefix=$JPY_HUB_PREFIX $NOTEBOOK_ARGS"
fi
if [ ! -z "$JPY_HUB_API_URL" ]; then
  NOTEBOOK_ARGS="--hub-api-url=$JPY_HUB_API_URL $NOTEBOOK_ARGS"
fi

## run the JupyterHub single-user server, defaulting the UI to JupyterLab
NOTEBOOK_BIN="jupyterhub-singleuser --SingleUserNotebookApp.default_url=/lab"

## hand off to the shared start script (sourced, not exec'd)
. /usr/local/bin/start.sh $NOTEBOOK_BIN $NOTEBOOK_ARGS "$@"
--------------------------------------------------------------------------------
/files/start.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright (c) Jupyter Development Team.
3 | # Distributed under the terms of the Modified BSD License.
4 |
5 | set -e
6 |
# Exec the specified command or fall back on bash
if [ $# -eq 0 ]; then
    cmd=bash
else
    # note: $* joins all arguments with spaces; word-splitting is re-applied
    # wherever $cmd is later expanded unquoted
    cmd=$*
fi
13 |
run-hooks () {
    # Source *.sh scripts and run executable files found in directory $1
    # (in shell glob order); non-executable, non-.sh files are skipped.
    if [[ ! -d "$1" ]] ; then
        return
    fi
    echo "$0: running hooks in $1"
    for f in "$1"/*; do
        case "$f" in
            *.sh)
                echo "$0: running $f"
                source "$f"
                ;;
            *)
                if [[ -x "$f" ]] ; then
                    echo "$0: running $f"
                    "$f"
                else
                    echo "$0: ignoring $f"
                fi
                ;;
        esac
    done
    # BUG FIX: this completion message was previously inside the loop and so
    # was printed once per hook file instead of once per directory
    echo "$0: done running hooks in $1"
}
38 |
run-hooks /usr/local/bin/start-notebook.d

# Handle special flags if we're root
if [ $(id -u) == 0 ] ; then

    # Only attempt to change the jovyan username if it exists
    if id jovyan &> /dev/null ; then
        echo "Set username to: $NB_USER"
        usermod -d /home/$NB_USER -l $NB_USER jovyan
    fi

    # Handle case where provisioned storage does not have the correct permissions by default
    # Ex: default NFS/EFS (no auto-uid/gid)
    if [[ "$CHOWN_HOME" == "1" || "$CHOWN_HOME" == 'yes' ]]; then
        echo "Changing ownership of /home/$NB_USER to $NB_UID:$NB_GID"
        chown $CHOWN_HOME_OPTS $NB_UID:$NB_GID /home/$NB_USER
    fi
    # CHOWN_EXTRA is a comma-separated list of additional paths to chown
    if [ ! -z "$CHOWN_EXTRA" ]; then
        for extra_dir in $(echo $CHOWN_EXTRA | tr ',' ' '); do
            chown $CHOWN_EXTRA_OPTS $NB_UID:$NB_GID $extra_dir
        done
    fi

    # handle home and working directory if the username changed
    if [[ "$NB_USER" != "jovyan" ]]; then
        # changing username, make sure homedir exists
        # (it could be mounted, and we shouldn't create it if it already exists)
        if [[ ! -e "/home/$NB_USER" ]]; then
            echo "Relocating home dir to /home/$NB_USER"
            mv /home/jovyan "/home/$NB_USER"
        fi
        # if workdir is in /home/jovyan, cd to /home/$NB_USER
        # (${PWD:13} strips the leading "/home/jovyan/" prefix)
        if [[ "$PWD/" == "/home/jovyan/"* ]]; then
            newcwd="/home/$NB_USER/${PWD:13}"
            echo "Setting CWD to $newcwd"
            cd "$newcwd"
        fi
    fi

    # Change UID of NB_USER to NB_UID if it does not match
    if [ "$NB_UID" != $(id -u $NB_USER) ] ; then
        echo "Set $NB_USER UID to: $NB_UID"
        usermod -u $NB_UID $NB_USER
    fi

    # Set NB_USER primary gid to NB_GID (after making the group). Set
    # supplementary gids to NB_GID and 100.
    if [ "$NB_GID" != $(id -g $NB_USER) ] ; then
        echo "Add $NB_USER to group: $NB_GID"
        groupadd -g $NB_GID -o ${NB_GROUP:-${NB_USER}}
        usermod -g $NB_GID -a -G $NB_GID,100 $NB_USER
    fi

    # Enable sudo if requested
    if [[ "$GRANT_SUDO" == "1" || "$GRANT_SUDO" == 'yes' ]]; then
        echo "Granting $NB_USER sudo access and appending $CONDA_DIR/bin to sudo PATH"
        echo "$NB_USER ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/notebook
    fi

    # Add $CONDA_DIR/bin to sudo secure_path
    sed -r "s#Defaults\s+secure_path=\"([^\"]+)\"#Defaults secure_path=\"\1:$CONDA_DIR/bin\"#" /etc/sudoers | grep secure_path > /etc/sudoers.d/path

    # Exec the command as NB_USER with the PATH and the rest of
    # the environment preserved
    run-hooks /usr/local/bin/before-notebook.d
    echo "Executing the command: $cmd"
    exec sudo -E -H -u $NB_USER PATH=$PATH XDG_CACHE_HOME=/home/$NB_USER/.cache PYTHONPATH=$PYTHONPATH $cmd
else
    if [[ "$NB_UID" == "$(id -u jovyan)" && "$NB_GID" == "$(id -g jovyan)" ]]; then
        # User is not attempting to override user/group via environment
        # variables, but they could still have overridden the uid/gid that
        # container runs as. Check that the user has an entry in the passwd
        # file and if not add an entry.
        # NOTE(review): when whoami succeeds, STATUS is never assigned, so
        # the following test compares an empty string to "0" and still enters
        # the branch — confirm STATUS should be initialized to 0 beforehand
        whoami &> /dev/null || STATUS=$? && true
        if [[ "$STATUS" != "0" ]]; then
            if [[ -w /etc/passwd ]]; then
                echo "Adding passwd file entry for $(id -u)"
                # rename the existing jovyan entry, then append one for the
                # current uid/gid under the jovyan name
                cat /etc/passwd | sed -e "s/^jovyan:/nayvoj:/" > /tmp/passwd
                echo "jovyan:x:$(id -u):$(id -g):,,,:/home/jovyan:/bin/bash" >> /tmp/passwd
                cat /tmp/passwd > /etc/passwd
                rm /tmp/passwd
            else
                echo 'Container must be run with group "root" to update passwd file'
            fi
        fi

        # Warn if the user isn't going to be able to write files to $HOME.
        if [[ ! -w /home/jovyan ]]; then
            echo 'Container must be run with group "users" to update files'
        fi
    else
        # Warn if looks like user want to override uid/gid but hasn't
        # run the container as root.
        if [[ ! -z "$NB_UID" && "$NB_UID" != "$(id -u)" ]]; then
            echo 'Container must be run as root to set $NB_UID'
        fi
        if [[ ! -z "$NB_GID" && "$NB_GID" != "$(id -g)" ]]; then
            echo 'Container must be run as root to set $NB_GID'
        fi
    fi

    # Warn if looks like user want to run in sudo mode but hasn't run
    # the container as root.
    if [[ "$GRANT_SUDO" == "1" || "$GRANT_SUDO" == 'yes' ]]; then
        echo 'Container must be run as root to grant sudo permissions'
    fi

    # Execute the command
    run-hooks /usr/local/bin/before-notebook.d
    echo "Executing the command: $cmd"
    exec $cmd
fi
151 |
--------------------------------------------------------------------------------
/files/supervisord copy.conf:
--------------------------------------------------------------------------------
# supervisord configuration (variant that elevates via sudo for postgres/sshd)
# Runs jupyterlab as NB_USER and logs each program to /var/log/supervisor.
[supervisord]
nodaemon=true
logfile=/var/log/supervisor/supervisord.log
pidfile=/tmp/supervisord.pid

# JupyterLab served on port 8989 with no auth token; env vars point jupyter
# at the per-user PYTHONUSERBASE-based config/runtime directories
[program:jupyterlab]
user=%(ENV_NB_USER)s
environment=HOME=/home/%(ENV_NB_USER)s, USER=%(ENV_NB_USER)s, SHELL=/bin/zsh, PYTHONUSERBASE=%(ENV_PYBASE)s, JUPYTER_PATH=%(ENV_PYBASE)s/share/jupyter, JUPYTER_RUNTIME_DIR=/tmp/jupyter/runtime, JUPYTER_CONFIG_DIR=%(ENV_PYBASE)s/jupyter
command=/opt/conda/bin/jupyter lab --ip=0.0.0.0 --port=8989 --allow-root --NotebookApp.token=''
stdout_logfile=/var/log/supervisor/%(program_name)s.log
stderr_logfile=/var/log/supervisor/%(program_name)s.log
autorestart=false

# PostgreSQL run as the postgres system user via sudo
[program:postgres]
command=sudo -u postgres /usr/lib/postgresql/%(ENV_POSTGRES_VERSION)s/bin/postgres -c config_file=/etc/postgresql/%(ENV_POSTGRES_VERSION)s/main/postgresql.conf
stdout_logfile=/var/log/supervisor/%(program_name)s.log
stderr_logfile=/var/log/supervisor/%(program_name)s.log
autorestart=true

# sshd in non-daemon mode; startsecs=0 so an immediate exit is not a failure
[program:sshd]
command=sudo /usr/sbin/sshd -D
stdout_logfile=/var/log/supervisor/%(program_name)s.log
stderr_logfile=/var/log/supervisor/%(program_name)s.log
startsecs=0
autorestart=false

# [program:hadoop]
# command=sudo /usr/bin/hadoop
# stdout_logfile=/var/log/supervisor/%(program_name)s.log
# stderr_logfile=/var/log/supervisor/%(program_name)s.log
# startsecs=0
# autorestart=false
--------------------------------------------------------------------------------
/files/supervisord.conf:
--------------------------------------------------------------------------------
# supervisord configuration: runs everything directly as NB_USER (no sudo)
# NOTE(review): postgres and sshd normally require their own service users /
# root; confirm NB_USER has the permissions these commands need
[supervisord]
user=%(ENV_NB_USER)s
nodaemon=true
logfile=/var/log/supervisor/supervisord.log
pidfile=/tmp/supervisord.pid

# JupyterLab served on port 8989 with no auth token; env vars point jupyter
# at the per-user PYTHONUSERBASE-based config/runtime directories
[program:jupyterlab]
user=%(ENV_NB_USER)s
environment=HOME=/home/%(ENV_NB_USER)s,USER=%(ENV_NB_USER)s,SHELL=/bin/zsh,PYTHONUSERBASE=%(ENV_PYBASE)s,JUPYTER_PATH=%(ENV_PYBASE)s/share/jupyter,JUPYTER_RUNTIME_DIR=/tmp/jupyter/runtime,JUPYTER_CONFIG_DIR=%(ENV_PYBASE)s/jupyter
command=/opt/conda/bin/jupyter lab --ip=0.0.0.0 --port=8989 --allow-root --NotebookApp.token=''
stdout_logfile=/var/log/supervisor/%(program_name)s.log
stderr_logfile=/var/log/supervisor/%(program_name)s.log
autorestart=false

[program:postgres]
user=%(ENV_NB_USER)s
command=/usr/lib/postgresql/%(ENV_POSTGRES_VERSION)s/bin/postgres -c config_file=/etc/postgresql/%(ENV_POSTGRES_VERSION)s/main/postgresql.conf
stdout_logfile=/var/log/supervisor/%(program_name)s.log
stderr_logfile=/var/log/supervisor/%(program_name)s.log
autorestart=true

# sshd in non-daemon mode; startsecs=0 so an immediate exit is not a failure
[program:sshd]
user=%(ENV_NB_USER)s
command=/usr/sbin/sshd -D
stdout_logfile=/var/log/supervisor/%(program_name)s.log
stderr_logfile=/var/log/supervisor/%(program_name)s.log
startsecs=0
autorestart=false
--------------------------------------------------------------------------------
/files/usethis:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import requests
4 | import zipfile
5 | import os
6 | import shutil
7 | import argparse
8 |
9 | def download_zip(url, output_folder, file_or_folder_name):
10 | local_filename = "__temp_download__.zip"
11 | local_filepath = os.path.join(output_folder, local_filename)
12 |
13 | with requests.get(url, stream=True) as r:
14 | r.raise_for_status()
15 | with open(local_filepath, 'wb') as f:
16 | for chunk in r.iter_content(chunk_size=8192):
17 | f.write(chunk)
18 |
19 | temp_extract_path = os.path.join(output_folder, "temp_extracted")
20 | with zipfile.ZipFile(local_filepath, 'r') as zip_ref:
21 | zip_ref.extractall(temp_extract_path)
22 |
23 | extracted_content = os.listdir(temp_extract_path)
24 | if len(extracted_content) == 1:
25 | extracted_path = os.path.join(temp_extract_path, extracted_content[0])
26 | else:
27 | extracted_path = temp_extract_path
28 |
29 | final_destination = os.path.join(output_folder, file_or_folder_name)
30 | if os.path.exists(final_destination):
31 | shutil.rmtree(final_destination)
32 |
33 | shutil.move(extracted_path, final_destination)
34 | os.remove(local_filepath)
35 | if os.path.exists(temp_extract_path):
36 | shutil.rmtree(temp_extract_path)
37 |
38 | if __name__ == "__main__":
39 | parser = argparse.ArgumentParser(description='Download and extract a ZIP file (e.g., from Dropbox).')
40 | parser.add_argument('file_or_folder_name', type=str, help='The name to assign to the extracted file or folder.')
41 | parser.add_argument('--dest', type=str, default=os.getcwd(), help='The output folder for extraction. Defaults to the current directory.')
42 | parser.add_argument('url', type=str, help='The URL of the ZIP file to download.')
43 |
44 | args = parser.parse_args()
45 |
46 | output_folder = args.dest
47 | if not os.path.exists(output_folder):
48 | os.makedirs(output_folder)
49 |
50 | download_zip(args.url, output_folder, args.file_or_folder_name)
51 | print(f"Downloaded and extracted ZIP file from {args.url} to {os.path.join(output_folder, args.file_or_folder_name)}")
--------------------------------------------------------------------------------
/files/zshrc:
--------------------------------------------------------------------------------
1 | # Enable Powerlevel10k instant prompt. Should stay close to the top of ~/.zshrc.
2 | # Initialization code that may require console input (password prompts, [y/n]
3 | # confirmations, etc.) must go above this block; everything else may go below.
4 | if [[ -r "${XDG_CACHE_HOME:-$HOME/.rsm-msba/zsh/.cache}/p10k-instant-prompt-${(%):-%n}.zsh" ]]; then
5 | source "${XDG_CACHE_HOME:-$HOME/.rsm-msba/zsh/.cache}/p10k-instant-prompt-${(%):-%n}.zsh"
6 | fi
7 |
8 | ZSH_DISABLE_COMPFIX="true"
9 |
10 | # If you come from bash you might have to change your $PATH.
11 | # export PATH=$HOME/bin:/usr/local/bin:$PATH
12 |
13 | # Path to your oh-my-zsh installation.
14 | export ZSH="$HOME/.rsm-msba/zsh/.oh-my-zsh"
15 |
16 | # Set name of the theme to load --- if set to "random", it will
17 | # load a random theme each time oh-my-zsh is loaded, in which case,
18 | # to know which specific one was loaded, run: echo $RANDOM_THEME
19 | # See https://github.com/ohmyzsh/ohmyzsh/wiki/Themes
20 | # ZSH_THEME="robbyrussell"
21 | ZSH_THEME="powerlevel10k/powerlevel10k"
22 |
23 | # Set list of themes to pick from when loading at random
24 | # Setting this variable when ZSH_THEME=random will cause zsh to load
25 | # a theme from this variable instead of looking in $ZSH/themes/
26 | # If set to an empty array, this variable will have no effect.
27 | # ZSH_THEME_RANDOM_CANDIDATES=( "robbyrussell" "agnoster" )
28 |
29 | # Uncomment the following line to use case-sensitive completion.
30 | # CASE_SENSITIVE="true"
31 |
32 | # Uncomment the following line to use hyphen-insensitive completion.
33 | # Case-sensitive completion must be off. _ and - will be interchangeable.
34 | # HYPHEN_INSENSITIVE="true"
35 |
36 | # Uncomment the following line to disable bi-weekly auto-update checks.
37 | # DISABLE_AUTO_UPDATE="true"
38 |
39 | # Uncomment the following line to automatically update without prompting.
40 | # DISABLE_UPDATE_PROMPT="true"
41 |
42 | # Uncomment the following line to change how often to auto-update (in days).
43 | # export UPDATE_ZSH_DAYS=13
44 |
45 | # Uncomment the following line if pasting URLs and other text is messed up.
46 | # DISABLE_MAGIC_FUNCTIONS="true"
47 |
48 | # Uncomment the following line to disable colors in ls.
49 | # DISABLE_LS_COLORS="true"
50 |
51 | # Uncomment the following line to disable auto-setting terminal title.
52 | # DISABLE_AUTO_TITLE="true"
53 |
54 | # Uncomment the following line to enable command auto-correction.
55 | # ENABLE_CORRECTION="true"
56 |
57 | # Uncomment the following line to display red dots whilst waiting for completion.
58 | # COMPLETION_WAITING_DOTS="true"
59 |
60 | # Uncomment the following line if you want to disable marking untracked files
61 | # under VCS as dirty. This makes repository status check for large repositories
62 | # much, much faster.
63 | # DISABLE_UNTRACKED_FILES_DIRTY="true"
64 |
65 | # Uncomment the following line if you want to change the command execution time
66 | # stamp shown in the history command output.
67 | # You can set one of the optional three formats:
68 | # "mm/dd/yyyy"|"dd.mm.yyyy"|"yyyy-mm-dd"
69 | # or set a custom format using the strftime function format specifications,
70 | # see 'man strftime' for details.
71 | # HIST_STAMPS="mm/dd/yyyy"
72 |
73 | # Would you like to use another custom folder than $ZSH/custom?
74 | # ZSH_CUSTOM=/path/to/new-custom-folder
75 |
76 | # Which plugins would you like to load?
77 | # Standard plugins can be found in $ZSH/plugins/
78 | # Custom plugins may be added to $ZSH_CUSTOM/plugins/
79 | # Example format: plugins=(rails git textmate ruby lighthouse)
80 | # Add wisely, as too many plugins slow down shell startup.
81 | plugins=(
82 | git
83 | zsh-completions
84 | zsh-autosuggestions
85 | zsh-syntax-highlighting
86 | k
87 | poetry
88 | )
89 |
90 | source $ZSH/oh-my-zsh.sh
91 |
92 | # User configuration
93 |
94 | # export MANPATH="/usr/local/man:$MANPATH"
95 |
96 | # You may need to manually set your language environment
97 | # export LANG=en_US.UTF-8
98 |
99 | # Preferred editor for local and remote sessions
100 | # if [[ -n $SSH_CONNECTION ]]; then
101 | # export EDITOR='vim'
102 | # else
103 | # export EDITOR='mvim'
104 | # fi
105 |
106 | # Compilation flags
107 | # export ARCHFLAGS="-arch x86_64"
108 |
109 | # Set personal aliases, overriding those provided by oh-my-zsh libs,
110 | # plugins, and themes. Aliases can be placed here, though oh-my-zsh
111 | # users are encouraged to define aliases within the ZSH_CUSTOM folder.
112 | # For a full list of active aliases, run `alias`.
113 | #
114 | # Example aliases
115 | # alias zshconfig="mate ~/.zshrc"
116 | # alias ohmyzsh="mate ~/.oh-my-zsh"
117 |
118 | alias c="clear"
119 | alias r="radian"
120 | # alias k="k -h"
121 | # alias zc="vim ~/.rsm-msba/zsh/.zshrc"
122 | # alias om="vim ~/.rsm-msba/zsh/.oh-my-zsh"
123 | # alias zp="vim ~/.rsm-msba/zsh/.p10k.zsh"
124 |
125 | # fpath+=~/.zfunc
126 | # autoload -Uz compinit && compinit
127 |
128 | # To customize prompt, run `p10k configure` or edit ~/.p10k.zsh.
129 | [[ ! -f ~/.rsm-msba/zsh/.p10k.zsh ]] || source ~/.rsm-msba/zsh/.p10k.zsh
130 |
--------------------------------------------------------------------------------
/images/docker-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/images/docker-icon.png
--------------------------------------------------------------------------------
/install/figures/docker-general-wsl2-based-engine.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/docker-general-wsl2-based-engine.png
--------------------------------------------------------------------------------
/install/figures/docker-help.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/docker-help.png
--------------------------------------------------------------------------------
/install/figures/docker-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/docker-icon.png
--------------------------------------------------------------------------------
/install/figures/docker-resources-mac.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/docker-resources-mac.png
--------------------------------------------------------------------------------
/install/figures/docker-resources-wsl2-integration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/docker-resources-wsl2-integration.png
--------------------------------------------------------------------------------
/install/figures/docker-system-mac.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/docker-system-mac.png
--------------------------------------------------------------------------------
/install/figures/ohmyzsh-icons.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/ohmyzsh-icons.png
--------------------------------------------------------------------------------
/install/figures/ohmyzsh-powerlevel10k-iterm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/ohmyzsh-powerlevel10k-iterm.png
--------------------------------------------------------------------------------
/install/figures/ohmyzsh-powerlevel10k.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/ohmyzsh-powerlevel10k.png
--------------------------------------------------------------------------------
/install/figures/postgresql-pgweb.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/postgresql-pgweb.png
--------------------------------------------------------------------------------
/install/figures/ppt-image-editing.pptx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/ppt-image-editing.pptx
--------------------------------------------------------------------------------
/install/figures/reset-credentials.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/reset-credentials.png
--------------------------------------------------------------------------------
/install/figures/rsm-jupyter.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/rsm-jupyter.png
--------------------------------------------------------------------------------
/install/figures/rsm-launch-menu-macos-arm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/rsm-launch-menu-macos-arm.png
--------------------------------------------------------------------------------
/install/figures/rsm-launch-menu-macos.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/rsm-launch-menu-macos.png
--------------------------------------------------------------------------------
/install/figures/rsm-launch-menu-wsl2-arm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/rsm-launch-menu-wsl2-arm.png
--------------------------------------------------------------------------------
/install/figures/rsm-launch-menu-wsl2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/rsm-launch-menu-wsl2.png
--------------------------------------------------------------------------------
/install/figures/rsm-msba-menu-linux.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/rsm-msba-menu-linux.png
--------------------------------------------------------------------------------
/install/figures/rsm-radiant.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/rsm-radiant.jpeg
--------------------------------------------------------------------------------
/install/figures/rsm-rstudio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/rsm-rstudio.png
--------------------------------------------------------------------------------
/install/figures/settings-resources.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/settings-resources.png
--------------------------------------------------------------------------------
/install/figures/start-ubuntu-terminal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/start-ubuntu-terminal.png
--------------------------------------------------------------------------------
/install/figures/symlinks.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/symlinks.png
--------------------------------------------------------------------------------
/install/figures/ubuntu-root.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/ubuntu-root.png
--------------------------------------------------------------------------------
/install/figures/win-protected.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/win-protected.png
--------------------------------------------------------------------------------
/install/figures/win-update-message.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/win-update-message.png
--------------------------------------------------------------------------------
/install/figures/windows-shared-drives.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/windows-shared-drives.png
--------------------------------------------------------------------------------
/install/figures/windows-version.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/windows-version.png
--------------------------------------------------------------------------------
/install/figures/wsl2-windows-docker-install.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/install/figures/wsl2-windows-docker-install.png
--------------------------------------------------------------------------------
/install/install-docker-chromeos.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ## setup for docker on ChromeOS using linux (beta)
4 | sudo apt-get update
5 | sudo apt-get install apt-transport-https \
6 | ca-certificates \
7 | curl \
8 | software-properties-common \
9 | wget \
10 | openssh-client \
11 | zsh \
12 | ntpdate \
13 | python-is-python3
14 |
15 | curl -fsSL https://download.docker.com/linux/debian/gpg | sudo apt-key add - # NOTE(review): apt-key is deprecated on current Debian; consider a keyring under /etc/apt/keyrings
16 | sudo add-apt-repository \
17 | "deb [arch=amd64] https://download.docker.com/linux/debian \
18 | $(lsb_release -cs) \
19 | stable"
20 |
21 | sudo apt-get update
22 | sudo apt-get install docker-ce
23 |
24 | sudo groupadd docker
25 | sudo usermod -aG docker $USER # allow running docker without sudo
26 | newgrp docker # start a shell with the new docker group active so no re-login is needed
27 |
--------------------------------------------------------------------------------
/install/install-docker.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sudo apt update
4 | sudo apt install apt-transport-https \
5 | ca-certificates \
6 | curl \
7 | software-properties-common \
8 | git \
9 | openssh-client \
10 | zsh \
11 | ntpdate \
12 | python-is-python3
13 |
14 | curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - # NOTE(review): apt-key is deprecated on current Ubuntu; consider a keyring under /etc/apt/keyrings
15 | sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu jammy stable" # hard-codes "jammy" (22.04) -- TODO confirm this matches the target release
16 | sudo apt update
17 | apt-cache policy docker-ce # show candidate version to verify the Docker repo is being used
18 | sudo apt install docker-ce
19 | sudo systemctl status docker
20 | sudo usermod -aG docker ${USER} # allow running docker without sudo
21 | su - ${USER} # re-login so the new docker group membership takes effect
22 |
--------------------------------------------------------------------------------
/install/setup-ohmyzsh.md:
--------------------------------------------------------------------------------
1 | ## Setup oh-my-zsh
2 |
3 | ## Install Meslo Nerd Font
4 |
5 | Follow the linked instructions to install the Meslo Nerd Font.
6 |
7 | To use fonts and icons in the Standard macOS terminal with ZSH, change the terminal settings as follows:
8 |
9 | > Apple Terminal: Open Terminal → Preferences → Profiles → Text, click Change under Font and select the MesloLGS NF family
10 |
11 | To use fonts and icons in the Windows Terminal change the terminal settings as follows:
12 |
13 | > Windows Terminal: Open Settings (Ctrl+,), search for fontFace and set value to "MesloLGS NF" for every profile
14 |
15 | If you want to have access to the same icons in the terminal in VSCode change the settings as follows:
16 |
17 | > Visual Studio Code: Open File → Preferences → Settings, enter terminal.integrated.fontFamily in the search box and set the value to "MesloLGS NF"
18 |
19 | To use fonts and icons in the JupyterLab terminal, change the terminal settings as follows:
20 |
21 | > Settings: Advanced Settings Editor → Terminal, then enter the below in the `User Preferences` window
22 |
23 | ```
24 | {
25 | "fontFamily": "MesloLGS NF",
26 | }
27 | ```
28 |
29 | To use fonts and icons in the Windows Console Host (i.e., CMD) change the terminal settings as follows:
30 |
31 | > Windows Console Host: Click the icon in the top left corner, then Properties → Font and set Font to "MesloLGS NF"
32 |
33 | To use fonts and icons in an iTerm2 terminal on macOS with ZSH, type `p10k configure` and answer `Yes` when asked whether to install `Meslo Nerd Font`. Alternatively:
34 |
35 | > iTerm2: Preferences → Profiles → Text and set Font to "MesloLGS NF"
36 |
37 | ## Setup OhMyZsh
38 |
39 | > Note: If you are using the RSM-MSBA-ARM or RSM-MSBA-INTEL computing platform and have run the `setup` command you can skip this step
40 |
41 | Start by setting `ZSH` as the default shell instead of `BASH`. On macOS, Linux, and Windows (WSL2) use the command below from a terminal and answer "y" if prompted.
42 |
43 | `chsh -s $(which zsh)`
44 |
45 | Run the commands below to install some useful plugins and the `powerlevel10k` theme:
46 |
47 | ```
48 | sh -c "$(curl -fsSL https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh)";
49 | git clone https://github.com/zsh-users/zsh-completions ${ZSH_CUSTOM:=~/.oh-my-zsh/custom}/plugins/zsh-completions;
50 | git clone https://github.com/zsh-users/zsh-autosuggestions ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-autosuggestions;
51 | git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-syntax-highlighting;
52 | git clone https://github.com/supercrabtree/k ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/k;
53 | git clone --depth=1 https://github.com/romkatv/powerlevel10k.git ${ZSH_CUSTOM:-$HOME/.oh-my-zsh/custom}/themes/powerlevel10k;
54 | ```
55 |
56 | Now use VSCode, or another text editor, to make a few changes to the `.zshrc` file. For example, if you have VSCode installed you can use it from a macOS or Windows Terminal by typing:
57 |
58 | ```
59 | code ~/.zshrc
60 | ```
61 |
62 | Replace the `plugins` section in the `.zshrc` file with the code below
63 |
64 | ```
65 | plugins=(
66 | git
67 | zsh-completions
68 | zsh-autosuggestions
69 | zsh-syntax-highlighting
70 | k
71 | )
72 | ```
73 |
74 | Then replace `ZSH_THEME="robbyrussell"` with `ZSH_THEME="powerlevel10k/powerlevel10k"` in `~/.zshrc`. Save the changes and type `source ~/.zshrc` in the terminal to start the configuration wizard. Follow the prompts and select the setup you prefer. You can always update and change the configuration by running `p10k configure` in a terminal.
75 |
--------------------------------------------------------------------------------
/launch-rsm-msba-intel-jupyterhub.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## set ARG_HOME to a directory of your choosing if you do NOT
4 | ## want to map the docker home directory to your local
5 | ## home directory
6 |
7 | ## use the command below to launch the container:
8 | ## ~/git/docker/launch-rsm-msba-intel.sh -v ~
9 |
10 | ## to map the directory where the launch script is located to
11 | ## the docker home directory call the script_home function
12 | script_home () {
13 | echo "$(echo "$( cd "$(dirname "$0")" ; pwd -P )" | sed -E "s|^/([A-z]{1})/|\1:/|")"
14 | }
15 |
16 | function launch_usage() {
17 | echo "Usage: $0 [-t tag (version)] [-d directory]"
18 | echo " -t, --tag Docker image tag (version) to use"
19 | echo " -d, --directory Project directory to use"
20 | echo " -v, --volume Volume to mount as home directory"
21 | echo " -s, --show Show all output generated on launch"
22 | echo " -h, --help Print help and exit"
23 | echo ""
24 | echo "Example: $0 --tag 3.0.0 --volume ~/project_1"
25 | echo ""
26 | exit 1
27 | }
28 |
29 | ## parse command-line arguments
30 | while [[ "$#" > 0 ]]; do case $1 in
31 | -t|--tag) ARG_TAG="$2"; shift;shift;;
32 | -d|--directory) ARG_DIR="$2";shift;shift;;
33 | -v|--volume) ARG_VOLUME="$2";shift;shift;;
34 | -s|--show) ARG_SHOW="show";shift;shift;;
35 | -h|--help) launch_usage;shift; shift;;
36 | *) echo "Unknown parameter passed: $1"; echo ""; launch_usage; shift; shift;;
37 | esac; done
38 |
39 | ## some cleanup on exit
40 | function finish {
41 | if [ "$ARG_HOME" != "" ]; then
42 | echo "Removing empty files and directories ..."
43 | find "$ARG_HOME" -empty -type d -delete
44 | find "$ARG_HOME" -empty -type f -delete
45 | fi
46 | }
47 | trap finish EXIT
48 |
49 | ## change to some other path to use as default
50 | # ARG_HOME="~/rady"
51 | # ARG_HOME="$(script_home)"
52 | ARG_HOME=""
53 | IMAGE_VERSION="latest"
54 | NB_USER="jovyan"
55 | ID="vnijs"
56 | LABEL="rsm-msba-intel-jupyterhub"
57 | NETWORK="rsm-docker"
58 | IMAGE=${ID}/${LABEL}
59 | # Choose your timezone https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
60 | TIMEZONE="America/Los_Angeles"
61 | if [ "$ARG_TAG" != "" ]; then
62 | IMAGE_VERSION="$ARG_TAG"
63 | DOCKERHUB_VERSION=${IMAGE_VERSION}
64 | else
65 | ## see https://stackoverflow.com/questions/34051747/get-environment-variable-from-docker-container
66 | DOCKERHUB_VERSION=$(docker inspect -f '{{range $index, $value := .Config.Env}}{{println $value}} {{end}}' ${IMAGE}:${IMAGE_VERSION} | grep DOCKERHUB_VERSION)
67 | DOCKERHUB_VERSION="${DOCKERHUB_VERSION#*=}"
68 | fi
69 | POSTGRES_VERSION=14
70 |
71 | ## what os is being used
72 | ostype=`uname`
73 | if [ "$ostype" == "Darwin" ]; then
74 | EXT="command"
75 | else
76 | EXT="sh"
77 | fi
78 |
79 | BOUNDARY="---------------------------------------------------------------------------"
80 |
81 | ## check the return code - if curl can connect something is already running
82 | curl -S localhost:8989 2>/dev/null
83 | ret_code=$?
84 | if [ "$ret_code" == 0 ]; then
85 | echo $BOUNDARY
86 | echo "A launch script may already be running. To close the new session and"
87 | echo "continue with the previous session press q + enter. To continue with"
88 | echo "the new session and stop the previous session, press enter"
89 | echo $BOUNDARY
90 | read contd
91 | if [ "${contd}" == "q" ]; then
92 | exit 1
93 | fi
94 | fi
95 |
96 | ## script to start Radiant, Rstudio, and JupyterLab
97 | if [ "$ARG_SHOW" != "show" ]; then
98 | clear
99 | fi
100 | has_docker=$(which docker)
101 | if [ "${has_docker}" == "" ]; then
102 | echo $BOUNDARY
103 | echo "Docker is not installed. Download and install Docker from"
104 | if [[ "$ostype" == "Linux" ]]; then
105 | is_wsl=$(which explorer.exe)
106 | if [[ "$is_wsl" != "" ]]; then
107 | echo "https://hub.docker.com/editions/community/docker-ce-desktop-windows"
108 | else
109 | echo "https://www.digitalocean.com/community/tutorials/how-to-install-and-use-docker-on-ubuntu-22-04"
110 | fi
111 | elif [[ "$ostype" == "Darwin" ]]; then
112 | echo "https://hub.docker.com/editions/community/docker-ce-desktop-mac"
113 | else
114 | echo "https://hub.docker.com/editions/community/docker-ce-desktop-windows"
115 | fi
116 | echo $BOUNDARY
117 | read
118 | else
119 |
120 | ## check docker is running at all
121 | ## based on https://stackoverflow.com/questions/22009364/is-there-a-try-catch-command-in-bash
122 | {
123 | docker ps -q 2>/dev/null
124 | } || {
125 | if [[ "$ostype" == "Darwin" ]]; then
126 | ## from https://stackoverflow.com/a/48843074/1974918
127 | # On Mac OS this would be the terminal command to launch Docker
128 | open /Applications/Docker.app
129 | #Wait until Docker daemon is running and has completed initialisation
130 | while (! docker stats --no-stream 2>/dev/null); do
131 | echo "Please wait while Docker starts up ..."
132 | sleep 2
133 | done
134 | else
135 | echo $BOUNDARY
136 | echo "Docker is not running. Please start docker on your computer"
137 | echo "When docker has finished starting up press [ENTER] to continue"
138 | echo $BOUNDARY
139 | read
140 | fi
141 | }
142 |
143 | ## kill running containers
144 | running=$(docker ps -a --format {{.Names}} | grep ${LABEL} -w)
145 | if [ "${running}" != "" ]; then
146 | echo $BOUNDARY
147 | echo "Stopping running containers"
148 | echo $BOUNDARY
149 | docker stop ${LABEL}
150 | docker container rm ${LABEL} 2>/dev/null
151 | fi
152 |
153 | ## download image if not available
154 | available=$(docker images -q ${IMAGE}:${IMAGE_VERSION})
155 | if [ "${available}" == "" ]; then
156 | echo $BOUNDARY
157 | echo "Downloading the ${LABEL}:${IMAGE_VERSION} computing environment"
158 | echo $BOUNDARY
159 | docker logout
160 | docker pull ${IMAGE}:${IMAGE_VERSION}
161 | fi
162 |
163 | chip=""
164 | if [[ "$ostype" == "Linux" ]]; then
165 | ostype="Linux"
166 | if [[ "$archtype" == "aarch64" ]]; then
167 | chip="(ARM64)"
168 | else
169 | chip="(Intel)"
170 | fi
171 | HOMEDIR=~
172 | ID=$USER
173 | open_browser () {
174 | xdg-open $1
175 | }
176 | sed_fun () {
177 | sed -i $1 "$2"
178 | }
179 | if [ -d "/media" ]; then
180 | MNT="-v /media:/media"
181 | else
182 | MNT=""
183 | fi
184 |
185 | is_wsl=$(which explorer.exe)
186 | if [[ "$is_wsl" != "" ]]; then
187 | archtype=`arch`
188 | ostype="WSL2"
189 | if [[ "$archtype" == "aarch64" ]]; then
190 | chip="(ARM64)"
191 | else
192 | chip="(Intel)"
193 | fi
194 | HOMEDIR="/mnt/c/Users/$USER"
195 | if [ -d "/mnt/c" ]; then
196 | MNT="$MNT -v /mnt/c:/mnt/c"
197 | fi
198 | if [ -d "/mnt/d" ]; then
199 | MNT="$MNT -v /mnt/d:/mnt/d"
200 | fi
201 | fi
202 | elif [[ "$ostype" == "Darwin" ]]; then
203 | archtype=`arch`
204 | ostype="macOS"
205 | if [[ "$archtype" == "arm64" ]]; then
206 | chip="(ARM64)"
207 | else
208 | chip="(Intel)"
209 | fi
210 | HOMEDIR=~
211 | ID=$USER
212 | open_browser () {
213 | open $1
214 | }
215 | sed_fun () {
216 | sed -i '' -e $1 "$2"
217 | }
218 | MNT="-v /Volumes:/media/Volumes"
219 | else
220 | archtype=`arch`
221 | ostype="Windows"
222 | if [[ "$archtype" == "arm64" ]]; then
223 | chip="(ARM64)"
224 | else
225 | chip="(Intel)"
226 | fi
227 | HOMEDIR="C:/Users/$USERNAME"
228 | ID=$USERNAME
229 | open_browser () {
230 | start $1
231 | }
232 | sed_fun () {
233 | sed -i $1 "$2"
234 | }
235 | MNT=""
236 | fi
237 |
238 | if [ "$ARG_VOLUME" != "" ]; then
239 | HOMEDIR="$ARG_VOLUME"
240 | fi
241 |
## Remap the container home directory to a user-specified local directory.
## ARG_DIR/ARG_HOME come from CLI parsing earlier in the script; sed_fun,
## MNT, HOMEDIR, and NB_USER are set in the OS-detection block above.
if [ "$ARG_DIR" != "" ] || [ "$ARG_HOME" != "" ]; then
  ## change mapping of docker home directory to local directory if specified
  if [ "${ARG_HOME}" != "" ] && [ ! -d "${ARG_HOME}" ]; then
    echo "The directory ${ARG_HOME} does not yet exist."
    echo "Please create the directory and restart the launch script"
    sleep 5
    exit 1
  fi
  if [ "$ARG_DIR" != "" ]; then
    if [ ! -d "${ARG_DIR}" ]; then
      echo "The directory ${ARG_DIR} does not yet exist."
      echo "Please create the directory and restart the launch script"
      sleep 5
      exit 1
    fi
    ## resolve ARG_DIR to an absolute path
    ARG_HOME="$(cd "$ARG_DIR"; pwd)"
    ## https://unix.stackexchange.com/questions/295991/sed-error-1-not-defined-in-the-re-under-os-x
    ## convert a git-bash style "/c/..." prefix into a Windows "c:/..." drive path
    ## NOTE(review): [A-z] matches more than letters ([, ], ^, _, `);
    ## [A-Za-z] would be the precise class -- confirm before changing
    ARG_HOME="$(echo "$ARG_HOME" | sed -E "s|^/([A-z]{1})/|\1:/|")"

    echo $BOUNDARY
    echo "Do you want to access git, ssh, and R configuration in this directory (y/n)"
    echo "${ARG_HOME}"
    echo $BOUNDARY
    read copy_config
  else
    ## when only ARG_HOME was given, mount the config files without asking
    copy_config="y"
  fi

  ## bind-mount dot-files from the real home directory into the container,
  ## but only when the target does not already have its own non-empty copy
  ## ([ ! -s file ] is true when the file is missing or empty)
  if [ "${copy_config}" == "y" ]; then
    if [ -f "${HOMEDIR}/.inputrc" ] && [ ! -s "${ARG_HOME}/.inputrc" ]; then
      MNT="$MNT -v ${HOMEDIR}/.inputrc:/home/$NB_USER/.inputrc"
    fi
    if [ -f "${HOMEDIR}/.Rprofile" ] && [ ! -s "${ARG_HOME}/.Rprofile" ]; then
      MNT="$MNT -v ${HOMEDIR}/.Rprofile:/home/$NB_USER/.Rprofile"
    fi
    if [ -f "${HOMEDIR}/.Renviron" ] && [ ! -s "${ARG_HOME}/.Renviron" ]; then
      MNT="$MNT -v ${HOMEDIR}/.Renviron:/home/$NB_USER/.Renviron"
    fi
    if [ -f "${HOMEDIR}/.gitconfig" ] && [ ! -s "${ARG_HOME}/.gitconfig" ]; then
      MNT="$MNT -v ${HOMEDIR}/.gitconfig:/home/$NB_USER/.gitconfig"
    fi
    if [ -d "${HOMEDIR}/.ssh" ]; then
      ## mount .ssh only when the target .ssh directory is missing or empty
      if [ ! -d "${ARG_HOME}/.ssh" ] || [ ! "$(ls -A $ARG_HOME/.ssh)" ]; then
        MNT="$MNT -v ${HOMEDIR}/.ssh:/home/$NB_USER/.ssh"
      fi
    fi
  fi

  if [ ! -f "${ARG_HOME}/.gitignore" ]; then
    ## make sure no hidden files go into a git repo
    touch "${ARG_HOME}/.gitignore"
    echo ".*" >> "${ARG_HOME}/.gitignore"
  fi

  if [ -d "${HOMEDIR}/.R" ]; then
    if [ ! -d "${ARG_HOME}/.R" ] || [ ! "$(ls -A $ARG_HOME/.R)" ]; then
      MNT="$MNT -v ${HOMEDIR}/.R:/home/$NB_USER/.R"
    fi
  fi

  if [ -d "${HOMEDIR}/Dropbox" ]; then
    if [ ! -d "${ARG_HOME}/Dropbox" ] || [ ! "$(ls -A $ARG_HOME/Dropbox)" ]; then
      MNT="$MNT -v ${HOMEDIR}/Dropbox:/home/$NB_USER/Dropbox"
      ## keep exactly one "Dropbox" entry in .gitignore
      sed_fun '/^Dropbox$/d' "${ARG_HOME}/.gitignore"
      echo "Dropbox" >> "${ARG_HOME}/.gitignore"
    fi
  fi

  ## copy RStudio/JupyterLab settings, preferring rsync (with excludes) and
  ## falling back to cp + rm when rsync is not installed
  if [ -d "${HOMEDIR}/.rstudio" ] && [ ! -d "${ARG_HOME}/.rstudio" ]; then
    echo $BOUNDARY
    echo "Copying Rstudio and JupyterLab settings to:"
    echo "${ARG_HOME}"
    echo $BOUNDARY

    {
      which rsync 2>/dev/null
      ## convert "c:/..." back to "/c/..." so rsync sees POSIX-style paths
      HD="$(echo "$HOMEDIR" | sed -E "s|^([A-z]):|/\1|")"
      AH="$(echo "$ARG_HOME" | sed -E "s|^([A-z]):|/\1|")"
      rsync -a "${HD}/.rstudio" "${AH}/" --exclude sessions --exclude projects --exclude projects_settings
    } ||
    {
      cp -r "${HOMEDIR}/.rstudio" "${ARG_HOME}/.rstudio"
      rm -rf "${ARG_HOME}/.rstudio/sessions"
      rm -rf "${ARG_HOME}/.rstudio/projects"
      rm -rf "${ARG_HOME}/.rstudio/projects_settings"
    }

  fi
  if [ -d "${HOMEDIR}/.rsm-msba" ] && [ ! -d "${ARG_HOME}/.rsm-msba" ]; then

    {
      which rsync 2>/dev/null
      HD="$(echo "$HOMEDIR" | sed -E "s|^([A-z]):|/\1|")"
      AH="$(echo "$ARG_HOME" | sed -E "s|^([A-z]):|/\1|")"
      rsync -a "${HD}/.rsm-msba" "${AH}/" --exclude R --exclude bin --exclude lib --exclude share
    } ||
    {
      cp -r "${HOMEDIR}/.rsm-msba" "${ARG_HOME}/.rsm-msba"
      rm -rf "${ARG_HOME}/.rsm-msba/R"
      rm -rf "${ARG_HOME}/.rsm-msba/bin"
      rm -rf "${ARG_HOME}/.rsm-msba/lib"
      ## keep only jupyter-related entries under share
      rm_list=$(ls "${ARG_HOME}/.rsm-msba/share" | grep -v jupyter)
      for i in ${rm_list}; do
        rm -rf "${ARG_HOME}/.rsm-msba/share/${i}"
      done
    }
  fi
  ## drop a copy of this launch script into the new home directory and
  ## point its ARG_HOME at its own location so it can be run from there
  SCRIPT_HOME="$(script_home)"
  if [ "${SCRIPT_HOME}" != "${ARG_HOME}" ]; then
    cp -p "$0" "${ARG_HOME}/launch-${LABEL}.${EXT}"
    sed_fun "s+^ARG_HOME\=\".*\"+ARG_HOME\=\"\$\(script_home\)\"+" "${ARG_HOME}/launch-${LABEL}.${EXT}"
    if [ "$ARG_TAG" != "" ]; then
      sed_fun "s/^IMAGE_VERSION=\".*\"/IMAGE_VERSION=\"${IMAGE_VERSION}\"/" "${ARG_HOME}/launch-${LABEL}.${EXT}"
    fi
  fi
  ## from here on the chosen directory acts as the home directory
  HOMEDIR="${ARG_HOME}"
fi
359 |
## Configure ~/.Rprofile so Radiant can generate reports. Skips silently if
## the option is already present; otherwise asks the user before editing.
RPROF="${HOMEDIR}/.Rprofile"
touch "${RPROF}"
## quote the path: HOMEDIR may contain spaces (common on Windows)
if ! grep -q 'radiant.report = TRUE' "${RPROF}"; then
  echo "Your setup does not allow report generation in Radiant."
  echo "Would you like to add relevant code to .Rprofile?"
  echo "Press y or n, followed by [ENTER]:"
  echo ""
  read allow_report
  if [ "${allow_report}" == "y" ]; then
    ## Windows does not reliably use newlines with printf
    ## remove any existing (possibly stale) radiant option lines first so
    ## the appended block below is the only copy
    sed_fun '/^options(radiant.maxRequestSize/d' "${RPROF}"
    sed_fun '/^options(radiant.report/d' "${RPROF}"
    sed_fun '/^options(radiant.shinyFiles/d' "${RPROF}"
    sed_fun '/^#.*List.*specific.*directories.*you.*want.*to.*use.*with.*radiant/d' "${RPROF}"
    sed_fun '/^#.*options(radiant\.sf_volumes.*=.*c(Git.*=.*"\/home\/jovyan\/git"))/d' "${RPROF}"
    echo 'options(radiant.maxRequestSize = -1)' >> "${RPROF}"
    echo 'options(radiant.report = TRUE)' >> "${RPROF}"
    echo 'options(radiant.shinyFiles = TRUE)' >> "${RPROF}"
    echo '# List specific directories you want to use with radiant' >> "${RPROF}"
    echo '# options(radiant.sf_volumes = c(Git = "/home/jovyan/git"))' >> "${RPROF}"
    echo '' >> "${RPROF}"
    ## drop blank lines; [[:space:]] is the portable class -- plain \s is
    ## not supported inside POSIX/BSD sed bracket expressions
    sed_fun '/^[[:space:]]*$/d' "${RPROF}"
  fi
fi
384 |
## ensure conda has an environments directory to use
[ -d "${HOMEDIR}/.rsm-msba/conda/envs" ] || mkdir -p "${HOMEDIR}/.rsm-msba/conda/envs"

## ensure zsh has a directory to use
[ -d "${HOMEDIR}/.rsm-msba/zsh" ] || mkdir -p "${HOMEDIR}/.rsm-msba/zsh"
394 |
## creation timestamp of the image being launched (shown in the banner below)
BUILD_DATE=$(docker inspect -f '{{.Created}}' ${IMAGE}:${IMAGE_VERSION})

{
  # check if network already exists
  docker network inspect ${NETWORK} >/dev/null 2>&1
} || {
  # if network doesn't exist create it
  echo "--- Creating docker network: ${NETWORK} ---"
  docker network create ${NETWORK}
}

echo $BOUNDARY
echo "Starting the ${LABEL} computing environment on ${ostype} ${chip}"
echo "Version : ${DOCKERHUB_VERSION}"
echo "Build date: ${BUILD_DATE//T*/}"
echo "Base dir. : ${HOMEDIR}"
echo "Cont. name: ${LABEL}"
echo $BOUNDARY

## create the named volume that persists postgres data across runs
has_volume=$(docker volume ls | awk "/pg_data/" | awk '{print $2}')
if [ "${has_volume}" == "" ]; then
  docker volume create --name=pg_data
fi
## run in the foreground (--rm removes the container on exit); ports are
## bound to 127.0.0.1 only: 2222 ssh, 8989 rstudio, 8765 postgres,
## 8501 streamlit, 8000 jupyterhub
{
  docker run --name ${LABEL} --net ${NETWORK} --rm \
    -p 127.0.0.1:2222:22 -p 127.0.0.1:8989:8989 -p 127.0.0.1:8765:8765 -p 127.0.0.1:8501:8501 -p 127.0.0.1:8000:8000 \
    -e NB_USER=0 -e NB_UID=1002 -e NB_GID=1002 \
    -e TZ=${TIMEZONE} \
    -v "${HOMEDIR}":/home/${NB_USER} $MNT \
    -v pg_data:/var/lib/postgresql/${POSTGRES_VERSION}/main \
    ${IMAGE}:${IMAGE_VERSION}
} || {
  echo $BOUNDARY
  echo "It seems there was a problem starting the docker container. Please"
  echo "report the issue and add a screenshot of any messages shown on screen."
  echo "Press [ENTER] to continue"
  echo $BOUNDARY
  read
}
434 |
## make sure abend is set correctly
## https://community.rstudio.com/t/restarting-rstudio-server-in-docker-avoid-error-message/10349/2
## Reset the "abnormal end" flag in active RStudio sessions and normalize
## user-settings so RStudio does not show crash-recovery / restore prompts.
rstudio_abend () {
  if [ -d "${HOMEDIR}/.rstudio/sessions/active" ]; then
    ## use find's -name matching instead of a quoted glob in the path
    ## argument: the quoted "active/*/session-persistent-state" was passed
    ## to find literally and never matched any file
    RSTUDIO_STATE_FILES=$(find "${HOMEDIR}/.rstudio/sessions/active" -name "session-persistent-state" -type f 2>/dev/null)
    if [ "${RSTUDIO_STATE_FILES}" != "" ]; then
      ## sed_fun edits a single file per call, so handle each match
      for state_file in ${RSTUDIO_STATE_FILES}; do
        sed_fun 's/abend="1"/abend="0"/' "${state_file}"
      done
    fi
  fi
  if [ -d "${HOMEDIR}/.rstudio/monitored/user-settings" ]; then
    ## rewrite history/session-restore settings to known-good values:
    ## delete any existing entries, then append the desired ones
    touch "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    sed_fun '/^alwaysSaveHistory="[0-1]"/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    sed_fun '/^loadRData="[0-1]"/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    sed_fun '/^saveAction=/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    echo 'alwaysSaveHistory="1"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    echo 'loadRData="0"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    echo 'saveAction="0"' >> "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
    sed_fun '/^$/d' "${HOMEDIR}/.rstudio/monitored/user-settings/user-settings"
  fi
}
rstudio_abend
456 | fi
457 |
--------------------------------------------------------------------------------
/postgres/.gitignore:
--------------------------------------------------------------------------------
1 | postgres-connect.html
2 |
--------------------------------------------------------------------------------
/postgres/postgres-connect-vscode.pgsql:
--------------------------------------------------------------------------------
1 | /*
click on "Select Postgres Server" at the bottom of your VS Code window
3 | and choose rsm-docker and check if any of the below statements work
4 | all queries below are commented out. remove the "--" in front of a
5 | SELECT statement to make it available to run
6 |
7 | press F5 or right-click on the editor window and select "Run Query"
8 |
9 | what happens when you try to run a query for a table that is in another
10 | database?
11 | */
12 |
13 | -- SELECT * FROM "flights" LIMIT 5;
14 | -- SELECT * FROM "films" LIMIT 5;
15 | -- SELECT * FROM "mtcars" LIMIT 5;
16 |
17 | /* choose Northwind as the active server and check if the below statement works */
18 | -- SELECT * FROM "products" LIMIT 5;
19 |
20 | /* choose WestCoastImporter as the active server and check if the below statement works */
21 | -- SELECT * FROM "buyinggroup" LIMIT 5;
22 |
23 | /*
24 | make sure you have the PostgreSQL extension for VS Code
25 | installed (by Chris Kolkman)
26 |
27 | make sure to "Select Postgres Server" at the bottom
28 | of the VS Code window and then select a server and a database
29 | */
--------------------------------------------------------------------------------
/postgres/postgres-connect.R:
--------------------------------------------------------------------------------
## Connect to the postgres server started by the rsm-msba container and
## demonstrate a basic DBI/dplyr database workflow using the mtcars data.
library(DBI)
con <- dbConnect(
  RPostgres::Postgres(),
  user = "jovyan",
  host = "127.0.0.1",
  port = 8765,
  dbname = "rsm-docker",
  password = "postgres"
)

## show list of tables
db_tabs <- dbListTables(con)
db_tabs

## add a dataframe that is available in R by default to the dbase
## (temporary = FALSE so the table persists across connections)
library(dplyr)
if (!"mtcars" %in% db_tabs) {
  copy_to(con, mtcars, "mtcars", temporary = FALSE)
}

## extract data from dbase connection
## tbl() creates a lazy reference; rows are only pulled when printed/collected
dat <- tbl(con, "mtcars")
dat

## show updated list of tables
dbListTables(con)

## drop a table
# dbRemoveTable(con, "mtcars")

## show updated list of tables
# dbListTables(con)

## disconnect from database
dbDisconnect(con)
--------------------------------------------------------------------------------
/postgres/postgres-connect.Rmd:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Connecting to a postgresql database"
3 | output:
4 | html_document:
5 | keep_md: yes
6 | ---
7 |
8 | ```{r setup, include=FALSE}
9 | knitr::opts_chunk$set(
10 | comment = NA,
11 | echo = TRUE
12 | )
13 | ```
14 |
15 | Starting the `rsm-msba-arm` (or `rsm-msba-intel`) computing container also starts a postgresql server running on your machine. You can connect to the database from R using the code chunk below.
16 |
17 | ```{r}
18 | library(DBI)
19 | library(RPostgres)
20 | con <- dbConnect(
21 | RPostgres::Postgres(),
22 | user = "jovyan",
23 | host = "127.0.0.1",
24 | port = 8765,
25 | dbname = "rsm-docker",
26 | password = "postgres"
27 | )
28 | ```
29 |
30 | Is there anything in the database? If this is not the first time you are running this Rmarkdown file, the database should already have one or more tables and the code chunk below should show "flights" as an existing table.
31 |
32 | ```{r}
33 | library(dplyr)
34 | library(dbplyr)
35 | db_tabs <- dbListTables(con)
36 | db_tabs
37 | ```
38 |
If the database is empty, let's start with the example at https://db.rstudio.com/dplyr/ and work through the following steps:
40 |
41 | ### 1. install the nycflights13 package if not already available
42 |
43 | ```{r}
44 | ## install nycflights13 package locally if not already available
45 | if (!require("nycflights13")) {
46 | local_dir <- Sys.getenv("R_LIBS_USER")
47 | if (!dir.exists(local_dir)) {
48 | dir.create(local_dir, recursive = TRUE)
49 | }
50 | install.packages("nycflights13", lib = local_dir)
51 | ## now use Session > Restart R and start from the top of
52 | ## of this file again
53 | }
54 | ```
55 |
56 | ### 2. Push data into the database
57 |
58 | Note that this is a fairly large dataset that we are copying into the database so make sure you have sufficient resources set for docker to use. See the install instructions for details:
59 |
60 | * Windows:
61 | https://github.com/radiant-rstats/docker/blob/master/install/rsm-msba-windows.md
62 | * macOS: https://github.com/radiant-rstats/docker/blob/master/install/rsm-msba-macos.md
63 |
64 | ```{r}
65 | ## only push to db if table does not yet exist
66 | ## Note: This step requires you have a reasonable amount of memory
67 | ## accessible for docker. This can be changed in Docker > Preferences
68 | ## > Advanced
69 | ## Memory (RAM) should be set to 4GB or more
70 | if (!"flights" %in% db_tabs) {
71 | copy_to(con, nycflights13::flights, "flights",
72 | temporary = FALSE,
73 | indexes = list(
74 | c("year", "month", "day"),
75 | "carrier",
76 | "tailnum",
77 | "dest"
78 | )
79 | )
80 | }
81 | ```
82 |
83 | ### 3. Create a reference to the data base that (db)plyr can work with
84 |
85 | ```{r}
86 | flights_db <- tbl(con, "flights")
87 | ```
88 |
89 | ### 4. Query the data base using (db)plyr
90 |
91 | ```{r}
92 | flights_db %>% select(year:day, dep_delay, arr_delay)
93 | ```
94 |
95 | ```{r}
96 | flights_db %>% filter(dep_delay > 240)
97 | ```
98 |
99 | ```{r}
100 | flights_db %>%
101 | group_by(dest) %>%
102 | summarise(delay = mean(dep_time))
103 | ```
104 |
105 | ```{r}
106 | tailnum_delay_db <- flights_db %>%
107 | group_by(tailnum) %>%
108 | summarise(
109 | delay = mean(arr_delay),
110 | n = n()
111 | ) %>%
112 | window_order(desc(delay)) %>%
113 | filter(n > 100)
114 |
115 | tailnum_delay_db
116 | tailnum_delay_db %>% show_query()
117 | ```
118 |
119 | ```{r}
120 | nrow(tailnum_delay_db) ## why doesn't this work?
121 | tailnum_delay <- tailnum_delay_db %>% collect()
122 | nrow(tailnum_delay)
123 | tail(tailnum_delay)
124 | ```
125 |
126 | ### 5. Query the flights table using SQL
127 |
128 | You can specify a SQL code chunk to query the database directly
129 |
130 | ```{sql, connection = con, output.var = "flights"}
131 | /*
132 | set the header of the sql chunck to
133 | {sql, connection = con, output.var = "flights"}
134 | */
135 | SELECT * FROM flights WHERE dep_time > 2350
136 | ```
137 |
138 | The variable `flights` now contains the result from the SQL query and will be shown below.
139 |
140 | ```{r}
141 | head(flights)
142 | ```
143 |
--------------------------------------------------------------------------------
/postgres/postgres-connect.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 7,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "from sqlalchemy import create_engine, inspect, text\n",
10 | "import pandas as pd\n",
11 | "\n",
12 | "## connecting to the rsm-docker database\n",
13 | "engine = create_engine('postgresql://jovyan:postgres@127.0.0.1:8765/rsm-docker')"
14 | ]
15 | },
16 | {
17 | "cell_type": "code",
18 | "execution_count": 8,
19 | "metadata": {},
20 | "outputs": [],
21 | "source": [
22 | "## add a table to the database\n",
23 | "with engine.connect() as con:\n",
24 | " con.execution_options(isolation_level=\"AUTOCOMMIT\")\n",
25 | " con.execute(text(\"CREATE TABLE IF NOT EXISTS films (title text, director text, year text)\"))\n",
26 | " con.execute(text(\"INSERT INTO films (title, director, year) VALUES ('Thor: Love and Thunder', 'Taika Waititi', '2022')\"))"
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": 9,
32 | "metadata": {},
33 | "outputs": [],
34 | "source": [
35 | "df = pd.read_sql_query('SELECT * FROM films', con=engine.connect())"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 10,
41 | "metadata": {},
42 | "outputs": [
43 | {
44 | "data": {
45 | "text/html": [
46 | "
\n",
47 | "\n",
60 | "
\n",
61 | " \n",
62 | " \n",
63 | " | \n",
64 | " title | \n",
65 | " director | \n",
66 | " year | \n",
67 | "
\n",
68 | " \n",
69 | " \n",
70 | " \n",
71 | " 0 | \n",
72 | " Thor: Love and Thunder | \n",
73 | " Taika Waititi | \n",
74 | " 2022 | \n",
75 | "
\n",
76 | " \n",
77 | " 1 | \n",
78 | " Thor: Love and Thunder | \n",
79 | " Taika Waititi | \n",
80 | " 2022 | \n",
81 | "
\n",
82 | " \n",
83 | " 2 | \n",
84 | " Thor: Love and Thunder | \n",
85 | " Taika Waititi | \n",
86 | " 2022 | \n",
87 | "
\n",
88 | " \n",
89 | "
\n",
90 | "
"
91 | ],
92 | "text/plain": [
93 | " title director year\n",
94 | "0 Thor: Love and Thunder Taika Waititi 2022\n",
95 | "1 Thor: Love and Thunder Taika Waititi 2022\n",
96 | "2 Thor: Love and Thunder Taika Waititi 2022"
97 | ]
98 | },
99 | "execution_count": 10,
100 | "metadata": {},
101 | "output_type": "execute_result"
102 | }
103 | ],
104 | "source": [
105 | "df"
106 | ]
107 | },
108 | {
109 | "cell_type": "code",
110 | "execution_count": 11,
111 | "metadata": {},
112 | "outputs": [
113 | {
114 | "data": {
115 | "text/plain": [
116 | "['films']"
117 | ]
118 | },
119 | "execution_count": 11,
120 | "metadata": {},
121 | "output_type": "execute_result"
122 | }
123 | ],
124 | "source": [
125 | "inspector = inspect(engine)\n",
126 | "inspector.get_table_names()"
127 | ]
128 | },
129 | {
130 | "cell_type": "code",
131 | "execution_count": 12,
132 | "metadata": {},
133 | "outputs": [
134 | {
135 | "data": {
136 | "text/plain": [
137 | "'Flights table not available'"
138 | ]
139 | },
140 | "execution_count": 12,
141 | "metadata": {},
142 | "output_type": "execute_result"
143 | }
144 | ],
145 | "source": [
146 | "## If you create the \"flights\" table in R(studio) you can access it\n",
147 | "## using the command below\n",
148 | "if \"flights\" in inspector.get_table_names():\n",
149 | " df = pd.read_sql_query('SELECT * FROM flights WHERE dep_time > 2359 LIMIT 5;', con=engine)\n",
150 | "else:\n",
151 | " df = \"Flights table not available\"\n",
152 | "\n",
153 | "df"
154 | ]
155 | },
156 | {
157 | "cell_type": "code",
158 | "execution_count": null,
159 | "metadata": {},
160 | "outputs": [],
161 | "source": []
162 | }
163 | ],
164 | "metadata": {
165 | "kernelspec": {
166 | "display_name": "Python 3 (ipykernel)",
167 | "language": "python",
168 | "name": "python3"
169 | },
170 | "language_info": {
171 | "codemirror_mode": {
172 | "name": "ipython",
173 | "version": 3
174 | },
175 | "file_extension": ".py",
176 | "mimetype": "text/x-python",
177 | "name": "python",
178 | "nbconvert_exporter": "python",
179 | "pygments_lexer": "ipython3",
180 | "version": "3.11.4"
181 | },
182 | "vscode": {
183 | "interpreter": {
184 | "hash": "d4d1e4263499bec80672ea0156c357c1ee493ec2b1c70f0acce89fc37c4a6abe"
185 | }
186 | }
187 | },
188 | "nbformat": 4,
189 | "nbformat_minor": 4
190 | }
191 |
--------------------------------------------------------------------------------
/postgres/postgres-connect.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Connecting to a postgresql database"
3 | output:
4 | html_document:
5 | keep_md: yes
6 | ---
7 |
8 |
9 |
10 | Starting the `rsm-msba-spark` (or `rsm-msba-arm`) computing container also starts a postgresql server running on your machine. You can connect to the database from R using the code chunk below.
11 |
12 |
13 | ```r
14 | library(DBI)
15 | library(RPostgres)
16 | con <- dbConnect(
17 | RPostgres::Postgres(),
18 | user = "jovyan",
19 | host = "127.0.0.1",
20 | port = 8765,
21 | dbname = "rsm-docker",
22 | password = "postgres"
23 | )
24 | ```
25 |
26 | Is there anything in the database? If this is not the first time you are running this Rmarkdown file, the database should already have one or more tables and the code chunk below should show "flights" as an existing table.
27 |
28 |
29 | ```r
30 | library(dplyr)
31 | ```
32 |
33 | ```
34 |
35 | Attaching package: 'dplyr'
36 | ```
37 |
38 | ```
39 | The following objects are masked from 'package:stats':
40 |
41 | filter, lag
42 | ```
43 |
44 | ```
45 | The following objects are masked from 'package:base':
46 |
47 | intersect, setdiff, setequal, union
48 | ```
49 |
50 | ```r
51 | library(dbplyr)
52 | ```
53 |
54 | ```
55 |
56 | Attaching package: 'dbplyr'
57 | ```
58 |
59 | ```
60 | The following objects are masked from 'package:dplyr':
61 |
62 | ident, sql
63 | ```
64 |
65 | ```r
66 | db_tabs <- dbListTables(con)
67 | db_tabs
68 | ```
69 |
70 | ```
71 | [1] "flights"
72 | ```
73 |
If the database is empty, let's start with the example at https://db.rstudio.com/dplyr/ and work through the following steps:
75 |
76 | ### 1. install the nycflights13 package if not already available
77 |
78 |
79 | ```r
80 | ## install nycflights13 package locally if not already available
81 | if (!require("nycflights13")) {
82 | local_dir <- Sys.getenv("R_LIBS_USER")
83 | if (!dir.exists(local_dir)) {
84 | dir.create(local_dir, recursive = TRUE)
85 | }
86 | install.packages("nycflights13", lib = local_dir)
87 | ## now use Session > Restart R and start from the top of
88 | ## of this file again
89 | }
90 | ```
91 |
92 | ```
93 | Loading required package: nycflights13
94 | ```
95 |
96 | ### 2. Push data into the database
97 |
98 | Note that this is a fairly large dataset that we are copying into the database so make sure you have sufficient resources set for docker to use. See the install instructions for details:
99 |
100 | * Windows:
101 | https://github.com/radiant-rstats/docker/blob/master/install/rsm-msba-windows.md
102 | * macOS: https://github.com/radiant-rstats/docker/blob/master/install/rsm-msba-macos.md
103 |
104 |
105 | ```r
106 | ## only push to db if table does not yet exist
107 | ## Note: This step requires you have a reasonable amount of memory
108 | ## accessible for docker. This can be changed in Docker > Preferences
109 | ## > Advanced
110 | ## Memory (RAM) should be set to 4GB or more
111 | if (!"flights" %in% db_tabs) {
112 | copy_to(con, nycflights13::flights, "flights",
113 | temporary = FALSE,
114 | indexes = list(
115 | c("year", "month", "day"),
116 | "carrier",
117 | "tailnum",
118 | "dest"
119 | )
120 | )
121 | }
122 | ```
123 |
124 | ### 3. Create a reference to the data base that (db)plyr can work with
125 |
126 |
127 | ```r
128 | flights_db <- tbl(con, "flights")
129 | ```
130 |
131 | ### 4. Query the data base using (db)plyr
132 |
133 |
134 | ```r
135 | flights_db %>% select(year:day, dep_delay, arr_delay)
136 | ```
137 |
138 | ```
139 | # Source: lazy query [?? x 5]
140 | # Database: postgres [jovyan@127.0.0.1:8765/rsm-docker]
141 | year month day dep_delay arr_delay
142 |
143 | 1 2013 12 9 7 3
144 | 2 2013 12 9 -7 -8
145 | 3 2013 12 9 5 2
146 | 4 2013 12 9 1 -5
147 | 5 2013 12 9 12 28
148 | 6 2013 12 9 83 86
149 | 7 2013 12 9 16 12
150 | 8 2013 12 9 -4 -3
151 | 9 2013 12 9 77 87
152 | 10 2013 12 9 49 40
153 | # … with more rows
154 | ```
155 |
156 |
157 | ```r
158 | flights_db %>% filter(dep_delay > 240)
159 | ```
160 |
161 | ```
162 | # Source: lazy query [?? x 19]
163 | # Database: postgres [jovyan@127.0.0.1:8765/rsm-docker]
164 | year month day dep_time sched_dep_time dep_delay arr_time sched_arr_time
165 |
166 | 1 2013 12 9 1651 1135 316 1815 1250
167 | 2 2013 12 9 1654 1230 264 1928 1455
168 | 3 2013 12 9 1837 1229 368 2029 1413
169 | 4 2013 12 9 1940 1527 253 2122 1656
170 | 5 2013 12 9 2033 1630 243 2354 1935
171 | 6 2013 12 9 2108 1700 248 2252 1840
172 | 7 2013 12 9 2129 1725 244 2338 1915
173 | 8 2013 12 9 2310 1848 262 31 2005
174 | 9 2013 12 10 1048 645 243 1333 857
175 | 10 2013 12 10 1328 905 263 1618 1133
176 | # … with more rows, and 11 more variables: arr_delay , carrier ,
177 | # flight , tailnum , origin , dest , air_time ,
178 | # distance , hour , minute , time_hour
179 | ```
180 |
181 |
182 | ```r
183 | flights_db %>%
184 | group_by(dest) %>%
185 | summarise(delay = mean(dep_time))
186 | ```
187 |
188 | ```
189 | Warning: Missing values are always removed in SQL.
190 | Use `mean(x, na.rm = TRUE)` to silence this warning
191 | This warning is displayed only once per session.
192 | ```
193 |
194 | ```
195 | # Source: lazy query [?? x 2]
196 | # Database: postgres [jovyan@127.0.0.1:8765/rsm-docker]
197 | dest delay
198 |
199 | 1 ABQ 2006.
200 | 2 ACK 1033.
201 | 3 ALB 1627.
202 | 4 ANC 1635.
203 | 5 ATL 1293.
204 | 6 AUS 1521.
205 | 7 AVL 1175.
206 | 8 BDL 1490.
207 | 9 BGR 1690.
208 | 10 BHM 1944.
209 | # … with more rows
210 | ```
211 |
212 |
213 | ```r
214 | tailnum_delay_db <- flights_db %>%
215 | group_by(tailnum) %>%
216 | summarise(
217 | delay = mean(arr_delay),
218 | n = n()
219 | ) %>%
220 | window_order(desc(delay)) %>%
221 | filter(n > 100)
222 |
223 | tailnum_delay_db
224 | ```
225 |
226 | ```
227 | # Source: lazy query [?? x 3]
228 | # Database: postgres [jovyan@127.0.0.1:8765/rsm-docker]
229 | # Ordered by: desc(delay)
230 | tailnum delay n
231 |
232 | 1 N0EGMQ 9.98 371
233 | 2 N10156 12.7 153
234 | 3 N10575 20.7 289
235 | 4 N11106 14.9 129
236 | 5 N11107 15.0 148
237 | 6 N11109 14.9 148
238 | 7 N11113 15.8 138
239 | 8 N11119 30.3 148
240 | 9 N11121 10.3 154
241 | 10 N11127 13.6 124
242 | # … with more rows
243 | ```
244 |
245 | ```r
246 | tailnum_delay_db %>% show_query()
247 | ```
248 |
249 | ```
250 |
251 | SELECT *
252 | FROM (SELECT "tailnum", AVG("arr_delay") AS "delay", COUNT(*) AS "n"
253 | FROM "flights"
254 | GROUP BY "tailnum") "q01"
255 | WHERE ("n" > 100.0)
256 | ```
257 |
258 |
259 | ```r
260 | nrow(tailnum_delay_db) ## why doesn't this work?
261 | ```
262 |
263 | ```
264 | [1] NA
265 | ```
266 |
267 | ```r
268 | tailnum_delay <- tailnum_delay_db %>% collect()
269 | nrow(tailnum_delay)
270 | ```
271 |
272 | ```
273 | [1] 1201
274 | ```
275 |
276 | ```r
277 | tail(tailnum_delay)
278 | ```
279 |
280 | ```
281 | # A tibble: 6 × 3
282 | tailnum delay n
283 |
284 | 1 N5FNAA 8.92 101
285 | 2 N305DQ -4.26 139
286 | 3 N373NW -1.39 110
287 | 4 N543MQ 13.8 202
288 | 5 N602LR 12.1 274
289 | 6 N637VA -1.20 142
290 | ```
291 |
292 | ### 5. Query the flights table using SQL
293 |
294 | You can specify a SQL code chunk to query the database directly
295 |
296 |
297 | ```sql
298 | /*
299 | set the header of the sql chunck to
300 | {sql, connection = con, output.var = "flights"}
301 | */
302 | SELECT * FROM flights WHERE dep_time > 2350
303 | ```
304 |
305 | The variable `flights` now contains the result from the SQL query and will be shown below.
306 |
307 |
308 | ```r
309 | head(flights)
310 | ```
311 |
312 | ```
313 | year month day dep_time sched_dep_time dep_delay arr_time sched_arr_time
314 | 1 2013 12 9 2359 2359 0 759 437
315 | 2 2013 12 9 2400 2359 1 432 440
316 | 3 2013 12 9 2400 2250 70 59 2356
317 | 4 2013 12 11 2355 2359 -4 430 440
318 | 5 2013 12 11 2358 2359 -1 449 437
319 | 6 2013 12 11 2359 2359 0 440 445
320 | arr_delay carrier flight tailnum origin dest air_time distance hour minute
321 | 1 NA B6 839 N520JB JFK BQN NA 1576 23 59
322 | 2 -8 B6 1503 N705JB JFK SJU 195 1598 23 59
323 | 3 63 B6 1816 N187JB JFK SYR 41 209 22 50
324 | 4 -10 B6 1503 N606JB JFK SJU 196 1598 23 59
325 | 5 12 B6 839 N562JB JFK BQN 207 1576 23 59
326 | 6 -5 B6 745 N657JB JFK PSE 203 1617 23 59
327 | time_hour
328 | 1 2013-12-10 04:00:00
329 | 2 2013-12-10 04:00:00
330 | 3 2013-12-10 03:00:00
331 | 4 2013-12-12 04:00:00
332 | 5 2013-12-12 04:00:00
333 | 6 2013-12-12 04:00:00
334 | ```
335 |
--------------------------------------------------------------------------------
/postgres/postgres-createdb.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# script to use for SQL+ETL course
# run from a terminal in JupyterLab
# downloads the Northwind and WestCoastImporters SQL dumps and loads them
# into the postgres server running in the rsm-msba container (port 8765)

cd ~
# -p so re-running the script does not fail when the directory already exists
mkdir -p sql_data

## check if Postgres is running and ready to accept connections
{
  pg_isready -h 127.0.0.1 -p 8765 -U jovyan
} || {
  echo "Postgres is not running or is not ready to accept connections"
}

## get Northwind DB
wget -O ~/sql_data/Northwind_DB_Dump.sql https://www.dropbox.com/s/s3bn7mkmpo391s3/Northwind_DB_Dump.sql

## add the Northwind DB to Postgres
createdb -h 127.0.0.1 -p 8765 -U jovyan Northwind
psql -h 127.0.0.1 -p 8765 Northwind -U jovyan < ~/sql_data/Northwind_DB_Dump.sql

## get WestCoastImporters DB
wget -O ~/sql_data/WestCoastImporters_Full_Dump.sql https://www.dropbox.com/s/gqnhvhhxyjrslmb/WestCoastImporters_Full_Dump.sql

## add the WestCoastImporters DB to Postgres
createdb -h 127.0.0.1 -p 8765 -U jovyan WestCoastImporters
psql -h 127.0.0.1 -p 8765 WestCoastImporters -U jovyan < ~/sql_data/WestCoastImporters_Full_Dump.sql

# Check if data exists in Northwind DB
{
  psql -h 127.0.0.1 -p 8765 -U jovyan -d Northwind -c "\dt"
} || {
  echo "Failed to fetch tables from Northwind database"
}

# Check if data exists in WestCoastImporters DB
{
  psql -h 127.0.0.1 -p 8765 -U jovyan -d WestCoastImporters -c "\dt"
} || {
  echo "Failed to fetch tables from WestCoastImporters database"
}

## clean up
printf "\n\nDo you want to delete the directory with the raw data (y/n)? "
read del_sql_data
# quote the variable: an empty reply (just [ENTER]) would otherwise make
# the test a syntax error ("unary operator expected")
if [ "${del_sql_data}" = "y" ]; then
  {
    rm -rf ~/sql_data/
    echo "Raw data directory deleted"
  } || {
    echo "There was a problem deleting the data directory ~/sql_data"
    echo "Please remove it manually"
  }
fi


# to connect to the database from pgweb in the docker container
# use the below as the "Scheme"
# postgresql://jovyan:postgres@127.0.0.1:8765/Northwind
# postgresql://jovyan:postgres@127.0.0.1:8765/WestCoastImporters

# if you have an issue connecting to postgres
# (1) stop the containers with q + Enter from the launch menu
# (2) type "docker volume rm pg_data" + Enter in an Ubuntu of macOS terminal
# (3) start the docker container again and re-run this script
--------------------------------------------------------------------------------
/postgres/postgres-radiant.state.rda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radiant-rstats/docker/903f0ec856d4d12211fddeed82a35d4bd44ca416/postgres/postgres-radiant.state.rda
--------------------------------------------------------------------------------
/rsm-code-interpreter/Dockerfile:
--------------------------------------------------------------------------------
# Use an official Python runtime as a parent image
FROM python:3.8-slim-buster

# Set the working directory in the container
WORKDIR /usr/src/app

# Add metadata to the image to describe which port the container is listening on at runtime
EXPOSE 8888

# Build toolchain and native libraries needed to compile the Python
# packages listed in requirements-full.txt
RUN apt-get update && apt-get install -y \
    gcc \
    g++ \
    python3-dev \
    libfreetype6-dev \
    pkg-config \
    make \
    libgdal-dev \
    libcairo2-dev \
    portaudio19-dev \
    libgirepository1.0-dev \
    distro-info \
    libboost-all-dev \
    cmake \
    libdbus-1-dev \
    python-apt \
    unattended-upgrades \
    graphviz \
    libgraphviz-dev

# Set GDAL environment variable
ENV CPLUS_INCLUDE_PATH=/usr/include/gdal
ENV C_INCLUDE_PATH=/usr/include/gdal

# Install any needed packages specified in requirements.txt
COPY rsm-code-interpreter/requirements-full.txt ./requirements.txt
RUN pip install --no-cache-dir -r ./requirements.txt

# TODO: add textract after cloning and fixing the requirements file
# (this note was previously uncommented, which is not a valid Dockerfile
# instruction and made the build fail with "unknown instruction: NOW")

# Run app.py when the container launches
CMD ["jupyter", "lab", "--ip='*'", "--port=8888", "--no-browser", "--allow-root"]
42 |
--------------------------------------------------------------------------------
/rsm-msba-arm/Dockerfile:
--------------------------------------------------------------------------------
1 | # find all here: https://quay.io/repository/jupyter/pyspark-notebook?tab=tags
2 | # aarch64-10.22.2024 problems running dpkg for some reason
3 | # impacts installs that need to find OS stuff but gets blocked by conda paths
4 | # FROM quay.io/jupyter/pyspark-notebook@sha256:49678762ef988f83f22681b6086ffa56a6a27b35ad5a4adb8aede7bb3032b8db
5 |
6 | # aarch64-ubuntu-22.04 7/25/2024
7 | FROM quay.io/jupyter/pyspark-notebook@sha256:319eae80d974242c03a3f744a63f373d35b17e4b9d1203c2a0175660f7b0ad0e
8 |
9 | # legacy space-separated LABEL creates a label literally named "Vincent"; use key=value form
9 | LABEL maintainer="Vincent Nijs <radiant@rady.ucsd.edu>"
10 |
11 | ARG DOCKERHUB_VERSION_UPDATE
12 | ENV DOCKERHUB_VERSION=${DOCKERHUB_VERSION_UPDATE}
13 | ENV DOCKERHUB_NAME=rsm-msba-arm
14 | # ENV PANDAS_VERSION="2.0.3" # pyspark image still using 2.0.3
15 | ENV PANDAS_VERSION="2.2.2"
16 | ENV PYARROW_VERSION="16.1.0"
17 | # needed to install gensim
18 | ENV SCIPY_VERSION="1.12.0"
19 |
20 | # Fix DL4006
21 | SHELL ["/bin/bash", "-o", "pipefail", "-c"]
22 |
23 | USER root
24 | ENV POSTGRES_VERSION=14
25 |
26 | # fixes the issue where sudo requires terminal for password when starting postgres
27 | RUN echo "${NB_USER} ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
28 |
29 | RUN apt-get update -qq && apt-get -y --no-install-recommends install \
30 | supervisor \
31 | openssh-server \
32 | libcurl4-openssl-dev \
33 | zsh \
34 | vim \
35 | vifm \
36 | wget \
37 | rsync \
38 | lsb-release \
39 | git \
40 | netcat-traditional \
41 | htop \
42 | openjdk-17-jdk-headless \
43 | ant \
44 | ca-certificates-java \
45 | lsof \
46 | rename \
47 | pipx \
48 | liblzma* \
49 | liblzma-dev \
50 | gnupg* \
51 | gpgv \
52 | dirmngr && \
53 | sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' && \
54 | wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
55 | apt -y update && \
56 | apt-get install -y \
57 | postgresql-${POSTGRES_VERSION} \
58 | postgresql-client-${POSTGRES_VERSION} \
59 | postgresql-contrib-${POSTGRES_VERSION} \
60 | && apt-get clean \
61 | && update-ca-certificates -f;
62 |
63 | # all the ipy and jupyter versions are fixed below due to various errors
64 | # related to async
65 | RUN mamba install --quiet --yes -c conda-forge \
66 | scipy=${SCIPY_VERSION} \
67 | pandas=${PANDAS_VERSION} \
68 | sqlalchemy \
69 | psycopg2 \
70 | ipython-sql \
71 | beautifulsoup4 \
72 | scikit-learn \
73 | mlxtend \
74 | xgboost \
75 | lightgbm \
76 | graphviz \
77 | lime \
78 | shap \
79 | spacy \
80 | nltk \
81 | pydotplus \
82 | networkx \
83 | seaborn \
84 | plotnine \
85 | selenium \
86 | sqlalchemy \
87 | pyLDAvis \
88 | python-dotenv \
89 | statsmodels \
90 | linearmodels \
91 | IPython=8.18.1 \
92 | ipykernel=6.26.0 \
93 | ipywidgets=8.1.1 \
94 | jupyter_client=8.6.0 \
95 | jupyter_core=5.5.1 \
96 | jupyter_server=2.12.1 \
97 | jupyterlab=4.0.9 \
98 | jupytext=1.16.0 \
99 | jupyterlab_widgets \
100 | jupyter-server-proxy \
101 | jupyter-rsession-proxy \
102 | black \
103 | isort \
104 | streamlit \
105 | xlrd \
106 | openpyxl \
107 | pyarrow=${PYARROW_VERSION} \
108 | python-duckdb \
109 | duckdb-engine \
110 | bash_kernel \
111 | sympy \
112 | simpy \
113 | awscli \
114 | bokeh \
115 | dask-kubernetes \
116 | dask-ml \
117 | findspark \
118 | pyspark \
119 | plotly \
120 | && python -m bash_kernel.install
121 |
122 | # causing issues with 1/12/2023 update
123 | # snowflake-connector-python
124 |
125 | COPY files/setup-ml-frameworks.sh setup.sh
126 | RUN chmod 755 setup.sh \
127 | && ./setup.sh \
128 | && rm setup.sh
129 |
130 | # make system (conda) R the first choice
131 | ENV R_VERSION=4.4.2
132 | ENV TERM=xterm
133 | ENV R_HOME=/opt/conda/lib/R
134 | ENV LD_LIBRARY_PATH="/opt/conda/lib:/usr/local/lib:${LD_LIBRARY_PATH}"
135 | ENV PATH="/usr/local/bin:$PATH"
136 |
137 | RUN mamba install --quiet --yes -c conda-forge \
138 | c-compiler \
139 | "r-base>=${R_VERSION}" \
140 | r-curl \
141 | r-matrix \
142 | r-systemfonts \
143 | binutils \
144 | libgit2 \
145 | freetype \
146 | libpng \
147 | libtiff \
148 | libjpeg-turbo \
149 | libxml2 \
150 | unixodbc \
151 | jupyterlab-variableinspector \
152 | jupyterlab_code_formatter \
153 | openssh \
154 | git \
155 | && ln -s /opt/conda/bin/R /usr/local/bin/R \
156 | && ln -s /opt/conda/bin/Rscript /usr/local/bin/Rscript
157 |
158 | # not available through conda-forge for both arm and amd
159 | # or the conda version is causing issues
160 | RUN pip install \
161 | jupyterlab-skip-traceback \
162 | radian \
163 | fastexcel \
164 | polars \
165 | xlsx2csv \
166 | jupysql \
167 | shiny \
168 | shinywidgets \
169 | pyrsm \
170 | textblob \
171 | transformers \
172 | gensim \
173 | alpaca-trade-api \
174 | vadersentiment
175 |
176 | # catboost # not available for arm64
177 |
178 | # connectorx is default for sql stuff in polars but is not built for aarch64
179 | # had to do that manually with a docker file
180 | # see https://github.com/sfu-db/connector-x/issues/386
181 | ENV wheel_name=connectorx-0.3.3-cp311-cp311-manylinux_2_35_aarch64.whl
182 | COPY files/connectorx/${wheel_name} ${wheel_name}
183 | RUN pip install ${wheel_name}
184 |
185 | RUN echo "R_LIBS_USER='~/.rsm-msba/R/${R_VERSION}'" >> ${R_HOME}/etc/Renviron.site
186 | RUN echo '.libPaths(unique(c(Sys.getenv("R_LIBS_USER"), .libPaths())))' >> ${R_HOME}/etc/Rprofile.site
187 |
188 | # packages needed for radiant and reproducible analysis
189 | COPY files/setup-radiant.sh setup.sh
190 | RUN chmod +x setup.sh \
191 | && ./setup.sh \
192 | && rm setup.sh
193 |
194 | # Run the rest of the commands as the postgres user
195 | RUN usermod -aG postgres ${NB_USER} \
196 | && usermod -aG users postgres \
197 | && chown -R postgres:postgres /etc/postgresql/${POSTGRES_VERSION}/ \
198 | && chown -R postgres:postgres /var/lib/postgresql/${POSTGRES_VERSION}/ \
199 | && chmod -R u=rwX,go= /var/lib/postgresql/${POSTGRES_VERSION}/ \
200 | && mkdir -p /var/run/postgresql \
201 | && chown postgres:postgres /var/run/postgresql \
202 | && chmod 2777 /var/run/postgresql \
203 | && mkdir -p /var/log/postgresql \
204 | && chown postgres:postgres /var/log/postgresql
205 |
206 | USER postgres
207 | ARG PGPASSWORD=${PGPASSWORD:-postgres}
208 | ENV PGPASSWORD=${PGPASSWORD}
209 | # create a postgres role for ${NB_USER} with "postgres" as the password
210 | # create a database "rsm-docker" owned by the ${NB_USER} role.
211 | RUN /etc/init.d/postgresql start \
212 | && psql --command "CREATE USER ${NB_USER} WITH SUPERUSER PASSWORD '${PGPASSWORD}';" \
213 | && createdb -O ${NB_USER} rsm-docker
214 |
215 | COPY files/postgresql.conf /etc/postgresql/${POSTGRES_VERSION}/main/postgresql.conf
216 | COPY files/pg_hba.conf /etc/postgresql/${POSTGRES_VERSION}/main/pg_hba.conf
217 |
218 | USER root
219 | # populate version number in conf file
220 | RUN sed -i 's/__version__/'"$POSTGRES_VERSION"'/g' /etc/postgresql/${POSTGRES_VERSION}/main/postgresql.conf
221 | RUN chown -R postgres:postgres /etc/postgresql/${POSTGRES_VERSION}/main/ \
222 | && fix-permissions /etc/postgresql/${POSTGRES_VERSION}/main/
223 |
224 | # oh-my-zsh (need to install wget and curl again ...)
225 | RUN apt-get update -qq && apt-get -y --no-install-recommends install wget curl \
226 | && sh -c "$(curl -fsSL https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" \
227 | && git clone https://github.com/zsh-users/zsh-completions ${ZSH_CUSTOM:=~/.oh-my-zsh/custom}/plugins/zsh-completions \
228 | && git clone https://github.com/zsh-users/zsh-autosuggestions ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-autosuggestions \
229 | && git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-syntax-highlighting \
230 | && git clone https://github.com/supercrabtree/k ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/k \
231 | && git clone --depth=1 https://github.com/romkatv/powerlevel10k.git ${ZSH_CUSTOM:-$HOME/.oh-my-zsh/custom}/themes/powerlevel10k \
232 | && cp -R /home/jovyan/.oh-my-zsh /etc/skel/.oh-my-zsh
233 |
234 | COPY files/zshrc /etc/skel/.zshrc
235 | COPY files/p10k.zsh /etc/skel/.p10k.zsh
236 | COPY files/usethis /usr/local/bin/usethis
237 | COPY files/clean.sh /usr/local/bin/clean
238 |
239 | # settings for local install of python packages
240 | ARG PYBASE=/home/${NB_USER}/.rsm-msba
241 | ENV PYBASE=${PYBASE}
242 | ENV PYTHONUSERBASE=${PYBASE} \
243 | JUPYTER_PATH=${PYBASE}/share/jupyter \
244 | JUPYTER_DATA_DIR=${PYBASE}/share/jupyter \
245 | JUPYTER_CONFIG_DIR=${PYBASE}/jupyter \
246 | JUPYTER_RUNTIME_DIR=/tmp/jupyter/runtime \
247 | RSTUDIO_WHICH_R=/usr/local/bin/R \
248 | SHELL=/bin/zsh \
249 | ZDOTDIR=/home/${NB_USER}/.rsm-msba/zsh
250 |
251 | COPY files/install-rstudio.sh setup.sh
252 | RUN chmod 755 setup.sh \
253 | && ./setup.sh \
254 | && rm setup.sh
255 |
256 | # setup quarto - can be used with Rstudio
257 | # and when connecting to running container
258 | # from VSCode
259 | ENV QUARTO_VERSION="1.5.55"
260 | COPY files/setup-quarto.sh setup.sh
261 | RUN chmod +x setup.sh \
262 | && ./setup.sh \
263 | && rm setup.sh
264 |
265 | ENV POETRY_VERSION="1.8.3"
266 |
267 | # updating the supervisord.conf file for Jupyter and the notebook_config file
268 | COPY files/supervisord.conf /etc/supervisor/conf.d/supervisord.conf
269 | COPY files/condarc /opt/conda/.condarc
270 | RUN mkdir -p /var/log/supervisor \
271 | && fix-permissions /var/log/supervisor \
272 | && fix-permissions /etc/supervisor/conf.d/ \
273 | && fix-permissions "${CONDA_DIR}"
274 |
275 | # copy base conda environment management script
276 | COPY files/ccenv.sh /usr/local/bin/ccenv
277 | COPY files/cl.sh /usr/local/bin/cl
278 | COPY files/cr.sh /usr/local/bin/cr
279 | COPY files/ci.sh /usr/local/bin/ci
280 | COPY files/ce.sh /usr/local/bin/ce
281 |
282 | # Copy the launch script into the image
283 | COPY launch-${DOCKERHUB_NAME}.sh /opt/launch.sh
284 | COPY files/setup.sh /usr/local/bin/setup
285 | RUN fix-permissions /etc/skel \
286 | && fix-permissions /usr/local/bin \
287 | && chmod 755 /usr/local/bin/*
288 |
289 | # get pgweb — NOTE(review): asset name "pgweb_linux_arm64_v7.zip" mixes the arm64 arch with a v7 (32-bit ARM) suffix; confirm this asset exists for release v0.11.11
290 | RUN wget -O pgweb.zip https://github.com/sosedoff/pgweb/releases/download/v0.11.11/pgweb_linux_arm64_v7.zip \
291 | && unzip pgweb.zip -d pgweb_dir \
292 | && rm pgweb.zip \
293 | && mv pgweb_dir/* /usr/local/bin/pgweb \
294 | && rm -rf pgweb_dir
295 |
296 | # setting up jupyter-server-proxy extensions pgweb, gitgadget, and radiant
297 | RUN pip install git+https://github.com/vnijs/jupyter-pgweb-proxy.git \
298 | && pip install git+https://github.com/vnijs/jupyter-gitgadget-proxy.git \
299 | && pip install git+https://github.com/vnijs/jupyter-radiant-proxy.git
300 |
301 | # packages needed for radiant and reproducible analysis
302 | COPY files/setup-extra.sh setup.sh
303 | RUN chmod +x setup.sh \
304 | && ./setup.sh \
305 | && rm setup.sh
306 |
307 | RUN mamba update --yes pandoc \
308 | && mamba clean --all -f -y \
309 | && fix-permissions "${CONDA_DIR}" \
310 | && fix-permissions "/home/${NB_USER}"
311 |
312 | # packages need for arrow
313 | COPY files/setup-arrow.sh setup.sh
314 | RUN chmod +x setup.sh \
315 | && ./setup.sh \
316 | && rm setup.sh
317 |
318 | # setup hadoop
319 | # use ENV key=value form; the legacy space-separated form is deprecated
319 | ENV JAVA_HOME="/usr/lib/jvm/java-17-openjdk-arm64/"
320 | ENV HADOOP_VERSION=3.3.4
321 | ENV HADOOP_HOME=/opt/hadoop
322 | ENV HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
323 | COPY files/setup-hadoop.sh setup.sh
324 | RUN chmod +x setup.sh \
325 | && ./setup.sh \
326 | && rm setup.sh
327 |
328 | # hadoop configuration
329 | ADD files/scalable_analytics/core-site.xml $HADOOP_HOME/etc/hadoop/
330 | ADD files/scalable_analytics/hdfs-site.xml $HADOOP_HOME/etc/hadoop/
331 | ADD files/scalable_analytics/init-dfs.sh /opt/hadoop/
332 | ADD files/scalable_analytics/start-dfs.sh /opt/hadoop/
333 | ADD files/scalable_analytics/stop-dfs.sh /opt/hadoop/
334 | RUN chown -R ${NB_USER} ${HADOOP_HOME} \
335 | && chmod 755 ${HADOOP_HOME}/*.sh \
336 | && chmod 755 /usr/bin/hadoop
337 | ENV PATH $PATH:$HADOOP_HOME/bin
338 |
339 | # setting up ssh connection
340 | RUN mkdir -p /var/run/sshd \
341 | && ssh-keygen -A \
342 | && echo 'PasswordAuthentication no' >> /etc/ssh/sshd_config \
343 | && echo 'PermitRootLogin no' >> /etc/ssh/sshd_config \
344 | && echo 'PubkeyAuthentication yes' >> /etc/ssh/sshd_config \
345 | && echo "AllowUsers ${NB_USER}" >> /etc/ssh/sshd_config \
346 | && chmod 0755 /var/run/sshd \
347 | && chsh -s $(which zsh) ${NB_USER}
348 |
349 | # fixing version issue
350 | RUN apt-get update && \
351 | apt-get install -y openssl=3.0.2* && \
352 | apt-mark hold openssl && \
353 | apt-get install -y openssh-server && \
354 | rm -rf /var/lib/apt/lists/*
355 |
356 | # updating permissions — NOTE(review): "chmod -R +x" marks regular log files executable; "+X" (capital) would add execute only to directories — confirm intent
357 | RUN chown -R ${NB_USER} /var/log/ \
358 | && chmod -R +x /var/log/
359 |
360 | # cleanup
361 | RUN rm -rf ~/work/ \
362 | && rm -f ~/*.*
363 |
364 | EXPOSE 22 4040 4041 8181 8282 8765 8989 8501 8000
365 | CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"]
366 |
367 | # Switch back to jovyan to avoid accidental container runs as root
368 | USER ${NB_UID}
369 | ENV HOME /home/${NB_USER}
370 | WORKDIR "${HOME}"
371 |
--------------------------------------------------------------------------------
/rsm-msba-arm/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 | rsm-msba-arm:
4 |     image: "vnijs/rsm-msba-arm-arm"  # NOTE(review): doubled "-arm" suffix looks like a typo — DOCKERHUB_NAME in the Dockerfile is "rsm-msba-arm"; confirm published image name
5 | environment:
6 | USER: jovyan
7 | HOME: /home/jovyan
8 | SHELL: /bin/zsh
9 | PYTHONUSERBASE: /home/jovyan/.rsm-msba
10 | JUPYTER_PATH: /home/jovyan/.rsm-msba/share/jupyter
11 | JUPYTER_RUNTIME_DIR: /tmp/jupyter/runtime
12 | JUPYTER_CONFIG_DIR: /home/jovyan/.rsm-msba/jupyter
13 | ports:
14 | - 127.0.0.1:8989:8989
15 | - 127.0.0.1:8181:8181
16 | - 127.0.0.1:8282:8282
17 | - 127.0.0.1:8765:8765
18 | - 127.0.0.1:8501:8501
19 | - 127.0.0.1:8000:8000
20 | volumes:
21 | - ~:/home/jovyan
22 | - pg_data:/var/lib/postgresql/14/main
23 | volumes:
24 | pg_data:
25 | external: true
26 |
--------------------------------------------------------------------------------
/rsm-msba-intel-jupyterhub/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM vnijs/rsm-msba-intel:latest
2 |
3 | # Fix DL4006
4 | SHELL ["/bin/bash", "-o", "pipefail", "-c"]
5 |
6 | RUN npm install -g configurable-http-proxy
7 |
8 | COPY files/start.sh /usr/local/bin/
9 | COPY files/start-notebook.sh /usr/local/bin/
10 | COPY files/start-singleuser.sh /usr/local/bin/
11 |
12 | ## CUDA
13 | # COPY files/cuda.sh /opt/cuda/cuda.sh
14 | # RUN sh /opt/cuda/cuda.sh
15 |
16 | # we probably also need something like the below
17 | # https://github.com/rocker-org/rocker-versioned2/blob/95a84fa90a107026eea69090e8b03dd21f731e7f/scripts/config_R_cuda.sh
18 |
19 | # add jupyterhub_config.py. It could even reside in /srv/jupyterhub, not sure at the moment
20 | COPY files/jupyterhub_config.py /etc/jupyter
21 |
22 | # create NB_USER user with UID=1000 and in the 'users' group
23 | # and make sure these dirs are writable by the `users` group.
24 | USER root
25 |
26 | RUN groupadd wheel -g 11 && \
27 | echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
28 | chmod g+w /etc/passwd && \
29 | fix-permissions $HOME && \
30 | fix-permissions /opt && \
31 | mkdir /var/log/rstudio-server && \
32 | fix-permissions /var/log/rstudio-server && \
33 | fix-permissions /var/lib/rstudio-server
34 |
35 | ENV JUPYTER_ENABLE_LAB=1
36 | ENV CRAN=${CRAN:-https://cran.r-project.org}
37 | ENV NCPUS=${NCPUS:--1}
38 |
39 | # NOTE: check env setting in jupyterhub_config()
40 | ## limit the number of threads that a container can access from R
41 | RUN R -e "install.packages('RhpcBLASctl', repo='${CRAN}', Ncpus=${NCPUS})" \
42 | && echo "OPENBLAS_NUM_THREADS=8" >> ${R_HOME}/etc/Renviron.site \
43 | && echo "OMP_NUM_THREADS=8" >> ${R_HOME}/etc/Renviron.site \
44 | && echo 'RhpcBLASctl::blas_set_num_threads(Sys.getenv("OPENBLAS_NUM_THREADS"))' >> ${R_HOME}/etc/Rprofile.site \
45 | && echo 'RhpcBLASctl::omp_set_num_threads(Sys.getenv("OMP_NUM_THREADS"))' >> ${R_HOME}/etc/Rprofile.site
46 |
47 | # RUN conda remove -y --force jupyterlab_code_formatter
48 | # RUN pip install jupyterlab_code_formatter
49 |
50 | # Copy the launch script into the image
51 | COPY launch-rsm-msba-intel-jupyterhub.sh /opt/launch.sh
52 | COPY files/setup-jupyterhub.sh /usr/local/bin/setup
53 | RUN fix-permissions /usr/local/bin \
54 | && chmod 755 /usr/local/bin/* \
55 | && chmod 755 /opt/launch.sh
56 |
57 | # use ENV key=value form; the legacy space-separated form is deprecated
57 | ENV TINI_VERSION=v0.18.0
58 | ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
59 | # this stage already runs as root (USER root above), so sudo is unnecessary
59 | RUN chmod +x /tini
60 |
61 | ENTRYPOINT ["/tini", "-g", "--"]
62 | CMD ["start-notebook.sh"]
63 |
64 | USER ${NB_UID}
65 | ENV HOME /home/${NB_USER}
66 | WORKDIR "${HOME}"
67 |
--------------------------------------------------------------------------------
/rsm-msba-intel-jupyterhub/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 |   rsm-msba-intel-jupyterhub:  # service name corrected; was "rsm-msba-arm" (copy-paste from the arm compose file)
4 | image: "vnijs/rsm-msba-intel-jupyterhub"
5 | environment:
6 | USER: jovyan
7 | HOME: /home/jovyan
8 | SHELL: /bin/zsh
9 | PYTHONUSERBASE: /home/jovyan/.rsm-msba
10 | JUPYTER_PATH: /home/jovyan/.rsm-msba/share/jupyter
11 | JUPYTER_RUNTIME_DIR: /tmp/jupyter/runtime
12 | JUPYTER_CONFIG_DIR: /home/jovyan/.rsm-msba/jupyter
13 | ports:
14 | - 127.0.0.1:8989:8989
15 | - 127.0.0.1:8181:8181
16 | - 127.0.0.1:8282:8282
17 | - 127.0.0.1:8765:8765
18 | - 127.0.0.1:8501:8501
19 | - 127.0.0.1:8000:8000
20 | volumes:
21 | - ~:/home/jovyan
22 | - pg_data:/var/lib/postgresql/14/main
23 | volumes:
24 | pg_data:
25 | external: true
26 |
--------------------------------------------------------------------------------
/rsm-msba-intel/Dockerfile:
--------------------------------------------------------------------------------
1 | # find all here: https://quay.io/repository/jupyter/pyspark-notebook?tab=tags
2 | # x86_64-10.22.2024 problems running dpkg for some reason
3 | # impacts installs that need to find OS stuff but gets blocked by conda paths
4 | # FROM quay.io/jupyter/pyspark-notebook@sha256:a16447608f176d372e1e84893dbd489a1a524c0b35c509ca8c341a80715affc7
5 |
6 | # x86_64-ubuntu-22.04 7/25/2024
7 | FROM quay.io/jupyter/pyspark-notebook@sha256:fb5e83562d196ead71f31e4ae1e37c6fe13e1c13c3cbf55d64ffc6166f0aedb7
8 |
9 | # legacy space-separated LABEL creates a label literally named "Vincent"; use key=value form
9 | LABEL maintainer="Vincent Nijs <radiant@rady.ucsd.edu>"
10 |
11 | ARG DOCKERHUB_VERSION_UPDATE
12 | ENV DOCKERHUB_VERSION=${DOCKERHUB_VERSION_UPDATE}
13 | ENV DOCKERHUB_NAME=rsm-msba-intel
14 | # ENV PANDAS_VERSION="2.0.3" # pyspark image still using 2.0.3
15 | ENV PANDAS_VERSION="2.2.2"
16 | ENV PYARROW_VERSION="16.1.0"
17 | # needed to install gensim
18 | ENV SCIPY_VERSION="1.12.0"
19 |
20 | # Fix DL4006
21 | SHELL ["/bin/bash", "-o", "pipefail", "-c"]
22 |
23 | USER root
24 | ENV POSTGRES_VERSION=14
25 |
26 | # fixes the issue where sudo requires terminal for password when starting postgres
27 | RUN echo "${NB_USER} ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
28 |
29 | RUN apt-get update -qq && apt-get -y --no-install-recommends install \
30 | supervisor \
31 | openssh-server \
32 | libcurl4-openssl-dev \
33 | zsh \
34 | vim \
35 | vifm \
36 | wget \
37 | rsync \
38 | lsb-release \
39 | git \
40 | netcat-traditional \
41 | htop \
42 | openjdk-17-jdk-headless \
43 | ant \
44 | ca-certificates-java \
45 | lsof \
46 | rename \
47 | pipx \
48 | liblzma* \
49 | liblzma-dev \
50 | gnupg* \
51 | gpgv \
52 | dirmngr && \
53 | sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' && \
54 | wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
55 | apt -y update && \
56 | apt-get install -y \
57 | postgresql-${POSTGRES_VERSION} \
58 | postgresql-client-${POSTGRES_VERSION} \
59 | postgresql-contrib-${POSTGRES_VERSION} \
60 | && apt-get clean \
61 | && update-ca-certificates -f;
62 |
63 | # all the ipy and jupyter versions are fixed below due to various errors
64 | # related to async
65 | RUN mamba install --quiet --yes -c conda-forge \
66 | scipy=${SCIPY_VERSION} \
67 | pandas=${PANDAS_VERSION} \
68 | sqlalchemy \
69 | psycopg2 \
70 | ipython-sql \
71 | beautifulsoup4 \
72 | scikit-learn \
73 | mlxtend \
74 | xgboost \
75 | lightgbm \
76 | graphviz \
77 | lime \
78 | shap \
79 | spacy \
80 | nltk \
81 | pydotplus \
82 | networkx \
83 | seaborn \
84 | plotnine \
85 | selenium \
86 | sqlalchemy \
87 | pyLDAvis \
88 | python-dotenv \
89 | statsmodels \
90 | linearmodels \
91 | IPython=8.18.1 \
92 | ipykernel=6.26.0 \
93 | ipywidgets=8.1.1 \
94 | jupyter_client=8.6.0 \
95 | jupyter_core=5.5.1 \
96 | jupyter_server=2.12.1 \
97 | jupyterlab=4.0.9 \
98 | jupytext=1.16.0 \
99 | jupyterlab_widgets \
100 | jupyter-server-proxy \
101 | jupyter-rsession-proxy \
102 | black \
103 | isort \
104 | streamlit \
105 | xlrd \
106 | openpyxl \
107 | pyarrow=${PYARROW_VERSION} \
108 | python-duckdb \
109 | duckdb-engine \
110 | bash_kernel \
111 | sympy \
112 | simpy \
113 | awscli \
114 | bokeh \
115 | dask-kubernetes \
116 | dask-ml \
117 | findspark \
118 | pyspark \
119 | plotly \
120 | && python -m bash_kernel.install
121 |
122 | # causing issues with 1/12/2023 update
123 | # snowflake-connector-python
124 |
125 | COPY files/setup-ml-frameworks.sh setup.sh
126 | RUN chmod 755 setup.sh \
127 | && ./setup.sh \
128 | && rm setup.sh
129 |
130 | # make system (conda) R the first choice
131 | ENV R_VERSION=4.4.2
132 | ENV TERM=xterm
133 | ENV R_HOME=/opt/conda/lib/R
134 | ENV LD_LIBRARY_PATH="/opt/conda/lib:/usr/local/lib:${LD_LIBRARY_PATH}"
135 | ENV PATH="/usr/local/bin:$PATH"
136 |
137 | RUN mamba install --quiet --yes -c conda-forge \
138 | c-compiler \
139 | "r-base>=${R_VERSION}" \
140 | r-curl \
141 | r-matrix \
142 | r-systemfonts \
143 | binutils \
144 | libgit2 \
145 | freetype \
146 | libpng \
147 | libtiff \
148 | libjpeg-turbo \
149 | libxml2 \
150 | unixodbc \
151 | jupyterlab-variableinspector \
152 | jupyterlab_code_formatter \
153 | openssh \
154 | git \
155 | && ln -s /opt/conda/bin/R /usr/local/bin/R \
156 | && ln -s /opt/conda/bin/Rscript /usr/local/bin/Rscript
157 |
158 | # not available through conda-forge for both arm and amd
159 | # or the conda version is causing issues
160 | RUN pip install \
161 | jupyterlab-skip-traceback \
162 | radian \
163 | fastexcel \
164 | polars \
165 | connectorx \
166 | xlsx2csv \
167 | jupysql \
168 | shiny \
169 | shinywidgets \
170 | pyrsm \
171 | textblob \
172 | transformers \
173 | gensim \
174 | alpaca-trade-api \
175 | vadersentiment
176 |
177 |
178 | RUN echo "R_LIBS_USER='~/.rsm-msba/R/${R_VERSION}'" >> ${R_HOME}/etc/Renviron.site
179 | RUN echo '.libPaths(unique(c(Sys.getenv("R_LIBS_USER"), .libPaths())))' >> ${R_HOME}/etc/Rprofile.site
180 |
181 | # packages needed for radiant and reproducible analysis
182 | COPY files/setup-radiant.sh setup.sh
183 | RUN chmod +x setup.sh \
184 | && ./setup.sh \
185 | && rm setup.sh
186 |
187 | # Run the rest of the commands as the postgres user
188 | RUN usermod -aG postgres ${NB_USER} \
189 | && usermod -aG users postgres \
190 | && chown -R postgres:postgres /etc/postgresql/${POSTGRES_VERSION}/ \
191 | && chown -R postgres:postgres /var/lib/postgresql/${POSTGRES_VERSION}/ \
192 | && chmod -R u=rwX,go= /var/lib/postgresql/${POSTGRES_VERSION}/ \
193 | && mkdir -p /var/run/postgresql \
194 | && chown postgres:postgres /var/run/postgresql \
195 | && chmod 2777 /var/run/postgresql \
196 | && mkdir -p /var/log/postgresql \
197 | && chown postgres:postgres /var/log/postgresql
198 |
199 | USER postgres
200 | ARG PGPASSWORD=${PGPASSWORD:-postgres}
201 | ENV PGPASSWORD=${PGPASSWORD}
202 | # create a postgres role for ${NB_USER} with "postgres" as the password
203 | # create a database "rsm-docker" owned by the ${NB_USER} role.
204 | RUN /etc/init.d/postgresql start \
205 | && psql --command "CREATE USER ${NB_USER} WITH SUPERUSER PASSWORD '${PGPASSWORD}';" \
206 | && createdb -O ${NB_USER} rsm-docker
207 |
208 | COPY files/postgresql.conf /etc/postgresql/${POSTGRES_VERSION}/main/postgresql.conf
209 | COPY files/pg_hba.conf /etc/postgresql/${POSTGRES_VERSION}/main/pg_hba.conf
210 |
211 | USER root
212 | # populate version number in conf file
213 | RUN sed -i 's/__version__/'"$POSTGRES_VERSION"'/g' /etc/postgresql/${POSTGRES_VERSION}/main/postgresql.conf
214 | RUN chown -R postgres:postgres /etc/postgresql/${POSTGRES_VERSION}/main/ \
215 | && fix-permissions /etc/postgresql/${POSTGRES_VERSION}/main/
216 |
217 | # oh-my-zsh (need to install wget and curl again ...)
218 | RUN apt-get update -qq && apt-get -y --no-install-recommends install wget curl \
219 | && sh -c "$(curl -fsSL https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh)" \
220 | && git clone https://github.com/zsh-users/zsh-completions ${ZSH_CUSTOM:=~/.oh-my-zsh/custom}/plugins/zsh-completions \
221 | && git clone https://github.com/zsh-users/zsh-autosuggestions ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-autosuggestions \
222 | && git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-syntax-highlighting \
223 | && git clone https://github.com/supercrabtree/k ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/k \
224 | && git clone --depth=1 https://github.com/romkatv/powerlevel10k.git ${ZSH_CUSTOM:-$HOME/.oh-my-zsh/custom}/themes/powerlevel10k \
225 | && cp -R /home/jovyan/.oh-my-zsh /etc/skel/.oh-my-zsh
226 |
227 | COPY files/zshrc /etc/skel/.zshrc
228 | COPY files/p10k.zsh /etc/skel/.p10k.zsh
229 | COPY files/usethis /usr/local/bin/usethis
230 | COPY files/clean.sh /usr/local/bin/clean
231 |
232 | # settings for local install of python packages
233 | ARG PYBASE=/home/${NB_USER}/.rsm-msba
234 | ENV PYBASE=${PYBASE}
235 | ENV PYTHONUSERBASE=${PYBASE} \
236 | JUPYTER_PATH=${PYBASE}/share/jupyter \
237 | JUPYTER_DATA_DIR=${PYBASE}/share/jupyter \
238 | JUPYTER_CONFIG_DIR=${PYBASE}/jupyter \
239 | JUPYTER_RUNTIME_DIR=/tmp/jupyter/runtime \
240 | RSTUDIO_WHICH_R=/usr/local/bin/R \
241 | SHELL=/bin/zsh \
242 | ZDOTDIR=/home/${NB_USER}/.rsm-msba/zsh
243 |
244 | COPY files/install-rstudio.sh setup.sh
245 | RUN chmod 755 setup.sh \
246 | && ./setup.sh \
247 | && rm setup.sh
248 |
249 | # setup quarto - can be used with Rstudio
250 | # and when connecting to running container
251 | # from VSCode
252 | ENV QUARTO_VERSION="1.5.55"
253 | COPY files/setup-quarto.sh setup.sh
254 | RUN chmod +x setup.sh \
255 | && ./setup.sh \
256 | && rm setup.sh
257 |
258 | ENV POETRY_VERSION="1.8.3"
259 |
260 | # updating the supervisord.conf file for Jupyter and the notebook_config file
261 | COPY files/supervisord.conf /etc/supervisor/conf.d/supervisord.conf
262 | COPY files/condarc /opt/conda/.condarc
263 | RUN mkdir -p /var/log/supervisor \
264 | && fix-permissions /var/log/supervisor \
265 | && fix-permissions /etc/supervisor/conf.d/ \
266 | && fix-permissions "${CONDA_DIR}"
267 |
268 | # copy base conda environment management script
269 | COPY files/ccenv.sh /usr/local/bin/ccenv
270 | COPY files/cl.sh /usr/local/bin/cl
271 | COPY files/cr.sh /usr/local/bin/cr
272 | COPY files/ci.sh /usr/local/bin/ci
273 | COPY files/ce.sh /usr/local/bin/ce
274 |
275 | # Copy the launch script into the image
276 | COPY launch-${DOCKERHUB_NAME}.sh /opt/launch.sh
277 | COPY files/setup.sh /usr/local/bin/setup
278 | RUN fix-permissions /etc/skel \
279 | && fix-permissions /usr/local/bin \
280 | && chmod 755 /usr/local/bin/*
281 |
282 | # get pgweb
283 | RUN wget -O pgweb.zip https://github.com/sosedoff/pgweb/releases/download/v0.11.11/pgweb_linux_amd64.zip \
284 | && unzip pgweb.zip -d pgweb_dir \
285 | && rm pgweb.zip \
286 | && mv pgweb_dir/* /usr/local/bin/pgweb \
287 | && rm -rf pgweb_dir
288 |
289 | # setting up jupyter-server-proxy extensions pgweb, gitgadget, and radiant
290 | RUN pip install git+https://github.com/vnijs/jupyter-pgweb-proxy.git \
291 | && pip install git+https://github.com/vnijs/jupyter-gitgadget-proxy.git \
292 | && pip install git+https://github.com/vnijs/jupyter-radiant-proxy.git
293 |
294 | # packages needed for radiant and reproducible analysis
295 | COPY files/setup-extra.sh setup.sh
296 | RUN chmod +x setup.sh \
297 | && ./setup.sh \
298 | && rm setup.sh
299 |
300 | RUN mamba update --yes pandoc \
301 | && mamba clean --all -f -y \
302 | && fix-permissions "${CONDA_DIR}" \
303 | && fix-permissions "/home/${NB_USER}"
304 |
305 | # packages need for arrow
306 | COPY files/setup-arrow.sh setup.sh
307 | RUN chmod +x setup.sh \
308 | && ./setup.sh \
309 | && rm setup.sh
310 |
311 | # setup hadoop
312 | # use ENV key=value form; the legacy space-separated form is deprecated
312 | ENV JAVA_HOME="/usr/lib/jvm/java-17-openjdk-amd64/"
313 | ENV HADOOP_VERSION=3.3.4
314 | ENV HADOOP_HOME=/opt/hadoop
315 | ENV HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
316 | COPY files/setup-hadoop.sh setup.sh
317 | RUN chmod +x setup.sh \
318 | && ./setup.sh \
319 | && rm setup.sh
320 |
321 | # hadoop configuration
322 | ADD files/scalable_analytics/core-site.xml $HADOOP_HOME/etc/hadoop/
323 | ADD files/scalable_analytics/hdfs-site.xml $HADOOP_HOME/etc/hadoop/
324 | ADD files/scalable_analytics/init-dfs.sh /opt/hadoop/
325 | ADD files/scalable_analytics/start-dfs.sh /opt/hadoop/
326 | ADD files/scalable_analytics/stop-dfs.sh /opt/hadoop/
327 | RUN chown -R ${NB_USER} ${HADOOP_HOME} \
328 | && chmod 755 ${HADOOP_HOME}/*.sh \
329 | && chmod 755 /usr/bin/hadoop
330 | ENV PATH $PATH:$HADOOP_HOME/bin
331 |
332 | # setting up ssh connection
333 | RUN mkdir -p /var/run/sshd \
334 | && ssh-keygen -A \
335 | && echo 'PasswordAuthentication no' >> /etc/ssh/sshd_config \
336 | && echo 'PermitRootLogin no' >> /etc/ssh/sshd_config \
337 | && echo 'PubkeyAuthentication yes' >> /etc/ssh/sshd_config \
338 | && echo "AllowUsers ${NB_USER}" >> /etc/ssh/sshd_config \
339 | && chmod 0755 /var/run/sshd \
340 | && chsh -s $(which zsh) ${NB_USER}
341 |
342 | # fixing version issue
343 | # RUN apt-get update && \
344 | # apt-get install -y openssl=3.0.2* libssl1.1 && \
345 | # apt-mark hold openssl && \
346 | # apt-get install -y openssh-server && \
347 | # rm -rf /var/lib/apt/lists/*
348 |
349 | # updating permissions — NOTE(review): recursively chown-ing /var/lib/ to ${NB_USER} also re-owns /var/lib/postgresql, undoing the postgres-only ownership and "u=rwX,go=" permissions set earlier; confirm this is intended
350 | RUN chown -R ${NB_USER} /var/log/ \
351 | && chmod -R +w /var/log/ \
352 | && chown -R ${NB_USER} /var/lib/ \
353 | && chmod -R +x /var/lib/ \
354 | && rm -rf ~/work/ \
355 | && rm -f ~/*.*
356 |
357 | COPY files/start-services.sh /usr/local/bin/
358 | RUN chmod +x /usr/local/bin/start-services.sh
359 |
360 | EXPOSE 22 4040 4041 8181 8282 8765 8989 8501 8000
361 | # CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"]
362 |
363 | # Switch back to jovyan to avoid accidental container runs as root
364 | USER ${NB_UID}
365 | ENV HOME /home/${NB_USER}
366 | WORKDIR "${HOME}"
367 |
368 | # CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"]
369 | # CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"]
370 |
371 | CMD ["/usr/local/bin/start-services.sh"]
--------------------------------------------------------------------------------
/rsm-msba-intel/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 |   rsm-msba-intel:  # service name corrected; was "rsm-msba-arm" (copy-paste from the arm compose file)
4 | image: "vnijs/rsm-msba-intel"
5 | environment:
6 | USER: jovyan
7 | HOME: /home/jovyan
8 | SHELL: /bin/zsh
9 | PYTHONUSERBASE: /home/jovyan/.rsm-msba
10 | JUPYTER_PATH: /home/jovyan/.rsm-msba/share/jupyter
11 | JUPYTER_RUNTIME_DIR: /tmp/jupyter/runtime
12 | JUPYTER_CONFIG_DIR: /home/jovyan/.rsm-msba/jupyter
13 | ports:
14 | - 127.0.0.1:8989:8989
15 | - 127.0.0.1:8181:8181
16 | - 127.0.0.1:8282:8282
17 | - 127.0.0.1:8765:8765
18 | - 127.0.0.1:8501:8501
19 | - 127.0.0.1:8000:8000
20 | - 127.0.0.1:2222:22
21 | volumes:
22 | - ~:/home/jovyan
23 | - pg_data:/var/lib/postgresql/14/main
24 | volumes:
25 | pg_data:
26 | external: true
27 |
--------------------------------------------------------------------------------
/rsm-simple-arm/Dockerfile:
--------------------------------------------------------------------------------
1 | # find all tags here: https://quay.io/repository/jupyter/pyspark-notebook?tab=tags
2 | # aarch64-10.22.2024: problems running dpkg for some reason
3 | # impacts installs that need to find OS stuff but gets blocked by conda paths
4 | FROM quay.io/jupyter/pyspark-notebook@sha256:319eae80d974242c03a3f744a63f373d35b17e4b9d1203c2a0175660f7b0ad0e
5 |
6 | # use the documented key=value LABEL form; the legacy space-separated form
7 | # produced a label keyed "Vincent" instead of a maintainer label
8 | LABEL maintainer="Vincent Nijs <radiant@rady.ucsd.edu>"
9 |
10 | # Fix DL4006: abort RUN steps when any command in a pipe fails
11 | SHELL ["/bin/bash", "-o", "pipefail", "-c"]
12 |
13 | USER root
14 |
15 | # fixes the issue where sudo requires a terminal for password when starting postgres
16 | RUN echo "${NB_USER} ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
17 |
18 | COPY files/start-services-simplified.sh /usr/local/bin/start-services.sh
19 | RUN chmod +x /usr/local/bin/start-services.sh
20 |
21 | # setting up the ssh daemon: key-based logins only, no root login,
22 | # and zsh as the default shell for ${NB_USER}
23 | RUN mkdir -p /var/run/sshd \
24 |     && ssh-keygen -A \
25 |     && echo 'PasswordAuthentication no' >> /etc/ssh/sshd_config \
26 |     && echo 'PermitRootLogin no' >> /etc/ssh/sshd_config \
27 |     && echo 'PubkeyAuthentication yes' >> /etc/ssh/sshd_config \
28 |     && echo "AllowUsers ${NB_USER}" >> /etc/ssh/sshd_config \
29 |     && chmod 0755 /var/run/sshd \
30 |     && chsh -s $(which zsh) ${NB_USER}
31 |
32 | EXPOSE 22
33 |
34 | # Switch back to jovyan to avoid accidental container runs as root
35 | USER ${NB_UID}
36 | # key=value ENV form (the space-separated form is legacy)
37 | ENV HOME=/home/${NB_USER}
38 | # WORKDIR "${HOME}"
39 |
40 | # not running any commands yet
41 | # ENTRYPOINT []
42 | CMD ["/usr/local/bin/start-services.sh"]
43 | # CMD ["/bin/bash"]
--------------------------------------------------------------------------------
/rsm-simple-intel/Dockerfile:
--------------------------------------------------------------------------------
1 | # x86_64-ubuntu-22.04 7/25/2024
2 | FROM quay.io/jupyter/pyspark-notebook@sha256:fb5e83562d196ead71f31e4ae1e37c6fe13e1c13c3cbf55d64ffc6166f0aedb7
3 |
4 | # use the documented key=value LABEL form; the legacy space-separated form
5 | # produced a label keyed "Vincent" instead of a maintainer label
6 | LABEL maintainer="Vincent Nijs <radiant@rady.ucsd.edu>"
7 |
8 | # Fix DL4006: abort RUN steps when any command in a pipe fails
9 | SHELL ["/bin/bash", "-o", "pipefail", "-c"]
10 |
11 | USER root
12 |
13 | # fixes the issue where sudo requires a terminal for password when starting postgres
14 | RUN echo "${NB_USER} ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
15 |
16 | COPY files/start-services-simplified.sh /usr/local/bin/start-services.sh
17 | RUN chmod +x /usr/local/bin/start-services.sh
18 |
19 | # setting up the ssh daemon: key-based logins only, no root login,
20 | # and zsh as the default shell for ${NB_USER}
21 | RUN mkdir -p /var/run/sshd \
22 |     && ssh-keygen -A \
23 |     && echo 'PasswordAuthentication no' >> /etc/ssh/sshd_config \
24 |     && echo 'PermitRootLogin no' >> /etc/ssh/sshd_config \
25 |     && echo 'PubkeyAuthentication yes' >> /etc/ssh/sshd_config \
26 |     && echo "AllowUsers ${NB_USER}" >> /etc/ssh/sshd_config \
27 |     && chmod 0755 /var/run/sshd \
28 |     && chsh -s $(which zsh) ${NB_USER}
29 |
30 | EXPOSE 22
31 |
32 | # Switch back to jovyan to avoid accidental container runs as root
33 | USER ${NB_UID}
34 | # key=value ENV form (the space-separated form is legacy)
35 | ENV HOME=/home/${NB_USER}
36 | # WORKDIR "${HOME}"
37 |
38 | # not running any commands yet
39 | ENTRYPOINT []
40 | # CMD ["/usr/local/bin/start-services.sh"]
41 | CMD ["/bin/bash"]
--------------------------------------------------------------------------------
/scripts/build-images.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Build (and optionally push) the rsm-* docker images for the current platform.
4 |
5 | # git pull
6 | docker login
7 |
8 | # mkdir -vp ~/.docker/cli-plugins/
9 | # curl --silent -L "https://github.com/docker/buildx/releases/download/v0.6.3/buildx-v0.6.3.linux-amd64" > ~/.docker/cli-plugins/docker-buildx
10 | # curl --silent -L "https://github.com/docker/buildx/releases/download/v0.6.3/buildx-v0.6.3.linux-arm64" > ~/.docker/cli-plugins/docker-buildx
11 | # chmod a+x ~/.docker/cli-plugins/docker-buildx
12 |
13 | # DOCKERHUB_VERSION=3.1.0
14 | DOCKERHUB_VERSION=0.0.1
15 | JHUB_VERSION=0.0.1
16 | DOCKERHUB_USERNAME=vnijs
17 | # toggle by (un)commenting; only the last assignment takes effect
18 | # UPLOAD="NO"
19 | UPLOAD="YES"
20 |
21 | DUAL="NO"
22 | # DUAL="YES"
23 |
24 | # build for the native platform only unless DUAL is switched on below
25 | if [ "$(uname -m)" = "arm64" ]; then
26 |   ARCH="linux/arm64"
27 | else
28 |   ARCH="linux/amd64"
29 |   # ARCH="linux/amd64,linux/arm64"
30 | fi
31 |
32 | # build [NO]
33 | # builds image ${LABEL}; pass "NO" as $1 to build with --no-cache
34 | build () {
35 |   {
36 |     ## using buildx to create multi-platform images
37 |     ## run commands below the first time you build for platforms
38 |     # docker buildx create --use
39 |     # docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
40 |     # docker buildx rm builder
41 |     # docker buildx create --name builder --driver docker-container --use
42 |
43 |     # from Code Interpreter - May not need the above anymore
44 |     # docker buildx create --name mybuilder --driver docker-container --use
45 |     # docker buildx inspect --bootstrap
46 |
47 |     # docker buildx create --name mybuilder --use --driver-opt network=host --driver-opt storage-opt=size=100GB
48 |
49 |     # docker buildx create --use --name larger_log --driver-opt env.BUILDKIT_STEP_LOG_MAX_SIZE=90000000
50 |
51 |     if [[ "$1" == "NO" ]]; then
52 |       if [ "${DUAL}" == "YES" ]; then
53 |         ARCH="linux/amd64,linux/arm64"
54 |         docker buildx build --platform ${ARCH} --file "${LABEL}/Dockerfile" --build-arg DOCKERHUB_VERSION_UPDATE=${DOCKERHUB_VERSION} --no-cache --tag $DOCKERHUB_USERNAME/${LABEL}:latest --tag $DOCKERHUB_USERNAME/${LABEL}:$DOCKERHUB_VERSION . --push > build.log 2>&1
55 |       else
56 |         docker buildx build -f "${LABEL}/Dockerfile" --progress=plain --load --platform ${ARCH} --build-arg DOCKERHUB_VERSION_UPDATE=${DOCKERHUB_VERSION} --no-cache --tag $DOCKERHUB_USERNAME/${LABEL}:latest --tag $DOCKERHUB_USERNAME/${LABEL}:$DOCKERHUB_VERSION . > build.log 2>&1
57 |       fi
58 |     else
59 |       if [ "${DUAL}" == "YES" ]; then
60 |         ARCH="linux/amd64,linux/arm64"
61 |         docker buildx build --platform ${ARCH} --file "${LABEL}/Dockerfile" --build-arg DOCKERHUB_VERSION_UPDATE=${DOCKERHUB_VERSION} --tag $DOCKERHUB_USERNAME/${LABEL}:latest --tag $DOCKERHUB_USERNAME/${LABEL}:$DOCKERHUB_VERSION . --push > build.log 2>&1
62 |       else
63 |         # added a .dockerignore file so it ignores all . files (e.g., .git, .DS_Store, etc.)
64 |         # docker buildx build -f "${LABEL}/Dockerfile" --progress=plain --load --platform ${ARCH} --build-arg DOCKERHUB_VERSION_UPDATE=${DOCKERHUB_VERSION} --tag $DOCKERHUB_USERNAME/${LABEL}:latest --tag $DOCKERHUB_USERNAME/${LABEL}:$DOCKERHUB_VERSION . > build.log 2>&1
65 |         docker buildx build -f "${LABEL}/Dockerfile" --progress=plain --load --platform ${ARCH} --tag $DOCKERHUB_USERNAME/${LABEL}:latest --tag $DOCKERHUB_USERNAME/${LABEL}:$DOCKERHUB_VERSION . # > build.log 2>&1
66 |       fi
67 |     fi
68 |   } || {
69 |     echo "-----------------------------------------------------------------------"
70 |     echo "Docker build for ${LABEL} was not successful"
71 |     echo "-----------------------------------------------------------------------"
72 |     sleep 3s
73 |     exit 1
74 |   }
75 |   if [ "${UPLOAD}" == "YES" ]; then
76 |     # push under DOCKERHUB_USERNAME (not $USER): the build above tagged the
77 |     # image for the hub account, which need not match the local login name
78 |     docker tag $DOCKERHUB_USERNAME/${LABEL}:latest $DOCKERHUB_USERNAME/${LABEL}:${DOCKERHUB_VERSION}
79 |     docker push $DOCKERHUB_USERNAME/${LABEL}:${DOCKERHUB_VERSION}
80 |     docker push $DOCKERHUB_USERNAME/${LABEL}:latest
81 |   fi
82 | }
83 |
84 | # what os is being used; macOS sed needs an explicit (empty) backup suffix
85 | ostype=`uname`
86 | if [[ "$ostype" == "Darwin" ]]; then
87 |   sed_fun () {
88 |     sed -i '' -e $1 $2
89 |   }
90 | else
91 |   sed_fun () {
92 |     sed -i $1 $2
93 |   }
94 | fi
95 |
96 | # launcher base [pattern replacement]
97 | # copy launch-$1.sh to launch-${LABEL}.sh and rewrite its LABEL and filename;
98 | # optionally apply one extra sed substitution ($2 -> $3)
99 | launcher () {
100 |   cp -p ./launch-$1.sh ./launch-${LABEL}.sh
101 |   sed_fun "s/^LABEL=\"$1\"/LABEL=\"${LABEL}\"/" ./launch-${LABEL}.sh
102 |   sed_fun "s/launch-$1\.sh/launch-${LABEL}\.sh/" ./launch-${LABEL}.sh
103 |   if [ "$2" != "" ] && [ "$3" != "" ]; then
104 |     sed_fun "s/$2/$3/" ./launch-${LABEL}.sh
105 |   fi
106 | }
107 |
108 | if [ "$(uname -m)" = "arm64" ]; then
109 |
110 |   # re-run as needed for arm, polars with sql files
111 |   # LABEL=connectorx
112 |   # build NO
113 |   # exit
114 |
115 |   # run simplified version of the build; NOTE: the exit below makes the
116 |   # rsm-msba-arm build unreachable until removed
117 |   LABEL=rsm-simple-arm
118 |   build
119 |   exit
120 |
121 |   LABEL=rsm-msba-arm
122 |   build
123 | else
124 |
125 |   # run simplified version of the build; NOTE: the exit below makes the
126 |   # rsm-msba-intel build unreachable until removed
127 |   LABEL=rsm-simple-intel
128 |   build
129 |   exit
130 |
131 |   LABEL=rsm-msba-intel
132 |   build
133 |
134 |   # ## replace 127.0.0.1 by 0.0.0.0 for ChromeOS
135 |   # cp -p ./launch-rsm-msba-intel.sh ./launch-rsm-msba-intel-chromeos.sh
136 |   # sed_fun "s/127.0.0.1/0.0.0.0/g" ./launch-rsm-msba-intel-chromeos.sh
137 |   # sed_fun "s/ostype=\"Linux\"/ostype=\"ChromeOS\"/" ./launch-rsm-msba-intel-chromeos.sh
138 |
139 |   # LABEL=rsm-msba-intel-jupyterhub
140 |   # build
141 |
142 |   ## new containers should be launched using the newest version of the container
143 |   # docker tag vnijs/rsm-msba-intel-jupyterhub:$JHUB_VERSION jupyterhub-user
144 |
145 |   ## new containers should be launched using the newest version of the container
146 |   # docker tag vnijs/rsm-msba-intel-jupyterhub:latest jupyterhub-test-user
147 | fi
148 |
149 | ## to connect on a server use
150 | # ssh -t vnijs@rsm-compute-01.ucsd.edu docker run -it -v ~:/home/jovyan vnijs/rsm-msba-intel-jupyterhub /bin/bash;
--------------------------------------------------------------------------------
/scripts/dclean.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## based on https://stackoverflow.com/a/32723285/1974918
4 |
5 | # stop any containers that are still running
6 | running=$(docker ps -q)
7 | if [ "${running}" != "" ]; then
8 |   echo "Stopping running containers ..."
9 |   docker stop ${running}
10 | else
11 |   echo "No running containers"
12 | fi
13 |
14 | # -q prints image ids only; the previous `docker images | awk '{print $3}'`
15 | # also captured the literal word "IMAGE" from the header row, making
16 | # `docker rmi` error out
17 | imgs=$(docker images -q)
18 | if [ "${imgs}" != "" ]; then
19 |   echo "Removing images ..."
20 |   docker rmi ${imgs}
21 | else
22 |   echo "No images to remove"
23 | fi
24 |
25 | # remove any stopped/exited containers that remain
26 | procs=$(docker ps -a -q --no-trunc)
27 | if [ "${procs}" != "" ]; then
28 |   echo "Removing errant docker processes ..."
29 |   docker rm ${procs}
30 | else
31 |   echo "No processes to purge"
32 | fi
--------------------------------------------------------------------------------
/scripts/dprune.sh:
--------------------------------------------------------------------------------
1 |
2 | REF="vnijs/r-bionic"
3 | # REF="vnijs/radiant"
4 | # REF="vnijs/rsm-msba"
5 | # REF="vnijs/rsm-msba-spark"
6 | REF="vnijs/rsm-msba-intel-jupyterhub"
7 | # REF="vnijs/rsm-vscode"
8 | TAG="2.2.0"
9 |
10 | docker image ls --filter reference=$REF --filter before=$REF:$TAG
11 |
12 | echo "------------------------------------------"
13 | echo "Remove listed docker images? (yes/no)"
14 | echo "------------------------------------------"
15 | read remove_old
16 | if [ $remove_old == "yes" ]; then
17 | docker rmi $(docker image ls -q --all --filter reference=$REF --filter before=$REF:$TAG)
18 | fi
19 |
--------------------------------------------------------------------------------
/scripts/pull-containers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## pull the latest version of all images generated from radiant-stats/docker
4 | # (arrays are a bash feature, so the shebang above is required)
5 | img_list=(rsm-msba-arm rsm-msba-intel rsm-msba-intel-jupyterhub)
6 |
7 | for img in "${img_list[@]}"; do
8 |   docker pull "vnijs/${img}"
9 | done
--------------------------------------------------------------------------------
/vscode/extension-install.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # install every VS Code extension listed in extensions.txt (one id per line);
4 | # fetch the list from the repo if it is not present locally
5 | if [ ! -f extensions.txt ]; then
6 |   wget https://raw.githubusercontent.com/radiant-rstats/docker/master/vscode/extensions.txt
7 | fi
8 |
9 | # IFS= and -r read each line verbatim (no backslash mangling); the
10 | # `|| [[ -n $extension ]]` clause still processes a final line that
11 | # lacks a trailing newline
12 | while IFS= read -r extension || [[ -n $extension ]]; do
13 |   code --install-extension "$extension" --force
14 | done < extensions.txt
--------------------------------------------------------------------------------
/vscode/extensions.txt:
--------------------------------------------------------------------------------
1 | alefragnani.project-manager
2 | alexcvzz.vscode-sqlite
3 | christian-kohler.path-intellisense
4 | ckolkman.vscode-postgres
5 | RandomFractalsInc.duckdb-sql-tools
6 | eamodio.gitlens
7 | GitHub.copilot
8 | GitHub.copilot-chat
9 | mechatroner.rainbow-csv
10 | medo64.render-crlf
11 | ms-azuretools.vscode-docker
12 | ms-python.flake8
13 | ms-python.python
14 | ms-python.black-formatter
15 | ms-python.vscode-pylance
16 | ms-toolsai.jupyter
17 | ms-toolsai.jupyter-keymap
18 | ms-toolsai.jupyter-renderers
19 | ms-vscode-remote.remote-containers
20 | ms-vscode-remote.vscode-remote-extensionpack
21 | ms-vscode-remote.remote-ssh
22 | ms-vscode-remote.remote-ssh-edit
23 | ms-vscode-remote.remote-wsl
24 | ms-vscode.remote-explorer
25 | ms-vscode.live-server
26 | ms-vscode.vscode-speech
27 | ms-vsliveshare.vsliveshare
28 | quarto.quarto
29 | REditorSupport.r
30 | RDebugger.r-debugger
31 | streetsidesoftware.code-spell-checker
32 | tomoki1207.pdf
33 | Posit.shiny-python
34 | GitHub.vscode-pull-request-github
35 | GitHub.remotehub
--------------------------------------------------------------------------------
/vscode/settings-vscode.json:
--------------------------------------------------------------------------------
1 | {
2 | "terminal.integrated.fontFamily": "MesloLGS NF",
3 | "terminal.integrated.defaultProfile.osx": "zsh",
4 | "notebook.lineNumbers": "on",
5 | "jupyter.askForKernelRestart": false,
6 | "jupyter.sendSelectionToInteractiveWindow": true,
7 | "git.confirmSync": false,
8 | "git.enableSmartCommit": true,
9 | "python.formatting.provider": "black",
10 | "jupyter.pylanceHandlesNotebooks": true,
11 | "terminal.external.osxExec": "iTerm.app",
12 | "terminal.integrated.env.osx": {
13 | "RSTUDIO_PANDOC": "/Applications/RStudio.app/Contents/MacOS/pandoc"
14 | },
15 | "editor.formatOnSave": true,
16 | "python.experiments.enabled": false,
17 | "python.globalModuleInstallation": true,
18 | "python.testing.pytestEnabled": true,
19 | "code-eol.highlightNonDefault": true,
20 | "code-eol.highlightExtraWhitespace": true,
21 | "code-eol.decorateBeforeEol": true,
22 | "editor.renderWhitespace": "all",
23 | "code-eol.newlineCharacter": "¬",
24 | "code-eol.returnCharacter": "¤",
25 | "code-eol.crlfCharacter": "¤¬",
26 | "projectManager.git.baseFolders": [
27 | "~/git"
28 | ],
29 | "editor.bracketPairColorization.enabled": true,
30 | "jupyter.alwaysScrollOnNewCell": true,
31 | "quarto.mathjax.theme": "dark",
32 | "jupyter.jupyterServerType": "local",
33 | "r.plot.useHttpgd": true,
34 | "r.bracketedPaste": true,
35 | "r.rpath.linux": "/usr/local/bin/R",
36 | "r.rpath.mac": "/usr/local/bin/R",
37 | "r.rterm.mac": "/opt/conda/bin/radian",
38 | "r.alwaysUseActiveTerminal": true,
39 | "r.rterm.linux": "/opt/conda/bin/radian"
40 | }
--------------------------------------------------------------------------------
/vscode/settings-windows-terminal.json:
--------------------------------------------------------------------------------
1 | {
2 | "$help": "https://aka.ms/terminal-documentation",
3 | "$schema": "https://aka.ms/terminal-profiles-schema",
4 | "actions": [
5 | {
6 | "command": {
7 | "action": "commandPalette"
8 | },
9 | "keys": "ctrl+shift+p"
10 | },
11 | {
12 | "command": {
13 | "action": "copy",
14 | "singleLine": false
15 | },
16 | "keys": "ctrl+c"
17 | },
18 | {
19 | "command": "find",
20 | "keys": "ctrl+shift+f"
21 | },
22 | {
23 | "command": "paste",
24 | "keys": "ctrl+v"
25 | },
26 | {
27 | "command": {
28 | "action": "splitPane",
29 | "split": "auto",
30 | "splitMode": "duplicate"
31 | },
32 | "keys": "alt+shift+d"
33 | }
34 | ],
35 | "copyFormatting": "none",
36 | "copyOnSelect": false,
37 | "defaultProfile": "{07b52e3e-de2c-5db4-bd2d-ba144ed6c273}",
38 | "initialCols": 100,
39 | "profiles": {
40 | "defaults": {
41 | "closeOnExit": "graceful"
42 | },
43 | "list": [
44 | {
45 | "font": {
46 | "face": "MesloLGS NF"
47 | },
48 | "guid": "{61c54bbd-c2c6-5271-96e7-009a87ff44bf}",
49 | "hidden": false,
50 | "name": "Windows PowerShell"
51 | },
52 | {
53 | "guid": "{0caa0dad-35be-5f56-a8ff-afceeeaa6101}",
54 | "hidden": false,
55 | "name": "Command Prompt"
56 | },
57 | {
58 | "guid": "{b453ae62-4e3d-5e58-b989-0a998ec441b8}",
59 | "hidden": false,
60 | "name": "Azure Cloud Shell",
61 | "source": "Windows.Terminal.Azure"
62 | },
63 | {
64 | "font": {
65 | "face": "MesloLGS NF"
66 | },
67 | "guid": "{07b52e3e-de2c-5db4-bd2d-ba144ed6c273}",
68 | "hidden": false,
69 | "name": "Ubuntu-22.04",
70 | "source": "Windows.Terminal.Wsl",
71 | "startingDirectory": "\\\\wsl$\\Ubuntu-22.04\\home\\vnijs"
72 | },
73 | {
74 | "commandline": "%windir%\\System32\\WindowsPowerShell\\v1.0\\powershell.exe -ExecutionPolicy ByPass -NoExit -Command \"& '%USERPROFILE%\\Miniconda3\\shell\\condabin\\conda-hook.ps1' ; conda activate '%USERPROFILE%\\Miniconda3' \"",
75 | "guid": "{1caa0dad-35be-5f56-a812-afceeeaa1234}",
76 | "hidden": false,
77 | "icon": "%USERPROFILE%\\Miniconda3\\Menu\\Iconleak-Atrous-PSConsole.ico",
78 | "name": "Miniconda",
79 | "startingDirectory": "%HOMEPATH%"
80 | }
81 | ]
82 | },
83 | "schemes": [
84 | {
85 | "background": "#0C0C0C",
86 | "black": "#0C0C0C",
87 | "blue": "#0037DA",
88 | "brightBlack": "#767676",
89 | "brightBlue": "#3B78FF",
90 | "brightCyan": "#61D6D6",
91 | "brightGreen": "#16C60C",
92 | "brightPurple": "#B4009E",
93 | "brightRed": "#E74856",
94 | "brightWhite": "#F2F2F2",
95 | "brightYellow": "#F9F1A5",
96 | "cursorColor": "#FFFFFF",
97 | "cyan": "#3A96DD",
98 | "foreground": "#CCCCCC",
99 | "green": "#13A10E",
100 | "name": "Campbell",
101 | "purple": "#881798",
102 | "red": "#C50F1F",
103 | "selectionBackground": "#FFFFFF",
104 | "white": "#CCCCCC",
105 | "yellow": "#C19C00"
106 | },
107 | {
108 | "background": "#012456",
109 | "black": "#0C0C0C",
110 | "blue": "#0037DA",
111 | "brightBlack": "#767676",
112 | "brightBlue": "#3B78FF",
113 | "brightCyan": "#61D6D6",
114 | "brightGreen": "#16C60C",
115 | "brightPurple": "#B4009E",
116 | "brightRed": "#E74856",
117 | "brightWhite": "#F2F2F2",
118 | "brightYellow": "#F9F1A5",
119 | "cursorColor": "#FFFFFF",
120 | "cyan": "#3A96DD",
121 | "foreground": "#CCCCCC",
122 | "green": "#13A10E",
123 | "name": "Campbell Powershell",
124 | "purple": "#881798",
125 | "red": "#C50F1F",
126 | "selectionBackground": "#FFFFFF",
127 | "white": "#CCCCCC",
128 | "yellow": "#C19C00"
129 | },
130 | {
131 | "background": "#282C34",
132 | "black": "#282C34",
133 | "blue": "#61AFEF",
134 | "brightBlack": "#5A6374",
135 | "brightBlue": "#61AFEF",
136 | "brightCyan": "#56B6C2",
137 | "brightGreen": "#98C379",
138 | "brightPurple": "#C678DD",
139 | "brightRed": "#E06C75",
140 | "brightWhite": "#DCDFE4",
141 | "brightYellow": "#E5C07B",
142 | "cursorColor": "#FFFFFF",
143 | "cyan": "#56B6C2",
144 | "foreground": "#DCDFE4",
145 | "green": "#98C379",
146 | "name": "One Half Dark",
147 | "purple": "#C678DD",
148 | "red": "#E06C75",
149 | "selectionBackground": "#FFFFFF",
150 | "white": "#DCDFE4",
151 | "yellow": "#E5C07B"
152 | },
153 | {
154 | "background": "#FAFAFA",
155 | "black": "#383A42",
156 | "blue": "#0184BC",
157 | "brightBlack": "#4F525D",
158 | "brightBlue": "#61AFEF",
159 | "brightCyan": "#56B5C1",
160 | "brightGreen": "#98C379",
161 | "brightPurple": "#C577DD",
162 | "brightRed": "#DF6C75",
163 | "brightWhite": "#FFFFFF",
164 | "brightYellow": "#E4C07A",
165 | "cursorColor": "#4F525D",
166 | "cyan": "#0997B3",
167 | "foreground": "#383A42",
168 | "green": "#50A14F",
169 | "name": "One Half Light",
170 | "purple": "#A626A4",
171 | "red": "#E45649",
172 | "selectionBackground": "#FFFFFF",
173 | "white": "#FAFAFA",
174 | "yellow": "#C18301"
175 | },
176 | {
177 | "background": "#002B36",
178 | "black": "#002B36",
179 | "blue": "#268BD2",
180 | "brightBlack": "#073642",
181 | "brightBlue": "#839496",
182 | "brightCyan": "#93A1A1",
183 | "brightGreen": "#586E75",
184 | "brightPurple": "#6C71C4",
185 | "brightRed": "#CB4B16",
186 | "brightWhite": "#FDF6E3",
187 | "brightYellow": "#657B83",
188 | "cursorColor": "#FFFFFF",
189 | "cyan": "#2AA198",
190 | "foreground": "#839496",
191 | "green": "#859900",
192 | "name": "Solarized Dark",
193 | "purple": "#D33682",
194 | "red": "#DC322F",
195 | "selectionBackground": "#FFFFFF",
196 | "white": "#EEE8D5",
197 | "yellow": "#B58900"
198 | },
199 | {
200 | "background": "#FDF6E3",
201 | "black": "#002B36",
202 | "blue": "#268BD2",
203 | "brightBlack": "#073642",
204 | "brightBlue": "#839496",
205 | "brightCyan": "#93A1A1",
206 | "brightGreen": "#586E75",
207 | "brightPurple": "#6C71C4",
208 | "brightRed": "#CB4B16",
209 | "brightWhite": "#FDF6E3",
210 | "brightYellow": "#657B83",
211 | "cursorColor": "#002B36",
212 | "cyan": "#2AA198",
213 | "foreground": "#657B83",
214 | "green": "#859900",
215 | "name": "Solarized Light",
216 | "purple": "#D33682",
217 | "red": "#DC322F",
218 | "selectionBackground": "#FFFFFF",
219 | "white": "#EEE8D5",
220 | "yellow": "#B58900"
221 | },
222 | {
223 | "background": "#000000",
224 | "black": "#000000",
225 | "blue": "#3465A4",
226 | "brightBlack": "#555753",
227 | "brightBlue": "#729FCF",
228 | "brightCyan": "#34E2E2",
229 | "brightGreen": "#8AE234",
230 | "brightPurple": "#AD7FA8",
231 | "brightRed": "#EF2929",
232 | "brightWhite": "#EEEEEC",
233 | "brightYellow": "#FCE94F",
234 | "cursorColor": "#FFFFFF",
235 | "cyan": "#06989A",
236 | "foreground": "#D3D7CF",
237 | "green": "#4E9A06",
238 | "name": "Tango Dark",
239 | "purple": "#75507B",
240 | "red": "#CC0000",
241 | "selectionBackground": "#FFFFFF",
242 | "white": "#D3D7CF",
243 | "yellow": "#C4A000"
244 | },
245 | {
246 | "background": "#FFFFFF",
247 | "black": "#000000",
248 | "blue": "#3465A4",
249 | "brightBlack": "#555753",
250 | "brightBlue": "#729FCF",
251 | "brightCyan": "#34E2E2",
252 | "brightGreen": "#8AE234",
253 | "brightPurple": "#AD7FA8",
254 | "brightRed": "#EF2929",
255 | "brightWhite": "#EEEEEC",
256 | "brightYellow": "#FCE94F",
257 | "cursorColor": "#000000",
258 | "cyan": "#06989A",
259 | "foreground": "#555753",
260 | "green": "#4E9A06",
261 | "name": "Tango Light",
262 | "purple": "#75507B",
263 | "red": "#CC0000",
264 | "selectionBackground": "#FFFFFF",
265 | "white": "#D3D7CF",
266 | "yellow": "#C4A000"
267 | },
268 | {
269 | "background": "#000000",
270 | "black": "#000000",
271 | "blue": "#000080",
272 | "brightBlack": "#808080",
273 | "brightBlue": "#0000FF",
274 | "brightCyan": "#00FFFF",
275 | "brightGreen": "#00FF00",
276 | "brightPurple": "#FF00FF",
277 | "brightRed": "#FF0000",
278 | "brightWhite": "#FFFFFF",
279 | "brightYellow": "#FFFF00",
280 | "cursorColor": "#FFFFFF",
281 | "cyan": "#008080",
282 | "foreground": "#C0C0C0",
283 | "green": "#008000",
284 | "name": "Vintage",
285 | "purple": "#800080",
286 | "red": "#800000",
287 | "selectionBackground": "#FFFFFF",
288 | "white": "#C0C0C0",
289 | "yellow": "#808000"
290 | }
291 | ]
292 | }
--------------------------------------------------------------------------------