├── dash ├── app │ ├── __init__.py │ ├── mtcars.csv │ └── app.py └── Dockerfile ├── shiny ├── Dockerfile └── app.R ├── jupyter └── Dockerfile ├── nginx.Dockerfile ├── shinyproxy.Dockerfile ├── setup.sh ├── docker-compose.yml ├── nginx.conf ├── README.md ├── rstudio ├── Dockerfile └── start.sh └── application.yml /dash/app/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /shiny/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rocker/shiny 2 | 3 | COPY app.R app.R 4 | 5 | CMD ["R", "-e", "shiny::runApp('app.R', host='0.0.0.0', port=3838)"] 6 | -------------------------------------------------------------------------------- /jupyter/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jupyter/datascience-notebook 2 | 3 | USER root 4 | 5 | CMD ["start.sh", "jupyter", "lab", "--LabApp.token=''", "--LabApp.ip='0.0.0.0'", "--LabApp.allow_origin='same'"] 6 | 7 | 8 | -------------------------------------------------------------------------------- /nginx.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM nginx 2 | 3 | RUN apt-get update && \ 4 | apt-get install -y openssl 5 | 6 | COPY nginx.conf /etc/nginx/nginx.conf 7 | 8 | RUN openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout /etc/nginx/localhost.key -out /etc/nginx/localhost.crt -subj "/C=US/ST=CA/O=RKDC/OU=Shinyproxy/CN=localhost" 9 | 10 | -------------------------------------------------------------------------------- /shinyproxy.Dockerfile: -------------------------------------------------------------------------------- 1 | FROM openjdk:8-jre 2 | 3 | RUN mkdir -p /opt/shinyproxy/ 4 | RUN wget https://www.shinyproxy.io/downloads/shinyproxy-2.1.0.jar -O /opt/shinyproxy/shinyproxy.jar 5 | 6 | COPY application.yml 
/opt/shinyproxy/application.yml 7 | 8 | 9 | WORKDIR /opt/shinyproxy/ 10 | CMD ["java", "-jar", "/opt/shinyproxy/shinyproxy.jar"] 11 | -------------------------------------------------------------------------------- /setup.sh: -------------------------------------------------------------------------------- 1 | docker build -t example_shiny ./shiny 2 | docker build -t example_dash ./dash 3 | docker build -t rstudio ./rstudio 4 | docker build -t jupyter ./jupyter 5 | 6 | groupadd -g 1020 docker_worker 7 | useradd -s /bin/false -u 1010 -g 1020 docker_worker 8 | 9 | sudo mkdir -p /home/users/roz 10 | sudo mkdir -p /home/users/mew 11 | 12 | sudo chown -R docker_worker:docker_worker /home/users 13 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | 4 | nginx: 5 | # container_name: nginx 6 | build: 7 | context: . 8 | dockerfile: nginx.Dockerfile 9 | ports: 10 | - 8080:8080 11 | restart: always 12 | 13 | 14 | shinyproxy: 15 | build: 16 | context: . 
17 | dockerfile: shinyproxy.Dockerfile 18 | depends_on: 19 | - nginx 20 | container_name: shinyproxy_server 21 | volumes: 22 | - /var/run/docker.sock:/var/run/docker.sock 23 | 24 | networks: 25 | default: 26 | external: 27 | name: sp-net 28 | 29 | 30 | -------------------------------------------------------------------------------- /dash/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:18.04 2 | 3 | RUN apt-get update && apt-get install -y python3 python3-pip 4 | 5 | # Dash and dependencies 6 | RUN pip3 install dash==0.39.0 # The core dash backend 7 | RUN pip3 install dash-renderer==0.20.0 # The dash front-end 8 | RUN pip3 install dash-html-components==0.14.0 # HTML components 9 | RUN pip3 install dash-core-components==0.44.0 # Supercharged components 10 | RUN pip3 install dash-daq==0.1.4 11 | 12 | RUN pip3 install pandas==0.24.1 13 | RUN pip3 install numpy==1.16.2 14 | 15 | RUN pip3 install scikit-learn 16 | RUN pip3 install gunicorn 17 | 18 | RUN mkdir app 19 | COPY app/* /app/ 20 | 21 | EXPOSE 8050 22 | 23 | WORKDIR /app 24 | CMD ["gunicorn","-b", "0.0.0.0:8050", "app:server"] 25 | -------------------------------------------------------------------------------- /nginx.conf: -------------------------------------------------------------------------------- 1 | worker_processes 1; 2 | events { worker_connections 1024; } 3 | 4 | http { 5 | sendfile on; 6 | 7 | upstream shinyproxy_server { 8 | server shinyproxy_server:8080; 9 | } 10 | 11 | server { 12 | 13 | listen 8080; 14 | 15 | #ssl_protocols TLSv1 TLSv1.1 TLSv1.2; 16 | 17 | #ssl_certificate /etc/nginx/localhost.crt; 18 | #ssl_certificate_key /etc/nginx/localhost.key; 19 | 20 | location / { 21 | proxy_pass http://shinyproxy_server; 22 | 23 | proxy_http_version 1.1; 24 | proxy_set_header Upgrade $http_upgrade; 25 | proxy_set_header Connection "upgrade"; 26 | proxy_read_timeout 600s; 27 | 28 | proxy_redirect off; 29 | proxy_set_header Host $http_host; 30 | 
proxy_set_header X-Real-IP $remote_addr; 31 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 32 | proxy_set_header X-Forwarded-Protocol $scheme; 33 | 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /dash/app/mtcars.csv: -------------------------------------------------------------------------------- 1 | "mpg","cyl","disp","hp","drat","wt","qsec","vs","am","gear","carb" 2 | 21,"6",160,110,3.9,2.62,16.46,0,1,4,4 3 | 21,"6",160,110,3.9,2.875,17.02,0,1,4,4 4 | 22.8,"4",108,93,3.85,2.32,18.61,1,1,4,1 5 | 21.4,"6",258,110,3.08,3.215,19.44,1,0,3,1 6 | 18.7,"8",360,175,3.15,3.44,17.02,0,0,3,2 7 | 18.1,"6",225,105,2.76,3.46,20.22,1,0,3,1 8 | 14.3,"8",360,245,3.21,3.57,15.84,0,0,3,4 9 | 24.4,"4",146.7,62,3.69,3.19,20,1,0,4,2 10 | 22.8,"4",140.8,95,3.92,3.15,22.9,1,0,4,2 11 | 19.2,"6",167.6,123,3.92,3.44,18.3,1,0,4,4 12 | 17.8,"6",167.6,123,3.92,3.44,18.9,1,0,4,4 13 | 16.4,"8",275.8,180,3.07,4.07,17.4,0,0,3,3 14 | 17.3,"8",275.8,180,3.07,3.73,17.6,0,0,3,3 15 | 15.2,"8",275.8,180,3.07,3.78,18,0,0,3,3 16 | 10.4,"8",472,205,2.93,5.25,17.98,0,0,3,4 17 | 10.4,"8",460,215,3,5.424,17.82,0,0,3,4 18 | 14.7,"8",440,230,3.23,5.345,17.42,0,0,3,4 19 | 32.4,"4",78.7,66,4.08,2.2,19.47,1,1,4,1 20 | 30.4,"4",75.7,52,4.93,1.615,18.52,1,1,4,2 21 | 33.9,"4",71.1,65,4.22,1.835,19.9,1,1,4,1 22 | 21.5,"4",120.1,97,3.7,2.465,20.01,1,0,3,1 23 | 15.5,"8",318,150,2.76,3.52,16.87,0,0,3,2 24 | 15.2,"8",304,150,3.15,3.435,17.3,0,0,3,2 25 | 13.3,"8",350,245,3.73,3.84,15.41,0,0,3,4 26 | 19.2,"8",400,175,3.08,3.845,17.05,0,0,3,2 27 | 27.3,"4",79,66,4.08,1.935,18.9,1,1,4,1 28 | 26,"4",120.3,91,4.43,2.14,16.7,0,1,5,2 29 | 30.4,"4",95.1,113,3.77,1.513,16.9,1,1,5,2 30 | 15.8,"8",351,264,4.22,3.17,14.5,0,1,5,4 31 | 19.7,"6",145,175,3.62,2.77,15.5,0,1,5,6 32 | 15,"8",301,335,3.54,3.57,14.6,0,1,5,8 33 | 21.4,"4",121,109,4.11,2.78,18.6,1,1,4,2 34 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # Data Science Portal 2 | 3 | A portal to RStudio and Jupyter with ability to host Shiny and Dash apps. 4 | 5 | ## Background 6 | 7 | This uses [shinyproxy](https://www.shinyproxy.io/) under the hood to manage login, authentication, etc. This assumes you'll be running on a Linux system. 8 | 9 | ## Getting started 10 | 11 | 1. Make sure `docker` and `docker-compose` are installed. 12 | 2. Clone this repo 13 | 3. Build docker images for example apps 14 | 15 | ``` sh 16 | docker build -t example_shiny ./shiny 17 | docker build -t example_dash ./dash 18 | docker build -t rstudio ./rstudio 19 | docker build -t jupyter ./jupyter 20 | ``` 21 | 22 | 4. Create a docker user/group for managing mounted home dirs in rstudio and jupyter. [more info](https://blog.stefanproell.at/2018/08/08/jupyter-docker-stacks-with-a-custom-user/) 23 | 24 | ``` sh 25 | groupadd -g 1020 docker_worker 26 | useradd -s /bin/false -u 1010 -g 1020 docker_worker 27 | ``` 28 | 29 | Give the users home directories and pass ownership to the `docker_worker` 30 | 31 | ``` 32 | # these are my cats - they are data scientists too... 33 | sudo mkdir -p /home/users/roz 34 | sudo mkdir -p /home/users/mew 35 | sudo chown -R docker_worker:docker_worker /home/users 36 | ``` 37 | 38 | 5. Add users and passwords for these users in `application.yml` 39 | 40 | 6. 
Run `docker-compose up` to start the data science portal 41 | 42 | ``` sh 43 | docker compose up 44 | ``` 45 | 46 | ## Some Notes 47 | 48 | To get the terminal working in RStudio: `Tools` > `Global Options` > `Terminal` and uncheck `Connect with Websockets` 49 | 50 | -------------------------------------------------------------------------------- /rstudio/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rocker/ropensci 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y \ 5 | libopenmpi-dev \ 6 | libzmq3-dev 7 | 8 | RUN Rscript -e "install.packages(c('littler', 'docopt'), repo = 'https://cloud.r-project.org', lib='/usr/local/lib/R/site-library')" 9 | 10 | ## Legacy (snow is deprecated) 11 | RUN /usr/local/lib/R/site-library/littler/examples/install2.r --error snow doSNOW 12 | 13 | ## MPI 14 | RUN /usr/local/lib/R/site-library/littler/examples/install2.r --error Rmpi 15 | 16 | ## Random Number Generation (RNG) 17 | RUN /usr/local/lib/R/site-library/littler/examples/install2.r --error rlecuyer 18 | 19 | ## The foreach ecosystem 20 | RUN /usr/local/lib/R/site-library/littler/examples/install2.r --error foreach iterators 21 | RUN /usr/local/lib/R/site-library/littler/examples/install2.r --error doParallel doMC doRNG 22 | 23 | ## The future ecosystem 24 | RUN /usr/local/lib/R/site-library/littler/examples/install2.r --error future future.apply doFuture future.callr furrr 25 | 26 | RUN /usr/local/lib/R/site-library/littler/examples/install2.r --error BatchJobs future.BatchJobs ## heavy set of dependencies 27 | RUN /usr/local/lib/R/site-library/littler/examples/install2.r --error batchtools future.batchtools ## heavy set of dependencies 28 | RUN /usr/local/lib/R/site-library/littler/examples/install2.r --error clustermq ## heavy set of dependencies 29 | 30 | RUN echo "www-frame-origin=same" >> /etc/rstudio/disable_auth_rserver.conf 31 | 32 | EXPOSE 8787 33 | 34 | ARG NB_USER="rstudio" 35 | ARG NB_UID="1000" 36 | ARG 
NB_GID="100" 37 | ENV PATH=$PATH:/usr/local/bin/:/usr/lib/rstudio-server/bin/ 38 | 39 | USER root 40 | COPY start.sh /usr/local/bin/ 41 | RUN chmod +x /usr/local/bin/start.sh 42 | 43 | CMD ["/usr/local/bin/start.sh", "/usr/lib/rstudio-server/bin/rserver", "--server-daemonize=0", "--auth-none=1", "--auth-validate-users=0", "--www-frame-origin=same"] 44 | -------------------------------------------------------------------------------- /shiny/app.R: -------------------------------------------------------------------------------- 1 | library(shiny) 2 | 3 | # load our data 4 | data(mtcars) 5 | 6 | # make cyl a factor 7 | mtcars$cyl <- as.factor(mtcars$cyl) 8 | 9 | # run our regression 10 | fit <- lm(mpg ~ cyl + disp + qsec + am, data = mtcars) 11 | 12 | preds <- function(fit, disp, qsec, cyl, am){ 13 | # get the predicted MPG from new data 14 | mpg <- predict(object=fit, 15 | newdata = data.frame( 16 | cyl=factor(cyl, levels=c('4', '6', '8')), 17 | disp=disp, 18 | qsec=qsec, 19 | am=am)) 20 | 21 | # return as character string that can be easily rendered 22 | return(as.character(round(mpg, 2))) 23 | } 24 | 25 | app <- shinyApp(ui = fluidPage(title = 'Predicting MPG', 26 | # create inputs for each variable in the model 27 | sliderInput('disp', label = 'Displacement (in cubic inches)', 28 | min = floor(min(mtcars$disp)), 29 | max = ceiling(max(mtcars$disp)), 30 | value = floor(mean(mtcars$disp))), 31 | 32 | 33 | sliderInput('qsec', label='Quarter mile time', 34 | min = floor(min(mtcars$qsec)), 35 | max = ceiling(max(mtcars$qsec)), 36 | value = floor(mean(mtcars$qsec))), 37 | 38 | # this will return a character vector of length 1 39 | # that will get converted into a factor 40 | radioButtons('cyl', label='Number of cylinders', 41 | choices = levels(mtcars$cyl), 42 | inline=TRUE), 43 | 44 | # am is binary, 1/0, so we can coerse logical to integer 45 | checkboxInput('am', label='Has manual transmission'), 46 | 47 | # return our estimate 48 | h3("Predicted MPG: ", 
textOutput('prediction'))), 49 | 50 | 51 | server = function(input, output){ 52 | # pass our inputs to our prediction function defined earlier 53 | # and pass that result to the output 54 | output$prediction <- renderText({ 55 | preds(fit= fit, 56 | disp = input$disp, 57 | qsec = input$qsec, 58 | cyl = input$cyl, 59 | am = as.integer(input$am)) 60 | }) 61 | }) 62 | 63 | # and run it 64 | # runApp(app) 65 | 66 | 67 | -------------------------------------------------------------------------------- /application.yml: -------------------------------------------------------------------------------- 1 | proxy: 2 | title: R. King Data Consulting 3 | landing-page: / 4 | heartbeat-rate: 10000 5 | heartbeat-timeout: 60000 6 | port: 8080 7 | authentication: simple 8 | admin-groups: admin 9 | 10 | users: 11 | - name: roz 12 | password: password 13 | groups: admin 14 | - name: mew 15 | password: password 16 | groups: genusers 17 | 18 | 19 | docker: 20 | internal-networking: true 21 | # container-protocol: https 22 | 23 | specs: 24 | 25 | - id: rstudio 26 | display-name: RStudio Server 27 | description: An Instance of RStudio Server 28 | container-network: sp-net 29 | container-cmd: ["/usr/local/bin/start.sh", "/usr/lib/rstudio-server/bin/rserver", "--server-daemonize=0", "--auth-none=1","--auth-minimum-user-id=0", "--auth-validate-users=0", "--www-frame-origin=same"] 30 | container-volumes: [ "/home/users/#{proxy.UserId}/:/home/#{proxy.UserId}" ] 31 | container-image: rstudio 32 | container-env: 33 | DISABLE_AUTH: true 34 | USER: root 35 | NB_USER: "#{proxy.UserId}" 36 | NB_UID: 1010 37 | NB_GID: 1020 38 | CHOWN_HOME: 'yes' 39 | CHOWN_HOME_OPTS: -R 40 | port: 8787 41 | 42 | 43 | - id: jupyterlab 44 | display-name: Jupyter Lab 45 | description: Jupyter data science environment 46 | container-network: sp-net 47 | container-cmd: ["start.sh", 48 | "jupyter", "lab", 49 | "--LabApp.token=''", 50 | "--LabApp.ip='0.0.0.0'", 51 | "--LabApp.allow_origin='same'"] 52 | # container-cmd: 
["start.sh", 53 | # "jupyter", "notebook", 54 | # "--NotebookApp.token=''", 55 | # "--NotebookApp.ip='0.0.0.0'", 56 | # "--NotebookApp.allow_origin='same'"] 57 | container-volumes: [ "/home/users/#{proxy.UserId}:/home/#{proxy.UserId}" ] 58 | container-image: jupyter 59 | container-env: 60 | USER: root 61 | NB_USER: '#{proxy.UserId}' 62 | NB_UID: 1010 63 | NB_GID: 1020 64 | CHOWN_HOME: 'yes' 65 | CHOWN_HOME_OPTS: -R 66 | port: 8888 67 | 68 | 69 | - id: dash_example 70 | display-name: Dash mtcars 71 | description: Example Dash App 72 | container-cmd: ["gunicorn","-b", "0.0.0.0:8050", "app:server"] 73 | container-image: example_dash 74 | container-network: sp-net 75 | port: 8050 76 | 77 | 78 | - id: shiny_example 79 | display-name: Shiny mtcars 80 | description: Example Shiny App 81 | container-cmd: ["R", "-e", "shiny::runApp('app.R', host='0.0.0.0', port=3838)"] 82 | container-image: example_shiny 83 | container-network: sp-net 84 | port: 3838 85 | 86 | logging: 87 | file: 88 | shinyproxy.log 89 | level: 90 | io.undertow: WARN 91 | org.springframework.web.servlet: WARN 92 | com.spotify.docker: DEBUG 93 | eu.openanalytics: WARN 94 | root: WARN 95 | -------------------------------------------------------------------------------- /dash/app/app.py: -------------------------------------------------------------------------------- 1 | import dash 2 | import dash_core_components as dcc 3 | import dash_html_components as html 4 | import dash_daq as daq 5 | 6 | import flask 7 | 8 | import pandas as pd 9 | import numpy as np 10 | 11 | from sklearn.linear_model import LinearRegression 12 | from sklearn.preprocessing import OneHotEncoder 13 | 14 | # load our data 15 | mtcars = pd.read_csv('mtcars.csv', 16 | dtype={'cyl': str, 17 | 'am': np.float64}) 18 | 19 | # create and fit a one-hot encoder--we'll want to reuse this in the app as well 20 | cyl_enc = OneHotEncoder(categories = 'auto', sparse=False) 21 | cyl_enc.fit(mtcars['cyl'].values.reshape(-1,1)) 22 | 23 | y = mtcars['mpg'] 
24 | # we need to concatenate the one-hot (dummy) encoded values with 25 | # the values from mtcars 26 | X = np.concatenate( 27 | (mtcars[['disp', 'qsec', 'am']].values, 28 | cyl_enc.transform(mtcars['cyl'].values.reshape(-1,1))), 29 | axis=1) 30 | 31 | # fit our regression model 32 | fit = LinearRegression() 33 | fit.fit(X=X, y=y) 34 | 35 | def preds(fit, cyl_enc, disp, qsec, am, cyl): 36 | # construct our matrix 37 | X = np.concatenate( 38 | (np.array([[disp, qsec, am]]), 39 | cyl_enc.transform([[cyl]])), 40 | axis=1) 41 | # find predicted value 42 | pred = fit.predict(X)[0] 43 | # return a rounded string for nice UI display 44 | return str(round(pred, 2)) 45 | 46 | 47 | # load the resuired modules 48 | 49 | 50 | # create an instance of a dash app 51 | server = flask.Flask(__name__) 52 | app = dash.Dash(__name__, server=server) 53 | app.title = 'Predicting MPG' 54 | 55 | # dash apps are unstyled by default 56 | # this css I'm using was created by the author of Dash 57 | # and is the most commonly used style sheet 58 | app.css.append_css({ 59 | "external_url": "https://codepen.io/chriddyp/pen/bWLwgP.css" 60 | }) 61 | 62 | # I compute these up front to avoid having to 63 | # calculate thes twice 64 | unq_cyl = mtcars['cyl'].unique() 65 | unq_cyl.sort() # so it's in a nice order 66 | opts_cyl = [{'label': i, 'value': i} for i in unq_cyl] 67 | 68 | 69 | app.layout = html.Div([ 70 | 71 | html.H5('Displacement (in cubic inches):'), 72 | html.Br(), html.Br(), 73 | daq.Slider( 74 | id='input-disp', 75 | min=np.floor(mtcars['disp'].min()), 76 | max=np.ceil(mtcars['disp'].max()), 77 | step=.5, 78 | dots=False, 79 | handleLabel={"showCurrentValue": True,"label": "Value"}, 80 | value=np.floor(mtcars['disp'].mean())), 81 | 82 | html.H5('Quarter mile time:'), 83 | html.Br(), 84 | daq.Slider( 85 | id='input-qsec', 86 | min=np.floor(mtcars['qsec'].min()), 87 | max=np.ceil(mtcars['qsec'].max()), 88 | dots=False, 89 | handleLabel={"showCurrentValue": True,"label": "Value"}, 90 | 
step=.25, 91 | value=np.floor(mtcars['disp'].mean())), 92 | 93 | html.H5('Number of cylinders:'), 94 | dcc.RadioItems( 95 | id='input-cyl', 96 | options=opts_cyl, 97 | value=opts_cyl[0].get('value'), 98 | labelStyle={'display': 'inline-block'}), 99 | 100 | daq.ToggleSwitch( 101 | id='input-am', 102 | label='Has manual transmission', 103 | value=False), 104 | 105 | html.H2(id='output-prediction') 106 | ]) 107 | 108 | # callback will watch for changes in inputs and re-execute when any 109 | # changes are detected. 110 | @app.callback( 111 | dash.dependencies.Output('output-prediction', 'children'), 112 | [ 113 | dash.dependencies.Input('input-disp', 'value'), 114 | dash.dependencies.Input('input-qsec', 'value'), 115 | dash.dependencies.Input('input-cyl', 'value'), 116 | dash.dependencies.Input('input-am', 'value')]) 117 | def callback_pred(disp, qsec, cyl, am): 118 | # pass values from the function on to our prediction function 119 | # defined in setup 120 | pred = preds(fit=fit, 121 | cyl_enc=cyl_enc, 122 | disp=disp, 123 | qsec=qsec, 124 | am=np.float64(am), 125 | cyl=cyl) 126 | # return a string that will be rendered in the UI 127 | return "Predicted MPG: {}".format(pred) 128 | 129 | app.config.supress_callback_exceptions = True 130 | app.config.update({ 131 | # as the proxy server will remove the prefix 132 | 'routes_pathname_prefix': '' 133 | 134 | # the front-end will prefix this string to the requests 135 | # that are made to the proxy server 136 | , 'requests_pathname_prefix': '' 137 | }) 138 | 139 | # for running the app 140 | if __name__ == '__main__': 141 | app.run_server(debug=False, host='0.0.0.0') 142 | -------------------------------------------------------------------------------- /rstudio/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright (c) Jupyter Development Team. 3 | # Distributed under the terms of the Modified BSD License. 
4 | 5 | set -e 6 | 7 | # Exec the specified command or fall back on bash 8 | if [ $# -eq 0 ]; then 9 | cmd=( "bash" ) 10 | else 11 | cmd=( "$@" ) 12 | fi 13 | 14 | run-hooks () { 15 | # Source scripts or run executable files in a directory 16 | if [[ ! -d "$1" ]] ; then 17 | return 18 | fi 19 | echo "$0: running hooks in $1" 20 | for f in "$1/"*; do 21 | case "$f" in 22 | *.sh) 23 | echo "$0: running $f" 24 | source "$f" 25 | ;; 26 | *) 27 | if [[ -x "$f" ]] ; then 28 | echo "$0: running $f" 29 | "$f" 30 | else 31 | echo "$0: ignoring $f" 32 | fi 33 | ;; 34 | esac 35 | done 36 | echo "$0: done running hooks in $1" 37 | } 38 | 39 | #run-hooks /usr/local/bin/start-notebook.d 40 | 41 | # Handle special flags if we're root 42 | if [ $(id -u) == 0 ] ; then 43 | 44 | # Only attempt to change the rstudio username if it exists 45 | if id rstudio &> /dev/null ; then 46 | echo "Set username to: $NB_USER" 47 | usermod -d /home/$NB_USER -l $NB_USER rstudio 48 | fi 49 | 50 | # Handle case where provisioned storage does not have the correct permissions by default 51 | # Ex: default NFS/EFS (no auto-uid/gid) 52 | if [[ "$CHOWN_HOME" == "1" || "$CHOWN_HOME" == 'yes' ]]; then 53 | echo "Changing ownership of /home/$NB_USER to $NB_UID:$NB_GID with options '${CHOWN_HOME_OPTS}'" 54 | chown $CHOWN_HOME_OPTS $NB_UID:$NB_GID /home/$NB_USER 55 | fi 56 | if [ ! -z "$CHOWN_EXTRA" ]; then 57 | for extra_dir in $(echo $CHOWN_EXTRA | tr ',' ' '); do 58 | echo "Changing ownership of ${extra_dir} to $NB_UID:$NB_GID with options '${CHOWN_EXTRA_OPTS}'" 59 | chown $CHOWN_EXTRA_OPTS $NB_UID:$NB_GID $extra_dir 60 | done 61 | fi 62 | 63 | # handle home and working directory if the username changed 64 | if [[ "$NB_USER" != "rstudio" ]]; then 65 | # changing username, make sure homedir exists 66 | # (it could be mounted, and we shouldn't create it if it already exists) 67 | if [[ ! 
-e "/home/$NB_USER" ]]; then 68 | echo "Relocating home dir to /home/$NB_USER" 69 | mv /home/rstudio "/home/$NB_USER" 70 | fi 71 | # if workdir is in /home/rstudio, cd to /home/$NB_USER 72 | if [[ "$PWD/" == "/home/rstudio/"* ]]; then 73 | newcwd="/home/$NB_USER/${PWD:13}" 74 | echo "Setting CWD to $newcwd" 75 | cd "$newcwd" 76 | fi 77 | fi 78 | 79 | # Change UID of NB_USER to NB_UID if it does not match 80 | if [ "$NB_UID" != $(id -u $NB_USER) ] ; then 81 | echo "Set $NB_USER UID to: $NB_UID" 82 | usermod -u $NB_UID $NB_USER 83 | fi 84 | 85 | # Set NB_USER primary gid to NB_GID (after making the group). Set 86 | # supplementary gids to NB_GID and 100. 87 | if [ "$NB_GID" != $(id -g $NB_USER) ] ; then 88 | echo "Add $NB_USER to group: $NB_GID" 89 | groupadd -g $NB_GID -o ${NB_GROUP:-${NB_USER}} 90 | usermod -g $NB_GID -aG 100 $NB_USER 91 | fi 92 | 93 | # Enable sudo if requested 94 | if [[ "$GRANT_SUDO" == "1" || "$GRANT_SUDO" == 'yes' ]]; then 95 | echo "Granting $NB_USER sudo access and appending $CONDA_DIR/bin to sudo PATH" 96 | echo "$NB_USER ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/notebook 97 | fi 98 | 99 | # Add $CONDA_DIR/bin to sudo secure_path 100 | sed -r "s#Defaults\s+secure_path=\"([^\"]+)\"#Defaults secure_path=\"\1:$CONDA_DIR/bin\"#" /etc/sudoers | grep secure_path > /etc/sudoers.d/path 101 | 102 | # Exec the command as NB_USER with the PATH and the rest of 103 | # the environment preserved 104 | #run-hooks /usr/local/bin/before-notebook.d 105 | echo "Executing the command: ${cmd[@]}" 106 | exec sudo -E -H -u $NB_USER PATH=$PATH XDG_CACHE_HOME=/home/$NB_USER/.cache PYTHONPATH=${PYTHONPATH:-} "${cmd[@]}" 107 | #exec sudo -E -H -u $NB_USER XDG_CACHE_HOME=/home/$NB_USER/.cache "${cmd[@]}" 108 | else 109 | if [[ "$NB_UID" == "$(id -u rstudio)" && "$NB_GID" == "$(id -g rstudio)" ]]; then 110 | # User is not attempting to override user/group via environment 111 | # variables, but they could still have overridden the uid/gid that 112 | # container runs as. 
Check that the user has an entry in the passwd 113 | # file and if not add an entry. 114 | STATUS=0 && whoami &> /dev/null || STATUS=$? && true 115 | if [[ "$STATUS" != "0" ]]; then 116 | if [[ -w /etc/passwd ]]; then 117 | echo "Adding passwd file entry for $(id -u)" 118 | cat /etc/passwd | sed -e "s/^rstudio:/nayvoj:/" > /tmp/passwd 119 | echo "rstudio:x:$(id -u):$(id -g):,,,:/home/rstudio:/bin/bash" >> /tmp/passwd 120 | cat /tmp/passwd > /etc/passwd 121 | rm /tmp/passwd 122 | else 123 | echo 'Container must be run with group "root" to update passwd file' 124 | fi 125 | fi 126 | 127 | # Warn if the user isn't going to be able to write files to $HOME. 128 | if [[ ! -w /home/rstudio ]]; then 129 | echo 'Container must be run with group "users" to update files' 130 | fi 131 | else 132 | # Warn if looks like user want to override uid/gid but hasn't 133 | # run the container as root. 134 | if [[ ! -z "$NB_UID" && "$NB_UID" != "$(id -u)" ]]; then 135 | echo 'Container must be run as root to set $NB_UID' 136 | fi 137 | if [[ ! -z "$NB_GID" && "$NB_GID" != "$(id -g)" ]]; then 138 | echo 'Container must be run as root to set $NB_GID' 139 | fi 140 | fi 141 | 142 | # Warn if looks like user want to run in sudo mode but hasn't run 143 | # the container as root. 144 | if [[ "$GRANT_SUDO" == "1" || "$GRANT_SUDO" == 'yes' ]]; then 145 | echo 'Container must be run as root to grant sudo permissions' 146 | fi 147 | 148 | # Execute the command 149 | #run-hooks /usr/local/bin/before-notebook.d 150 | echo "Executing the command: ${cmd[@]}" 151 | exec "${cmd[@]}" 152 | fi 153 | --------------------------------------------------------------------------------