├── .connect.yml ├── .github └── workflows │ └── deploy.yml ├── .gitignore ├── .python-version ├── LICENSE ├── README.md ├── dash-app ├── README.md ├── app.png ├── app.py ├── country_indicators.csv ├── manifest.json ├── requirements.txt └── styles.css ├── dash-bikeshare ├── README.md ├── app.py ├── assets │ └── simple.css ├── manifest.json └── requirements.txt ├── dash-stock-pricing ├── .internal.yml ├── README.md ├── app.py ├── dash-stock-pricing.png ├── manifest.json ├── prices.csv └── requirements.txt ├── fastapi-stock ├── .internal.yml ├── README.md ├── fastapi-stock.png ├── main.py ├── manifest.json ├── prices.csv └── requirements.txt ├── flask-getting-started-rsc ├── README.md ├── app.py ├── getting-started-flask.png ├── manifest.json ├── requirements.txt ├── static │ ├── api-snippet.png │ └── style.css └── templates │ └── index.html ├── flask-getting-started-sqlalchemy ├── README.md ├── app.py ├── manifest.json ├── requirements.txt ├── seed_db.py ├── static │ └── style.css ├── templates │ └── index.html └── users.db ├── flask-restx ├── README.md ├── app.py ├── manifest.json ├── model.p ├── mtcars.csv ├── requirements.txt └── train.py ├── flask-sentiment-analysis-api ├── README.md ├── app.py ├── manifest.json ├── model │ ├── meta.json │ ├── textcat │ │ ├── cfg │ │ └── model │ ├── tokenizer │ └── vocab │ │ ├── key2row │ │ ├── lexemes.bin │ │ ├── lookups.bin │ │ ├── strings.json │ │ └── vectors ├── requirements.txt └── train.py ├── flask-sentiment-analysis-app ├── README.md ├── app.py ├── manifest.json ├── model │ ├── meta.json │ ├── textcat │ │ ├── cfg │ │ └── model │ ├── tokenizer │ └── vocab │ │ ├── key2row │ │ ├── lexemes.bin │ │ ├── lookups.bin │ │ ├── strings.json │ │ └── vectors ├── requirements.txt ├── static │ ├── css │ │ ├── landing-page.css │ │ └── landing-page.min.css │ ├── img │ │ └── bg-masthead.jpg │ └── vendor │ │ ├── bootstrap │ │ ├── css │ │ │ ├── bootstrap-grid.css │ │ │ ├── bootstrap-grid.css.map │ │ │ ├── bootstrap-grid.min.css │ │ │ ├── bootstrap-grid.min.css.map │ │ │ ├── bootstrap-reboot.css │ │ │ ├── bootstrap-reboot.css.map │ │ │ ├── bootstrap-reboot.min.css │ │ │ ├── bootstrap-reboot.min.css.map │ │ │ ├── bootstrap.css │ │ │ ├── bootstrap.css.map │ │ │ ├── bootstrap.min.css │ │ │ └── bootstrap.min.css.map │ │ └── js │ │ │ ├── bootstrap.bundle.js │ │ │ ├── bootstrap.bundle.js.map │ │ │ ├── bootstrap.bundle.min.js │ │ │ ├── bootstrap.bundle.min.js.map │ │ │ ├── bootstrap.js │ │ │ ├── bootstrap.js.map │ │ │ ├── bootstrap.min.js │ │ │ └── bootstrap.min.js.map │ │ ├── fontawesome-free │ │ ├── css │ │ │ ├── all.css │ │ │ ├── all.min.css │ │ │ ├── brands.css │ │ │ ├── brands.min.css │ │ │ ├── fontawesome.css │ │ │ ├── fontawesome.min.css │ │ │ ├── regular.css │ │ │ ├── regular.min.css │ │ │ ├── solid.css │ │ │ ├── solid.min.css │ │ │ ├── svg-with-js.css │ │ │ ├── svg-with-js.min.css │ │ │ ├── v4-shims.css │ │ │ └── v4-shims.min.css │ │ └── webfonts │ │ │ ├── fa-brands-400.eot │ │ │ ├── fa-brands-400.svg │ │ │ ├── fa-brands-400.ttf │ │ │ ├── fa-brands-400.woff │ │ │ ├── fa-brands-400.woff2 │ │ │ ├── fa-regular-400.eot │ │ │ ├── fa-regular-400.svg │ │ │ ├── fa-regular-400.ttf │ │ │ ├── fa-regular-400.woff │ │ │ ├── fa-regular-400.woff2 │ │ │ ├── fa-solid-900.eot │ │ │ ├── fa-solid-900.svg │ │ │ ├── fa-solid-900.ttf │ │ │ ├── fa-solid-900.woff │ │ │ └── fa-solid-900.woff2 │ │ ├── jquery │ │ ├── jquery.js │ │ ├── jquery.min.js │ │ ├── jquery.min.map │ │ ├── jquery.slim.js │ │ ├── jquery.slim.min.js │ │ └── jquery.slim.min.map │ │ └── simple-line-icons │ │ ├── css │ │ 
└── simple-line-icons.css │ │ └── fonts │ │ ├── Simple-Line-Icons.eot │ │ ├── Simple-Line-Icons.svg │ │ ├── Simple-Line-Icons.ttf │ │ ├── Simple-Line-Icons.woff │ │ └── Simple-Line-Icons.woff2 ├── templates │ ├── app.html │ └── result.html └── train.py ├── jupyter-interactive-visualization ├── README.md ├── jupyter-interactive-visualization.ipynb ├── manifest.json └── requirements.txt ├── jupyter-voila ├── .internal.yml ├── README.md ├── bqplot.ipynb ├── hash.ipynb ├── ipyvolume.ipynb ├── manifest.json ├── requirements.txt └── voila.png ├── justfile ├── python-examples.Rproj ├── quarto-lightbox ├── .gitignore ├── .internal.yml ├── README.md ├── _extensions │ └── quarto-ext │ │ └── lightbox │ │ ├── _extension.yml │ │ ├── lightbox.css │ │ ├── lightbox.lua │ │ └── resources │ │ ├── css │ │ └── glightbox.min.css │ │ └── js │ │ └── glightbox.min.js ├── _publish.yml ├── _quarto.yml ├── img │ ├── Chengdu-pandas-d10.jpg │ ├── Lion_waiting_in_Namibia.jpg │ ├── Panthera_tigris_corbetti_(Tierpark_Berlin)_832-714-(118).jpg │ └── The_Wizard_of_Oz_1955_Lobby_Card.jpg ├── manifest.json ├── quarto-lightbox.png ├── quarto-python-lightbox.qmd └── requirements.txt ├── reticulated-image-classifier ├── .internal.yml ├── DESCRIPTION ├── README.md ├── app.R ├── config.yml ├── image-classifier.Rproj ├── image-classifier.py ├── img │ ├── cat.jpg │ ├── dog.jpg │ ├── flower.jpg │ ├── oil_platform.jpg │ └── truck.jpg ├── labels.json ├── manifest.json ├── model │ └── hub │ │ └── checkpoints │ │ └── squeezenet1_1-b8a52dc0.pth ├── pytorch-logo.png ├── renv.lock ├── renv │ ├── .gitignore │ ├── activate.R │ └── settings.dcf ├── requirements.txt └── reticulated-image-classifier.png ├── reticulated-rmarkdown-notebook ├── .gitignore ├── README.md ├── manifest.json ├── renv.lock ├── renv │ ├── .gitignore │ ├── activate.R │ └── settings.dcf ├── requirements.txt ├── rmarkdown-notebook.Rmd └── rmarkdown-notebook.Rproj ├── reticulated-sentiment-analysis-api ├── README.md ├── manifest.json ├── model │ ├── meta.json │ ├── textcat │ │ ├── cfg │ │ └── model │ ├── tokenizer │ └── vocab │ │ ├── key2row │ │ ├── lexemes.bin │ │ ├── lookups.bin │ │ ├── strings.json │ │ └── vectors ├── plumber.R ├── predict.py ├── renv.lock ├── renv │ ├── .gitignore │ ├── activate.R │ └── settings.dcf ├── requirements.txt ├── sentiment-analysis.Rproj ├── spacy_logo.jpg └── train.Rmd ├── reticulated-sentiment-analysis-app ├── README.md ├── app.R ├── manifest.json ├── model │ ├── meta.json │ ├── textcat │ │ ├── cfg │ │ └── model │ ├── tokenizer │ └── vocab │ │ ├── key2row │ │ ├── lexemes.bin │ │ ├── lookups.bin │ │ ├── strings.json │ │ └── vectors ├── namesgenerator.py ├── predict.py ├── renv.lock ├── renv │ ├── .gitignore │ ├── activate.R │ └── settings.dcf ├── requirements.txt ├── sentiment-analysis-app.Rproj └── train.Rmd ├── reticulated_python.png ├── shiny-income-share ├── .internal.yml ├── README.md ├── app.py ├── data.csv ├── manifest.json ├── requirements.txt └── shiny-income-share.png └── streamlit-income-share ├── .internal.yml ├── README.md ├── app.py ├── data.csv ├── manifest.json ├── requirements.txt └── streamlit-income-share.png /.connect.yml: -------------------------------------------------------------------------------- 1 | default: 2 | content: 3 | - title: "Use Python with R" 4 | path: "./rmarkdown-notebook" 5 | description: "Use the reticulate package to integrate Python into an R Markdown notebook." 
6 | tag: 7 | - "Demo Content|Python" 8 | url: "/python/reticulate/" 9 | image: "reticulated_python.png" 10 | - title: "Sentiment Analysis with Python" 11 | path: "./sentiment-analysis" 12 | description: "A Plumber API that uses R and Python to evaluate sentiment in text input using a pretrained spaCy model." 13 | tag: 14 | - "Demo Content|Python" 15 | url: "/python/sentiment-analysis/" 16 | image: "sentiment-analysis/spacy_logo.jpg" 17 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: deploy 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | workflow_dispatch: 8 | 9 | jobs: 10 | deploy: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout 14 | uses: actions/checkout@v4 15 | 16 | - name: deploy 17 | uses: rstudio/actions/connect-publish@main 18 | with: 19 | url: https://${{ secrets.CONNECT_API_KEY }}@colorado.posit.co/rsc 20 | namespace: python-examples 21 | dir: | 22 | ./dash-app/ 23 | ./dash-bikeshare/ 24 | ./dash-stock-pricing/ 25 | ./fastapi-stock/ 26 | ./flask-getting-started-rsc/ 27 | ./flask-getting-started-sqlalchemy/ 28 | ./flask-restx/ 29 | ./flask-sentiment-analysis-api/ 30 | ./flask-sentiment-analysis-app/ 31 | ./jupyter-interactive-visualization/ 32 | ./jupyter-voila/ 33 | ./quarto-lightbox/ 34 | ./reticulated-image-classifier/ 35 | ./reticulated-rmarkdown-notebook/ 36 | ./reticulated-sentiment-analysis-api/ 37 | ./reticulated-sentiment-analysis-app/ 38 | ./shiny-income-share/ 39 | ./streamlit-income-share/ 40 | require-vanity-path: true 41 | access-type: all 42 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # History files 2 | .Rhistory 3 | .Rapp.history 4 | 5 | # Session Data files 6 | .RData 7 | 8 | # Example code in package build process 9 | *-Ex.R 10 | 11 | # Output files from R CMD build 12 | /*.tar.gz 13 | 14 | # Output files from R CMD check 15 | /*.Rcheck/ 16 | 17 | # RStudio files 18 | .Rproj.user/ 19 | 20 | # produced vignettes 21 | vignettes/*.html 22 | vignettes/*.pdf 23 | 24 | # OAuth2 token, see https://github.com/hadley/httr/releases/tag/v0.3 25 | .httr-oauth 26 | 27 | # knitr and R markdown default cache directories 28 | /*_cache/ 29 | /cache/ 30 | 31 | # Temporary files created by R markdown 32 | *.utf8.md 33 | *.knit.md 34 | 35 | # Shiny token, see https://shiny.rstudio.com/articles/shinyapps.html 36 | rsconnect/ 37 | 38 | # Deployment details from rsconnect-python 39 | rsconnect-python/ 40 | 41 | # Temporary files 42 | .DS_Store 43 | __pycache__ 44 | .ipynb_checkpoints 45 | 46 | rmarkdown-notebook/flights.csv 47 | 48 | .venv 49 | venv 50 | .env 51 | .Rprofile 52 | 53 | /.luarc.json 54 | -------------------------------------------------------------------------------- /.python-version: -------------------------------------------------------------------------------- 1 | 3.10.0 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Posit Connect & Python 2 | 3 | Posit Connect is a publishing platform for the work your team creates in R and Python. 
4 | This repository contains examples of Python content you can deploy to Connect, including: 5 | 6 | ## Interactive apps 7 | 8 | - [Streamlit](./streamlit-income-share/README.md) 9 | - [Dash](dash-app/README.md) 10 | - [Flask](flask-sentiment-analysis-app/README.md) 11 | - [Voila](./jupyter-voila/README.md) 12 | 13 | ### Web APIs 14 | 15 | - [Flask](./flask-sentiment-analysis-api/README.md) 16 | - [FastAPI](./fastapi-stock/README.md) 17 | 18 | ## Documents 19 | 20 | - [Jupyter Notebooks](./jupyter-interactive-visualization/README.md) 21 | - [Quarto Documents](./quarto-lightbox/README.md) 22 | 23 | ## Reticulate 24 | 25 | 26 | 27 | 28 | 29 | Reticulate allows you to call Python from within an R session. 30 | This enables you to use models built in Python to power Shiny apps, visualize pandas dataframes with ggplot2, and much more. 31 | 32 | ### Interactive apps 33 | 34 | - [Serving Sentiment Analysis with Plumber and spaCy](./reticulatd-sentiment-analysis-api/README.md) 35 | - [Image Classification with PyTorch and Shiny](./reticulated-image-classifier/README.md) 36 | 37 | ### Documents 38 | 39 | - [Visualizing pandas dataframes with ggplot2](./reticulated-rmarkdown-notebook/README.md) 40 | 41 | ## Getting Started 42 | 43 | You can deploy examples from this repo to your Connect server [via git-backed deployment](https://docs.posit.co/connect/user/git-backed/), or clone the repository and deploy examples from their manifests with the [`rsconnect` CLI](https://docs.posit.co/rsconnect-python/). 44 | 45 | If you want to explore an example more closely before deploying it: 46 | 47 | * Clone this repository 48 | * create a virtual environment in the folder you want to work in 49 | * restore the needed packages into the virtual environment 50 | 51 | ```bash 52 | $ cd flask-sentiment-analysis-api 53 | $ python -m venv .venv 54 | $ source .venv/bin/activate 55 | $ python -m pip install -U pip setuptools wheel 56 | $ python -m pip install -r requirements.txt 57 | ``` 58 | 59 | For reticulated content, set the `RETICULATE_PYTHON` environment variable to point to your virtual environment, by placing an `.Renviron` file in the folder containing the following: 60 | 61 | ``` 62 | RETICULATE_PYTHON=.venv/bin/python 63 | ``` 64 | 65 | ## Publishing basics 66 | 67 | Overview: 68 | 69 | * Create and activate a virtual environment 70 | * Run the examples locally 71 | * Acquire an [API key](https://docs.posit.co/connect/user/api-keys/) 72 | * Publish the examples with the [rsconnect cli](https://github.com/rstudio/rsconnect-python) 73 | * Save the environment and deployment details for future git-backed publishing 74 | 75 | ``` 76 | rsconnect add \ 77 | --api-key \ 78 | --server \ 79 | --name 80 | ``` 81 | 82 | ``` 83 | rsconnect deploy api . -n 84 | ``` 85 | -------------------------------------------------------------------------------- /dash-app/README.md: -------------------------------------------------------------------------------- 1 | # Data Visualization with Dash 2 | 3 | This example is from Plotly's [Dash tutorial](https://dash.plot.ly/getting-started-part-2). 4 | 5 | ![example app image](app.png) 6 | 7 | ## Deploy 8 | 9 | ``` 10 | rsconnect deploy dash . 
-n 11 | ``` 12 | 13 | ## Resources 14 | 15 | [Posit Connect User Guide - Dash](https://docs.posit.co/connect/user/dash/) -------------------------------------------------------------------------------- /dash-app/app.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/dash-app/app.png -------------------------------------------------------------------------------- /dash-app/app.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | load_dotenv() 3 | import dash 4 | import pandas as pd 5 | from dash import dcc, html 6 | from dash.dependencies import Input, Output 7 | 8 | external_stylesheets = ["styles.css"] 9 | 10 | app = dash.Dash(__name__, external_stylesheets=external_stylesheets) 11 | 12 | df = pd.read_csv("country_indicators.csv") 13 | 14 | available_indicators = df["Indicator Name"].unique() 15 | 16 | app.layout = html.Div( 17 | [ 18 | html.Div( 19 | [ 20 | html.Div( 21 | [ 22 | dcc.Dropdown( 23 | id="xaxis-column", 24 | options=[ 25 | {"label": i, "value": i} for i in available_indicators 26 | ], 27 | value="Fertility rate, total (births per woman)", 28 | ), 29 | dcc.RadioItems( 30 | id="xaxis-type", 31 | options=[ 32 | {"label": i, "value": i} for i in ["Linear", "Log"] 33 | ], 34 | value="Linear", 35 | labelStyle={"display": "inline-block"}, 36 | ), 37 | ], 38 | style={"width": "48%", "display": "inline-block"}, 39 | ), 40 | html.Div( 41 | [ 42 | dcc.Dropdown( 43 | id="yaxis-column", 44 | options=[ 45 | {"label": i, "value": i} for i in available_indicators 46 | ], 47 | value="Life expectancy at birth, total (years)", 48 | ), 49 | dcc.RadioItems( 50 | id="yaxis-type", 51 | options=[ 52 | {"label": i, "value": i} for i in ["Linear", "Log"] 53 | ], 54 | value="Linear", 55 | labelStyle={"display": "inline-block"}, 56 | ), 57 | ], 58 | style={"width": "48%", "float": "right", "display": "inline-block"}, 59 | ), 60 | ] 61 | ), 62 | dcc.Graph(id="indicator-graphic"), 63 | dcc.Slider( 64 | id="year--slider", 65 | min=df["Year"].min(), 66 | max=df["Year"].max(), 67 | value=df["Year"].max(), 68 | marks={str(year): str(year) for year in df["Year"].unique()}, 69 | step=None, 70 | ), 71 | ] 72 | ) 73 | 74 | 75 | @app.callback( 76 | Output("indicator-graphic", "figure"), 77 | [ 78 | Input("xaxis-column", "value"), 79 | Input("yaxis-column", "value"), 80 | Input("xaxis-type", "value"), 81 | Input("yaxis-type", "value"), 82 | Input("year--slider", "value"), 83 | ], 84 | ) 85 | def update_graph( 86 | xaxis_column_name, yaxis_column_name, xaxis_type, yaxis_type, year_value 87 | ): 88 | dff = df[df["Year"] == year_value] 89 | 90 | return { 91 | "data": [ 92 | dict( 93 | x=dff[dff["Indicator Name"] == xaxis_column_name]["Value"], 94 | y=dff[dff["Indicator Name"] == yaxis_column_name]["Value"], 95 | text=dff[dff["Indicator Name"] == yaxis_column_name]["Country Name"], 96 | mode="markers", 97 | marker={ 98 | "size": 15, 99 | "opacity": 0.5, 100 | "line": {"width": 0.5, "color": "white"}, 101 | }, 102 | ) 103 | ], 104 | "layout": dict( 105 | xaxis={ 106 | "title": xaxis_column_name, 107 | "type": "linear" if xaxis_type == "Linear" else "log", 108 | }, 109 | yaxis={ 110 | "title": yaxis_column_name, 111 | "type": "linear" if yaxis_type == "Linear" else "log", 112 | }, 113 | margin={"l": 40, "b": 40, "t": 10, "r": 0}, 114 | hovermode="closest", 115 | ), 116 | } 117 | 118 | 119 | if __name__ == "__main__": 
120 | app.run_server(debug=True) 121 | -------------------------------------------------------------------------------- /dash-app/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "python-dash", 6 | "entrypoint": "app" 7 | }, 8 | "python": { 9 | "version": "3.10.0", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.0.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "714eff35716ba552e0dde4a13cd8ceb6" 19 | }, 20 | "app.png": { 21 | "checksum": "d7dc349dbfd3e2dd7be1d7f77b2f3309" 22 | }, 23 | "app.py": { 24 | "checksum": "b583e22cb3e01bce8e7034526128cd9d" 25 | }, 26 | "country_indicators.csv": { 27 | "checksum": "aa6e4392aeef18d537d9fcbd8d68e3ad" 28 | }, 29 | "styles.css": { 30 | "checksum": "a4e86ec9273e07fdb85cf5dd6c6fed1e" 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /dash-app/requirements.txt: -------------------------------------------------------------------------------- 1 | click==8.1.3 2 | dash==2.9.1 3 | dash-core-components==2.0.0 4 | dash-html-components==2.0.0 5 | dash-table==5.0.0 6 | Flask==2.2.5 7 | itsdangerous==2.1.2 8 | Jinja2==3.1.2 9 | MarkupSafe==2.1.2 10 | numpy==1.24.2 11 | pandas==1.5.3 12 | plotly==5.13.1 13 | python-dateutil==2.8.2 14 | pytz==2022.7.1 15 | six==1.16.0 16 | tenacity==8.2.2 17 | Werkzeug==3.0.1 18 | python-dotenv 19 | -------------------------------------------------------------------------------- /dash-bikeshare/README.md: -------------------------------------------------------------------------------- 1 | # Visualizing Model Predictions with Dash 2 | 3 | A Dash app that calls a Plumber API for predictions on available bikes for the [Capital Bikeshare](https://github.com/sol-eng/bike_predict). 4 | 5 | ## Deploy 6 | 7 | ``` 8 | rsconnect deploy dash . 
-n 9 | ``` 10 | ## Resources 11 | 12 | [Posit Connect User Guide - Dash](https://docs.posit.co/connect/user/dash/) -------------------------------------------------------------------------------- /dash-bikeshare/app.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | load_dotenv() 3 | import dash 4 | import os 5 | import pandas as pd 6 | import plotly.express as px 7 | import requests as req 8 | 9 | from dash import dcc, html 10 | from dash.dependencies import Input, Output 11 | 12 | app = dash.Dash(__name__) 13 | 14 | locations = pd.read_csv("https://colorado.rstudio.com/rsc/bike_station_info/data.csv") 15 | location_options = [ 16 | {"label": locations["name"][l], "value": locations["station_id"][l]} 17 | for l in locations.index 18 | ] 19 | mapbox = os.getenv("MAPBOX_API_KEY") 20 | 21 | app.layout = html.Div( 22 | [ 23 | html.Div( 24 | [ 25 | html.Div( 26 | [ 27 | html.Img( 28 | src="https://d33wubrfki0l68.cloudfront.net/1ac3f0e3753f18c7e2a8893957d1841fba1e3d08/48a33/wp-content/uploads/2018/10/rstudio-logo-flat.png", 29 | style={"height": "60px", "width": "auto"}, 30 | ) 31 | ], 32 | className="one-third column", 33 | ), 34 | html.Div( 35 | [ 36 | html.Div( 37 | [ 38 | html.H3( 39 | "Capitol Bikeshare", 40 | style={"margin-bottom": "0px"}, 41 | ), 42 | html.H5( 43 | "Availability Forecast", style={"margin-top": "0px"} 44 | ), 45 | ] 46 | ) 47 | ], 48 | className="one-half column", 49 | id="title", 50 | ), 51 | html.Div( 52 | [ 53 | html.A( 54 | html.Button("View Code", id="learn-more-button"), 55 | href="https://github.com/sol-eng/python-examples", 56 | ) 57 | ], 58 | className="one-third column", 59 | id="button", 60 | ), 61 | ], 62 | id="header", 63 | className="row flex-display", 64 | style={"margin-bottom": "25px"}, 65 | ), 66 | html.Div( 67 | [ 68 | html.Div( 69 | [ 70 | html.Div( 71 | [ 72 | html.P("Select Location:"), 73 | dcc.Dropdown( 74 | id="location", options=location_options, value=1 75 | ), 76 | ], 77 | style={"margin-top": "10"}, 78 | ), 79 | ], 80 | className="row", 81 | ), 82 | html.Div( 83 | [ 84 | html.Div([dcc.Graph(id="bike-forecast")]), 85 | html.Div([dcc.Graph(id="bike-map")]), 86 | ] 87 | ), 88 | ], 89 | className="row", 90 | ), 91 | ] 92 | ) 93 | 94 | 95 | @app.callback(Output("bike-forecast", "figure"), [Input("location", "value")]) 96 | def update_forecast_graph(value): 97 | r = req.get( 98 | "https://colorado.rstudio.com/rsc/bike_predict_api/pred", 99 | params={"station_id": value}, 100 | ) 101 | prediction = pd.DataFrame.from_dict(r.json()) 102 | fig = px.line(prediction, x="times", y="pred") 103 | fig.update_layout( 104 | xaxis_title="Day and Time", 105 | yaxis_title="Predicted Num of Bikes Available", 106 | ) 107 | return fig 108 | 109 | 110 | @app.callback(Output("bike-map", "figure"), [Input("location", "value")]) 111 | def update_bike_graph(value): 112 | this_station = locations[locations["station_id"] == value] 113 | df = pd.DataFrame.from_dict( 114 | {"lat": this_station["lat"], "lon": this_station["lon"], "size": 10} 115 | ) 116 | px.set_mapbox_access_token(mapbox) 117 | fig = px.scatter_mapbox(df, lat="lat", lon="lon", size="size") 118 | fig.update_layout(mapbox_style="open-street-map", mapbox=dict(zoom=13)) 119 | return fig 120 | 121 | 122 | if __name__ == "__main__": 123 | app.run_server(debug=True) 124 | -------------------------------------------------------------------------------- /dash-bikeshare/assets/simple.css: 
-------------------------------------------------------------------------------- 1 | /* Borrowed heavily from https://github.com/plotly/dash-sample-apps/blob/master/apps/dash-oil-and-gas/assets/s1.css 2 | */ 3 | body { 4 | font-family: sans-serif; 5 | } 6 | 7 | #header { 8 | align-items: center; 9 | background-color: #f2f2f2; 10 | } 11 | 12 | #learn-more-button { 13 | text-align: center; 14 | height: 100%; 15 | padding: 0 20px; 16 | text-transform: none; 17 | font-size: 15px; 18 | float: right; 19 | margin-right: 10px; 20 | margin-top: 5px; 21 | } 22 | #title { 23 | text-align: center; 24 | color:#555; 25 | font-family: sans-serif; 26 | } 27 | .two.columns { 28 | width: 16.25%; 29 | } 30 | 31 | .column, .columns { 32 | margin-left: 0.5%; 33 | } 34 | .flex-display { 35 | display: flex; 36 | } 37 | 38 | .button, 39 | button, 40 | input[type="submit"], 41 | input[type="reset"], 42 | input[type="button"] { 43 | display: inline-block; 44 | height: 38px; 45 | padding: 0 30px; 46 | color: #555; 47 | text-align: center; 48 | font-size: 11px; 49 | font-weight: 600; 50 | line-height: 38px; 51 | letter-spacing: 0.1rem 52 | text-decoration: none; 53 | white-space: nowrap; 54 | background-color: transparent; 55 | border-radius: 4px; 56 | border: 1px solid #bbb; 57 | cursor: pointer; 58 | box-sizing: border-box; 59 | } 60 | .button:hover, 61 | button:hover, 62 | input[type="submit"]:hover, 63 | input[type="reset"]:hover, 64 | input[type="button"]:hover, 65 | .button:focus, 66 | button:focus, 67 | input[type="submit"]:focus, 68 | input[type="reset"]:focus, 69 | input[type="button"]:focus { 70 | color: #333; 71 | border-color: #888; 72 | outline: 0; 73 | } 74 | .button.button-primary, 75 | button.button-primary, 76 | input[type="submit"].button-primary, 77 | input[type="reset"].button-primary, 78 | input[type="button"].button-primary { 79 | color: #fff; 80 | background-color: #33c3f0; 81 | border-color: #33c3f0; 82 | } 83 | .button.button-primary:hover, 84 | button.button-primary:hover, 85 | input[type="submit"].button-primary:hover, 86 | input[type="reset"].button-primary:hover, 87 | input[type="button"].button-primary:hover, 88 | .button.button-primary:focus, 89 | button.button-primary:focus, 90 | input[type="submit"].button-primary:focus, 91 | input[type="reset"].button-primary:focus, 92 | input[type="button"].button-primary:focus { 93 | color: #fff; 94 | background-color: #1eaedb; 95 | border-color: #1eaedb; 96 | } 97 | 98 | /* Grid 99 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 100 | .container { 101 | position: relative; 102 | width: 100%; 103 | max-width: 960px; 104 | margin: 0 auto; 105 | padding: 0 20px; 106 | box-sizing: border-box; 107 | } 108 | .column, 109 | .columns { 110 | width: 100%; 111 | float: left; 112 | box-sizing: border-box; 113 | } 114 | 115 | /* For devices larger than 400px */ 116 | @media (min-width: 400px) { 117 | .container { 118 | width: 85%; 119 | padding: 0; 120 | } 121 | } 122 | 123 | /* For devices larger than 550px */ 124 | @media (min-width: 550px) { 125 | .container { 126 | width: 80%; 127 | } 128 | .column, 129 | .columns { 130 | margin-left: 4%; 131 | } 132 | 133 | .one.column, 134 | .one.columns { 135 | width: 4.66666666667%; 136 | } 137 | .two.columns { 138 | width: 13.3333333333%; 139 | } 140 | .three.columns { 141 | width: 22%; 142 | } 143 | .four.columns { 144 | width: 30.6666666667%; 145 | } 146 | .five.columns { 147 | width: 39.3333333333%; 148 | } 149 | .six.columns { 150 | width: 48%; 151 | } 152 | .seven.columns { 153 | width: 56.6666666667%; 
154 | } 155 | .eight.columns { 156 | width: 65.3333333333%; 157 | } 158 | .nine.columns { 159 | width: 74%; 160 | } 161 | .ten.columns { 162 | width: 82.6666666667%; 163 | } 164 | .eleven.columns { 165 | width: 91.3333333333%; 166 | } 167 | .twelve.columns { 168 | width: 100%; 169 | margin-left: 0; 170 | } 171 | 172 | .one-third.column { 173 | width: 30.6666666667%; 174 | } 175 | .two-thirds.column { 176 | width: 65.3333333333%; 177 | } 178 | 179 | .one-half.column { 180 | width: 48%; 181 | } 182 | 183 | /* Offsets */ 184 | .offset-by-one.column, 185 | .offset-by-one.columns { 186 | margin-left: 8.66666666667%; 187 | } 188 | .offset-by-two.column, 189 | .offset-by-two.columns { 190 | margin-left: 17.3333333333%; 191 | } 192 | .offset-by-three.column, 193 | .offset-by-three.columns { 194 | margin-left: 26%; 195 | } 196 | .offset-by-four.column, 197 | .offset-by-four.columns { 198 | margin-left: 34.6666666667%; 199 | } 200 | .offset-by-five.column, 201 | .offset-by-five.columns { 202 | margin-left: 43.3333333333%; 203 | } 204 | .offset-by-six.column, 205 | .offset-by-six.columns { 206 | margin-left: 52%; 207 | } 208 | .offset-by-seven.column, 209 | .offset-by-seven.columns { 210 | margin-left: 60.6666666667%; 211 | } 212 | .offset-by-eight.column, 213 | .offset-by-eight.columns { 214 | margin-left: 69.3333333333%; 215 | } 216 | .offset-by-nine.column, 217 | .offset-by-nine.columns { 218 | margin-left: 78%; 219 | } 220 | .offset-by-ten.column, 221 | .offset-by-ten.columns { 222 | margin-left: 86.6666666667%; 223 | } 224 | .offset-by-eleven.column, 225 | .offset-by-eleven.columns { 226 | margin-left: 95.3333333333%; 227 | } 228 | 229 | .offset-by-one-third.column, 230 | .offset-by-one-third.columns { 231 | margin-left: 34.6666666667%; 232 | } 233 | .offset-by-two-thirds.column, 234 | .offset-by-two-thirds.columns { 235 | margin-left: 69.3333333333%; 236 | } 237 | 238 | .offset-by-one-half.column, 239 | .offset-by-one-half.columns { 240 | margin-left: 52%; 241 | } 242 | } 243 | 244 | .container-display { 245 | display: flex; 246 | } -------------------------------------------------------------------------------- /dash-bikeshare/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "python-dash", 6 | "entrypoint": "app" 7 | }, 8 | "python": { 9 | "version": "3.10.0", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.0.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "78b12cc7395472129cd5149d4fe9c59c" 19 | }, 20 | "app.py": { 21 | "checksum": "34fd547314532279ce0eeae8ed9f32e9" 22 | }, 23 | "assets/simple.css": { 24 | "checksum": "30dc5ae0bfabcc5cf69e64a271e3c8c6" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /dash-bikeshare/requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2023.7.22 2 | charset-normalizer==3.1.0 3 | click==8.1.3 4 | dash==2.9.1 5 | dash-core-components==2.0.0 6 | dash-html-components==2.0.0 7 | dash-table==5.0.0 8 | Flask==2.2.5 9 | idna==3.4 10 | itsdangerous==2.1.2 11 | Jinja2==3.1.2 12 | MarkupSafe==2.1.2 13 | numpy==1.24.2 14 | pandas==1.5.3 15 | plotly==5.13.1 16 | python-dateutil==2.8.2 17 | python-dotenv==1.0.0 18 | pytz==2022.7.1 19 | requests==2.31.0 20 | six==1.16.0 21 | tenacity==8.2.2 22 | urllib3==1.26.18 23 | Werkzeug==3.0.1 24 | 
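The dash-bikeshare callbacks above pull station metadata and availability forecasts from two Connect endpoints. The sketch below isolates that request so the upstream API can be checked before deploying the app; the `colorado.rstudio.com` URLs are taken from `app.py`, may require access to that server, and the `times`/`pred` fields are assumed from the callback code.

```python
# Minimal sketch of the request made in update_forecast_graph() above.
# Assumes the colorado.rstudio.com endpoints referenced in app.py are still reachable.
import pandas as pd
import requests

# Station metadata used to populate the dropdown
locations = pd.read_csv("https://colorado.rstudio.com/rsc/bike_station_info/data.csv")
station_id = locations["station_id"].iloc[0]

# Ask the bike_predict Plumber API for the availability forecast for one station
r = requests.get(
    "https://colorado.rstudio.com/rsc/bike_predict_api/pred",
    params={"station_id": station_id},
)
r.raise_for_status()

# The callback plots "times" against "pred" from this JSON payload
prediction = pd.DataFrame.from_dict(r.json())
print(prediction.head())
```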
-------------------------------------------------------------------------------- /dash-stock-pricing/.internal.yml: -------------------------------------------------------------------------------- 1 | default: 2 | content: 3 | - name: "Stock Pricing Dashboard with Dash" 4 | path: "." 5 | description: "A Dash application makes it easy to transform your analysis into an interactive dashboard using Python so users can ask and answer questions in real-time, without having to touch any code.h" 6 | tag: 7 | - "Examples|Python|Dash" 8 | url: "/python-examples/dash/" 9 | image: "dash-stock-pricing.png" 10 | -------------------------------------------------------------------------------- /dash-stock-pricing/README.md: -------------------------------------------------------------------------------- 1 | # Stock Pricing Dashboard 2 | 3 | ## About this example 4 | 5 | A Dash application makes it easy to transform your analysis into an interactive dashboard using Python so users can ask and answer questions in real-time, without having to touch any code. 6 | 7 | 8 | ## Learn more 9 | 10 | * [Dash User Guide](https://dash.plotly.com/) 11 | * [User Guide: Dash](https://docs.posit.co/connect/user/dash/) 12 | 13 | ## Requirements 14 | 15 | * Python version 3.7 or higher 16 | -------------------------------------------------------------------------------- /dash-stock-pricing/dash-stock-pricing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/dash-stock-pricing/dash-stock-pricing.png -------------------------------------------------------------------------------- /dash-stock-pricing/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "python-dash", 6 | "entrypoint": "app" 7 | }, 8 | "python": { 9 | "version": "3.10.6", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.0.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "31a91eebf8335fbe666d282da8f43a37" 19 | }, 20 | "app.py": { 21 | "checksum": "2f03e2d860ca5209e04f6618ce384a28" 22 | }, 23 | "prices.csv": { 24 | "checksum": "90a908060f20dfa7e229e631a42c5081" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /dash-stock-pricing/requirements.txt: -------------------------------------------------------------------------------- 1 | dash>=1.21.0,<=2.5.1 2 | dash_bootstrap_components>=1.0.0 3 | pandas>=0.25.3,<=1.4.2 4 | numpy>=1.18.5,<=1.23.0 5 | werkzeug<2.1.2 -------------------------------------------------------------------------------- /fastapi-stock/.internal.yml: -------------------------------------------------------------------------------- 1 | default: 2 | content: 3 | - name: "Serving Stock Information with FastAPI " 4 | path: "." 5 | description: "An API allows you to turn your models into production services that other tools and teams can use." 6 | tag: 7 | - "Examples|Python|FastAPI" 8 | url: "/python-examples/fastapi/" 9 | image: "fastapi-stock.png" 10 | -------------------------------------------------------------------------------- /fastapi-stock/README.md: -------------------------------------------------------------------------------- 1 | # Serving Stock Information with FastAPI 2 | 3 | ## Deploy 4 | 5 | ``` 6 | rsconnect deploy fastapi . 
-n 7 | ``` 8 | ## Resources 9 | 10 | [Posit Connect User Guide - FastAPI](https://docs.posit.co/connect/user/fastapi/) -------------------------------------------------------------------------------- /fastapi-stock/fastapi-stock.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/fastapi-stock/fastapi-stock.png -------------------------------------------------------------------------------- /fastapi-stock/main.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | from datetime import date 4 | from typing import List 5 | 6 | import numpy as np 7 | import pandas as pd 8 | from fastapi import FastAPI, HTTPException 9 | from pydantic import BaseModel, Field 10 | 11 | prices = pd.read_csv( 12 | os.path.join(os.path.dirname(__file__), "prices.csv"), 13 | index_col=0, 14 | parse_dates=True, 15 | ) 16 | 17 | 18 | def validate_ticker(ticker): 19 | if ticker not in prices["ticker"].unique(): 20 | raise HTTPException(status_code=404, detail="Ticker not found") 21 | 22 | 23 | class Tickers(BaseModel): 24 | tickers: list = Field(title="All available stock tickers") 25 | 26 | 27 | class Stock(BaseModel): 28 | ticker: str = Field(..., title="Ticker of the stock") 29 | price: float = Field(..., title="Latest price of the stock") 30 | volatility: float = Field(..., title="Latest volatility of the stock price") 31 | 32 | 33 | class Price(BaseModel): 34 | date: date 35 | high: float = Field(..., title="High price for this date") 36 | low: float = Field(..., title="Low price for this date") 37 | close: float = Field(..., title="Closing price for this date") 38 | volume: int = Field(..., title="Daily volume for this date") 39 | adjusted: float = Field(..., title="Split-adjusted price for this date") 40 | 41 | 42 | app = FastAPI( 43 | title="Stocks API", 44 | description="The Stocks API provides pricing and volatility data for a " 45 | "limited number of US equities from 2010-2018", 46 | ) 47 | 48 | 49 | @app.get("/stocks", response_model=Tickers) 50 | async def tickers(): 51 | tickers = prices["ticker"].unique().tolist() 52 | return {"tickers": tickers} 53 | 54 | 55 | @app.get("/stocks/{ticker}", response_model=Stock) 56 | async def ticker(ticker: str): 57 | validate_ticker(ticker) 58 | 59 | ticker_prices = prices[prices["ticker"] == ticker] 60 | current_price = ticker_prices["close"].last("1d").round(2) 61 | current_volatility = np.log( 62 | ticker_prices["adjusted"] / ticker_prices["adjusted"].shift(1) 63 | ).var() 64 | 65 | return { 66 | "ticker": ticker, 67 | "price": current_price, 68 | "volatility": current_volatility, 69 | } 70 | 71 | 72 | @app.get("/stocks/{ticker}/history", response_model=List[Price]) 73 | async def history(ticker: str): 74 | validate_ticker(ticker) 75 | 76 | ticker_prices = prices[prices["ticker"] == ticker] 77 | ticker_prices["date"] = ticker_prices.index 78 | return ticker_prices.to_dict("records") 79 | -------------------------------------------------------------------------------- /fastapi-stock/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "python-fastapi", 6 | "entrypoint": "main" 7 | }, 8 | "python": { 9 | "version": "3.10.0", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.0.1", 13 | "package_file": "requirements.txt" 
14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "f978bfe76823199781dace6b2e3a8bc6" 19 | }, 20 | "main.py": { 21 | "checksum": "a8d8820f25be4dc8e2bf51a5ba1690b6" 22 | }, 23 | "prices.csv": { 24 | "checksum": "e0bc27e3dd358c360863807e09079985" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /fastapi-stock/requirements.txt: -------------------------------------------------------------------------------- 1 | annotated-types==0.5.0 2 | anyio==3.7.1 3 | click==8.1.6 4 | exceptiongroup==1.1.2 5 | fastapi==0.100.0 6 | h11==0.14.0 7 | idna==3.4 8 | numpy==1.25.1 9 | pandas==2.0.3 10 | pydantic==2.0.3 11 | pydantic_core==2.3.0 12 | python-dateutil==2.8.2 13 | pytz==2023.3 14 | six==1.16.0 15 | sniffio==1.3.0 16 | starlette==0.27.0 17 | typing_extensions==4.7.1 18 | tzdata==2023.3 19 | uvicorn==0.23.1 20 | -------------------------------------------------------------------------------- /flask-getting-started-rsc/README.md: -------------------------------------------------------------------------------- 1 | ## Getting Started with Flask and Posit Connect 2 | 3 | This application structure and set-up follows the steps outlined in the links below. 4 | 5 | Two routes are defined: 6 | 7 | - `/` renders an HTML template 8 | - `/api/hello` returns a JSON object 9 | 10 | ![](https://github.com/sol-eng/python-examples/blob/master/flask-getting-started-rsc/getting-started-flask.png) 11 | 12 | ## Deploy 13 | 14 | ``` 15 | rsconnect deploy api . -n 16 | ``` 17 | 18 | #### Resources 19 | 20 | - [Posit Connect User Guide - Flask](https://docs.posit.co/connect/user/flask/) 21 | - [Getting Started with Flask and Posit Connect](https://support.rstudio.com/hc/en-us/articles/360044700234) 22 | - [Deploying Flask Applications to Posit Connect with Git and rsconnect-python](https://support.rstudio.com/hc/en-us/articles/360045224233) 23 | - [Using Templates and Static Assets with Flask Applications on Posit Connect](https://support.rstudio.com/hc/en-us/articles/360045279313) 24 | -------------------------------------------------------------------------------- /flask-getting-started-rsc/app.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, render_template, jsonify 2 | 3 | app = Flask(__name__) 4 | 5 | 6 | @app.route("/") 7 | def index(): 8 | return render_template("index.html") 9 | 10 | 11 | @app.route("/api/hello", methods=["GET"]) 12 | def hello(): 13 | return jsonify({"message": "right back at ya!"}) 14 | -------------------------------------------------------------------------------- /flask-getting-started-rsc/getting-started-flask.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-getting-started-rsc/getting-started-flask.png -------------------------------------------------------------------------------- /flask-getting-started-rsc/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "python-api", 6 | "entrypoint": "app" 7 | }, 8 | "python": { 9 | "version": "3.10.0", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.0.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "8d6edf2dedd1ca913165abec8a3beb86" 19 | }, 20 | 
"app.py": { 21 | "checksum": "9dcfbffbfa4f022525af8c47b9a0e976" 22 | }, 23 | "static/api-snippet.png": { 24 | "checksum": "7d75b4ee6c3834edc79e114b8b371c21" 25 | }, 26 | "static/style.css": { 27 | "checksum": "ee9656d5610fbc4ad05d1c1413618341" 28 | }, 29 | "templates/index.html": { 30 | "checksum": "8d670a0d9a9082893c7970a2680b6d95" 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /flask-getting-started-rsc/requirements.txt: -------------------------------------------------------------------------------- 1 | click==8.1.3 2 | Flask==2.2.5 3 | itsdangerous==2.1.2 4 | Jinja2==3.1.2 5 | MarkupSafe==2.1.2 6 | Werkzeug==3.0.1 7 | -------------------------------------------------------------------------------- /flask-getting-started-rsc/static/api-snippet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-getting-started-rsc/static/api-snippet.png -------------------------------------------------------------------------------- /flask-getting-started-rsc/static/style.css: -------------------------------------------------------------------------------- 1 | .sans-serif { 2 | font-family: -apple-system, BlinkMacSystemFont, "avenir next", avenir, 3 | "helvetica neue", helvetica, ubuntu, roboto, noto, "segoe ui", arial, 4 | sans-serif; 5 | font-weight: 300; 6 | color: #333333; 7 | } 8 | 9 | .ba { 10 | border-style: solid; 11 | border-width: 1px; 12 | } 13 | 14 | .br2 { 15 | border-radius: 0.25rem; 16 | } 17 | 18 | .b--black-30 { 19 | border-color: rgba(0, 0, 0, 0.3); 20 | } 21 | 22 | .w-60 { 23 | width: 60%; 24 | } 25 | 26 | .w-100 { 27 | width: 100%; 28 | } 29 | 30 | .center { 31 | margin-right: auto; 32 | margin-left: auto; 33 | } 34 | 35 | .tc { 36 | text-align: center; 37 | } 38 | 39 | .pa { 40 | padding: 1rem; 41 | } 42 | 43 | .mt5 { 44 | margin-top: 4rem; 45 | } 46 | -------------------------------------------------------------------------------- /flask-getting-started-rsc/templates/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | 10 |
<!-- The template's HTML tags were lost when this listing was rendered to plain text; only the page's visible content survives: -->
🎉 Hello, World! This is Flask on Posit Connect!
🚀 Access the API endpoint
{{ url_for('hello') }}
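The template above points visitors at the app's second route, `/api/hello`, which `app.py` defines to return a small JSON payload. A quick way to exercise that route after deployment is sketched below; the base URL is a placeholder for your Connect content URL, and a real deployment may additionally require an API key.

```python
# Sketch of calling the /api/hello route of the deployed Flask app.
# The base URL is a placeholder; substitute your own Connect content URL.
import requests

base_url = "https://connect.example.com/content/getting-started-flask/"  # placeholder
resp = requests.get(base_url + "api/hello")
resp.raise_for_status()
print(resp.json())  # app.py returns {"message": "right back at ya!"}
```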
22 | 23 | 24 | -------------------------------------------------------------------------------- /flask-getting-started-sqlalchemy/README.md: -------------------------------------------------------------------------------- 1 | ## Using Flask-SQLAlchemy with Flask Applications on Posit Connect 2 | 3 | This application structure and set-up follows the steps outlined in 4 | [Using Flask-SQLAlchemy with Flask Applications on Posit Connect](https://support.rstudio.com/hc/en-us/articles/360045926213): 5 | 6 | - Creating a minimal application based on the Flask-SQLAlchemy quickstart guide 7 | - Define the database model to use 8 | - Initialize a SQLite database 9 | - Commit data to the database 10 | - Deploy the application to Posit Connect with rsconnect-python 11 | 12 | And, optionally: 13 | 14 | - Switch to a PostgreSQL database server 15 | - Add `pyscopg2` dependency to the Python environment 16 | - Add the database server connection string as an environment variable in Posit Connect 17 | - Redeploy the application 18 | 19 | --- 20 | ## Setup 21 | 22 | Run the following to create the sqlite database used by the application: 23 | 24 | ``` 25 | python seed_db.py 26 | ``` 27 | 28 | ## Deploy 29 | 30 | ``` 31 | rsconnect deploy api . -n 32 | ``` 33 | 34 | ### Additional Resources 35 | 36 | - [Getting Started with Flask and Posit Connect](https://support.rstudio.com/hc/en-us/articles/360044700234) 37 | - [Deploying Flask Applications to Posit Connect with Git and rsconnect-python](https://support.rstudio.com/hc/en-us/articles/360045224233) 38 | - [Using Templates and Static Assets with Flask Applications on Posit Connect](https://support.rstudio.com/hc/en-us/articles/360045279313) 39 | -------------------------------------------------------------------------------- /flask-getting-started-sqlalchemy/app.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, render_template 2 | from flask_sqlalchemy import SQLAlchemy 3 | import os 4 | 5 | 6 | app = Flask(__name__) 7 | basedir = os.path.abspath(os.path.dirname(__file__)) 8 | 9 | SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URI") or "sqlite:///" + os.path.join( 10 | basedir, "users.db" 11 | ) 12 | 13 | app.config["SQLALCHEMY_DATABASE_URI"] = SQLALCHEMY_DATABASE_URI 14 | 15 | # initialize the extension 16 | db = SQLAlchemy(app) 17 | 18 | 19 | class User(db.Model): 20 | id = db.Column(db.Integer, primary_key=True) 21 | username = db.Column(db.String, unique=True, nullable=False) 22 | email = db.Column(db.String, unique=True, nullable=False) 23 | 24 | 25 | @app.route("/") 26 | def index(): 27 | users = User.query.all() 28 | return render_template("index.html", users=users) 29 | -------------------------------------------------------------------------------- /flask-getting-started-sqlalchemy/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "python-api", 6 | "entrypoint": "app" 7 | }, 8 | "python": { 9 | "version": "3.8.6", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.0.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "4ab5dcdc69a553ba17d4225fd300bc59" 19 | }, 20 | "README.md": { 21 | "checksum": "f5aee02c5cc1cbf9d9181200e8b10bb3" 22 | }, 23 | "app.py": { 24 | "checksum": "4ba3af068512ebddd4abd66230e5b0b7" 25 | }, 26 | "seed_db.py": { 27 | "checksum": 
"eb441affe452a42d2ea5cec9a4f8e0c9" 28 | }, 29 | "static/style.css": { 30 | "checksum": "f49bd4d078ffd375f2deb76e0cdb5cf3" 31 | }, 32 | "templates/index.html": { 33 | "checksum": "f3d3be99cf0a8467bdb2548b90f9076f" 34 | }, 35 | "users.db": { 36 | "checksum": "ff96365c048742756322d209b57db15f" 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /flask-getting-started-sqlalchemy/requirements.txt: -------------------------------------------------------------------------------- 1 | click==8.1.3 2 | Flask==2.2.5 3 | Flask-SQLAlchemy==3.0.3 4 | greenlet==2.0.2 5 | itsdangerous==2.1.2 6 | Jinja2==3.1.2 7 | MarkupSafe==2.1.2 8 | SQLAlchemy==2.0.7 9 | typing_extensions==4.5.0 10 | Werkzeug==3.0.1 11 | -------------------------------------------------------------------------------- /flask-getting-started-sqlalchemy/seed_db.py: -------------------------------------------------------------------------------- 1 | import email 2 | from app import db, User 3 | 4 | db.create_all() 5 | db.session.add(User(username="Flask", email="flask@example.com")) 6 | db.session.commit() 7 | -------------------------------------------------------------------------------- /flask-getting-started-sqlalchemy/static/style.css: -------------------------------------------------------------------------------- 1 | .ba { 2 | border-style: solid; 3 | border-width: 1px; 4 | } 5 | 6 | .pa { 7 | padding: 1rem; 8 | } 9 | -------------------------------------------------------------------------------- /flask-getting-started-sqlalchemy/templates/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | 10 |
11 | {% for user in users %}
12 | Hello, {{ user.username }} from Posit Connect!
13 | {% endfor %}
14 |
15 | 16 | 17 | -------------------------------------------------------------------------------- /flask-getting-started-sqlalchemy/users.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-getting-started-sqlalchemy/users.db -------------------------------------------------------------------------------- /flask-restx/README.md: -------------------------------------------------------------------------------- 1 | # API documentation with flask-restx 2 | 3 | This example uses Flask to make a simple model available for others to use for 4 | prediction. The example uses the `flask-restx` package to automatically generate 5 | the documentation for the model API. 6 | 7 | The script `train.py` creates the model object, and the script `predict.py` 8 | creates the Flask API. 9 | 10 | ## Setup 11 | 12 | ``` 13 | python train.py 14 | ``` 15 | ## Deploy 16 | 17 | ``` 18 | rsconnect deploy api -e predict:app . -n 19 | ``` -------------------------------------------------------------------------------- /flask-restx/app.py: -------------------------------------------------------------------------------- 1 | import json 2 | from sklearn import linear_model 3 | import pickle 4 | from flask import Flask, request 5 | from flask_restx import Api, Resource, fields 6 | 7 | # loads our previously trained model 8 | # this is loaded once per process startup 9 | # keeping subsequent requests fast 10 | m = pickle.load(open("model.p", "rb")) 11 | 12 | # define the flask-restx boilerplate 13 | app = Flask(__name__) 14 | api = Api( 15 | app, version="0.1.0", title="MPG API", description="mtcars predict mpg" 16 | ) 17 | 18 | # define the main subroute for requests 19 | ns = api.namespace("predict", description="predict mpg based on attributes") 20 | 21 | # define the API response 22 | mpg_predict = api.model( 23 | "MPG Prediction", 24 | { 25 | "hp": fields.Integer(required=True, description="Horsepower of new car"), 26 | "cyl": fields.Integer(required=True, description="Number of cylinders in new car"), 27 | "mpg": fields.Integer(description = "Predicted MPG") 28 | }, 29 | ) 30 | 31 | # POST example, that accepts a JSON body that specifies new data 32 | # served at route + namespace, so at /predict 33 | @ns.route("/") 34 | @ns.param("data", "JSON containing hp and cyl {'hp':'200', 'cyl':'4'} ", _in = "body") 35 | class Predict(Resource): 36 | @ns.marshal_with(mpg_predict) 37 | @ns.doc("get mpg for inputs") 38 | def post(self): 39 | json_data = request.get_json(force=True) 40 | hp = int(json_data['hp']) 41 | cyl = int(json_data['cyl']) 42 | mpg = m.predict([[cyl, hp]]) 43 | return {"hp": hp, "cyl": cyl, "mpg": mpg} 44 | 45 | # GET example that accepts a new data point as a query parameter in the URL path 46 | # served at route + namespace, so at /predict/cyl6/ 47 | @ns.route("/cyl6/") 48 | @ns.param("hp", "new hp value") 49 | class Predict(Resource): 50 | @ns.marshal_with(mpg_predict) 51 | @ns.doc("get mpg for 6 cyl vehicle") 52 | def get(self, hp): 53 | cyl = 6 54 | mpg = m.predict([[cyl, hp]]) 55 | return {"hp": hp, "cyl": cyl, "mpg": mpg} 56 | 57 | 58 | -------------------------------------------------------------------------------- /flask-restx/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "python-api", 6 | "entrypoint": "app" 7 | }, 8 | "python": { 
9 | "version": "3.10.0", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.0.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "f8177edc73538e6f3d9da65922a26d3d" 19 | }, 20 | "app.py": { 21 | "checksum": "d1092446f4cf3eca946d2eeae9a7132d" 22 | }, 23 | "model.p": { 24 | "checksum": "1ca86161022cc9e5789945e9036ea943" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /flask-restx/model.p: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-restx/model.p -------------------------------------------------------------------------------- /flask-restx/mtcars.csv: -------------------------------------------------------------------------------- 1 | "","mpg","cyl","disp","hp","drat","wt","qsec","vs","am","gear","carb" 2 | "Mazda RX4",21,6,160,110,3.9,2.62,16.46,0,1,4,4 3 | "Mazda RX4 Wag",21,6,160,110,3.9,2.875,17.02,0,1,4,4 4 | "Datsun 710",22.8,4,108,93,3.85,2.32,18.61,1,1,4,1 5 | "Hornet 4 Drive",21.4,6,258,110,3.08,3.215,19.44,1,0,3,1 6 | "Hornet Sportabout",18.7,8,360,175,3.15,3.44,17.02,0,0,3,2 7 | "Valiant",18.1,6,225,105,2.76,3.46,20.22,1,0,3,1 8 | "Duster 360",14.3,8,360,245,3.21,3.57,15.84,0,0,3,4 9 | "Merc 240D",24.4,4,146.7,62,3.69,3.19,20,1,0,4,2 10 | "Merc 230",22.8,4,140.8,95,3.92,3.15,22.9,1,0,4,2 11 | "Merc 280",19.2,6,167.6,123,3.92,3.44,18.3,1,0,4,4 12 | "Merc 280C",17.8,6,167.6,123,3.92,3.44,18.9,1,0,4,4 13 | "Merc 450SE",16.4,8,275.8,180,3.07,4.07,17.4,0,0,3,3 14 | "Merc 450SL",17.3,8,275.8,180,3.07,3.73,17.6,0,0,3,3 15 | "Merc 450SLC",15.2,8,275.8,180,3.07,3.78,18,0,0,3,3 16 | "Cadillac Fleetwood",10.4,8,472,205,2.93,5.25,17.98,0,0,3,4 17 | "Lincoln Continental",10.4,8,460,215,3,5.424,17.82,0,0,3,4 18 | "Chrysler Imperial",14.7,8,440,230,3.23,5.345,17.42,0,0,3,4 19 | "Fiat 128",32.4,4,78.7,66,4.08,2.2,19.47,1,1,4,1 20 | "Honda Civic",30.4,4,75.7,52,4.93,1.615,18.52,1,1,4,2 21 | "Toyota Corolla",33.9,4,71.1,65,4.22,1.835,19.9,1,1,4,1 22 | "Toyota Corona",21.5,4,120.1,97,3.7,2.465,20.01,1,0,3,1 23 | "Dodge Challenger",15.5,8,318,150,2.76,3.52,16.87,0,0,3,2 24 | "AMC Javelin",15.2,8,304,150,3.15,3.435,17.3,0,0,3,2 25 | "Camaro Z28",13.3,8,350,245,3.73,3.84,15.41,0,0,3,4 26 | "Pontiac Firebird",19.2,8,400,175,3.08,3.845,17.05,0,0,3,2 27 | "Fiat X1-9",27.3,4,79,66,4.08,1.935,18.9,1,1,4,1 28 | "Porsche 914-2",26,4,120.3,91,4.43,2.14,16.7,0,1,5,2 29 | "Lotus Europa",30.4,4,95.1,113,3.77,1.513,16.9,1,1,5,2 30 | "Ford Pantera L",15.8,8,351,264,4.22,3.17,14.5,0,1,5,4 31 | "Ferrari Dino",19.7,6,145,175,3.62,2.77,15.5,0,1,5,6 32 | "Maserati Bora",15,8,301,335,3.54,3.57,14.6,0,1,5,8 33 | "Volvo 142E",21.4,4,121,109,4.11,2.78,18.6,1,1,4,2 34 | -------------------------------------------------------------------------------- /flask-restx/requirements.txt: -------------------------------------------------------------------------------- 1 | aniso8601==9.0.1 2 | attrs==22.2.0 3 | click==8.1.3 4 | Flask==2.2.5 5 | flask-restx==1.1.0 6 | itsdangerous==2.1.2 7 | Jinja2==3.1.2 8 | joblib==1.2.0 9 | jsonschema==4.17.3 10 | MarkupSafe==2.1.2 11 | numpy==1.24.2 12 | pandas==1.5.3 13 | pyrsistent==0.19.3 14 | python-dateutil==2.8.2 15 | pytz==2022.7.1 16 | scikit-learn==1.2.2 17 | scipy==1.10.1 18 | six==1.16.0 19 | threadpoolctl==3.1.0 20 | Werkzeug==3.0.1 21 | 
-------------------------------------------------------------------------------- /flask-restx/train.py: -------------------------------------------------------------------------------- 1 | import pickle as p 2 | import pandas as pd 3 | from sklearn import datasets, linear_model 4 | 5 | # Import CSV of mtcars dataset 6 | data = pd.read_csv("mtcars.csv") 7 | 8 | # train a simple liner model 9 | X = data[["cyl", "hp"]] 10 | y = data["mpg"] 11 | m = linear_model.LinearRegression().fit(X, y) 12 | 13 | # save the model to disk to deploy with our API 14 | p.dump(m, open("model.p", "wb")) 15 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/README.md: -------------------------------------------------------------------------------- 1 | # Serving Sentiment Analysis with spaCy and Flask 2 | 3 | This application exposes a model trained in spaCy via a Flask API. 4 | ## Setup 5 | 6 | ``` 7 | python train.py 8 | ``` 9 | ## Deploy 10 | 11 | ``` 12 | rsconnect deploy api . -n 13 | ``` 14 | 15 | ## Resources 16 | 17 | [Posit Connect User Guide - Flask](https://docs.posit.co/connect/user/flask/) -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/app.py: -------------------------------------------------------------------------------- 1 | import spacy 2 | from flask import Flask, request, jsonify 3 | app = Flask(__name__) 4 | 5 | model_dir = "model" 6 | 7 | @app.route('/') 8 | def predict(): 9 | input = request.args.get("input", "This is a wonderful movie!") 10 | nlp = spacy.load(model_dir) 11 | doc = nlp(input) 12 | result = (input, doc.cats) 13 | return jsonify(result) 14 | 15 | if __name__ == '__main__': 16 | app.run() 17 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "python-api", 6 | "entrypoint": "app" 7 | }, 8 | "python": { 9 | "version": "3.10.0", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.0.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "5516f0c68a7d99f0818f784db41dc1fb" 19 | }, 20 | "app.py": { 21 | "checksum": "26e3261020d1f21464d2441c1a4c98d8" 22 | }, 23 | "model/meta.json": { 24 | "checksum": "dac5074cb7cf0a693e3652287b48c43b" 25 | }, 26 | "model/textcat/cfg": { 27 | "checksum": "5835120c39c78ed403eac787c37e532e" 28 | }, 29 | "model/textcat/model": { 30 | "checksum": "bc3dfa6dd4684917ea2788ea9c6aa144" 31 | }, 32 | "model/tokenizer": { 33 | "checksum": "45838f5fb248489823a625c0bfedbdd8" 34 | }, 35 | "model/vocab/key2row": { 36 | "checksum": "8d39dd7eef115ea6975446ef4082951f" 37 | }, 38 | "model/vocab/lexemes.bin": { 39 | "checksum": "69e670f7fe63515625918eee8e080742" 40 | }, 41 | "model/vocab/lookups.bin": { 42 | "checksum": "0072b6791ad7eebbe5562967d6a22c07" 43 | }, 44 | "model/vocab/strings.json": { 45 | "checksum": "4fb558ca791112a0b6d8dc6c8394fe58" 46 | }, 47 | "model/vocab/vectors": { 48 | "checksum": "63fd11f249a725dee9c2941b339ea037" 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/model/meta.json: -------------------------------------------------------------------------------- 1 | { 2 | "lang":"en", 3 | "name":"model", 4 | "version":"0.0.0", 5 | 
"spacy_version":">=2.3.7", 6 | "description":"", 7 | "author":"", 8 | "email":"", 9 | "url":"", 10 | "license":"", 11 | "spacy_git_version":"cae72e46d", 12 | "vectors":{ 13 | "width":0, 14 | "vectors":0, 15 | "keys":0, 16 | "name":"spacy_pretrained_vectors" 17 | }, 18 | "pipeline":[ 19 | "textcat" 20 | ], 21 | "factories":{ 22 | "textcat":"textcat" 23 | }, 24 | "labels":{ 25 | "textcat":[ 26 | "POSITIVE" 27 | ] 28 | } 29 | } -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/model/textcat/cfg: -------------------------------------------------------------------------------- 1 | { 2 | "labels":[ 3 | "POSITIVE" 4 | ], 5 | "pretrained_vectors":null, 6 | "pretrained_dims":null 7 | } -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/model/textcat/model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-api/model/textcat/model -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/model/tokenizer: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-api/model/tokenizer -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/model/vocab/key2row: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-api/model/vocab/key2row -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/model/vocab/lexemes.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-api/model/vocab/lexemes.bin -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/model/vocab/lookups.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-api/model/vocab/lookups.bin -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/model/vocab/vectors: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-api/model/vocab/vectors -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/requirements.txt: -------------------------------------------------------------------------------- 1 | blis==0.7.9 2 | catalogue==1.0.2 3 | certifi==2023.7.22 4 | charset-normalizer==3.1.0 5 | click==8.1.3 6 | cymem==2.0.7 7 | Flask==2.2.5 8 | idna==3.4 9 | itsdangerous==2.1.2 10 | Jinja2==3.1.2 11 | MarkupSafe==2.1.2 12 | murmurhash==1.0.9 13 | numpy==1.24.2 14 | plac==1.1.3 15 | preshed==3.0.8 16 | requests==2.31.0 17 | spacy==2.3.9 18 | srsly==1.0.6 19 | thinc==7.4.6 20 | tqdm==4.65.0 21 | urllib3==1.26.18 22 | 
wasabi==0.10.1 23 | Werkzeug==3.0.1 24 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-api/train.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals, print_function 2 | import plac 3 | import random 4 | from pathlib import Path 5 | import thinc.extra.datasets 6 | 7 | import spacy 8 | from spacy.util import minibatch, compounding 9 | 10 | def load_data(limit=0, split=0.8): 11 | """Load data from the IMDB dataset.""" 12 | # Partition off part of the train data for evaluation 13 | train_data, _ = thinc.extra.datasets.imdb() 14 | random.shuffle(train_data) 15 | train_data = train_data[-limit:] 16 | texts, labels = zip(*train_data) 17 | cats = [{'POSITIVE': bool(y)} for y in labels] 18 | split = int(len(train_data) * split) 19 | return (texts[:split], cats[:split]), (texts[split:], cats[split:]) 20 | 21 | def evaluate(tokenizer, textcat, texts, cats): 22 | docs = (tokenizer(text) for text in texts) 23 | tp = 0.0 # True positives 24 | fp = 1e-8 # False positives 25 | fn = 1e-8 # False negatives 26 | tn = 0.0 # True negatives 27 | for i, doc in enumerate(textcat.pipe(docs)): 28 | gold = cats[i] 29 | for label, score in doc.cats.items(): 30 | if label not in gold: 31 | continue 32 | if score >= 0.5 and gold[label] >= 0.5: 33 | tp += 1. 34 | elif score >= 0.5 and gold[label] < 0.5: 35 | fp += 1. 36 | elif score < 0.5 and gold[label] < 0.5: 37 | tn += 1 38 | elif score < 0.5 and gold[label] >= 0.5: 39 | fn += 1 40 | precision = tp / (tp + fp) 41 | recall = tp / (tp + fn) 42 | try: 43 | f_score = 2 * (precision * recall) / (precision + recall) 44 | except ZeroDivisionError: 45 | print("Warning! Zero Division Error, setting f_score to 1") 46 | f_score = 1 47 | return {'textcat_p': precision, 'textcat_r': recall, 'textcat_f': f_score} 48 | 49 | nlp = spacy.blank('en') # create blank Language class 50 | print("Created blank 'en' model") 51 | 52 | # add the text classifier to the pipeline if it doesn't exist 53 | # nlp.create_pipe works for built-ins that are registered with spaCy 54 | if 'textcat' not in nlp.pipe_names: 55 | textcat = nlp.create_pipe('textcat') 56 | nlp.add_pipe(textcat, last=True) 57 | # otherwise, get it, so we can add labels to it 58 | else: 59 | textcat = nlp.get_pipe('textcat') 60 | 61 | # add label to text classifier 62 | textcat.add_label('POSITIVE') 63 | 64 | n_texts=1000 65 | # load the IMDB dataset 66 | print("Loading IMDB data...") 67 | (train_texts, train_cats), (dev_texts, dev_cats) = load_data(limit=n_texts) 68 | print("Using {} examples ({} training, {} evaluation)" 69 | .format(n_texts, len(train_texts), len(dev_texts))) 70 | train_data = list(zip(train_texts, 71 | [{'cats': cats} for cats in train_cats])) 72 | 73 | n_iter=20 74 | # get names of other pipes to disable them during training 75 | other_pipes = [pipe for pipe in nlp.pipe_names if pipe != 'textcat'] 76 | with nlp.disable_pipes(*other_pipes): # only train textcat 77 | optimizer = nlp.begin_training() 78 | print("Training the model...") 79 | print('{:^5}\t{:^5}\t{:^5}\t{:^5}'.format('LOSS', 'P', 'R', 'F')) 80 | for i in range(n_iter): 81 | losses = {} 82 | # batch up the examples using spaCy's minibatch 83 | batches = minibatch(train_data, size=compounding(4., 32., 1.001)) 84 | for batch in batches: 85 | texts, annotations = zip(*batch) 86 | nlp.update(texts, annotations, sgd=optimizer, drop=0.2, 87 | losses=losses) 88 | with textcat.model.use_params(optimizer.averages): 89 | 
# evaluate on the dev data split off in load_data() 90 | scores = evaluate(nlp.tokenizer, textcat, dev_texts, dev_cats) 91 | print('{0:.3f}\t{1:.3f}\t{2:.3f}\t{3:.3f}' # print a simple table 92 | .format(losses['textcat'], scores['textcat_p'], 93 | scores['textcat_r'], scores['textcat_f'])) 94 | 95 | test_text_neg = "This movie was an terrible, awful rehash of past movies. I will never watch it again." 96 | doc = nlp(test_text_neg) 97 | print(test_text_neg, "\n", doc.cats) 98 | 99 | test_text_pos = "This great movie was a wonderful remake of the original version. I loved it!" 100 | doc = nlp(test_text_pos) 101 | print(test_text_pos, "\n", doc.cats) 102 | 103 | output_dir = "model" 104 | 105 | if output_dir is not None: 106 | output_dir = Path(output_dir) 107 | if not output_dir.exists(): 108 | output_dir.mkdir() 109 | with nlp.use_params(optimizer.averages): 110 | nlp.to_disk(output_dir) 111 | print("Saved model to directory:", output_dir) 112 | 113 | print("Loading from", output_dir) 114 | nlp2 = spacy.load(output_dir) 115 | doc2 = nlp2(test_text_neg) 116 | print(test_text_neg, "\n", doc2.cats) 117 | doc3 = nlp2(test_text_pos) 118 | print(test_text_pos, "\n", doc3.cats) 119 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/README.md: -------------------------------------------------------------------------------- 1 | # Serving Sentiment Analysis with spaCy and Flask 2 | 3 | This application exposes a model trained in spaCy via a Flask API. 4 | A user interface for the application is also served via Flask. 5 | 6 | ## Setup 7 | 8 | ```bash 9 | python train.py 10 | ``` 11 | ## Deploy 12 | 13 | ``` 14 | rsconnect deploy api . -n 15 | ``` 16 | 17 | ## Resources 18 | 19 | [Posit Connect User Guide - Flask](https://docs.posit.co/connect/user/flask/) -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/app.py: -------------------------------------------------------------------------------- 1 | import spacy 2 | from flask import Flask, jsonify, render_template, request 3 | app = Flask(__name__, 4 | static_url_path='', 5 | static_folder='static', 6 | template_folder='templates') 7 | 8 | model_dir = "model" 9 | 10 | @app.route('/') 11 | def index(): 12 | return render_template("app.html") 13 | 14 | @app.route('/sentiment', methods = ['POST', 'GET']) 15 | def sentiment(): 16 | if request.method == "POST": 17 | input = request.form["input"] 18 | print(input) 19 | nlp = spacy.load(model_dir) 20 | doc = nlp(input) 21 | sentiment = (input, doc.cats) 22 | return render_template("result.html", input = input, sentiment = sentiment) 23 | 24 | if __name__ == '__main__': 25 | app.run() 26 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/model/meta.json: -------------------------------------------------------------------------------- 1 | { 2 | "lang":"en", 3 | "name":"model", 4 | "version":"0.0.0", 5 | "spacy_version":">=2.3.7", 6 | "description":"", 7 | "author":"", 8 | "email":"", 9 | "url":"", 10 | "license":"", 11 | "spacy_git_version":"cae72e46d", 12 | "vectors":{ 13 | "width":0, 14 | "vectors":0, 15 | "keys":0, 16 | "name":"spacy_pretrained_vectors" 17 | }, 18 | "pipeline":[ 19 | "textcat" 20 | ], 21 | "factories":{ 22 | "textcat":"textcat" 23 | }, 24 | "labels":{ 25 | "textcat":[ 26 | "POSITIVE" 27 | ] 28 | } 29 | } -------------------------------------------------------------------------------- 
/flask-sentiment-analysis-app/model/textcat/cfg: -------------------------------------------------------------------------------- 1 | { 2 | "labels":[ 3 | "POSITIVE" 4 | ], 5 | "pretrained_vectors":null, 6 | "pretrained_dims":null 7 | } -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/model/textcat/model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/model/textcat/model -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/model/tokenizer: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/model/tokenizer -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/model/vocab/key2row: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/model/vocab/key2row -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/model/vocab/lexemes.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/model/vocab/lexemes.bin -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/model/vocab/lookups.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/model/vocab/lookups.bin -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/model/vocab/vectors: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/model/vocab/vectors -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/requirements.txt: -------------------------------------------------------------------------------- 1 | blis==0.7.9 2 | catalogue==1.0.2 3 | certifi==2023.7.22 4 | charset-normalizer==3.1.0 5 | click==8.1.3 6 | cymem==2.0.7 7 | Flask==2.2.5 8 | idna==3.4 9 | itsdangerous==2.1.2 10 | Jinja2==3.1.2 11 | MarkupSafe==2.1.2 12 | murmurhash==1.0.9 13 | numpy==1.24.2 14 | plac==1.1.3 15 | preshed==3.0.8 16 | requests==2.31.0 17 | spacy==2.3.9 18 | srsly==1.0.6 19 | thinc==7.4.6 20 | tqdm==4.65.0 21 | urllib3==1.26.18 22 | wasabi==0.10.1 23 | Werkzeug==3.0.1 24 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/css/landing-page.css: -------------------------------------------------------------------------------- 1 | /*! 
2 | * Start Bootstrap - Landing Page v5.0.7 (https://startbootstrap.com/template-overviews/landing-page) 3 | * Copyright 2013-2019 Start Bootstrap 4 | * Licensed under MIT (https://github.com/BlackrockDigital/startbootstrap-landing-page/blob/master/LICENSE) 5 | */ 6 | 7 | body { 8 | font-family: 'Lato', 'Helvetica Neue', Helvetica, Arial, sans-serif; 9 | } 10 | 11 | h1, 12 | h2, 13 | h3, 14 | h4, 15 | h5, 16 | h6 { 17 | font-family: 'Lato', 'Helvetica Neue', Helvetica, Arial, sans-serif; 18 | font-weight: 700; 19 | } 20 | 21 | header.masthead { 22 | position: relative; 23 | background-color: #343a40; 24 | background: url("../img/bg-masthead.jpg") no-repeat center center; 25 | background-size: cover; 26 | padding-top: 8rem; 27 | padding-bottom: 8rem; 28 | } 29 | 30 | header.masthead .overlay { 31 | position: absolute; 32 | background-color: #212529; 33 | height: 100%; 34 | width: 100%; 35 | top: 0; 36 | left: 0; 37 | opacity: 0.3; 38 | } 39 | 40 | header.masthead h1 { 41 | font-size: 2rem; 42 | } 43 | 44 | @media (min-width: 768px) { 45 | header.masthead { 46 | padding-top: 12rem; 47 | padding-bottom: 12rem; 48 | } 49 | header.masthead h1 { 50 | font-size: 3rem; 51 | } 52 | } 53 | 54 | .showcase .showcase-text { 55 | padding: 3rem; 56 | } 57 | 58 | .showcase .showcase-img { 59 | min-height: 30rem; 60 | background-size: cover; 61 | } 62 | 63 | @media (min-width: 768px) { 64 | .showcase .showcase-text { 65 | padding: 7rem; 66 | } 67 | } 68 | 69 | .features-icons { 70 | padding-top: 7rem; 71 | padding-bottom: 7rem; 72 | } 73 | 74 | .features-icons .features-icons-item { 75 | max-width: 20rem; 76 | } 77 | 78 | .features-icons .features-icons-item .features-icons-icon { 79 | height: 7rem; 80 | } 81 | 82 | .features-icons .features-icons-item .features-icons-icon i { 83 | font-size: 4.5rem; 84 | } 85 | 86 | .features-icons .features-icons-item:hover .features-icons-icon i { 87 | font-size: 5rem; 88 | } 89 | 90 | .testimonials { 91 | padding-top: 7rem; 92 | padding-bottom: 7rem; 93 | } 94 | 95 | .testimonials .testimonial-item { 96 | max-width: 18rem; 97 | } 98 | 99 | .testimonials .testimonial-item img { 100 | max-width: 12rem; 101 | box-shadow: 0px 5px 5px 0px #adb5bd; 102 | } 103 | 104 | .call-to-action { 105 | position: relative; 106 | background-color: #343a40; 107 | background: url("../img/bg-masthead.jpg") no-repeat center center; 108 | background-size: cover; 109 | padding-top: 7rem; 110 | padding-bottom: 7rem; 111 | } 112 | 113 | .call-to-action .overlay { 114 | position: absolute; 115 | background-color: #212529; 116 | height: 100%; 117 | width: 100%; 118 | top: 0; 119 | left: 0; 120 | opacity: 0.3; 121 | } 122 | 123 | footer.footer { 124 | padding-top: 4rem; 125 | padding-bottom: 4rem; 126 | } 127 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/css/landing-page.min.css: -------------------------------------------------------------------------------- 1 | /*! 
2 | * Start Bootstrap - Landing Page v5.0.7 (https://startbootstrap.com/template-overviews/landing-page) 3 | * Copyright 2013-2019 Start Bootstrap 4 | * Licensed under MIT (https://github.com/BlackrockDigital/startbootstrap-landing-page/blob/master/LICENSE) 5 | */body{font-family:Lato,'Helvetica Neue',Helvetica,Arial,sans-serif}h1,h2,h3,h4,h5,h6{font-family:Lato,'Helvetica Neue',Helvetica,Arial,sans-serif;font-weight:700}header.masthead{position:relative;background-color:#343a40;background:url(../img/bg-masthead.jpg) no-repeat center center;background-size:cover;padding-top:8rem;padding-bottom:8rem}header.masthead .overlay{position:absolute;background-color:#212529;height:100%;width:100%;top:0;left:0;opacity:.3}header.masthead h1{font-size:2rem}@media (min-width:768px){header.masthead{padding-top:12rem;padding-bottom:12rem}header.masthead h1{font-size:3rem}}.showcase .showcase-text{padding:3rem}.showcase .showcase-img{min-height:30rem;background-size:cover}@media (min-width:768px){.showcase .showcase-text{padding:7rem}}.features-icons{padding-top:7rem;padding-bottom:7rem}.features-icons .features-icons-item{max-width:20rem}.features-icons .features-icons-item .features-icons-icon{height:7rem}.features-icons .features-icons-item .features-icons-icon i{font-size:4.5rem}.features-icons .features-icons-item:hover .features-icons-icon i{font-size:5rem}.testimonials{padding-top:7rem;padding-bottom:7rem}.testimonials .testimonial-item{max-width:18rem}.testimonials .testimonial-item img{max-width:12rem;box-shadow:0 5px 5px 0 #adb5bd}.call-to-action{position:relative;background-color:#343a40;background:url(../img/bg-masthead.jpg) no-repeat center center;background-size:cover;padding-top:7rem;padding-bottom:7rem}.call-to-action .overlay{position:absolute;background-color:#212529;height:100%;width:100%;top:0;left:0;opacity:.3}footer.footer{padding-top:4rem;padding-bottom:4rem} -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/img/bg-masthead.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/img/bg-masthead.jpg -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/bootstrap/css/bootstrap-reboot.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Bootstrap Reboot v4.3.1 (https://getbootstrap.com/) 3 | * Copyright 2011-2019 The Bootstrap Authors 4 | * Copyright 2011-2019 Twitter, Inc. 
5 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 6 | * Forked from Normalize.css, licensed MIT (https://github.com/necolas/normalize.css/blob/master/LICENSE.md) 7 | */ 8 | *, 9 | *::before, 10 | *::after { 11 | box-sizing: border-box; 12 | } 13 | 14 | html { 15 | font-family: sans-serif; 16 | line-height: 1.15; 17 | -webkit-text-size-adjust: 100%; 18 | -webkit-tap-highlight-color: rgba(0, 0, 0, 0); 19 | } 20 | 21 | article, aside, figcaption, figure, footer, header, hgroup, main, nav, section { 22 | display: block; 23 | } 24 | 25 | body { 26 | margin: 0; 27 | font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; 28 | font-size: 1rem; 29 | font-weight: 400; 30 | line-height: 1.5; 31 | color: #212529; 32 | text-align: left; 33 | background-color: #fff; 34 | } 35 | 36 | [tabindex="-1"]:focus { 37 | outline: 0 !important; 38 | } 39 | 40 | hr { 41 | box-sizing: content-box; 42 | height: 0; 43 | overflow: visible; 44 | } 45 | 46 | h1, h2, h3, h4, h5, h6 { 47 | margin-top: 0; 48 | margin-bottom: 0.5rem; 49 | } 50 | 51 | p { 52 | margin-top: 0; 53 | margin-bottom: 1rem; 54 | } 55 | 56 | abbr[title], 57 | abbr[data-original-title] { 58 | text-decoration: underline; 59 | -webkit-text-decoration: underline dotted; 60 | text-decoration: underline dotted; 61 | cursor: help; 62 | border-bottom: 0; 63 | -webkit-text-decoration-skip-ink: none; 64 | text-decoration-skip-ink: none; 65 | } 66 | 67 | address { 68 | margin-bottom: 1rem; 69 | font-style: normal; 70 | line-height: inherit; 71 | } 72 | 73 | ol, 74 | ul, 75 | dl { 76 | margin-top: 0; 77 | margin-bottom: 1rem; 78 | } 79 | 80 | ol ol, 81 | ul ul, 82 | ol ul, 83 | ul ol { 84 | margin-bottom: 0; 85 | } 86 | 87 | dt { 88 | font-weight: 700; 89 | } 90 | 91 | dd { 92 | margin-bottom: .5rem; 93 | margin-left: 0; 94 | } 95 | 96 | blockquote { 97 | margin: 0 0 1rem; 98 | } 99 | 100 | b, 101 | strong { 102 | font-weight: bolder; 103 | } 104 | 105 | small { 106 | font-size: 80%; 107 | } 108 | 109 | sub, 110 | sup { 111 | position: relative; 112 | font-size: 75%; 113 | line-height: 0; 114 | vertical-align: baseline; 115 | } 116 | 117 | sub { 118 | bottom: -.25em; 119 | } 120 | 121 | sup { 122 | top: -.5em; 123 | } 124 | 125 | a { 126 | color: #007bff; 127 | text-decoration: none; 128 | background-color: transparent; 129 | } 130 | 131 | a:hover { 132 | color: #0056b3; 133 | text-decoration: underline; 134 | } 135 | 136 | a:not([href]):not([tabindex]) { 137 | color: inherit; 138 | text-decoration: none; 139 | } 140 | 141 | a:not([href]):not([tabindex]):hover, a:not([href]):not([tabindex]):focus { 142 | color: inherit; 143 | text-decoration: none; 144 | } 145 | 146 | a:not([href]):not([tabindex]):focus { 147 | outline: 0; 148 | } 149 | 150 | pre, 151 | code, 152 | kbd, 153 | samp { 154 | font-family: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; 155 | font-size: 1em; 156 | } 157 | 158 | pre { 159 | margin-top: 0; 160 | margin-bottom: 1rem; 161 | overflow: auto; 162 | } 163 | 164 | figure { 165 | margin: 0 0 1rem; 166 | } 167 | 168 | img { 169 | vertical-align: middle; 170 | border-style: none; 171 | } 172 | 173 | svg { 174 | overflow: hidden; 175 | vertical-align: middle; 176 | } 177 | 178 | table { 179 | border-collapse: collapse; 180 | } 181 | 182 | caption { 183 | padding-top: 0.75rem; 184 | padding-bottom: 0.75rem; 185 | color: #6c757d; 186 | 
text-align: left; 187 | caption-side: bottom; 188 | } 189 | 190 | th { 191 | text-align: inherit; 192 | } 193 | 194 | label { 195 | display: inline-block; 196 | margin-bottom: 0.5rem; 197 | } 198 | 199 | button { 200 | border-radius: 0; 201 | } 202 | 203 | button:focus { 204 | outline: 1px dotted; 205 | outline: 5px auto -webkit-focus-ring-color; 206 | } 207 | 208 | input, 209 | button, 210 | select, 211 | optgroup, 212 | textarea { 213 | margin: 0; 214 | font-family: inherit; 215 | font-size: inherit; 216 | line-height: inherit; 217 | } 218 | 219 | button, 220 | input { 221 | overflow: visible; 222 | } 223 | 224 | button, 225 | select { 226 | text-transform: none; 227 | } 228 | 229 | select { 230 | word-wrap: normal; 231 | } 232 | 233 | button, 234 | [type="button"], 235 | [type="reset"], 236 | [type="submit"] { 237 | -webkit-appearance: button; 238 | } 239 | 240 | button:not(:disabled), 241 | [type="button"]:not(:disabled), 242 | [type="reset"]:not(:disabled), 243 | [type="submit"]:not(:disabled) { 244 | cursor: pointer; 245 | } 246 | 247 | button::-moz-focus-inner, 248 | [type="button"]::-moz-focus-inner, 249 | [type="reset"]::-moz-focus-inner, 250 | [type="submit"]::-moz-focus-inner { 251 | padding: 0; 252 | border-style: none; 253 | } 254 | 255 | input[type="radio"], 256 | input[type="checkbox"] { 257 | box-sizing: border-box; 258 | padding: 0; 259 | } 260 | 261 | input[type="date"], 262 | input[type="time"], 263 | input[type="datetime-local"], 264 | input[type="month"] { 265 | -webkit-appearance: listbox; 266 | } 267 | 268 | textarea { 269 | overflow: auto; 270 | resize: vertical; 271 | } 272 | 273 | fieldset { 274 | min-width: 0; 275 | padding: 0; 276 | margin: 0; 277 | border: 0; 278 | } 279 | 280 | legend { 281 | display: block; 282 | width: 100%; 283 | max-width: 100%; 284 | padding: 0; 285 | margin-bottom: .5rem; 286 | font-size: 1.5rem; 287 | line-height: inherit; 288 | color: inherit; 289 | white-space: normal; 290 | } 291 | 292 | progress { 293 | vertical-align: baseline; 294 | } 295 | 296 | [type="number"]::-webkit-inner-spin-button, 297 | [type="number"]::-webkit-outer-spin-button { 298 | height: auto; 299 | } 300 | 301 | [type="search"] { 302 | outline-offset: -2px; 303 | -webkit-appearance: none; 304 | } 305 | 306 | [type="search"]::-webkit-search-decoration { 307 | -webkit-appearance: none; 308 | } 309 | 310 | ::-webkit-file-upload-button { 311 | font: inherit; 312 | -webkit-appearance: button; 313 | } 314 | 315 | output { 316 | display: inline-block; 317 | } 318 | 319 | summary { 320 | display: list-item; 321 | cursor: pointer; 322 | } 323 | 324 | template { 325 | display: none; 326 | } 327 | 328 | [hidden] { 329 | display: none !important; 330 | } 331 | /*# sourceMappingURL=bootstrap-reboot.css.map */ -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/bootstrap/css/bootstrap-reboot.min.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Bootstrap Reboot v4.3.1 (https://getbootstrap.com/) 3 | * Copyright 2011-2019 The Bootstrap Authors 4 | * Copyright 2011-2019 Twitter, Inc. 
5 | * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) 6 | * Forked from Normalize.css, licensed MIT (https://github.com/necolas/normalize.css/blob/master/LICENSE.md) 7 | */*,::after,::before{box-sizing:border-box}html{font-family:sans-serif;line-height:1.15;-webkit-text-size-adjust:100%;-webkit-tap-highlight-color:transparent}article,aside,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}body{margin:0;font-family:-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";font-size:1rem;font-weight:400;line-height:1.5;color:#212529;text-align:left;background-color:#fff}[tabindex="-1"]:focus{outline:0!important}hr{box-sizing:content-box;height:0;overflow:visible}h1,h2,h3,h4,h5,h6{margin-top:0;margin-bottom:.5rem}p{margin-top:0;margin-bottom:1rem}abbr[data-original-title],abbr[title]{text-decoration:underline;-webkit-text-decoration:underline dotted;text-decoration:underline dotted;cursor:help;border-bottom:0;-webkit-text-decoration-skip-ink:none;text-decoration-skip-ink:none}address{margin-bottom:1rem;font-style:normal;line-height:inherit}dl,ol,ul{margin-top:0;margin-bottom:1rem}ol ol,ol ul,ul ol,ul ul{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 1rem}b,strong{font-weight:bolder}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}a{color:#007bff;text-decoration:none;background-color:transparent}a:hover{color:#0056b3;text-decoration:underline}a:not([href]):not([tabindex]){color:inherit;text-decoration:none}a:not([href]):not([tabindex]):focus,a:not([href]):not([tabindex]):hover{color:inherit;text-decoration:none}a:not([href]):not([tabindex]):focus{outline:0}code,kbd,pre,samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;font-size:1em}pre{margin-top:0;margin-bottom:1rem;overflow:auto}figure{margin:0 0 1rem}img{vertical-align:middle;border-style:none}svg{overflow:hidden;vertical-align:middle}table{border-collapse:collapse}caption{padding-top:.75rem;padding-bottom:.75rem;color:#6c757d;text-align:left;caption-side:bottom}th{text-align:inherit}label{display:inline-block;margin-bottom:.5rem}button{border-radius:0}button:focus{outline:1px dotted;outline:5px auto 
-webkit-focus-ring-color}button,input,optgroup,select,textarea{margin:0;font-family:inherit;font-size:inherit;line-height:inherit}button,input{overflow:visible}button,select{text-transform:none}select{word-wrap:normal}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button}[type=button]:not(:disabled),[type=reset]:not(:disabled),[type=submit]:not(:disabled),button:not(:disabled){cursor:pointer}[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner,button::-moz-focus-inner{padding:0;border-style:none}input[type=checkbox],input[type=radio]{box-sizing:border-box;padding:0}input[type=date],input[type=datetime-local],input[type=month],input[type=time]{-webkit-appearance:listbox}textarea{overflow:auto;resize:vertical}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;max-width:100%;padding:0;margin-bottom:.5rem;font-size:1.5rem;line-height:inherit;color:inherit;white-space:normal}progress{vertical-align:baseline}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{outline-offset:-2px;-webkit-appearance:none}[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{font:inherit;-webkit-appearance:button}output{display:inline-block}summary{display:list-item;cursor:pointer}template{display:none}[hidden]{display:none!important} 8 | /*# sourceMappingURL=bootstrap-reboot.min.css.map */ -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/css/brands.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Font Awesome Free 5.10.2 by @fontawesome - https://fontawesome.com 3 | * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) 4 | */ 5 | @font-face { 6 | font-family: 'Font Awesome 5 Brands'; 7 | font-style: normal; 8 | font-weight: normal; 9 | font-display: auto; 10 | src: url("../webfonts/fa-brands-400.eot"); 11 | src: url("../webfonts/fa-brands-400.eot?#iefix") format("embedded-opentype"), url("../webfonts/fa-brands-400.woff2") format("woff2"), url("../webfonts/fa-brands-400.woff") format("woff"), url("../webfonts/fa-brands-400.ttf") format("truetype"), url("../webfonts/fa-brands-400.svg#fontawesome") format("svg"); } 12 | 13 | .fab { 14 | font-family: 'Font Awesome 5 Brands'; } 15 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/css/brands.min.css: -------------------------------------------------------------------------------- 1 | /*! 
2 | * Font Awesome Free 5.10.2 by @fontawesome - https://fontawesome.com 3 | * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) 4 | */ 5 | @font-face{font-family:"Font Awesome 5 Brands";font-style:normal;font-weight:normal;font-display:auto;src:url(../webfonts/fa-brands-400.eot);src:url(../webfonts/fa-brands-400.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-brands-400.woff2) format("woff2"),url(../webfonts/fa-brands-400.woff) format("woff"),url(../webfonts/fa-brands-400.ttf) format("truetype"),url(../webfonts/fa-brands-400.svg#fontawesome) format("svg")}.fab{font-family:"Font Awesome 5 Brands"} -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/css/regular.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Font Awesome Free 5.10.2 by @fontawesome - https://fontawesome.com 3 | * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) 4 | */ 5 | @font-face { 6 | font-family: 'Font Awesome 5 Free'; 7 | font-style: normal; 8 | font-weight: 400; 9 | font-display: auto; 10 | src: url("../webfonts/fa-regular-400.eot"); 11 | src: url("../webfonts/fa-regular-400.eot?#iefix") format("embedded-opentype"), url("../webfonts/fa-regular-400.woff2") format("woff2"), url("../webfonts/fa-regular-400.woff") format("woff"), url("../webfonts/fa-regular-400.ttf") format("truetype"), url("../webfonts/fa-regular-400.svg#fontawesome") format("svg"); } 12 | 13 | .far { 14 | font-family: 'Font Awesome 5 Free'; 15 | font-weight: 400; } 16 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/css/regular.min.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Font Awesome Free 5.10.2 by @fontawesome - https://fontawesome.com 3 | * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) 4 | */ 5 | @font-face{font-family:"Font Awesome 5 Free";font-style:normal;font-weight:400;font-display:auto;src:url(../webfonts/fa-regular-400.eot);src:url(../webfonts/fa-regular-400.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-regular-400.woff2) format("woff2"),url(../webfonts/fa-regular-400.woff) format("woff"),url(../webfonts/fa-regular-400.ttf) format("truetype"),url(../webfonts/fa-regular-400.svg#fontawesome) format("svg")}.far{font-family:"Font Awesome 5 Free";font-weight:400} -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/css/solid.css: -------------------------------------------------------------------------------- 1 | /*! 
2 | * Font Awesome Free 5.10.2 by @fontawesome - https://fontawesome.com 3 | * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) 4 | */ 5 | @font-face { 6 | font-family: 'Font Awesome 5 Free'; 7 | font-style: normal; 8 | font-weight: 900; 9 | font-display: auto; 10 | src: url("../webfonts/fa-solid-900.eot"); 11 | src: url("../webfonts/fa-solid-900.eot?#iefix") format("embedded-opentype"), url("../webfonts/fa-solid-900.woff2") format("woff2"), url("../webfonts/fa-solid-900.woff") format("woff"), url("../webfonts/fa-solid-900.ttf") format("truetype"), url("../webfonts/fa-solid-900.svg#fontawesome") format("svg"); } 12 | 13 | .fa, 14 | .fas { 15 | font-family: 'Font Awesome 5 Free'; 16 | font-weight: 900; } 17 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/css/solid.min.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Font Awesome Free 5.10.2 by @fontawesome - https://fontawesome.com 3 | * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) 4 | */ 5 | @font-face{font-family:"Font Awesome 5 Free";font-style:normal;font-weight:900;font-display:auto;src:url(../webfonts/fa-solid-900.eot);src:url(../webfonts/fa-solid-900.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-solid-900.woff2) format("woff2"),url(../webfonts/fa-solid-900.woff) format("woff"),url(../webfonts/fa-solid-900.ttf) format("truetype"),url(../webfonts/fa-solid-900.svg#fontawesome) format("svg")}.fa,.fas{font-family:"Font Awesome 5 Free";font-weight:900} -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/css/svg-with-js.css: -------------------------------------------------------------------------------- 1 | /*! 
2 | * Font Awesome Free 5.10.2 by @fontawesome - https://fontawesome.com 3 | * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) 4 | */ 5 | svg:not(:root).svg-inline--fa { 6 | overflow: visible; } 7 | 8 | .svg-inline--fa { 9 | display: inline-block; 10 | font-size: inherit; 11 | height: 1em; 12 | overflow: visible; 13 | vertical-align: -.125em; } 14 | .svg-inline--fa.fa-lg { 15 | vertical-align: -.225em; } 16 | .svg-inline--fa.fa-w-1 { 17 | width: 0.0625em; } 18 | .svg-inline--fa.fa-w-2 { 19 | width: 0.125em; } 20 | .svg-inline--fa.fa-w-3 { 21 | width: 0.1875em; } 22 | .svg-inline--fa.fa-w-4 { 23 | width: 0.25em; } 24 | .svg-inline--fa.fa-w-5 { 25 | width: 0.3125em; } 26 | .svg-inline--fa.fa-w-6 { 27 | width: 0.375em; } 28 | .svg-inline--fa.fa-w-7 { 29 | width: 0.4375em; } 30 | .svg-inline--fa.fa-w-8 { 31 | width: 0.5em; } 32 | .svg-inline--fa.fa-w-9 { 33 | width: 0.5625em; } 34 | .svg-inline--fa.fa-w-10 { 35 | width: 0.625em; } 36 | .svg-inline--fa.fa-w-11 { 37 | width: 0.6875em; } 38 | .svg-inline--fa.fa-w-12 { 39 | width: 0.75em; } 40 | .svg-inline--fa.fa-w-13 { 41 | width: 0.8125em; } 42 | .svg-inline--fa.fa-w-14 { 43 | width: 0.875em; } 44 | .svg-inline--fa.fa-w-15 { 45 | width: 0.9375em; } 46 | .svg-inline--fa.fa-w-16 { 47 | width: 1em; } 48 | .svg-inline--fa.fa-w-17 { 49 | width: 1.0625em; } 50 | .svg-inline--fa.fa-w-18 { 51 | width: 1.125em; } 52 | .svg-inline--fa.fa-w-19 { 53 | width: 1.1875em; } 54 | .svg-inline--fa.fa-w-20 { 55 | width: 1.25em; } 56 | .svg-inline--fa.fa-pull-left { 57 | margin-right: .3em; 58 | width: auto; } 59 | .svg-inline--fa.fa-pull-right { 60 | margin-left: .3em; 61 | width: auto; } 62 | .svg-inline--fa.fa-border { 63 | height: 1.5em; } 64 | .svg-inline--fa.fa-li { 65 | width: 2em; } 66 | .svg-inline--fa.fa-fw { 67 | width: 1.25em; } 68 | 69 | .fa-layers svg.svg-inline--fa { 70 | bottom: 0; 71 | left: 0; 72 | margin: auto; 73 | position: absolute; 74 | right: 0; 75 | top: 0; } 76 | 77 | .fa-layers { 78 | display: inline-block; 79 | height: 1em; 80 | position: relative; 81 | text-align: center; 82 | vertical-align: -.125em; 83 | width: 1em; } 84 | .fa-layers svg.svg-inline--fa { 85 | -webkit-transform-origin: center center; 86 | transform-origin: center center; } 87 | 88 | .fa-layers-text, .fa-layers-counter { 89 | display: inline-block; 90 | position: absolute; 91 | text-align: center; } 92 | 93 | .fa-layers-text { 94 | left: 50%; 95 | top: 50%; 96 | -webkit-transform: translate(-50%, -50%); 97 | transform: translate(-50%, -50%); 98 | -webkit-transform-origin: center center; 99 | transform-origin: center center; } 100 | 101 | .fa-layers-counter { 102 | background-color: #ff253a; 103 | border-radius: 1em; 104 | -webkit-box-sizing: border-box; 105 | box-sizing: border-box; 106 | color: #fff; 107 | height: 1.5em; 108 | line-height: 1; 109 | max-width: 5em; 110 | min-width: 1.5em; 111 | overflow: hidden; 112 | padding: .25em; 113 | right: 0; 114 | text-overflow: ellipsis; 115 | top: 0; 116 | -webkit-transform: scale(0.25); 117 | transform: scale(0.25); 118 | -webkit-transform-origin: top right; 119 | transform-origin: top right; } 120 | 121 | .fa-layers-bottom-right { 122 | bottom: 0; 123 | right: 0; 124 | top: auto; 125 | -webkit-transform: scale(0.25); 126 | transform: scale(0.25); 127 | -webkit-transform-origin: bottom right; 128 | transform-origin: bottom right; } 129 | 130 | .fa-layers-bottom-left { 131 | bottom: 0; 132 | left: 0; 133 | right: auto; 134 | top: auto; 135 | -webkit-transform: 
scale(0.25); 136 | transform: scale(0.25); 137 | -webkit-transform-origin: bottom left; 138 | transform-origin: bottom left; } 139 | 140 | .fa-layers-top-right { 141 | right: 0; 142 | top: 0; 143 | -webkit-transform: scale(0.25); 144 | transform: scale(0.25); 145 | -webkit-transform-origin: top right; 146 | transform-origin: top right; } 147 | 148 | .fa-layers-top-left { 149 | left: 0; 150 | right: auto; 151 | top: 0; 152 | -webkit-transform: scale(0.25); 153 | transform: scale(0.25); 154 | -webkit-transform-origin: top left; 155 | transform-origin: top left; } 156 | 157 | .fa-lg { 158 | font-size: 1.33333em; 159 | line-height: 0.75em; 160 | vertical-align: -.0667em; } 161 | 162 | .fa-xs { 163 | font-size: .75em; } 164 | 165 | .fa-sm { 166 | font-size: .875em; } 167 | 168 | .fa-1x { 169 | font-size: 1em; } 170 | 171 | .fa-2x { 172 | font-size: 2em; } 173 | 174 | .fa-3x { 175 | font-size: 3em; } 176 | 177 | .fa-4x { 178 | font-size: 4em; } 179 | 180 | .fa-5x { 181 | font-size: 5em; } 182 | 183 | .fa-6x { 184 | font-size: 6em; } 185 | 186 | .fa-7x { 187 | font-size: 7em; } 188 | 189 | .fa-8x { 190 | font-size: 8em; } 191 | 192 | .fa-9x { 193 | font-size: 9em; } 194 | 195 | .fa-10x { 196 | font-size: 10em; } 197 | 198 | .fa-fw { 199 | text-align: center; 200 | width: 1.25em; } 201 | 202 | .fa-ul { 203 | list-style-type: none; 204 | margin-left: 2.5em; 205 | padding-left: 0; } 206 | .fa-ul > li { 207 | position: relative; } 208 | 209 | .fa-li { 210 | left: -2em; 211 | position: absolute; 212 | text-align: center; 213 | width: 2em; 214 | line-height: inherit; } 215 | 216 | .fa-border { 217 | border: solid 0.08em #eee; 218 | border-radius: .1em; 219 | padding: .2em .25em .15em; } 220 | 221 | .fa-pull-left { 222 | float: left; } 223 | 224 | .fa-pull-right { 225 | float: right; } 226 | 227 | .fa.fa-pull-left, 228 | .fas.fa-pull-left, 229 | .far.fa-pull-left, 230 | .fal.fa-pull-left, 231 | .fab.fa-pull-left { 232 | margin-right: .3em; } 233 | 234 | .fa.fa-pull-right, 235 | .fas.fa-pull-right, 236 | .far.fa-pull-right, 237 | .fal.fa-pull-right, 238 | .fab.fa-pull-right { 239 | margin-left: .3em; } 240 | 241 | .fa-spin { 242 | -webkit-animation: fa-spin 2s infinite linear; 243 | animation: fa-spin 2s infinite linear; } 244 | 245 | .fa-pulse { 246 | -webkit-animation: fa-spin 1s infinite steps(8); 247 | animation: fa-spin 1s infinite steps(8); } 248 | 249 | @-webkit-keyframes fa-spin { 250 | 0% { 251 | -webkit-transform: rotate(0deg); 252 | transform: rotate(0deg); } 253 | 100% { 254 | -webkit-transform: rotate(360deg); 255 | transform: rotate(360deg); } } 256 | 257 | @keyframes fa-spin { 258 | 0% { 259 | -webkit-transform: rotate(0deg); 260 | transform: rotate(0deg); } 261 | 100% { 262 | -webkit-transform: rotate(360deg); 263 | transform: rotate(360deg); } } 264 | 265 | .fa-rotate-90 { 266 | -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=1)"; 267 | -webkit-transform: rotate(90deg); 268 | transform: rotate(90deg); } 269 | 270 | .fa-rotate-180 { 271 | -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=2)"; 272 | -webkit-transform: rotate(180deg); 273 | transform: rotate(180deg); } 274 | 275 | .fa-rotate-270 { 276 | -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=3)"; 277 | -webkit-transform: rotate(270deg); 278 | transform: rotate(270deg); } 279 | 280 | .fa-flip-horizontal { 281 | -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)"; 282 | -webkit-transform: scale(-1, 1); 283 | transform: scale(-1, 1); } 284 | 285 | 
.fa-flip-vertical { 286 | -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)"; 287 | -webkit-transform: scale(1, -1); 288 | transform: scale(1, -1); } 289 | 290 | .fa-flip-both, .fa-flip-horizontal.fa-flip-vertical { 291 | -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)"; 292 | -webkit-transform: scale(-1, -1); 293 | transform: scale(-1, -1); } 294 | 295 | :root .fa-rotate-90, 296 | :root .fa-rotate-180, 297 | :root .fa-rotate-270, 298 | :root .fa-flip-horizontal, 299 | :root .fa-flip-vertical, 300 | :root .fa-flip-both { 301 | -webkit-filter: none; 302 | filter: none; } 303 | 304 | .fa-stack { 305 | display: inline-block; 306 | height: 2em; 307 | position: relative; 308 | width: 2.5em; } 309 | 310 | .fa-stack-1x, 311 | .fa-stack-2x { 312 | bottom: 0; 313 | left: 0; 314 | margin: auto; 315 | position: absolute; 316 | right: 0; 317 | top: 0; } 318 | 319 | .svg-inline--fa.fa-stack-1x { 320 | height: 1em; 321 | width: 1.25em; } 322 | 323 | .svg-inline--fa.fa-stack-2x { 324 | height: 2em; 325 | width: 2.5em; } 326 | 327 | .fa-inverse { 328 | color: #fff; } 329 | 330 | .sr-only { 331 | border: 0; 332 | clip: rect(0, 0, 0, 0); 333 | height: 1px; 334 | margin: -1px; 335 | overflow: hidden; 336 | padding: 0; 337 | position: absolute; 338 | width: 1px; } 339 | 340 | .sr-only-focusable:active, .sr-only-focusable:focus { 341 | clip: auto; 342 | height: auto; 343 | margin: 0; 344 | overflow: visible; 345 | position: static; 346 | width: auto; } 347 | 348 | .svg-inline--fa .fa-primary { 349 | fill: var(--fa-primary-color, currentColor); 350 | opacity: 1; 351 | opacity: var(--fa-primary-opacity, 1); } 352 | 353 | .svg-inline--fa .fa-secondary { 354 | fill: var(--fa-secondary-color, currentColor); 355 | opacity: 0.4; 356 | opacity: var(--fa-secondary-opacity, 0.4); } 357 | 358 | .svg-inline--fa.fa-swap-opacity .fa-primary { 359 | opacity: 0.4; 360 | opacity: var(--fa-secondary-opacity, 0.4); } 361 | 362 | .svg-inline--fa.fa-swap-opacity .fa-secondary { 363 | opacity: 1; 364 | opacity: var(--fa-primary-opacity, 1); } 365 | 366 | .svg-inline--fa mask .fa-primary, 367 | .svg-inline--fa mask .fa-secondary { 368 | fill: black; } 369 | 370 | .fad.fa-inverse { 371 | color: #fff; } 372 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/css/svg-with-js.min.css: -------------------------------------------------------------------------------- 1 | /*! 
2 | * Font Awesome Free 5.10.2 by @fontawesome - https://fontawesome.com 3 | * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) 4 | */ 5 | .svg-inline--fa,svg:not(:root).svg-inline--fa{overflow:visible}.svg-inline--fa{display:inline-block;font-size:inherit;height:1em;vertical-align:-.125em}.svg-inline--fa.fa-lg{vertical-align:-.225em}.svg-inline--fa.fa-w-1{width:.0625em}.svg-inline--fa.fa-w-2{width:.125em}.svg-inline--fa.fa-w-3{width:.1875em}.svg-inline--fa.fa-w-4{width:.25em}.svg-inline--fa.fa-w-5{width:.3125em}.svg-inline--fa.fa-w-6{width:.375em}.svg-inline--fa.fa-w-7{width:.4375em}.svg-inline--fa.fa-w-8{width:.5em}.svg-inline--fa.fa-w-9{width:.5625em}.svg-inline--fa.fa-w-10{width:.625em}.svg-inline--fa.fa-w-11{width:.6875em}.svg-inline--fa.fa-w-12{width:.75em}.svg-inline--fa.fa-w-13{width:.8125em}.svg-inline--fa.fa-w-14{width:.875em}.svg-inline--fa.fa-w-15{width:.9375em}.svg-inline--fa.fa-w-16{width:1em}.svg-inline--fa.fa-w-17{width:1.0625em}.svg-inline--fa.fa-w-18{width:1.125em}.svg-inline--fa.fa-w-19{width:1.1875em}.svg-inline--fa.fa-w-20{width:1.25em}.svg-inline--fa.fa-pull-left{margin-right:.3em;width:auto}.svg-inline--fa.fa-pull-right{margin-left:.3em;width:auto}.svg-inline--fa.fa-border{height:1.5em}.svg-inline--fa.fa-li{width:2em}.svg-inline--fa.fa-fw{width:1.25em}.fa-layers svg.svg-inline--fa{bottom:0;left:0;margin:auto;position:absolute;right:0;top:0}.fa-layers{display:inline-block;height:1em;position:relative;text-align:center;vertical-align:-.125em;width:1em}.fa-layers svg.svg-inline--fa{-webkit-transform-origin:center center;transform-origin:center center}.fa-layers-counter,.fa-layers-text{display:inline-block;position:absolute;text-align:center}.fa-layers-text{left:50%;top:50%;-webkit-transform:translate(-50%,-50%);transform:translate(-50%,-50%);-webkit-transform-origin:center center;transform-origin:center center}.fa-layers-counter{background-color:#ff253a;border-radius:1em;-webkit-box-sizing:border-box;box-sizing:border-box;color:#fff;height:1.5em;line-height:1;max-width:5em;min-width:1.5em;overflow:hidden;padding:.25em;right:0;text-overflow:ellipsis;top:0;-webkit-transform:scale(.25);transform:scale(.25);-webkit-transform-origin:top right;transform-origin:top right}.fa-layers-bottom-right{bottom:0;right:0;top:auto;-webkit-transform:scale(.25);transform:scale(.25);-webkit-transform-origin:bottom right;transform-origin:bottom right}.fa-layers-bottom-left{bottom:0;left:0;right:auto;top:auto;-webkit-transform:scale(.25);transform:scale(.25);-webkit-transform-origin:bottom left;transform-origin:bottom left}.fa-layers-top-right{right:0;top:0;-webkit-transform:scale(.25);transform:scale(.25);-webkit-transform-origin:top right;transform-origin:top right}.fa-layers-top-left{left:0;right:auto;top:0;-webkit-transform:scale(.25);transform:scale(.25);-webkit-transform-origin:top left;transform-origin:top left}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-.0667em}.fa-xs{font-size:.75em}.fa-sm{font-size:.875em}.fa-1x{font-size:1em}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-6x{font-size:6em}.fa-7x{font-size:7em}.fa-8x{font-size:8em}.fa-9x{font-size:9em}.fa-10x{font-size:10em}.fa-fw{text-align:center;width:1.25em}.fa-ul{list-style-type:none;margin-left:2.5em;padding-left:0}.fa-ul>li{position:relative}.fa-li{left:-2em;position:absolute;text-align:center;width:2em;line-height:inherit}.fa-border{border:.08em solid #eee;border-radius:.1em;padding:.2em .25em 
.15em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa.fa-pull-left,.fab.fa-pull-left,.fal.fa-pull-left,.far.fa-pull-left,.fas.fa-pull-left{margin-right:.3em}.fa.fa-pull-right,.fab.fa-pull-right,.fal.fa-pull-right,.far.fa-pull-right,.fas.fa-pull-right{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(1turn);transform:rotate(1turn)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(1turn);transform:rotate(1turn)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-webkit-transform:scaleY(-1);transform:scaleY(-1)}.fa-flip-both,.fa-flip-horizontal.fa-flip-vertical,.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)"}.fa-flip-both,.fa-flip-horizontal.fa-flip-vertical{-webkit-transform:scale(-1);transform:scale(-1)}:root .fa-flip-both,:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270{-webkit-filter:none;filter:none}.fa-stack{display:inline-block;height:2em;position:relative;width:2.5em}.fa-stack-1x,.fa-stack-2x{bottom:0;left:0;margin:auto;position:absolute;right:0;top:0}.svg-inline--fa.fa-stack-1x{height:1em;width:1.25em}.svg-inline--fa.fa-stack-2x{height:2em;width:2.5em}.fa-inverse{color:#fff}.sr-only{border:0;clip:rect(0,0,0,0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.sr-only-focusable:active,.sr-only-focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.svg-inline--fa .fa-primary{fill:var(--fa-primary-color,currentColor);opacity:1;opacity:var(--fa-primary-opacity,1)}.svg-inline--fa .fa-secondary{fill:var(--fa-secondary-color,currentColor)}.svg-inline--fa .fa-secondary,.svg-inline--fa.fa-swap-opacity .fa-primary{opacity:.4;opacity:var(--fa-secondary-opacity,.4)}.svg-inline--fa.fa-swap-opacity .fa-secondary{opacity:1;opacity:var(--fa-primary-opacity,1)}.svg-inline--fa mask .fa-primary,.svg-inline--fa mask .fa-secondary{fill:#000}.fad.fa-inverse{color:#fff} -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-brands-400.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-brands-400.eot -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-brands-400.ttf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-brands-400.ttf -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-brands-400.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-brands-400.woff -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-brands-400.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-brands-400.woff2 -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-regular-400.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-regular-400.eot -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-regular-400.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-regular-400.ttf -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-regular-400.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-regular-400.woff -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-regular-400.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-regular-400.woff2 -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-solid-900.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-solid-900.eot -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-solid-900.ttf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-solid-900.ttf -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-solid-900.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-solid-900.woff -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-solid-900.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/fontawesome-free/webfonts/fa-solid-900.woff2 -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/simple-line-icons/fonts/Simple-Line-Icons.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/simple-line-icons/fonts/Simple-Line-Icons.eot -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/simple-line-icons/fonts/Simple-Line-Icons.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/simple-line-icons/fonts/Simple-Line-Icons.ttf -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/simple-line-icons/fonts/Simple-Line-Icons.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/simple-line-icons/fonts/Simple-Line-Icons.woff -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/static/vendor/simple-line-icons/fonts/Simple-Line-Icons.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/flask-sentiment-analysis-app/static/vendor/simple-line-icons/fonts/Simple-Line-Icons.woff2 -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/templates/app.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Sentiment Analysis with Posit Connect 12 | 13 | 14 | 15 | 16 | 17 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 |
[app.html lines 27-77: the HTML markup was lost during extraction; only the text content survives — the masthead heading "Sentiment Analysis with spaCy and Flask", the tagline "Powered by Posit Connect", and the footer "Copyright © 2020 RStudio, PBC. All Rights Reserved."]
78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/templates/result.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Sentiment Analysis with Posit Connect 12 | 13 | 14 | 15 | 16 | 17 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 |
[result.html lines 27-82: the HTML markup was likewise lost during extraction; the surviving text is the heading "Sentiment Analysis with spaCy and Flask", the tagline "Powered by Posit Connect", the result block "Input text: {{ sentiment[0] }}" followed by "{% for key, value in sentiment[1].items() %} Sentiment score: {{ '%0.4f'| format(value|float) }} {% endfor %}", and the footer "Copyright © 2020 RStudio, PBC. All Rights Reserved."]
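From the surviving Jinja expressions the shape of the `sentiment` value passed to result.html can be inferred: item 0 is the raw input text and item 1 maps category labels to scores. A minimal, hypothetical rendering call (the route and values below are illustrative assumptions, not taken from the project's app.py) might look like:

```python
from flask import Flask, render_template

app = Flask(__name__)

@app.route("/example")
def example():
    # sentiment[0] is the input text; sentiment[1] maps labels to scores,
    # matching the template's `sentiment[0]` / `sentiment[1].items()` usage.
    sentiment = ("This great movie was a wonderful remake!", {"POSITIVE": 0.9731})
    return render_template("result.html", sentiment=sentiment)
```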
83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | -------------------------------------------------------------------------------- /flask-sentiment-analysis-app/train.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals, print_function 2 | import plac 3 | import random 4 | from pathlib import Path 5 | import thinc.extra.datasets 6 | 7 | import spacy 8 | from spacy.util import minibatch, compounding 9 | 10 | def load_data(limit=0, split=0.8): 11 | """Load data from the IMDB dataset.""" 12 | # Partition off part of the train data for evaluation 13 | train_data, _ = thinc.extra.datasets.imdb() 14 | random.shuffle(train_data) 15 | train_data = train_data[-limit:] 16 | texts, labels = zip(*train_data) 17 | cats = [{'POSITIVE': bool(y)} for y in labels] 18 | split = int(len(train_data) * split) 19 | return (texts[:split], cats[:split]), (texts[split:], cats[split:]) 20 | 21 | def evaluate(tokenizer, textcat, texts, cats): 22 | docs = (tokenizer(text) for text in texts) 23 | tp = 0.0 # True positives 24 | fp = 1e-8 # False positives 25 | fn = 1e-8 # False negatives 26 | tn = 0.0 # True negatives 27 | for i, doc in enumerate(textcat.pipe(docs)): 28 | gold = cats[i] 29 | for label, score in doc.cats.items(): 30 | if label not in gold: 31 | continue 32 | if score >= 0.5 and gold[label] >= 0.5: 33 | tp += 1. 34 | elif score >= 0.5 and gold[label] < 0.5: 35 | fp += 1. 36 | elif score < 0.5 and gold[label] < 0.5: 37 | tn += 1 38 | elif score < 0.5 and gold[label] >= 0.5: 39 | fn += 1 40 | precision = tp / (tp + fp) 41 | recall = tp / (tp + fn) 42 | try: 43 | f_score = 2 * (precision * recall) / (precision + recall) 44 | except ZeroDivisionError: 45 | print("Warning! Zero Division Error, setting f_score to 1") 46 | f_score = 1 47 | return {'textcat_p': precision, 'textcat_r': recall, 'textcat_f': f_score} 48 | 49 | nlp = spacy.blank('en') # create blank Language class 50 | print("Created blank 'en' model") 51 | 52 | # add the text classifier to the pipeline if it doesn't exist 53 | # nlp.create_pipe works for built-ins that are registered with spaCy 54 | if 'textcat' not in nlp.pipe_names: 55 | textcat = nlp.create_pipe('textcat') 56 | nlp.add_pipe(textcat, last=True) 57 | # otherwise, get it, so we can add labels to it 58 | else: 59 | textcat = nlp.get_pipe('textcat') 60 | 61 | # add label to text classifier 62 | textcat.add_label('POSITIVE') 63 | 64 | n_texts=1000 65 | # load the IMDB dataset 66 | print("Loading IMDB data...") 67 | (train_texts, train_cats), (dev_texts, dev_cats) = load_data(limit=n_texts) 68 | print("Using {} examples ({} training, {} evaluation)" 69 | .format(n_texts, len(train_texts), len(dev_texts))) 70 | train_data = list(zip(train_texts, 71 | [{'cats': cats} for cats in train_cats])) 72 | 73 | n_iter=20 74 | # get names of other pipes to disable them during training 75 | other_pipes = [pipe for pipe in nlp.pipe_names if pipe != 'textcat'] 76 | with nlp.disable_pipes(*other_pipes): # only train textcat 77 | optimizer = nlp.begin_training() 78 | print("Training the model...") 79 | print('{:^5}\t{:^5}\t{:^5}\t{:^5}'.format('LOSS', 'P', 'R', 'F')) 80 | for i in range(n_iter): 81 | losses = {} 82 | # batch up the examples using spaCy's minibatch 83 | batches = minibatch(train_data, size=compounding(4., 32., 1.001)) 84 | for batch in batches: 85 | texts, annotations = zip(*batch) 86 | nlp.update(texts, annotations, sgd=optimizer, drop=0.2, 87 | losses=losses) 88 | with textcat.model.use_params(optimizer.averages): 89 | 
# evaluate on the dev data split off in load_data() 90 | scores = evaluate(nlp.tokenizer, textcat, dev_texts, dev_cats) 91 | print('{0:.3f}\t{1:.3f}\t{2:.3f}\t{3:.3f}' # print a simple table 92 | .format(losses['textcat'], scores['textcat_p'], 93 | scores['textcat_r'], scores['textcat_f'])) 94 | 95 | test_text_neg = "This movie was a terrible, awful rehash of past movies. I will never watch it again." 96 | doc = nlp(test_text_neg) 97 | print(test_text_neg, "\n", doc.cats) 98 | 99 | test_text_pos = "This great movie was a wonderful remake of the original version. I loved it!" 100 | doc = nlp(test_text_pos) 101 | print(test_text_pos, "\n", doc.cats) 102 | 103 | output_dir = "model" 104 | 105 | if output_dir is not None: 106 | output_dir = Path(output_dir) 107 | if not output_dir.exists(): 108 | output_dir.mkdir() 109 | with nlp.use_params(optimizer.averages): 110 | nlp.to_disk(output_dir) 111 | print("Saved model to directory:", output_dir) 112 | 113 | print("Loading from", output_dir) 114 | nlp2 = spacy.load(output_dir) 115 | doc2 = nlp2(test_text_neg) 116 | print(test_text_neg, "\n", doc2.cats) 117 | doc3 = nlp2(test_text_pos) 118 | print(test_text_pos, "\n", doc3.cats) 119 | -------------------------------------------------------------------------------- /jupyter-interactive-visualization/README.md: -------------------------------------------------------------------------------- 1 | # Interactive Data Visualization with Jupyter Notebooks 2 | 3 | ## Deploy 4 | 5 | ``` 6 | rsconnect deploy notebook jupyter-interactive-visualization.ipynb -n 7 | ``` 8 | 9 | ## Resources 10 | 11 | [Posit Connect User Guide - Jupyter](https://docs.posit.co/connect/user/jupyter-notebook/) -------------------------------------------------------------------------------- /jupyter-interactive-visualization/jupyter-interactive-visualization.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Interactive Python Visualization Libraries" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import numpy as np\n", 17 | "import plotly.offline as py\n", 18 | "import plotly.figure_factory as ff\n", 19 | "from bokeh.models import HoverTool, WheelZoomTool\n", 20 | "from bokeh.plotting import figure\n", 21 | "from bokeh.io import show, output_notebook\n", 22 | "output_notebook()" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "metadata": {}, 28 | "source": [ 29 | "### Plotly" 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": null, 35 | "metadata": {}, 36 | "outputs": [], 37 | "source": [ 38 | "py.init_notebook_mode()\n", 39 | "t = np.linspace(-1, 1.2, 2000)\n", 40 | "x = (t**3) + (0.3 * np.random.randn(2000))\n", 41 | "y = (t**6) + (0.3 * np.random.randn(2000))\n", 42 | "\n", 43 | "colorscale = ['#7A4579', '#D56073', 'rgb(236,158,105)', (1, 1, 0.2), (0.98,0.98,0.98)]\n", 44 | "\n", 45 | "fig = ff.create_2d_density(\n", 46 | " x, y, colorscale=colorscale,\n", 47 | " hist_color='rgb(255, 237, 222)', point_size=3\n", 48 | ")\n", 49 | "\n", 50 | "py.iplot(fig, filename='histogram_subplots')" 51 | ] 52 | }, 53 | { 54 | "cell_type": "markdown", 55 | "metadata": {}, 56 | "source": [ 57 | "### Bokeh" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": null, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [ 66 | "n = 500\n", 67 | "x = 2 + 
2*np.random.standard_normal(n)\n", 68 | "y = 2 + 2*np.random.standard_normal(n)\n", 69 | "\n", 70 | "p = figure(title=\"Hexbin for 500 points\", match_aspect=True,\n", 71 | " tools=\"wheel_zoom,pan,reset\", background_fill_color='#440154')\n", 72 | "p.grid.visible = False\n", 73 | "\n", 74 | "r, bins = p.hexbin(x, y, size=0.5, hover_color=\"pink\", hover_alpha=0.8)\n", 75 | "\n", 76 | "p.circle(x, y, color=\"white\", size=1)\n", 77 | "\n", 78 | "p.add_tools(HoverTool(\n", 79 | " tooltips=[(\"count\", \"@c\"), (\"(q,r)\", \"(@q, @r)\")],\n", 80 | " mode=\"mouse\", point_policy=\"follow_mouse\", renderers=[r]\n", 81 | "))\n", 82 | "\n", 83 | "p.toolbar.active_scroll = p.select_one(WheelZoomTool)\n", 84 | "\n", 85 | "show(p)" 86 | ] 87 | } 88 | ], 89 | "metadata": { 90 | "interpreter": { 91 | "hash": "dc96255f0fa222d90f1ebb5772b5f11b98f04048a39992a2e32a0420b667e6f9" 92 | }, 93 | "kernelspec": { 94 | "display_name": "Python 3.8.6 64-bit ('3.8.6': pyenv)", 95 | "name": "python3" 96 | }, 97 | "language_info": { 98 | "codemirror_mode": { 99 | "name": "ipython", 100 | "version": 3 101 | }, 102 | "file_extension": ".py", 103 | "mimetype": "text/x-python", 104 | "name": "python", 105 | "nbconvert_exporter": "python", 106 | "pygments_lexer": "ipython3", 107 | "version": "3.8.6" 108 | } 109 | }, 110 | "nbformat": 4, 111 | "nbformat_minor": 2 112 | } 113 | -------------------------------------------------------------------------------- /jupyter-interactive-visualization/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "jupyter-static", 6 | "entrypoint": "jupyter-interactive-visualization.ipynb" 7 | }, 8 | "python": { 9 | "version": "3.10.12", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.2.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "jupyter-interactive-visualization.ipynb": { 18 | "checksum": "20b7f819b6b3f4fff5df28e05d6dd924" 19 | }, 20 | "requirements.txt": { 21 | "checksum": "d59c8f2d87dd7d7a4633b6104332aec4" 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /jupyter-interactive-visualization/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | plotly 3 | bokeh 4 | notebook -------------------------------------------------------------------------------- /jupyter-voila/.internal.yml: -------------------------------------------------------------------------------- 1 | default: 2 | content: 3 | - name: "Voila Dashboards" 4 | path: "." 5 | description: "Turn your Jupyter Notebooks into dashboards with Voilà." 6 | tag: 7 | - "Examples|Python|Voila" 8 | url: "/python-examples/voila/" 9 | image: "voila.png" 10 | -------------------------------------------------------------------------------- /jupyter-voila/README.md: -------------------------------------------------------------------------------- 1 | # Interactive Notebooks with Voilà 2 | 3 | Voilà allows you to convert a Jupyter Notebook into an interactive dashboard. 4 | 5 | 6 | ## Notebooks 7 | 8 | - example visualizations using [bqplot](./bqplot.ipynb) and [ipyvolume](./ipyvolume.ipynb) 9 | - a brief introduction to [secure hashes](./hash.ipynb) 10 | 11 | ## Deploy 12 | 13 | ``` 14 | rsconnect deploy voila . 
--multi-notebook -n 15 | ``` 16 | 17 | ## Resources 18 | 19 | - [Posit Connect User Guide - Voilà](https://docs.posit.co/connect/user/publishing-cli-notebook/#interactive-voila-deployment) 20 | -------------------------------------------------------------------------------- /jupyter-voila/bqplot.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# So easy, *voilà*!\n", 8 | "\n", 9 | "In this example notebook, we demonstrate how Voilà can render custom Jupyter widgets such as [bqplot](https://github.com/bloomberg/bqplot). " 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "import warnings\n", 19 | "warnings.filterwarnings('ignore')" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": null, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "import numpy as np\n", 29 | "from bqplot import pyplot as plt\n", 30 | "\n", 31 | "plt.figure(1, title='Line Chart')\n", 32 | "np.random.seed(0)\n", 33 | "n = 200\n", 34 | "x = np.linspace(0.0, 10.0, n)\n", 35 | "y = np.cumsum(np.random.randn(n))\n", 36 | "plt.plot(x, y)\n", 37 | "plt.show()" 38 | ] 39 | } 40 | ], 41 | "metadata": { 42 | "kernelspec": { 43 | "display_name": "Python 3", 44 | "language": "python", 45 | "name": "python3" 46 | }, 47 | "language_info": { 48 | "codemirror_mode": { 49 | "name": "ipython", 50 | "version": 3 51 | }, 52 | "file_extension": ".py", 53 | "mimetype": "text/x-python", 54 | "name": "python", 55 | "nbconvert_exporter": "python", 56 | "pygments_lexer": "ipython3", 57 | "version": "3.7.3" 58 | } 59 | }, 60 | "nbformat": 4, 61 | "nbformat_minor": 2 62 | } 63 | -------------------------------------------------------------------------------- /jupyter-voila/hash.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import ipywidgets as widgets\n", 10 | "import hashlib, glob\n", 11 | "from IPython.display import display" 12 | ] 13 | }, 14 | { 15 | "attachments": {}, 16 | "cell_type": "markdown", 17 | "metadata": {}, 18 | "source": [ 19 | "# hash\n", 20 | "\n", 21 | "How can you check that a copy of a file you send to someone isn't accidentally modified in transit?\n", 22 | "\n", 23 | "[Secure hashes](https://en.wikipedia.org/wiki/Cryptographic_hash_function) are one way to perform such a check.\n", 24 | "\n", 25 | "1. Choose a hashing algorithm.\n", 26 | "2. Upload a small file to the dashboard.\n", 27 | "3. Generate a hash.\n", 28 | "4. [Send](https://wormhole.app/) the file to a friend, and have them repeat steps 1, 2, and 3. (Email works too.)\n", 29 | "5. Compare hashes!" 
30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": null, 35 | "metadata": {}, 36 | "outputs": [], 37 | "source": [ 38 | "algo = widgets.ToggleButtons(\n", 39 | " options=[\"md5\", \"sha1\", \"sha256\"],\n", 40 | " description=\"Select a hashing algorithm:\"\n", 41 | ")\n", 42 | "\n", 43 | "uploader = widgets.FileUpload()\n", 44 | "select_file = widgets.Select(options=\"\", description =\"Select a file to hash:\")\n", 45 | "files = glob.glob(\"\")\n", 46 | "current_file = None\n", 47 | "out = widgets.Output()\n", 48 | "button = widgets.Button(description = \"Generate file hash:\")" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": null, 54 | "metadata": {}, 55 | "outputs": [], 56 | "source": [ 57 | "# adapted from\n", 58 | "# https://stackoverflow.com/a/44873382\n", 59 | "# in python 3.11+ you can use hashlib.file_hash() instead \n", 60 | "def shasum(filename, digest):\n", 61 | " h = hashlib.new(digest)\n", 62 | " b = bytearray(128*1024)\n", 63 | " mv = memoryview(b)\n", 64 | " with open(filename, 'rb', buffering=0) as f:\n", 65 | " for n in iter(lambda : f.readinto(mv), 0):\n", 66 | " h.update(mv[:n])\n", 67 | " print(f\"{digest}: {h.hexdigest()}\")\n", 68 | " return f\"{digest}: {h.hexdigest()}\"" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "# adpated from\n", 78 | "# https://github.com/fomightez/3Dscatter_plot-binder\n", 79 | "@out.capture(clear_output=True, wait=True)\n", 80 | "def on_button_clicked(b):\n", 81 | " global uploader, algo, select_file, current_file\n", 82 | " current_file = select_file.value\n", 83 | " filehash = shasum(current_file, algo.value)\n", 84 | " vbox.children = [algo, uploader, select_file, button, out]\n", 85 | "\n", 86 | "def on_file_upload(change):\n", 87 | " global uploader, select_file, current_file\n", 88 | " for details_dict in change[\"new\"]:\n", 89 | " files.append(details_dict[\"name\"])\n", 90 | "\n", 91 | " with open(details_dict[\"name\"], \"wb\") as f:\n", 92 | " f.write(details_dict[\"content\"])\n", 93 | " \n", 94 | " current_file = select_file.value\n", 95 | " select_file.options = tuple(files)\n", 96 | " select_file.value\n", 97 | "\n", 98 | "button.on_click(on_button_clicked)\n", 99 | "vbox = widgets.VBox([algo, uploader, select_file, button])" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "uploader.observe(on_file_upload, \"value\")\n", 109 | "vbox" 110 | ] 111 | } 112 | ], 113 | "metadata": { 114 | "kernelspec": { 115 | "display_name": ".venv", 116 | "language": "python", 117 | "name": "python3" 118 | }, 119 | "language_info": { 120 | "name": "python", 121 | "version": "3.8.6" 122 | }, 123 | "orig_nbformat": 4, 124 | "vscode": { 125 | "interpreter": { 126 | "hash": "58b987bbdc7808225bbe23a8c8714df9864b164f01439c091b16b96ae31865d2" 127 | } 128 | } 129 | }, 130 | "nbformat": 4, 131 | "nbformat_minor": 2 132 | } 133 | -------------------------------------------------------------------------------- /jupyter-voila/ipyvolume.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# So easy, *voilà*!\n", 8 | "\n", 9 | "In this example notebook, we demonstrate how Voilà can render custom Jupyter widgets such as [ipyvolume](https://github.com/maartenbreddels/ipyvolume). 
" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "import ipyvolume as ipv\n", 19 | "ipv.examples.example_ylm();" 20 | ] 21 | } 22 | ], 23 | "metadata": { 24 | "kernelspec": { 25 | "display_name": "Python 3", 26 | "language": "python", 27 | "name": "python3" 28 | }, 29 | "language_info": { 30 | "codemirror_mode": { 31 | "name": "ipython", 32 | "version": 3 33 | }, 34 | "file_extension": ".py", 35 | "mimetype": "text/x-python", 36 | "name": "python", 37 | "nbconvert_exporter": "python", 38 | "pygments_lexer": "ipython3", 39 | "version": "3.7.3" 40 | } 41 | }, 42 | "nbformat": 4, 43 | "nbformat_minor": 2 44 | } 45 | -------------------------------------------------------------------------------- /jupyter-voila/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "jupyter-voila", 6 | "entrypoint": "" 7 | }, 8 | "python": { 9 | "version": "3.10.12", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.3.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "44e80fabd94a432739599512034f4738" 19 | }, 20 | ".internal.yml": { 21 | "checksum": "e214c5fc2f19138e48b9579e3a731e00" 22 | }, 23 | "README.md": { 24 | "checksum": "b9980b76a9c5b600cd33ff3e70e007b6" 25 | }, 26 | "bqplot.ipynb": { 27 | "checksum": "9f283b29889500e6c78e83ad1257e03f" 28 | }, 29 | "hash.ipynb": { 30 | "checksum": "72f44e0fa678573992c7cf553ecb9a6f" 31 | }, 32 | "ipyvolume.ipynb": { 33 | "checksum": "f8df24eb0418afaa205b0cb1dbb49628" 34 | }, 35 | "voila.png": { 36 | "checksum": "7b1b11e04f0ded89a30adc69b86d1834" 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /jupyter-voila/requirements.txt: -------------------------------------------------------------------------------- 1 | anyio==4.1.0 2 | argon2-cffi==23.1.0 3 | argon2-cffi-bindings==21.2.0 4 | arrow==1.3.0 5 | asttokens==2.4.1 6 | attrs==23.1.0 7 | Babel==2.13.1 8 | beautifulsoup4==4.12.2 9 | bleach==6.1.0 10 | bqplot==0.12.42 11 | certifi==2023.11.17 12 | cffi==1.16.0 13 | charset-normalizer==3.3.2 14 | comm==0.2.0 15 | contourpy==1.2.0 16 | cycler==0.12.1 17 | decorator==5.1.1 18 | defusedxml==0.7.1 19 | exceptiongroup==1.2.0 20 | executing==2.0.1 21 | fastjsonschema==2.19.0 22 | fonttools==4.46.0 23 | fqdn==1.5.1 24 | idna==3.6 25 | ipydatawidgets==4.3.5 26 | ipython==8.18.1 27 | ipyvolume==0.6.3 28 | ipyvue==1.10.1 29 | ipyvuetify==1.8.10 30 | ipywebrtc==0.6.0 31 | ipywidgets==8.1.1 32 | isoduration==20.11.0 33 | jedi==0.19.1 34 | Jinja2==3.1.2 35 | json5==0.9.14 36 | jsonpointer==2.4 37 | jsonschema==4.20.0 38 | jsonschema-specifications==2023.11.2 39 | jupyter-events==0.9.0 40 | jupyter_client==8.6.0 41 | jupyter_core==5.5.0 42 | jupyter_server==2.12.0 43 | jupyter_server_terminals==0.4.4 44 | jupyterlab-widgets==3.0.9 45 | jupyterlab_pygments==0.3.0 46 | jupyterlab_server==2.25.2 47 | kiwisolver==1.4.5 48 | MarkupSafe==2.1.3 49 | matplotlib==3.8.2 50 | matplotlib-inline==0.1.6 51 | mistune==3.0.2 52 | nbclient==0.7.4 53 | nbconvert==7.12.0 54 | nbformat==5.9.2 55 | numpy==1.26.2 56 | overrides==7.4.0 57 | packaging==23.2 58 | pandas==2.1.3 59 | pandocfilters==1.5.0 60 | parso==0.8.3 61 | pexpect==4.9.0 62 | Pillow==10.1.0 63 | platformdirs==4.1.0 64 | prometheus-client==0.19.0 65 | prompt-toolkit==3.0.41 66 | 
ptyprocess==0.7.0 67 | pure-eval==0.2.2 68 | pycparser==2.21 69 | Pygments==2.17.2 70 | pyparsing==3.1.1 71 | python-dateutil==2.8.2 72 | python-json-logger==2.0.7 73 | pythreejs==2.4.2 74 | pytz==2023.3.post1 75 | PyYAML==6.0.1 76 | pyzmq==25.1.2 77 | referencing==0.31.1 78 | requests==2.31.0 79 | rfc3339-validator==0.1.4 80 | rfc3986-validator==0.1.1 81 | rpds-py==0.13.2 82 | Send2Trash==1.8.2 83 | six==1.16.0 84 | sniffio==1.3.0 85 | soupsieve==2.5 86 | stack-data==0.6.3 87 | terminado==0.18.0 88 | tinycss2==1.2.1 89 | tornado==6.4 90 | traitlets==5.14.0 91 | traittypes==0.2.1 92 | types-python-dateutil==2.8.19.14 93 | tzdata==2023.3 94 | uri-template==1.3.0 95 | urllib3==2.1.0 96 | voila==0.5.5 97 | wcwidth==0.2.12 98 | webcolors==1.13 99 | webencodings==0.5.1 100 | websocket-client==1.7.0 101 | websockets==12.0 102 | widgetsnbextension==4.0.9 103 | -------------------------------------------------------------------------------- /jupyter-voila/voila.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/jupyter-voila/voila.png -------------------------------------------------------------------------------- /justfile: -------------------------------------------------------------------------------- 1 | default: 2 | just --list 3 | 4 | # https://github.com/rnorth/gh-combine-prs 5 | # use combine-prs extension to bump all requirements files 6 | dependabot: 7 | gh combine-prs --query "author:app/dependabot" 8 | 9 | # set up virtual environment in working directory 10 | bootstrap: 11 | if test ! -e {{invocation_directory()}}/.venv; then python -m venv {{invocation_directory()}}/.venv; fi 12 | {{invocation_directory()}}/.venv/bin/python -m pip install --upgrade pip wheel setuptools 13 | if test -f {{invocation_directory()}}/requirements.txt; \ 14 | then \ 15 | {{invocation_directory()}}/.venv/bin/python -m pip install -r {{invocation_directory()}}/requirements.txt; \ 16 | fi 17 | 18 | # remove virtual environment from working directory 19 | clean: 20 | rm -rf {{invocation_directory()}}/.venv 21 | 22 | # deploy manifest in current directory to target server 23 | deploy connect: 24 | rsconnect deploy manifest {{invocation_directory()}}/manifest.json -n {{connect}} 25 | 26 | release: 27 | gh release create -------------------------------------------------------------------------------- /python-examples.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | AutoAppendNewline: Yes 16 | -------------------------------------------------------------------------------- /quarto-lightbox/.gitignore: -------------------------------------------------------------------------------- 1 | /.quarto/ 2 | -------------------------------------------------------------------------------- /quarto-lightbox/.internal.yml: -------------------------------------------------------------------------------- 1 | default: 2 | content: 3 | - name: "Literate Programming with Quarto" 4 | path: "." 5 | description: "An open-source scientific and technical publishing system." 
6 | tag: 7 | - "Examples|Python|Quarto" 8 | url: "/python-examples/quarto" 9 | image: "quarto-lightbox.png" -------------------------------------------------------------------------------- /quarto-lightbox/README.md: -------------------------------------------------------------------------------- 1 | # Quarto With the Jupyter Engine and Lightbox Extension 2 | 3 | Note on Lightbox: This example has been tested and works with Quarto versions 1.0.36 and 1.1.189 and Lightbox version 0.1.3. 4 | 5 | ## Deploy 6 | 7 | Steps: 8 | 9 | * Test locally 10 | * Acquire an [API key](https://docs.posit.co/connect/user/api-keys/) 11 | * Publish with your method of choice 12 | 13 | Tip: Use [quarto projects](https://quarto.org/docs/projects/quarto-projects.html) for the most robust publishing experience. The rsconnect-python package (and git-backed publishing on Connect), as of 2022/11/07, does not support publishing standalone documents and needs Quarto content to be in a project. 14 | 15 | **Quarto CLI** 16 | 17 | ```bash 18 | quarto publish connect quarto-python-lightbox.qmd 19 | ``` 20 | 21 | **rsconnect-python** 22 | 23 | Configure your server address: 24 | ``` 25 | rsconnect add \ 26 | --api-key \ 27 | --server \ 28 | --name 29 | ``` 30 | 31 | Create the requirements file: 32 | ``` 33 | python -m pip freeze > requirements.txt 34 | ``` 35 | 36 | Publish: 37 | ``` 38 | rsconnect deploy quarto . -n 39 | ``` 40 | 41 | [Important: If your Quarto content contains R code, you cannot use the rsconnect-python CLI's rsconnect deploy quarto function. You can still use rsconnect deploy manifest to deploy content for which a manifest has already been generated.](https://quarto.org/docs/publishing/rstudio-connect.html) Instead use [rsconnect](https://github.com/rstudio/rsconnect). 42 | 43 | **git-backed** 44 | 45 | Writing the manifest file: 46 | 47 | ```bash 48 | rsconnect write-manifest quarto . 
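# Editor's note (hypothetical follow-up, not part of the original README):
# with git-backed publishing Connect deploys straight from the repository,
# so the generated manifest.json and requirements.txt are assumed to be
# committed alongside the .qmd before Connect is pointed at the branch:
git add manifest.json requirements.txt
git commit -m "Add deployment manifest for git-backed publishing"
git push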
49 | ``` 50 | 51 | ## Resources 52 | 53 | - [Posit Connect User Guide - Quarto (Python)](https://docs.posit.co/connect/user/publishing-cli-quarto/) 54 | - [quarto cli](https://quarto.org/docs/publishing/rstudio-connect.html) 55 | - [rsconnect](https://github.com/rstudio/rsconnect) 56 | - [rsconnect-python](https://github.com/rstudio/rsconnect-python) 57 | - [quarto projects](https://quarto.org/docs/projects/quarto-projects.html) 58 | - [Posit Connect User Guide - Git Backed Publishing ](https://docs.posit.co/connect/user/git-backed/) 59 | - [Quarto Version Manager](https://github.com/dpastoor/qvm) 60 | - [Lightbox Quarto Extension](https://github.com/quarto-ext/lightbox) 61 | -------------------------------------------------------------------------------- /quarto-lightbox/_extensions/quarto-ext/lightbox/_extension.yml: -------------------------------------------------------------------------------- 1 | title: Lightbox 2 | author: RStudio, PBC 3 | version: 0.1.3 4 | contributes: 5 | filters: 6 | - lightbox.lua 7 | -------------------------------------------------------------------------------- /quarto-lightbox/_extensions/quarto-ext/lightbox/lightbox.css: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | div.gslide div.gslide-description, 5 | div.gslide-description .gslide-title, 6 | div.gslide-description .gslide-desc { 7 | color: var(--quarto-body-color); 8 | background-color: var(--quarto-body-bg); 9 | } -------------------------------------------------------------------------------- /quarto-lightbox/_extensions/quarto-ext/lightbox/lightbox.lua: -------------------------------------------------------------------------------- 1 | -- whether we're automatically lightboxing 2 | local auto = false 3 | 4 | -- whether we need lightbox dependencies added 5 | local needsLightbox = false 6 | 7 | -- a counter used to ensure each image is in its own gallery 8 | local imgCount = 0 9 | 10 | -- attributes to forward from the image to the newly created link 11 | local kForwardedAttr = { 12 | "title", "description", "desc-position", 13 | "type", "effect", "zoomable", "draggable" 14 | } 15 | 16 | local kLightboxClass = "lightbox" 17 | local kNoLightboxClass = "nolightbox" 18 | local kGalleryPrefix = "quarto-lightbox-gallery-" 19 | 20 | -- A list of images already within links that we can use to filter 21 | local imagesWithinLinks = pandoc.List({}) 22 | 23 | return { 24 | { 25 | Meta = function(meta) 26 | 27 | -- If the mode is auto, we need go ahead and 28 | -- run if there are any images (ideally we would) 29 | -- filter to images in the body, but that can be 30 | -- left for future me to deal with 31 | -- supports: 32 | -- lightbox: auto 33 | -- or 34 | -- lightbox: 35 | -- match: auto 36 | local lbMeta = meta.lightbox 37 | if lbMeta ~= nil and type(lbMeta) == 'table' then 38 | if lbMeta[1] ~= nil then 39 | if lbMeta[1].text == "auto" then 40 | auto = true 41 | end 42 | elseif lbMeta.match ~= nil and pandoc.utils.stringify(lbMeta.match) == 'auto' then 43 | auto = true 44 | elseif lbMeta == true then 45 | auto = true 46 | end 47 | end 48 | end, 49 | -- Find images that are already within links 50 | -- we'll use this to filter out these images if 51 | -- the most is auto 52 | Link = function(linkEl) 53 | pandoc.walk_inline(linkEl, { 54 | Image = function(imageEl) 55 | imagesWithinLinks[#imagesWithinLinks + 1] = imageEl 56 | end 57 | }) 58 | end 59 | }, 60 | { 61 | Image = function(imgEl) 62 | if quarto.doc.isFormat("html:js") then 63 | local isAlreadyLinked 
= imagesWithinLinks:includes(imgEl) 64 | if (not isAlreadyLinked and auto and not imgEl.classes:includes(kNoLightboxClass)) 65 | or imgEl.classes:includes('lightbox') then 66 | -- note that we need to include the dependency for lightbox 67 | needsLightbox = true 68 | imgCount = imgCount + 1 69 | 70 | -- remove the class from the image 71 | imgEl.attr.classes = imgEl.attr.classes:filter(function(clz) 72 | return clz ~= kLightboxClass 73 | end) 74 | 75 | -- attributes for the link 76 | local linkAttributes = {} 77 | 78 | -- mark this image as a lightbox target 79 | linkAttributes.class = kLightboxClass 80 | 81 | -- get the alt text from image and use that as title 82 | local title = nil 83 | if imgEl.caption ~= nil and #imgEl.caption > 0 then 84 | linkAttributes.title = pandoc.utils.stringify(imgEl.caption) 85 | elseif imgEl.attributes['fig-alt'] ~= nil and #imgEl.attributes['fig-alt'] > 0 then 86 | linkAttributes.title = pandoc.utils.stringify(imgEl.attributes['fig-alt']) 87 | end 88 | 89 | -- move a group attribute to the link, if present 90 | if imgEl.attr.attributes.group ~= nil then 91 | linkAttributes.gallery = imgEl.attr.attributes.group 92 | imgEl.attr.attributes.group = nil 93 | else 94 | linkAttributes.gallery = kGalleryPrefix .. imgCount 95 | end 96 | 97 | -- forward any other known attributes 98 | for i, v in ipairs(kForwardedAttr) do 99 | if imgEl.attr.attributes[v] ~= nil then 100 | -- forward the attribute 101 | linkAttributes[v] = imgEl.attr.attributes[v] 102 | 103 | -- clear the attribute 104 | imgEl.attr.attributes[v] = nil 105 | end 106 | 107 | -- clear the title 108 | if (imgEl.title == 'fig:') then 109 | imgEl.title = "" 110 | end 111 | 112 | end 113 | 114 | -- wrap decorated images in a link with appropriate attrs 115 | local link = pandoc.Link({imgEl}, imgEl.src, nil, linkAttributes) 116 | return link 117 | end 118 | end 119 | end, 120 | Meta = function(meta) 121 | -- If we discovered lightbox-able images 122 | -- we need to include the dependencies 123 | if needsLightbox then 124 | -- add the dependency 125 | quarto.doc.addHtmlDependency({ 126 | name = 'glightbox', 127 | scripts = {'resources/js/glightbox.min.js'}, 128 | stylesheets = {'resources/css/glightbox.min.css', 'lightbox.css'} 129 | }) 130 | 131 | -- read lightbox options 132 | local lbMeta = meta.lightbox 133 | local lbOptions = {} 134 | local readEffect = function(el) 135 | local val = pandoc.utils.stringify(el) 136 | if val == "fade" or val == "zoom" or val == "none" then 137 | return val 138 | else 139 | error("Invalid effect " + val) 140 | end 141 | end 142 | 143 | -- permitted options include: 144 | -- lightbox: 145 | -- effect: zoom | fade | none 146 | -- desc-position: top | bottom | left |right 147 | -- loop: true | false 148 | -- class: 149 | local effect = "zoom" 150 | local descPosition = "bottom" 151 | local loop = true 152 | local skin = nil 153 | 154 | -- The selector controls which elements are targeted. 155 | -- currently, it always targets .lightbox elements 156 | -- and there is no way for the user to change this 157 | local selector = "." .. 
kLightboxClass 158 | 159 | if lbMeta ~= nil and type(lbMeta) == 'table' then 160 | if lbMeta.effect ~= nil then 161 | effect = readEffect(lbMeta.effect) 162 | end 163 | 164 | if lbMeta['desc-position'] ~= nil then 165 | descPosition = pandoc.utils.stringify(lbMeta['desc-position']) 166 | end 167 | 168 | if lbMeta['css-class'] ~= nil then 169 | skin = pandoc.utils.stringify(lbMeta['css-class']) 170 | end 171 | 172 | if lbMeta.loop ~= nil then 173 | loop = lbMeta.loop 174 | end 175 | end 176 | 177 | -- Generate the options to configure lightbox 178 | local options = { 179 | selector = selector, 180 | closeEffect = effect, 181 | openEffect = effect, 182 | descPosition = descPosition, 183 | loop = loop, 184 | } 185 | if skin ~= nil then 186 | options.skin = skin 187 | end 188 | local optionsJson = quarto.json.encode(options) 189 | 190 | -- generate the initialization script with the correct options 191 | local scriptTag = "" 192 | 193 | -- inject the rendering code 194 | quarto.doc.includeText("after-body", scriptTag) 195 | 196 | end 197 | end 198 | }} 199 | -------------------------------------------------------------------------------- /quarto-lightbox/_publish.yml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/quarto-lightbox/_publish.yml -------------------------------------------------------------------------------- /quarto-lightbox/_quarto.yml: -------------------------------------------------------------------------------- 1 | project: 2 | title: "quarto-python-lightbox" 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /quarto-lightbox/img/Chengdu-pandas-d10.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/quarto-lightbox/img/Chengdu-pandas-d10.jpg -------------------------------------------------------------------------------- /quarto-lightbox/img/Lion_waiting_in_Namibia.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/quarto-lightbox/img/Lion_waiting_in_Namibia.jpg -------------------------------------------------------------------------------- /quarto-lightbox/img/Panthera_tigris_corbetti_(Tierpark_Berlin)_832-714-(118).jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/quarto-lightbox/img/Panthera_tigris_corbetti_(Tierpark_Berlin)_832-714-(118).jpg -------------------------------------------------------------------------------- /quarto-lightbox/img/The_Wizard_of_Oz_1955_Lobby_Card.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/quarto-lightbox/img/The_Wizard_of_Oz_1955_Lobby_Card.jpg -------------------------------------------------------------------------------- /quarto-lightbox/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "quarto-static" 6 | }, 7 | "quarto": { 8 | "version": "1.4.514", 9 | "engines": [ 10 | "jupyter" 11 | ] 12 | }, 13 
| "python": { 14 | "version": "3.10.12", 15 | "package_manager": { 16 | "name": "pip", 17 | "version": "23.3.1", 18 | "package_file": "requirements.txt" 19 | } 20 | }, 21 | "files": { 22 | "requirements.txt": { 23 | "checksum": "6ec2a900c6496c928bb6012c36c69efb" 24 | }, 25 | ".gitignore": { 26 | "checksum": "06d306ff933a023ca365a9fbf6589381" 27 | }, 28 | ".internal.yml": { 29 | "checksum": "1e5a127a851c07e0ca7655bb3d5c7713" 30 | }, 31 | "README.md": { 32 | "checksum": "eca5c4e1f4827d2562c5de3abc100cdc" 33 | }, 34 | "_extensions/quarto-ext/lightbox/_extension.yml": { 35 | "checksum": "3db0b546785dbeb0117afef962c424bd" 36 | }, 37 | "_extensions/quarto-ext/lightbox/lightbox.css": { 38 | "checksum": "f94b9cae12f142eaf34dc631afa138fc" 39 | }, 40 | "_extensions/quarto-ext/lightbox/lightbox.lua": { 41 | "checksum": "2ceffdc78094bca8e55b4be6d37cb12d" 42 | }, 43 | "_extensions/quarto-ext/lightbox/resources/css/glightbox.min.css": { 44 | "checksum": "9b438b29cef1c212d1c65a877ffc7232" 45 | }, 46 | "_extensions/quarto-ext/lightbox/resources/js/glightbox.min.js": { 47 | "checksum": "2b4c8cbaade24ecb58bcb0d89694ccee" 48 | }, 49 | "_publish.yml": { 50 | "checksum": "d41d8cd98f00b204e9800998ecf8427e" 51 | }, 52 | "_quarto.yml": { 53 | "checksum": "ab8efbe8e2d5444405b716b3bb917b3f" 54 | }, 55 | "img/Chengdu-pandas-d10.jpg": { 56 | "checksum": "55ef49ea2db1762990435c43e2a8dca0" 57 | }, 58 | "img/Lion_waiting_in_Namibia.jpg": { 59 | "checksum": "c407f20233958896edd2bc90cc1cba12" 60 | }, 61 | "img/Panthera_tigris_corbetti_(Tierpark_Berlin)_832-714-(118).jpg": { 62 | "checksum": "40aa373fb50f06112555bcc941aaf6e0" 63 | }, 64 | "img/The_Wizard_of_Oz_1955_Lobby_Card.jpg": { 65 | "checksum": "bdbf9246bd2eb7ff5942b3bdc0f3a401" 66 | }, 67 | "quarto-lightbox.png": { 68 | "checksum": "1105422772d95ab055a6c8a2f96c7061" 69 | }, 70 | "quarto-python-lightbox.qmd": { 71 | "checksum": "c1083bb396a02e22466cca0b5c5e7f6c" 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /quarto-lightbox/quarto-lightbox.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/quarto-lightbox/quarto-lightbox.png -------------------------------------------------------------------------------- /quarto-lightbox/quarto-python-lightbox.qmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Quarto matplotlib and lightbox demo" 3 | filters: 4 | - lightbox 5 | lightbox: auto 6 | format: 7 | html: 8 | code-fold: true 9 | jupyter: python3 10 | resource_files: 11 | - "_extensions/quarto-ext/lightbox/lightbox.lua" 12 | - "_extensions/quarto-ext/lightbox/lightbox.css" 13 | - "_extensions/quarto-ext/lightbox/_extension.yml" 14 | - "_extensions/quarto-ext/lightbox/resources/css/glightbox.min.css" 15 | - "_extensions/quarto-ext/lightbox/resources/js/glightbox.min.js" 16 | --- 17 | 18 | ## Quarto 19 | 20 | Quarto enables you to weave together content and executable code into a finished document. To learn more about Quarto see . 21 | 22 | ## matplotlib demo 23 | 24 | Quarto can use different languages and packages, for example [Python](https://quarto.org/docs/computations/python.html). This example is using the Jupyter engine (python3 kernel) following the Quarto instructions on [this page](https://quarto.org/docs/tools/rstudio.html#jupyter-engine). 
25 | 26 | For example, inserting Python code creating a line plot on a polar axis using Jupyter for rendering (taken from the Quarto documentation): 27 | 28 | ```{python} 29 | #| label: fig-polar 30 | #| fig-cap: "A line plot on a polar axis" 31 | 32 | import numpy as np 33 | import matplotlib.pyplot as plt 34 | 35 | r = np.arange(0, 2, 0.01) 36 | theta = 2 * np.pi * r 37 | fig, ax = plt.subplots( 38 | subplot_kw = {'projection': 'polar'} 39 | ) 40 | ax.plot(theta, r) 41 | ax.set_rticks([0.5, 1, 1.5, 2]) 42 | ax.grid(True) 43 | plt.show() 44 | ``` 45 | 46 | ## lightbox 47 | 48 | Quarto can arrange images for you, with the ability to both arrange them in various grid configurations as well as for a pop-out interaction when clicked on. 49 | 50 | These images are interactive, try clicking on one! 51 | 52 | ::: {layout-ncol="2"} 53 | 54 | ![Lions](img/Lion_waiting_in_Namibia.jpg){group="my-gallery"} 55 | 56 | ![and tigers](img/Panthera_tigris_corbetti_%28Tierpark_Berlin%29_832-714-%28118%29.jpg){group="my-gallery"} 57 | 58 | ![and bears](img/Chengdu-pandas-d10.jpg){group="my-gallery"} 59 | 60 | ![oh my!](img/The_Wizard_of_Oz_1955_Lobby_Card.jpg){group="my-gallery"} 61 | 62 | ::: 63 | 64 | References: 65 | 66 | - Tips on dealing with displaying images in a rendered quarto document: 67 | 68 | - Install the lightbox extension: 69 | 70 | Images are from Wikipedia and in the public domain: 71 | 72 | - Lions: 73 | - Tigers: 74 | - Bears: 75 | - Wizard of Oz: 76 | 77 | Check out the code: [here](https://github.com/sol-eng/python-examples/tree/main/quarto-lightbox/) 78 | -------------------------------------------------------------------------------- /quarto-lightbox/requirements.txt: -------------------------------------------------------------------------------- 1 | appnope==0.1.3 2 | asttokens==2.4.1 3 | attrs==23.1.0 4 | comm==0.2.0 5 | contourpy==1.2.0 6 | cycler==0.12.1 7 | debugpy==1.8.0 8 | decorator==5.1.1 9 | exceptiongroup==1.2.0 10 | executing==2.0.1 11 | fastjsonschema==2.19.0 12 | fonttools==4.46.0 13 | ipykernel==6.27.1 14 | ipython==8.18.1 15 | jedi==0.19.1 16 | jsonschema==4.20.0 17 | jsonschema-specifications==2023.11.2 18 | jupyter_client==8.6.0 19 | jupyter_core==5.5.0 20 | kiwisolver==1.4.5 21 | matplotlib==3.8.2 22 | matplotlib-inline==0.1.6 23 | nbclient==0.9.0 24 | nbformat==5.9.2 25 | nest-asyncio==1.5.8 26 | numpy==1.26.2 27 | packaging==23.2 28 | parso==0.8.3 29 | pexpect==4.9.0 30 | Pillow==10.1.0 31 | platformdirs==4.1.0 32 | prompt-toolkit==3.0.41 33 | psutil==5.9.6 34 | ptyprocess==0.7.0 35 | pure-eval==0.2.2 36 | Pygments==2.17.2 37 | pyparsing==3.1.1 38 | python-dateutil==2.8.2 39 | PyYAML==6.0.1 40 | pyzmq==25.1.2 41 | quarto==0.1.0 42 | referencing==0.31.1 43 | rpds-py==0.13.2 44 | six==1.16.0 45 | stack-data==0.6.3 46 | tornado==6.4 47 | traitlets==5.14.0 48 | wcwidth==0.2.12 49 | -------------------------------------------------------------------------------- /reticulated-image-classifier/.internal.yml: -------------------------------------------------------------------------------- 1 | default: 2 | content: 3 | - name: "Reticulated Image Classification with Shiny and PyTorch" 4 | path: "." 
5 | description: "A Shiny for R app powered by a PyTorch Model" 6 | tag: 7 | - "Examples|Python|Reticulate" 8 | url: "/python-examples/shiny-pytorch/" 9 | image: "reticulated-image-classifier.png" 10 | -------------------------------------------------------------------------------- /reticulated-image-classifier/DESCRIPTION: -------------------------------------------------------------------------------- 1 | Title: Image Classifier 2 | Author: RStudio, Inc. 3 | AuthorUrl: http://www.rstudio.com/ 4 | License: Apache 2.0 5 | Tags: image-classifier 6 | Type: Shiny 7 | -------------------------------------------------------------------------------- /reticulated-image-classifier/README.md: -------------------------------------------------------------------------------- 1 | # Image Classification with Shiny and PyTorch 2 | 3 | 4 | ## Deploy 5 | 6 | ```r 7 | rsconnect::deployApp() 8 | ``` 9 | 10 | ## Resources 11 | 12 | - [Posit Connect User Guide - Shiny](https://docs.posit.co/connect/user/shiny/) 13 | - [Deploying Reticulated Content](https://solutions.rstudio.com/r/reticulate/#setting-up-a-reticulated-project) 14 | -------------------------------------------------------------------------------- /reticulated-image-classifier/app.R: -------------------------------------------------------------------------------- 1 | library(magrittr) 2 | library(reticulate) # Used to call Tensorflow Python script 3 | library(shiny) 4 | library(shinycssloaders) 5 | 6 | behavior <- config::get("image") 7 | stopifnot(behavior %in% c("upload", "fetch-image-url")) 8 | 9 | # Load source of Python image classifier script 10 | source_python("image-classifier.py") 11 | 12 | server <- function(input, output, session) { 13 | 14 | # where the image that should be classified is on disk 15 | image_path <- reactiveVal("./img/cat.jpg") 16 | 17 | image_prefix <- "pytorch_image" 18 | 19 | # the configurable selector for fetch-image-url vs. upload 20 | output$image_selector <- renderUI({ 21 | if (behavior == "fetch-image-url") { 22 | list( 23 | textInput("file1", label = h5("Enter Image URL:"), value = ""), 24 | actionButton("fetch-image-url", "Fetch Image") 25 | ) 26 | } else if (behavior == "upload") { 27 | fileInput("file_upload", label = h5("Upload an Image:")) 28 | } else { 29 | stop("Invalid configuration. 
Please chose 'fetch-image-url' or 'upload'") 30 | } 31 | }) 32 | 33 | # handle upload 34 | observe({ 35 | req(input$file_upload) 36 | upload_file <- input$file_upload 37 | image_path(upload_file$datapath[[1]]) 38 | }) 39 | 40 | # handle fetch-image-url 41 | observeEvent(input[["fetch-image-url"]], { 42 | req(input$file1) 43 | tryCatch( 44 | { 45 | # Fetch image from URL 46 | temp_fetch_image_url <- fs::file_temp(image_prefix, ext = ".jpg") 47 | downloader::download(input$file1, temp_fetch_image_url) 48 | 49 | image_path(temp_fetch_image_url) 50 | }, 51 | error = function(e) { 52 | # usually, you would not expose this to the user 53 | # without a little sanitization 54 | showNotification(as.character(safeError(e)), type = "warning") 55 | } 56 | ) 57 | }) 58 | 59 | output$contents <- renderTable({ 60 | req(image_path()) 61 | 62 | tryCatch( 63 | { 64 | # Call function from PyTorch Python script to classify image 65 | results <- classify_image_pytorch(image_path = image_path()) 66 | }, 67 | error = function(e) { 68 | # usually, you would not expose this to the user 69 | # without a little sanitization 70 | showNotification(as.character(safeError(e)), type = "warning") 71 | } 72 | ) 73 | return(results) 74 | }) 75 | 76 | # render the image 77 | output$image1 <- renderImage({ 78 | req(image_path()) 79 | 80 | # Copy the image to temp space 81 | new_path <- fs::file_copy(image_path(), fs::file_temp(image_prefix, ext = ".jpg")) 82 | 83 | # Return a list containing the filename 84 | if (is.null(new_path)) { 85 | return(NULL) 86 | } else { 87 | return(list(src = new_path, style = htmltools::css(width = "100%"))) 88 | } 89 | }, deleteFile = FALSE) 90 | 91 | # default images 92 | observeEvent(input$oil_platform, image_path("./img/oil_platform.jpg")) 93 | observeEvent(input$truck, image_path("./img/truck.jpg")) 94 | observeEvent(input$flower, image_path("./img/flower.jpg")) 95 | observeEvent(input$cat, image_path("./img/cat.jpg")) 96 | observeEvent(input$dog, image_path("./img/dog.jpg")) 97 | } 98 | 99 | ui <- fluidPage( 100 | titlePanel("Image Classifier"), 101 | sidebarLayout( 102 | sidebarPanel( 103 | uiOutput("image_selector"), 104 | helpText("Your image will be classified using PyTorch."), 105 | helpText("The resulting predictions will be shown along with their confidence level."), 106 | hr(), 107 | helpText("Or, choose an example image:"), 108 | actionButton("dog", "Dog"), 109 | actionButton("cat", "Cat"), 110 | actionButton("flower", "Flower"), 111 | tags$br(), 112 | tags$br(), 113 | actionButton("truck", "Truck"), 114 | actionButton("oil_platform", "Oil Platform"), 115 | hr(), 116 | helpText("View", a("source code on GitHub", href = "https://github.com/sol-eng/python-examples/tree/master/image-classifier")) 117 | ), 118 | mainPanel( 119 | # Output 120 | tableOutput("contents") %>% withSpinner(), 121 | imageOutput("image1", height = NULL) %>% withSpinner(color = "#ffffff") 122 | ) 123 | ) 124 | ) 125 | 126 | shinyApp(ui = ui, server = server) -------------------------------------------------------------------------------- /reticulated-image-classifier/config.yml: -------------------------------------------------------------------------------- 1 | default: 2 | image: "fetch-image-url" 3 | quickstart: 4 | image: "upload" 5 | -------------------------------------------------------------------------------- /reticulated-image-classifier/image-classifier.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | 
SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | AutoAppendNewline: Yes 16 | StripTrailingWhitespace: Yes 17 | -------------------------------------------------------------------------------- /reticulated-image-classifier/image-classifier.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import requests 4 | from PIL import Image 5 | from torchvision import models, transforms 6 | from torch.autograd import Variable 7 | 8 | # Set PyTorch model directory 9 | os.environ["TORCH_HOME"] = "./model" 10 | 11 | squeeze = models.squeezenet1_1(pretrained=True) 12 | squeeze.eval() 13 | 14 | normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) 15 | 16 | preprocess = transforms.Compose( 17 | [ 18 | transforms.Resize(256), 19 | transforms.CenterCrop(224), 20 | transforms.ToTensor(), 21 | normalize, 22 | ] 23 | ) 24 | 25 | with open("labels.json") as f: 26 | labels_data = json.load(f) 27 | 28 | labels = {int(key): value for (key, value) in labels_data.items()} 29 | 30 | 31 | def classify_image_pytorch(image_path): 32 | 33 | img_pil = Image.open(image_path) 34 | img_tensor = preprocess(img_pil) 35 | img_tensor.unsqueeze_(0) 36 | img_variable = Variable(img_tensor) 37 | fc_out = squeeze(img_variable) 38 | 39 | top_k = fc_out.data.numpy()[0].argsort()[-5:][::-1] 40 | results = [] 41 | for prediction in top_k: 42 | description = labels[prediction] 43 | score = fc_out.data.numpy()[0][prediction] 44 | results.append(("%s (score = %.5f)" % (description, score))) 45 | 46 | return results 47 | -------------------------------------------------------------------------------- /reticulated-image-classifier/img/cat.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-image-classifier/img/cat.jpg -------------------------------------------------------------------------------- /reticulated-image-classifier/img/dog.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-image-classifier/img/dog.jpg -------------------------------------------------------------------------------- /reticulated-image-classifier/img/flower.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-image-classifier/img/flower.jpg -------------------------------------------------------------------------------- /reticulated-image-classifier/img/oil_platform.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-image-classifier/img/oil_platform.jpg -------------------------------------------------------------------------------- /reticulated-image-classifier/img/truck.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-image-classifier/img/truck.jpg 
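The `classify_image_pytorch()` helper in image-classifier.py above is only ever called from app.R through reticulate. As a minimal sketch (not part of the repository; paths are assumptions), it can also be smoke-tested directly from Python when run inside the reticulated-image-classifier/ directory, so that labels.json, model/, and the bundled img/ files resolve:

```python
# Hypothetical smoke test for image-classifier.py -- not shipped with the repo.
# The file name contains a hyphen, so load it with importlib instead of `import`.
import importlib.util
from pathlib import Path

spec = importlib.util.spec_from_file_location("classifier", "image-classifier.py")
classifier = importlib.util.module_from_spec(spec)
spec.loader.exec_module(classifier)  # loads SqueezeNet and labels.json at import time

for img in sorted(Path("img").glob("*.jpg")):
    print(img.name)
    for prediction in classifier.classify_image_pytorch(str(img)):
        print("   ", prediction)
```

Each call returns the same five "description (score = ...)" strings that app.R renders in its results table.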
-------------------------------------------------------------------------------- /reticulated-image-classifier/model/hub/checkpoints/squeezenet1_1-b8a52dc0.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-image-classifier/model/hub/checkpoints/squeezenet1_1-b8a52dc0.pth -------------------------------------------------------------------------------- /reticulated-image-classifier/pytorch-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-image-classifier/pytorch-logo.png -------------------------------------------------------------------------------- /reticulated-image-classifier/renv/.gitignore: -------------------------------------------------------------------------------- 1 | cellar/ 2 | sandbox/ 3 | library/ 4 | local/ 5 | lock/ 6 | python/ 7 | staging/ 8 | -------------------------------------------------------------------------------- /reticulated-image-classifier/renv/settings.dcf: -------------------------------------------------------------------------------- 1 | bioconductor.version: 2 | external.libraries: 3 | ignored.packages: 4 | package.dependency.fields: Imports, Depends, LinkingTo 5 | r.version: 6 | snapshot.type: implicit 7 | use.cache: TRUE 8 | vcs.ignore.cellar: TRUE 9 | vcs.ignore.library: TRUE 10 | vcs.ignore.local: TRUE 11 | -------------------------------------------------------------------------------- /reticulated-image-classifier/requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2023.7.22 2 | charset-normalizer==3.1.0 3 | filelock==3.10.0 4 | idna==3.4 5 | Jinja2==3.1.2 6 | MarkupSafe==2.1.2 7 | mpmath==1.3.0 8 | networkx==3.0 9 | numpy==1.24.2 10 | Pillow==10.0.1 11 | requests==2.31.0 12 | sympy==1.11.1 13 | torch==2.0.0 14 | torchvision==0.15.1 15 | typing_extensions==4.5.0 16 | urllib3==1.26.18 17 | -------------------------------------------------------------------------------- /reticulated-image-classifier/reticulated-image-classifier.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-image-classifier/reticulated-image-classifier.png -------------------------------------------------------------------------------- /reticulated-rmarkdown-notebook/.gitignore: -------------------------------------------------------------------------------- 1 | *.csv 2 | *.nb.html 3 | -------------------------------------------------------------------------------- /reticulated-rmarkdown-notebook/README.md: -------------------------------------------------------------------------------- 1 | # Data Analysis and Visualization with pandas and ggplot2 2 | 3 | Read more about 4 | 5 | https://rstudio.github.io/reticulate/articles/r_markdown.html 6 | 7 | ## deploy 8 | 9 | ``` 10 | rsconnect::deployDoc() 11 | ``` 12 | 13 | ## Resources 14 | 15 | - [Posit Connect User Guide - RMarkdown](https://docs.posit.co/connect/user/rmarkdown/) 16 | - [Deploying Reticulated Content](https://solutions.rstudio.com/r/reticulate/#setting-up-a-reticulated-project) -------------------------------------------------------------------------------- /reticulated-rmarkdown-notebook/renv/.gitignore: 
-------------------------------------------------------------------------------- 1 | cellar/ 2 | sandbox/ 3 | library/ 4 | local/ 5 | lock/ 6 | python/ 7 | staging/ 8 | -------------------------------------------------------------------------------- /reticulated-rmarkdown-notebook/renv/settings.dcf: -------------------------------------------------------------------------------- 1 | bioconductor.version: 2 | external.libraries: 3 | ignored.packages: 4 | package.dependency.fields: Imports, Depends, LinkingTo 5 | r.version: 6 | snapshot.type: implicit 7 | use.cache: TRUE 8 | vcs.ignore.cellar: TRUE 9 | vcs.ignore.library: TRUE 10 | vcs.ignore.local: TRUE 11 | -------------------------------------------------------------------------------- /reticulated-rmarkdown-notebook/requirements.txt: -------------------------------------------------------------------------------- 1 | # requirements.txt generated by rsconnect-python on 2023-03-21 20:59:46.133264 2 | numpy==1.24.2 3 | pandas==1.5.3 4 | pip==23.3 5 | python-dateutil==2.8.2 6 | pytz==2022.7.1 7 | setuptools==67.6.0 8 | six==1.16.0 9 | wheel==0.40.0 10 | 11 | -------------------------------------------------------------------------------- /reticulated-rmarkdown-notebook/rmarkdown-notebook.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Reticulated R Markdown" 3 | subtitle: "Calling Python from an R Markdown document" 4 | output: html_notebook 5 | --- 6 | 7 | *** 8 | 9 | ### Setup 10 | 11 | ```{r setup} 12 | library(reticulate) 13 | readr::write_csv(nycflights13::flights, file = "flights.csv") 14 | 15 | print(reticulate::py_config()) 16 | ``` 17 | 18 | *** 19 | 20 | # From Python to R 21 | 22 | ### Use Python to manipulate data 23 | 24 | ```{python data} 25 | import pandas 26 | flights = pandas.read_csv("flights.csv") 27 | flights = flights[flights['dest'] == "ORD"] 28 | flights = flights[['carrier', 'dep_delay', 'arr_delay']] 29 | flights = flights.dropna() 30 | flights 31 | ``` 32 | 33 | ### Use R to visualize Pandas DataFrame 34 | 35 | ```{r, fig.width=7, fig.height=3} 36 | library(ggplot2) 37 | ggplot(py$flights, aes(carrier, arr_delay)) + geom_point() + geom_jitter() 38 | ``` 39 | 40 | *** 41 | 42 | # From R to Python 43 | 44 | ### Use R to read and manipulate data 45 | 46 | ```{r, message=FALSE} 47 | library(readr) 48 | library(dplyr) 49 | flights <- read_csv("flights.csv") %>% 50 | filter(dest == "ORD") %>% 51 | select(carrier, dep_delay, arr_delay) %>% 52 | na.omit() 53 | ``` 54 | 55 | ### Use Python to print R dataframe 56 | 57 | ```{python print} 58 | print(r.flights.head(10)) 59 | ``` 60 | 61 | *** 62 | 63 | See the [reticulate website](https://rstudio.github.io/reticulate/index.html) for details. 
64 | -------------------------------------------------------------------------------- /reticulated-rmarkdown-notebook/rmarkdown-notebook.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | AutoAppendNewline: Yes 16 | StripTrailingWhitespace: Yes 17 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/README.md: -------------------------------------------------------------------------------- 1 | # Sentiment Analysis with Plumber and spaCy 2 | 3 | ## Deploy 4 | 5 | ``` 6 | rsconnect::deployAPI() 7 | ``` 8 | ## Resources 9 | 10 | - [Posit Connect User Guide - Plumber](https://docs.posit.co/connect/user/plumber/) 11 | - [Deploying Reticulated Content](https://solutions.rstudio.com/r/reticulate/#setting-up-a-reticulated-project) -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/model/meta.json: -------------------------------------------------------------------------------- 1 | { 2 | "lang":"en", 3 | "name":"model", 4 | "version":"0.0.0", 5 | "spacy_version":">=2.3.7", 6 | "description":"", 7 | "author":"", 8 | "email":"", 9 | "url":"", 10 | "license":"", 11 | "spacy_git_version":"cae72e46d", 12 | "vectors":{ 13 | "width":0, 14 | "vectors":0, 15 | "keys":0, 16 | "name":"spacy_pretrained_vectors" 17 | }, 18 | "pipeline":[ 19 | "textcat" 20 | ], 21 | "factories":{ 22 | "textcat":"textcat" 23 | }, 24 | "labels":{ 25 | "textcat":[ 26 | "POSITIVE" 27 | ] 28 | } 29 | } -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/model/textcat/cfg: -------------------------------------------------------------------------------- 1 | { 2 | "labels":[ 3 | "POSITIVE" 4 | ], 5 | "pretrained_vectors":null, 6 | "pretrained_dims":null 7 | } -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/model/textcat/model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-api/model/textcat/model -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/model/tokenizer: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-api/model/tokenizer -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/model/vocab/key2row: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-api/model/vocab/key2row -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/model/vocab/lexemes.bin: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-api/model/vocab/lexemes.bin -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/model/vocab/lookups.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-api/model/vocab/lookups.bin -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/model/vocab/vectors: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-api/model/vocab/vectors -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/plumber.R: -------------------------------------------------------------------------------- 1 | library(plumber) 2 | library(reticulate) 3 | 4 | #* @apiTitle Sentiment Analysis Text API 5 | #* @apiDescription A Plumber API that uses R and Python to evaluate sentiment in 6 | #* text input using a pretrained spaCy model 7 | 8 | source_python('predict.py') 9 | 10 | #* @param input Text to classify 11 | #* @get /predict 12 | function(input = "This is happy text!") { 13 | result = predict(input) 14 | } 15 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/predict.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import spacy 4 | 5 | model_dir = "model" 6 | 7 | def predict(input): 8 | nlp = spacy.load(model_dir) 9 | doc = nlp(input) 10 | result = (input, doc.cats) 11 | return result 12 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/renv.lock: -------------------------------------------------------------------------------- 1 | { 2 | "R": { 3 | "Version": "4.1.0", 4 | "Repositories": [ 5 | { 6 | "Name": "CRAN", 7 | "URL": "https://cran.rstudio.com" 8 | } 9 | ] 10 | }, 11 | "Packages": { 12 | "Matrix": { 13 | "Package": "Matrix", 14 | "Version": "1.3-4", 15 | "Source": "Repository", 16 | "Repository": "CRAN", 17 | "Hash": "4ed05e9c9726267e4a5872e09c04587c" 18 | }, 19 | "R6": { 20 | "Package": "R6", 21 | "Version": "2.5.1", 22 | "Source": "Repository", 23 | "Repository": "CRAN", 24 | "Hash": "470851b6d5d0ac559e9d01bb352b4021" 25 | }, 26 | "Rcpp": { 27 | "Package": "Rcpp", 28 | "Version": "1.0.7", 29 | "Source": "Repository", 30 | "Repository": "CRAN", 31 | "Hash": "dab19adae4440ae55aa8a9d238b246bb" 32 | }, 33 | "base64enc": { 34 | "Package": "base64enc", 35 | "Version": "0.1-3", 36 | "Source": "Repository", 37 | "Repository": "RSPM", 38 | "Hash": "543776ae6848fde2f48ff3816d0628bc" 39 | }, 40 | "crayon": { 41 | "Package": "crayon", 42 | "Version": "1.4.1", 43 | "Source": "Repository", 44 | "Repository": "RSPM", 45 | "Hash": "e75525c55c70e5f4f78c9960a4b402e9" 46 | }, 47 | "curl": { 48 | "Package": "curl", 49 | "Version": "4.3.2", 50 | "Source": "Repository", 51 | "Repository": "RSPM", 52 | "Hash": "022c42d49c28e95d69ca60446dbabf88" 53 | }, 54 | "digest": { 55 | "Package": "digest", 56 | "Version": "0.6.25", 57 | "Source": "Repository", 58 | "Repository": "RSPM", 59 | "Hash": 
"f697db7d92b7028c4b3436e9603fb636" 60 | }, 61 | "ellipsis": { 62 | "Package": "ellipsis", 63 | "Version": "0.3.2", 64 | "Source": "Repository", 65 | "Repository": "RSPM", 66 | "Hash": "bb0eec2fe32e88d9e2836c2f73ea2077" 67 | }, 68 | "evaluate": { 69 | "Package": "evaluate", 70 | "Version": "0.14", 71 | "Source": "Repository", 72 | "Repository": "RSPM", 73 | "Hash": "ec8ca05cffcc70569eaaad8469d2a3a7" 74 | }, 75 | "glue": { 76 | "Package": "glue", 77 | "Version": "1.4.2", 78 | "Source": "Repository", 79 | "Repository": "RSPM", 80 | "Hash": "6efd734b14c6471cfe443345f3e35e29" 81 | }, 82 | "here": { 83 | "Package": "here", 84 | "Version": "1.0.1", 85 | "Source": "Repository", 86 | "Repository": "CRAN", 87 | "Hash": "24b224366f9c2e7534d2344d10d59211" 88 | }, 89 | "highr": { 90 | "Package": "highr", 91 | "Version": "0.9", 92 | "Source": "Repository", 93 | "Repository": "RSPM", 94 | "Hash": "8eb36c8125038e648e5d111c0d7b2ed4" 95 | }, 96 | "htmltools": { 97 | "Package": "htmltools", 98 | "Version": "0.5.1.1", 99 | "Source": "Repository", 100 | "Repository": "RSPM", 101 | "Hash": "af2c2531e55df5cf230c4b5444fc973c" 102 | }, 103 | "httpuv": { 104 | "Package": "httpuv", 105 | "Version": "1.6.3", 106 | "Source": "Repository", 107 | "Repository": "CRAN", 108 | "Hash": "65e865802fe6dd1bafef1dae5b80a844" 109 | }, 110 | "jsonlite": { 111 | "Package": "jsonlite", 112 | "Version": "1.7.2", 113 | "Source": "Repository", 114 | "Repository": "RSPM", 115 | "Hash": "98138e0994d41508c7a6b84a0600cfcb" 116 | }, 117 | "knitr": { 118 | "Package": "knitr", 119 | "Version": "1.33", 120 | "Source": "Repository", 121 | "Repository": "RSPM", 122 | "Hash": "0bc1b5da1b0eb07cd4b727e95e9ff0b8" 123 | }, 124 | "later": { 125 | "Package": "later", 126 | "Version": "1.3.0", 127 | "Source": "Repository", 128 | "Repository": "CRAN", 129 | "Hash": "7e7b457d7766bc47f2a5f21cc2984f8e" 130 | }, 131 | "lattice": { 132 | "Package": "lattice", 133 | "Version": "0.20-45", 134 | "Source": "Repository", 135 | "Repository": "CRAN", 136 | "Hash": "b64cdbb2b340437c4ee047a1f4c4377b" 137 | }, 138 | "lifecycle": { 139 | "Package": "lifecycle", 140 | "Version": "1.0.1", 141 | "Source": "Repository", 142 | "Repository": "CRAN", 143 | "Hash": "a6b6d352e3ed897373ab19d8395c98d0" 144 | }, 145 | "magrittr": { 146 | "Package": "magrittr", 147 | "Version": "2.0.1", 148 | "Source": "Repository", 149 | "Repository": "RSPM", 150 | "Hash": "41287f1ac7d28a92f0a286ed507928d3" 151 | }, 152 | "markdown": { 153 | "Package": "markdown", 154 | "Version": "1.1", 155 | "Source": "Repository", 156 | "Repository": "RSPM", 157 | "Hash": "61e4a10781dd00d7d81dd06ca9b94e95" 158 | }, 159 | "mime": { 160 | "Package": "mime", 161 | "Version": "0.12", 162 | "Source": "Repository", 163 | "Repository": "CRAN", 164 | "Hash": "18e9c28c1d3ca1560ce30658b22ce104" 165 | }, 166 | "plumber": { 167 | "Package": "plumber", 168 | "Version": "1.1.0", 169 | "Source": "Repository", 170 | "Repository": "RSPM", 171 | "Hash": "a367cf9e97db99014056d75f3a05b86c" 172 | }, 173 | "png": { 174 | "Package": "png", 175 | "Version": "0.1-7", 176 | "Source": "Repository", 177 | "Repository": "CRAN", 178 | "Hash": "03b7076c234cb3331288919983326c55" 179 | }, 180 | "promises": { 181 | "Package": "promises", 182 | "Version": "1.2.0.1", 183 | "Source": "Repository", 184 | "Repository": "RSPM", 185 | "Hash": "4ab2c43adb4d4699cf3690acd378d75d" 186 | }, 187 | "renv": { 188 | "Package": "renv", 189 | "Version": "0.14.0", 190 | "Source": "Repository", 191 | "Repository": "CRAN", 192 | "Hash": 
"30e5eba91b67f7f4d75d31de14bbfbdc" 193 | }, 194 | "reticulate": { 195 | "Package": "reticulate", 196 | "Version": "1.22", 197 | "Source": "Repository", 198 | "Repository": "CRAN", 199 | "Hash": "b34a8bb69005168078d1d546a53912b2" 200 | }, 201 | "rlang": { 202 | "Package": "rlang", 203 | "Version": "0.4.11", 204 | "Source": "Repository", 205 | "Repository": "RSPM", 206 | "Hash": "515f341d3affe0de9e4a7f762efb0456" 207 | }, 208 | "rmarkdown": { 209 | "Package": "rmarkdown", 210 | "Version": "2.9.5", 211 | "Source": "GitHub", 212 | "RemoteType": "github", 213 | "RemoteHost": "api.github.com", 214 | "RemoteRepo": "rmarkdown", 215 | "RemoteUsername": "rstudio", 216 | "RemoteRef": "master", 217 | "RemoteSha": "0af6b3556adf6e393b2da23c66c695724ea7bd2d", 218 | "Hash": "9ed52ad6879613547a205bc5088bb4ed" 219 | }, 220 | "rprojroot": { 221 | "Package": "rprojroot", 222 | "Version": "2.0.2", 223 | "Source": "Repository", 224 | "Repository": "RSPM", 225 | "Hash": "249d8cd1e74a8f6a26194a91b47f21d1" 226 | }, 227 | "sodium": { 228 | "Package": "sodium", 229 | "Version": "1.1", 230 | "Source": "Repository", 231 | "Repository": "CRAN", 232 | "Hash": "9f07fda8802e9a2ed7e91f20f2d814ce" 233 | }, 234 | "stringi": { 235 | "Package": "stringi", 236 | "Version": "1.7.5", 237 | "Source": "Repository", 238 | "Repository": "CRAN", 239 | "Hash": "cd50dc9b449de3d3b47cdc9976886999" 240 | }, 241 | "stringr": { 242 | "Package": "stringr", 243 | "Version": "1.4.0", 244 | "Source": "Repository", 245 | "Repository": "RSPM", 246 | "Hash": "0759e6b6c0957edb1311028a49a35e76" 247 | }, 248 | "swagger": { 249 | "Package": "swagger", 250 | "Version": "3.33.1", 251 | "Source": "Repository", 252 | "Repository": "RSPM", 253 | "Hash": "f28d25ed70c903922254157c11b0081d" 254 | }, 255 | "tinytex": { 256 | "Package": "tinytex", 257 | "Version": "0.33", 258 | "Source": "Repository", 259 | "Repository": "CRAN", 260 | "Hash": "6e0ad90ac5669e35d5456cb61b295acb" 261 | }, 262 | "webutils": { 263 | "Package": "webutils", 264 | "Version": "1.1", 265 | "Source": "Repository", 266 | "Repository": "RSPM", 267 | "Hash": "75d8b5b05fe22659b54076563f83f26a" 268 | }, 269 | "withr": { 270 | "Package": "withr", 271 | "Version": "2.4.2", 272 | "Source": "GitHub", 273 | "RemoteType": "github", 274 | "RemoteHost": "api.github.com", 275 | "RemoteUsername": "r-lib", 276 | "RemoteRepo": "withr", 277 | "RemoteRef": "master", 278 | "RemoteSha": "62555321e6e37521ba2452fa68f6eeb20f649558", 279 | "Hash": "26ce4259bfeba1a3726d3ceee611f0d2" 280 | }, 281 | "xfun": { 282 | "Package": "xfun", 283 | "Version": "0.25", 284 | "Source": "Repository", 285 | "Repository": "CRAN", 286 | "Hash": "853d45ffff0a9af1e0af017cd359f75e" 287 | }, 288 | "yaml": { 289 | "Package": "yaml", 290 | "Version": "2.2.1", 291 | "Source": "Repository", 292 | "Repository": "RSPM", 293 | "Hash": "2826c5d9efb0a88f657c7a679c7106db" 294 | } 295 | } 296 | } 297 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/renv/.gitignore: -------------------------------------------------------------------------------- 1 | library/ 2 | local/ 3 | lock/ 4 | python/ 5 | staging/ 6 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/renv/settings.dcf: -------------------------------------------------------------------------------- 1 | external.libraries: 2 | ignored.packages: 3 | package.dependency.fields: Imports, Depends, LinkingTo 4 | r.version: 5 | snapshot.type: implicit 6 | use.cache: 
TRUE 7 | vcs.ignore.library: TRUE 8 | vcs.ignore.local: TRUE 9 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/requirements.txt: -------------------------------------------------------------------------------- 1 | --index-url https://packagemanager.rstudio.com/pypi/latest/simple 2 | spacy==2.2.3 3 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/sentiment-analysis.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | AutoAppendNewline: Yes 16 | StripTrailingWhitespace: Yes 17 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/spacy_logo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-api/spacy_logo.jpg -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-api/train.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Sentiment Analysis with Python in RStudio" 3 | output: html_document 4 | --- 5 | 6 | ### Import modules 7 | 8 | ```{python} 9 | from __future__ import unicode_literals, print_function 10 | import plac 11 | import random 12 | from pathlib import Path 13 | import thinc.extra.datasets 14 | 15 | import spacy 16 | from spacy.util import minibatch, compounding 17 | ``` 18 | 19 | ### Define helper functions 20 | 21 | ```{python} 22 | def load_data(limit=0, split=0.8): 23 | """Load data from the IMDB dataset.""" 24 | # Partition off part of the train data for evaluation 25 | train_data, _ = thinc.extra.datasets.imdb() 26 | random.shuffle(train_data) 27 | train_data = train_data[-limit:] 28 | texts, labels = zip(*train_data) 29 | cats = [{'POSITIVE': bool(y)} for y in labels] 30 | split = int(len(train_data) * split) 31 | return (texts[:split], cats[:split]), (texts[split:], cats[split:]) 32 | ``` 33 | 34 | ```{python} 35 | def evaluate(tokenizer, textcat, texts, cats): 36 | docs = (tokenizer(text) for text in texts) 37 | tp = 0.0 # True positives 38 | fp = 1e-8 # False positives 39 | fn = 1e-8 # False negatives 40 | tn = 0.0 # True negatives 41 | for i, doc in enumerate(textcat.pipe(docs)): 42 | gold = cats[i] 43 | for label, score in doc.cats.items(): 44 | if label not in gold: 45 | continue 46 | if score >= 0.5 and gold[label] >= 0.5: 47 | tp += 1. 48 | elif score >= 0.5 and gold[label] < 0.5: 49 | fp += 1. 50 | elif score < 0.5 and gold[label] < 0.5: 51 | tn += 1 52 | elif score < 0.5 and gold[label] >= 0.5: 53 | fn += 1 54 | precision = tp / (tp + fp) 55 | recall = tp / (tp + fn) 56 | try: 57 | f_score = 2 * (precision * recall) / (precision + recall) 58 | except ZeroDivisionError: 59 | print("Warning! 
Zero Division Error, setting f_score to 1") 60 | f_score = 1 61 | return {'textcat_p': precision, 'textcat_r': recall, 'textcat_f': f_score} 62 | ``` 63 | 64 | ### Load blank model 65 | 66 | ```{python} 67 | nlp = spacy.blank('en') # create blank Language class 68 | print("Created blank 'en' model") 69 | ``` 70 | 71 | ### Setup text classifier 72 | 73 | ```{python} 74 | # add the text classifier to the pipeline if it doesn't exist 75 | # nlp.create_pipe works for built-ins that are registered with spaCy 76 | if 'textcat' not in nlp.pipe_names: 77 | textcat = nlp.create_pipe('textcat') 78 | nlp.add_pipe(textcat, last=True) 79 | # otherwise, get it, so we can add labels to it 80 | else: 81 | textcat = nlp.get_pipe('textcat') 82 | 83 | # add label to text classifier 84 | textcat.add_label('POSITIVE') 85 | ``` 86 | 87 | ### Load text data 88 | 89 | ```{python} 90 | n_texts=10000 91 | # load the IMDB dataset 92 | print("Loading IMDB data...") 93 | (train_texts, train_cats), (dev_texts, dev_cats) = load_data(limit=n_texts) 94 | print("Using {} examples ({} training, {} evaluation)" 95 | .format(n_texts, len(train_texts), len(dev_texts))) 96 | train_data = list(zip(train_texts, 97 | [{'cats': cats} for cats in train_cats])) 98 | ``` 99 | 100 | ### Train the model 101 | 102 | ```{python} 103 | n_iter=20 104 | # get names of other pipes to disable them during training 105 | other_pipes = [pipe for pipe in nlp.pipe_names if pipe != 'textcat'] 106 | with nlp.disable_pipes(*other_pipes): # only train textcat 107 | optimizer = nlp.begin_training() 108 | print("Training the model...") 109 | print('{:^5}\t{:^5}\t{:^5}\t{:^5}'.format('LOSS', 'P', 'R', 'F')) 110 | for i in range(n_iter): 111 | losses = {} 112 | # batch up the examples using spaCy's minibatch 113 | batches = minibatch(train_data, size=compounding(4., 32., 1.001)) 114 | for batch in batches: 115 | texts, annotations = zip(*batch) 116 | nlp.update(texts, annotations, sgd=optimizer, drop=0.2, 117 | losses=losses) 118 | with textcat.model.use_params(optimizer.averages): 119 | # evaluate on the dev data split off in load_data() 120 | scores = evaluate(nlp.tokenizer, textcat, dev_texts, dev_cats) 121 | print('{0:.3f}\t{1:.3f}\t{2:.3f}\t{3:.3f}' # print a simple table 122 | .format(losses['textcat'], scores['textcat_p'], 123 | scores['textcat_r'], scores['textcat_f'])) 124 | ``` 125 | 126 | ### Test the trained model 127 | 128 | ```{python} 129 | test_text_neg = "This movie was an terrible, awful rehash of past movies. I will never watch it again." 130 | doc = nlp(test_text_neg) 131 | print(test_text_neg, "\n", doc.cats) 132 | 133 | test_text_pos = "This great movie was a wonderful remake of the original version. I loved it!" 
134 | doc = nlp(test_text_pos) 135 | print(test_text_pos, "\n", doc.cats) 136 | ``` 137 | 138 | ### Save model to disk 139 | 140 | ```{python} 141 | output_dir = "model" 142 | 143 | if output_dir is not None: 144 | output_dir = Path(output_dir) 145 | if not output_dir.exists(): 146 | output_dir.mkdir() 147 | with nlp.use_params(optimizer.averages): 148 | nlp.to_disk(output_dir) 149 | print("Saved model to directory:", output_dir) 150 | ``` 151 | 152 | ### Test the saved model 153 | 154 | ```{python} 155 | print("Loading from", output_dir) 156 | nlp2 = spacy.load(output_dir) 157 | doc2 = nlp2(test_text_neg) 158 | print(test_text_neg, "\n", doc2.cats) 159 | doc3 = nlp2(test_text_pos) 160 | print(test_text_pos, "\n", doc3.cats) 161 | ``` 162 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/README.md: -------------------------------------------------------------------------------- 1 | # Sentiment Analysis with spaCy and Shiny 2 | 3 | ## Setup 4 | 5 | ```bash 6 | python train.py 7 | ``` 8 | 9 | ## Deploy 10 | 11 | ``` 12 | rsconnect::deployApp() 13 | ``` 14 | 15 | ## Resources 16 | 17 | - [Posit Connect User Guide - Shiny](https://docs.posit.co/connect/user/shiny/) 18 | - [Deploying Reticulated Content](https://solutions.rstudio.com/r/reticulate/#setting-up-a-reticulated-project) -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/app.R: -------------------------------------------------------------------------------- 1 | library(shiny) 2 | library(reticulate) 3 | 4 | source_python('predict.py') 5 | source_python('namesgenerator.py') 6 | 7 | to_title <- function(x) { 8 | s <- strsplit(x, " ")[[1]] 9 | paste(toupper(substring(s, 1,1)), substring(s, 2), 10 | sep="", collapse=" ") 11 | } 12 | 13 | ui <- fluidPage( 14 | tags$head( 15 | htmltools::tags$style( 16 | list( 17 | paste0(".fa-times-circle {", 18 | htmltools::css("color" = "red"), 19 | "}"), 20 | paste0(".fa-star {", 21 | htmltools::css("color" = "gold"), 22 | "}"), 23 | paste0(".shiny-flow-layout>div {", 24 | htmltools::css("width" = "40%"), 25 | "}") 26 | ) 27 | ) 28 | ), 29 | titlePanel("Sentiment Analysis"), 30 | shiny::inputPanel( 31 | tagList( 32 | textInput( 33 | "raw_title", 34 | "Movie Title", 35 | placeholder = "Title of the movie" 36 | ), 37 | textAreaInput( 38 | "raw_review", 39 | "Movie Review", 40 | placeholder = "Type your review here..." 41 | ), 42 | actionButton("submit", "Submit Review"), 43 | br(), 44 | br() 45 | ), 46 | tagList( 47 | actionButton("sample_1", "Sample Review 1"), 48 | actionButton("sample_2", "Sample Review 2"), 49 | actionButton("sample_3", "Sample Review 3"), 50 | actionButton("sample_4", "Sample Review 4") 51 | ) 52 | ), 53 | shiny::fluidRow( 54 | uiOutput("review") 55 | ) 56 | ) 57 | 58 | server <- function(input, output, session) { 59 | raw_html <- reactiveVal() 60 | observeEvent(input$submit, { 61 | req(input$raw_title, input$raw_review) 62 | 63 | raw_html( 64 | build_pretty_output(input$raw_title, input$raw_review) 65 | ) 66 | }) 67 | 68 | observeEvent(input$sample_1, { 69 | raw_html( 70 | build_pretty_output( 71 | "The Incredibles", 72 | paste( 73 | "This movie was well named - incredible!", 74 | "So much excitement and fun, along with quality", 75 | "heroes and hearty laughter. I recommend this movie highly!" 
76 | ) 77 | ) 78 | ) 79 | }) 80 | 81 | observeEvent(input$sample_2, { 82 | raw_html( 83 | build_pretty_output( 84 | "The Not So Incredibles", 85 | paste( 86 | "What were the writers thinking?", 87 | "It's almost like they forgot what their job", 88 | "was, or how to make a movie entertaining. This", 89 | "movie was bores-ville from start to finish, simply", 90 | "awful. Don't waste your time going to see this one!" 91 | ) 92 | ) 93 | ) 94 | }) 95 | 96 | observeEvent(input$sample_3, { 97 | raw_html( 98 | build_pretty_output( 99 | "Questionable Questioners", 100 | paste( 101 | "The Questionable Questioners will make you question...", 102 | "Why are we here? What are we doing? Why am I writing this review?", 103 | "I don't know where I stand. Maybe it was not so great,", 104 | "or maybe it was pretty good after all.", 105 | "I think you will just have to see for yourself." 106 | ) 107 | ) 108 | ) 109 | }) 110 | 111 | observeEvent(input$sample_4, { 112 | raw_html( 113 | build_pretty_output( 114 | "Briefing Briefers", 115 | paste( 116 | "What did I just watch? It was ok." 117 | ) 118 | ) 119 | ) 120 | }) 121 | 122 | output$review <- renderUI(raw_html()) 123 | } 124 | 125 | build_pretty_output <- function(title, review) { 126 | reviewer_name <- to_title(py$get_random_name(sep = " ")) 127 | 128 | predict_output <- py$predict(input = review) 129 | 130 | score <- predict_output[[2]][["POSITIVE"]] 131 | num_stars <- (score + 0.1) %/% 0.2 132 | 133 | score_pretty <- format(round(score * 100, 2), nsmall = 2) 134 | adj <- switch( 135 | num_stars + 1, 136 | "A scathing review", 137 | "A harsh review", 138 | "A disappointed review", 139 | "A less-than-stellar review", 140 | "A excited review", 141 | "A glowing review") 142 | 143 | if (num_stars) { 144 | ui_icon <- shiny::icon( 145 | name = "star", 146 | lib = "font-awesome", 147 | class = "fa-4x" 148 | ) 149 | 150 | ui_output <- tagList( 151 | h2(to_title(title)), 152 | h4(paste( 153 | adj, 154 | "from", 155 | reviewer_name 156 | )), 157 | do.call( 158 | div, 159 | lapply( 160 | seq_len(num_stars), 161 | function(x, ui_icon){return(ui_icon)}, 162 | ui_icon=ui_icon) 163 | ), 164 | br(), 165 | p(paste("Score:", score_pretty, "/", "100")), 166 | shiny::wellPanel(tags$blockquote(review)) 167 | ) 168 | } else { 169 | ui_icon <- shiny::icon( 170 | name = "times-circle", 171 | lib = "font-awesome", 172 | class = "fa-4x" 173 | ) 174 | ui_output <- tagList( 175 | h2(to_title(title)), 176 | h4(paste( 177 | adj, 178 | "from", 179 | reviewer_name 180 | )), 181 | div( 182 | ui_icon 183 | ), 184 | br(), 185 | p(paste("Score:", score_pretty, "/", "100")), 186 | shiny::wellPanel(tags$blockquote(review)) 187 | ) 188 | } 189 | 190 | return( 191 | div( 192 | ui_output, 193 | style = htmltools::css("text-align" = "center") 194 | ) 195 | ) 196 | } 197 | 198 | 199 | shinyApp(ui = ui, server = server) 200 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/model/meta.json: -------------------------------------------------------------------------------- 1 | { 2 | "lang":"en", 3 | "name":"model", 4 | "version":"0.0.0", 5 | "spacy_version":">=2.3.7", 6 | "description":"", 7 | "author":"", 8 | "email":"", 9 | "url":"", 10 | "license":"", 11 | "spacy_git_version":"cae72e46d", 12 | "vectors":{ 13 | "width":0, 14 | "vectors":0, 15 | "keys":0, 16 | "name":"spacy_pretrained_vectors" 17 | }, 18 | "pipeline":[ 19 | "textcat" 20 | ], 21 | "factories":{ 22 | "textcat":"textcat" 23 | }, 24 | "labels":{ 25 | "textcat":[ 26 | 
"POSITIVE" 27 | ] 28 | } 29 | } -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/model/textcat/cfg: -------------------------------------------------------------------------------- 1 | { 2 | "labels":[ 3 | "POSITIVE" 4 | ], 5 | "pretrained_vectors":null, 6 | "pretrained_dims":null 7 | } -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/model/textcat/model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-app/model/textcat/model -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/model/tokenizer: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-app/model/tokenizer -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/model/vocab/key2row: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-app/model/vocab/key2row -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/model/vocab/lexemes.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-app/model/vocab/lexemes.bin -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/model/vocab/lookups.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-app/model/vocab/lookups.bin -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/model/vocab/vectors: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated-sentiment-analysis-app/model/vocab/vectors -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/predict.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import spacy 4 | 5 | model_dir = "model" 6 | 7 | def predict(input): 8 | nlp = spacy.load(model_dir) 9 | doc = nlp(input) 10 | result = (input, doc.cats) 11 | return result 12 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/renv/.gitignore: -------------------------------------------------------------------------------- 1 | library/ 2 | local/ 3 | lock/ 4 | python/ 5 | staging/ 6 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/renv/settings.dcf: -------------------------------------------------------------------------------- 1 | external.libraries: 2 | 
ignored.packages: 3 | package.dependency.fields: Imports, Depends, LinkingTo 4 | r.version: 5 | snapshot.type: implicit 6 | use.cache: TRUE 7 | vcs.ignore.library: TRUE 8 | vcs.ignore.local: TRUE 9 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/requirements.txt: -------------------------------------------------------------------------------- 1 | --index-url https://packagemanager.rstudio.com/pypi/latest/simple 2 | blis==0.7.4 3 | catalogue==1.0.0 4 | certifi==2023.7.22 5 | charset-normalizer==2.0.6 6 | cymem==2.0.5 7 | idna==3.2 8 | murmurhash==1.0.5 9 | numpy==1.22.0 10 | plac==1.1.3 11 | preshed==3.0.5 12 | requests==2.31.0 13 | spacy==2.3.7 14 | srsly==1.0.5 15 | thinc==7.4.5 16 | tqdm==4.62.3 17 | urllib3==2.0.7 18 | wasabi==0.8.2 19 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/sentiment-analysis-app.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | AutoAppendNewline: Yes 16 | StripTrailingWhitespace: Yes 17 | -------------------------------------------------------------------------------- /reticulated-sentiment-analysis-app/train.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Sentiment Analysis with Python in RStudio" 3 | output: html_document 4 | --- 5 | 6 | ### Import modules 7 | 8 | ```{python} 9 | from __future__ import unicode_literals, print_function 10 | import plac 11 | import random 12 | from pathlib import Path 13 | import thinc.extra.datasets 14 | 15 | import spacy 16 | from spacy.util import minibatch, compounding 17 | ``` 18 | 19 | ### Define helper functions 20 | 21 | ```{python} 22 | def load_data(limit=0, split=0.8): 23 | """Load data from the IMDB dataset.""" 24 | # Partition off part of the train data for evaluation 25 | train_data, _ = thinc.extra.datasets.imdb() 26 | random.shuffle(train_data) 27 | train_data = train_data[-limit:] 28 | texts, labels = zip(*train_data) 29 | cats = [{'POSITIVE': bool(y)} for y in labels] 30 | split = int(len(train_data) * split) 31 | return (texts[:split], cats[:split]), (texts[split:], cats[split:]) 32 | ``` 33 | 34 | ```{python} 35 | def evaluate(tokenizer, textcat, texts, cats): 36 | docs = (tokenizer(text) for text in texts) 37 | tp = 0.0 # True positives 38 | fp = 1e-8 # False positives 39 | fn = 1e-8 # False negatives 40 | tn = 0.0 # True negatives 41 | for i, doc in enumerate(textcat.pipe(docs)): 42 | gold = cats[i] 43 | for label, score in doc.cats.items(): 44 | if label not in gold: 45 | continue 46 | if score >= 0.5 and gold[label] >= 0.5: 47 | tp += 1. 48 | elif score >= 0.5 and gold[label] < 0.5: 49 | fp += 1. 50 | elif score < 0.5 and gold[label] < 0.5: 51 | tn += 1 52 | elif score < 0.5 and gold[label] >= 0.5: 53 | fn += 1 54 | precision = tp / (tp + fp) 55 | recall = tp / (tp + fn) 56 | try: 57 | f_score = 2 * (precision * recall) / (precision + recall) 58 | except ZeroDivisionError: 59 | print("Warning! 
Zero Division Error, setting f_score to 1") 60 | f_score = 1 61 | return {'textcat_p': precision, 'textcat_r': recall, 'textcat_f': f_score} 62 | ``` 63 | 64 | ### Load blank model 65 | 66 | ```{python} 67 | nlp = spacy.blank('en') # create blank Language class 68 | print("Created blank 'en' model") 69 | ``` 70 | 71 | ### Setup text classifier 72 | 73 | ```{python} 74 | # add the text classifier to the pipeline if it doesn't exist 75 | # nlp.create_pipe works for built-ins that are registered with spaCy 76 | if 'textcat' not in nlp.pipe_names: 77 | textcat = nlp.create_pipe('textcat') 78 | nlp.add_pipe(textcat, last=True) 79 | # otherwise, get it, so we can add labels to it 80 | else: 81 | textcat = nlp.get_pipe('textcat') 82 | 83 | # add label to text classifier 84 | textcat.add_label('POSITIVE') 85 | ``` 86 | 87 | ### Load text data 88 | 89 | ```{python} 90 | n_texts=10000 91 | # load the IMDB dataset 92 | print("Loading IMDB data...") 93 | (train_texts, train_cats), (dev_texts, dev_cats) = load_data(limit=n_texts) 94 | print("Using {} examples ({} training, {} evaluation)" 95 | .format(n_texts, len(train_texts), len(dev_texts))) 96 | train_data = list(zip(train_texts, 97 | [{'cats': cats} for cats in train_cats])) 98 | ``` 99 | 100 | ### Train the model 101 | 102 | ```{python} 103 | n_iter=20 104 | # get names of other pipes to disable them during training 105 | other_pipes = [pipe for pipe in nlp.pipe_names if pipe != 'textcat'] 106 | with nlp.disable_pipes(*other_pipes): # only train textcat 107 | optimizer = nlp.begin_training() 108 | print("Training the model...") 109 | print('{:^5}\t{:^5}\t{:^5}\t{:^5}'.format('LOSS', 'P', 'R', 'F')) 110 | for i in range(n_iter): 111 | losses = {} 112 | # batch up the examples using spaCy's minibatch 113 | batches = minibatch(train_data, size=compounding(4., 32., 1.001)) 114 | for batch in batches: 115 | texts, annotations = zip(*batch) 116 | nlp.update(texts, annotations, sgd=optimizer, drop=0.2, 117 | losses=losses) 118 | with textcat.model.use_params(optimizer.averages): 119 | # evaluate on the dev data split off in load_data() 120 | scores = evaluate(nlp.tokenizer, textcat, dev_texts, dev_cats) 121 | print('{0:.3f}\t{1:.3f}\t{2:.3f}\t{3:.3f}' # print a simple table 122 | .format(losses['textcat'], scores['textcat_p'], 123 | scores['textcat_r'], scores['textcat_f'])) 124 | ``` 125 | 126 | ### Test the trained model 127 | 128 | ```{python} 129 | test_text_neg = "This movie was an terrible, awful rehash of past movies. I will never watch it again." 130 | doc = nlp(test_text_neg) 131 | print(test_text_neg, "\n", doc.cats) 132 | 133 | test_text_pos = "This great movie was a wonderful remake of the original version. I loved it!" 
134 | doc = nlp(test_text_pos) 135 | print(test_text_pos, "\n", doc.cats) 136 | ``` 137 | 138 | ### Save model to disk 139 | 140 | ```{python} 141 | output_dir = "model" 142 | 143 | if output_dir is not None: 144 | output_dir = Path(output_dir) 145 | if not output_dir.exists(): 146 | output_dir.mkdir() 147 | with nlp.use_params(optimizer.averages): 148 | nlp.to_disk(output_dir) 149 | print("Saved model to directory:", output_dir) 150 | ``` 151 | 152 | ### Test the saved model 153 | 154 | ```{python} 155 | print("Loading from", output_dir) 156 | nlp2 = spacy.load(output_dir) 157 | doc2 = nlp2(test_text_neg) 158 | print(test_text_neg, "\n", doc2.cats) 159 | doc3 = nlp2(test_text_pos) 160 | print(test_text_pos, "\n", doc3.cats) 161 | ``` 162 | -------------------------------------------------------------------------------- /reticulated_python.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/reticulated_python.png -------------------------------------------------------------------------------- /shiny-income-share/.internal.yml: -------------------------------------------------------------------------------- 1 | default: 2 | content: 3 | - name: "Interactive Data Exploration with Shiny for Python" 4 | path: "." 5 | description: "Easy web apps for data science without the compromises." 6 | tag: 7 | - "Examples|Python|Shiny" 8 | url: "/python-examples/shiny/" 9 | image: "shiny-income-share.png" 10 | -------------------------------------------------------------------------------- /shiny-income-share/README.md: -------------------------------------------------------------------------------- 1 | # Data Visualization with Shiny for Python 2 | 3 | ## Deploy 4 | 5 | ``` 6 | rsconnect deploy shiny . 
-n 7 | ``` 8 | 9 | ## Resources 10 | 11 | [Posit Connect User Guide - Shiny for Python](https://docs.posit.co/connect/user/shiny-python/) 12 | -------------------------------------------------------------------------------- /shiny-income-share/app.py: -------------------------------------------------------------------------------- 1 | import altair as alt 2 | import pandas as pd 3 | from shinywidgets import output_widget, render_widget 4 | from shiny import App, reactive, ui 5 | 6 | income_shares = pd.read_csv("data.csv") 7 | countries = income_shares["Entity"].unique().tolist() 8 | 9 | select_countries = { 10 | "default": ["Australia", "China", "Germany", "Japan", "United States"], 11 | "latam": ["Argentina", "Uruguay"], 12 | "apac": ["Australia", "China", "Singapore", "Japan", "Korea, South"], 13 | "emea": ["Mauritius", "France", "Italy", "Norway", "Spain"], 14 | "na": ["United States", "Canada"], 15 | } 16 | 17 | app_ui = ui.page_fluid( 18 | ui.panel_title("Top 5% Income Share"), 19 | ui.p("Share of income received by the richest 5% of the population"), 20 | ui.layout_sidebar( 21 | ui.panel_sidebar( 22 | ui.input_selectize( 23 | "countries", 24 | "Countries:", 25 | choices=countries, 26 | multiple=True, 27 | selected=select_countries["default"], 28 | ), 29 | ui.p("Regions:"), 30 | ui.TagList( 31 | ui.div( 32 | {"class": "btn-group"}, 33 | ui.input_action_button("apac", "APAC"), 34 | ui.input_action_button("emea", "EMEA"), 35 | ui.input_action_button("latam", "LATAM"), 36 | ui.input_action_button("na", "NA"), 37 | ) 38 | ), 39 | ui.input_slider( 40 | "year_range", 41 | "Year Range:", 42 | min=1946, 43 | max=2015, 44 | value=(1946, 2015), 45 | sep="", 46 | ), 47 | ), 48 | ui.panel_main( 49 | output_widget("income_plot", width="800px"), 50 | ), 51 | ), 52 | ) 53 | 54 | 55 | def server(input, output, session): 56 | @reactive.Calc 57 | def plot_data(): 58 | df = income_shares.loc[ 59 | (income_shares["Entity"].isin(input.countries())) 60 | & (income_shares["Year"] >= input.year_range()[0]) 61 | & (income_shares["Year"] <= input.year_range()[1]) 62 | ] 63 | return df 64 | 65 | @output 66 | @render_widget 67 | def income_plot(): 68 | chart = ( 69 | alt.Chart(plot_data()) 70 | .mark_line() 71 | .encode( 72 | x=alt.X("Year", axis=alt.Axis(format="d")), 73 | y=alt.Y("Percent", axis=alt.Axis(format="~s")), 74 | color="Entity", 75 | strokeDash="Entity", 76 | ) 77 | ) 78 | return chart 79 | 80 | def make_button_listener(name): 81 | @reactive.Effect 82 | @reactive.event(input[name]) 83 | def _(): 84 | ui.update_selectize("countries", selected=select_countries[name]) 85 | 86 | for name in select_countries.keys(): 87 | make_button_listener(name) 88 | 89 | 90 | app = App(app_ui, server) 91 | if __name__ == "__main__": 92 | app.run() 93 | -------------------------------------------------------------------------------- /shiny-income-share/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "python-shiny", 6 | "entrypoint": "app" 7 | }, 8 | "python": { 9 | "version": "3.10.0", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.0.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "79e99f906f2f27d63b7412606918bd8a" 19 | }, 20 | "app.py": { 21 | "checksum": "a7726fc4fe5374b54158a049180653a1" 22 | }, 23 | "data.csv": { 24 | "checksum": "8da5e3f4e5d46c024310cd04354f81bd" 25 | } 26 | } 27 | } 28 | 
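A note on the shiny-income-share app above: each region button updates the country selector through a small closure factory rather than four copy-pasted handlers. A minimal standalone sketch of that pattern is below; the preset lists and widget IDs here are illustrative stand-ins, not part of the example app.

```python
# Minimal sketch of the per-button reactive listener pattern used in
# shiny-income-share/app.py. The presets and widget IDs are illustrative.
from shiny import App, reactive, ui

presets = {
    "apac": ["Australia", "Japan", "Singapore"],
    "na": ["United States", "Canada"],
}
countries = sorted({c for group in presets.values() for c in group})

app_ui = ui.page_fluid(
    ui.input_selectize("countries", "Countries:", choices=countries, multiple=True),
    *[ui.input_action_button(name, name.upper()) for name in presets],
)

def server(input, output, session):
    def make_button_listener(name):
        # One reactive effect per button; the closure captures `name`.
        @reactive.Effect
        @reactive.event(input[name])
        def _():
            ui.update_selectize("countries", selected=presets[name])

    for name in presets:
        make_button_listener(name)

app = App(app_ui, server)
```

Run it locally with `shiny run app.py` (assuming the `shiny` package from requirements.txt is installed) before deploying with `rsconnect deploy shiny` as shown in the README.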
-------------------------------------------------------------------------------- /shiny-income-share/requirements.txt: -------------------------------------------------------------------------------- 1 | --index-url https://p3m.dev/pypi/2023-12-05/simple 2 | anywidget 3 | shiny 4 | shinywidgets 5 | vega 6 | altair 7 | pandas 8 | six 9 | -------------------------------------------------------------------------------- /shiny-income-share/shiny-income-share.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/shiny-income-share/shiny-income-share.png -------------------------------------------------------------------------------- /streamlit-income-share/.internal.yml: -------------------------------------------------------------------------------- 1 | default: 2 | content: 3 | - name: "Interactive Data Exploration with Streamlit" 4 | path: "." 5 | description: "A Streamlit application makes it easy to transform your analysis into an interactive dashboard using Python." 6 | tag: 7 | - "Examples|Python|Streamlit" 8 | url: "/python-examples/streamlit/" 9 | image: "streamlit-income-share.png" 10 | -------------------------------------------------------------------------------- /streamlit-income-share/README.md: -------------------------------------------------------------------------------- 1 | # Data Visualization with Streamlit 2 | 3 | ## Deploy 4 | 5 | ``` 6 | rsconnect deploy streamlit . -n 7 | ``` 8 | 9 | ## Resources 10 | 11 | [Posit Connect User Guide - Streamlit](https://docs.posit.co/connect/user/streamlit/) 12 | -------------------------------------------------------------------------------- /streamlit-income-share/app.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | 3 | import altair as alt 4 | import pandas as pd 5 | import streamlit as st 6 | 7 | HERE = os.path.dirname(os.path.abspath(__file__)) 8 | 9 | st.title("Top 5%" " income share") 10 | st.markdown("Share of income received by the richest 5%" " of the population.") 11 | DATA = os.path.join(HERE, "data.csv") 12 | 13 | 14 | @st.cache_data 15 | def load_data(nrows): 16 | return pd.read_csv("./data.csv", nrows=nrows) 17 | 18 | 19 | data_load_state = st.text("Loading data...") 20 | data = load_data(10000) 21 | data_load_state.text("") 22 | 23 | countries = st.multiselect( 24 | "Countries", 25 | list(sorted({d for d in data["Entity"]})), 26 | default=["Australia", "China", "Germany", "Japan", "United States"], 27 | ) 28 | earliest_year = data["Year"].min() 29 | latest_year = data["Year"].max() 30 | min_year, max_year = st.slider( 31 | "Year Range", 32 | min_value=int(earliest_year), 33 | max_value=int(latest_year), 34 | value=[int(earliest_year), int(latest_year)], 35 | ) 36 | filtered_data = data[data["Entity"].isin(countries)] 37 | filtered_data = filtered_data[filtered_data["Year"] >= min_year] 38 | filtered_data = filtered_data[filtered_data["Year"] <= max_year] 39 | 40 | chart = ( 41 | alt.Chart(filtered_data) 42 | .mark_line() 43 | .encode( 44 | x=alt.X("Year", axis=alt.Axis(format="d")), 45 | y=alt.Y("Percent", axis=alt.Axis(format="~s")), 46 | color="Entity", 47 | strokeDash="Entity", 48 | ) 49 | ) 50 | st.altair_chart(chart, use_container_width=True) 51 | 52 | if st.checkbox("Show raw data"): 53 | st.subheader("Raw data") 54 | st.write(filtered_data) 55 | 56 | st.markdown("Source: ") 57 | 
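In the streamlit-income-share app above, the `@st.cache_data` decorator is what keeps `data.csv` from being re-read on every widget interaction. A minimal sketch of the same caching pattern in isolation; the path and row limit are illustrative:

```python
# Minimal sketch of Streamlit's data-caching pattern, mirroring
# streamlit-income-share/app.py. The path and nrows values are illustrative.
import pandas as pd
import streamlit as st

@st.cache_data
def load_data(path: str, nrows: int) -> pd.DataFrame:
    # Re-executes only when (path, nrows) changes; otherwise Streamlit
    # returns the cached DataFrame.
    return pd.read_csv(path, nrows=nrows)

df = load_data("data.csv", 10_000)
st.write(df.head())
```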
-------------------------------------------------------------------------------- /streamlit-income-share/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "locale": "en_US.UTF-8", 4 | "metadata": { 5 | "appmode": "python-streamlit", 6 | "entrypoint": "app" 7 | }, 8 | "python": { 9 | "version": "3.10.0", 10 | "package_manager": { 11 | "name": "pip", 12 | "version": "23.0.1", 13 | "package_file": "requirements.txt" 14 | } 15 | }, 16 | "files": { 17 | "requirements.txt": { 18 | "checksum": "dc5fd71da5f6d9a5ac6121e4f25a8f25" 19 | }, 20 | "app.py": { 21 | "checksum": "2b45ae76a59492827ba0370737a2a3d5" 22 | }, 23 | "data.csv": { 24 | "checksum": "8da5e3f4e5d46c024310cd04354f81bd" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /streamlit-income-share/requirements.txt: -------------------------------------------------------------------------------- 1 | altair==4.2.2 2 | attrs==22.2.0 3 | blinker==1.5 4 | cachetools==5.3.0 5 | certifi==2023.7.22 6 | charset-normalizer==3.1.0 7 | click==8.1.3 8 | decorator==5.1.1 9 | entrypoints==0.4 10 | gitdb==4.0.10 11 | GitPython==3.1.37 12 | idna==3.4 13 | importlib-metadata==6.1.0 14 | Jinja2==3.1.2 15 | jsonschema==4.17.3 16 | markdown-it-py==2.2.0 17 | MarkupSafe==2.1.2 18 | mdurl==0.1.2 19 | numpy==1.24.2 20 | packaging==23.0 21 | pandas==1.5.3 22 | Pillow==10.0.1 23 | protobuf==3.20.3 24 | pyarrow==14.0.1 25 | pydeck==0.8.0 26 | Pygments==2.15.0 27 | Pympler==1.0.1 28 | pyrsistent==0.19.3 29 | python-dateutil==2.8.2 30 | pytz==2022.7.1 31 | pytz-deprecation-shim==0.1.0.post0 32 | requests==2.31.0 33 | rich==13.3.2 34 | semver==2.13.0 35 | six==1.16.0 36 | smmap==5.0.0 37 | streamlit==1.20.0 38 | toml==0.10.2 39 | toolz==0.12.0 40 | tornado==6.3.3 41 | typing_extensions==4.5.0 42 | tzdata==2022.7 43 | tzlocal==4.3 44 | urllib3==1.26.18 45 | validators==0.20.0 46 | zipp==3.15.0 47 | -------------------------------------------------------------------------------- /streamlit-income-share/streamlit-income-share.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sol-eng/python-examples/84ab6f706f41ea9c23720699f0bed7e97743f7d4/streamlit-income-share/streamlit-income-share.png --------------------------------------------------------------------------------
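One closing note: the reticulated-sentiment-analysis-api example earlier in this collection exposes its spaCy model through a Plumber endpoint, `GET /predict`, which takes a single `input` query parameter. A hedged client-side sketch is below; the Connect content URL and API key are placeholders, not real values.

```python
# Illustrative client call against the deployed Plumber sentiment API.
# The content URL and API key below are placeholders; substitute your own.
import requests

CONNECT_CONTENT_URL = "https://connect.example.com/content/sentiment-api"  # placeholder
API_KEY = "YOUR-CONNECT-API-KEY"  # placeholder

resp = requests.get(
    f"{CONNECT_CONTENT_URL}/predict",
    params={"input": "This is happy text!"},
    headers={"Authorization": f"Key {API_KEY}"},
)
resp.raise_for_status()
print(resp.json())  # e.g. the (text, category scores) pair returned by predict()
```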