├── .DS_Store
├── .github
│   ├── FUNDING.yml
│   ├── dependabot.yml
│   └── workflows
│       ├── Streamlit-workflow.yml
│       ├── Youtube-workflow.yml
│       ├── bioinfo-workflow.yml
│       ├── blog-post-workflow.yml
│       ├── ml-automation.yml
│       └── python-automation.yml
├── .gitmodules
├── Bioinformatics with Python
│   └── README.md
├── LICENSE
├── Machine-Learning for all
│   └── README.md
├── Python-Automation
│   └── README.md
├── README.md
└── Streamlit-Python
    ├── .DS_Store
    ├── README.md
    ├── Streamlit-AgGrid-Usage
    │   ├── .DS_Store
    │   ├── .gitignore
    │   ├── Data
    │   │   └── covid-variants.csv
    │   ├── README.md
    │   ├── __pycache__
    │   │   └── database.cpython-39.pyc
    │   ├── database.py
    │   ├── pages
    │   │   ├── aggrid-button-app.py
    │   │   └── agimage.py
    │   ├── requirements.txt
    │   └── streamlit-ag-app.py
    ├── Streamlit_EmbedTweets
    │   └── app.py
    ├── Streamlit_Firestore_SocialApp
    │   ├── README.md
    │   └── app.py
    ├── Streamlit_GoogleSheets_Automation
    │   └── main.py
    ├── Streamlit_Notion_Integration
    │   └── app.py
    └── Streamlit_WeatherApp
        └── weatherapp.py
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/avrabyt/YouTube-Tutorials/148ba08fea304bd04916268d6c4e2cd1052d67b7/.DS_Store
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: avrabyt
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
13 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
14 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | updates:
4 | - package-ecosystem: gitsubmodule
5 | schedule:
6 | interval: "daily"
7 | directory: /
8 |
--------------------------------------------------------------------------------
/.github/workflows/Streamlit-workflow.yml:
--------------------------------------------------------------------------------
1 | name: Streamlit Playlist
2 | on:
3 | schedule:
 4 |     # Runs at 00:00 UTC every day
5 | - cron: "0 0 * * *"
6 | workflow_dispatch:
7 |
8 | jobs:
9 | update-readme-with-youtube:
10 | name: Update this repo's README with latest videos from YouTube
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v2
14 | - uses: gautamkrishnar/blog-post-workflow@master
15 | with:
16 | readme_path: "./Streamlit-Python/README.md"
17 | comment_tag_name: "STREAMLIT"
18 | max_post_count: 50
19 | feed_list: "https://www.youtube.com/feeds/videos.xml?playlist_id=PLqQrRCH56DH8JSoGC3hsciV-dQhgFGS1K"
20 | custom_tags: "channelId/yt:channelId/,videoId/yt:videoId/"
21 | date_format: "mmm d, yyyy"
22 | template: '
$newline'
23 | committer_email: "avrab.yt@gmail.com"
24 |
--------------------------------------------------------------------------------
/.github/workflows/Youtube-workflow.yml:
--------------------------------------------------------------------------------
1 | name: Latest YouTube Videos
2 | on:
3 | schedule:
 4 |     # Runs at 00:00 UTC every day
5 | - cron: "0 0 * * *"
6 | workflow_dispatch:
7 |
8 | jobs:
9 | update-readme-with-youtube:
10 | name: Update this repo's README with latest videos from YouTube
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v2
14 | - uses: gautamkrishnar/blog-post-workflow@master
15 | with:
16 | comment_tag_name: "YOUTUBE"
17 | feed_list: "https://www.youtube.com/feeds/videos.xml?channel_id=UCDMP6ATYKNXMvn2ok1gfM7Q"
18 | custom_tags: "channelId/yt:channelId/,videoId/yt:videoId/"
19 | date_format: "mmm d, yyyy"
20 | template: '$newline'
21 | committer_email: "avrab.yt@gmail.com"
22 |
--------------------------------------------------------------------------------
/.github/workflows/bioinfo-workflow.yml:
--------------------------------------------------------------------------------
1 | name: Bioinformatics Playlist
2 | on:
3 | schedule:
 4 |     # Runs at 00:00 UTC every day
5 | - cron: "0 0 * * *"
6 | workflow_dispatch:
7 |
8 | jobs:
9 | update-readme-with-youtube:
10 | # https://youtube.com/playlist?list=PLqQrRCH56DH-nTgLekThf60dkOguB-fRi
11 |     name: Update this repo's README with latest videos from the Bioinformatics Playlist
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v2
15 | - uses: gautamkrishnar/blog-post-workflow@master
16 | with:
17 | readme_path: "./Bioinformatics with Python/README.md"
18 | comment_tag_name: "Bioinformatics"
19 | max_post_count: 50
20 | feed_list: "https://www.youtube.com/feeds/videos.xml?playlist_id=PLqQrRCH56DH-nTgLekThf60dkOguB-fRi"
21 | custom_tags: "channelId/yt:channelId/,videoId/yt:videoId/"
22 | date_format: "mmm d, yyyy"
23 | template: '$newline'
24 | committer_email: "avrab.yt@gmail.com"
25 |
--------------------------------------------------------------------------------
/.github/workflows/blog-post-workflow.yml:
--------------------------------------------------------------------------------
1 | name: Latest blog post workflow
2 | on:
3 | schedule: # Run workflow automatically
 4 |     - cron: '0 0 * * *' # Runs at 00:00 UTC every day
5 | workflow_dispatch: # Run workflow manually (without waiting for the cron to be called), through the GitHub Actions Workflow page directly
6 |
7 | jobs:
8 | update-readme-with-blog:
9 | name: Update this repo's README with latest blog posts
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Checkout
13 | uses: actions/checkout@v2
14 | - name: Pull in dev.to posts
15 | uses: gautamkrishnar/blog-post-workflow@v1
16 | with:
17 | feed_list: "https://medium.com/feed/@avra42"
18 |
--------------------------------------------------------------------------------
/.github/workflows/ml-automation.yml:
--------------------------------------------------------------------------------
1 | name: ML Playlist
2 | on:
3 | schedule:
 4 |     # Runs at 00:00 UTC every day
5 | - cron: "0 0 * * *"
6 | workflow_dispatch:
7 |
8 | jobs:
9 | update-readme-with-youtube:
10 | # https://youtube.com/playlist?list=PLqQrRCH56DH_H3MgBEFPdU787x2tGDpyd
11 |     name: Update this repo's README with latest videos from the ML Playlist
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v2
15 | - uses: gautamkrishnar/blog-post-workflow@master
16 | with:
17 | readme_path: "./Machine-Learning for all/README.md"
18 | comment_tag_name: "ML"
19 | max_post_count: 50
20 | feed_list: "https://www.youtube.com/feeds/videos.xml?playlist_id=PLqQrRCH56DH_H3MgBEFPdU787x2tGDpyd"
21 | custom_tags: "channelId/yt:channelId/,videoId/yt:videoId/"
22 | date_format: "mmm d, yyyy"
23 | template: '$newline'
24 | committer_email: "avrab.yt@gmail.com"
25 |
--------------------------------------------------------------------------------
/.github/workflows/python-automation.yml:
--------------------------------------------------------------------------------
1 | name: Python Automation Playlist
2 | on:
3 | schedule:
4 | # Runs at 00:00 UTC every day.
5 | - cron: "0 0 * * *"
6 | workflow_dispatch:
7 |
8 | jobs:
9 | update-readme-with-youtube:
10 | # #https://youtube.com/playlist?list=PLqQrRCH56DH9OTHBZ0j4f_NtAh1SPW0Cg
11 |     name: Update this repo's README with latest videos from the Python Automation Playlist
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v2
15 | - uses: gautamkrishnar/blog-post-workflow@master
16 | with:
17 | readme_path: "./Python-Automation/README.md"
18 | comment_tag_name: "PythonAutomation"
19 | max_post_count: 50
20 | feed_list: "https://www.youtube.com/feeds/videos.xml?playlist_id=PLqQrRCH56DH9OTHBZ0j4f_NtAh1SPW0Cg"
21 | custom_tags: "channelId/yt:channelId/,videoId/yt:videoId/"
22 | date_format: "mmm d, yyyy"
23 | template: '$newline'
24 | committer_email: "avrab.yt@gmail.com"
25 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "Bioinformatics with Python/st-speckmol"]
2 | path = Bioinformatics with Python/st-speckmol
3 | url = https://github.com/avrabyt/st-speckmol.git
4 | [submodule "Python-Automation/Python-GoogleSheet-YT-Automation"]
5 | path = Python-Automation/Python-GoogleSheet-YT-Automation
6 | url = https://github.com/avrabyt/Python-GoogleSheet-YT-Automation.git
7 | [submodule "Machine-Learning for all/Image-Compressor"]
8 | path = Machine-Learning for all/Image-Compressor
9 | url = https://github.com/avrabyt/Image-Compressor.git
10 | [submodule "Streamlit-Python/Streamlit-Readme-to-App"]
11 | path = Streamlit-Python/Streamlit-Readme-to-App
12 | url = https://github.com/avrabyt/Streamlit-Readme-to-App.git
13 | [submodule "Holiday-coding-session"]
14 | path = Holiday-coding-session
15 | url = https://github.com/avrabyt/Holiday-coding-session.git
16 | [submodule "Streamlit-Readme-to-App"]
17 | path = Streamlit-Readme-to-App
18 | url = https://github.com/avrabyt/Streamlit-Readme-to-App.git
19 | [submodule "OpenAI-Streamlit-YouTube"]
20 | path = OpenAI-Streamlit-YouTube
21 | url = https://github.com/avrabyt/OpenAI-Streamlit-YouTube.git
22 | [submodule "LangChain-Web-Apps"]
23 | path = LangChain-Web-Apps
24 | url = https://github.com/avrabyt/LangChain-Web-Apps.git
25 | [submodule "RAG-Chatbot"]
26 | path = RAG-Chatbot
27 | url = https://github.com/avrabyt/RAG-Chatbot.git
28 | [submodule "GPT4-turbo-with-vision-demo"]
29 | path = GPT4-turbo-with-vision-demo
30 | url = https://github.com/avrabyt/GPT4-turbo-with-vision-demo.git
31 |
--------------------------------------------------------------------------------
/Bioinformatics with Python/README.md:
--------------------------------------------------------------------------------
1 | # [Bioinformatics with Python Playlist](https://youtube.com/playlist?list=PLqQrRCH56DH-nTgLekThf60dkOguB-fRi) ⬇️
2 | [](https://github.com/avrabyt/YouTube-Tutorials/actions/workflows/bioinfo-workflow.yml)
3 |
4 |
6 |
7 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Avra
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Machine-Learning for all/README.md:
--------------------------------------------------------------------------------
1 | # [Machine-Learning for all Playlist](https://youtube.com/playlist?list=PLqQrRCH56DH_H3MgBEFPdU787x2tGDpyd) ⬇️
2 |
3 |
5 |
7 |
9 |
11 |
13 |
15 |
17 |
19 |
21 |
23 |
25 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/Python-Automation/README.md:
--------------------------------------------------------------------------------
1 | # [Python-Automation Playlist](https://youtube.com/playlist?list=PLqQrRCH56DH9OTHBZ0j4f_NtAh1SPW0Cg) ⬇️
2 |
3 | [](https://github.com/avrabyt/YouTube-Tutorials/actions/workflows/python-automation.yml)
4 |
5 |
7 |
9 |
11 |
13 |
15 |
17 |
19 |
20 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # [YouTube-Tutorials](https://www.youtube.com/c/Avra_b)
2 |
3 | [](https://github.com/avrabyt/YouTube-Tutorials/actions/workflows/Streamlit-workflow.yml)
4 | [](https://github.com/avrabyt/YouTube-Tutorials/actions/workflows/ml-automation.yml)
5 | [](https://github.com/avrabyt/YouTube-Tutorials/actions/workflows/bioinfo-workflow.yml)
6 |
7 | **Topics overview :** `Python` `Streamlit` `Machine-Learning` `openAI` `Google-sheets` `Automation` `Pandas` `Bioinformatics` `chatGPT`
8 |
9 | 1. [Streamlit-Python](https://github.com/avrabyt/YouTube-Tutorials/tree/main/Streamlit-Python)
10 | 2. [Machine-Learning for all](https://github.com/avrabyt/YouTube-Tutorials/tree/main/Machine-Learning%20for%20all)
11 | 3. [AI /chatGPT etc](https://github.com/avrabyt/YouTube-Tutorials/tree/main/Machine-Learning%20for%20all)
12 | 4. [Python-Automation](https://github.com/avrabyt/YouTube-Tutorials/tree/main/Python-Automation)
13 | 5. [Bioinformatics with Python](https://github.com/avrabyt/YouTube-Tutorials/tree/main/Bioinformatics%20with%20Python)
14 | 6. [openAI / chatGPT Web Apps](https://github.com/avrabyt/Holiday-coding-session/tree/5af12fbdc474b07f70397390e5040096b92814d2)
15 |
16 |
17 | ### 📺 Latest YouTube Videos
18 | 
19 | 
20 | [](https://github.com/avrabyt/YouTube-Tutorials/actions/workflows/Youtube-workflow.yml)
21 |
22 |
23 |
25 |
27 |
29 |
31 |
33 |
34 |
35 |
36 |
37 | ### 📑 Latest Blog posts
38 | [](https://github.com/avrabyt/YouTube-Tutorials/actions/workflows/blog-post-workflow.yml)
39 |
40 |
41 | - [How to Build Full Stack Agentic Apps](https://medium.com/databutton/how-to-build-full-stack-agentic-apps-d3c64c9bb11b?source=rss-bf79cad6afa1------2)
42 | - [What I Built: A Multi-Agent AI Chatbot App using Databutton and Swarm](https://medium.com/databutton/what-i-built-a-multi-agent-ai-chatbot-app-using-databutton-and-swarm-c616f5b7ed11?source=rss-bf79cad6afa1------2)
43 | - [I was thinking about an AI SaaS idea and came up with this.](https://medium.com/databutton/i-was-thinking-about-an-ai-saas-idea-and-came-up-with-this-0ba5e44a2ce9?source=rss-bf79cad6afa1------2)
44 | - [What AI Tool I Would Pick While Building My Next SaaS App](https://medium.com/@avra42/what-ai-tool-i-would-pick-while-building-my-next-saas-app-ab5493863401?source=rss-bf79cad6afa1------2)
45 | - [9 Tips I Learned from an AI Micro SaaS Founder Who Sold it for Six Figures](https://medium.com/databutton/9-tips-i-learned-from-an-ai-micro-saas-founder-who-sold-it-for-six-figures-f951b85c7198?source=rss-bf79cad6afa1------2)
46 |
47 | -----
48 | **Weekly Newsletter** : https://weekly-aistacks.beehiiv.com/subscribe
49 |
50 |
51 |
52 |
53 | [](https://www.youtube.com/c/Avra_b)
54 | [](https://medium.com/@avra42)
55 |
--------------------------------------------------------------------------------
/Streamlit-Python/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/avrabyt/YouTube-Tutorials/148ba08fea304bd04916268d6c4e2cd1052d67b7/Streamlit-Python/.DS_Store
--------------------------------------------------------------------------------
/Streamlit-Python/README.md:
--------------------------------------------------------------------------------
1 | # [Streamlit-Python Playlist](https://youtube.com/playlist?list=PLqQrRCH56DH8JSoGC3hsciV-dQhgFGS1K) ⬇️
2 |
3 | [](https://github.com/avrabyt/YouTube-Tutorials/actions/workflows/Streamlit-workflow.yml)
4 |
5 |
7 |
9 |
11 |
13 |
15 |
17 |
19 |
21 |
23 |
25 |
27 |
29 |
31 |
33 |
34 |
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit-AgGrid-Usage/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/avrabyt/YouTube-Tutorials/148ba08fea304bd04916268d6c4e2cd1052d67b7/Streamlit-Python/Streamlit-AgGrid-Usage/.DS_Store
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit-AgGrid-Usage/.gitignore:
--------------------------------------------------------------------------------
1 | .streamlit
2 | .streamlit/secrets.toml
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit-AgGrid-Usage/README.md:
--------------------------------------------------------------------------------
1 | ## Here's a test app to try 🎈
2 | [](https://avra-youtube-aggrid.streamlit.app)
3 |
4 | Feel free to contribute or report bugs.
5 |
6 | ## Relevant Post :
7 | I published a Medium Article titled,
8 | ```
9 | Automate Streamlit Web App using Interactive AgGrid with Google Sheets
10 | ```
11 |
12 | [here's](https://medium.com/towards-data-science/automate-streamlit-web-app-using-interactive-aggrid-with-google-sheets-81b93fd9e648) the link to the article.
13 |
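
Below is a minimal, self-contained sketch of the core pattern the article (and the apps in this folder) builds on: rendering an editable AgGrid table from a DataFrame and reading the edited data back. The column names and values here are made up for illustration; the real apps read `Data/covid-variants.csv` and push edits to Google Sheets via `database.py`.

```python
# Minimal illustrative sketch (not the full app): an editable AgGrid table in Streamlit.
# Requires streamlit, pandas and streamlit-aggrid (see requirements.txt).
import pandas as pd
import streamlit as st
from st_aggrid import AgGrid
from st_aggrid.grid_options_builder import GridOptionsBuilder

# Dummy inventory data, for illustration only
df = pd.DataFrame({"Type": ["Notebook", "DVDs"], "Quantity": [1, 2], "Price": [400, 200]})

gd = GridOptionsBuilder.from_dataframe(df)
gd.configure_default_column(editable=True)  # allow in-cell editing

grid = AgGrid(df, gridOptions=gd.build(), theme="balham", height=200)

st.subheader("Edited data")
st.dataframe(grid["data"])  # the grid hands the (possibly edited) rows back
```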
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit-AgGrid-Usage/__pycache__/database.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/avrabyt/YouTube-Tutorials/148ba08fea304bd04916268d6c4e2cd1052d67b7/Streamlit-Python/Streamlit-AgGrid-Usage/__pycache__/database.cpython-39.pyc
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit-AgGrid-Usage/database.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | from google.oauth2 import service_account
3 | import gspread
4 |
5 | def send_to_database(res):
6 | # Create a Google Authentication connection object
7 | scope = ['https://spreadsheets.google.com/feeds',
8 | 'https://www.googleapis.com/auth/drive']
9 |
10 | credentials = service_account.Credentials.from_service_account_info(
11 | st.secrets["gcp_service_account"], scopes = scope)
12 | gc = gspread.authorize(credentials)
13 | sh = gc.open("AgGrid-Database")
14 | worksheet = sh.worksheet("Sheet1") #
15 | my_bar = st.progress(0)
16 | for ind in res.index:
17 | percent_complete = (ind+1)/len(res)
18 | my_bar.progress(percent_complete)
19 | values_list = worksheet.col_values(1)
20 | length_row = len(values_list)
21 | worksheet.update_cell(length_row+1, 1, res['Type'][ind])
22 | worksheet.update_cell(length_row+1, 2, str(res['Quantity'][ind]))
23 | worksheet.update_cell(length_row+1, 3, str(res['Price'][ind]))
24 |
25 | return st.success("Updated to Database ", icon="✅")
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit-AgGrid-Usage/pages/aggrid-button-app.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import pandas as pd
3 | from st_aggrid import AgGrid, JsCode
4 | from st_aggrid.grid_options_builder import GridOptionsBuilder
5 | from database import *
6 |
7 | # This demo is mainly from the suggestion on Community Post
8 | # https://www.youtube.com/channel/UCDMP6ATYKNXMvn2ok1gfM7Q/community?lb=UgkxMTe1HSFYPta6YDSZCXqkSCp2cKfyiYmU
9 | # ".....Another suggestion for streamlit-aggrid features to explore is buttons inside the aggrid.
10 | # I think it would be interesting because there aren't many examples of that in the Streamlit forum.""
11 | @st.experimental_memo
12 | def convert_df(df):
13 | return df.to_csv(index=False).encode('utf-8')
14 |
15 | st.sidebar.markdown('''
16 | - ## Medium Article :
17 | [**Automate Streamlit Web App using Interactive AgGrid with Google Sheets**](https://medium.com/towards-data-science/automate-streamlit-web-app-using-interactive-aggrid-with-google-sheets-81b93fd9e648).
18 |
19 | - ## Link to the YouTube videos :
20 | - 1. [AgGrid Part 1](https://youtu.be/F54ELJwspos)
21 | - 2. [AgGrid Part 2](https://youtu.be/Zs9-8trPadU)
22 | - 3. [AgGrid Part 3](https://youtu.be/sOFM334iILs)
23 | ''' )
24 |
25 | st.header("AgGrid Demo `Part 3`")
26 |
27 | with st.expander('TL;DR', expanded=True):
28 |
29 | st.markdown('''
30 | Medium Article :
31 | [**Automate Streamlit Web App using Interactive AgGrid with Google Sheets**](https://medium.com/towards-data-science/automate-streamlit-web-app-using-interactive-aggrid-with-google-sheets-81b93fd9e648).
32 |
33 | > Demonstrates how to use the `AgGrid` library in a Streamlit app to create an `interactive` data table.
34 | > It shows how to `connect` the table to a `Google Sheets` database and send data from the table to the database.
35 |     > Additionally, it implements `JavaScript` callbacks for adding rows to the AgGrid table and a `button` inside the AgGrid table.
36 |     > Also, `downloading` the AgGrid table as a CSV file.
37 | Link to the YouTube video : [AgGrid Part 3](https://youtu.be/sOFM334iILs)
38 |
39 | ''')
40 |
41 |
42 | with st.expander(' Previously : ', expanded=False):
43 | st.markdown('''
44 |
45 | ✅ 1. `Working` with AgGrid Table
46 |
47 | ✅ 2. `Highlighting` AgGrid Table
48 |
49 | ✅ 3. `Deleting` rows in AgGrid Table
50 |
51 | > Link to the YouTube videos :
52 | > - 1. [AgGrid Part 1](https://youtu.be/F54ELJwspos)
53 | > - 2. [AgGrid Part 2](https://youtu.be/Zs9-8trPadU)
54 | ''')
55 |
56 |
57 | # Dump any DataFrame
58 | d = {'Type':['Notebook', 'DVDs'] ,'Quantity': [1, 2],'Price': [400, 200]}
59 | df = pd.DataFrame(data = d)
60 |
61 | # Dump as AgGrid Table
62 | # AgGrid(df)
63 |
64 | # JavaScript function
65 | # api.applyTransaction({add: [{}]}) # This line would always add the row at the end
66 | # Finding row index is important to add row just after the selected index
67 | js_add_row = JsCode("""
68 | function(e) {
69 | let api = e.api;
70 | let rowPos = e.rowIndex + 1;
71 | api.applyTransaction({addIndex: rowPos, add: [{}]})
72 | };
73 | """
74 | )
75 |
76 | # cellRenderer with a button component.
77 | # Resources:
78 | # https://blog.ag-grid.com/cell-renderers-in-ag-grid-every-different-flavour/
79 | # https://www.w3schools.com/css/css3_buttons.asp
80 | cellRenderer_addButton = JsCode('''
81 | class BtnCellRenderer {
82 | init(params) {
83 | this.params = params;
84 | this.eGui = document.createElement('div');
85 | this.eGui.innerHTML = `
86 |
87 |
102 | ↓ Add
105 |
106 | `;
107 | }
108 |
109 | getGui() {
110 | return this.eGui;
111 | }
112 |
113 | };
114 | ''')
115 |
116 | # Dump as AgGrid Table
117 | # AgGrid(df)
118 | gd = GridOptionsBuilder.from_dataframe(df)
119 | gd.configure_default_column(editable=True)
120 | gd.configure_column(field = '🔧',
121 | onCellClicked = js_add_row,
122 | cellRenderer = cellRenderer_addButton,
123 | lockPosition='left')
124 | gridoptions = gd.build()
125 | # This part updates the Grid inside a form so that Streamlit does not rerun the whole script
126 | with st.form('Inventory') as f:
127 | st.header('Inventory List 🔖')
128 | response = AgGrid(df,
129 | gridOptions = gridoptions,
130 | editable=True,
131 | allow_unsafe_jscode = True,
132 | theme = 'balham',
133 | height = 200,
134 | fit_columns_on_grid_load = True)
135 | st.write(" *Note: Don't forget to hit enter ↩ on new entry.*")
136 | st.form_submit_button("Confirm item(s) 🔒", type="primary")
137 | # Dump )
138 | st.subheader("Updated Inventory")
139 | res = response['data']
140 | st.table(res)
141 | st.subheader("Visualize Inventory")
142 | st.bar_chart(data=res, x = 'Type', y = 'Price')
143 | st.subheader("Store Inventory")
144 | col1,col2 = st.columns(2)
145 | # https://docs.streamlit.io/knowledge-base/using-streamlit/how-download-pandas-dataframe-csv
146 | csv = convert_df(response['data'])
147 | col1.write("Save in Local Machine?")
148 | col1.download_button(
149 | "Press to Download 🗳️",
150 | csv,
151 | "file.csv",
152 | "text/csv",
153 | key='download-csv'
154 | )
155 |
156 | col2.write("Save in Shared Cloud?")
157 | if col2.button("Update to Database 🚀 "):
158 | send_to_database(res)
159 |
160 | st.sidebar.video('https://youtu.be/sOFM334iILs')
161 |
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit-AgGrid-Usage/pages/agimage.py:
--------------------------------------------------------------------------------
1 | # Import Modules
2 | import streamlit as st
3 | import pandas as pd
4 | from st_aggrid import AgGrid, JsCode
5 | from st_aggrid.grid_options_builder import GridOptionsBuilder
6 |
7 | # Dummy data
8 | data = {
9 | 'image_url': ['https://m.media-amazon.com/images/M/MV5BMDFkYTc0MGEtZmNhMC00ZDIzLWFmNTEtODM1ZmRlYWMwMWFmXkEyXkFqcGdeQXVyMTMxODk2OTU@._V1_SY1000_CR0,0,675,1000_AL_.jpg',
10 | 'https://m.media-amazon.com/images/M/MV5BM2MyNjYxNmUtYTAwNi00MTYxLWJmNWYtYzZlODY3ZTk3OTFlXkEyXkFqcGdeQXVyNzkwMjQ5NzM@._V1_SY1000_CR0,0,704,1000_AL_.jpg',
11 | 'https://m.media-amazon.com/images/M/MV5BMWMwMGQzZTItY2JlNC00OWZiLWIyMDctNDk2ZDQ2YjRjMWQ0XkEyXkFqcGdeQXVyNzkwMjQ5NzM@._V1_SY1000_CR0,0,679,1000_AL_.jpg',
12 | 'https://m.media-amazon.com/images/M/MV5BMTMxNTMwODM0NF5BMl5BanBnXkFtZTcwODAyMTk2Mw@@._V1_SY1000_CR0,0,675,1000_AL_.jpg'],
13 | 'name': ['The Shawshank Redemption', 'The Godfather', 'The Godfather: Part II', 'The Dark Knight'],
14 | 'year': [1994, 1972, 1974, 2008],
15 | 'description': ['Two imprisoned men bond over a number of years, finding solace and eventual redemption through acts of common decency.',
16 | 'The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.',
17 | 'The early life and career of Vito Corleone in 1920s New York is portrayed while his son, Michael, expands and tightens his grip on the family crime syndicate.',
18 | 'When the menace known as the Joker emerges from his mysterious past, he wreaks havoc and chaos on the people of Gotham, the Dark Knight must accept one of the greatest psychological and physical tests of his ability to fight injustice.'],
19 | 'rating': [9.2, 9.2, 9.0, 9.0],
20 | }
21 | df = pd.DataFrame(data)
22 | #st.write(df)
23 | st.header("AgGrid Demo `Part 4`: Grid table with Image Display")
24 |
25 | with st.expander('TL;DR', expanded=True):
26 |
27 | st.markdown('''
28 | Medium Article :
29 | [**Enhancing AgGrid table with Image Display in Streamlit Apps**](https://medium.com/the-streamlit-teacher/enhancing-aggrid-table-with-image-display-in-streamlit-apps-425b6e989d5b).
30 | > The streamlit-aggrid library allows us to easily add the AgGrid component to a Streamlit app and customize it with various options.
31 | > We can use a custom cell renderer function to display images in cells of the AgGrid component.
32 | > By combining the powerful features of AgGrid with the simplicity of Streamlit, we can create interactive and informative data visualization apps quickly and easily.
33 |
34 | Link to the YouTube video : [AgGrid Part 4 : Streamlit AgGrid Extras - Display Image within the Table | JavaScript Injection | Python](https://youtu.be/3Ax3S8g2bak)
35 | ''')
36 | render_image = JsCode('''
37 |
38 | function renderImage(params){
39 | // Create a new image element
40 | var img = new Image();
41 |
42 | img.src = params.value;
43 |
44 | img.width = 35;
45 | img.height = 35;
46 |
47 | return img;
48 |
49 | }
50 | ''')
51 |
52 | # build gridoptions object
53 |
54 | # Build GridOptions object
55 | options_builder = GridOptionsBuilder.from_dataframe(df)
56 | options_builder.configure_column('image_url', cellRenderer = render_image)
57 | options_builder.configure_selection(selection_mode="single", use_checkbox=True)
58 | grid_options = options_builder.build()
59 |
60 | # Create AgGrid component
61 | grid = AgGrid(df,
62 | gridOptions = grid_options,
63 | allow_unsafe_jscode=True,
64 | height=200, width=500, theme='streamlit')
65 |
66 | sel_row = grid["selected_rows"]
67 | if sel_row:
68 | col1, col2 = st.columns(2)
69 | st.info(sel_row[0]['description'])
70 | col1.image(sel_row[0]['image_url'],caption = sel_row[0]['name'])
71 | col2.subheader("Rating: " + str(sel_row[0]['rating']))
72 |
73 | st.sidebar.markdown('''
74 | - ## Medium Article :
75 | [**Enhancing AgGrid table with Image Display in Streamlit Apps**](https://medium.com/the-streamlit-teacher/enhancing-aggrid-table-with-image-display-in-streamlit-apps-425b6e989d5b)
76 |
77 | - ## Link to the YouTube videos :
78 | - 1. [AgGrid Part 1](https://youtu.be/F54ELJwspos)
79 | - 2. [AgGrid Part 2](https://youtu.be/Zs9-8trPadU)
80 | - 3. [AgGrid Part 3](https://youtu.be/sOFM334iILs)
81 | - 4. [AgGrid Part 4](https://youtu.be/3Ax3S8g2bak)
82 | ''' )
83 | st.sidebar.video('https://youtu.be/3Ax3S8g2bak')
84 |
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit-AgGrid-Usage/requirements.txt:
--------------------------------------------------------------------------------
1 | streamlit
2 | pandas
3 | streamlit_aggrid
4 | gspread==3.7.0
5 |
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit-AgGrid-Usage/streamlit-ag-app.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import pandas as pd
3 | from st_aggrid import AgGrid, GridUpdateMode, JsCode
4 | from st_aggrid.grid_options_builder import GridOptionsBuilder
5 | import glob
6 | # Functions
7 | # Fixing the path issue explicitly (bad practice!)
8 | file = glob.glob('Streamlit-Python/Streamlit-AgGrid-Usage/Data/*', recursive=True)
9 |
10 | @st.cache
11 | def data_upload(file):
12 | df = pd.read_csv(file[0])
13 | return df
14 |
15 | st.header("AgGrid Demo `Part 1` & `Part 2`")
16 | st.sidebar.title("AgGrid Examples")
17 | df = data_upload(file)
18 | if st.checkbox("Show Streamlit Default Dataframe"):
19 | st.subheader("This is how Default Streamlit Dataframe looks!")
20 | st.dataframe(data=df)
21 | # st.info(len(df))
22 |
23 | _funct = st.sidebar.radio(label="Functions", options = ['Display','Highlight','Delete'])
24 |
25 | st.sidebar.markdown('''
26 | - ## Medium Article :
27 | [**Automate Streamlit Web App using Interactive AgGrid with Google Sheets**](https://medium.com/towards-data-science/automate-streamlit-web-app-using-interactive-aggrid-with-google-sheets-81b93fd9e648).
28 |
29 | - ## Link to the YouTube videos :
30 | - 1. [AgGrid Part 1](https://youtu.be/F54ELJwspos)
31 | - 2. [AgGrid Part 2](https://youtu.be/Zs9-8trPadU)
32 | - 3. [AgGrid Part 3](https://youtu.be/sOFM334iILs)
33 | ''' )
34 |
35 | st.sidebar.video('https://youtu.be/F54ELJwspos')
36 | st.sidebar.video('https://youtu.be/Zs9-8trPadU')
37 |
38 | st.subheader("This is how AgGrid Table looks!")
39 |
40 | gd = GridOptionsBuilder.from_dataframe(df)
41 | gd.configure_pagination(enabled=True)
42 | gd.configure_default_column(editable=True,groupable=True)
43 | # _______________________________________________________________
44 | # Enabling tooltip - YouTube-Query by Alexis-Raja Brachet
45 | # gd.configure_default_column(editable=True,groupable=True,tooltipField = "variant")
46 | # Hovering over any row (under any column) pops up that row's variant as tooltip information. However,
47 | # I'm yet to figure out how to show more than one column's information (e.g. ["variant", "date"] collectively) as the tooltip,
48 | # and it's also a bit slow in the beginning when I tested it.
49 | # ________________________________________________________________
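# A possible (untested) approach for multi-column tooltips: AG Grid columns accept a
# tooltipValueGetter property, which st-aggrid can pass through as JsCode (needs allow_unsafe_jscode=True), e.g.:
# gd.configure_default_column(editable=True, groupable=True,
#     tooltipValueGetter=JsCode("function(params){ return params.data.variant + ' | ' + params.data.date; }"))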
50 |
51 | if _funct == 'Display':
52 | sel_mode = st.radio('Selection Type', options = ['single', 'multiple'])
53 | gd.configure_selection(selection_mode=sel_mode,use_checkbox=True)
54 | gridoptions = gd.build()
55 | grid_table = AgGrid(df,gridOptions=gridoptions,
56 | update_mode= GridUpdateMode.SELECTION_CHANGED,
57 | height = 500,
58 | allow_unsafe_jscode=True,
59 | #enable_enterprise_modules = True,
60 | theme = 'balham')
61 |
62 | sel_row = grid_table["selected_rows"]
63 | st.subheader("Output")
64 | st.write(sel_row)
65 | if _funct == 'Highlight':
66 | col_opt = st.selectbox(label ='Select column',options = df.columns)
67 | cellstyle_jscode = JsCode("""
68 | function(params){
69 | if (params.value == 'Alpha') {
70 | return {
71 | 'color': 'black',
72 | 'backgroundColor' : 'orange'
73 | }
74 | }
75 | if (params.value == 'B.1.258') {
76 | return{
77 | 'color' : 'black',
78 | 'backgroundColor' : 'red'
79 | }
80 | }
81 | else{
82 | return{
83 | 'color': 'black',
84 | 'backgroundColor': 'lightpink'
85 | }
86 | }
87 |
88 | };
89 | """)
90 | gd.configure_columns(col_opt, cellStyle=cellstyle_jscode)
91 | gridOptions = gd.build()
92 | grid_table = AgGrid(df,
93 | gridOptions = gridOptions,
94 | enable_enterprise_modules = True,
95 | fit_columns_on_grid_load = True,
96 | height=500,
97 | width='100%',
98 | # theme = "material",
99 | update_mode = GridUpdateMode.SELECTION_CHANGED,
100 | reload_data = True,
101 | allow_unsafe_jscode=True,
102 | )
103 | if _funct == 'Delete':
104 |
105 | js = JsCode("""
106 | function(e) {
107 | let api = e.api;
108 | let sel = api.getSelectedRows();
109 | api.applyTransaction({remove: sel})
110 | };
111 | """
112 | )
113 |
114 | gd.configure_selection(selection_mode= 'single')
115 | gd.configure_grid_options(onRowSelected = js,pre_selected_rows=[])
116 | gridOptions = gd.build()
117 | grid_table = AgGrid(df,
118 | gridOptions = gridOptions,
119 | enable_enterprise_modules = True,
120 | fit_columns_on_grid_load = True,
121 | height=500,
122 | width='100%',
123 | # theme = "streamlit",
124 | update_mode = GridUpdateMode.SELECTION_CHANGED,
125 | reload_data = True,
126 | allow_unsafe_jscode=True,
127 | )
128 | st.balloons()
129 | st.info("Total Rows :" + str(len(grid_table['data'])))
130 |
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit_EmbedTweets/app.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import streamlit.components.v1 as components
3 | import requests
4 |
5 | def theTweet(tweet_url):
6 | api = "https://publish.twitter.com/oembed?url={}".format(tweet_url)
7 | response = requests.get(api)
8 | res = response.json()["html"]
9 | return res
10 |
11 | input = st.text_input("Enter your tweet url")
12 | if input:
13 | res = theTweet(input)
14 | st.write(res)
15 | components.html(res,height= 700)
16 |
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit_Firestore_SocialApp/README.md:
--------------------------------------------------------------------------------
1 | ## Quick notes below ⏬
2 |
3 | For reference
4 |
5 | [Streamlit](https://streamlit.io)
6 |
7 | [Firebase Products](https://firebase.google.com/products-build)
8 |
9 | [Firebase](https://firebase.google.com)
10 |
11 |
12 |
13 |
14 | You may have come across these issues; here are a few fixes
15 |
16 | [ImportError: No module named google.cloud](https://stackoverflow.com/questions/44397506/importerror-no-module-named-google-cloud)
17 |
18 | [ImportError: Failed to import the Cloud Firestore library for Python](https://stackoverflow.com/questions/48264536/importerror-failed-to-import-the-cloud-firestore-library-for-python)
19 |
20 | [https://pycryptodome.readthedocs.io/en/latest/src/installation.html#windows-from-sources-python-3-5-and-newer](https://pycryptodome.readthedocs.io/en/latest/src/installation.html#windows-from-sources-python-3-5-and-newer)
21 |
22 |
23 |
24 | ## What the Configuration file 🔨 looks like
25 |
26 | ```yaml
27 | firebaseConfig = {
28 | 'apiKey': " ",
29 | 'authDomain': " ",
30 | 'projectId': " ",
31 | 'databaseURL': " ",
32 | 'storageBucket': " ",
33 | 'messagingSenderId': "",
34 | 'appId': " ",
35 | 'measurementId': " "
36 | }
37 |
38 | ```
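
The `app.py` in this folder hardcodes this dictionary with empty strings. Below is a minimal sketch of keeping it out of the source instead, assuming the same keys are stored in `.streamlit/secrets.toml` under a hypothetical `[firebaseConfig]` table:

```python
# Illustrative sketch only: load the Firebase config from Streamlit secrets
# rather than hardcoding it. Assumes a [firebaseConfig] table with the keys
# shown above exists in .streamlit/secrets.toml (hypothetical setup).
import pyrebase
import streamlit as st

firebaseConfig = dict(st.secrets["firebaseConfig"])  # apiKey, authDomain, databaseURL, ...

firebase = pyrebase.initialize_app(firebaseConfig)   # same call app.py makes
auth = firebase.auth()
db = firebase.database()
storage = firebase.storage()
```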
39 | --------
40 |
41 | PART 1
42 |
43 | [https://youtu.be/KLmSfHcOXlc](https://youtu.be/KLmSfHcOXlc)
44 |
45 | PART 2
46 |
47 | [https://youtu.be/uzc3OEjjbn8](https://youtu.be/uzc3OEjjbn8)
48 |
49 |
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit_Firestore_SocialApp/app.py:
--------------------------------------------------------------------------------
1 | # Modules
2 | import pyrebase
3 | import streamlit as st
4 | from datetime import datetime
5 |
6 | # Configuration Key
7 | firebaseConfig = {
8 | 'apiKey': " ",
9 | 'authDomain': " ",
10 | 'projectId': " ",
11 | 'databaseURL': " ",
12 | 'storageBucket': " ",
13 | 'messagingSenderId': "",
14 | 'appId': " ",
15 | 'measurementId': " "
16 | }
17 |
18 | # Firebase Authentication
19 | firebase = pyrebase.initialize_app(firebaseConfig)
20 | auth = firebase.auth()
21 |
22 | # Database
23 | db = firebase.database()
24 | storage = firebase.storage()
25 | st.sidebar.title("Our community app")
26 |
27 | # Authentication
28 | choice = st.sidebar.selectbox('login/Signup', ['Login', 'Sign up'])
29 |
30 | # Obtain User Input for email and password
31 | email = st.sidebar.text_input('Please enter your email address')
32 | password = st.sidebar.text_input('Please enter your password',type = 'password')
33 |
34 | # App
35 |
36 | # Sign up Block
37 | if choice == 'Sign up':
38 | handle = st.sidebar.text_input(
39 | 'Please input your app handle name', value='Default')
40 | submit = st.sidebar.button('Create my account')
41 |
42 | if submit:
43 | user = auth.create_user_with_email_and_password(email, password)
44 | st.success('Your account was created successfully!')
45 | st.balloons()
46 | # Sign in
47 | user = auth.sign_in_with_email_and_password(email, password)
48 | db.child(user['localId']).child("Handle").set(handle)
49 | db.child(user['localId']).child("ID").set(user['localId'])
50 | st.title('Welcome ' + handle)
51 | st.info('Login via login drop down selection')
52 |
53 | # Login Block
54 | if choice == 'Login':
55 | login = st.sidebar.checkbox('Login')
56 | if login:
57 | user = auth.sign_in_with_email_and_password(email,password)
58 | st.write('', unsafe_allow_html=True)
59 | bio = st.radio('Jump to',['Home','Workplace Feeds', 'Settings'])
60 |
61 | # SETTINGS PAGE
62 | if bio == 'Settings':
63 | # CHECK FOR IMAGE
64 | nImage = db.child(user['localId']).child("Image").get().val()
65 | # IMAGE FOUND
66 | if nImage is not None:
67 | # We plan to store all our images under the "Image" child
68 | Image = db.child(user['localId']).child("Image").get()
69 | for img in Image.each():
70 | img_choice = img.val()
71 | #st.write(img_choice)
72 | st.image(img_choice)
73 | exp = st.beta_expander('Change Bio and Image')
74 | # User plan to change profile picture
75 | with exp:
76 | newImgPath = st.text_input('Enter full path of your profile image')
77 | upload_new = st.button('Upload')
78 | if upload_new:
79 | uid = user['localId']
80 | fireb_upload = storage.child(uid).put(newImgPath,user['idToken'])
81 | a_imgdata_url = storage.child(uid).get_url(fireb_upload['downloadTokens'])
82 | db.child(user['localId']).child("Image").push(a_imgdata_url)
83 | st.success('Success!')
84 | # IF THERE IS NO IMAGE
85 | else:
86 | st.info("No profile picture yet")
87 | newImgPath = st.text_input('Enter full path of your profile image')
88 | upload_new = st.button('Upload')
89 | if upload_new:
90 | uid = user['localId']
91 | # Store it in the initiated Firebase Storage bucket
92 | fireb_upload = storage.child(uid).put(newImgPath,user['idToken'])
93 | # Get the url for easy access
94 | a_imgdata_url = storage.child(uid).get_url(fireb_upload['downloadTokens'])
95 | # Put it in our real time database
96 | db.child(user['localId']).child("Image").push(a_imgdata_url)
97 | # HOME PAGE
98 | elif bio == 'Home':
99 | col1, col2 = st.beta_columns(2)
100 |
101 | # col for Profile picture
102 | with col1:
103 | nImage = db.child(user['localId']).child("Image").get().val()
104 | if nImage is not None:
105 | val = db.child(user['localId']).child("Image").get()
106 | for img in val.each():
107 | img_choice = img.val()
108 | st.image(img_choice,use_column_width=True)
109 | else:
110 | st.info("No profile picture yet. Go to Edit Profile and choose one!")
111 |
112 | post = st.text_input("Let's share my current mood as a post!",max_chars = 100)
113 | add_post = st.button('Share Posts')
114 | if add_post:
115 | now = datetime.now()
116 | dt_string = now.strftime("%d/%m/%Y %H:%M:%S")
117 | post = {'Post:' : post,
118 | 'Timestamp' : dt_string}
119 | results = db.child(user['localId']).child("Posts").push(post)
120 | st.balloons()
121 |
122 | # This column is for the post display
123 | with col2:
124 |
125 | all_posts = db.child(user['localId']).child("Posts").get()
126 | if all_posts.val() is not None:
127 | for Posts in reversed(all_posts.each()):
128 | #st.write(Posts.key()) # Morty
129 | st.code(Posts.val(),language = '')
130 | # WORKPLACE FEED PAGE
131 | else:
132 | all_users = db.get()
133 | res = []
134 | # Store all the users' handle names
135 | for users_handle in all_users.each():
136 | k = users_handle.val()["Handle"]
137 | res.append(k)
138 | # Total users
139 | nl = len(res)
140 | st.write('Total users here: '+ str(nl))
141 |
142 | # Allow the user to choose which other user he/she wants to see
143 | choice = st.selectbox('My Colleagues',res)
144 | push = st.button('Show Profile')
145 |
146 | # Show the chosen profile
147 | if push:
148 | for users_handle in all_users.each():
149 | k = users_handle.val()["Handle"]
150 | #
151 | if k == choice:
152 | lid = users_handle.val()["ID"]
153 |
154 | handlename = db.child(lid).child("Handle").get().val()
155 |
156 | st.markdown(handlename, unsafe_allow_html=True)
157 |
158 | nImage = db.child(lid).child("Image").get().val()
159 | if nImage is not None:
160 | val = db.child(lid).child("Image").get()
161 | for img in val.each():
162 | img_choice = img.val()
163 | st.image(img_choice)
164 | else:
165 | st.info("No profile picture yet. Go to Edit Profile and choose one!")
166 |
167 | # All posts
168 | all_posts = db.child(lid).child("Posts").get()
169 | if all_posts.val() is not None:
170 | for Posts in reversed(all_posts.each()):
171 | st.code(Posts.val(),language = '')
172 |
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit_GoogleSheets_Automation/main.py:
--------------------------------------------------------------------------------
1 | # Streamlit-Google Sheet
2 | ## Modules
3 | import streamlit as st
4 | from pandas import DataFrame, concat
5 |
6 | from gspread_pandas import Spread,Client
7 | from google.oauth2 import service_account
8 |
9 | # Application Related Module
10 | import pubchempy as pcp
11 | from pysmiles import read_smiles
12 | #
13 | import networkx as nx
14 | import matplotlib.pyplot as plt
15 |
16 | from datetime import datetime
17 |
18 | # Disable certificate verification (not always necessary)
19 | import ssl
20 | ssl._create_default_https_context = ssl._create_unverified_context
21 |
22 | # Create a Google Authentication connection object
23 | scope = ['https://spreadsheets.google.com/feeds',
24 | 'https://www.googleapis.com/auth/drive']
25 |
26 | credentials = service_account.Credentials.from_service_account_info(
27 | st.secrets["gcp_service_account"], scopes = scope)
28 | client = Client(scope=scope,creds=credentials)
29 | spreadsheetname = "Database"
30 | spread = Spread(spreadsheetname,client = client)
31 |
32 | # Check the connection
33 | st.write(spread.url)
34 |
35 | sh = client.open(spreadsheetname)
36 | worksheet_list = sh.worksheets()
37 |
38 | # Functions
39 | @st.cache()
40 | # Get our worksheet names
41 | def worksheet_names():
42 | sheet_names = []
43 | for sheet in worksheet_list:
44 | sheet_names.append(sheet.title)
45 | return sheet_names
46 |
47 | # Get the sheet as dataframe
48 | def load_the_spreadsheet(spreadsheetname):
49 | worksheet = sh.worksheet(spreadsheetname)
50 | df = DataFrame(worksheet.get_all_records())
51 | return df
52 |
53 | # Update to Sheet
54 | def update_the_spreadsheet(spreadsheetname,dataframe):
55 | col = ['Compound CID','Time_stamp']
56 | spread.df_to_sheet(dataframe[col],sheet = spreadsheetname,index = False)
57 | st.sidebar.info('Updated to GoogleSheet')
58 |
59 |
60 | st.header('Streamlit Chemical Inventory')
61 |
62 | # Check whether the sheets exists
63 | what_sheets = worksheet_names()
64 | #st.sidebar.write(what_sheets)
65 | ws_choice = st.sidebar.radio('Available worksheets',what_sheets)
66 |
67 | # Load data from worksheets
68 | df = load_the_spreadsheet(ws_choice)
69 | # Show the available CIDs as a selection
70 | select_CID = st.sidebar.selectbox('CID',list(df['Compound CID']))
71 |
72 | # Now we can use the pubchempy module to dump information
73 | comp = pcp.Compound.from_cid(select_CID)
74 | comp_dict = comp.to_dict() # Converting to a dictionary
75 | # What information to look for?
76 | options = ['molecular_weight' ,'molecular_formula',
77 | 'charge','atoms','elements','bonds']
78 | show_me = st.radio('What do you want to see?',options)
79 |
80 | st.info(comp_dict[show_me])
81 | name = comp_dict['iupac_name']
82 | st.markdown(name)
83 | plot = st.checkbox('Canonical Smiles Plot')
84 |
85 | if plot:
86 | sm = comp_dict['canonical_smiles']
87 | mol = read_smiles(sm)
88 | elements = nx.get_node_attributes(mol, name = "element")
89 | fig , ax = plt.subplots()
90 | # Draw the molecule graph once onto the figure's axes
91 | nx.draw(mol, with_labels=True, labels = elements, pos = nx.spring_layout(mol), ax = ax)
92 | st.pyplot(fig)
93 |
94 | add = st.sidebar.checkbox('Add CID')
95 | if add :
96 | cid_entry = st.sidebar.text_input('New CID')
97 | confirm_input = st.sidebar.button('Confirm')
98 |
99 | if confirm_input:
100 | now = datetime.now()
101 | opt = {'Compound CID': [cid_entry],
102 | 'Time_stamp' : [now]}
103 | opt_df = DataFrame(opt)
104 | df = load_the_spreadsheet('Pending CID')
105 | new_df = concat([df, opt_df], ignore_index=True)
106 | update_the_spreadsheet('Pending CID',new_df)
107 |
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit_Notion_Integration/app.py:
--------------------------------------------------------------------------------
1 | # Modules
2 | import streamlit as st
3 | import requests
4 |
5 | # Notion Token
6 | token = ''
7 | databaseID = ''
8 |
9 | # Headers
10 | headers = {
11 | "Authorization": "Bearer " + token,
12 | "Content-Type": "application/json",
13 | "Notion-Version": "2021-05-13"
14 | }
15 |
16 | # Functions
17 | # Database Query
18 | def queryDatabase(databaseId, headers):
19 | readUrl = f"https://api.notion.com/v1/databases/{databaseId}/query"
20 | res = requests.request("POST", readUrl, headers=headers)
21 | data = res.json()
22 | return res , data
23 |
24 | # Database retrieve
25 | def retrieveDatabase(databaseId, headers):
26 | readUrl = f"https://api.notion.com/v1/databases/{databaseId}"
27 | res = requests.request("GET", readUrl, headers=headers)
28 | data = res.json()
29 | return res , data
30 |
31 | # Calling the function
32 | res , data = queryDatabase(databaseID,headers)
33 |
34 | # Dump onto Streamlit
35 | st.write(res.status_code)
36 | st.json(data)
37 |
--------------------------------------------------------------------------------
/Streamlit-Python/Streamlit_WeatherApp/weatherapp.py:
--------------------------------------------------------------------------------
1 |
2 | # Modules
3 | import streamlit as st
4 | import requests
5 | from datetime import datetime , timedelta
6 | import pandas as pd
7 | import matplotlib.pyplot as plt
8 | import time
9 |
10 | # INSERT YOUR API KEY WHICH YOU PASTED IN YOUR secrets.toml file
11 | api_key = st.secrets["api_key"]
12 |
13 | url = 'http://api.openweathermap.org/data/2.5/weather?q={}&appid={}'
14 | url_1 = 'https://api.openweathermap.org/data/2.5/onecall/timemachine?lat={}&lon={}&dt={}&appid={}'
15 |
16 | # Function for LATEST WEATHER DATA
17 | def getweather(city):
18 | result = requests.get(url.format(city, api_key))
19 | if result:
20 | json = result.json()
21 | #st.write(json)
22 | country = json['sys']['country']
23 | temp = json['main']['temp'] - 273.15
24 | temp_feels = json['main']['feels_like'] - 273.15
25 | humid = json['main']['humidity'] # humidity is a percentage, not in Kelvin
26 | icon = json['weather'][0]['icon']
27 | lon = json['coord']['lon']
28 | lat = json['coord']['lat']
29 | des = json['weather'][0]['description']
30 | res = [country, round(temp,1),round(temp_feels,1),
31 | humid,lon,lat,icon,des]
32 | return res , json
33 | else:
34 | print("error in search !")
35 |
36 | # Function for HISTORICAL DATA
37 | def get_hist_data(lat,lon,start):
38 | res = requests.get(url_1.format(lat,lon,start,api_key))
39 | data = res.json()
40 | temp = []
41 | for hour in data["hourly"]:
42 | t = hour["temp"]
43 | temp.append(t)
44 | return data , temp
45 |
46 | # Let's write the Application
47 |
48 | st.header('Streamlit Weather Report')
49 | st.markdown('https://openweathermap.org/api')
50 |
51 | im1,im2 = st.columns(2)
52 | with im2:
53 | image0 = 'random4.jpg'
54 | st.image(image0,use_column_width=True,caption = 'Somewhere in The Netherlands.')
55 | with im1:
56 | image1 = 'OPENWEATHER.png'
57 | st.image(image1, caption='We will use Open Weather Map API as our Data Resource.',use_column_width=True)
58 |
59 | col1, col2 = st.columns(2)
60 |
61 | with col1:
62 | city_name = st.text_input("Enter a city name")
63 | #show_hist = st.checkbox('Show me history')
64 | with col2:
65 | if city_name:
66 | res , json = getweather(city_name)
67 | #st.write(res)
68 | st.success('Current: ' + str(round(res[1],2)))
69 | st.info('Feels Like: ' + str(round(res[2],2)))
70 | #st.info('Humidity: ' + str(round(res[3],2)))
71 | st.subheader('Status: ' + res[7])
72 | web_str = "![Alt Text]"+"(http://openweathermap.org/img/wn/"+str(res[6])+"@2x.png)"
73 | st.markdown(web_str)
74 |
75 | if city_name:
76 | show_hist = st.expander(label = 'Last 5 Days History')
77 | with show_hist:
78 | start_date_string = st.date_input('Current Date')
79 | #start_date_string = str('2021-06-26')
80 | date_df = []
81 | max_temp_df = []
82 | for i in range(5):
83 | date_Str = start_date_string - timedelta(i)
84 | start_date = datetime.strptime(str(date_Str),"%Y-%m-%d")
85 | timestamp_1 = datetime.timestamp(start_date)
86 | #res , json = getweather(city_name)
87 | his , temp = get_hist_data(res[5],res[4],int(timestamp_1))
88 | date_df.append(date_Str)
89 | max_temp_df.append(max(temp) - 273.15)
90 |
91 | df = pd.DataFrame()
92 | df['Date'] = date_df
93 | df['Max temp'] = max_temp_df
94 | st.table(df)
95 |
96 | st.map(pd.DataFrame({'lat' : [res[5]] , 'lon' : [res[4]]},columns = ['lat','lon']))
97 |
--------------------------------------------------------------------------------