├── .github
│   └── FUNDING.yml
├── .gitignore
├── .pre-commit-config.yaml
├── Home.py
├── LICENSE
├── Procfile
├── README.md
├── data
│   ├── cog_files.txt
│   ├── html
│   │   └── sfo_buildings.html
│   ├── realtor_data_dict.csv
│   ├── scotland_xyz.tsv
│   ├── us_counties.geojson
│   ├── us_metro_areas.geojson
│   ├── us_nation.geojson
│   └── us_states.geojson
├── environment-bk.yml
├── index.html
├── packages.txt
├── pages
│   ├── 10_🌍_Earth_Engine_Datasets.py
│   ├── 11_🧱_Ordnance_Survey.py
│   ├── 12_🌲_Land_Cover_Mapping.py
│   ├── 13_🏗️_Global_Building_Footprints.py
│   ├── 1_📷_Timelapse.py
│   ├── 2_🏠_U.S._Housing.py
│   ├── 3_🪟_Split_Map.py
│   ├── 4_🔥_Heatmap.py
│   ├── 5_📍_Marker_Cluster.py
│   ├── 6_🗺️_Basemaps.py
│   ├── 7_📦_Web_Map_Service.py
│   ├── 8_🏜️_Raster_Data_Visualization.py
│   └── 9_🔲_Vector_Data_Visualization.py
├── postBuild
├── requirements.txt
├── setup.sh
├── streamlit_app.py
└── streamlit_call.py
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: giswqs
2 | custom:
3 | - buymeacoffee.com/giswqs
4 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | # *.html
6 | private/
7 | .vscode/
8 |
9 | # C extensions
10 | *.so
11 |
12 | # Distribution / packaging
13 | .Python
14 | build/
15 | develop-eggs/
16 | dist/
17 | downloads/
18 | eggs/
19 | .eggs/
20 | lib/
21 | lib64/
22 | parts/
23 | sdist/
24 | var/
25 | wheels/
26 | pip-wheel-metadata/
27 | share/python-wheels/
28 | *.egg-info/
29 | .installed.cfg
30 | *.egg
31 | MANIFEST
32 |
33 | # PyInstaller
34 | # Usually these files are written by a python script from a template
35 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
36 | *.manifest
37 | *.spec
38 |
39 | # Installer logs
40 | pip-log.txt
41 | pip-delete-this-directory.txt
42 |
43 | # Unit test / coverage reports
44 | htmlcov/
45 | .tox/
46 | .nox/
47 | .coverage
48 | .coverage.*
49 | .cache
50 | nosetests.xml
51 | coverage.xml
52 | *.cover
53 | *.py,cover
54 | .hypothesis/
55 | .pytest_cache/
56 |
57 | # Translations
58 | *.mo
59 | *.pot
60 |
61 | # Django stuff:
62 | *.log
63 | local_settings.py
64 | db.sqlite3
65 | db.sqlite3-journal
66 |
67 | # Flask stuff:
68 | instance/
69 | .webassets-cache
70 |
71 | # Scrapy stuff:
72 | .scrapy
73 |
74 | # Sphinx documentation
75 | docs/_build/
76 |
77 | # PyBuilder
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # IPython
84 | profile_default/
85 | ipython_config.py
86 |
87 | # pyenv
88 | .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v5.0.0
4 | hooks:
5 | - id: check-toml
6 | - id: check-yaml
7 | - id: end-of-file-fixer
8 | types: [python]
9 | - id: trailing-whitespace
10 | - id: requirements-txt-fixer
11 | - id: check-added-large-files
12 | args: ["--maxkb=500"]
13 |
14 | - repo: https://github.com/psf/black
15 | rev: 25.1.0
16 | hooks:
17 | - id: black-jupyter
18 | language_version: python3.11
19 |
20 | # - repo: https://github.com/codespell-project/codespell
21 | # rev: v2.3.0
22 | # hooks:
23 | # - id: codespell
24 | # args: [--toml, pyproject-codespell.precommit-toml]
25 |
26 | - repo: https://github.com/kynan/nbstripout
27 | rev: 0.8.1
28 | hooks:
29 | - id: nbstripout
30 |
--------------------------------------------------------------------------------
/Home.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import leafmap.foliumap as leafmap
3 |
4 | st.set_page_config(layout="wide")
5 |
6 | st.sidebar.title("About")
7 | st.sidebar.info(
8 | """
9 | - Web App URL: <https://streamlit.gishub.org>
10 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
11 | """
12 | )
13 |
14 | st.sidebar.title("Contact")
15 | st.sidebar.info(
16 | """
17 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
18 | """
19 | )
20 |
21 | st.sidebar.title("Support")
22 | st.sidebar.info(
23 | """
24 | If you want to reward my work, I'd love a cup of coffee from you. Thanks!
25 | [buymeacoffee.com/giswqs](http://buymeacoffee.com/giswqs)
26 | """
27 | )
28 |
29 |
30 | st.title("Streamlit for Geospatial Applications")
31 |
32 | st.markdown(
33 | """
34 | This multi-page web app demonstrates various interactive web apps created using [streamlit](https://streamlit.io) and open-source mapping libraries,
35 | such as [leafmap](https://leafmap.org), [geemap](https://geemap.org), [pydeck](https://deckgl.readthedocs.io), and [kepler.gl](https://docs.kepler.gl/docs/keplergl-jupyter).
36 | This is an open-source project and you are very welcome to contribute your comments, questions, resources, and apps as [issues](https://github.com/giswqs/streamlit-geospatial/issues) or
37 | [pull requests](https://github.com/giswqs/streamlit-geospatial/pulls) to the [GitHub repository](https://github.com/giswqs/streamlit-geospatial).
38 |
39 | """
40 | )
41 |
42 | st.info("Click on the left sidebar menu to navigate to the different apps.")
43 |
44 | st.subheader("Timelapse of Satellite Imagery")
45 | st.markdown(
46 | """
47 | The following timelapse animations were created using the Timelapse web app. Click `Timelapse` on the left sidebar menu to create your own timelapse for any location around the globe.
48 | """
49 | )
50 |
51 | row1_col1, row1_col2 = st.columns(2)
52 | with row1_col1:
53 | st.image("https://github.com/giswqs/data/raw/main/timelapse/spain.gif")
54 | st.image("https://github.com/giswqs/data/raw/main/timelapse/las_vegas.gif")
55 |
56 | with row1_col2:
57 | st.image("https://github.com/giswqs/data/raw/main/timelapse/goes.gif")
58 | st.image("https://github.com/giswqs/data/raw/main/timelapse/fire.gif")
59 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Qiusheng Wu
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Procfile:
--------------------------------------------------------------------------------
1 | web: sh setup.sh && streamlit run Home.py
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # streamlit-geospatial
2 |
3 | A streamlit multipage app for geospatial applications. It can be deployed to [Streamlit Cloud](https://streamlit.io/cloud), [Heroku](https://heroku.com/), or [MyBinder](https://mybinder.org/).
4 |
5 | [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/giswqs/streamlit-geospatial/master?urlpath=proxy/8501/)
6 |
7 | - Web app: <https://streamlit.gishub.org>
8 | - Source code: <https://github.com/giswqs/streamlit-geospatial>
9 |
10 | ## Instructions
11 |
12 | 1. Fork the GitHub repository to your GitHub account.
13 | 2. Customize the sidebar by changing the sidebar text and logo in each Python file.
14 | 3. Find your favorite emoji from https://emojipedia.org.
15 | 4. Add a new app to the `pages/` directory with an emoji in the file name, e.g., 1_📈_Chart.py (see the sketch below).
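
A minimal page might look like the following sketch (the file name, emoji, and chart data are illustrative only):

```python
# pages/1_📈_Chart.py (hypothetical example page)
import pandas as pd
import streamlit as st

st.set_page_config(layout="wide")
st.title("Chart")

# Illustrative data only; replace with your own dataset.
df = pd.DataFrame({"year": [2019, 2020, 2021, 2022], "value": [1.0, 1.5, 1.2, 1.8]})
st.line_chart(df.set_index("year"))
```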
16 |
17 | ## Demo
18 |
19 | 
20 |
21 | ## Real Estate Data and Market Trends
22 |
23 | 
24 |
--------------------------------------------------------------------------------
/data/cog_files.txt:
--------------------------------------------------------------------------------
1 | https://www.maxar.com/open-data/california-colorado-fires
2 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2018-02-16/pine-gulch-fire20/1030010076004E00.tif
3 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2018-08-18/pine-gulch-fire20/1040010041D3B300.tif
4 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2018-11-13/grizzly-creek-fire20/1040010045785200.tif
5 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2018-11-13/grizzly-creek-fire20/10400100443AEC00.tif
6 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-02-06/czu-lightning-complex-fire/104001004941E100.tif
7 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-02-18/cameron-peak-fire20/103001008DA5B500.tif
8 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-02-22/czu-lightning-complex-fire/103001008DB2E200.tif
9 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-04-01/grizzly-creek-fire20/104001004881EF00.tif
10 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-04-17/czu-lightning-complex-fire/103001008F905300.tif
11 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-04-17/czu-lightning-complex-fire/1030010092B22200.tif
12 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-06-27/czu-lightning-complex-fire/1030010094A52300.tif
13 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-09-08/czu-lightning-complex-fire/103001009C9FBB00.tif
14 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-09-24/lnu-lightning-complex-fire/103001009A079B00.tif
15 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-10-05/czu-lightning-complex-fire/103001009C10F800.tif
16 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-10-05/czu-lightning-complex-fire/103001009A266800.tif
17 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-11-04/czu-lightning-complex-fire/1050010019917900.tif
18 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-11-04/czu-lightning-complex-fire/1050010019917800.tif
19 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-11-18/czu-lightning-complex-fire/1050010019C2F600.tif
20 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-11-28/cameron-peak-fire20/103001009D72E000.tif
21 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-12-10/czu-lightning-complex-fire/105001001A3A8700.tif
22 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-12-28/lnu-lightning-complex-fire/10300100A1972700.tif
23 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-12-28/lnu-lightning-complex-fire/103001009F5D6B00.tif
24 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-01-15/cameron-peak-fire20/1040010057992100.tif
25 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-04-15/lnu-lightning-complex-fire/10300100A4B23600.tif
26 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-04-23/czu-lightning-complex-fire/10300100A589D100.tif
27 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-05-09/lnu-lightning-complex-fire/10300100A332EE00.tif
28 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-05-23/river-carmel-fires/10300100A77E9400.tif
29 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-05-23/river-carmel-fires/10300100A500A500.tif
30 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-05-24/river-carmel-fires/105001001D64E200.tif
31 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-06-27/lnu-lightning-complex-fire/10300100A8663800.tif
32 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-06-30/river-carmel-fires/10300100A9D60C00.tif
33 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-06-30/czu-lightning-complex-fire/10300100A8C66400.tif
34 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-06-30/czu-lightning-complex-fire/10300100A8892900.tif
35 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-07-11/czu-lightning-complex-fire/10300100AB381200.tif
36 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-07-11/czu-lightning-complex-fire/10300100AA180600.tif
37 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-07-13/pine-gulch-fire20/10300100AA57D700.tif
38 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-07-20/lnu-lightning-complex-fire/104001005C529000.tif
39 | https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-07-28/pine-gulch-fire20/104001005DB06E00.tif
40 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-14/pine-gulch-fire20/10300100AAC8DD00.tif
41 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-16/pine-gulch-fire20/104001005D4A6100.tif
42 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-17/grizzly-creek-fire20/10300100ACCA3700.tif
43 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-17/cameron-peak-fire20/10300100AB4ED400.tif
44 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-20/swir-cog/104A0100606FFE00.tif
45 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-20/pine-gulch-fire20/10300100ACD06200.tif
46 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-20/pine-gulch-fire20/10300100AAD4A000.tif
47 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-20/pine-gulch-fire20/10300100AA293800.tif
48 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-20/lnu-lightning-complex-fire/10400100606FFE00.tif
49 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/river-carmel-fires/10300100ACBA2B00.tif
50 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/river-carmel-fires/10300100AA49F600.tif
51 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/lnu-lightning-complex-fire/104001005C1AC900.tif
52 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/river-carmel-fires/104001005F9F5300.tif
53 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/river-carmel-fires/104001005F453300.tif
54 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/river-carmel-fires/10300100ADC14400.tif
55 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/czu-lightning-complex-fire/104001005F43D400.tif
56 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-23/grizzly-creek-fire20/104001005FA09C00.tif
57 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-23/grizzly-creek-fire20/104001005DC71000.tif
58 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-26/river-carmel-fires/105001001F58F000.tif
59 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-26/lnu-lightning-complex-fire/10300100AC163A00.tif
60 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-29/river-carmel-fires/10300100AAD27500.tif
61 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-29/river-carmel-fires/10300100A9C75A00.tif
62 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-09-03/cameron-peak-fire20/1040010060188800.tif
63 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-09-03/cameron-peak-fire20/104001005F7E6500.tif
64 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-09-03/cameron-peak-fire20/10300100AE685A00.tif
65 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-09-04/cameron-peak-fire20/1040010060761C00.tif
66 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-05/cameron-peak-fire20/104001006113B700.tif
67 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-05/cameron-peak-fire20/10400100610CD400.tif
68 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-12/cameron-peak-fire20/1040010062B14C00.tif
69 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-12/cameron-peak-fire20/10400100626BFA00.tif
70 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-12/cameron-peak-fire20/10400100622A6600.tif
71 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-12/cameron-peak-fire20/10400100606B6300.tif
72 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-12/cameron-peak-fire20/104001005F908800.tif
73 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-15/cameron-peak-fire20/10500100205EDA00.tif
74 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-15/cameron-peak-fire20/10500100205ED900.tif
75 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-22/east-troublesome-fire20/10300100B0004A00.tif
76 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-22/east-troublesome-fire20/10300100AD0D1200.tif
77 | https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-22/east-troublesome-fire20/10300100AD0CA600.tif
78 |
--------------------------------------------------------------------------------
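Note: apart from the first line (the Maxar open-data landing page), the list above contains Cloud Optimized GeoTIFF (COG) URLs. Below is a small sketch of how one of them might be previewed with leafmap; the layer name is illustrative, and add_cog_layer is assumed to be available in the installed leafmap version.

import leafmap.foliumap as leafmap

# Keep only the .tif entries; the first line of the file is a web page, not a COG.
with open("data/cog_files.txt") as f:
    urls = [line.strip() for line in f if line.strip().endswith(".tif")]

m = leafmap.Map()
# Stream tiles directly from the remote GeoTIFF.
m.add_cog_layer(urls[0], name="Pine Gulch Fire (pre-event, 2018-02-16)")
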
/data/html/sfo_buildings.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html lang="en">
3 | <head>
4 | <meta charset="utf-8">
5 | <!-- Include the CesiumJS JavaScript and CSS files -->
6 | <script src="https://cesium.com/downloads/cesiumjs/releases/1.88/Build/Cesium/Cesium.js"></script>
7 | <link href="https://cesium.com/downloads/cesiumjs/releases/1.88/Build/Cesium/Widgets/widgets.css" rel="stylesheet">
8 | </head>
9 | <body>
10 | <div id="cesiumContainer"></div>
11 | <script>
12 | // Your access token can be found at: https://cesium.com/ion/tokens.
13 | // Replace `your_access_token` with your Cesium ion access token.
14 |
15 | Cesium.Ion.defaultAccessToken = 'your_access_token';
16 |
17 | // Initialize the Cesium Viewer in the HTML element with the `cesiumContainer` ID.
18 | const viewer = new Cesium.Viewer('cesiumContainer', {
19 | terrainProvider: Cesium.createWorldTerrain()
20 | });
21 | // Add Cesium OSM Buildings, a global 3D buildings layer.
22 | const buildingTileset = viewer.scene.primitives.add(Cesium.createOsmBuildings());
23 | // Fly the camera to San Francisco at the given longitude, latitude, and height.
24 | viewer.camera.flyTo({
25 | destination : Cesium.Cartesian3.fromDegrees(-122.4175, 37.655, 400),
26 | orientation : {
27 | heading : Cesium.Math.toRadians(0.0),
28 | pitch : Cesium.Math.toRadians(-15.0),
29 | }
30 | });
31 | </script>
32 | </body>
33 | </html>
34 |
--------------------------------------------------------------------------------
/data/realtor_data_dict.csv:
--------------------------------------------------------------------------------
1 | Name,Label,Description
2 | median_listing_price,Median Listing Price,The median listing price within the specified geography during the specified month.
3 | median_listing_price_mm,Median Listing Price M/M,The percentage change in the median listing price from the previous month.
4 | median_listing_price_yy,Median Listing Price Y/Y,The percentage change in the median listing price from the same month in the previous year.
5 | active_listing_count,Active Listing Count,"The count of active listings within the specified geography during the specified month. The active listing count tracks the number of for sale properties on the market, excluding pending listings where a pending status is available. This is a snapshot measure of how many active listings can be expected on any given day of the specified month."
6 | active_listing_count_mm,Active Listing Count M/M,The percentage change in the active listing count from the previous month.
7 | active_listing_count_yy,Active Listing Count Y/Y,The percentage change in the active listing count from the same month in the previous year.
8 | median_days_on_market,Days on Market,The median number of days property listings spend on the market within the specified geography during the specified month. Time spent on the market is defined as the time between the initial listing of a property and either its closing date or the date it is taken off the market.
9 | median_days_on_market_mm,Days on Market M/M,The percentage change in the median days on market from the previous month.
10 | median_days_on_market_yy,Days on Market Y/Y,The percentage change in the median days on market from the same month in the previous year.
11 | new_listing_count,New Listing Count,The count of new listings added to the market within the specified geography. The new listing count represents a typical week’s worth of new listings in a given month. The new listing count can be multiplied by the number of weeks in a month to produce a monthly new listing count.
12 | new_listing_count_mm,New Listing Count M/M,The percentage change in the new listing count from the previous month.
13 | new_listing_count_yy,New Listing Count Y/Y,The percentage change in the new listing count from the same month in the previous year.
14 | price_increased_count,Price Increase Count,The count of listings which have had their price increased within the specified geography. The price increase count represents a typical week’s worth of listings which have had their price increased in a given month. The price increase count can be multiplied by the number of weeks in a month to produce a monthly price increase count.
15 | price_increased_count_mm,Price Increase Count M/M,The percentage change in the price increase count from the previous month.
16 | price_increased_count_yy,Price Increase Count Y/Y,The percentage change in the price increase count from the same month in the previous year.
17 | price_reduced_count,Price Decrease Count,The count of listings which have had their price reduced within the specified geography. The price decrease count represents a typical week’s worth of listings which have had their price reduced in a given month. The price decrease count can be multiplied by the number of weeks in a month to produce a monthly price decrease count.
18 | price_reduced_count_mm,Price Decrease Count M/M,The percentage change in the price decrease count from the previous month.
19 | price_reduced_count_yy,Price Decrease Count Y/Y,The percentage change in the price decrease count from the same month in the previous year.
20 | pending_listing_count,Pending Listing Count,"The count of pending listings within the specified geography during the specified month, if a pending definition is available for that geography. This is a snapshot measure of how many pending listings can be expected on any given day of the specified month."
21 | pending_listing_count_mm,Pending Listing Count M/M,The percentage change in the pending listing count from the previous month.
22 | pending_listing_count_yy,Pending Listing Count Y/Y,The percentage change in the pending listing count from the same month in the previous year.
23 | median_listing_price_per_square_foot,Median List Price Per Sqft,The median listing price per square foot within the specified geography during the specified month.
24 | median_listing_price_per_square_foot_mm,Median List Price Per Sqft M/M,The percentage change in the median listing price per square foot from the previous month.
25 | median_listing_price_per_square_foot_yy,Median List Price Per Sqft Y/Y,The percentage change in the median listing price per square foot from the same month in the previous year.
26 | median_square_feet,Median Listing Sqft,The median listing square feet within the specified geography during the specified month.
27 | median_square_feet_mm,Median Listing Sqft M/M,The percentage change in the median listing square feet from the previous month.
28 | median_square_feet_yy,Median Listing Sqft Y/Y,The percentage change in the median listing square feet from the same month in the previous year.
29 | average_listing_price,Avg Listing Price,The average listing price within the specified geography during the specified month.
30 | average_listing_price_mm,Avg Listing Price M/M,The percentage change in the average listing price from the previous month.
31 | average_listing_price_yy,Avg Listing Price Y/Y,The percentage change in the average listing price from the same month in the previous year.
32 | total_listing_count,Total Listing Count,The total of both active listings and pending listings within the specified geography during the specified month. This is a snapshot measure of how many total listings can be expected on any given day of the specified month.
33 | total_listing_count_mm,Total Listing Count M/M,The percentage change in the total listing count from the previous month.
34 | total_listing_count_yy,Total Listing Count Y/Y,The percentage change in the total listing count from the same month in the previous year.
35 | pending_ratio,Pending Ratio,The ratio of the pending listing count to the active listing count within the specified geography during the specified month.
36 | pending_ratio_mm,Pending Ratio M/M,The change in the pending ratio from the previous month.
37 | pending_ratio_yy,Pending Ratio Y/Y,The change in the pending ratio from the same month in the previous year.
38 |
--------------------------------------------------------------------------------
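Note: the data dictionary above says the weekly new-listing and price-change counts can be multiplied by the number of weeks in a month to estimate monthly totals, and defines the pending ratio as pending listings divided by active listings. Below is a minimal pandas sketch of those two calculations; the sample values and the 4.345 weeks-per-month factor are assumptions, not values from the dataset.

import pandas as pd

# Hypothetical one-row sample using column names from realtor_data_dict.csv.
df = pd.DataFrame(
    {
        "new_listing_count": [850],  # a typical week's worth of new listings
        "pending_listing_count": [1200],
        "active_listing_count": [4800],
    }
)

weeks_per_month = 4.345  # assumed average number of weeks in a month
df["monthly_new_listing_count"] = df["new_listing_count"] * weeks_per_month
df["pending_ratio"] = df["pending_listing_count"] / df["active_listing_count"]
print(df[["monthly_new_listing_count", "pending_ratio"]])
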
/data/scotland_xyz.tsv:
--------------------------------------------------------------------------------
1 | Name URL
2 | Ordnance Survey - Air Photos, 1944-1950 - 1:10,560 https://geo.nls.uk/maps/air-photos/{z}/{x}/{y}.png
3 | Ordnance Survey - Six Inch Scotland, 1843-1882 - 1:10,560 https://mapseries-tilesets.s3.amazonaws.com/os/6inchfirst/{z}/{x}/{y}.png
4 | War Office, Great Britain 1:25,000. GSGS 3906, 1940-43 https://mapseries-tilesets.s3.amazonaws.com/gsgs3906/{z}/{x}/{y}.png
5 | Roy - Roy Highlands, 1747-1752 - 1:36000 https://mapseries-tilesets.s3.amazonaws.com/roy/highlands/{z}/{x}/{y}.png
6 | Roy - Roy Lowlands, 1752-1755 - 1:36000 https://mapseries-tilesets.s3.amazonaws.com/roy/lowlands/{z}/{x}/{y}.png
7 | Great Britain - OS 1:10,560, 1949-1970 https://mapseries-tilesets.s3.amazonaws.com/os/britain10knatgrid/{z}/{x}/{y}.png
8 | Great Britain - Bartholomew Half Inch, 1897-1907 https://mapseries-tilesets.s3.amazonaws.com/bartholomew_great_britain/{z}/{x}/{y}.png
9 | OS 25 inch, 1892-1914 - Scotland South https://mapseries-tilesets.s3.amazonaws.com/25_inch/scotland_1/{z}/{x}/{y}.png
10 | OS 25 inch, 1892-1914 - Scotland North https://mapseries-tilesets.s3.amazonaws.com/25_inch/scotland_2/{z}/{x}/{y}.png
11 | OS 25 inch, 1892-1914 - Bedfordshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/bedfordshire/{z}/{x}/{y}.png
12 | OS 25 inch, 1892-1914 - Berkshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/berkshire/{z}/{x}/{y}.png
13 | OS 25 inch, 1892-1914 - Buckinghamshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/buckingham/{z}/{x}/{y}.png
14 | OS 25 inch, 1892-1914 - Cambridgeshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/cambridge/{z}/{x}/{y}.png
15 | OS 25 inch, 1892-1914 - Cheshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/cheshire/{z}/{x}/{y}.png
16 | OS 25 inch, 1892-1914 - Cornwall https://mapseries-tilesets.s3.amazonaws.com/25_inch/cornwall/{z}/{x}/{y}.png
17 | OS 25 inch, 1892-1914 - Cumberland https://mapseries-tilesets.s3.amazonaws.com/25_inch/cumberland/{z}/{x}/{y}.png
18 | OS 25 inch, 1892-1914 - Devon https://mapseries-tilesets.s3.amazonaws.com/25_inch/devon/{z}/{x}/{y}.png
19 | OS 25 inch, 1892-1914 - Dorset https://mapseries-tilesets.s3.amazonaws.com/25_inch/dorset/{z}/{x}/{y}.png
20 | OS 25 inch, 1892-1914 - Durham https://mapseries-tilesets.s3.amazonaws.com/25_inch/durham/{z}/{x}/{y}.png
21 | OS 25 inch, 1892-1914 - Essex https://mapseries-tilesets.s3.amazonaws.com/25_inch/essex/{z}/{x}/{y}.png
22 | OS 25 inch, 1892-1914 - Gloucestershire https://mapseries-tilesets.s3.amazonaws.com/25_inch/gloucestershire/{z}/{x}/{y}.png
23 | OS 25 inch, 1892-1914 - Hampshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/hampshire/{z}/{x}/{y}.png
24 | OS 25 inch, 1892-1914 - Herefordshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/herefordshire/{z}/{x}/{y}.png
25 | OS 25 inch, 1892-1914 - Hertfordshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/hertfordshire/{z}/{x}/{y}.png
26 | OS 25 inch, 1892-1914 - Huntingdon https://mapseries-tilesets.s3.amazonaws.com/25_inch/huntingdon/{z}/{x}/{y}.png
27 | OS 25 inch, 1892-1914 - Kent https://mapseries-tilesets.s3.amazonaws.com/25_inch/kent/{z}/{x}/{y}.png
28 | OS 25 inch, 1892-1914 - Lancashire https://mapseries-tilesets.s3.amazonaws.com/25_inch/lancashire/{z}/{x}/{y}.png
29 | OS 25 inch, 1892-1914 - Leicestershire https://mapseries-tilesets.s3.amazonaws.com/25_inch/leicestershire/{z}/{x}/{y}.png
30 | OS 25 inch, 1892-1914 - Lincolnshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/lincolnshire/{z}/{x}/{y}.png
31 | OS 25 inch, 1892-1914 - London https://mapseries-tilesets.s3.amazonaws.com/25_inch/london/{z}/{x}/{y}.png
32 | OS 25 inch, 1892-1914 - Middlesex https://mapseries-tilesets.s3.amazonaws.com/25_inch/middlesex/{z}/{x}/{y}.png
33 | OS 25 inch, 1892-1914 - Norfolk https://mapseries-tilesets.s3.amazonaws.com/25_inch/norfolk/{z}/{x}/{y}.png
34 | OS 25 inch, 1892-1914 - Northamptonshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/northampton/{z}/{x}/{y}.png
35 | OS 25 inch, 1892-1914 - Northumberland https://mapseries-tilesets.s3.amazonaws.com/25_inch/northumberland/{z}/{x}/{y}.png
36 | OS 25 inch, 1892-1914 - Nottinghamshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/nottinghamshire/{z}/{x}/{y}.png
37 | OS 25 inch, 1892-1914 - Oxford https://mapseries-tilesets.s3.amazonaws.com/25_inch/oxford/{z}/{x}/{y}.png
38 | OS 25 inch, 1892-1914 - Rutland https://mapseries-tilesets.s3.amazonaws.com/25_inch/rutland/{z}/{x}/{y}.png
39 | OS 25 inch, 1892-1914 - Shropshire / Derbyshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/Shrop_Derby/{z}/{x}/{y}.png
40 | OS 25 inch, 1892-1914 - Somerset https://mapseries-tilesets.s3.amazonaws.com/25_inch/somerset/{z}/{x}/{y}.png
41 | OS 25 inch, 1892-1914 - Stafford https://mapseries-tilesets.s3.amazonaws.com/25_inch/stafford/{z}/{x}/{y}.png
42 | OS 25 inch, 1892-1914 - Suffolk https://mapseries-tilesets.s3.amazonaws.com/25_inch/suffolk/{z}/{x}/{y}.png
43 | OS 25 inch, 1892-1914 - Surrey https://mapseries-tilesets.s3.amazonaws.com/25_inch/surrey/{z}/{x}/{y}.png
44 | OS 25 inch, 1892-1914 - Sussex https://mapseries-tilesets.s3.amazonaws.com/25_inch/sussex/{z}/{x}/{y}.png
45 | OS 25 inch, 1892-1914 - Wales https://mapseries-tilesets.s3.amazonaws.com/25_inch/wales/{z}/{x}/{y}.png
46 | OS 25 inch, 1892-1914 - Warwick https://mapseries-tilesets.s3.amazonaws.com/25_inch/warwick/{z}/{x}/{y}.png
47 | OS 25 inch, 1892-1914 - Westmorland https://mapseries-tilesets.s3.amazonaws.com/25_inch/westmorland/{z}/{x}/{y}.png
48 | OS 25 inch, 1892-1914 - Wiltshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/wiltshire2nd/{z}/{x}/{y}.png
49 | OS 25 inch, 1892-1914 - Worcestershire https://mapseries-tilesets.s3.amazonaws.com/25_inch/Worcestershire/{z}/{x}/{y}.png
50 | OS 25 inch, 1892-1914 - Yorkshire https://mapseries-tilesets.s3.amazonaws.com/25_inch/yorkshire/{z}/{x}/{y}.png
51 | OS 25 inch, 1892-1914 'Holes' (fills gaps in series) https://geo.nls.uk/mapdata3/os/25_inch_holes_england/{z}/{x}/{y}.png
52 |
--------------------------------------------------------------------------------
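Note: each row above is an XYZ tile URL template in which {z}/{x}/{y} are filled in per tile request; pages/11_🧱_Ordnance_Survey.py reads this table and passes the URLs to leafmap. Below is a condensed sketch of that pattern, with an arbitrary row and map center.

import pandas as pd
import leafmap.foliumap as leafmap

df = pd.read_csv("data/scotland_xyz.tsv", sep="\t")
row = df[df["Name"].str.contains("Bartholomew")].iloc[0]

m = leafmap.Map(center=[55.68, -2.98], zoom=6)
m.add_tile_layer(row["URL"], row["Name"], attribution="National Library of Scotland")
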
/environment-bk.yml:
--------------------------------------------------------------------------------
1 | name: geo
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - gdal=3.4.3
6 | - pip
7 | - pip:
8 | - geopandas
9 | - keplergl
10 | - streamlit
11 | - localtileserver
12 | - palettable
13 | - streamlit-folium
14 | - streamlit-keplergl
15 | - streamlit-bokeh-events
16 | - git+https://github.com/giswqs/leafmap
17 | - git+https://github.com/giswqs/geemap
18 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html>
3 | <head>
4 | <title>Streamlit for Geospatial</title>
5 | <style type="text/css">
6 | html {
7 | overflow: auto;
8 | }
9 | html,
10 | body,
11 | div,
12 | iframe {
13 | margin: 0px;
14 | padding: 0px;
15 | height: 100%;
16 | border: none;
17 | }
18 | iframe {
19 | display: block;
20 | width: 100%;
21 | border: none;
22 | overflow-y: auto;
23 | overflow-x: hidden;
24 | }
25 | </style>
26 | </head>
27 | <body>
28 | <iframe
29 | src="https://share.streamlit.io/giswqs/streamlit-geospatial/app.py"
30 | frameborder="0"
31 | marginheight="0"
32 | marginwidth="0"
33 | width="100%"
34 | height="100%"
35 | scrolling="auto"
36 | >
37 | </iframe>
38 | </body>
39 | </html>
40 |
--------------------------------------------------------------------------------
/packages.txt:
--------------------------------------------------------------------------------
1 | ffmpeg
2 | gifsicle
3 | build-essential
4 | python3-dev
5 | gdal-bin
6 | libgdal-dev
7 | libproj-dev
8 | libgeos-dev
9 | proj-bin
10 |
--------------------------------------------------------------------------------
/pages/10_🌍_Earth_Engine_Datasets.py:
--------------------------------------------------------------------------------
1 | import ee
2 | import json
3 | import streamlit as st
4 | import geemap.foliumap as geemap
5 |
6 | st.set_page_config(layout="wide")
7 |
8 | st.sidebar.info(
9 | """
10 | - Web App URL: <https://streamlit.gishub.org>
11 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
12 | """
13 | )
14 |
15 | st.sidebar.title("Contact")
16 | st.sidebar.info(
17 | """
18 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
19 | """
20 | )
21 |
22 |
23 | def nlcd():
24 |
25 | # st.header("National Land Cover Database (NLCD)")
26 |
27 | row1_col1, row1_col2 = st.columns([3, 1])
28 | width = 950
29 | height = 600
30 |
31 | Map = geemap.Map(center=[40, -100], zoom=4)
32 |
33 | # Select the seven NLCD epoches after 2000.
34 | years = ["2001", "2004", "2006", "2008", "2011", "2013", "2016", "2019"]
35 |
36 | # Get an NLCD image by year.
37 | def getNLCD(year):
38 | # Import the NLCD collection.
39 | dataset = ee.ImageCollection("USGS/NLCD_RELEASES/2019_REL/NLCD")
40 |
41 | # Filter the collection by year.
42 | nlcd = dataset.filter(ee.Filter.eq("system:index", year)).first()
43 |
44 | # Select the land cover band.
45 | landcover = nlcd.select("landcover")
46 | return landcover
47 |
48 | with row1_col2:
49 | selected_year = st.multiselect("Select a year", years)
50 | add_legend = st.checkbox("Show legend")
51 |
52 | if selected_year:
53 | for year in selected_year:
54 | Map.addLayer(getNLCD(year), {}, "NLCD " + year)
55 |
56 | if add_legend:
57 | Map.add_legend(
58 | legend_title="NLCD Land Cover Classification", builtin_legend="NLCD"
59 | )
60 | with row1_col1:
61 | Map.to_streamlit(width=width, height=height)
62 |
63 | else:
64 | with row1_col1:
65 | Map.to_streamlit(width=width, height=height)
66 |
67 |
68 | def search_data():
69 |
70 | # st.header("Search Earth Engine Data Catalog")
71 |
72 | Map = geemap.Map()
73 |
74 | if "ee_assets" not in st.session_state:
75 | st.session_state["ee_assets"] = None
76 | if "asset_titles" not in st.session_state:
77 | st.session_state["asset_titles"] = None
78 |
79 | col1, col2 = st.columns([2, 1])
80 |
81 | dataset = None
82 | with col2:
83 | keyword = st.text_input("Enter a keyword to search (e.g., elevation)", "")
84 | if keyword:
85 | ee_assets = geemap.search_ee_data(keyword)
86 | asset_titles = [x["title"] for x in ee_assets]
87 | asset_types = [x["type"] for x in ee_assets]
88 |
89 | translate = {
90 | "image_collection": "ee.ImageCollection('",
91 | "image": "ee.Image('",
92 | "table": "ee.FeatureCollection('",
93 | "table_collection": "ee.FeatureCollection('",
94 | }
95 |
96 | dataset = st.selectbox("Select a dataset", asset_titles)
97 | if len(ee_assets) > 0:
98 | st.session_state["ee_assets"] = ee_assets
99 | st.session_state["asset_titles"] = asset_titles
100 |
101 | if dataset is not None:
102 | with st.expander("Show dataset details", True):
103 | index = asset_titles.index(dataset)
104 |
105 | html = geemap.ee_data_html(st.session_state["ee_assets"][index])
106 | html = html.replace("\n", "")
107 | st.markdown(html, True)
108 |
109 | ee_id = ee_assets[index]["id"]
110 | uid = ee_assets[index]["uid"]
111 | st.markdown(f"""**Earth Engine Snippet:** `{ee_id}`""")
112 | ee_asset = f"{translate[asset_types[index]]}{ee_id}')"
113 |
114 | if ee_asset.startswith("ee.ImageCollection"):
115 | ee_asset = ee.ImageCollection(ee_id)
116 | elif ee_asset.startswith("ee.Image"):
117 | ee_asset = ee.Image(ee_id)
118 | elif ee_asset.startswith("ee.FeatureCollection"):
119 | ee_asset = ee.FeatureCollection(ee_id)
120 |
121 | vis_params = st.text_input(
122 | "Enter visualization parameters as a dictionary", {}
123 | )
124 | layer_name = st.text_input("Enter a layer name", uid)
125 | button = st.button("Add dataset to map")
126 | if button:
127 | vis = {}
128 | try:
129 | if vis_params.strip() == "":
130 | # st.error("Please enter visualization parameters")
131 | vis_params = "{}"
132 | vis = json.loads(vis_params.replace("'", '"'))
133 | if not isinstance(vis, dict):
134 | st.error("Visualization parameters must be a dictionary")
135 | try:
136 | Map.addLayer(ee_asset, vis, layer_name)
137 | except Exception as e:
138 | st.error(f"Error adding layer: {e}")
139 | except Exception as e:
140 | st.error(f"Invalid visualization parameters: {e}")
141 |
142 | with col1:
143 | Map.to_streamlit()
144 | else:
145 | with col1:
146 | Map.to_streamlit()
147 |
148 |
149 | def app():
150 | st.title("Earth Engine Data Catalog")
151 |
152 | apps = ["Search Earth Engine Data Catalog", "National Land Cover Database (NLCD)"]
153 |
154 | selected_app = st.selectbox("Select an app", apps)
155 |
156 | if selected_app == "National Land Cover Database (NLCD)":
157 | nlcd()
158 | elif selected_app == "Search Earth Engine Data Catalog":
159 | search_data()
160 |
161 |
162 | app()
163 |
--------------------------------------------------------------------------------
/pages/11_🧱_Ordnance_Survey.py:
--------------------------------------------------------------------------------
1 | import folium
2 | import pandas as pd
3 | import streamlit as st
4 | import leafmap.foliumap as leafmap
5 | import folium.plugins as plugins
6 |
7 | st.set_page_config(layout="wide")
8 |
9 | st.sidebar.info(
10 | """
11 | - Web App URL: <https://streamlit.gishub.org>
12 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
13 | """
14 | )
15 |
16 | st.sidebar.title("Contact")
17 | st.sidebar.info(
18 | """
19 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
20 | """
21 | )
22 |
23 | st.title("National Library of Scotland XYZ Layers")
24 | df = pd.read_csv("data/scotland_xyz.tsv", sep="\t")
25 | basemaps = leafmap.basemaps
26 | names = df["Name"].values.tolist() + list(basemaps.keys())
27 | links = df["URL"].values.tolist() + list(basemaps.values())
28 |
29 | col1, col2, col3, col4, col5, col6, col7 = st.columns([3, 3, 1, 1, 1, 1.5, 1.5])
30 | with col1:
31 | left_name = st.selectbox(
32 | "Select the left layer",
33 | names,
34 | index=names.index("Great Britain - Bartholomew Half Inch, 1897-1907"),
35 | )
36 |
37 | with col2:
38 | right_name = st.selectbox(
39 | "Select the right layer",
40 | names,
41 | index=names.index("HYBRID"),
42 | )
43 |
44 | with col3:
45 | # lat = st.slider('Latitude', -90.0, 90.0, 55.68, step=0.01)
46 | lat = st.text_input("Latitude", " 55.68")
47 |
48 | with col4:
49 | # lon = st.slider('Longitude', -180.0, 180.0, -2.98, step=0.01)
50 | lon = st.text_input("Longitude", "-2.98")
51 |
52 | with col5:
53 | # zoom = st.slider('Zoom', 1, 24, 6, step=1)
54 | zoom = st.text_input("Zoom", "6")
55 |
56 | with col6:
57 | checkbox = st.checkbox("Add OS 25 inch")
58 |
59 | # with col7:
60 | with st.expander("Acknowledgements"):
61 | markdown = """
62 | The map tile access is by kind arrangement of the National Library of Scotland on the understanding that re-use is for personal purposes. They host most of the map layers except these:
63 | - The Roy Maps are owned by the British Library.
64 | - The Great Britain – OS maps 1:25,000, 1937-61 and One Inch 7th series, 1955-61 are hosted by MapTiler.
65 |
66 | If you wish to use these layers within a website, or for a commercial or public purpose, please view the [National Library of Scotland Historic Maps Subscription API](https://maps.nls.uk/projects/subscription-api/) or contact them at maps@nls.uk.
67 | """
68 | st.markdown(markdown, unsafe_allow_html=True)
69 |
70 | m = leafmap.Map(
71 | center=[float(lat), float(lon)],
72 | zoom=int(zoom),
73 | locate_control=True,
74 | draw_control=False,
75 | measure_control=False,
76 | )
77 | measure = plugins.MeasureControl(position="bottomleft", active_color="orange")
78 | measure.add_to(m)
79 |
80 | if left_name in basemaps:
81 | left_layer = basemaps[left_name]
82 | else:
83 | left_layer = folium.TileLayer(
84 | tiles=links[names.index(left_name)],
85 | name=left_name,
86 | attr="National Library of Scotland",
87 | overlay=True,
88 | )
89 |
90 | if right_name in basemaps:
91 | right_layer = basemaps[right_name]
92 | else:
93 | right_layer = folium.TileLayer(
94 | tiles=links[names.index(right_name)],
95 | name=right_name,
96 | attr="National Library of Scotland",
97 | overlay=True,
98 | )
99 |
100 | if checkbox:
101 | for index, name in enumerate(names):
102 | if "OS 25 inch" in name:
103 | m.add_tile_layer(
104 | links[index], name, attribution="National Library of Scotland"
105 | )
106 |
107 | m.split_map(left_layer, right_layer)
108 | m.to_streamlit(height=600)
109 |
--------------------------------------------------------------------------------
/pages/12_🌲_Land_Cover_Mapping.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import ee
3 | import streamlit as st
4 | import geemap.foliumap as geemap
5 |
6 | st.set_page_config(layout="wide")
7 |
8 | st.sidebar.info(
9 | """
10 | - Web App URL: <https://streamlit.gishub.org>
11 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
12 | """
13 | )
14 |
15 | st.sidebar.title("Contact")
16 | st.sidebar.info(
17 | """
18 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
19 | """
20 | )
21 |
22 | st.title("Comparing Global Land Cover Maps")
23 |
24 | col1, col2 = st.columns([4, 1])
25 |
26 | Map = geemap.Map()
27 | Map.add_basemap("ESA WorldCover 2020 S2 FCC")
28 | Map.add_basemap("ESA WorldCover 2020 S2 TCC")
29 | Map.add_basemap("HYBRID")
30 |
31 | esa = ee.ImageCollection("ESA/WorldCover/v100").first()
32 | esa_vis = {"bands": ["Map"]}
33 |
34 |
35 | esri = ee.ImageCollection(
36 | "projects/sat-io/open-datasets/landcover/ESRI_Global-LULC_10m"
37 | ).mosaic()
38 | esri_vis = {
39 | "min": 1,
40 | "max": 10,
41 | "palette": [
42 | "#1A5BAB",
43 | "#358221",
44 | "#A7D282",
45 | "#87D19E",
46 | "#FFDB5C",
47 | "#EECFA8",
48 | "#ED022A",
49 | "#EDE9E4",
50 | "#F2FAFF",
51 | "#C8C8C8",
52 | ],
53 | }
54 |
55 |
56 | markdown = """
57 | - [Dynamic World Land Cover](https://developers.google.com/earth-engine/datasets/catalog/GOOGLE_DYNAMICWORLD_V1?hl=en)
58 | - [ESA Global Land Cover](https://developers.google.com/earth-engine/datasets/catalog/ESA_WorldCover_v100)
59 | - [ESRI Global Land Cover](https://samapriya.github.io/awesome-gee-community-datasets/projects/esrilc2020)
60 |
61 | """
62 |
63 | with col2:
64 |
65 | longitude = st.number_input("Longitude", -180.0, 180.0, -89.3998)
66 | latitude = st.number_input("Latitude", -90.0, 90.0, 43.0886)
67 | zoom = st.number_input("Zoom", 0, 20, 11)
68 |
69 | Map.setCenter(longitude, latitude, zoom)
70 |
71 | start = st.date_input("Start Date for Dynamic World", datetime.date(2020, 1, 1))
72 | end = st.date_input("End Date for Dynamic World", datetime.date(2021, 1, 1))
73 |
74 | start_date = start.strftime("%Y-%m-%d")
75 | end_date = end.strftime("%Y-%m-%d")
76 |
77 | region = ee.Geometry.BBox(-179, -89, 179, 89)
78 | dw = geemap.dynamic_world(region, start_date, end_date, return_type="hillshade")
79 |
80 | layers = {
81 | "Dynamic World": geemap.ee_tile_layer(dw, {}, "Dynamic World Land Cover"),
82 | "ESA Land Cover": geemap.ee_tile_layer(esa, esa_vis, "ESA Land Cover"),
83 | "ESRI Land Cover": geemap.ee_tile_layer(esri, esri_vis, "ESRI Land Cover"),
84 | }
85 |
86 | options = list(layers.keys())
87 | left = st.selectbox("Select a left layer", options, index=1)
88 | right = st.selectbox("Select a right layer", options, index=0)
89 |
90 | left_layer = layers[left]
91 | right_layer = layers[right]
92 |
93 | Map.split_map(left_layer, right_layer)
94 |
95 | legend = st.selectbox("Select a legend", options, index=options.index(right))
96 | if legend == "Dynamic World":
97 | Map.add_legend(
98 | title="Dynamic World Land Cover",
99 | builtin_legend="Dynamic_World",
100 | )
101 | elif legend == "ESA Land Cover":
102 | Map.add_legend(title="ESA Land Cover", builtin_legend="ESA_WorldCover")
103 | elif legend == "ESRI Land Cover":
104 | Map.add_legend(title="ESRI Land Cover", builtin_legend="ESRI_LandCover")
105 |
106 | with st.expander("Data sources"):
107 | st.markdown(markdown)
108 |
109 |
110 | with col1:
111 | Map.to_streamlit(height=750)
112 |
--------------------------------------------------------------------------------
/pages/13_🏗️_Global_Building_Footprints.py:
--------------------------------------------------------------------------------
1 | import ee
2 | import geemap.foliumap as geemap
3 | import geopandas as gpd
4 | import streamlit as st
5 |
6 | st.set_page_config(layout="wide")
7 |
8 |
9 | def ee_authenticate(token_name="EARTHENGINE_TOKEN"):
10 | geemap.ee_initialize(token_name=token_name)
11 |
12 |
13 | st.sidebar.info(
14 | """
15 | - Web App URL: <https://streamlit.gishub.org>
16 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
17 | """
18 | )
19 |
20 | st.sidebar.title("Contact")
21 | st.sidebar.info(
22 | """
23 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
24 | """
25 | )
26 |
27 | st.title("Global Building Footprints")
28 |
29 | col1, col2 = st.columns([8, 2])
30 |
31 |
32 | @st.cache_data
33 | def read_data(url):
34 | return gpd.read_file(url)
35 |
36 |
37 | countries = (
38 | "https://github.com/giswqs/geemap/raw/master/examples/data/countries.geojson"
39 | )
40 | states = "https://github.com/giswqs/geemap/raw/master/examples/data/us_states.json"
41 |
42 | countries_gdf = read_data(countries)
43 | states_gdf = read_data(states)
44 |
45 | country_names = countries_gdf["NAME"].values.tolist()
46 | country_names.remove("United States of America")
47 | country_names.append("USA")
48 | country_names.sort()
49 | country_names = [name.replace(".", "").replace(" ", "_") for name in country_names]
50 |
51 | state_names = states_gdf["name"].values.tolist()
52 |
53 | basemaps = list(geemap.basemaps)
54 |
55 | Map = geemap.Map()
56 |
57 | with col2:
58 |
59 | basemap = st.selectbox("Select a basemap", basemaps, index=basemaps.index("HYBRID"))
60 | Map.add_basemap(basemap)
61 |
62 | country = st.selectbox(
63 | "Select a country", country_names, index=country_names.index("USA")
64 | )
65 |
66 | if country == "USA":
67 | state = st.selectbox(
68 | "Select a state", state_names, index=state_names.index("Florida")
69 | )
70 | layer_name = state
71 |
72 | try:
73 | fc = ee.FeatureCollection(
74 | f"projects/sat-io/open-datasets/MSBuildings/US/{state}"
75 | )
76 | except:
77 | st.error("No data available for the selected state.")
78 |
79 | else:
80 | try:
81 | fc = ee.FeatureCollection(
82 | f"projects/sat-io/open-datasets/MSBuildings/{country}"
83 | )
84 | except:
85 | st.error("No data available for the selected country.")
86 |
87 | layer_name = country
88 |
89 | color = st.color_picker("Select a color", "#FF5500")
90 |
91 | style = {"fillColor": "00000000", "color": color}
92 |
93 | split = st.checkbox("Split-panel map")
94 |
95 | if split:
96 | left = geemap.ee_tile_layer(fc.style(**style), {}, "Left")
97 | right = left
98 | Map.split_map(left, right)
99 | else:
100 | Map.addLayer(fc.style(**style), {}, layer_name)
101 |
102 | Map.centerObject(fc.first(), zoom=16)
103 |
104 | with st.expander("Data Sources"):
105 | st.info(
106 | """
107 | [Microsoft Building Footprints](https://gee-community-catalog.org/projects/msbuildings/)
108 | """
109 | )
110 |
111 |
112 | with col1:
113 |
114 | Map.to_streamlit(height=1000)
115 |
--------------------------------------------------------------------------------
/pages/1_📷_Timelapse.py:
--------------------------------------------------------------------------------
1 | import ee
2 | import json
3 | import os
4 | import warnings
5 | import datetime
6 | import fiona
7 | import geopandas as gpd
8 | import folium
9 | import streamlit as st
10 | import geemap.colormaps as cm
11 | import geemap.foliumap as geemap
12 | from datetime import date
13 | from shapely.geometry import Polygon
14 |
15 | st.set_page_config(layout="wide")
16 | warnings.filterwarnings("ignore")
17 |
18 |
19 | @st.cache_data
20 | def ee_authenticate(token_name="EARTHENGINE_TOKEN"):
21 | geemap.ee_initialize(token_name=token_name)
22 |
23 |
24 | st.sidebar.info(
25 | """
26 | - Web App URL: <https://streamlit.gishub.org>
27 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
28 | """
29 | )
30 |
31 | st.sidebar.title("Contact")
32 | st.sidebar.info(
33 | """
34 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
35 | """
36 | )
37 |
38 | goes_rois = {
39 | "Creek Fire, CA (2020-09-05)": {
40 | "region": Polygon(
41 | [
42 | [-121.003418, 36.848857],
43 | [-121.003418, 39.049052],
44 | [-117.905273, 39.049052],
45 | [-117.905273, 36.848857],
46 | [-121.003418, 36.848857],
47 | ]
48 | ),
49 | "start_time": "2020-09-05T15:00:00",
50 | "end_time": "2020-09-06T02:00:00",
51 | },
52 | "Bomb Cyclone (2021-10-24)": {
53 | "region": Polygon(
54 | [
55 | [-159.5954, 60.4088],
56 | [-159.5954, 24.5178],
57 | [-114.2438, 24.5178],
58 | [-114.2438, 60.4088],
59 | ]
60 | ),
61 | "start_time": "2021-10-24T14:00:00",
62 | "end_time": "2021-10-25T01:00:00",
63 | },
64 | "Hunga Tonga Volcanic Eruption (2022-01-15)": {
65 | "region": Polygon(
66 | [
67 | [-192.480469, -32.546813],
68 | [-192.480469, -8.754795],
69 | [-157.587891, -8.754795],
70 | [-157.587891, -32.546813],
71 | [-192.480469, -32.546813],
72 | ]
73 | ),
74 | "start_time": "2022-01-15T03:00:00",
75 | "end_time": "2022-01-15T07:00:00",
76 | },
77 | "Hunga Tonga Volcanic Eruption Closer Look (2022-01-15)": {
78 | "region": Polygon(
79 | [
80 | [-178.901367, -22.958393],
81 | [-178.901367, -17.85329],
82 | [-171.452637, -17.85329],
83 | [-171.452637, -22.958393],
84 | [-178.901367, -22.958393],
85 | ]
86 | ),
87 | "start_time": "2022-01-15T03:00:00",
88 | "end_time": "2022-01-15T07:00:00",
89 | },
90 | }
91 |
92 |
93 | landsat_rois = {
94 | "Aral Sea": Polygon(
95 | [
96 | [57.667236, 43.834527],
97 | [57.667236, 45.996962],
98 | [61.12793, 45.996962],
99 | [61.12793, 43.834527],
100 | [57.667236, 43.834527],
101 | ]
102 | ),
103 | "Dubai": Polygon(
104 | [
105 | [54.541626, 24.763044],
106 | [54.541626, 25.427152],
107 | [55.632019, 25.427152],
108 | [55.632019, 24.763044],
109 | [54.541626, 24.763044],
110 | ]
111 | ),
112 | "Hong Kong International Airport": Polygon(
113 | [
114 | [113.825226, 22.198849],
115 | [113.825226, 22.349758],
116 | [114.085121, 22.349758],
117 | [114.085121, 22.198849],
118 | [113.825226, 22.198849],
119 | ]
120 | ),
121 | "Las Vegas, NV": Polygon(
122 | [
123 | [-115.554199, 35.804449],
124 | [-115.554199, 36.558188],
125 | [-113.903503, 36.558188],
126 | [-113.903503, 35.804449],
127 | [-115.554199, 35.804449],
128 | ]
129 | ),
130 | "Pucallpa, Peru": Polygon(
131 | [
132 | [-74.672699, -8.600032],
133 | [-74.672699, -8.254983],
134 | [-74.279938, -8.254983],
135 | [-74.279938, -8.600032],
136 | ]
137 | ),
138 | "Sierra Gorda, Chile": Polygon(
139 | [
140 | [-69.315491, -22.837104],
141 | [-69.315491, -22.751488],
142 | [-69.190006, -22.751488],
143 | [-69.190006, -22.837104],
144 | [-69.315491, -22.837104],
145 | ]
146 | ),
147 | }
148 |
149 | modis_rois = {
150 | "World": Polygon(
151 | [
152 | [-171.210938, -57.136239],
153 | [-171.210938, 79.997168],
154 | [177.539063, 79.997168],
155 | [177.539063, -57.136239],
156 | [-171.210938, -57.136239],
157 | ]
158 | ),
159 | "Africa": Polygon(
160 | [
161 | [-18.6983, 38.1446],
162 | [-18.6983, -36.1630],
163 | [52.2293, -36.1630],
164 | [52.2293, 38.1446],
165 | ]
166 | ),
167 | "USA": Polygon(
168 | [
169 | [-127.177734, 23.725012],
170 | [-127.177734, 50.792047],
171 | [-66.269531, 50.792047],
172 | [-66.269531, 23.725012],
173 | [-127.177734, 23.725012],
174 | ]
175 | ),
176 | }
177 |
178 | ocean_rois = {
179 | "Gulf of Mexico": Polygon(
180 | [
181 | [-101.206055, 15.496032],
182 | [-101.206055, 32.361403],
183 | [-75.673828, 32.361403],
184 | [-75.673828, 15.496032],
185 | [-101.206055, 15.496032],
186 | ]
187 | ),
188 | "North Atlantic Ocean": Polygon(
189 | [
190 | [-85.341797, 24.046464],
191 | [-85.341797, 45.02695],
192 | [-55.810547, 45.02695],
193 | [-55.810547, 24.046464],
194 | [-85.341797, 24.046464],
195 | ]
196 | ),
197 | "World": Polygon(
198 | [
199 | [-171.210938, -57.136239],
200 | [-171.210938, 79.997168],
201 | [177.539063, 79.997168],
202 | [177.539063, -57.136239],
203 | [-171.210938, -57.136239],
204 | ]
205 | ),
206 | }
207 |
208 |
209 | @st.cache_data
210 | def uploaded_file_to_gdf(data):
211 | import tempfile
212 | import os
213 | import uuid
214 |
215 | _, file_extension = os.path.splitext(data.name)
216 | file_id = str(uuid.uuid4())
217 | file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
218 |
219 | with open(file_path, "wb") as file:
220 | file.write(data.getbuffer())
221 |
222 | if file_path.lower().endswith(".kml"):
223 | fiona.drvsupport.supported_drivers["KML"] = "rw"
224 | gdf = gpd.read_file(file_path, driver="KML")
225 | else:
226 | gdf = gpd.read_file(file_path)
227 |
228 | return gdf
229 |
230 |
231 | def app():
232 |
233 | today = date.today()
234 |
235 | st.title("Create Satellite Timelapse")
236 |
237 | st.markdown(
238 | """
239 |     An interactive web app for creating [Landsat](https://developers.google.com/earth-engine/datasets/catalog/landsat)/[GOES](https://jstnbraaten.medium.com/goes-in-earth-engine-53fbc8783c16) timelapses for any location around the globe.
240 | The app was built using [streamlit](https://streamlit.io), [geemap](https://geemap.org), and [Google Earth Engine](https://earthengine.google.com). For more info, check out my streamlit [blog post](https://blog.streamlit.io/creating-satellite-timelapse-with-streamlit-and-earth-engine).
241 | """
242 | )
243 |
244 | row1_col1, row1_col2 = st.columns([2, 1])
245 |
246 | if st.session_state.get("zoom_level") is None:
247 | st.session_state["zoom_level"] = 4
248 |
249 | st.session_state["ee_asset_id"] = None
250 | st.session_state["bands"] = None
251 | st.session_state["palette"] = None
252 | st.session_state["vis_params"] = None
253 |
254 | with row1_col1:
255 | ee_authenticate(token_name="EARTHENGINE_TOKEN")
256 | m = geemap.Map(
257 | basemap="HYBRID",
258 | plugin_Draw=True,
259 | Draw_export=True,
260 | locate_control=True,
261 | plugin_LatLngPopup=False,
262 | )
263 | m.add_basemap("ROADMAP")
264 |
265 | with row1_col2:
266 |
267 | keyword = st.text_input("Search for a location:", "")
268 | if keyword:
269 | locations = geemap.geocode(keyword)
270 | if locations is not None and len(locations) > 0:
271 | str_locations = [str(g)[1:-1] for g in locations]
272 | location = st.selectbox("Select a location:", str_locations)
273 | loc_index = str_locations.index(location)
274 | selected_loc = locations[loc_index]
275 | lat, lng = selected_loc.lat, selected_loc.lng
276 | folium.Marker(location=[lat, lng], popup=location).add_to(m)
277 | m.set_center(lng, lat, 12)
278 | st.session_state["zoom_level"] = 12
279 |
280 | collection = st.selectbox(
281 | "Select a satellite image collection: ",
282 | [
283 | "Any Earth Engine ImageCollection",
284 | "Landsat TM-ETM-OLI Surface Reflectance",
285 | "Sentinel-2 MSI Surface Reflectance",
286 | "Geostationary Operational Environmental Satellites (GOES)",
287 | "MODIS Vegetation Indices (NDVI/EVI) 16-Day Global 1km",
288 | "MODIS Gap filled Land Surface Temperature Daily",
289 | "MODIS Ocean Color SMI",
290 | "USDA National Agriculture Imagery Program (NAIP)",
291 | ],
292 | index=1,
293 | )
294 |
295 | if collection in [
296 | "Landsat TM-ETM-OLI Surface Reflectance",
297 | "Sentinel-2 MSI Surface Reflectance",
298 | ]:
299 | roi_options = ["Uploaded GeoJSON"] + list(landsat_rois.keys())
300 |
301 | elif collection == "Geostationary Operational Environmental Satellites (GOES)":
302 | roi_options = ["Uploaded GeoJSON"] + list(goes_rois.keys())
303 |
304 | elif collection in [
305 | "MODIS Vegetation Indices (NDVI/EVI) 16-Day Global 1km",
306 | "MODIS Gap filled Land Surface Temperature Daily",
307 | ]:
308 | roi_options = ["Uploaded GeoJSON"] + list(modis_rois.keys())
309 | elif collection == "MODIS Ocean Color SMI":
310 | roi_options = ["Uploaded GeoJSON"] + list(ocean_rois.keys())
311 | else:
312 | roi_options = ["Uploaded GeoJSON"]
313 |
314 | if collection == "Any Earth Engine ImageCollection":
315 | keyword = st.text_input("Enter a keyword to search (e.g., MODIS):", "")
316 | if keyword:
317 |
318 | assets = geemap.search_ee_data(keyword)
319 | ee_assets = []
320 | for asset in assets:
321 | if asset["ee_id_snippet"].startswith("ee.ImageCollection"):
322 | ee_assets.append(asset)
323 |
324 | asset_titles = [x["title"] for x in ee_assets]
325 | dataset = st.selectbox("Select a dataset:", asset_titles)
326 | if len(ee_assets) > 0:
327 | st.session_state["ee_assets"] = ee_assets
328 | st.session_state["asset_titles"] = asset_titles
329 | index = asset_titles.index(dataset)
330 | ee_id = ee_assets[index]["id"]
331 | else:
332 | ee_id = ""
333 |
334 | if dataset is not None:
335 | with st.expander("Show dataset details", False):
336 | index = asset_titles.index(dataset)
337 | html = geemap.ee_data_html(st.session_state["ee_assets"][index])
338 | st.markdown(html, True)
339 | # elif collection == "MODIS Gap filled Land Surface Temperature Daily":
340 | # ee_id = ""
341 | else:
342 | ee_id = ""
343 |
344 | asset_id = st.text_input("Enter an ee.ImageCollection asset ID:", ee_id)
345 |
346 | if asset_id:
347 | with st.expander("Customize band combination and color palette", True):
348 | try:
349 | col = ee.ImageCollection.load(asset_id)
350 | st.session_state["ee_asset_id"] = asset_id
351 | except:
352 | st.error("Invalid Earth Engine asset ID.")
353 | st.session_state["ee_asset_id"] = None
354 | return
355 |
356 | img_bands = col.first().bandNames().getInfo()
357 | if len(img_bands) >= 3:
358 | default_bands = img_bands[:3][::-1]
359 | else:
360 | default_bands = img_bands[:]
361 | bands = st.multiselect(
362 | "Select one or three bands (RGB):", img_bands, default_bands
363 | )
364 | st.session_state["bands"] = bands
365 |
366 | if len(bands) == 1:
367 | palette_options = st.selectbox(
368 | "Color palette",
369 | cm.list_colormaps(),
370 | index=2,
371 | )
372 | palette_values = cm.get_palette(palette_options, 15)
373 | palette = st.text_area(
374 | "Enter a custom palette:",
375 | palette_values,
376 | )
377 | st.write(
378 | cm.plot_colormap(cmap=palette_options, return_fig=True)
379 | )
380 | st.session_state["palette"] = json.loads(
381 | palette.replace("'", '"')
382 | )
383 |
384 | if bands:
385 | vis_params = st.text_area(
386 | "Enter visualization parameters",
387 | "{'bands': ["
388 | + ", ".join([f"'{band}'" for band in bands])
389 | + "]}",
390 | )
391 | else:
392 | vis_params = st.text_area(
393 | "Enter visualization parameters",
394 | "{}",
395 | )
396 | try:
397 | st.session_state["vis_params"] = json.loads(
398 | vis_params.replace("'", '"')
399 | )
400 | st.session_state["vis_params"]["palette"] = st.session_state[
401 | "palette"
402 | ]
403 | except Exception as e:
404 | st.session_state["vis_params"] = None
405 | st.error(
406 |                             "Invalid visualization parameters. It must be a dictionary."
407 | )
408 |
409 | elif collection == "MODIS Gap filled Land Surface Temperature Daily":
410 | with st.expander("Show dataset details", False):
411 | st.markdown(
412 | """
413 | See the [Awesome GEE Community Datasets](https://samapriya.github.io/awesome-gee-community-datasets/projects/daily_lst/).
414 | """
415 | )
416 |
417 | MODIS_options = ["Daytime (1:30 pm)", "Nighttime (1:30 am)"]
418 | MODIS_option = st.selectbox("Select a MODIS dataset:", MODIS_options)
419 | if MODIS_option == "Daytime (1:30 pm)":
420 | st.session_state["ee_asset_id"] = (
421 | "projects/sat-io/open-datasets/gap-filled-lst/gf_day_1km"
422 | )
423 | else:
424 | st.session_state["ee_asset_id"] = (
425 | "projects/sat-io/open-datasets/gap-filled-lst/gf_night_1km"
426 | )
427 |
428 | palette_options = st.selectbox(
429 | "Color palette",
430 | cm.list_colormaps(),
431 | index=90,
432 | )
433 | palette_values = cm.get_palette(palette_options, 15)
434 | palette = st.text_area(
435 | "Enter a custom palette:",
436 | palette_values,
437 | )
438 | st.write(cm.plot_colormap(cmap=palette_options, return_fig=True))
439 | st.session_state["palette"] = json.loads(palette.replace("'", '"'))
440 | elif collection == "MODIS Ocean Color SMI":
441 | with st.expander("Show dataset details", False):
442 | st.markdown(
443 | """
444 | See the [Earth Engine Data Catalog](https://developers.google.com/earth-engine/datasets/catalog/NASA_OCEANDATA_MODIS-Aqua_L3SMI).
445 | """
446 | )
447 |
448 | MODIS_options = ["Aqua", "Terra"]
449 | MODIS_option = st.selectbox("Select a satellite:", MODIS_options)
450 | st.session_state["ee_asset_id"] = MODIS_option
451 | # if MODIS_option == "Daytime (1:30 pm)":
452 | # st.session_state[
453 | # "ee_asset_id"
454 | # ] = "projects/sat-io/open-datasets/gap-filled-lst/gf_day_1km"
455 | # else:
456 | # st.session_state[
457 | # "ee_asset_id"
458 | # ] = "projects/sat-io/open-datasets/gap-filled-lst/gf_night_1km"
459 |
460 | band_dict = {
461 | "Chlorophyll a concentration": "chlor_a",
462 | "Normalized fluorescence line height": "nflh",
463 | "Particulate organic carbon": "poc",
464 | "Sea surface temperature": "sst",
465 | "Remote sensing reflectance at band 412nm": "Rrs_412",
466 | "Remote sensing reflectance at band 443nm": "Rrs_443",
467 | "Remote sensing reflectance at band 469nm": "Rrs_469",
468 | "Remote sensing reflectance at band 488nm": "Rrs_488",
469 | "Remote sensing reflectance at band 531nm": "Rrs_531",
470 | "Remote sensing reflectance at band 547nm": "Rrs_547",
471 | "Remote sensing reflectance at band 555nm": "Rrs_555",
472 | "Remote sensing reflectance at band 645nm": "Rrs_645",
473 | "Remote sensing reflectance at band 667nm": "Rrs_667",
474 | "Remote sensing reflectance at band 678nm": "Rrs_678",
475 | }
476 |
477 | band_options = list(band_dict.keys())
478 | band = st.selectbox(
479 | "Select a band",
480 | band_options,
481 | band_options.index("Sea surface temperature"),
482 | )
483 | st.session_state["band"] = band_dict[band]
484 |
485 | colors = cm.list_colormaps()
486 | palette_options = st.selectbox(
487 | "Color palette",
488 | colors,
489 | index=colors.index("coolwarm"),
490 | )
491 | palette_values = cm.get_palette(palette_options, 15)
492 | palette = st.text_area(
493 | "Enter a custom palette:",
494 | palette_values,
495 | )
496 | st.write(cm.plot_colormap(cmap=palette_options, return_fig=True))
497 | st.session_state["palette"] = json.loads(palette.replace("'", '"'))
498 |
499 | sample_roi = st.selectbox(
500 | "Select a sample ROI or upload a GeoJSON file:",
501 | roi_options,
502 | index=0,
503 | )
504 |
505 | add_outline = st.checkbox(
506 | "Overlay an administrative boundary on timelapse", False
507 | )
508 |
509 | if add_outline:
510 |
511 | with st.expander("Customize administrative boundary", True):
512 |
513 | overlay_options = {
514 | "User-defined": None,
515 | "Continents": "continents",
516 | "Countries": "countries",
517 | "US States": "us_states",
518 | "China": "china",
519 | }
520 |
521 | overlay = st.selectbox(
522 | "Select an administrative boundary:",
523 | list(overlay_options.keys()),
524 | index=2,
525 | )
526 |
527 | overlay_data = overlay_options[overlay]
528 |
529 | if overlay_data is None:
530 | overlay_data = st.text_input(
531 | "Enter an HTTP URL to a GeoJSON file or an ee.FeatureCollection asset id:",
532 | "https://raw.githubusercontent.com/giswqs/geemap/master/examples/data/countries.geojson",
533 | )
534 |
535 | overlay_color = st.color_picker(
536 | "Select a color for the administrative boundary:", "#000000"
537 | )
538 | overlay_width = st.slider(
539 | "Select a line width for the administrative boundary:", 1, 20, 1
540 | )
541 | overlay_opacity = st.slider(
542 | "Select an opacity for the administrative boundary:",
543 | 0.0,
544 | 1.0,
545 | 1.0,
546 | 0.05,
547 | )
548 | else:
549 | overlay_data = None
550 | overlay_color = "black"
551 | overlay_width = 1
552 | overlay_opacity = 1
553 |
554 | with row1_col1:
555 |
556 | with st.expander(
557 | "Steps: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Expand this tab to see a demo π"
558 | ):
559 | video_empty = st.empty()
560 |
561 | data = st.file_uploader(
562 | "Upload a GeoJSON file to use as an ROI. Customize timelapse parameters and then click the Submit button ππ",
563 | type=["geojson", "kml", "zip"],
564 | )
565 |
566 | crs = "epsg:4326"
567 | if sample_roi == "Uploaded GeoJSON":
568 | if data is None:
569 | # st.info(
570 | # "Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click Submit button"
571 | # )
572 | if collection in [
573 | "Geostationary Operational Environmental Satellites (GOES)",
574 | "USDA National Agriculture Imagery Program (NAIP)",
575 | ] and (not keyword):
576 | m.set_center(-100, 40, 3)
577 | # else:
578 | # m.set_center(4.20, 18.63, zoom=2)
579 | else:
580 | if collection in [
581 | "Landsat TM-ETM-OLI Surface Reflectance",
582 | "Sentinel-2 MSI Surface Reflectance",
583 | ]:
584 | gdf = gpd.GeoDataFrame(
585 | index=[0], crs=crs, geometry=[landsat_rois[sample_roi]]
586 | )
587 | elif (
588 | collection
589 | == "Geostationary Operational Environmental Satellites (GOES)"
590 | ):
591 | gdf = gpd.GeoDataFrame(
592 | index=[0], crs=crs, geometry=[goes_rois[sample_roi]["region"]]
593 | )
594 | elif collection == "MODIS Vegetation Indices (NDVI/EVI) 16-Day Global 1km":
595 | gdf = gpd.GeoDataFrame(
596 | index=[0], crs=crs, geometry=[modis_rois[sample_roi]]
597 | )
598 |
599 | if sample_roi != "Uploaded GeoJSON":
600 |
601 | if collection in [
602 | "Landsat TM-ETM-OLI Surface Reflectance",
603 | "Sentinel-2 MSI Surface Reflectance",
604 | ]:
605 | gdf = gpd.GeoDataFrame(
606 | index=[0], crs=crs, geometry=[landsat_rois[sample_roi]]
607 | )
608 | elif (
609 | collection
610 | == "Geostationary Operational Environmental Satellites (GOES)"
611 | ):
612 | gdf = gpd.GeoDataFrame(
613 | index=[0], crs=crs, geometry=[goes_rois[sample_roi]["region"]]
614 | )
615 | elif collection in [
616 | "MODIS Vegetation Indices (NDVI/EVI) 16-Day Global 1km",
617 | "MODIS Gap filled Land Surface Temperature Daily",
618 | ]:
619 | gdf = gpd.GeoDataFrame(
620 | index=[0], crs=crs, geometry=[modis_rois[sample_roi]]
621 | )
622 | elif collection == "MODIS Ocean Color SMI":
623 | gdf = gpd.GeoDataFrame(
624 | index=[0], crs=crs, geometry=[ocean_rois[sample_roi]]
625 | )
626 | try:
627 | st.session_state["roi"] = geemap.gdf_to_ee(gdf, geodesic=False)
628 | except Exception as e:
629 | st.error(e)
630 | st.error("Please draw another ROI and try again.")
631 | return
632 | m.add_gdf(gdf, "ROI")
633 |
634 | elif data:
635 | gdf = uploaded_file_to_gdf(data)
636 | try:
637 | st.session_state["roi"] = geemap.gdf_to_ee(gdf, geodesic=False)
638 | m.add_gdf(gdf, "ROI")
639 | except Exception as e:
640 | st.error(e)
641 | st.error("Please draw another ROI and try again.")
642 | return
643 |
644 | m.to_streamlit(height=600)
645 |
646 | with row1_col2:
647 |
648 | if collection in [
649 | "Landsat TM-ETM-OLI Surface Reflectance",
650 | "Sentinel-2 MSI Surface Reflectance",
651 | ]:
652 |
653 | if collection == "Landsat TM-ETM-OLI Surface Reflectance":
654 | sensor_start_year = 1984
655 | timelapse_title = "Landsat Timelapse"
656 | timelapse_speed = 5
657 | elif collection == "Sentinel-2 MSI Surface Reflectance":
658 | sensor_start_year = 2015
659 | timelapse_title = "Sentinel-2 Timelapse"
660 | timelapse_speed = 5
661 | video_empty.video("https://youtu.be/VVRK_-dEjR4")
662 |
663 | with st.form("submit_landsat_form"):
664 |
665 | roi = None
666 | if st.session_state.get("roi") is not None:
667 | roi = st.session_state.get("roi")
668 | out_gif = geemap.temp_file_path(".gif")
669 |
670 | title = st.text_input(
671 | "Enter a title to show on the timelapse: ", timelapse_title
672 | )
673 | RGB = st.selectbox(
674 | "Select an RGB band combination:",
675 | [
676 | "Red/Green/Blue",
677 | "NIR/Red/Green",
678 | "SWIR2/SWIR1/NIR",
679 | "NIR/SWIR1/Red",
680 | "SWIR2/NIR/Red",
681 | "SWIR2/SWIR1/Red",
682 | "SWIR1/NIR/Blue",
683 | "NIR/SWIR1/Blue",
684 | "SWIR2/NIR/Green",
685 | "SWIR1/NIR/Red",
686 | "SWIR2/NIR/SWIR1",
687 | "SWIR1/NIR/SWIR2",
688 | ],
689 | index=9,
690 | )
691 |
692 | frequency = st.selectbox(
693 | "Select a temporal frequency:",
694 | ["year", "quarter", "month"],
695 | index=0,
696 | )
697 |
698 | with st.expander("Customize timelapse"):
699 |
700 | speed = st.slider("Frames per second:", 1, 30, timelapse_speed)
701 | dimensions = st.slider(
702 | "Maximum dimensions (Width*Height) in pixels", 768, 2000, 768
703 | )
704 | progress_bar_color = st.color_picker(
705 | "Progress bar color:", "#0000ff"
706 | )
707 | years = st.slider(
708 | "Start and end year:",
709 | sensor_start_year,
710 | today.year,
711 | (sensor_start_year, today.year),
712 | )
713 | months = st.slider("Start and end month:", 1, 12, (1, 12))
714 | font_size = st.slider("Font size:", 10, 50, 30)
715 | font_color = st.color_picker("Font color:", "#ffffff")
716 | apply_fmask = st.checkbox(
717 | "Apply fmask (remove clouds, shadows, snow)", True
718 | )
719 | font_type = st.selectbox(
720 | "Select the font type for the title:",
721 | ["arial.ttf", "alibaba.otf"],
722 | index=0,
723 | )
724 | fading = st.slider(
725 | "Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
726 | )
727 | mp4 = st.checkbox("Save timelapse as MP4", True)
728 |
729 | empty_text = st.empty()
730 | empty_image = st.empty()
731 | empty_fire_image = st.empty()
732 | empty_video = st.container()
733 | submitted = st.form_submit_button("Submit")
734 | if submitted:
735 |
736 | if sample_roi == "Uploaded GeoJSON" and data is None:
737 | empty_text.warning(
738 | "Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
739 | )
740 | else:
741 |
742 | empty_text.text("Computing... Please wait...")
743 |
744 | start_year = years[0]
745 | end_year = years[1]
746 | start_date = str(months[0]).zfill(2) + "-01"
747 | end_date = str(months[1]).zfill(2) + "-30"
748 | bands = RGB.split("/")
749 |
750 | try:
751 | if collection == "Landsat TM-ETM-OLI Surface Reflectance":
752 | out_gif = geemap.landsat_timelapse(
753 | roi=roi,
754 | out_gif=out_gif,
755 | start_year=start_year,
756 | end_year=end_year,
757 | start_date=start_date,
758 | end_date=end_date,
759 | bands=bands,
760 | apply_fmask=apply_fmask,
761 | frames_per_second=speed,
762 | # dimensions=dimensions,
763 | dimensions=768,
764 | overlay_data=overlay_data,
765 | overlay_color=overlay_color,
766 | overlay_width=overlay_width,
767 | overlay_opacity=overlay_opacity,
768 | frequency=frequency,
769 | date_format=None,
770 | title=title,
771 | title_xy=("2%", "90%"),
772 | add_text=True,
773 | text_xy=("2%", "2%"),
774 | text_sequence=None,
775 | font_type=font_type,
776 | font_size=font_size,
777 | font_color=font_color,
778 | add_progress_bar=True,
779 | progress_bar_color=progress_bar_color,
780 | progress_bar_height=5,
781 | loop=0,
782 | mp4=mp4,
783 | fading=fading,
784 | )
785 | elif collection == "Sentinel-2 MSI Surface Reflectance":
786 | out_gif = geemap.sentinel2_timelapse(
787 | roi=roi,
788 | out_gif=out_gif,
789 | start_year=start_year,
790 | end_year=end_year,
791 | start_date=start_date,
792 | end_date=end_date,
793 | bands=bands,
794 | apply_fmask=apply_fmask,
795 | frames_per_second=speed,
796 | dimensions=768,
797 | # dimensions=dimensions,
798 | overlay_data=overlay_data,
799 | overlay_color=overlay_color,
800 | overlay_width=overlay_width,
801 | overlay_opacity=overlay_opacity,
802 | frequency=frequency,
803 | date_format=None,
804 | title=title,
805 | title_xy=("2%", "90%"),
806 | add_text=True,
807 | text_xy=("2%", "2%"),
808 | text_sequence=None,
809 | font_type=font_type,
810 | font_size=font_size,
811 | font_color=font_color,
812 | add_progress_bar=True,
813 | progress_bar_color=progress_bar_color,
814 | progress_bar_height=5,
815 | loop=0,
816 | mp4=mp4,
817 | fading=fading,
818 | )
819 | except:
820 | empty_text.error(
821 |                                 "An error occurred while computing the timelapse. You probably requested too much data. Try reducing the ROI or timespan."
822 | )
823 | st.stop()
824 |
825 | if out_gif is not None and os.path.exists(out_gif):
826 |
827 | empty_text.text(
828 | "Right click the GIF to save it to your computerπ"
829 | )
830 | empty_image.image(out_gif)
831 |
832 | out_mp4 = out_gif.replace(".gif", ".mp4")
833 | if mp4 and os.path.exists(out_mp4):
834 | with empty_video:
835 | st.text(
836 | "Right click the MP4 to save it to your computerπ"
837 | )
838 | st.video(out_gif.replace(".gif", ".mp4"))
839 |
840 | else:
841 | empty_text.error(
842 | "Something went wrong. You probably requested too much data. Try reducing the ROI or timespan."
843 | )
844 |
845 | elif collection == "Geostationary Operational Environmental Satellites (GOES)":
846 |
847 | video_empty.video("https://youtu.be/16fA2QORG4A")
848 |
849 | with st.form("submit_goes_form"):
850 |
851 | roi = None
852 | if st.session_state.get("roi") is not None:
853 | roi = st.session_state.get("roi")
854 | out_gif = geemap.temp_file_path(".gif")
855 |
856 | satellite = st.selectbox("Select a satellite:", ["GOES-17", "GOES-16"])
857 | earliest_date = datetime.date(2017, 7, 10)
858 | latest_date = datetime.date.today()
859 |
860 | if sample_roi == "Uploaded GeoJSON":
861 | roi_start_date = today - datetime.timedelta(days=2)
862 | roi_end_date = today - datetime.timedelta(days=1)
863 | roi_start_time = datetime.time(14, 00)
864 | roi_end_time = datetime.time(1, 00)
865 | else:
866 | roi_start = goes_rois[sample_roi]["start_time"]
867 | roi_end = goes_rois[sample_roi]["end_time"]
868 | roi_start_date = datetime.datetime.strptime(
869 | roi_start[:10], "%Y-%m-%d"
870 | )
871 | roi_end_date = datetime.datetime.strptime(roi_end[:10], "%Y-%m-%d")
872 | roi_start_time = datetime.time(
873 | int(roi_start[11:13]), int(roi_start[14:16])
874 | )
875 | roi_end_time = datetime.time(
876 | int(roi_end[11:13]), int(roi_end[14:16])
877 | )
878 |
879 | start_date = st.date_input("Select the start date:", roi_start_date)
880 | end_date = st.date_input("Select the end date:", roi_end_date)
881 |
882 | with st.expander("Customize timelapse"):
883 |
884 | add_fire = st.checkbox("Add Fire/Hotspot Characterization", False)
885 |
886 | scan_type = st.selectbox(
887 | "Select a scan type:", ["Full Disk", "CONUS", "Mesoscale"]
888 | )
889 |
890 | start_time = st.time_input(
891 | "Select the start time of the start date:", roi_start_time
892 | )
893 |
894 | end_time = st.time_input(
895 | "Select the end time of the end date:", roi_end_time
896 | )
897 |
898 | start = (
899 | start_date.strftime("%Y-%m-%d")
900 | + "T"
901 | + start_time.strftime("%H:%M:%S")
902 | )
903 | end = (
904 | end_date.strftime("%Y-%m-%d")
905 | + "T"
906 | + end_time.strftime("%H:%M:%S")
907 | )
908 |
909 | speed = st.slider("Frames per second:", 1, 30, 5)
910 | add_progress_bar = st.checkbox("Add a progress bar", True)
911 | progress_bar_color = st.color_picker(
912 | "Progress bar color:", "#0000ff"
913 | )
914 | font_size = st.slider("Font size:", 10, 50, 20)
915 | font_color = st.color_picker("Font color:", "#ffffff")
916 | fading = st.slider(
917 | "Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
918 | )
919 | mp4 = st.checkbox("Save timelapse as MP4", True)
920 |
921 | empty_text = st.empty()
922 | empty_image = st.empty()
923 | empty_video = st.container()
924 | empty_fire_text = st.empty()
925 | empty_fire_image = st.empty()
926 |
927 | submitted = st.form_submit_button("Submit")
928 | if submitted:
929 | if sample_roi == "Uploaded GeoJSON" and data is None:
930 | empty_text.warning(
931 | "Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
932 | )
933 | else:
934 | empty_text.text("Computing... Please wait...")
935 |
936 | geemap.goes_timelapse(
937 | roi,
938 | out_gif,
939 | start_date=start,
940 | end_date=end,
941 | data=satellite,
942 | scan=scan_type.replace(" ", "_").lower(),
943 | dimensions=768,
944 | framesPerSecond=speed,
945 | date_format="YYYY-MM-dd HH:mm",
946 | xy=("3%", "3%"),
947 | text_sequence=None,
948 | font_type="arial.ttf",
949 | font_size=font_size,
950 | font_color=font_color,
951 | add_progress_bar=add_progress_bar,
952 | progress_bar_color=progress_bar_color,
953 | progress_bar_height=5,
954 | loop=0,
955 | overlay_data=overlay_data,
956 | overlay_color=overlay_color,
957 | overlay_width=overlay_width,
958 | overlay_opacity=overlay_opacity,
959 | mp4=mp4,
960 | fading=fading,
961 | )
962 |
963 | if out_gif is not None and os.path.exists(out_gif):
964 | empty_text.text(
965 | "Right click the GIF to save it to your computerπ"
966 | )
967 | empty_image.image(out_gif)
968 |
969 | out_mp4 = out_gif.replace(".gif", ".mp4")
970 | if mp4 and os.path.exists(out_mp4):
971 | with empty_video:
972 | st.text(
973 | "Right click the MP4 to save it to your computerπ"
974 | )
975 | st.video(out_gif.replace(".gif", ".mp4"))
976 |
977 | if add_fire:
978 | out_fire_gif = geemap.temp_file_path(".gif")
979 | empty_fire_text.text(
980 | "Delineating Fire Hotspot... Please wait..."
981 | )
982 | geemap.goes_fire_timelapse(
983 | out_fire_gif,
984 | start_date=start,
985 | end_date=end,
986 | data=satellite,
987 | scan=scan_type.replace(" ", "_").lower(),
988 | region=roi,
989 | dimensions=768,
990 | framesPerSecond=speed,
991 | date_format="YYYY-MM-dd HH:mm",
992 | xy=("3%", "3%"),
993 | text_sequence=None,
994 | font_type="arial.ttf",
995 | font_size=font_size,
996 | font_color=font_color,
997 | add_progress_bar=add_progress_bar,
998 | progress_bar_color=progress_bar_color,
999 | progress_bar_height=5,
1000 | loop=0,
1001 | )
1002 | if os.path.exists(out_fire_gif):
1003 | empty_fire_image.image(out_fire_gif)
1004 | else:
1005 | empty_text.text(
1006 |                                 "Something went wrong: either the ROI is too big or there are no data available for the specified date range. Please try a smaller ROI or a different date range."
1007 | )
1008 |
1009 | elif collection == "MODIS Vegetation Indices (NDVI/EVI) 16-Day Global 1km":
1010 |
1011 | video_empty.video("https://youtu.be/16fA2QORG4A")
1012 |
1013 | satellite = st.selectbox("Select a satellite:", ["Terra", "Aqua"])
1014 | band = st.selectbox("Select a band:", ["NDVI", "EVI"])
1015 |
1016 | with st.form("submit_modis_form"):
1017 |
1018 | roi = None
1019 | if st.session_state.get("roi") is not None:
1020 | roi = st.session_state.get("roi")
1021 | out_gif = geemap.temp_file_path(".gif")
1022 |
1023 | with st.expander("Customize timelapse"):
1024 |
1025 | start = st.date_input(
1026 | "Select a start date:", datetime.date(2000, 2, 8)
1027 | )
1028 | end = st.date_input("Select an end date:", datetime.date.today())
1029 |
1030 | start_date = start.strftime("%Y-%m-%d")
1031 | end_date = end.strftime("%Y-%m-%d")
1032 |
1033 | speed = st.slider("Frames per second:", 1, 30, 5)
1034 | add_progress_bar = st.checkbox("Add a progress bar", True)
1035 | progress_bar_color = st.color_picker(
1036 | "Progress bar color:", "#0000ff"
1037 | )
1038 | font_size = st.slider("Font size:", 10, 50, 20)
1039 | font_color = st.color_picker("Font color:", "#ffffff")
1040 |
1041 | font_type = st.selectbox(
1042 | "Select the font type for the title:",
1043 | ["arial.ttf", "alibaba.otf"],
1044 | index=0,
1045 | )
1046 | fading = st.slider(
1047 | "Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
1048 | )
1049 | mp4 = st.checkbox("Save timelapse as MP4", True)
1050 |
1051 | empty_text = st.empty()
1052 | empty_image = st.empty()
1053 | empty_video = st.container()
1054 |
1055 | submitted = st.form_submit_button("Submit")
1056 | if submitted:
1057 | if sample_roi == "Uploaded GeoJSON" and data is None:
1058 | empty_text.warning(
1059 | "Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
1060 | )
1061 | else:
1062 |
1063 | empty_text.text("Computing... Please wait...")
1064 |
1065 | geemap.modis_ndvi_timelapse(
1066 | roi,
1067 | out_gif,
1068 | satellite,
1069 | band,
1070 | start_date,
1071 | end_date,
1072 | 768,
1073 | speed,
1074 | overlay_data=overlay_data,
1075 | overlay_color=overlay_color,
1076 | overlay_width=overlay_width,
1077 | overlay_opacity=overlay_opacity,
1078 | mp4=mp4,
1079 | fading=fading,
1080 | )
1081 |
1082 | geemap.reduce_gif_size(out_gif)
1083 |
1084 | empty_text.text(
1085 | "Right click the GIF to save it to your computerπ"
1086 | )
1087 | empty_image.image(out_gif)
1088 |
1089 | out_mp4 = out_gif.replace(".gif", ".mp4")
1090 | if mp4 and os.path.exists(out_mp4):
1091 | with empty_video:
1092 | st.text(
1093 | "Right click the MP4 to save it to your computerπ"
1094 | )
1095 | st.video(out_gif.replace(".gif", ".mp4"))
1096 |
1097 | elif collection == "Any Earth Engine ImageCollection":
1098 |
1099 | with st.form("submit_ts_form"):
1100 | with st.expander("Customize timelapse"):
1101 |
1102 | title = st.text_input(
1103 | "Enter a title to show on the timelapse: ", "Timelapse"
1104 | )
1105 | start_date = st.date_input(
1106 | "Select the start date:", datetime.date(2020, 1, 1)
1107 | )
1108 | end_date = st.date_input(
1109 | "Select the end date:", datetime.date.today()
1110 | )
1111 | frequency = st.selectbox(
1112 | "Select a temporal frequency:",
1113 | ["year", "quarter", "month", "day", "hour", "minute", "second"],
1114 | index=0,
1115 | )
1116 | reducer = st.selectbox(
1117 | "Select a reducer for aggregating data:",
1118 | ["median", "mean", "min", "max", "sum", "variance", "stdDev"],
1119 | index=0,
1120 | )
1121 | data_format = st.selectbox(
1122 | "Select a date format to show on the timelapse:",
1123 | [
1124 | "YYYY-MM-dd",
1125 | "YYYY",
1126 |                             "YYYY-MM",
1127 | "YYYY-MM-dd HH:mm",
1128 | "YYYY-MM-dd HH:mm:ss",
1129 | "HH:mm",
1130 | "HH:mm:ss",
1131 | "w",
1132 | "M",
1133 | "d",
1134 | "D",
1135 | ],
1136 | index=0,
1137 | )
1138 |
1139 | speed = st.slider("Frames per second:", 1, 30, 5)
1140 | add_progress_bar = st.checkbox("Add a progress bar", True)
1141 | progress_bar_color = st.color_picker(
1142 | "Progress bar color:", "#0000ff"
1143 | )
1144 | font_size = st.slider("Font size:", 10, 50, 30)
1145 | font_color = st.color_picker("Font color:", "#ffffff")
1146 | font_type = st.selectbox(
1147 | "Select the font type for the title:",
1148 | ["arial.ttf", "alibaba.otf"],
1149 | index=0,
1150 | )
1151 | fading = st.slider(
1152 | "Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
1153 | )
1154 | mp4 = st.checkbox("Save timelapse as MP4", True)
1155 |
1156 | empty_text = st.empty()
1157 | empty_image = st.empty()
1158 | empty_video = st.container()
1159 | empty_fire_image = st.empty()
1160 |
1161 | roi = None
1162 | if st.session_state.get("roi") is not None:
1163 | roi = st.session_state.get("roi")
1164 | out_gif = geemap.temp_file_path(".gif")
1165 |
1166 | submitted = st.form_submit_button("Submit")
1167 | if submitted:
1168 |
1169 | if sample_roi == "Uploaded GeoJSON" and data is None:
1170 | empty_text.warning(
1171 | "Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
1172 | )
1173 | else:
1174 |
1175 | empty_text.text("Computing... Please wait...")
1176 | try:
1177 | geemap.create_timelapse(
1178 | st.session_state.get("ee_asset_id"),
1179 | start_date=start_date.strftime("%Y-%m-%d"),
1180 | end_date=end_date.strftime("%Y-%m-%d"),
1181 | region=roi,
1182 | frequency=frequency,
1183 | reducer=reducer,
1184 | date_format=data_format,
1185 | out_gif=out_gif,
1186 | bands=st.session_state.get("bands"),
1187 | palette=st.session_state.get("palette"),
1188 | vis_params=st.session_state.get("vis_params"),
1189 | dimensions=768,
1190 | frames_per_second=speed,
1191 | crs="EPSG:3857",
1192 | overlay_data=overlay_data,
1193 | overlay_color=overlay_color,
1194 | overlay_width=overlay_width,
1195 | overlay_opacity=overlay_opacity,
1196 | title=title,
1197 | title_xy=("2%", "90%"),
1198 | add_text=True,
1199 | text_xy=("2%", "2%"),
1200 | text_sequence=None,
1201 | font_type=font_type,
1202 | font_size=font_size,
1203 | font_color=font_color,
1204 | add_progress_bar=add_progress_bar,
1205 | progress_bar_color=progress_bar_color,
1206 | progress_bar_height=5,
1207 | loop=0,
1208 | mp4=mp4,
1209 | fading=fading,
1210 | )
1211 | except:
1212 | empty_text.error(
1213 | "An error occurred while computing the timelapse. You probably requested too much data. Try reducing the ROI or timespan."
1214 | )
1215 |
1216 | empty_text.text(
1217 | "Right click the GIF to save it to your computerπ"
1218 | )
1219 | empty_image.image(out_gif)
1220 |
1221 | out_mp4 = out_gif.replace(".gif", ".mp4")
1222 | if mp4 and os.path.exists(out_mp4):
1223 | with empty_video:
1224 | st.text(
1225 | "Right click the MP4 to save it to your computerπ"
1226 | )
1227 | st.video(out_gif.replace(".gif", ".mp4"))
1228 |
1229 | elif collection in [
1230 | "MODIS Gap filled Land Surface Temperature Daily",
1231 | "MODIS Ocean Color SMI",
1232 | ]:
1233 |
1234 | with st.form("submit_ts_form"):
1235 | with st.expander("Customize timelapse"):
1236 |
1237 | title = st.text_input(
1238 | "Enter a title to show on the timelapse: ",
1239 | "Surface Temperature",
1240 | )
1241 | start_date = st.date_input(
1242 | "Select the start date:", datetime.date(2018, 1, 1)
1243 | )
1244 | end_date = st.date_input(
1245 | "Select the end date:", datetime.date(2020, 12, 31)
1246 | )
1247 | frequency = st.selectbox(
1248 | "Select a temporal frequency:",
1249 | ["year", "quarter", "month", "week", "day"],
1250 | index=2,
1251 | )
1252 | reducer = st.selectbox(
1253 | "Select a reducer for aggregating data:",
1254 | ["median", "mean", "min", "max", "sum", "variance", "stdDev"],
1255 | index=0,
1256 | )
1257 |
1258 | vis_params = st.text_area(
1259 | "Enter visualization parameters",
1260 | "",
1261 |                         help="Enter a string in the format of a dictionary, such as {'min': 23, 'max': 32}",
1262 | )
1263 |
1264 | speed = st.slider("Frames per second:", 1, 30, 5)
1265 | add_progress_bar = st.checkbox("Add a progress bar", True)
1266 | progress_bar_color = st.color_picker(
1267 | "Progress bar color:", "#0000ff"
1268 | )
1269 | font_size = st.slider("Font size:", 10, 50, 30)
1270 | font_color = st.color_picker("Font color:", "#ffffff")
1271 | font_type = st.selectbox(
1272 | "Select the font type for the title:",
1273 | ["arial.ttf", "alibaba.otf"],
1274 | index=0,
1275 | )
1276 | add_colorbar = st.checkbox("Add a colorbar", True)
1277 | colorbar_label = st.text_input(
1278 | "Enter the colorbar label:", "Surface Temperature (Β°C)"
1279 | )
1280 | fading = st.slider(
1281 | "Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
1282 | )
1283 | mp4 = st.checkbox("Save timelapse as MP4", True)
1284 |
1285 | empty_text = st.empty()
1286 | empty_image = st.empty()
1287 | empty_video = st.container()
1288 |
1289 | roi = None
1290 | if st.session_state.get("roi") is not None:
1291 | roi = st.session_state.get("roi")
1292 | out_gif = geemap.temp_file_path(".gif")
1293 |
1294 | submitted = st.form_submit_button("Submit")
1295 | if submitted:
1296 |
1297 | if sample_roi == "Uploaded GeoJSON" and data is None:
1298 | empty_text.warning(
1299 | "Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
1300 | )
1301 | else:
1302 |
1303 | empty_text.text("Computing... Please wait...")
1304 | try:
1305 | if (
1306 | collection
1307 | == "MODIS Gap filled Land Surface Temperature Daily"
1308 | ):
1309 | out_gif = geemap.create_timelapse(
1310 | st.session_state.get("ee_asset_id"),
1311 | start_date=start_date.strftime("%Y-%m-%d"),
1312 | end_date=end_date.strftime("%Y-%m-%d"),
1313 | region=roi,
1314 | bands=None,
1315 | frequency=frequency,
1316 | reducer=reducer,
1317 | date_format=None,
1318 | out_gif=out_gif,
1319 | palette=st.session_state.get("palette"),
1320 | vis_params=None,
1321 | dimensions=768,
1322 | frames_per_second=speed,
1323 | crs="EPSG:3857",
1324 | overlay_data=overlay_data,
1325 | overlay_color=overlay_color,
1326 | overlay_width=overlay_width,
1327 | overlay_opacity=overlay_opacity,
1328 | title=title,
1329 | title_xy=("2%", "90%"),
1330 | add_text=True,
1331 | text_xy=("2%", "2%"),
1332 | text_sequence=None,
1333 | font_type=font_type,
1334 | font_size=font_size,
1335 | font_color=font_color,
1336 | add_progress_bar=add_progress_bar,
1337 | progress_bar_color=progress_bar_color,
1338 | progress_bar_height=5,
1339 | add_colorbar=add_colorbar,
1340 | colorbar_label=colorbar_label,
1341 | loop=0,
1342 | mp4=mp4,
1343 | fading=fading,
1344 | )
1345 | elif collection == "MODIS Ocean Color SMI":
1346 | if vis_params.startswith("{") and vis_params.endswith(
1347 | "}"
1348 | ):
1349 | vis_params = json.loads(
1350 | vis_params.replace("'", '"')
1351 | )
1352 | else:
1353 | vis_params = None
1354 | out_gif = geemap.modis_ocean_color_timelapse(
1355 | st.session_state.get("ee_asset_id"),
1356 | start_date=start_date.strftime("%Y-%m-%d"),
1357 | end_date=end_date.strftime("%Y-%m-%d"),
1358 | region=roi,
1359 | bands=st.session_state["band"],
1360 | frequency=frequency,
1361 | reducer=reducer,
1362 | date_format=None,
1363 | out_gif=out_gif,
1364 | palette=st.session_state.get("palette"),
1365 | vis_params=vis_params,
1366 | dimensions=768,
1367 | frames_per_second=speed,
1368 | crs="EPSG:3857",
1369 | overlay_data=overlay_data,
1370 | overlay_color=overlay_color,
1371 | overlay_width=overlay_width,
1372 | overlay_opacity=overlay_opacity,
1373 | title=title,
1374 | title_xy=("2%", "90%"),
1375 | add_text=True,
1376 | text_xy=("2%", "2%"),
1377 | text_sequence=None,
1378 | font_type=font_type,
1379 | font_size=font_size,
1380 | font_color=font_color,
1381 | add_progress_bar=add_progress_bar,
1382 | progress_bar_color=progress_bar_color,
1383 | progress_bar_height=5,
1384 | add_colorbar=add_colorbar,
1385 | colorbar_label=colorbar_label,
1386 | loop=0,
1387 | mp4=mp4,
1388 | fading=fading,
1389 | )
1390 | except:
1391 | empty_text.error(
1392 | "Something went wrong. You probably requested too much data. Try reducing the ROI or timespan."
1393 | )
1394 |
1395 | if out_gif is not None and os.path.exists(out_gif):
1396 |
1397 | geemap.reduce_gif_size(out_gif)
1398 |
1399 | empty_text.text(
1400 | "Right click the GIF to save it to your computerπ"
1401 | )
1402 | empty_image.image(out_gif)
1403 |
1404 | out_mp4 = out_gif.replace(".gif", ".mp4")
1405 | if mp4 and os.path.exists(out_mp4):
1406 | with empty_video:
1407 | st.text(
1408 | "Right click the MP4 to save it to your computerπ"
1409 | )
1410 | st.video(out_gif.replace(".gif", ".mp4"))
1411 |
1412 | else:
1413 | st.error(
1414 | "Something went wrong. You probably requested too much data. Try reducing the ROI or timespan."
1415 | )
1416 |
1417 | elif collection == "USDA National Agriculture Imagery Program (NAIP)":
1418 |
1419 | with st.form("submit_naip_form"):
1420 | with st.expander("Customize timelapse"):
1421 |
1422 | title = st.text_input(
1423 | "Enter a title to show on the timelapse: ", "NAIP Timelapse"
1424 | )
1425 |
1426 | years = st.slider(
1427 | "Start and end year:",
1428 | 2003,
1429 | today.year,
1430 | (2003, today.year),
1431 | )
1432 |
1433 | bands = st.selectbox(
1434 | "Select a band combination:", ["N/R/G", "R/G/B"], index=0
1435 | )
1436 |
1437 | speed = st.slider("Frames per second:", 1, 30, 3)
1438 | add_progress_bar = st.checkbox("Add a progress bar", True)
1439 | progress_bar_color = st.color_picker(
1440 | "Progress bar color:", "#0000ff"
1441 | )
1442 | font_size = st.slider("Font size:", 10, 50, 30)
1443 | font_color = st.color_picker("Font color:", "#ffffff")
1444 | font_type = st.selectbox(
1445 | "Select the font type for the title:",
1446 | ["arial.ttf", "alibaba.otf"],
1447 | index=0,
1448 | )
1449 | fading = st.slider(
1450 | "Fading duration (seconds) for each frame:", 0.0, 3.0, 0.0
1451 | )
1452 | mp4 = st.checkbox("Save timelapse as MP4", True)
1453 |
1454 | empty_text = st.empty()
1455 | empty_image = st.empty()
1456 | empty_video = st.container()
1457 | empty_fire_image = st.empty()
1458 |
1459 | roi = None
1460 | if st.session_state.get("roi") is not None:
1461 | roi = st.session_state.get("roi")
1462 | out_gif = geemap.temp_file_path(".gif")
1463 |
1464 | submitted = st.form_submit_button("Submit")
1465 | if submitted:
1466 |
1467 | if sample_roi == "Uploaded GeoJSON" and data is None:
1468 | empty_text.warning(
1469 | "Steps to create a timelapse: Draw a rectangle on the map -> Export it as a GeoJSON -> Upload it back to the app -> Click the Submit button. Alternatively, you can select a sample ROI from the dropdown list."
1470 | )
1471 | else:
1472 |
1473 | empty_text.text("Computing... Please wait...")
1474 | try:
1475 | geemap.naip_timelapse(
1476 | roi,
1477 | years[0],
1478 | years[1],
1479 | out_gif,
1480 | bands=bands.split("/"),
1481 | palette=st.session_state.get("palette"),
1482 | vis_params=None,
1483 | dimensions=768,
1484 | frames_per_second=speed,
1485 | crs="EPSG:3857",
1486 | overlay_data=overlay_data,
1487 | overlay_color=overlay_color,
1488 | overlay_width=overlay_width,
1489 | overlay_opacity=overlay_opacity,
1490 | title=title,
1491 | title_xy=("2%", "90%"),
1492 | add_text=True,
1493 | text_xy=("2%", "2%"),
1494 | text_sequence=None,
1495 | font_type=font_type,
1496 | font_size=font_size,
1497 | font_color=font_color,
1498 | add_progress_bar=add_progress_bar,
1499 | progress_bar_color=progress_bar_color,
1500 | progress_bar_height=5,
1501 | loop=0,
1502 | mp4=mp4,
1503 | fading=fading,
1504 | )
1505 | except:
1506 | empty_text.error(
1507 | "Something went wrong. You either requested too much data or the ROI is outside the U.S."
1508 | )
1509 |
1510 | if out_gif is not None and os.path.exists(out_gif):
1511 |
1512 | empty_text.text(
1513 | "Right click the GIF to save it to your computerπ"
1514 | )
1515 | empty_image.image(out_gif)
1516 |
1517 | out_mp4 = out_gif.replace(".gif", ".mp4")
1518 | if mp4 and os.path.exists(out_mp4):
1519 | with empty_video:
1520 | st.text(
1521 | "Right click the MP4 to save it to your computerπ"
1522 | )
1523 | st.video(out_gif.replace(".gif", ".mp4"))
1524 |
1525 | else:
1526 | st.error(
1527 | "Something went wrong. You either requested too much data or the ROI is outside the U.S."
1528 | )
1529 |
1530 |
1531 | try:
1532 | app()
1533 | except Exception:
1534 | pass
1535 |
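1536 | # The form above ultimately delegates to geemap's timelapse helpers. Below is a
1537 | # minimal sketch of calling geemap.landsat_timelapse directly, outside Streamlit,
1538 | # using only arguments already passed elsewhere in this file. It is kept as a
1539 | # comment so it does not run on page load; the Earth Engine initialization step
1540 | # and the bounding-box ROI are assumptions for illustration.
1541 | #
1542 | # import ee
1543 | # import geemap
1544 | #
1545 | # ee.Initialize()  # assumes Earth Engine credentials are already configured
1546 | # roi = ee.Geometry.BBox(-115.55, 35.80, -113.90, 36.56)  # hypothetical ROI (Las Vegas area)
1547 | # geemap.landsat_timelapse(
1548 | #     roi=roi,
1549 | #     out_gif="landsat_timelapse.gif",
1550 | #     start_year=1984,
1551 | #     end_year=2023,
1552 | #     bands=["SWIR1", "NIR", "Red"],
1553 | #     frequency="year",
1554 | #     frames_per_second=5,
1555 | # )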
--------------------------------------------------------------------------------
/pages/2_π _U.S._Housing.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import os
3 | import pathlib
4 | import requests
5 | import zipfile
6 | import pandas as pd
7 | import pydeck as pdk
8 | import geopandas as gpd
9 | import streamlit as st
10 | import leafmap.colormaps as cm
11 | from leafmap.common import hex_to_rgb
12 |
13 | st.set_page_config(layout="wide")
14 |
15 | st.sidebar.info(
16 | """
17 | - Web App URL: <https://streamlit.gishub.org>
18 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
19 | """
20 | )
21 |
22 | st.sidebar.title("Contact")
23 | st.sidebar.info(
24 | """
25 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
26 | """
27 | )
28 |
29 | STREAMLIT_STATIC_PATH = pathlib.Path(st.__path__[0]) / "static"
30 | # We create a downloads directory within the streamlit static asset directory
31 | # and we write output files to it
32 | DOWNLOADS_PATH = STREAMLIT_STATIC_PATH / "downloads"
33 | if not DOWNLOADS_PATH.is_dir():
34 | DOWNLOADS_PATH.mkdir()
35 |
36 | # Data source: https://www.realtor.com/research/data/
37 | # link_prefix = "https://econdata.s3-us-west-2.amazonaws.com/Reports/"
38 | link_prefix = "https://raw.githubusercontent.com/giswqs/data/main/housing/"
39 |
40 | data_links = {
41 | "weekly": {
42 | "national": link_prefix + "Core/listing_weekly_core_aggregate_by_country.csv",
43 | "metro": link_prefix + "Core/listing_weekly_core_aggregate_by_metro.csv",
44 | },
45 | "monthly_current": {
46 | "national": link_prefix + "Core/RDC_Inventory_Core_Metrics_Country.csv",
47 | "state": link_prefix + "Core/RDC_Inventory_Core_Metrics_State.csv",
48 | "metro": link_prefix + "Core/RDC_Inventory_Core_Metrics_Metro.csv",
49 | "county": link_prefix + "Core/RDC_Inventory_Core_Metrics_County.csv",
50 | "zip": link_prefix + "Core/RDC_Inventory_Core_Metrics_Zip.csv",
51 | },
52 | "monthly_historical": {
53 | "national": link_prefix + "Core/RDC_Inventory_Core_Metrics_Country_History.csv",
54 | "state": link_prefix + "Core/RDC_Inventory_Core_Metrics_State_History.csv",
55 | "metro": link_prefix + "Core/RDC_Inventory_Core_Metrics_Metro_History.csv",
56 | "county": link_prefix + "Core/RDC_Inventory_Core_Metrics_County_History.csv",
57 | "zip": link_prefix + "Core/RDC_Inventory_Core_Metrics_Zip_History.csv",
58 | },
59 | "hotness": {
60 | "metro": link_prefix
61 | + "Hotness/RDC_Inventory_Hotness_Metrics_Metro_History.csv",
62 | "county": link_prefix
63 | + "Hotness/RDC_Inventory_Hotness_Metrics_County_History.csv",
64 | "zip": link_prefix + "Hotness/RDC_Inventory_Hotness_Metrics_Zip_History.csv",
65 | },
66 | }
67 |
68 |
69 | def get_data_columns(df, category, frequency="monthly"):
70 | if frequency == "monthly":
71 | if category.lower() == "county":
72 | del_cols = ["month_date_yyyymm", "county_fips", "county_name"]
73 | elif category.lower() == "state":
74 | del_cols = ["month_date_yyyymm", "state", "state_id"]
75 | elif category.lower() == "national":
76 | del_cols = ["month_date_yyyymm", "country"]
77 | elif category.lower() == "metro":
78 | del_cols = ["month_date_yyyymm", "cbsa_code", "cbsa_title", "HouseholdRank"]
79 | elif category.lower() == "zip":
80 | del_cols = ["month_date_yyyymm", "postal_code", "zip_name", "flag"]
81 | elif frequency == "weekly":
82 | if category.lower() == "national":
83 | del_cols = ["week_end_date", "geo_country"]
84 | elif category.lower() == "metro":
85 | del_cols = ["week_end_date", "cbsa_code", "cbsa_title", "hh_rank"]
86 |
87 | cols = df.columns.values.tolist()
88 |
89 | for col in cols:
90 | if col.strip() in del_cols:
91 | cols.remove(col)
92 | if category.lower() == "metro":
93 | return cols[2:]
94 | else:
95 | return cols[1:]
96 |
97 |
98 | @st.cache_data
99 | def get_inventory_data(url):
100 | df = pd.read_csv(url)
101 | url = url.lower()
102 | if "county" in url:
103 | df["county_fips"] = df["county_fips"].map(str)
104 | df["county_fips"] = df["county_fips"].str.zfill(5)
105 | elif "state" in url:
106 | df["STUSPS"] = df["state_id"].str.upper()
107 | elif "metro" in url:
108 | df["cbsa_code"] = df["cbsa_code"].map(str)
109 | elif "zip" in url:
110 | df["postal_code"] = df["postal_code"].map(str)
111 | df["postal_code"] = df["postal_code"].str.zfill(5)
112 |
113 | if "listing_weekly_core_aggregate_by_country" in url:
114 | columns = get_data_columns(df, "national", "weekly")
115 | for column in columns:
116 | if column != "median_days_on_market_by_day_yy":
117 | df[column] = df[column].str.rstrip("%").astype(float) / 100
118 | if "listing_weekly_core_aggregate_by_metro" in url:
119 | columns = get_data_columns(df, "metro", "weekly")
120 | for column in columns:
121 | if column != "median_days_on_market_by_day_yy":
122 | df[column] = df[column].str.rstrip("%").astype(float) / 100
123 | df["cbsa_code"] = df["cbsa_code"].str[:5]
124 | return df
125 |
126 |
127 | def filter_weekly_inventory(df, week):
128 | df = df[df["week_end_date"] == week]
129 | return df
130 |
131 |
132 | def get_start_end_year(df):
133 | start_year = int(str(df["month_date_yyyymm"].min())[:4])
134 | end_year = int(str(df["month_date_yyyymm"].max())[:4])
135 | return start_year, end_year
136 |
137 |
138 | def get_periods(df):
139 |     return sorted(str(d) for d in set(df["month_date_yyyymm"].tolist()))
140 |
141 |
142 | @st.cache_data
143 | def get_geom_data(category):
144 |
145 | prefix = (
146 | "https://raw.githubusercontent.com/giswqs/streamlit-geospatial/master/data/"
147 | )
148 | links = {
149 | "national": prefix + "us_nation.geojson",
150 | "state": prefix + "us_states.geojson",
151 | "county": prefix + "us_counties.geojson",
152 | "metro": prefix + "us_metro_areas.geojson",
153 | "zip": "https://www2.census.gov/geo/tiger/GENZ2018/shp/cb_2018_us_zcta510_500k.zip",
154 | }
155 |
156 | if category.lower() == "zip":
157 | r = requests.get(links[category])
158 | out_zip = os.path.join(DOWNLOADS_PATH, "cb_2018_us_zcta510_500k.zip")
159 | with open(out_zip, "wb") as code:
160 | code.write(r.content)
161 | zip_ref = zipfile.ZipFile(out_zip, "r")
162 | zip_ref.extractall(DOWNLOADS_PATH)
163 | gdf = gpd.read_file(out_zip.replace("zip", "shp"))
164 | else:
165 | gdf = gpd.read_file(links[category])
166 | return gdf
167 |
168 |
169 | def join_attributes(gdf, df, category):
170 |
171 | new_gdf = None
172 | if category == "county":
173 | new_gdf = gdf.merge(df, left_on="GEOID", right_on="county_fips", how="outer")
174 | elif category == "state":
175 | new_gdf = gdf.merge(df, left_on="STUSPS", right_on="STUSPS", how="outer")
176 | elif category == "national":
177 | if "geo_country" in df.columns.values.tolist():
178 | df["country"] = None
179 | df.loc[0, "country"] = "United States"
180 | new_gdf = gdf.merge(df, left_on="NAME", right_on="country", how="outer")
181 | elif category == "metro":
182 | new_gdf = gdf.merge(df, left_on="CBSAFP", right_on="cbsa_code", how="outer")
183 | elif category == "zip":
184 | new_gdf = gdf.merge(df, left_on="GEOID10", right_on="postal_code", how="outer")
185 | return new_gdf
186 |
187 |
188 | def select_non_null(gdf, col_name):
189 | new_gdf = gdf[~gdf[col_name].isna()]
190 | return new_gdf
191 |
192 |
193 | def select_null(gdf, col_name):
194 | new_gdf = gdf[gdf[col_name].isna()]
195 | return new_gdf
196 |
197 |
198 | def get_data_dict(name):
199 | in_csv = os.path.join(os.getcwd(), "data/realtor_data_dict.csv")
200 | df = pd.read_csv(in_csv)
201 | label = list(df[df["Name"] == name]["Label"])[0]
202 | desc = list(df[df["Name"] == name]["Description"])[0]
203 | return label, desc
204 |
205 |
206 | def get_weeks(df):
207 | seq = list(set(df[~df["week_end_date"].isnull()]["week_end_date"].tolist()))
208 | weeks = [
209 | datetime.date(int(d.split("/")[2]), int(d.split("/")[0]), int(d.split("/")[1]))
210 | for d in seq
211 | ]
212 | weeks.sort()
213 | return weeks
214 |
215 |
216 | def get_saturday(in_date):
217 |     idx = (in_date.weekday() + 1) % 7  # day of week with Sunday=0 ... Saturday=6
218 |     sat = in_date + datetime.timedelta(6 - idx)  # Saturday ending the Sun-Sat week of in_date
219 |     return sat
220 |
221 |
222 | def app():
223 |
224 | st.title("U.S. Real Estate Data and Market Trends")
225 | st.markdown(
226 | """**Introduction:** This interactive dashboard is designed for visualizing U.S. real estate data and market trends at multiple levels (i.e., national,
227 | state, county, and metro). The data sources include [Real Estate Data](https://www.realtor.com/research/data) from realtor.com and
228 |     [Cartographic Boundary Files](https://www.census.gov/geographies/mapping-files/time-series/geo/carto-boundary-file.html) from the U.S. Census Bureau.
229 | Several open-source packages are used to process the data and generate the visualizations, e.g., [streamlit](https://streamlit.io),
230 | [geopandas](https://geopandas.org), [leafmap](https://leafmap.org), and [pydeck](https://deckgl.readthedocs.io).
231 | """
232 | )
233 |
234 | with st.expander("See a demo"):
235 | st.image("https://i.imgur.com/Z3dk6Tr.gif")
236 |
237 | row1_col1, row1_col2, row1_col3, row1_col4, row1_col5 = st.columns(
238 | [0.6, 0.8, 0.6, 1.4, 2]
239 | )
240 | with row1_col1:
241 | frequency = st.selectbox("Monthly/weekly data", ["Monthly", "Weekly"])
242 | with row1_col2:
243 | types = ["Current month data", "Historical data"]
244 | if frequency == "Weekly":
245 | types.remove("Current month data")
246 | cur_hist = st.selectbox(
247 | "Current/historical data",
248 | types,
249 | )
250 | with row1_col3:
251 | if frequency == "Monthly":
252 | scale = st.selectbox(
253 | "Scale", ["National", "State", "Metro", "County"], index=3
254 | )
255 | else:
256 | scale = st.selectbox("Scale", ["National", "Metro"], index=1)
257 |
258 | gdf = get_geom_data(scale.lower())
259 |
260 | if frequency == "Weekly":
261 | inventory_df = get_inventory_data(data_links["weekly"][scale.lower()])
262 | weeks = get_weeks(inventory_df)
263 | with row1_col1:
264 | selected_date = st.date_input("Select a date", value=weeks[-1])
265 | saturday = get_saturday(selected_date)
266 | selected_period = saturday.strftime("%-m/%-d/%Y")
267 | if saturday not in weeks:
268 | st.error(
269 | "The selected date is not available in the data. Please select a date between {} and {}".format(
270 | weeks[0], weeks[-1]
271 | )
272 | )
273 | selected_period = weeks[-1].strftime("%-m/%-d/%Y")
274 | inventory_df = get_inventory_data(data_links["weekly"][scale.lower()])
275 | inventory_df = filter_weekly_inventory(inventory_df, selected_period)
276 |
277 | if frequency == "Monthly":
278 | if cur_hist == "Current month data":
279 | inventory_df = get_inventory_data(
280 | data_links["monthly_current"][scale.lower()]
281 | )
282 | selected_period = get_periods(inventory_df)[0]
283 | else:
284 | with row1_col2:
285 | inventory_df = get_inventory_data(
286 | data_links["monthly_historical"][scale.lower()]
287 | )
288 | start_year, end_year = get_start_end_year(inventory_df)
289 | periods = get_periods(inventory_df)
290 | with st.expander("Select year and month", True):
291 | selected_year = st.slider(
292 | "Year",
293 | start_year,
294 | end_year,
295 | value=start_year,
296 | step=1,
297 | )
298 | selected_month = st.slider(
299 | "Month",
300 | min_value=1,
301 | max_value=12,
302 | value=int(periods[0][-2:]),
303 | step=1,
304 | )
305 | selected_period = str(selected_year) + str(selected_month).zfill(2)
306 | if selected_period not in periods:
307 | st.error("Data not available for selected year and month")
308 | selected_period = periods[0]
309 | inventory_df = inventory_df[
310 | inventory_df["month_date_yyyymm"] == int(selected_period)
311 | ]
312 |
313 | data_cols = get_data_columns(inventory_df, scale.lower(), frequency.lower())
314 |
315 | with row1_col4:
316 | selected_col = st.selectbox("Attribute", data_cols)
317 | with row1_col5:
318 | show_desc = st.checkbox("Show attribute description")
319 | if show_desc:
320 | try:
321 | label, desc = get_data_dict(selected_col.strip())
322 | markdown = f"""
323 | **{label}**: {desc}
324 | """
325 | st.markdown(markdown)
326 | except:
327 | st.warning("No description available for selected attribute")
328 |
329 | row2_col1, row2_col2, row2_col3, row2_col4, row2_col5, row2_col6 = st.columns(
330 | [0.6, 0.68, 0.7, 0.7, 1.5, 0.8]
331 | )
332 |
333 | palettes = cm.list_colormaps()
334 | with row2_col1:
335 | palette = st.selectbox("Color palette", palettes, index=palettes.index("Blues"))
336 | with row2_col2:
337 | n_colors = st.slider("Number of colors", min_value=2, max_value=20, value=8)
338 | with row2_col3:
339 | show_nodata = st.checkbox("Show nodata areas", value=True)
340 | with row2_col4:
341 | show_3d = st.checkbox("Show 3D view", value=False)
342 | with row2_col5:
343 | if show_3d:
344 | elev_scale = st.slider(
345 | "Elevation scale", min_value=1, max_value=1000000, value=1, step=10
346 | )
347 | with row2_col6:
348 | st.info("Press Ctrl and move the left mouse button.")
349 | else:
350 | elev_scale = 1
351 |
352 | gdf = join_attributes(gdf, inventory_df, scale.lower())
353 | gdf_null = select_null(gdf, selected_col)
354 | gdf = select_non_null(gdf, selected_col)
355 | gdf = gdf.sort_values(by=selected_col, ascending=True)
356 |
357 | colors = cm.get_palette(palette, n_colors)
358 | colors = [hex_to_rgb(c) for c in colors]
359 |
360 | for i, ind in enumerate(gdf.index):
361 |         index = int(i / (len(gdf) / len(colors)))  # map the sorted row's rank to one of the color bins
362 | if index >= len(colors):
363 | index = len(colors) - 1
364 | gdf.loc[ind, "R"] = colors[index][0]
365 | gdf.loc[ind, "G"] = colors[index][1]
366 | gdf.loc[ind, "B"] = colors[index][2]
367 |
368 | initial_view_state = pdk.ViewState(
369 | latitude=40,
370 | longitude=-100,
371 | zoom=3,
372 | max_zoom=16,
373 | pitch=0,
374 | bearing=0,
375 | height=900,
376 | width=None,
377 | )
378 |
379 | min_value = gdf[selected_col].min()
380 | max_value = gdf[selected_col].max()
381 | color = "color"
382 | # color_exp = f"[({selected_col}-{min_value})/({max_value}-{min_value})*255, 0, 0]"
383 |     color_exp = "[R, G, B]"  # read the per-feature R, G, B columns assigned above
384 |
385 | geojson = pdk.Layer(
386 | "GeoJsonLayer",
387 | gdf,
388 | pickable=True,
389 | opacity=0.5,
390 | stroked=True,
391 | filled=True,
392 | extruded=show_3d,
393 | wireframe=True,
394 | get_elevation=f"{selected_col}",
395 | elevation_scale=elev_scale,
396 | # get_fill_color="color",
397 | get_fill_color=color_exp,
398 | get_line_color=[0, 0, 0],
399 | get_line_width=2,
400 | line_width_min_pixels=1,
401 | )
402 |
403 | geojson_null = pdk.Layer(
404 | "GeoJsonLayer",
405 | gdf_null,
406 | pickable=True,
407 | opacity=0.2,
408 | stroked=True,
409 | filled=True,
410 | extruded=False,
411 | wireframe=True,
412 | # get_elevation="properties.ALAND/100000",
413 | # get_fill_color="color",
414 | get_fill_color=[200, 200, 200],
415 | get_line_color=[0, 0, 0],
416 | get_line_width=2,
417 | line_width_min_pixels=1,
418 | )
419 |
420 | # tooltip = {"text": "Name: {NAME}"}
421 |
422 |     # tooltip_value = f"<b>Value:</b> {median_listing_price}"
423 | tooltip = {
424 | "html": "<b>Name:</b> {NAME}<br><b>Value:</b> {"
425 | + selected_col
426 | + "}<br><b>Date:</b> "
427 | + selected_period
428 | + "",
429 | "style": {"backgroundColor": "steelblue", "color": "white"},
430 | }
431 |
432 | layers = [geojson]
433 | if show_nodata:
434 | layers.append(geojson_null)
435 |
436 | r = pdk.Deck(
437 | layers=layers,
438 | initial_view_state=initial_view_state,
439 | map_style="light",
440 | tooltip=tooltip,
441 | )
442 |
443 | row3_col1, row3_col2 = st.columns([6, 1])
444 |
445 | with row3_col1:
446 | st.pydeck_chart(r)
447 | with row3_col2:
448 | st.write(
449 | cm.create_colormap(
450 | palette,
451 | label=selected_col.replace("_", " ").title(),
452 | width=0.2,
453 | height=3,
454 | orientation="vertical",
455 | vmin=min_value,
456 | vmax=max_value,
457 | font_size=10,
458 | )
459 | )
460 | row4_col1, row4_col2, row4_col3 = st.columns([1, 2, 3])
461 | with row4_col1:
462 | show_data = st.checkbox("Show raw data")
463 | with row4_col2:
464 | show_cols = st.multiselect("Select columns", data_cols)
465 | with row4_col3:
466 | show_colormaps = st.checkbox("Preview all color palettes")
467 | if show_colormaps:
468 | st.write(cm.plot_colormaps(return_fig=True))
469 | if show_data:
470 | if scale == "National":
471 | st.dataframe(gdf[["NAME", "GEOID"] + show_cols])
472 | elif scale == "State":
473 | st.dataframe(gdf[["NAME", "STUSPS"] + show_cols])
474 | elif scale == "County":
475 | st.dataframe(gdf[["NAME", "STATEFP", "COUNTYFP"] + show_cols])
476 | elif scale == "Metro":
477 | st.dataframe(gdf[["NAME", "CBSAFP"] + show_cols])
478 | elif scale == "Zip":
479 | st.dataframe(gdf[["GEOID10"] + show_cols])
480 |
481 |
482 | app()
483 |
--------------------------------------------------------------------------------
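
Note on the choropleth coloring above (U.S. Housing page, lines 355-366): features are colored by rank, not by raw value. The GeoDataFrame is sorted by the selected attribute, each row is assigned to one of the n_colors equal-sized bins, and the deck.gl accessor "[R, G, B]" simply reads those per-row columns back. A minimal standalone sketch of that step, assuming colors is a list of (R, G, B) tuples; the function name is illustrative and not part of the app:

def assign_bin_colors(gdf, value_col, colors):
    """Assign an (R, G, B) color to each row by rank-based binning."""
    gdf = gdf.sort_values(by=value_col, ascending=True)
    n_bins = len(colors)
    for i, ind in enumerate(gdf.index):
        # A row's position in the sorted order picks its bin; the min() guard
        # keeps the last rows from running past the end of the palette.
        bin_index = min(int(i / (len(gdf) / n_bins)), n_bins - 1)
        gdf.loc[ind, "R"] = colors[bin_index][0]
        gdf.loc[ind, "G"] = colors[bin_index][1]
        gdf.loc[ind, "B"] = colors[bin_index][2]
    return gdf

Because the bins hold roughly equal counts rather than equal value intervals, the colorbar drawn with cm.create_colormap spans the value range while the map colors follow rank order.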
/pages/3_πͺ_Split_Map.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import leafmap.foliumap as leafmap
3 |
4 | st.set_page_config(layout="wide")
5 |
6 | st.sidebar.info(
7 | """
8 | - Web App URL: <https://streamlit.gishub.org>
9 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
10 | """
11 | )
12 |
13 | st.sidebar.title("Contact")
14 | st.sidebar.info(
15 | """
16 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
17 | """
18 | )
19 |
20 | st.title("Split-panel Map")
21 |
22 | with st.expander("See source code"):
23 | with st.echo():
24 | m = leafmap.Map()
25 | m.split_map(
26 | left_layer="ESA WorldCover 2020 S2 FCC", right_layer="ESA WorldCover 2020"
27 | )
28 | m.add_legend(title="ESA Land Cover", builtin_legend="ESA_WorldCover")
29 |
30 | m.to_streamlit(height=700)
31 |
--------------------------------------------------------------------------------
/pages/4_π₯_Heatmap.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import leafmap.foliumap as leafmap
3 |
4 | st.set_page_config(layout="wide")
5 |
6 | st.sidebar.info(
7 | """
8 | - Web App URL: <https://streamlit.gishub.org>
9 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
10 | """
11 | )
12 |
13 | st.sidebar.title("Contact")
14 | st.sidebar.info(
15 | """
16 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
17 | """
18 | )
19 |
20 | st.title("Heatmap")
21 |
22 | with st.expander("See source code"):
23 | with st.echo():
24 | filepath = "https://raw.githubusercontent.com/giswqs/leafmap/master/examples/data/us_cities.csv"
25 | m = leafmap.Map(center=[40, -100], zoom=4)
26 | m.add_heatmap(
27 | filepath,
28 | latitude="latitude",
29 | longitude="longitude",
30 | value="pop_max",
31 | name="Heat map",
32 | radius=20,
33 | )
34 | m.to_streamlit(height=700)
35 |
--------------------------------------------------------------------------------
/pages/5_π_Marker_Cluster.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import leafmap.foliumap as leafmap
3 |
4 | st.set_page_config(layout="wide")
5 |
6 | st.sidebar.info(
7 | """
8 | - Web App URL: <https://streamlit.gishub.org>
9 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
10 | """
11 | )
12 |
13 | st.sidebar.title("Contact")
14 | st.sidebar.info(
15 | """
16 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
17 | """
18 | )
19 |
20 | st.title("Marker Cluster")
21 |
22 | with st.expander("See source code"):
23 | with st.echo():
24 |
25 | m = leafmap.Map(center=[40, -100], zoom=4)
26 | cities = "https://raw.githubusercontent.com/giswqs/leafmap/master/examples/data/us_cities.csv"
27 | regions = "https://raw.githubusercontent.com/giswqs/leafmap/master/examples/data/us_regions.geojson"
28 |
29 | m.add_geojson(regions, layer_name="US Regions")
30 | m.add_points_from_xy(
31 | cities,
32 | x="longitude",
33 | y="latitude",
34 | color_column="region",
35 | icon_names=["gear", "map", "leaf", "globe"],
36 | spin=True,
37 | add_legend=True,
38 | )
39 |
40 | m.to_streamlit(height=700)
41 |
--------------------------------------------------------------------------------
/pages/6_πΊοΈ_Basemaps.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import leafmap.foliumap as leafmap
3 |
4 | st.set_page_config(layout="wide")
5 |
6 | st.sidebar.info(
7 | """
8 | - Web App URL: <https://streamlit.gishub.org>
9 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
10 | """
11 | )
12 |
13 | st.sidebar.title("Contact")
14 | st.sidebar.info(
15 | """
16 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
17 | """
18 | )
19 |
20 |
21 | def app():
22 | st.title("Search Basemaps")
23 | st.markdown(
24 | """
25 |     This app is a demonstration of searching and loading basemaps from [xyzservices](https://github.com/geopandas/xyzservices) and [Quick Map Services (QMS)](https://github.com/nextgis/quickmapservices). Select from 1,000+ basemaps with just a few clicks.
26 | """
27 | )
28 |
29 | with st.expander("See demo"):
30 | st.image("https://i.imgur.com/0SkUhZh.gif")
31 |
32 | row1_col1, row1_col2 = st.columns([3, 1])
33 | width = 800
34 | height = 600
35 | tiles = None
36 |
37 | with row1_col2:
38 |
39 | checkbox = st.checkbox("Search Quick Map Services (QMS)")
40 | keyword = st.text_input("Enter a keyword to search and press Enter:")
41 | empty = st.empty()
42 |
43 | if keyword:
44 | options = leafmap.search_xyz_services(keyword=keyword)
45 | if checkbox:
46 | qms = leafmap.search_qms(keyword=keyword)
47 | if qms is not None:
48 | options = options + qms
49 |
50 | tiles = empty.multiselect("Select XYZ tiles to add to the map:", options)
51 |
52 | with row1_col1:
53 | m = leafmap.Map()
54 |
55 | if tiles is not None:
56 | for tile in tiles:
57 | m.add_xyz_service(tile)
58 |
59 | m.to_streamlit(height=height)
60 |
61 |
62 | app()
63 |
--------------------------------------------------------------------------------
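
Note: the search flow on the Basemaps page above can be exercised outside Streamlit with the same leafmap helpers. A minimal sketch; the keyword and output file name are arbitrary examples, and search_qms may return None when nothing matches:

import leafmap.foliumap as leafmap

keyword = "topo"  # example keyword
options = leafmap.search_xyz_services(keyword=keyword)
qms = leafmap.search_qms(keyword=keyword)
if qms is not None:
    options = options + qms

m = leafmap.Map()
for tile in options[:2]:  # add the first two matches, if any
    m.add_xyz_service(tile)
m.to_html("basemaps_preview.html")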
/pages/7_π¦_Web_Map_Service.py:
--------------------------------------------------------------------------------
1 | import ast
2 | import json
3 | import streamlit as st
4 | import leafmap.foliumap as leafmap
5 |
6 | st.set_page_config(layout="wide")
7 |
8 | st.sidebar.info(
9 | """
10 | - Web App URL: <https://streamlit.gishub.org>
11 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
12 | """
13 | )
14 |
15 | st.sidebar.title("Contact")
16 | st.sidebar.info(
17 | """
18 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
19 | """
20 | )
21 |
22 | # Define a whitelist of trusted URLs
23 | trusted_urls = [
24 | "https://services.terrascope.be/wms/v2",
25 | # Add more trusted URLs here
26 | ]
27 |
28 |
29 | @st.cache_data
30 | def get_layers(url):
31 | options = leafmap.get_wms_layers(url)
32 | return options
33 |
34 |
35 | def is_trusted_url(url):
36 | return url in trusted_urls
37 |
38 |
39 | def app():
40 | st.title("Web Map Service (WMS)")
41 | st.markdown(
42 | """
43 |     This app is a demonstration of loading Web Map Service (WMS) layers. Enter the URL of a WMS service
44 |     in the text box below and press Enter to retrieve its layers. Note that only URLs on the app's trusted
45 |     list are loaded. See https://apps.nationalmap.gov/services for additional WMS URLs.
46 | """
47 | )
48 |
49 | row1_col1, row1_col2 = st.columns([3, 1.3])
50 | width = 800
51 | height = 600
52 | layers = None
53 |
54 | with row1_col2:
55 |
56 | esa_landcover = "https://services.terrascope.be/wms/v2"
57 | url = st.text_input(
58 | "Enter a WMS URL:", value="https://services.terrascope.be/wms/v2"
59 | )
60 | empty = st.empty()
61 |
62 | if url:
63 |
64 |             if is_trusted_url(url):
65 |                 options = get_layers(url)
66 |             else:
67 |                 options = []  # avoid a NameError in the multiselect below
68 |                 st.error(
69 |                     "The entered URL is not trusted. Please enter a valid WMS URL."
70 |                 )
71 |
72 | default = None
73 | if url == esa_landcover:
74 | default = "WORLDCOVER_2020_MAP"
75 | layers = empty.multiselect(
76 | "Select WMS layers to add to the map:", options, default=default
77 | )
78 | add_legend = st.checkbox("Add a legend to the map", value=True)
79 | if default == "WORLDCOVER_2020_MAP":
80 | legend = str(leafmap.builtin_legends["ESA_WorldCover"])
81 | else:
82 | legend = ""
83 | if add_legend:
84 | legend_text = st.text_area(
85 | "Enter a legend as a dictionary {label: color}",
86 | value=legend,
87 | height=200,
88 | )
89 |
90 | with row1_col1:
91 | m = leafmap.Map(center=(36.3, 0), zoom=2)
92 |
93 | if layers is not None:
94 | for layer in layers:
95 | m.add_wms_layer(
96 | url, layers=layer, name=layer, attribution=" ", transparent=True
97 | )
98 | if add_legend and legend_text:
99 | legend_dict = json.loads(legend_text.replace("'", '"'))
100 | m.add_legend(legend_dict=legend_dict)
101 |
102 | m.to_streamlit(height=height)
103 |
104 |
105 | app()
106 |
--------------------------------------------------------------------------------
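
Note: the Web Map Service page above imports ast but never uses it; the legend text is parsed with json.loads after swapping single quotes for double quotes, which fails on any label containing an apostrophe. A more forgiving parser is sketched below as a suggestion, not as what the app currently does; it tries a Python literal first and falls back to JSON:

import ast
import json

def parse_legend(text):
    """Parse a user-supplied {label: color} dictionary from a text area."""
    try:
        value = ast.literal_eval(text)  # accepts single- or double-quoted dicts
        return value if isinstance(value, dict) else None
    except (ValueError, SyntaxError):
        pass
    try:
        return json.loads(text)
    except json.JSONDecodeError:
        return None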
/pages/8_ποΈ_Raster_Data_Visualization.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import leafmap.foliumap as leafmap
4 | import leafmap.colormaps as cm
5 | import streamlit as st
6 |
7 | st.set_page_config(layout="wide")
8 |
9 | st.sidebar.info(
10 | """
11 | - Web App URL: <https://streamlit.gishub.org>
12 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
13 | """
14 | )
15 |
16 | st.sidebar.title("Contact")
17 | st.sidebar.info(
18 | """
19 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
20 | """
21 | )
22 |
23 |
24 | @st.cache_data
25 | def load_cog_list():
26 | print(os.getcwd())
27 | in_txt = os.path.join(os.getcwd(), "data/cog_files.txt")
28 | with open(in_txt) as f:
29 | return [line.strip() for line in f.readlines()[1:]]
30 |
31 |
32 | @st.cache_data
33 | def get_palettes():
34 | return list(cm.palettes.keys())
35 | # palettes = dir(palettable.matplotlib)[:-16]
36 | # return ["matplotlib." + p for p in palettes]
37 |
38 |
39 | st.title("Visualize Raster Datasets")
40 | st.markdown(
41 | """
42 |     An interactive web app for visualizing local raster datasets and Cloud Optimized GeoTIFFs ([COG](https://www.cogeo.org)). The app was built using [streamlit](https://streamlit.io), [leafmap](https://leafmap.org), and [TiTiler](https://developmentseed.org/titiler/).
43 |
44 |
45 | """
46 | )
47 |
48 |
49 | def is_trusted_url(url):
50 | if url.startswith("https://opendata.digitalglobe.com/events/california-fire-2020/"):
51 | return True
52 | else:
53 | return False
54 |
55 |
56 | row1_col1, row1_col2 = st.columns([2, 1])
57 |
58 | with row1_col1:
59 | cog_list = load_cog_list()
60 |     cog = st.selectbox("Select a sample Cloud Optimized GeoTIFF (COG)", cog_list)
61 |
62 | with row1_col2:
63 | empty = st.empty()
64 |
65 | url = empty.text_input(
66 | "Enter a HTTP URL to a Cloud Optimized GeoTIFF (COG)",
67 | cog,
68 | )
69 |
70 | if is_trusted_url(url):
71 |     options = []  # fallback in case the COG cannot be read
72 |     try:
73 |         options = leafmap.cog_bands(url)
74 |     except Exception as e:
75 |         st.error(e)
76 |     # Default to the first three bands (RGB) when available, otherwise the first band.
77 |     if len(options) >= 3:
78 |         default = options[:3]
79 |     else:
80 |         default = options[:1]
81 |     bands = st.multiselect("Select bands to display", options, default=default)
82 | 
83 |     if len(bands) not in (1, 3):
84 |         st.error("Please select one or three bands")
85 | else:
86 |     st.error("Please enter a trusted URL")
87 |
88 | add_params = st.checkbox("Add visualization parameters")
89 | if add_params:
90 | vis_params = st.text_area("Enter visualization parameters", "{}")
91 | else:
92 | vis_params = {}
93 |
94 | if len(vis_params) > 0:
95 | try:
96 | vis_params = json.loads(vis_params.replace("'", '"'))
97 | except Exception as e:
98 | st.error(
99 | f"Invalid visualization parameters. It should be a dictionary. Error: {e}"
100 | )
101 | vis_params = {}
102 |
103 | submit = st.button("Submit")
104 |
105 | m = leafmap.Map(latlon_control=False)
106 |
107 | if submit:
108 |     if url and is_trusted_url(url):  # bands is only defined for trusted URLs
109 | try:
110 | m.add_cog_layer(url, bands=bands, **vis_params)
111 | except Exception as e:
112 | with row1_col2:
113 | st.error(e)
114 | st.error("Work in progress. Try it again later.")
115 |
116 | with row1_col1:
117 | m.to_streamlit()
118 |
--------------------------------------------------------------------------------
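
Note: the COG inspection driving the Raster Data Visualization page above can be run directly with leafmap. A minimal sketch; cog_url is a placeholder for any entry in data/cog_files.txt, and the output file name is arbitrary:

import leafmap.foliumap as leafmap

# Placeholder URL: substitute any line from data/cog_files.txt.
cog_url = "https://opendata.digitalglobe.com/events/california-fire-2020/..."

bands = leafmap.cog_bands(cog_url)
m = leafmap.Map(latlon_control=False)
# The page accepts either one band (grayscale) or three bands (an RGB composite).
m.add_cog_layer(cog_url, bands=bands[:3] if len(bands) >= 3 else bands[:1])
m.to_html("cog_preview.html")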
/pages/9_π²_Vector_Data_Visualization.py:
--------------------------------------------------------------------------------
1 | import os
2 | import fiona
3 | import geopandas as gpd
4 | import streamlit as st
5 |
6 | st.set_page_config(layout="wide")
7 |
8 | st.sidebar.info(
9 | """
10 | - Web App URL: <https://streamlit.gishub.org>
11 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
12 | """
13 | )
14 |
15 | st.sidebar.title("Contact")
16 | st.sidebar.info(
17 | """
18 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
19 | """
20 | )
21 |
22 |
23 | # Define a whitelist of trusted URLs
24 | trusted_urls = [
25 | "https://github.com/giswqs/streamlit-geospatial/raw/master/data/us_states.geojson",
26 | # Add more trusted URLs here
27 | ]
28 |
29 |
30 | def is_trusted_url(url):
31 | return url in trusted_urls
32 |
33 |
34 | def save_uploaded_file(file_content, file_name):
35 | """
36 | Save the uploaded file to a temporary directory
37 | """
38 | import tempfile
39 | import os
40 | import uuid
41 |
42 | _, file_extension = os.path.splitext(file_name)
43 | file_id = str(uuid.uuid4())
44 | file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
45 |
46 | with open(file_path, "wb") as file:
47 | file.write(file_content.getbuffer())
48 |
49 | return file_path
50 |
51 |
52 | def app():
53 |
54 | st.title("Upload Vector Data")
55 |
56 | row1_col1, row1_col2 = st.columns([2, 1])
57 | width = 950
58 | height = 600
59 |
60 | with row1_col2:
61 |
62 | backend = st.selectbox(
63 | "Select a plotting backend", ["folium", "kepler.gl", "pydeck"], index=2
64 | )
65 |
66 | if backend == "folium":
67 | import leafmap.foliumap as leafmap
68 | elif backend == "kepler.gl":
69 | import leafmap.kepler as leafmap
70 | elif backend == "pydeck":
71 | import leafmap.deck as leafmap
72 |
73 | url = st.text_input(
74 | "Enter a URL to a vector dataset",
75 | "https://github.com/giswqs/streamlit-geospatial/raw/master/data/us_states.geojson",
76 | )
77 |
78 | data = st.file_uploader(
79 | "Upload a vector dataset", type=["geojson", "kml", "zip", "tab"]
80 | )
81 |
82 | container = st.container()
83 |
84 | if data or is_trusted_url(url):
85 | if data:
86 | file_path = save_uploaded_file(data, data.name)
87 | layer_name = os.path.splitext(data.name)[0]
88 | elif url:
89 | file_path = url
90 | layer_name = url.split("/")[-1].split(".")[0]
91 |
92 | with row1_col1:
93 | if file_path.lower().endswith(".kml"):
94 | fiona.drvsupport.supported_drivers["KML"] = "rw"
95 | gdf = gpd.read_file(file_path, driver="KML")
96 | else:
97 | gdf = gpd.read_file(file_path)
98 | lon, lat = leafmap.gdf_centroid(gdf)
99 | if backend == "pydeck":
100 |
101 | column_names = gdf.columns.values.tolist()
102 | random_column = None
103 | with container:
104 | random_color = st.checkbox("Apply random colors", True)
105 | if random_color:
106 | random_column = st.selectbox(
107 | "Select a column to apply random colors", column_names
108 | )
109 |
110 | m = leafmap.Map(center=(lat, lon))
111 | m.add_gdf(gdf, random_color_column=random_column)
112 | st.pydeck_chart(m)
113 |
114 | else:
115 | m = leafmap.Map(center=(lat, lon), draw_export=True)
116 | m.add_gdf(gdf, layer_name=layer_name)
117 | # m.add_vector(file_path, layer_name=layer_name)
118 | if backend == "folium":
119 | m.zoom_to_gdf(gdf)
120 | m.to_streamlit(width=width, height=height)
121 |
122 | else:
123 | with row1_col1:
124 | m = leafmap.Map()
125 |             st.pydeck_chart(m)  # note: the empty-state map assumes the default pydeck backend
126 |
127 |
128 | app()
129 |
--------------------------------------------------------------------------------
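
Note: the pydeck branch of the vector page above boils down to centering on the data and handing a colored GeoDataFrame to deck.gl, using the same helpers the page itself calls. A minimal sketch outside Streamlit, assuming the repo's data/us_states.geojson and its STUSPS column:

import geopandas as gpd
import leafmap.deck as leafmap

gdf = gpd.read_file("data/us_states.geojson")
lon, lat = leafmap.gdf_centroid(gdf)  # same centering helper the page uses

m = leafmap.Map(center=(lat, lon))
m.add_gdf(gdf, random_color_column="STUSPS")  # one random color per state
m.to_html("states_preview.html")  # pydeck's Deck.to_html export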
/postBuild:
--------------------------------------------------------------------------------
1 | # enable nbserverproxy
2 | jupyter serverextension enable --sys-prefix nbserverproxy
3 | # streamlit launches at startup
4 | mv streamlit_call.py ${NB_PYTHON_PREFIX}/lib/python*/site-packages/
5 | # enable streamlit extension
6 | jupyter serverextension enable --sys-prefix streamlit_call
7 |
--------------------------------------------------------------------------------
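
Note: this postBuild hook (a repo2docker/Binder convention) pairs with streamlit_call.py at the end of this listing: the nbserverproxy and streamlit_call server extensions are enabled here, and the extension's load_jupyter_server_extension launches "streamlit run Home.py" when the container starts.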
/requirements.txt:
--------------------------------------------------------------------------------
1 | --find-links=https://girder.github.io/large_image_wheels GDAL
2 | # cartopy
3 | folium==0.13.0
4 | geemap[extra]
5 | geopandas
6 | jupyter-server-proxy
7 | keplergl
8 | leafmap
9 | localtileserver
10 | nbserverproxy
11 | owslib
12 | palettable
13 | plotly
14 | streamlit
15 | streamlit-bokeh-events
16 | streamlit-folium
17 | streamlit-keplergl
18 | tropycal
19 | # git+https://github.com/giswqs/leafmap
20 | # git+https://github.com/giswqs/geemap
21 |
22 |
--------------------------------------------------------------------------------
/setup.sh:
--------------------------------------------------------------------------------
1 | # sudo add-apt-repository ppa:ubuntugis/ppa && sudo apt-get update
2 | # sudo apt-get update
3 | # sudo apt-get install python3-dev
4 | # sudo apt-get install gdal-bin
5 | # sudo apt-get install libgdal-dev
6 | # export CPLUS_INCLUDE_PATH=/usr/include/gdal
7 | # export C_INCLUDE_PATH=/usr/include/gdal
8 | # gdal-config --version
9 | # pip install GDAL==$(gdal-config --version | awk -F'[.]' '{print $1"."$2}') localtileserver
10 |
11 | mkdir -p ~/.streamlit/
12 | echo "\
13 | [server]\n\
14 | headless = true\n\
15 | port = $PORT\n\
16 | enableCORS = false\n\
17 | \n\
18 | " > ~/.streamlit/config.toml
--------------------------------------------------------------------------------
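
Note: once the platform substitutes $PORT, the escaped echo above is intended to leave ~/.streamlit/config.toml with a [server] section setting headless = true, port to the assigned port, and enableCORS = false.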
/streamlit_app.py:
--------------------------------------------------------------------------------
1 | import streamlit as st
2 | import leafmap.foliumap as leafmap
3 |
4 | st.set_page_config(layout="wide")
5 |
6 | st.sidebar.info(
7 | """
8 | - Web App URL: <https://streamlit.gishub.org>
9 | - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
10 | """
11 | )
12 |
13 | st.sidebar.title("Contact")
14 | st.sidebar.info(
15 | """
16 | Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
17 | """
18 | )
19 |
20 | # Customize page title
21 | st.title("Streamlit for Geospatial Applications")
22 |
23 | st.markdown(
24 | """
25 | This multipage app template demonstrates various interactive web apps created using [streamlit](https://streamlit.io) and [leafmap](https://leafmap.org). It is an open-source project and you are very welcome to contribute to the [GitHub repository](https://github.com/giswqs/streamlit-multipage-template).
26 | """
27 | )
28 |
29 | st.header("Instructions")
30 |
31 | markdown = """
32 | 1. Fork the [GitHub repository](https://github.com/giswqs/streamlit-multipage-template) or [use it as a template](https://github.com/giswqs/streamlit-multipage-template/generate) for your own project.
33 | 2. Customize the sidebar by changing the sidebar text and logo in each Python file.
34 | 3. Find your favorite emoji from https://emojipedia.org.
35 | 4. Add a new app to the `pages/` directory with an emoji in the file name, e.g., `1_π_Chart.py`.
36 |
37 | """
38 |
39 | st.markdown(markdown)
40 |
41 | m = leafmap.Map(minimap_control=True)
42 | m.add_basemap("OpenTopoMap")
43 | m.to_streamlit(height=500)
44 |
--------------------------------------------------------------------------------
/streamlit_call.py:
--------------------------------------------------------------------------------
1 | from subprocess import Popen
2 |
3 |
4 | def load_jupyter_server_extension(nbapp):
5 | """serve the streamlit app"""
6 | Popen(
7 | [
8 | "streamlit",
9 | "run",
10 | "Home.py",
11 | "--browser.serverAddress=0.0.0.0",
12 | "--server.enableCORS=False",
13 | ]
14 | )
15 |
--------------------------------------------------------------------------------