├── .coveragerc
├── .gitignore
├── 3dbm.png
├── Analysis.ipynb
├── Figures.ipynb
├── LICENSE
├── README.md
├── __init__.py
├── cityCompare.py
├── cityPlot.py
├── cityStats.py
├── cityjson.py
├── extractLod.py
├── geometry.py
├── helpers
│   ├── __init__.py
│   ├── geometry.py
│   ├── mesh.py
│   ├── minimumBoundingBox.py
│   └── smallestenclosingcircle.py
├── notebook.sh
├── pyvista_tests.ipynb
├── requirements.txt
├── shape_index.py
├── tests
│   ├── __init__.py
│   └── test_shape_index.py
└── volume.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit=tests/*
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # pytype static type analyzer
135 | .pytype/
136 |
137 | # Cython debug symbols
138 | cython_debug/
139 |
140 | .vscode/
141 |
142 | *.csv
--------------------------------------------------------------------------------
/3dbm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tudelft3d/3d-building-metrics/ca50dae866c66d51845ac383f2b64f190409d092/3dbm.png
--------------------------------------------------------------------------------
/Figures.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "d7a0dadd",
6 | "metadata": {},
7 | "source": [
8 | "# Figures\n",
9 | "\n",
10 | "A notebook to make figures of mesh differences"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 90,
16 | "id": "31fc7a30",
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "import json\n",
21 | "import pyvista as pv\n",
22 | "import pymesh\n",
23 | "from helpers.mesh import *\n",
24 | "import cityjson\n",
25 | "import pandas as pd\n",
26 | "import os\n",
27 | "\n",
28 | "def rpath(path):\n",
29 | " return os.path.expanduser(path)\n",
30 | "\n",
31 | "df = pd.read_csv(rpath(\"~/3DBAG_09/all/lod2.2.csv\"))"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": 91,
37 | "id": "796b6f96",
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "def load_citymodel(file):\n",
42 | " cm = json.load(file)\n",
43 | "\n",
44 | " if \"transform\" in cm:\n",
45 | " s = cm[\"transform\"][\"scale\"]\n",
46 | " t = cm[\"transform\"][\"translate\"]\n",
47 | " verts = [[v[0] * s[0] + t[0], v[1] * s[1] + t[1], v[2] * s[2] + t[2]]\n",
48 | " for v in cm[\"vertices\"]]\n",
49 | " else:\n",
50 | " verts = cm[\"vertices\"]\n",
51 | "\n",
52 | " # mesh points\n",
53 | " vertices = np.array(verts)\n",
54 | "\n",
55 | " return cm, vertices\n",
56 | "\n",
57 | "def get_geometry(co, lod):\n",
58 | " \"\"\"Returns the geometry of the given LoD.\n",
59 | " \n",
60 | " If lod is None then it returns the first one.\n",
61 | " \"\"\"\n",
62 | "\n",
63 | " if len(co[\"geometry\"]) == 0:\n",
64 | " return None\n",
65 | "\n",
66 | " if lod is None:\n",
67 | " return co[\"geometry\"][0]\n",
68 | "\n",
69 | " for geom in co[\"geometry\"]:\n",
70 | " if str(geom[\"lod\"]) == str(lod):\n",
71 | " return geom\n",
72 | "\n",
73 | "def is_valid(mesh):\n",
74 | " return mesh.volume > 0 and mesh.n_open_edges == 0\n",
75 | "\n",
76 | "def load_building(objid, tile_id=None, tile_csv=None, lod=\"2.2\"): \n",
77 | " if tile_id is None:\n",
78 | " tile_id = tile_csv.set_index(\"id\").loc[objid][\"tile_id\"]\n",
79 | "\n",
80 | " filename = rpath(f\"~/3DBAG_09/{tile_id}.json\")\n",
81 | "\n",
82 | " with open(filename, \"rb\") as file:\n",
83 | " cm, verts = load_citymodel(file)\n",
84 | "\n",
85 | " building = cm[\"CityObjects\"][objid]\n",
86 | "\n",
87 | " geom = get_geometry(building, lod)\n",
88 | " mesh = cityjson.to_triangulated_polydata(geom, verts)\n",
89 | " \n",
90 | " return mesh, geom, verts"
91 | ]
92 | },
93 | {
94 | "cell_type": "code",
95 | "execution_count": null,
96 | "id": "2b960b1e",
97 | "metadata": {},
98 | "outputs": [],
99 | "source": [
100 | "selected_ids = [\n",
101 | " \"NL.IMBAG.Pand.0599100000702379-0\",\n",
102 | " \"NL.IMBAG.Pand.0518100001635181-0\",\n",
103 | " \"NL.IMBAG.Pand.0599100000701103-0\",\n",
104 | " \"NL.IMBAG.Pand.0518100000225439-0\",\n",
105 | " \"NL.IMBAG.Pand.0518100000273015-0\",\n",
106 | " \"NL.IMBAG.Pand.0363100012075730-0\",\n",
107 | " \"NL.IMBAG.Pand.0363100012185598-0\",\n",
108 | " \"NL.IMBAG.Pand.0344100000031226-0\",\n",
109 | " \"NL.IMBAG.Pand.0344100000077683-0\",\n",
110 | " \"NL.IMBAG.Pand.0344100000099499-0\",\n",
111 | " \"NL.IMBAG.Pand.0599100000080428-0\",\n",
112 | " \"NL.IMBAG.Pand.0518100000230634-0\",\n",
113 | " \"NL.IMBAG.Pand.0518100000206625-0\",\n",
114 | " \"NL.IMBAG.Pand.0518100000226316-0\",\n",
115 | " \"NL.IMBAG.Pand.0518100000282020-0\",\n",
116 | " \"NL.IMBAG.Pand.0518100000222277-0\",\n",
117 | " \"NL.IMBAG.Pand.0629100000020777-0\",\n",
118 | " \"NL.IMBAG.Pand.0363100012236081-0\",\n",
119 | " \"NL.IMBAG.Pand.0599100000432858-0\",\n",
120 | " \"NL.IMBAG.Pand.0518100000206625-0\"\n",
121 | "]\n",
122 | "\n",
123 | "footprints = []\n",
124 | "for objid in selected_ids:\n",
125 | " mesh, geom, verts = load_building(objid, tile_csv=df)\n",
126 | " \n",
127 | " footprint = cityjson.to_shapely(geom, verts, ground_only=True)\n",
128 | " \n",
129 | " footprints.append([objid, footprint])"
130 | ]
131 | },
132 | {
133 | "cell_type": "code",
134 | "execution_count": null,
135 | "id": "43a8ef63",
136 | "metadata": {},
137 | "outputs": [],
138 | "source": [
139 | "import geopandas\n",
140 | "\n",
141 | "footprints = geopandas.GeoDataFrame(footprints, columns=[\"id\", \"geometry\"], geometry=\"geometry\")"
142 | ]
143 | },
144 | {
145 | "cell_type": "code",
146 | "execution_count": null,
147 | "id": "9c1e3d02",
148 | "metadata": {},
149 | "outputs": [],
150 | "source": [
151 | "footprints.to_file(\"footprints.gpkg\", driver=\"GPKG\")"
152 | ]
153 | },
154 | {
155 | "cell_type": "markdown",
156 | "id": "c26368ac",
157 | "metadata": {},
158 | "source": [
159 | "# Single building plots\n",
160 | "\n",
161 | "First, let's load the building (we can load two different LoDs):"
162 | ]
163 | },
164 | {
165 | "cell_type": "code",
166 | "execution_count": null,
167 | "id": "db645626",
168 | "metadata": {},
169 | "outputs": [],
170 | "source": [
171 | "# Duplicate to save time from loading every time\n",
172 | "\n",
173 | "# For figure 1\n",
174 | "# NL.IMBAG.Pand.0603100000009302-0\n",
175 | "# NL.IMBAG.Pand.0489100000210413-0\n",
176 | "# NL.IMBAG.Pand.0344100000045583-0\n",
177 | "\n",
178 | "# objid = \"NL.IMBAG.Pand.0489100000210413-0\"\n",
179 | "\n",
180 | "# Example building\n",
181 | "# NL.IMBAG.Pand.0518100000337631\n",
182 | "\n",
183 | "# Building with one hole\n",
184 | "# NL.IMBAG.Pand.0599100000601466-0\n",
185 | "objid=\"NL.IMBAG.Pand.0518100000337631-0\"\n",
186 | "\n",
187 | "def load_bagid(objid, lod=\"2.2\"):\n",
188 | " \n",
189 | " tile_id = df.set_index(\"id\").loc[objid][\"tile_id\"]\n",
190 | "\n",
191 | "# NL.IMBAG.Pand.0599100000763318 - Van Nelle\n",
192 | "\n",
193 | " filename = rpath(f\"~/3DBAG_09/{tile_id}.json\")\n",
194 | "\n",
195 | " with open(filename, \"rb\") as file:\n",
196 | " cm, verts = load_citymodel(file)\n",
197 | "\n",
198 | " building = cm[\"CityObjects\"][objid]\n",
199 | " \n",
200 | " geom = get_geometry(building, lod)\n",
201 | " \n",
202 | " mesh = cityjson.to_triangulated_polydata(geom, verts)\n",
203 | " \n",
204 | " return mesh, geom, verts\n",
205 | "\n",
206 | "lod_source = \"1.2\"\n",
207 | "lod_dest = \"2.2\"\n",
208 | "\n",
209 | "mesh_source, geom_source, verts = load_bagid(objid, lod_source)\n",
210 | "mesh_dest, geom_dest, verts = load_bagid(objid, lod_dest)"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 95,
216 | "id": "52b027e2",
217 | "metadata": {},
218 | "outputs": [
219 | {
220 | "data": {
221 | "application/vnd.jupyter.widget-view+json": {
222 | "model_id": "cd5f6f3f6de1406aa3ceb215dcbf6704",
223 | "version_major": 2,
224 | "version_minor": 0
225 | },
226 | "text/plain": [
227 | "ViewInteractiveWidget(height=2048, layout=Layout(height='auto', width='100%'), width=2048)"
228 | ]
229 | },
230 | "metadata": {},
231 | "output_type": "display_data"
232 | }
233 | ],
234 | "source": [
235 | "p = pv.Plotter(window_size=[2048, 2048])\n",
236 | "\n",
237 | "p.background_color = \"white\"\n",
238 | "\n",
239 | "# p.enable_parallel_projection()\n",
240 | "\n",
241 | "p.add_mesh(mesh_dest.extract_feature_edges(), color=\"black\", line_width=5)\n",
242 | "p.add_mesh(mesh_dest, scalars=\"semantics\", cmap=[\"black\", \"red\", \"lightgrey\"], color=\"lightgrey\", ambient=0.7, diffuse=0.5)\n",
243 | "\n",
244 | "p.show()\n",
245 | "\n",
246 | "p.save_graphic(rpath(\"~/figures/simple/example-main-semantics.pdf\"))"
247 | ]
248 | },
249 | {
250 | "cell_type": "markdown",
251 | "id": "34d6c406",
252 | "metadata": {},
253 | "source": [
254 | "## Plot boolean operations\n",
255 | "\n",
256 | "This will plot the boolean operation outcome of the loaded LoDs:"
257 | ]
258 | },
259 | {
260 | "cell_type": "code",
261 | "execution_count": null,
262 | "id": "fc3bf2f9",
263 | "metadata": {},
264 | "outputs": [],
265 | "source": [
266 | "pm_source = to_pymesh(mesh_source)\n",
267 | "pm_dest = to_pymesh(mesh_dest)\n",
268 | "\n",
269 | "inter = intersect(pm_source, pm_dest)\n",
270 | "diff = difference(pm_source, pm_dest)\n",
271 | "op_diff = difference(pm_dest, pm_source)\n",
272 | "sym_diff = symmetric_difference(pm_source, pm_dest)\n",
273 | "\n",
274 | "inter_vista = to_pyvista(inter)\n",
275 | "inter_vista[\"source\"] = inter.get_attribute(\"source\")\n",
276 | "\n",
277 | "diff_vista = to_pyvista(diff)\n",
278 | "diff_vista[\"source\"] = diff.get_attribute(\"source\")\n",
279 | "\n",
280 | "op_diff_vista = to_pyvista(op_diff)\n",
281 | "op_diff_vista[\"source\"] = op_diff.get_attribute(\"source\")\n",
282 | "\n",
283 | "sym_diff_vista = to_pyvista(sym_diff)\n",
284 | "sym_diff_vista[\"source\"] = sym_diff.get_attribute(\"source\")\n",
285 | "\n",
286 | "p = pv.Plotter(window_size=[4096, 640], shape=(1,7))\n",
287 | "\n",
288 | "p.background_color = \"white\"\n",
289 | "\n",
290 | "p.add_mesh(mesh_source, color=\"blue\", opacity=0.5)\n",
291 | "p.add_mesh(mesh_dest, color=\"orange\", opacity=0.5)\n",
292 | "\n",
293 | "p.add_mesh(mesh_source.extract_feature_edges(), color=\"blue\", line_width=3, label=lod_source)\n",
294 | "p.add_mesh(mesh_dest.extract_feature_edges(), color=\"orange\", line_width=3, label=lod_dest)\n",
295 | "\n",
296 | "p.subplot(0,1)\n",
297 | "\n",
298 | "p.add_mesh(mesh_source, color=\"lightgrey\")\n",
299 | "\n",
300 | "p.subplot(0,2)\n",
301 | "\n",
302 | "p.add_mesh(mesh_dest, color=\"lightgrey\")\n",
303 | "\n",
304 | "p.subplot(0,3)\n",
305 | "\n",
306 | "# p.add_mesh(inter_vista, scalars=\"source\", cmap=[\"orange\", \"blue\"], label=\"Intersection\")\n",
307 | "\n",
308 | "p.add_mesh(inter_vista, color=\"lightgrey\", label=\"Intersection\")\n",
309 | "\n",
310 | "p.subplot(0,4)\n",
311 | "\n",
312 |     "p.add_mesh(sym_diff_vista, color=\"lightgrey\", label=\"Symmetric difference\")\n",
313 | "\n",
314 | "p.subplot(0,5)\n",
315 | "\n",
316 |     "p.add_mesh(diff_vista, color=\"lightgrey\", label=\"Difference (source minus destination)\")\n",
317 | "\n",
318 | "p.subplot(0,6)\n",
319 | "\n",
320 |     "p.add_mesh(op_diff_vista, color=\"lightgrey\", label=\"Difference (destination minus source)\")\n",
321 | "\n",
322 | "p.show()\n",
323 | "# p.save_graphic(f\"{objid}-boolean.pdf\", raster=False)"
324 | ]
325 | },
326 | {
327 | "cell_type": "markdown",
328 | "id": "e5499b19",
329 | "metadata": {},
330 | "source": [
331 | "## Plot building and its related volumes (convex hull, OOBB and AABB)\n",
332 | "\n",
333 | "This will plot a figure with four subfigures showing the building and its convex hull, OOBB and AABB:"
334 | ]
335 | },
336 | {
337 | "cell_type": "code",
338 | "execution_count": null,
339 | "id": "bb6fbc20",
340 | "metadata": {
341 | "scrolled": false
342 | },
343 | "outputs": [],
344 | "source": [
345 | "# --- End of duplicate\n",
346 | "\n",
347 | "import pymesh\n",
348 | "import cityjson\n",
349 | "import geometry\n",
350 | "\n",
351 | "# solid_color = \"lightgrey\"\n",
352 | "# trans_color = \"red\"\n",
353 | "# opacity = 0.7\n",
354 | "# edges_on = True\n",
355 | "# border_width = 2.0\n",
356 | "\n",
357 | "# rot_angle = 0\n",
358 | "\n",
359 | "# from shapely.geometry import Point\n",
360 | "# obb_2d = Point(0, 0).buffer(10).minimum_rotated_rectangle\n",
361 | "# mesh = pv.Sphere(10).clip(\"z\", invert=False)\n",
362 | "\n",
363 | "# A palette of colours (actually materials) to use\n",
364 | "colour_palette = {\n",
365 | " \"grey\": {\n",
366 | " \"mesh_color\": \"grey\",\n",
367 | " \"ambient\": 0.9,\n",
368 | " \"diffuse\": 0.3\n",
369 | " },\n",
370 | " \"pastel_green\": {\n",
371 | " \"mesh_color\": \"#b6e2d3\",\n",
372 | " \"ambient\": 0.6,\n",
373 | " \"diffuse\": 0.1\n",
374 | " },\n",
375 | " \"pastel_blue\": {\n",
376 | " \"mesh_color\": \"#81abbc\",\n",
377 | " \"ambient\": 0.6,\n",
378 | " \"diffuse\": 0.1\n",
379 | " },\n",
380 | " \"pastel_red\": {\n",
381 | " \"mesh_color\": \"#fbd2c9\",\n",
382 | " \"ambient\": 0.6,\n",
383 | " \"diffuse\": 0.1\n",
384 | " },\n",
385 | " \"pastel_yellow\": {\n",
386 | " \"mesh_color\": \"#fff4bd\",\n",
387 | " \"ambient\": 0.6,\n",
388 | " \"diffuse\": 0.1\n",
389 | " }\n",
390 | "}\n",
391 | "\n",
392 | "def plot(mesh,\n",
393 | " geom,\n",
394 | " verts,\n",
395 | " mesh_color=\"lightgrey\",\n",
396 | " ambient=0.0,\n",
397 | " diffuse=1.0,\n",
398 | " trans_color=\"red\",\n",
399 | " opacity=0.7,\n",
400 | " edges_on=True,\n",
401 | " solid_edges=\"black\",\n",
402 | " border_width=5.0,\n",
403 | " rot_angle=0,\n",
404 | " label=None,\n",
405 | " filename=None):\n",
406 | " \"\"\"Plots the mesh provided alongside its convex hull, OOBB and AABB.\"\"\"\n",
407 | "\n",
408 | " pm_mesh = to_pymesh(mesh)\n",
409 | "\n",
410 | " pm_ch = pymesh.convex_hull(pm_mesh)\n",
411 | "\n",
412 | " convex_hull = to_pyvista(pm_ch)\n",
413 | "\n",
414 | " # Compute OBB with shapely\n",
415 | " obb_2d = cityjson.to_shapely(geom, verts, ground_only=False).minimum_rotated_rectangle\n",
416 | " min_z = np.min(mesh.clean().points[:, 2])\n",
417 | " max_z = np.max(mesh.clean().points[:, 2])\n",
418 | " obb = geometry.extrude(obb_2d, min_z, max_z)\n",
419 | "\n",
420 | " points = np.array([[p[0], p[1], min_z] for p in obb_2d.boundary.coords])\n",
421 | " obb.points += np.mean(points, axis=0)\n",
422 | "\n",
423 | " aobb = mesh.outline(generate_faces=True)\n",
424 | "\n",
425 | " p = pv.Plotter(window_size=[2048, 2048], shape=(2,2))\n",
426 | " # p.add_title(\"test\", color=\"black\")\n",
427 | "\n",
428 | " p.background_color = \"white\"\n",
429 | "\n",
430 | " centroid = [(mesh.bounds[0] + mesh.bounds[1]) / 2, (mesh.bounds[2] + mesh.bounds[3]) / 2, (mesh.bounds[4] + mesh.bounds[5]) / 2]\n",
431 | " mesh.rotate_z(rot_angle, centroid)\n",
432 | "\n",
433 | " p.add_mesh(mesh, color=mesh_color, ambient=ambient, diffuse=diffuse)\n",
434 | " if edges_on:\n",
435 | " p.add_mesh(mesh.extract_feature_edges(feature_angle=10), color=solid_edges, line_width=border_width)\n",
436 | "\n",
437 | " p.subplot(0, 1)\n",
438 | "\n",
439 | " convex_hull.rotate_z(rot_angle, centroid)\n",
440 | "\n",
441 | " p.add_mesh(mesh, color=mesh_color, ambient=ambient, diffuse=diffuse)\n",
442 | " p.add_mesh(convex_hull, color=trans_color, opacity=opacity)\n",
443 | " if edges_on:\n",
444 | " p.add_mesh(convex_hull.extract_feature_edges(feature_angle=10), color=trans_color, line_width=border_width)\n",
445 | "\n",
446 | " p.subplot(1, 0)\n",
447 | "\n",
448 | " obb.rotate_z(rot_angle, centroid)\n",
449 | "\n",
450 | " p.add_mesh(mesh, color=mesh_color, ambient=ambient, diffuse=diffuse)\n",
451 | " p.add_mesh(obb, color=trans_color, opacity=opacity)\n",
452 | " if edges_on:\n",
453 | " p.add_mesh(obb.extract_feature_edges(), color=trans_color, line_width=border_width)\n",
454 | "\n",
455 | " p.subplot(1, 1)\n",
456 | "\n",
457 | " aobb.rotate_z(rot_angle, centroid)\n",
458 | "\n",
459 | " p.add_mesh(mesh, color=mesh_color, ambient=ambient, diffuse=diffuse)\n",
460 | " p.add_mesh(aobb, color=trans_color, opacity=opacity)\n",
461 | " if edges_on:\n",
462 | " p.add_mesh(aobb.extract_feature_edges(), color=trans_color, line_width=border_width)\n",
463 | "\n",
464 | " p.show()\n",
465 | " p.reset_camera()\n",
466 | " p.link_views()\n",
467 | " \n",
468 | " if not filename is None:\n",
469 | " p.save_graphic(filename)\n",
470 | " \n",
471 | " p.close()\n",
472 | "# p.set_position(np.mean(mesh.points, axis=0) + [50, 0, 30])\n",
473 | "# objid=\"hemisphere\"\n",
474 | "\n",
475 | "plot(mesh_dest, geom_dest, verts, border_width=2.0, **colour_palette[\"grey\"], solid_edges=\"white\", filename=rpath(\"~/figures/example-volumes.pdf\"))"
476 | ]
477 | },
478 | {
479 | "cell_type": "markdown",
480 | "id": "ab0b5e64",
481 | "metadata": {},
482 | "source": [
483 | "## Plot mesh and its voxels"
484 | ]
485 | },
486 | {
487 | "cell_type": "code",
488 | "execution_count": null,
489 | "id": "3e61b5d1",
490 | "metadata": {},
491 | "outputs": [],
492 | "source": [
493 | "voxel = pv.voxelize(mesh_dest, density=0.5, check_surface=False)\n",
494 | "# voxel.plot(show_edges=True, text=f\"[{objid}] Voxelized\")\n",
495 | "\n",
496 | "p = pv.Plotter(window_size=[2048, 2048])\n",
497 | "\n",
498 | "p.background_color = \"white\"\n",
499 | "\n",
500 | "p.add_mesh(voxel, line_width=2.0, color='lightgrey', ambient=0.5, diffuse=0.8, show_edges=True)\n",
501 | "# p.add_mesh(voxel.extract_all_edges(), line_width=2.0, color='white')\n",
502 | "# p.add_mesh(voxel.cell_centers(), color='black')\n",
503 | "# p.add_mesh(mesh_dest, color=\"grey\")\n",
504 | "# p.add_mesh(mesh_dest.extract_feature_edges(feature_angle=10), color='black')\n",
505 | "# p.add_mesh(pv.PolyData(np.mean(voxel.cell_centers().points, axis=0)), color='white')\n",
506 | "\n",
507 | "p.show()\n",
508 | "\n",
509 | "p.save_graphic(rpath(\"~/figures/example-grid.pdf\"))"
510 | ]
511 | },
512 | {
513 | "cell_type": "markdown",
514 | "id": "0f03b587",
515 | "metadata": {},
516 | "source": [
517 | "## Plot mesh with surface grid\n",
518 | "\n",
519 | "This will create a surface grid for the mesh and plot it on top of it:"
520 | ]
521 | },
522 | {
523 | "cell_type": "code",
524 | "execution_count": null,
525 | "id": "bdf7331a",
526 | "metadata": {},
527 | "outputs": [],
528 | "source": [
529 | "from shape_index import create_surface_grid\n",
530 | "\n",
531 | "sgrid = pv.PolyData(create_surface_grid(mesh_dest, 0.6))\n",
532 | "\n",
533 | "p = pv.Plotter(window_size=[2048, 2048])\n",
534 | "\n",
535 | "p.add_mesh(mesh_dest, color=\"lightgrey\", ambient=1, diffuse=0)\n",
536 | "p.add_mesh(mesh_dest.extract_feature_edges(), color=\"black\", line_width=10)\n",
537 | "p.add_mesh(sgrid, render_points_as_spheres=True, point_size=10, color=\"black\")\n",
538 | "\n",
539 | "p.show()"
540 | ]
541 | },
542 | {
543 | "cell_type": "code",
544 | "execution_count": null,
545 | "id": "73ef775d",
546 | "metadata": {},
547 | "outputs": [],
548 | "source": [
549 | "p.save_graphic(rpath(\"~/figures/example-surface-grid.pdf\"))"
550 | ]
551 | },
552 | {
553 | "cell_type": "markdown",
554 | "id": "96787d7d",
555 | "metadata": {},
556 | "source": [
557 | "## Plot the mesh with holes (if any)\n",
558 | "\n",
559 | "First, let's find a mesh with holes:"
560 | ]
561 | },
562 | {
563 | "cell_type": "code",
564 | "execution_count": null,
565 | "id": "a5ca9bd8",
566 | "metadata": {},
567 | "outputs": [],
568 | "source": [
569 | "objid = \"NL.IMBAG.Pand.0599100000254048-0\"\n",
570 | "mesh_holes, _, _ = load_bagid(objid, \"2.2\")"
571 | ]
572 | },
573 | {
574 | "cell_type": "code",
575 | "execution_count": null,
576 | "id": "0eef898f",
577 | "metadata": {},
578 | "outputs": [],
579 | "source": [
580 | "edges = mesh_holes.extract_feature_edges(boundary_edges=True,\n",
581 | " feature_edges=False,\n",
582 | " manifold_edges=False)\n",
583 | "\n",
584 | "p = pv.Plotter(window_size=[2048, 1024])\n",
585 | "\n",
586 | "p.add_mesh(mesh_holes, color=\"grey\", ambient=0.9, diffuse=0.3)\n",
587 | "p.add_mesh(mesh_holes.extract_feature_edges(boundary_edges=False), color=\"black\", line_width=3)\n",
588 | "if mesh_holes.n_open_edges:\n",
589 | " p.add_mesh(edges, color='red', line_width=6)\n",
590 | "\n",
591 | "# p.add_title(f\"{objid} {'is watertight' if mesh_holes.n_open_edges == 0 else f'has {mesh_holes.n_open_edges} open edges'}\", 8) \n",
592 | "\n",
593 | "# Zoom in to the hole of NL.IMBAG.Pand.0599100000254048-0\n",
594 | "# p.camera.position = (93492.6, 439805, 79.0279)\n",
595 | "# p.camera.focal_point = (93446.4, 439759, 32.7941)\n",
596 | "\n",
597 | "p.show()\n",
598 | "p.save_graphic(rpath(\"~/figures/example-holes.pdf\"))\n",
599 | "p.close()"
600 | ]
601 | },
602 | {
603 | "cell_type": "markdown",
604 | "id": "9fa89069",
605 | "metadata": {},
606 | "source": [
607 | "# Figures about clustering\n",
608 | "\n",
609 | "A set of figures related to some cluster analysis conducted in the Analysis notebook.\n",
610 | "\n",
611 | "Let's load the data:"
612 | ]
613 | },
614 | {
615 | "cell_type": "code",
616 | "execution_count": null,
617 | "id": "8f1272f7",
618 | "metadata": {},
619 | "outputs": [],
620 | "source": [
621 | "clustering = pd.read_csv(\"clustering_200k_30n_11f_average.csv\")\n",
622 | "\n",
623 | "# show_n = 5\n",
624 | "\n",
625 | "# p = pv.Plotter(shape=(1, show_n))\n",
626 | "\n",
627 | "# sample = clustering[clustering[\"cluster\"] != 5].sample(n = show_n)\n",
628 | "\n",
629 | "# selected_ids = sample[\"id\"]\n",
630 | "\n",
631 | "# i = 0\n",
632 | "# for objid in selected_ids:\n",
633 | "# p.subplot(0, i)\n",
634 | "\n",
635 | "# mesh, geom, verts = load_building(objid, tile_csv=df)\n",
636 | "# p.add_title(str(sample.iloc[i][\"cluster\"]))\n",
637 | "# p.add_mesh(mesh)\n",
638 | " \n",
639 | "# i += 1\n",
640 | "\n",
641 | "# p.show()"
642 | ]
643 | },
644 | {
645 | "cell_type": "markdown",
646 | "id": "381329e5",
647 | "metadata": {},
648 | "source": [
649 | "Let's see the distribution of buildings among clusters:"
650 | ]
651 | },
652 | {
653 | "cell_type": "code",
654 | "execution_count": null,
655 | "id": "6aa310e5",
656 | "metadata": {},
657 | "outputs": [],
658 | "source": [
659 | "cluster_stats = pd.DataFrame(clustering.groupby(\"cluster\").size(), columns=[\"count\"])\n",
660 | "cluster_stats[\"perc\"] = cluster_stats[\"count\"] / len(clustering)\n",
661 | "cluster_stats.sort_values([\"perc\"], ascending=False)"
662 | ]
663 | },
664 | {
665 | "cell_type": "code",
666 | "execution_count": null,
667 | "id": "a43a2868",
668 | "metadata": {},
669 | "outputs": [],
670 | "source": [
671 | "from plotnine import *\n",
672 | "%matplotlib inline\n",
673 | "\n",
674 | "(ggplot(clustering) # defining what data to use\n",
675 | " + aes(x=\"cluster\") # defining what variable to use\n",
676 | " + geom_bar() # defining the type of plot to use\n",
677 | " + scale_y_continuous(trans=\"log10\")\n",
678 | " + coord_flip()\n",
679 | ")"
680 | ]
681 | },
682 | {
683 | "cell_type": "markdown",
684 | "id": "e543f5fc",
685 | "metadata": {},
686 | "source": [
687 | "## Clusters plots of samples\n",
688 | "\n",
689 | "This will plot samples of 9 (or less) buildings per cluster:"
690 | ]
691 | },
692 | {
693 | "cell_type": "code",
694 | "execution_count": null,
695 | "id": "072f1bfe",
696 | "metadata": {},
697 | "outputs": [],
698 | "source": [
699 | "import math\n",
700 | "\n",
701 | "def get_shape(c):\n",
702 |     "    \"\"\"Returns the most square-like (rows, cols) grid shape with at most c cells\"\"\"\n",
703 | " high = math.floor(math.sqrt(c))\n",
704 | " \n",
705 | " return high, math.floor(c / high)\n",
706 | "\n",
707 | "def plot_cluster(cluster_label,\n",
708 | " show_class=True,\n",
709 | " shape=(3,3),\n",
710 | " **kwargs\n",
711 | " ):\n",
712 | " \"\"\"Plots the specified cluster. If `cell_size` is set, then the `window_size` is ignored.\"\"\"\n",
713 | " \n",
714 | " size_x = shape[0]\n",
715 | " size_y = shape[1]\n",
716 | "\n",
717 | " show_n = size_x * size_y\n",
718 | "\n",
719 | " label_df = clustering[clustering[\"cluster\"] == cluster_label]\n",
720 | " \n",
721 | " if len(label_df) > show_n:\n",
722 | " sample = label_df.sample(n = show_n)\n",
723 | " else:\n",
724 | " sample = label_df\n",
725 | " \n",
726 | " return plot_buildings(sample, label=str(cluster_label) if show_class else None, shape=shape, **kwargs)\n",
727 | " \n",
728 | "def plot_buildings(sample,\n",
729 | " shape=(3,3),\n",
730 | " allow_reshape=True,\n",
731 | " window_size=[1024, 768],\n",
732 | " cell_size=None,\n",
733 | " show=True,\n",
734 | " filename=None,\n",
735 | " label=None,\n",
736 | " show_ids=True,\n",
737 | " mesh_color=\"grey\",\n",
738 | " ambient=0.8,\n",
739 | " diffuse=0.5,\n",
740 | " edge_color=None,\n",
741 | " close_plotter=True\n",
742 | " ):\n",
743 | " \"\"\"Plots a dataframe of buildings. If `cell_size` is set, then the `window_size` is ignored.\"\"\"\n",
744 | " \n",
745 | " size_x = shape[0]\n",
746 | " size_y = shape[1]\n",
747 | "\n",
748 | " show_n = size_x * size_y\n",
749 | " \n",
750 | " if allow_reshape:\n",
751 | " if len(sample) == 1:\n",
752 | " size_x = 1\n",
753 | " size_y = 1\n",
754 | " show_n = 1\n",
755 | " elif len(sample) < show_n:\n",
756 | " size_x, size_y = get_shape(len(sample))\n",
757 | " show_n = size_x * size_y\n",
758 | " \n",
759 | " if len(sample) > show_n:\n",
760 | " sample = sample.sample(n = show_n)\n",
761 | " \n",
762 | " # If cell_size is set, resize the window respectively\n",
763 | " if not cell_size is None:\n",
764 | " window_size = [size_y*cell_size[0], size_x*cell_size[1]]\n",
765 | " \n",
766 | " p = pv.Plotter(window_size=window_size, shape=(size_x, size_y))\n",
767 | " \n",
768 | " p.background_color = \"white\"\n",
769 | " \n",
770 | " if not label is None:\n",
771 | " p.add_text(label, color=\"black\")\n",
772 | "\n",
773 | " selected_ids = sample[\"id\"]\n",
774 | "\n",
775 | " i = 0\n",
776 | " j = 0\n",
777 | " for objid in selected_ids:\n",
778 | " p.subplot(i, j)\n",
779 | "\n",
780 | " mesh, geom, verts = load_building(objid, tile_csv=df)\n",
781 | " p.add_mesh(mesh, color=mesh_color, ambient=ambient, diffuse=diffuse)\n",
782 | " if show_ids:\n",
783 | " p.add_text(objid, font_size=10, position=\"lower_right\", color=\"black\")\n",
784 | " #TODO: Add an option to print sublabels here\n",
785 | " if not edge_color is None:\n",
786 | " p.add_mesh(mesh.extract_feature_edges(feature_angle=20), color=edge_color, line_width=2)\n",
787 | "\n",
788 | " j += 1\n",
789 | " if j > size_y - 1:\n",
790 | " i += 1\n",
791 | " j = 0\n",
792 | " \n",
793 | " p.reset_camera()\n",
794 | " \n",
795 | " if show:\n",
796 | " p.show()\n",
797 | " \n",
798 | " if not filename is None:\n",
799 | " p.save_graphic(filename)\n",
800 | " \n",
801 | " if close_plotter:\n",
802 | " p.close()\n",
803 | " \n",
804 | " return p\n",
805 | "\n",
806 | "p = plot_cluster(19, window_size=[2048, 2048], **colour_palette[\"pastel_blue\"], show_class=False, show_ids=False, close_plotter=False, edge_color=\"white\")\n",
807 | "# plot_cluster(1, shape=(2,3))\n",
808 | "# plot_cluster(2)\n",
809 | "# plot_cluster(2, shape=(5, 5), allow_reshape=False)"
810 | ]
811 | },
812 | {
813 | "cell_type": "code",
814 | "execution_count": null,
815 | "id": "94de4c24",
816 | "metadata": {},
817 | "outputs": [],
818 | "source": [
819 | "p.save_graphic(rpath(\"~/figures/clusters/sample_200k_30n_average/19_blue.pdf\"))"
820 | ]
821 | },
822 | {
823 | "cell_type": "code",
824 | "execution_count": null,
825 | "id": "db248d89",
826 | "metadata": {
827 | "scrolled": false
828 | },
829 | "outputs": [],
830 | "source": [
831 | "out_folder = rpath(\"~/figures/clusters/sample_200k_30n_average\")\n",
832 | "\n",
833 | "import os\n",
834 | "\n",
835 | "if not os.path.exists(out_folder):\n",
836 | " os.mkdir(out_folder)\n",
837 | "\n",
838 | "labels = np.sort(pd.unique(clustering[\"cluster\"]))\n",
839 | "# labels = [4, 6, 3]\n",
840 | "\n",
841 | "# colors = [\"#54086b\", \"#ff0bac\", \"#00bec5\", \"#050833\"]\n",
842 | "# colors = [\"#4f0000\", \"#d9ced6\", \"#303437\", \"#303437\"]\n",
843 | "# colors = [\"#b6e2d3\", \"#fbd2c9\",\"#fff4bd\", \"#81abbc\"]\n",
844 | "for i, c in enumerate(labels[6:]):\n",
845 | " print(f\"Plotting {c}...\")\n",
846 | "# color = colors[i]\n",
847 | " plot_cluster(c, cell_size=[683, 683], filename=f\"{out_folder}/{c}.pdf\", show_class=False, show_ids=False, edge_color=None)\n",
848 | "\n",
849 | "print(\"Done!\")"
850 | ]
851 | }
852 | ],
853 | "metadata": {
854 | "kernelspec": {
855 | "display_name": "Python 3 (ipykernel)",
856 | "language": "python",
857 | "name": "python3"
858 | },
859 | "language_info": {
860 | "codemirror_mode": {
861 | "name": "ipython",
862 | "version": 3
863 | },
864 | "file_extension": ".py",
865 | "mimetype": "text/x-python",
866 | "name": "python",
867 | "nbconvert_exporter": "python",
868 | "pygments_lexer": "ipython3",
869 | "version": "3.8.10"
870 | }
871 | },
872 | "nbformat": 4,
873 | "nbformat_minor": 5
874 | }
875 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 3D geoinformation research group at TU Delft
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # 3DBM
2 |
3 |
4 |
5 |
6 |
7 | 3D Building Metrics. Elevating geometric analysis for urban morphology, solar potential, CFD, etc. to the next level 😉
8 |
9 | ## Installation
10 |
11 | You need to install all dependencies first:
12 |
13 | ```
14 | pip install -r requirements.txt
15 | ```
16 |
17 | Then take your time and install [pymesh](https://pymesh.readthedocs.io/en/latest/installation.html).
18 |
19 | ## What is it?
20 |
21 | A cool script that computes a lot of metrics from 3D geometries (mostly intended for buildings).
22 |
23 | The following metrics are computed:
24 |
25 | | Type | Metrics |
26 | | --- | --- |
27 | | Geometric Properties | Number of vertices, Number of surfaces, Number of vertices by semantic type (i.e. ground, roof, wall), Number of surfaces by semantic type (i.e. ground, roof, wall), Min/Max/Range/Mean/Median/Std/Mode height |
28 | | Derived Properties | Footprint perimeter, Volume, Volume of convex hull, Volume of Object-Oriented Bounding Box, Volume of Axis-Oriented Bounding Box, Volume of voxelised building, Length and width of the Object-Oriented Bounding Box, Surface area, Surface area by semantic surface, Horizontal elongation, Min/Max vertical elongation, Form factor |
29 | | Spatial distribution | Shared walls, Nearest neighbour |
30 | | Shape indices | Circularity/Hemisphericality\*, Convexity 2D/3D\*, Fractality 2D/3D\*, Rectangularity/Cuboidness\*, Squareness/Cubeness\*, Cohesion 2D/3D\*, Proximity 2D/3D+, Exchange 2D/3D+, Spin 2D/3D+, Perimeter/Circumference\*, Depth 2D/3D+, Girth 2D/3D+, Dispersion 2D/3Dx, Range 2D/3D\*, Equivalent Rectangular/Cuboid\*, Roughnessx |
31 |
32 | - \* formula-based index, size-independent by definition
33 | - + index based on interior grid points (discretised), normalised
34 | - x index based on surface grid points (discretised), normalised
35 |
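As a quick sanity check of the output, here is a minimal sketch (not part of the tool itself) of how the resulting CSV could be inspected with pandas. It assumes an output file such as `5910.csv` from the tutorial below, with the object id in the first column; the volume columns are the ones written by `cityStats.py`:

```
import pandas as pd

# One row per city object, indexed by its id
df = pd.read_csv("5910.csv", index_col=0)

# Compare the actual volume against the reference volumes
print(df[["actual_volume", "convex_hull_volume", "obb_volume", "aabb_volume"]].describe())
```
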
36 | ## Omg, how amazing! Any issues?
37 |
38 | Yeah:
39 | - It only works with `MultiSurface` and `Solid` geometries (for the latter, only the first shell is used)
40 | - It only parses the first geometry
41 | - Expects semantic surfaces
42 |
43 | ## How?
44 |
45 | Running it, saving the output, and including a [val3dity](https://github.com/tudelft3d/val3dity) report:
46 |
47 | ```
48 | python cityStats.py [file_path] -o [output.csv] [-v val3dity_report.json]
49 | ```
50 |
51 | By default it runs single-threaded; set the number of threads with:
52 |
53 | ```
54 | python cityStats.py [file_path] -j [number]
55 | ```
56 |
57 | Visualising a specific building, which can help with troubleshooting:
58 |
59 | ```
60 | python cityStats.py [file_path] -p -f [unique_id]
61 | ```
62 |
63 | Running it on multiple files in a folder and checking them with [val3dity](https://github.com/tudelft3d/val3dity) (make sure you have val3dity installed):
64 |
65 | ```
66 | for i in *.json; do val3dity $i --report "${i%.json}_v3.json"; python cityStats.py $i -o "${i%.json}.csv" -v "${i%.json}_v3.json"; done
67 | ```
68 |
69 | ## Can I visualise a model?
70 |
71 | Of course! Just:
72 |
73 | ```
74 | python cityPlot.py [file_path]
75 | ```
76 |
77 | ## Tutorial please!
78 |
79 | 1) Download or `git clone` this repository.
80 |
81 | 2) Install all dependencies: `pip install -r requirements.txt`.
82 |
83 | 3) Download a tile from 3D BAG: `wget --header='Accept-Encoding: gzip' https://data.3dbag.nl/cityjson/v210908_fd2cee53/3dbag_v210908_fd2cee53_5910.json`
84 |
85 | 4) Run the stats on the data: `python cityStats.py 3dbag_v210908_fd2cee53_5910.json -o 5910.csv`
86 |
87 | 5) The resulting file `5910.csv` contains all metrics computed for this tile.
88 |
89 | You may also run this with a [val3dity](http://geovalidation.bk.tudelft.nl/val3dity/) report, which you can download as a JSON file from the aforementioned website. Assuming the report's filename is `report.json`, you can run:
90 |
91 | ```
92 | python cityStats.py 3dbag_v210908_fd2cee53_5910.json -v report.json -o 5910.csv
93 | ```
94 |
95 | Then the result will contain more info related to the validation of geometries.
96 |
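For example, a minimal pandas sketch (again assuming the `5910.csv` output from the tutorial above; `valid`, `errors` and `hole_count` are columns written by `cityStats.py`) to list the buildings that val3dity flagged:

```
import pandas as pd

df = pd.read_csv("5910.csv", index_col=0)

# Buildings reported as invalid, with their val3dity error codes and open edges
invalid = df[~df["valid"]]
print(invalid[["errors", "hole_count"]])
```
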
97 | ## If you use 3DBM in a scientific context, please cite this article:
98 |
99 | Anna Labetski, Stelios Vitalis, Filip Biljecki, Ken Arroyo Ohori & Jantien Stoter (2023): 3D building metrics for urban morphology. International Journal of Geographical Information Science, 37(1): 36-67. DOI: 10.1080/13658816.2022.2103818
100 |
101 | [Article available here.](https://doi.org/10.1080/13658816.2022.2103818)
102 |
103 | ```
104 | @article{Labetski2023,
105 | Author = {Anna Labetski and Stelios Vitalis and Filip Biljecki and Ken {Arroyo Ohori} and Jantien Stoter},
106 | Title = {{3D} building metrics for urban morphology},
107 | Journal = {International Journal of Geographical Information Science},
108 | Volume = {37},
109 | Number = {1},
110 | Pages = {36-67},
111 | Year = {2023},
112 | Publisher = {Taylor & Francis},
113 | Doi = {10.1080/13658816.2022.2103818}
114 | }
115 | ```
116 |
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tudelft3d/3d-building-metrics/ca50dae866c66d51845ac383f2b64f190409d092/__init__.py
--------------------------------------------------------------------------------
/cityCompare.py:
--------------------------------------------------------------------------------
1 | import click
2 | import json
3 | import numpy as np
4 | from tqdm import tqdm
5 | import cityjson
6 | from helpers.mesh import difference, symmetric_difference, to_pymesh, to_pyvista, intersect
7 | import pyvista as pv
8 | import pandas as pd
9 | import pymesh
10 | from pymeshfix import MeshFix
11 | import os
12 |
13 | def load_citymodel(file):
14 | cm = json.load(file)
15 |
16 | if "transform" in cm:
17 | s = cm["transform"]["scale"]
18 | t = cm["transform"]["translate"]
19 | verts = [[v[0] * s[0] + t[0], v[1] * s[1] + t[1], v[2] * s[2] + t[2]]
20 | for v in cm["vertices"]]
21 | else:
22 | verts = cm["vertices"]
23 |
24 | # mesh points
25 | vertices = np.array(verts)
26 |
27 | return cm, vertices
28 |
29 | def get_geometry(co, lod):
30 | """Returns the geometry of the given LoD.
31 |
32 | If lod is None then it returns the first one.
33 | """
34 |
35 | if len(co["geometry"]) == 0:
36 | return None
37 |
38 | if lod is None:
39 | return co["geometry"][0]
40 |
41 | for geom in co["geometry"]:
42 | if str(geom["lod"]) == str(lod):
43 | return geom
44 |
45 | def repair_mesh(mesh):
46 | mfix = MeshFix(mesh)
47 | mfix.repair(verbose=False)
48 |
49 | fixed = mfix.mesh
50 |
51 | return fixed
52 |
53 | def is_valid(mesh):
54 | return mesh.volume > 0 and mesh.n_open_edges == 0
55 |
56 | def compare(co_id,
57 | obj_source,
58 | obj_dest,
59 | lod_source,
60 | lod_destination,
61 | verts_source,
62 | verts_dest,
63 | repair=False,
64 | export_path=None,
65 | plot=False,
66 | engine="igl"):
67 | geom_source = get_geometry(obj_source, lod_source)
68 | geom_dest = get_geometry(obj_dest, lod_destination)
69 |
70 | if geom_source is None or geom_dest is None:
71 | raise ValueError("Geometry is missing for source or destination.")
72 |
73 | mesh_source = cityjson.to_triangulated_polydata(geom_source, verts_source)
74 | mesh_dest = cityjson.to_triangulated_polydata(geom_dest, verts_dest)
75 |
76 | if not is_valid(mesh_source) or not is_valid(mesh_dest):
77 | if repair and not is_valid(mesh_source):
78 | mesh_source = repair_mesh(mesh_source)
79 |
80 | if repair and not is_valid(mesh_dest):
81 | mesh_dest = repair_mesh(mesh_dest)
82 |
83 | if not is_valid(mesh_source) or not is_valid(mesh_dest):
84 |             raise ValueError("The source or destination object is not a closed volume.")
85 |
86 | pm_source = to_pymesh(mesh_source)
87 | pm_dest = to_pymesh(mesh_dest)
88 |
89 | try:
90 | inter = intersect(pm_source, pm_dest, engine)
91 | sym_dif = symmetric_difference(pm_source, pm_dest, engine)
92 | dest_minus_source = difference(pm_dest, pm_source, engine)
93 | except Exception as e:
94 | raise ValueError(f"Problem intersecting: {str(e)}")
95 |
96 | if not export_path is None:
97 | pymesh.save_mesh(export_path, dest_minus_source)
98 |
99 | if plot:
100 | inter_vista = to_pyvista(inter)
101 |
102 | p = pv.Plotter()
103 |
104 | p.background_color = "white"
105 |
106 | p.add_mesh(mesh_source, color="blue", opacity=0.1)
107 | p.add_mesh(mesh_dest, color="orange", opacity=0.1)
108 |
109 | p.add_mesh(mesh_source.extract_feature_edges(), color="blue", line_width=3, label=lod_source)
110 | p.add_mesh(mesh_dest.extract_feature_edges(), color="orange", line_width=3, label=lod_destination)
111 |
112 | p.add_mesh(inter_vista, color="lightgrey", label="Intersection")
113 | # p.add_mesh(sym_dif_vista, color="black", opacity=0.8, label="Symmetric Difference")
114 |
115 | p.add_legend()
116 |
117 | p.show()
118 |
119 | return co_id, {
120 | "source_volume": mesh_source.volume,
121 | "destination_volume": mesh_dest.volume,
122 | "intersection_volume": inter.volume,
123 | "symmetric_difference_volume": sym_dif.volume,
124 | "destination_minus_source": dest_minus_source.volume
125 | }
126 |
127 | @click.command()
128 | @click.argument("source", type=click.File("rb"))
129 | @click.argument("destination", type=click.File("rb"))
130 | @click.option("--lod-source", type=str)
131 | @click.option("--lod-destination", type=str)
132 | @click.option("--engine", default="igl")
133 | @click.option("--limit", type=int)
134 | @click.option("--plot", flag_value=True)
135 | @click.option("-f", "--filter")
136 | @click.option("-o", "--output")
137 | @click.option("-r", "--repair", flag_value=True)
138 | @click.option("-e", "--export-geometry", flag_value=True)
139 | @click.option("-j", "--jobs", default=1)
140 | @click.option("--break_on_error", flag_value=True)
141 | def main(source,
142 | destination,
143 | lod_source,
144 | lod_destination,
145 | engine,
146 | limit,
147 | plot,
148 | filter,
149 | output,
150 | repair,
151 | export_geometry,
152 | jobs,
153 | break_on_error):
154 | cm_source, verts_source = load_citymodel(source)
155 | cm_dest, verts_dest = load_citymodel(destination)
156 |
157 | i = 0
158 |
159 | result = {}
160 |
161 | output_path = "output_geom"
162 | if export_geometry and not os.path.isdir(output_path):
163 | os.mkdir(output_path)
164 |
165 | from concurrent.futures import ProcessPoolExecutor
166 |
167 | num_objs = len(cm_source["CityObjects"])
168 | num_cores = jobs
169 |
170 | with ProcessPoolExecutor(max_workers=num_cores) as pool:
171 | with tqdm(total=num_objs if limit is None else limit) as progress:
172 | futures = []
173 |
174 | for co_id in cm_source["CityObjects"]:
175 | if not co_id in cm_dest["CityObjects"]:
176 | print(f"WARNING: {co_id} missing from destination file.")
177 | progress.total -= 1
178 | continue
179 |
180 | if not filter is None and filter != co_id:
181 | progress.total -= 1
182 | continue
183 |
184 | future = pool.submit(compare,
185 | co_id,
186 | cm_source["CityObjects"][co_id],
187 | cm_dest["CityObjects"][co_id],
188 | lod_source,
189 | lod_destination,
190 | verts_source,
191 | verts_dest,
192 | repair,
193 |                                      os.path.join(output_path, f"{co_id}.obj") if export_geometry else None,
194 | plot,
195 | engine)
196 | future.add_done_callback(lambda p: progress.update())
197 |                 futures.append((co_id, future))
198 |
199 | i += 1
200 | if not limit is None and i >= limit:
201 | break
202 |
203 |         for co_id, future in futures:
204 | try:
205 | co_id, vals = future.result()
206 | if not vals is None:
207 | result[co_id] = vals
208 | except Exception as e:
209 | print(f"Problem with {co_id}: {e}")
210 | if break_on_error:
211 | raise e
212 |
213 | df = pd.DataFrame.from_dict(result, orient="index")
214 |
215 | if output is None:
216 | print(df)
217 | else:
218 | df.to_csv(output)
219 |
220 | if __name__ == "__main__":
221 | main()
222 |
--------------------------------------------------------------------------------
/cityPlot.py:
--------------------------------------------------------------------------------
1 | import click
2 | import json
3 | import numpy as np
4 | from cityjson import to_triangulated_polydata
5 | import pyvista as pv
6 | from tqdm import tqdm
7 |
8 | @click.command()
9 | @click.argument("input", type=click.File("rb"))
10 | @click.option("--save", flag_value=True)
11 | def main(input, save):
12 | cm = json.load(input)
13 |
14 | if "transform" in cm:
15 | s = cm["transform"]["scale"]
16 | t = cm["transform"]["translate"]
17 | verts = [[v[0] * s[0] + t[0], v[1] * s[1] + t[1], v[2] * s[2] + t[2]]
18 | for v in cm["vertices"]]
19 | else:
20 | verts = cm["vertices"]
21 |
22 | lods = set([geom["lod"] for obj in cm["CityObjects"]
23 | for geom in cm["CityObjects"][obj]["geometry"]])
24 |
25 | if len(lods) > 1:
26 |         lod = click.prompt("Select an LoD", type=click.Choice([str(l) for l in lods]))
27 | else:
28 | lod = str(list(lods)[0])
29 |
30 | # mesh points
31 | vertices = np.array(verts)
32 |
33 | p = pv.Plotter()
34 |
35 | meshes = []
36 |
37 | for obj in tqdm(cm["CityObjects"]):
38 | co = cm["CityObjects"][obj]
39 |
40 | for geom in co["geometry"]:
41 | if str(geom["lod"]) == lod:
42 | mesh = to_triangulated_polydata(geom, vertices)
43 | meshes.append(mesh)
44 |
45 | p.add_mesh(mesh)
46 |
47 | p.show()
48 |
49 | if save:
50 | block = pv.MultiBlock(meshes)
51 | block.save("cm.vtm")
52 |
53 | if __name__ == "__main__":
54 | main()
--------------------------------------------------------------------------------
/cityStats.py:
--------------------------------------------------------------------------------
1 | import json
2 | import math
3 | from concurrent.futures import ProcessPoolExecutor
4 |
5 | import click
6 | import matplotlib.pyplot as plt
7 | import numpy as np
8 | import pandas as pd
9 | import geopandas
10 | import pyvista as pv
11 | import rtree.index
12 | import scipy.spatial as ss
13 | from pymeshfix import MeshFix
14 | from tqdm import tqdm
15 |
16 | import cityjson
17 | import geometry
18 | import shape_index as si
19 |
20 | def get_bearings(values, num_bins, weights):
21 |     """Bins the given angle values into num_bins direction bins, weighting each value by the corresponding weight"""
22 |
23 | n = num_bins * 2
24 |
25 | bins = np.arange(n + 1) * 360 / n
26 |
27 | count, bin_edges = np.histogram(values, bins=bins, weights=weights)
28 |
29 | # move last bin to front, so eg 0.01° and 359.99° will be binned together
30 | count = np.roll(count, 1)
31 | bin_counts = count[::2] + count[1::2]
32 |
33 | # because we merged the bins, their edges are now only every other one
34 | bin_edges = bin_edges[range(0, len(bin_edges), 2)]
35 |
36 | return bin_counts, bin_edges
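
# Illustrative worked example (values chosen here, not taken from any dataset):
# with num_bins=4 and unit weights, azimuths of 1 and 359 degrees end up in the
# same, north-facing bin because the first and last half-bins are merged:
#
#   get_bearings([1.0, 359.0], 4, [1.0, 1.0])
#   # -> (array([2., 0., 0., 0.]), array([  0.,  90., 180., 270., 360.]))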
37 |
38 | def get_wall_bearings(dataset, num_bins):
39 | """Returns the bearings of the azimuth angle of the normals for vertical
40 | surfaces of a dataset"""
41 |
42 | normals = dataset.face_normals
43 |
44 | if "semantics" in dataset.cell_data:
45 | wall_idxs = [s == "WallSurface" for s in dataset.cell_data["semantics"]]
46 | else:
47 | wall_idxs = [n[2] == 0 for n in normals]
48 |
49 | normals = normals[wall_idxs]
50 |
51 | azimuth = [point_azimuth(n) for n in normals]
52 |
53 | sized = dataset.compute_cell_sizes()
54 | surface_areas = sized.cell_data["Area"][wall_idxs]
55 |
56 | return get_bearings(azimuth, num_bins, surface_areas)
57 |
58 | def get_roof_bearings(dataset, num_bins):
59 |     """Returns the bearings of the roof surfaces of a dataset"""
60 |
61 | normals = dataset.face_normals
62 |
63 | if "semantics" in dataset.cell_data:
64 | roof_idxs = [s == "RoofSurface" for s in dataset.cell_data["semantics"]]
65 | else:
66 | roof_idxs = [n[2] > 0 for n in normals]
67 |
68 | normals = normals[roof_idxs]
69 |
70 | xz_angle = [azimuth(n[0], n[2]) for n in normals]
71 | yz_angle = [azimuth(n[1], n[2]) for n in normals]
72 |
73 | sized = dataset.compute_cell_sizes()
74 | surface_areas = sized.cell_data["Area"][roof_idxs]
75 |
76 | xz_counts, bin_edges = get_bearings(xz_angle, num_bins, surface_areas)
77 | yz_counts, bin_edges = get_bearings(yz_angle, num_bins, surface_areas)
78 |
79 | return xz_counts, yz_counts, bin_edges
80 |
81 | def orientation_plot(
82 | bin_counts,
83 | bin_edges,
84 | num_bins=36,
85 | title=None,
86 | title_y=1.05,
87 | title_font=None,
88 | show=False
89 | ):
90 | if title_font is None:
91 | title_font = {"family": "DejaVu Sans", "size": 12, "weight": "bold"}
92 |
93 | width = 2 * np.pi / num_bins
94 |
95 | positions = np.radians(bin_edges[:-1])
96 |
97 | radius = bin_counts / bin_counts.sum()
98 |
99 | fig, ax = plt.subplots(figsize=(5, 5), subplot_kw={"projection": "polar"})
100 | ax.set_theta_zero_location("N")
101 | ax.set_theta_direction("clockwise")
102 | ax.set_ylim(top=radius.max())
103 |
104 | # configure the y-ticks and remove their labels
105 | ax.set_yticks(np.linspace(0, radius.max(), 5))
106 | ax.set_yticklabels(labels="")
107 |
108 | # configure the x-ticks and their labels
109 | xticklabels = ["N", "", "E", "", "S", "", "W", ""]
110 | ax.set_xticks(ax.get_xticks())
111 | ax.set_xticklabels(labels=xticklabels)
112 | ax.tick_params(axis="x", which="major", pad=-2)
113 |
114 | # draw the bars
115 | ax.bar(
116 | positions,
117 | height=radius,
118 | width=width,
119 | align="center",
120 | bottom=0,
121 | zorder=2
122 | )
123 |
124 | if title:
125 | ax.set_title(title, y=title_y, fontdict=title_font)
126 |
127 | if show:
128 | plt.show()
129 |
130 | return plt
131 |
132 | def get_surface_plot(
133 | dataset,
134 | num_bins=36,
135 | title=None,
136 | title_y=1.05,
137 | title_font=None
138 | ):
139 | """Returns a plot for the surface normals of a polyData"""
140 |
141 | bin_counts, bin_edges = get_wall_bearings(dataset, num_bins)
142 |
143 |     return orientation_plot(bin_counts, bin_edges, num_bins, title, title_y, title_font)
144 |
145 |
146 | def azimuth(dx, dy):
147 | """Returns the azimuth angle for the given coordinates"""
148 |
149 | return (math.atan2(dx, dy) * 180 / np.pi) % 360
150 |
151 | def point_azimuth(p):
152 | """Returns the azimuth angle of the given point"""
153 |
154 | return azimuth(p[0], p[1])
155 |
156 | def point_zenith(p):
157 | """Return the zenith angle of the given 3d point"""
158 |
159 | z = [0.0, 0.0, 1.0]
160 |
161 | cosine_angle = np.dot(p, z) / (np.linalg.norm(p) * np.linalg.norm(z))
162 | angle = np.arccos(cosine_angle)
163 |
164 | return (angle * 180 / np.pi) % 360
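
# Illustrative examples (hypothetical normal vectors): azimuth is measured
# clockwise from north in the XY plane, zenith from the vertical axis:
#
#   point_azimuth([0.0, 1.0, 0.0])  # -> 0.0   (facing north)
#   point_azimuth([1.0, 0.0, 0.0])  # -> 90.0  (facing east)
#   point_zenith([0.0, 0.0, 1.0])   # -> 0.0   (pointing straight up)
#   point_zenith([1.0, 0.0, 0.0])   # -> 90.0  (horizontal)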
165 |
166 | def compute_stats(values, percentile = 90, percentage = 75):
167 | """
168 | Returns the stats (mean, median, max, min, range etc.) for a set of values.
169 | """
170 | hDic = {'Mean': np.mean(values), 'Median': np.median(values),
171 | 'Max': max(values), 'Min': min(values), 'Range': (max(values) - min(values)),
172 | 'Std': np.std(values)}
173 | m = max([values.count(a) for a in values])
174 | if percentile:
175 | hDic['Percentile'] = np.percentile(values, percentile)
176 | if percentage:
177 | hDic['Percentage'] = (percentage/100.0) * hDic['Range'] + hDic['Min']
178 | if m>1:
179 | hDic['ModeStatus'] = 'Y'
180 | modeCount = [x for x in values if values.count(x) == m][0]
181 | hDic['Mode'] = modeCount
182 | else:
183 | hDic['ModeStatus'] = 'N'
184 | hDic['Mode'] = np.mean(values)
185 | return hDic
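
# Illustrative worked example (values chosen here for clarity):
#
#   compute_stats([1.0, 2.0, 2.0, 5.0])
#   # -> Mean 2.5, Median 2.0, Max 5.0, Min 1.0, Range 4.0, Std 1.5,
#   #    Percentile 4.1 (90th), Percentage 4.0, Mode 2.0 (ModeStatus 'Y')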
186 |
187 | def add_value(dict, key, value):
188 | """Does dict[key] = dict[key] + value"""
189 |
190 | if key in dict:
191 | dict[key] = dict[key] + value
192 | else:
193 |         dict[key] = value
194 |
195 | def convexhull_volume(points):
196 | """Returns the volume of the convex hull"""
197 |
198 | try:
199 | return ss.ConvexHull(points).volume
200 | except:
201 | return 0
202 |
203 | def boundingbox_volume(points):
204 | """Returns the volume of the bounding box"""
205 |
206 | minx = min(p[0] for p in points)
207 | maxx = max(p[0] for p in points)
208 | miny = min(p[1] for p in points)
209 | maxy = max(p[1] for p in points)
210 | minz = min(p[2] for p in points)
211 | maxz = max(p[2] for p in points)
212 |
213 | return (maxx - minx) * (maxy - miny) * (maxz - minz)
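
# Illustrative example: two opposite corners of a 2 x 3 x 4 box already define
# its axis-aligned bounding volume:
#
#   boundingbox_volume([(0, 0, 0), (2, 3, 4)])  # -> 24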
214 |
215 | def get_errors_from_report(report, objid, cm):
216 | """Return the report for the feature of the given obj"""
217 |
218 | if not "features" in report:
219 | return []
220 |
221 | fid = objid
222 |
223 | obj = cm["CityObjects"][objid]
224 | primidx = 0
225 |
226 | if not "geometry" in obj or len(obj["geometry"]) == 0:
227 | return []
228 |
229 | if "parents" in obj:
230 | parid = obj["parents"][0]
231 |
232 | primidx = cm["CityObjects"][parid]["children"].index(objid)
233 | fid = parid
234 |
235 | for f in report["features"]:
236 | if f["id"] == fid:
237 | if "errors" in f["primitives"][primidx]:
238 | return list(map(lambda e: e["code"], f["primitives"][primidx]["errors"]))
239 | else:
240 | return []
241 |
242 | return []
243 |
244 | def validate_report(report, cm):
245 | """Returns true if the report is actually for this file"""
246 |
247 | # TODO: Actually validate the report and that it corresponds to this cm
248 | return True
249 |
250 | def tree_generator_function(cm, verts):
251 | for i, objid in enumerate(cm["CityObjects"]):
252 | obj = cm["CityObjects"][objid]
253 |
254 | if len(obj["geometry"]) == 0:
255 | continue
256 |
257 | xmin, xmax, ymin, ymax, zmin, zmax = cityjson.get_bbox(obj["geometry"][0], verts)
258 | yield (i, (xmin, ymin, zmin, xmax, ymax, zmax), objid)
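
# Illustrative use (hypothetical variable names): the generator above is meant
# to be streamed into a 3D rtree index, which get_neighbours() below queries:
#
#   props = rtree.index.Property(dimension=3)
#   r = rtree.index.Index(tree_generator_function(cm, vertices), properties=props)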
259 |
260 | def get_neighbours(cm, obj, r, verts):
261 | """Return the neighbours of the given building"""
262 |
263 | building = cm["CityObjects"][obj]
264 |
265 | if len(building["geometry"]) == 0:
266 | return []
267 |
268 | geom = building["geometry"][0]
269 | xmin, xmax, ymin, ymax, zmin, zmax = cityjson.get_bbox(geom, verts)
270 | objids = [n.object
271 | for n in r.intersection((xmin,
272 | ymin,
273 | zmin,
274 | xmax,
275 | ymax,
276 | zmax),
277 | objects=True)
278 | if n.object != obj]
279 |
280 | if len(objids) == 0:
281 | objids = [n.object for n in r.nearest((xmin, ymin, zmin, xmax, ymax, zmax), 5, objects=True) if n.object != obj]
282 |
283 | return [cm["CityObjects"][objid]["geometry"][0] for objid in objids]
284 |
285 | class StatValuesBuilder:
286 |
287 | def __init__(self, values, indices_list) -> None:
288 | self.__values = values
289 | self.__indices_list = indices_list
290 |
291 | def compute_index(self, index_name):
292 | """Returns True if the given index is supposed to be computed"""
293 |
294 | return self.__indices_list is None or index_name in self.__indices_list
295 |
296 | def add_index(self, index_name, index_func):
297 | """Adds the given index value to the dict"""
298 |
299 | if self.compute_index(index_name):
300 | self.__values[index_name] = index_func()
301 | else:
302 | self.__values[index_name] = "NC"
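
# Illustrative use of StatValuesBuilder (hypothetical names): indices_list=None
# means "compute every index"; otherwise only the requested indices are
# evaluated and the rest are marked "NC":
#
#   values = {}
#   builder = StatValuesBuilder(values, None)
#   builder.add_index("hole_count", lambda: tri_mesh.n_open_edges)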
303 |
304 | def process_building(building,
305 | obj,
306 | errors,
307 | filter,
308 | repair,
309 | plot_buildings,
310 | density_2d,
311 | density_3d,
312 | vertices,
313 | neighbours=[],
314 | custom_indices=None):
315 |
316 | if not filter is None and filter != obj:
317 | return obj, None
318 |
319 | # TODO: Add options for all skip conditions below
320 |
321 | # Skip if type is not Building or Building part
322 | if not building["type"] in ["Building", "BuildingPart"]:
323 | return obj, None
324 |
325 | # Skip if no geometry
326 | if not "geometry" in building or len(building["geometry"]) == 0:
327 | return obj, None
328 |
329 | geom = building["geometry"][0]
330 |
331 | mesh = cityjson.to_polydata(geom, vertices).clean()
332 |
333 | try:
334 | tri_mesh = cityjson.to_triangulated_polydata(geom, vertices).clean()
335 | except:
336 | print(f"{obj} geometry parsing crashed! Omitting...")
337 | return obj, {"type": building["type"]}
338 |
339 | tri_mesh, t = geometry.move_to_origin(tri_mesh)
340 |
341 | if plot_buildings:
342 | print(f"Plotting {obj}")
343 | tri_mesh.plot(show_grid=True)
344 |
345 | # get_surface_plot(dataset, title=obj)
346 |
347 | bin_count, bin_edges = get_wall_bearings(mesh, 36)
348 |
349 | xzc, yzc, be = get_roof_bearings(mesh, 36)
350 | # plot_orientations(xzc, be, title=f"XZ orientation [{obj}]")
351 | # plot_orientations(yzc, be, title=f"YZ orientation [{obj}]")
352 |
353 | # total_xy = total_xy + bin_count
354 | # total_xz = total_xz + xzc
355 | # total_yz = total_yz + yzc
356 |
357 | if repair:
358 | mfix = MeshFix(tri_mesh)
359 | mfix.repair()
360 |
361 | fixed = mfix.mesh
362 | else:
363 | fixed = tri_mesh
364 |
365 | # holes = mfix.extract_holes()
366 |
367 | # plotter = pv.Plotter()
368 | # plotter.add_mesh(dataset, color=True)
369 | # plotter.add_mesh(holes, color='r', line_width=5)
370 | # plotter.enable_eye_dome_lighting() # helps depth perception
371 | # _ = plotter.show()
372 |
373 | points = cityjson.get_points(geom, vertices)
374 |
375 | aabb_volume = boundingbox_volume(points)
376 |
377 | ch_volume = convexhull_volume(points)
378 |
379 | area, point_count, surface_count = geometry.area_by_surface(mesh)
380 |
381 | if "semantics" in geom:
382 | roof_points = geometry.get_points_of_type(mesh, "RoofSurface")
383 | ground_points = geometry.get_points_of_type(mesh, "GroundSurface")
384 | else:
385 | roof_points = []
386 | ground_points = []
387 |
388 | if len(roof_points) == 0:
389 | height_stats = compute_stats([0])
390 | ground_z = 0
391 | else:
392 | height_stats = compute_stats([v[2] for v in roof_points])
393 | if len(ground_points) > 0:
394 | ground_z = min([v[2] for v in ground_points])
395 | else:
396 | ground_z = mesh.bounds[4]
397 |
398 | if len(ground_points) > 0:
399 | shape = cityjson.to_shapely(geom, vertices)
400 | else:
401 | shape = cityjson.to_shapely(geom, vertices, ground_only=False)
402 |
403 | obb_2d = cityjson.to_shapely(geom, vertices, ground_only=False).minimum_rotated_rectangle
404 |
405 |     # Extrude the 2D OBB between the mesh's min and max Z to get a 3D OBB
406 | min_z = np.min(mesh.clean().points[:, 2])
407 | max_z = np.max(mesh.clean().points[:, 2])
408 | obb = geometry.extrude(obb_2d, min_z, max_z)
409 |
410 | # Get the dimensions of the 2D oriented bounding box
411 | S, L = si.get_box_dimensions(obb_2d)
412 |
413 | values = {
414 | "type": building["type"],
415 | "lod": geom["lod"],
416 | "point_count": len(points),
417 | "unique_point_count": fixed.n_points,
418 | "surface_count": len(cityjson.get_surface_boundaries(geom)),
419 | "actual_volume": fixed.volume,
420 | "convex_hull_volume": ch_volume,
421 | "obb_volume": obb.volume,
422 | "aabb_volume": aabb_volume,
423 | "footprint_perimeter": shape.length,
424 | "obb_width": S,
425 | "obb_length": L,
426 | "surface_area": mesh.area,
427 | "ground_area": area["GroundSurface"],
428 | "wall_area": area["WallSurface"],
429 | "roof_area": area["RoofSurface"],
430 | "ground_point_count": point_count["GroundSurface"],
431 | "wall_point_count": point_count["WallSurface"],
432 | "roof_point_count": point_count["RoofSurface"],
433 |         "ground_surface_count": surface_count["GroundSurface"],
434 | "wall_surface_count": surface_count["WallSurface"],
435 | "roof_surface_count": surface_count["RoofSurface"],
436 | "max_Z": height_stats["Max"],
437 | "min_Z": height_stats["Min"],
438 | "height_range": height_stats["Range"],
439 | "mean_Z": height_stats["Mean"],
440 | "median_Z": height_stats["Median"],
441 | "std_Z": height_stats["Std"],
442 | "mode_Z": height_stats["Mode"] if height_stats["ModeStatus"] == "Y" else "NA",
443 | "ground_Z": ground_z,
444 | "orientation_values": str(bin_count),
445 | "orientation_edges": str(bin_edges),
446 | "errors": str(errors),
447 | "valid": len(errors) == 0,
448 | "hole_count": tri_mesh.n_open_edges,
449 | "geometry": shape
450 | }
451 |
452 | if custom_indices is None or len(custom_indices) > 0:
453 | voxel = pv.voxelize(tri_mesh, density=density_3d, check_surface=False)
454 | grid = voxel.cell_centers().points
455 |
456 | shared_area = 0
457 |
458 | closest_distance = 10000
459 |
460 | if len(neighbours) > 0:
461 | # Get neighbour meshes
462 | n_meshes = [cityjson.to_triangulated_polydata(geom, vertices).clean()
463 | for geom in neighbours]
464 |
465 |             for n_mesh in n_meshes:
466 |                 n_mesh.points -= t
467 |
468 | # Compute shared walls
469 | walls = np.hstack([geometry.intersect_surfaces([fixed, neighbour])
470 | for neighbour in n_meshes])
471 |
472 | shared_area = sum([wall["area"][0] for wall in walls])
473 |
474 |             # Find the closest distance
475 |             for n_mesh in n_meshes:
476 |                 n_mesh.compute_implicit_distance(fixed, inplace=True)
477 | 
478 |                 closest_distance = min(closest_distance, np.min(n_mesh["implicit_distance"]))
479 |
480 | closest_distance = max(closest_distance, 0)
481 | else:
482 | closest_distance = "NA"
483 |
484 | builder = StatValuesBuilder(values, custom_indices)
485 |
486 | builder.add_index("2d_grid_point_count", lambda: len(si.create_grid_2d(shape, density=density_2d)))
487 | builder.add_index("3d_grid_point_count", lambda: len(grid))
488 |
489 | builder.add_index("circularity_2d", lambda: si.circularity(shape))
490 | builder.add_index("hemisphericality_3d", lambda: si.hemisphericality(fixed))
491 | builder.add_index("convexity_2d", lambda: shape.area / shape.convex_hull.area)
492 | builder.add_index("convexity_3d", lambda: fixed.volume / ch_volume)
494 | builder.add_index("fractality_2d", lambda: si.fractality_2d(shape))
495 | builder.add_index("fractality_3d", lambda: si.fractality_3d(fixed))
496 | builder.add_index("rectangularity_2d", lambda: shape.area / shape.minimum_rotated_rectangle.area)
497 | builder.add_index("rectangularity_3d", lambda: fixed.volume / obb.volume)
498 | builder.add_index("squareness_2d", lambda: si.squareness(shape))
499 | builder.add_index("cubeness_3d", lambda: si.cubeness(fixed))
500 | builder.add_index("horizontal_elongation", lambda: si.elongation(S, L))
501 | builder.add_index("min_vertical_elongation", lambda: si.elongation(L, height_stats["Max"]))
502 | builder.add_index("max_vertical_elongation", lambda: si.elongation(S, height_stats["Max"]))
503 | builder.add_index("form_factor_3D", lambda: shape.area / math.pow(fixed.volume, 2/3))
504 | builder.add_index("equivalent_rectangularity_index_2d", lambda: si.equivalent_rectangular_index(shape))
505 | builder.add_index("equivalent_prism_index_3d", lambda: si.equivalent_prism_index(fixed, obb))
506 | builder.add_index("proximity_index_2d_", lambda: si.proximity_2d(shape, density=density_2d))
507 | builder.add_index("proximity_index_3d", lambda: si.proximity_3d(tri_mesh, grid, density=density_3d) if len(grid) > 2 else "NA")
508 | builder.add_index("exchange_index_2d", lambda: si.exchange_2d(shape))
509 | builder.add_index("exchange_index_3d", lambda: si.exchange_3d(tri_mesh, density=density_3d))
510 | builder.add_index("spin_index_2d", lambda: si.spin_2d(shape, density=density_2d))
511 | builder.add_index("spin_index_3d", lambda: si.spin_3d(tri_mesh, grid, density=density_3d) if len(grid) > 2 else "NA")
512 | builder.add_index("perimeter_index_2d", lambda: si.perimeter_index(shape))
513 | builder.add_index("circumference_index_3d", lambda: si.circumference_index_3d(tri_mesh))
514 | builder.add_index("depth_index_2d", lambda: si.depth_2d(shape, density=density_2d))
515 | builder.add_index("depth_index_3d", lambda: si.depth_3d(tri_mesh, density=density_3d) if len(grid) > 2 else "NA")
516 | builder.add_index("girth_index_2d", lambda: si.girth_2d(shape))
517 | builder.add_index("girth_index_3d", lambda: si.girth_3d(tri_mesh, grid, density=density_3d) if len(grid) > 2 else "NA")
518 | builder.add_index("dispersion_index_2d", lambda: si.dispersion_2d(shape, density=density_2d))
519 | builder.add_index("dispersion_index_3d", lambda: si.dispersion_3d(tri_mesh, grid, density=density_3d) if len(grid) > 2 else "NA")
520 | builder.add_index("range_index_2d", lambda: si.range_2d(shape))
521 | builder.add_index("range_index_3d", lambda: si.range_3d(tri_mesh))
522 | builder.add_index("roughness_index_2d", lambda: si.roughness_index_2d(shape, density=density_2d))
523 |         builder.add_index("roughness_index_3d", lambda: si.roughness_index_3d(tri_mesh, grid, density_3d) if len(grid) > 2 else "NA")
524 | builder.add_index("shared_walls_area", lambda: shared_area)
525 | builder.add_index("closest_distance", lambda: closest_distance)
526 |
527 | return obj, values
528 |
529 | # Assume semantic surfaces
530 | @click.command()
531 | @click.argument("input", type=click.File("rb"))
532 | @click.option('-o', '--output', type=click.File("wb"))
533 | @click.option('-g', '--gpkg')
534 | @click.option('-v', '--val3dity-report', type=click.File("rb"))
535 | @click.option('-f', '--filter')
536 | @click.option('-r', '--repair', flag_value=True)
537 | @click.option('-p', '--plot-buildings', flag_value=True)
538 | @click.option('--without-indices', flag_value=True)
539 | @click.option('-s', '--single-threaded', flag_value=True)
540 | @click.option('-b', '--break-on-error', flag_value=True)
541 | @click.option('-j', '--jobs', default=1)
542 | @click.option('--density-2d', default=1.0)
543 | @click.option('--density-3d', default=1.0)
544 | def main(input,
545 | output,
546 | gpkg,
547 | val3dity_report,
548 | filter,
549 | repair,
550 | plot_buildings,
551 | without_indices,
552 | single_threaded,
553 | break_on_error,
554 | jobs,
555 | density_2d,
556 | density_3d):
557 | cm = json.load(input)
558 |
559 | if "transform" in cm:
560 | s = cm["transform"]["scale"]
561 | t = cm["transform"]["translate"]
562 | verts = [[v[0] * s[0] + t[0], v[1] * s[1] + t[1], v[2] * s[2] + t[2]]
563 | for v in cm["vertices"]]
564 | else:
565 | verts = cm["vertices"]
566 |
567 | if val3dity_report is None:
568 | report = {}
569 | else:
570 | report = json.load(val3dity_report)
571 |
572 | if not validate_report(report, cm):
573 | print("This doesn't seem like the right report for this file.")
574 | return
575 |
576 | # mesh points
577 | vertices = np.array(verts)
578 |
579 | epointsListSemantics = {}
580 |
581 | stats = {}
582 |
583 | total_xy = np.zeros(36)
584 | total_xz = np.zeros(36)
585 | total_yz = np.zeros(36)
586 |
587 | # Build the index of the city model
588 | p = rtree.index.Property()
589 | p.dimension = 3
590 | r = rtree.index.Index(tree_generator_function(cm, vertices), properties=p)
591 |
592 | if single_threaded or jobs == 1:
593 | for obj in tqdm(cm["CityObjects"]):
594 | errors = get_errors_from_report(report, obj, cm)
595 |
596 | neighbours = get_neighbours(cm, obj, r, verts)
597 |
598 | indices_list = [] if without_indices else None
599 |
600 | try:
601 | obj, vals = process_building(cm["CityObjects"][obj],
602 | obj,
603 | errors,
604 | filter,
605 | repair,
606 | plot_buildings,
607 | density_2d,
608 | density_3d,
609 | vertices,
610 | neighbours,
611 | indices_list)
612 |                 if vals is not None:
613 | stats[obj] = vals
614 | except Exception as e:
615 | print(f"Problem with {obj}")
616 | if break_on_error:
617 | raise e
618 |
619 | else:
620 | from concurrent.futures import ProcessPoolExecutor
621 |
622 | num_objs = len(cm["CityObjects"])
623 | num_cores = jobs
624 |
625 | with ProcessPoolExecutor(max_workers=num_cores) as pool:
626 | with tqdm(total=num_objs) as progress:
627 | futures = []
628 |
629 | for obj in cm["CityObjects"]:
630 | errors = get_errors_from_report(report, obj, cm)
631 |
632 | neighbours = get_neighbours(cm, obj, r, verts)
633 |
634 | indices_list = [] if without_indices else None
635 |
636 | future = pool.submit(process_building,
637 | cm["CityObjects"][obj],
638 | obj,
639 | errors,
640 | filter,
641 | repair,
642 | plot_buildings,
643 | density_2d,
644 | density_3d,
645 | vertices,
646 | neighbours,
647 | indices_list)
648 | future.add_done_callback(lambda p: progress.update())
649 |                     futures.append((obj, future))
650 | 
651 |             # Track which object each future belongs to, so failures are reported correctly
652 |             for obj, future in futures:
653 |                 try:
654 |                     obj, vals = future.result()
655 |                     if vals is not None:
656 |                         stats[obj] = vals
657 |                 except Exception as e:
658 |                     print(f"Problem with {obj}")
659 |                     if break_on_error:
660 |                         raise e
661 |
662 | # orientation_plot(total_xy, bin_edges, title="Orientation plot")
663 | # orientation_plot(total_xz, bin_edges, title="XZ plot")
664 | # orientation_plot(total_yz, bin_edges, title="YZ plot")
665 |
666 | click.echo("Building data frame...")
667 |
668 | df = pd.DataFrame.from_dict(stats, orient="index")
669 | df.index.name = "id"
670 |
671 | if output is None:
672 | print(df)
673 | else:
674 | click.echo("Writing output...")
675 | df.to_csv(output)
676 |
677 |         if gpkg is not None:
678 | gdf = geopandas.GeoDataFrame(df, geometry="geometry")
679 | gdf.to_file(gpkg, driver="GPKG")
680 |
681 | if __name__ == "__main__":
682 | main()
683 |
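For reference, a hedged sketch of driving the command defined above with click's test runner; the file names and option values are placeholders and assume a CityJSON file city.json exists in the working directory and the script is importable as cityStats.

    from click.testing import CliRunner

    runner = CliRunner()
    # Equivalent to: python cityStats.py city.json -o stats.csv -j 4
    result = runner.invoke(main, ["city.json", "-o", "stats.csv", "-j", "4"])
    print(result.exit_code)
    print(result.output)
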
--------------------------------------------------------------------------------
/cityjson.py:
--------------------------------------------------------------------------------
1 | """Module with functions for manipulating CityJSON data"""
2 |
3 | import numpy as np
4 | from shapely.geometry import Polygon, MultiPolygon
5 | from helpers.geometry import project_2d, surface_normal, triangulate, triangulate_polygon
6 | import pyvista as pv
7 |
8 | def get_surface_boundaries(geom):
9 | """Returns the boundaries for all surfaces"""
10 |
11 | if geom["type"] == "MultiSurface" or geom["type"] == "CompositeSurface":
12 | return geom["boundaries"]
13 | elif geom["type"] == "Solid":
14 | return geom["boundaries"][0]
15 | else:
16 | raise Exception("Geometry not supported")
17 |
18 | def get_points(geom, verts):
19 | """Return the points of the geometry"""
20 |
21 | boundaries = get_surface_boundaries(geom)
22 |
23 | f = [v for ring in boundaries for v in ring[0]]
24 | points = [verts[i] for i in f]
25 |
26 | return points
27 |
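A small illustration of the boundary structure these two helpers expect (hand-written sample data, not taken from any real dataset): a MultiSurface is a list of surfaces, each surface a list of rings, and each ring a list of vertex indices.

    verts = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0]]
    geom = {
        "type": "MultiSurface",
        "boundaries": [
            [[0, 1, 2]],  # first surface: one outer ring, no holes
            [[0, 2, 3]],  # second surface
        ],
    }

    get_surface_boundaries(geom)  # [[[0, 1, 2]], [[0, 2, 3]]]
    get_points(geom, verts)       # coordinates of vertices 0, 1, 2, 0, 2, 3
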
28 | def to_shapely(geom, vertices, ground_only=True):
29 | """Returns a shapely geometry of the footprint from a CityJSON geometry"""
30 |
31 | boundaries = get_surface_boundaries(geom)
32 |
33 | if ground_only and "semantics" in geom:
34 | semantics = geom["semantics"]
35 | if geom["type"] == "MultiSurface":
36 | values = semantics["values"]
37 | else:
38 | values = semantics["values"][0]
39 |
40 | ground_idxs = [semantics["surfaces"][i]["type"] == "GroundSurface" for i in values]
41 |
42 | boundaries = np.array(boundaries, dtype=object)[ground_idxs]
43 |
44 | shape = MultiPolygon([Polygon([vertices[v] for v in boundary[0]]) for boundary in boundaries])
45 |
46 | shape = shape.buffer(0)
47 |
48 | return shape
49 |
50 | def to_polydata(geom, vertices):
51 | """Returns the polydata mesh from a CityJSON geometry"""
52 |
53 | boundaries = get_surface_boundaries(geom)
54 |
55 |     f = [[len(r[0])] + r[0] for r in boundaries]
56 | faces = np.hstack(f)
57 |
58 | mesh = pv.PolyData(vertices, faces, n_faces=len(boundaries))
59 |
60 | if "semantics" in geom:
61 | semantics = geom["semantics"]
62 | if geom["type"] == "MultiSurface":
63 | values = semantics["values"]
64 | else:
65 | values = semantics["values"][0]
66 |
67 | mesh.cell_data["semantics"] = [semantics["surfaces"][i]["type"] for i in values]
68 |
69 | return mesh
70 |
71 | def to_triangulated_polydata(geom, vertices, clean=True):
72 | """Returns the polydata mesh from a CityJSON geometry"""
73 |
74 | boundaries = get_surface_boundaries(geom)
75 |
76 | if "semantics" in geom:
77 | semantics = geom["semantics"]
78 | if geom["type"] == "MultiSurface":
79 | values = semantics["values"]
80 | else:
81 | values = semantics["values"][0]
82 |
83 | semantic_types = [semantics["surfaces"][i]["type"] for i in values]
84 |
85 | points = []
86 | triangles = []
87 | semantics = []
88 | triangle_count = 0
89 | for fid, face in enumerate(boundaries):
90 | try:
91 | new_points, new_triangles = triangulate_polygon(face, vertices, len(points))
92 |         except Exception:
93 | continue
94 |
95 | points.extend(new_points)
96 | triangles.extend(new_triangles)
97 | t_count = int(len(new_triangles) / 4)
98 |
99 | triangle_count += t_count
100 |
101 | if "semantics" in geom:
102 | semantics.extend([semantic_types[fid] for _ in np.arange(t_count)])
103 |
104 | mesh = pv.PolyData(points, triangles, n_faces=triangle_count)
105 |
106 | if "semantics" in geom:
107 | mesh["semantics"] = semantics
108 |
109 | if clean:
110 | mesh = mesh.clean()
111 |
112 | return mesh
113 |
114 | def get_bbox(geom, verts):
115 | pts = np.array(get_points(geom, verts))
116 |
117 | return np.hstack([[np.min(pts[:, i]), np.max(pts[:, i])] for i in range(np.shape(pts)[1])])
--------------------------------------------------------------------------------
/extractLod.py:
--------------------------------------------------------------------------------
1 | import click
2 | import json
3 |
4 | @click.command()
5 | @click.argument("input", type=click.File("rb"))
6 | @click.option("-l", "--lod")
7 | @click.option("-o", "--output", type=click.File("w"))
8 | def main(input, lod, output):
9 | cm = json.load(input)
10 |
11 | if "transform" in cm:
12 | s = cm["transform"]["scale"]
13 | t = cm["transform"]["translate"]
14 | verts = [[v[0] * s[0] + t[0], v[1] * s[1] + t[1], v[2] * s[2] + t[2]]
15 | for v in cm["vertices"]]
16 | else:
17 | verts = cm["vertices"]
18 |
19 | lods = set([str(geom["lod"]) for obj in cm["CityObjects"]
20 | for geom in cm["CityObjects"][obj]["geometry"]])
21 |
22 |     if str(lod) not in lods:
23 | print("LoD not found in the dataset!")
24 | exit()
25 |
26 | for co_id in cm["CityObjects"]:
27 | co = cm["CityObjects"][co_id]
28 |
29 | new_geom = []
30 |
31 | for geom in co["geometry"]:
32 | if str(geom["lod"]) == str(lod):
33 | new_geom.append(geom)
34 |
35 | co["geometry"] = new_geom
36 |
37 | json.dump(cm, output)
38 |
39 | if __name__ == "__main__":
40 | main()
--------------------------------------------------------------------------------
/geometry.py:
--------------------------------------------------------------------------------
1 | """Module to manipulate geometry of pyvista meshes"""
2 |
3 | import numpy as np
4 | import pyvista as pv
5 | from helpers.geometry import plane_params, project_mesh, to_3d
6 | from scipy.spatial import distance_matrix
7 | from sklearn.cluster import AgglomerativeClustering
8 |
9 | def get_points_of_type(mesh, surface_type):
10 | """Returns the points that belong to the given surface type"""
11 |
12 |     if "semantics" not in mesh.cell_data:
13 | return []
14 |
15 | idxs = [s == surface_type for s in mesh.cell_data["semantics"]]
16 |
17 | points = np.array([mesh.cell_points(i) for i in range(mesh.number_of_cells)], dtype=object)
18 |
19 |     if not any(idxs):
20 | return []
21 |
22 | return np.vstack(points[idxs])
23 |
24 | def move_to_origin(mesh):
25 | """Moves the object to the origin"""
26 | pts = mesh.points
27 | t = np.min(pts, axis=0)
28 | mesh.points = mesh.points - t
29 |
30 | return mesh, t
31 |
32 | def extrude(shape, min, max):
33 | """Create a pyvista mesh from a polygon"""
34 |
35 | points = np.array([[p[0], p[1], min] for p in shape.boundary.coords])
36 | mesh = pv.PolyData(points).delaunay_2d()
37 |
38 | if min == max:
39 | return mesh
40 |
41 | # Transform to 0, 0, 0 to avoid precision issues
42 | pts = mesh.points
43 | t = np.mean(pts, axis=0)
44 | mesh.points = mesh.points - t
45 |
46 | mesh = mesh.extrude([0.0, 0.0, max - min], capping=True)
47 |
48 |     # Transform back to original coords
49 | # mesh.points = mesh.points + t
50 |
51 | mesh = mesh.clean().triangulate()
52 |
53 | return mesh
54 |
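As a rough sanity check of extrude (illustrative only, assuming shapely is available): extruding a 2 x 1 rectangle over a height of 3 should yield a prism with a volume close to 6.

    from shapely.geometry import Polygon

    footprint = Polygon([(0, 0), (2, 0), (2, 1), (0, 1)])
    prism = extrude(footprint, 0.0, 3.0)
    print(prism.volume)  # expected to be roughly 6.0
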
55 | def area_by_surface(mesh, tri_mesh=None):
56 | """Compute the area per semantic surface"""
57 |
58 | area = {
59 | "GroundSurface": 0,
60 | "WallSurface": 0,
61 | "RoofSurface": 0
62 | }
63 |
64 | point_count = {
65 | "GroundSurface": 0,
66 | "WallSurface": 0,
67 | "RoofSurface": 0
68 | }
69 |
70 | surface_count = {
71 | "GroundSurface": 0,
72 | "WallSurface": 0,
73 | "RoofSurface": 0
74 | }
75 |
76 | # Compute the triangulated surfaces to fix issues with areas
77 | if tri_mesh is None:
78 | tri_mesh = mesh.triangulate()
79 |
80 | if "semantics" in mesh.cell_data:
81 | # Compute area per surface type
82 | sized = tri_mesh.compute_cell_sizes()
83 | surface_areas = sized.cell_data["Area"]
84 |
85 | points_per_cell = np.array([mesh.cell_n_points(i) for i in range(mesh.number_of_cells)])
86 |
87 | for surface_type in area:
88 | triangle_idxs = [s == surface_type for s in tri_mesh.cell_data["semantics"]]
89 | area[surface_type] = sum(surface_areas[triangle_idxs])
90 |
91 | face_idxs = [s == surface_type for s in mesh.cell_data["semantics"]]
92 |
93 | point_count[surface_type] = sum(points_per_cell[face_idxs])
94 | surface_count[surface_type] = sum(face_idxs)
95 |
96 | return area, point_count, surface_count
97 |
98 | def face_planes(mesh):
99 | """Return the params of all planes in a given mesh"""
100 |
101 | return [plane_params(mesh.face_normals[i], mesh.cell_points(i)[0])
102 | for i in range(mesh.n_cells)]
103 |
104 | def cluster_meshes(meshes, threshold=0.1):
105 | """Clusters the faces of the given meshes"""
106 |
107 | n_meshes = len(meshes)
108 |
109 | # Compute the "absolute" plane params for every face of the two meshes
110 | planes = [face_planes(mesh) for mesh in meshes]
111 | mesh_ids = [[m for _ in range(meshes[m].n_cells)] for m in range(n_meshes)]
112 |
113 | # Find the common planes between the two faces
114 | all_planes = np.concatenate(planes)
115 | all_labels, n_clusters = cluster_faces(all_planes, threshold)
116 | areas = []
117 |
118 |     labels = np.array_split(all_labels, np.cumsum([meshes[m].n_cells for m in range(n_meshes - 1)]))
119 |
120 | return labels, n_clusters
121 |
122 | def cluster_faces(data, threshold=0.1):
123 | """Clusters the given planes"""
124 | ndata = np.array(data)
125 |
126 | dm1 = distance_matrix(ndata, ndata)
127 | dm2 = distance_matrix(ndata, -ndata)
128 |
129 | dist_mat = np.minimum(dm1, dm2)
130 |
131 | clustering = AgglomerativeClustering(n_clusters=None,
132 | distance_threshold=threshold,
133 | affinity='precomputed',
134 | linkage='average').fit(dist_mat)
135 |
136 | return clustering.labels_, clustering.n_clusters_
137 |
138 | def intersect_surfaces(meshes):
139 | """Return the intersection between the surfaces of multiple meshes"""
140 |
141 | def get_area_from_ring(areas, area, geom, normal, origin, subtract=False):
142 | pts = to_3d(geom.coords, normal, origin)
143 | common_mesh = pv.PolyData(pts, faces=[len(pts)] + list(range(len(pts))))
144 | if subtract:
145 | common_mesh["area"] = [-area]
146 | else:
147 | common_mesh["area"] = [area]
148 | areas.append(common_mesh)
149 |
150 | def get_area_from_polygon(areas, geom, normal, origin):
151 | # polygon with holes:
152 | if geom.boundary.type == 'MultiLineString':
153 | get_area_from_ring(areas, geom.area, geom.boundary[0], normal, origin)
154 | for sgeom in geom.boundary[1:]:
155 | get_area_from_ring(areas, 0, sgeom, normal, origin, subtract=True)
156 | # polygon without holes:
157 | elif geom.boundary.type == 'LineString':
158 | get_area_from_ring(areas, geom.area, geom.boundary, normal, origin)
159 |
160 | n_meshes = len(meshes)
161 |
162 | areas = []
163 |
164 | labels, n_clusters = cluster_meshes(meshes)
165 |
166 | for plane in range(n_clusters):
167 | # For every common plane, extract the faces that belong to it
168 | idxs = [[i for i, p in enumerate(labels[m]) if p == plane] for m in range(n_meshes)]
169 |
170 | if any([len(idx) == 0 for idx in idxs]):
171 | continue
172 |
173 | msurfaces = [mesh.extract_cells(idxs[i]).extract_surface() for i, mesh in enumerate(meshes)]
174 |
175 | # Set the normal and origin point for a plane to project the faces
176 | origin = msurfaces[0].clean().points[0]
177 | normal = msurfaces[0].face_normals[0]
178 |
179 | # Create the two 2D polygons by projecting the faces
180 | polys = [project_mesh(msurface, normal, origin) for msurface in msurfaces]
181 |
182 | # Intersect the 2D polygons
183 | inter = polys[0]
184 | for i in range(1, len(polys)):
185 | inter = inter.intersection(polys[i])
186 |
187 | if inter.area > 0.001:
188 | if inter.type == "MultiPolygon" or inter.type == "GeometryCollection":
189 | for geom in inter.geoms:
190 | if geom.type != "Polygon":
191 | continue
192 | get_area_from_polygon(areas, geom, normal, origin)
193 | elif inter.type == "Polygon":
194 | get_area_from_polygon(areas, inter, normal, origin)
195 |
196 | return areas
197 |
--------------------------------------------------------------------------------
/helpers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tudelft3d/3d-building-metrics/ca50dae866c66d51845ac383f2b64f190409d092/helpers/__init__.py
--------------------------------------------------------------------------------
/helpers/geometry.py:
--------------------------------------------------------------------------------
1 | """Module with functions for 3D geometrical operations"""
2 |
3 | import numpy as np
4 | import mapbox_earcut as earcut
5 | import pyvista as pv
6 | from shapely.geometry import Polygon, MultiPolygon
7 |
8 | def surface_normal(poly):
9 | n = [0.0, 0.0, 0.0]
10 |
11 | for i, v_curr in enumerate(poly):
12 | v_next = poly[(i+1) % len(poly)]
13 | n[0] += (v_curr[1] - v_next[1]) * (v_curr[2] + v_next[2])
14 | n[1] += (v_curr[2] - v_next[2]) * (v_curr[0] + v_next[0])
15 | n[2] += (v_curr[0] - v_next[0]) * (v_curr[1] + v_next[1])
16 |
17 | if all([c == 0 for c in n]):
18 | raise ValueError("No normal. Possible colinear points!")
19 |
20 | normalised = [i/np.linalg.norm(n) for i in n]
21 |
22 | return normalised
23 |
24 | def axes_of_normal(normal):
25 | """Returns an x-axis and y-axis on a plane of the given normal"""
26 | if normal[2] > 0.001 or normal[2] < -0.001:
27 | x_axis = [1, 0, -normal[0]/normal[2]]
28 | elif normal[1] > 0.001 or normal[1] < -0.001:
29 | x_axis = [1, -normal[0]/normal[1], 0]
30 | else:
31 | x_axis = [-normal[1] / normal[0], 1, 0]
32 |
33 | x_axis = x_axis / np.linalg.norm(x_axis)
34 | y_axis = np.cross(normal, x_axis)
35 |
36 | return x_axis, y_axis
37 |
38 | def project_2d(points, normal, origin=None):
39 | if origin is None:
40 | origin = points[0]
41 |
42 | x_axis, y_axis = axes_of_normal(normal)
43 |
44 | return [[np.dot(p - origin, x_axis), np.dot(p - origin, y_axis)] for p in points]
45 |
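A quick worked example of the two functions above (made-up coordinates): a counter-clockwise triangle in the z = 0 plane has a normal of roughly (0, 0, 1), and projecting it onto that plane simply recovers its x/y coordinates.

    pts = np.array([[0.0, 0.0, 0.0], [2.0, 0.0, 0.0], [2.0, 1.0, 0.0]])
    normal = surface_normal(pts)  # approximately [0, 0, 1]
    project_2d(pts, normal)       # approximately [[0, 0], [2, 0], [2, 1]]
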
46 | def triangulate(mesh):
47 | """Triangulates a mesh in the proper way"""
48 |
49 | final_mesh = pv.PolyData()
50 | n_cells = mesh.n_cells
51 | for i in np.arange(n_cells):
52 | if not mesh.cell_type(i) in [5, 6, 7, 9, 10]:
53 | continue
54 |
55 | pts = mesh.cell_points(i)
56 | p = project_2d(pts, mesh.face_normals[i])
57 | result = earcut.triangulate_float32(p, [len(p)])
58 |
59 | t_count = len(result.reshape(-1,3))
60 | triangles = np.hstack([[3] + list(t) for t in result.reshape(-1,3)])
61 |
62 | new_mesh = pv.PolyData(pts, triangles, n_faces=t_count)
63 | for k in mesh.cell_data:
64 | new_mesh[k] = [mesh.cell_data[k][i] for _ in np.arange(t_count)]
65 |
66 | final_mesh = final_mesh + new_mesh
67 |
68 | return final_mesh
69 |
70 | def triangulate_polygon(face, vertices, offset = 0):
71 | """Returns the points and triangles for a given CityJSON polygon"""
72 |
73 | points = vertices[np.hstack(face)]
74 | normal = surface_normal(points)
75 | holes = [0]
76 | for ring in face:
77 | holes.append(len(ring) + holes[-1])
78 | holes = holes[1:]
79 |
80 | points_2d = project_2d(points, normal)
81 |
82 | result = earcut.triangulate_float32(points_2d, holes)
83 |
84 | result += offset
85 |
86 | t_count = len(result.reshape(-1,3))
87 | if t_count == 0:
88 | return points, []
89 | triangles = np.hstack([[3] + list(t) for t in result.reshape(-1,3)])
90 |
91 | return points, triangles
92 |
93 | def plane_params(normal, origin, rounding=2):
94 | """Returns the params (a, b, c, d) of the plane equation for the given
95 | normal and origin point.
96 | """
97 |     a, b, c = np.round(normal, 3)
98 | x0, y0, z0 = origin
99 |
100 | d = -(a * x0 + b * y0 + c * z0)
101 |
102 | if rounding >= 0:
103 | d = round(d, rounding)
104 |
105 | return np.array([a, b, c, d])
106 |
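For instance (a worked example added for clarity): a horizontal plane through z = 5 with an upward normal satisfies 0*x + 0*y + 1*z - 5 = 0, so the returned parameters are (0, 0, 1, -5).

    plane_params([0.0, 0.0, 1.0], [2.0, 3.0, 5.0])  # array([ 0.,  0.,  1., -5.])
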
107 | def project_mesh(mesh, normal, origin):
108 | """Project the faces of a mesh to the given plane"""
109 |
110 | p = []
111 | for i in range(mesh.n_cells):
112 | pts = mesh.cell_points(i)
113 |
114 | pts_2d = project_2d(pts, normal, origin)
115 |
116 | p.append(Polygon(pts_2d))
117 |
118 | return MultiPolygon(p).buffer(0)
119 |
120 | def to_3d(points, normal, origin):
121 | """Returns the 3d coordinates of a 2d points from a given plane"""
122 |
123 | xa, ya = axes_of_normal(normal)
124 |
125 | mat = np.array([xa, ya])
126 | pts = np.array(points)
127 |
128 | return np.dot(pts, mat) + origin
129 |
--------------------------------------------------------------------------------
/helpers/mesh.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pymesh
3 | import pyvista as pv
4 |
5 | def to_pymesh(mesh):
6 | """Returns a pymesh from a pyvista PolyData"""
7 | v = mesh.points
8 | f = mesh.faces.reshape(-1, 4)[:, 1:]
9 |
10 | return pymesh.form_mesh(v, f)
11 |
12 | def to_pyvista(mesh):
13 | """Return a PolyData from a pymesh"""
14 | v = mesh.vertices
15 | f = mesh.faces
16 |
17 | f = np.hstack([[len(f)] + list(f) for f in mesh.faces])
18 |
19 | return pv.PolyData(v, f, len(mesh.faces))
20 |
21 | def intersect(mesh1, mesh2, engine="igl"):
22 | """Returns the intersection of two meshes (in pymesh format)"""
23 |
24 | return pymesh.boolean(mesh1, mesh2, operation="intersection", engine=engine)
25 |
26 | def symmetric_difference(mesh1, mesh2, engine="igl"):
27 | """Returns the symmetric difference of two volumes (in pymesh format)"""
28 |
29 | return pymesh.boolean(mesh1, mesh2, operation="symmetric_difference", engine=engine)
30 |
31 | def difference(mesh1, mesh2, engine="igl"):
32 | """Returns the difference between two volumes (in pymesh format)"""
33 |
34 | return pymesh.boolean(mesh1, mesh2, operation="difference", engine=engine)
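A hedged usage sketch (assuming pymesh, which can be awkward to install, and its "igl" engine are available): intersecting two unit cubes offset by half a unit along x should leave a slab with a volume of about 0.5.

    box_a = pv.Box(bounds=(0, 1, 0, 1, 0, 1)).triangulate()
    box_b = pv.Box(bounds=(0.5, 1.5, 0, 1, 0, 1)).triangulate()

    result = intersect(to_pymesh(box_a), to_pymesh(box_b))
    print(to_pyvista(result).volume)  # expected to be about 0.5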
--------------------------------------------------------------------------------
/helpers/minimumBoundingBox.py:
--------------------------------------------------------------------------------
1 | # important functions: MinimumBoundingBox
2 |
3 | from scipy.spatial import ConvexHull
4 | from math import sqrt
5 | import numpy as np
6 | from math import atan2, cos, sin, pi
7 | from collections import namedtuple
8 |
9 |
10 | def unit_vector(pt0, pt1):
11 |     # returns a unit vector pointing from pt0 to pt1
12 | dis_0_to_1 = sqrt((pt0[0] - pt1[0])**2 + (pt0[1] - pt1[1])**2)
13 | return (pt1[0] - pt0[0]) / dis_0_to_1, \
14 | (pt1[1] - pt0[1]) / dis_0_to_1
15 |
16 |
17 | def orthogonal_vector(vector):
18 |     # returns an orthogonal/perpendicular vector of equal length
19 | return -1 * vector[1], vector[0]
20 |
21 |
22 | def bounding_area(index, hull):
23 | unit_vector_p = unit_vector(hull[index], hull[index+1])
24 | unit_vector_o = orthogonal_vector(unit_vector_p)
25 |
26 | dis_p = tuple(np.dot(unit_vector_p, pt) for pt in hull)
27 | dis_o = tuple(np.dot(unit_vector_o, pt) for pt in hull)
28 |
29 | min_p = min(dis_p)
30 | min_o = min(dis_o)
31 | len_p = max(dis_p) - min_p
32 | len_o = max(dis_o) - min_o
33 |
34 | return {'area': len_p * len_o,
35 | 'length_parallel': len_p,
36 | 'length_orthogonal': len_o,
37 | 'rectangle_center': (min_p + len_p / 2, min_o + len_o / 2),
38 | 'unit_vector': unit_vector_p,
39 | }
40 |
41 |
42 | def to_xy_coordinates(unit_vector_angle, point):
43 | # returns converted unit vector coordinates in x, y coordinates
44 | angle_orthogonal = unit_vector_angle + pi / 2
45 | return point[0] * cos(unit_vector_angle) + point[1] * cos(angle_orthogonal), \
46 | point[0] * sin(unit_vector_angle) + point[1] * sin(angle_orthogonal)
47 |
48 |
49 | def rotate_points(center_of_rotation, angle, points):
50 | # Requires: center_of_rotation to be a 2d vector. ex: (1.56, -23.4)
51 | # angle to be in radians
52 | # points to be a list or tuple of points. ex: ((1.56, -23.4), (1.56, -23.4))
53 | # Effects: rotates a point cloud around the center_of_rotation point by angle
54 | rot_points = []
55 | ang = []
56 | for pt in points:
57 | diff = tuple([pt[d] - center_of_rotation[d] for d in range(2)])
58 | diff_angle = atan2(diff[1], diff[0]) + angle
59 | ang.append(diff_angle)
60 | diff_length = sqrt(sum([d**2 for d in diff]))
61 | rot_points.append((center_of_rotation[0] + diff_length * cos(diff_angle),
62 | center_of_rotation[1] + diff_length * sin(diff_angle)))
63 |
64 | return rot_points
65 |
66 |
67 | def rectangle_corners(rectangle):
68 |     # Requires: a rectangle dict as built in MinimumBoundingBox (bounding_area output plus unit_vector_angle)
69 | # Effects: returns the corner locations of the bounding rectangle
70 | corner_points = []
71 | for i1 in (.5, -.5):
72 | for i2 in (i1, -1 * i1):
73 | corner_points.append((rectangle['rectangle_center'][0] + i1 * rectangle['length_parallel'],
74 | rectangle['rectangle_center'][1] + i2 * rectangle['length_orthogonal']))
75 |
76 | return rotate_points(rectangle['rectangle_center'], rectangle['unit_vector_angle'], corner_points)
77 |
78 |
79 | BoundingBox = namedtuple('BoundingBox', ('area',
80 | 'length_parallel',
81 | 'length_orthogonal',
82 | 'rectangle_center',
83 | 'unit_vector',
84 | 'unit_vector_angle',
85 | 'corner_points'
86 | )
87 | )
88 |
89 |
90 | # use this function to find the listed properties of the minimum bounding box of a point cloud
91 | def MinimumBoundingBox(points):
92 | # Requires: points to be a list or tuple of 2D points. ex: ((5, 2), (3, 4), (6, 8))
93 | # needs to be more than 2 points
94 | # Effects: returns a namedtuple that contains:
95 | # area: area of the rectangle
96 | # length_parallel: length of the side that is parallel to unit_vector
97 | # length_orthogonal: length of the side that is orthogonal to unit_vector
98 | # rectangle_center: coordinates of the rectangle center
99 | # (use rectangle_corners to get the corner points of the rectangle)
100 |     #       unit_vector: direction of the length_parallel side
101 |     #       (its orthogonal vector can be found with the orthogonal_vector function)
102 |     #       unit_vector_angle: angle of the unit vector, in radians
103 | # corner_points: set that contains the corners of the rectangle
104 |
105 | if len(points) <= 2: raise ValueError('More than two points required.')
106 |
107 | hull_ordered = [points[index] for index in ConvexHull(points).vertices]
108 | hull_ordered.append(hull_ordered[0])
109 | hull_ordered = tuple(hull_ordered)
110 |
111 | min_rectangle = bounding_area(0, hull_ordered)
112 | for i in range(1, len(hull_ordered)-1):
113 | rectangle = bounding_area(i, hull_ordered)
114 | if rectangle['area'] < min_rectangle['area']:
115 | min_rectangle = rectangle
116 |
117 | min_rectangle['unit_vector_angle'] = atan2(min_rectangle['unit_vector'][1], min_rectangle['unit_vector'][0])
118 | min_rectangle['rectangle_center'] = to_xy_coordinates(min_rectangle['unit_vector_angle'], min_rectangle['rectangle_center'])
119 |
120 | # this is ugly but a quick hack and is being changed in the speedup branch
121 | return BoundingBox(
122 | area = min_rectangle['area'],
123 | length_parallel = min_rectangle['length_parallel'],
124 | length_orthogonal = min_rectangle['length_orthogonal'],
125 | rectangle_center = min_rectangle['rectangle_center'],
126 | unit_vector = min_rectangle['unit_vector'],
127 | unit_vector_angle = min_rectangle['unit_vector_angle'],
128 | corner_points = set(rectangle_corners(min_rectangle))
129 | )
130 |
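A minimal usage example with made-up points: for an axis-aligned 4 x 2 rectangle (plus one interior point) the minimum bounding box is the rectangle itself.

    bb = MinimumBoundingBox(((0, 0), (4, 0), (4, 2), (0, 2), (2, 1)))
    print(bb.area)                                   # approximately 8.0
    print(bb.length_parallel, bb.length_orthogonal)  # approximately 4.0 and 2.0 (in some order)
    print(bb.corner_points)                          # the four corners of the rectangle
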
--------------------------------------------------------------------------------
/helpers/smallestenclosingcircle.py:
--------------------------------------------------------------------------------
1 | #
2 | # Smallest enclosing circle - Library (Python)
3 | #
4 | # Copyright (c) 2020 Project Nayuki
5 | # https://www.nayuki.io/page/smallest-enclosing-circle
6 | #
7 | # This program is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU Lesser General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU Lesser General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU Lesser General Public License
18 | # along with this program (see COPYING.txt and COPYING.LESSER.txt).
19 | # If not, see <https://www.gnu.org/licenses/>.
20 | #
21 |
22 | import math, random
23 |
24 |
25 | # Data conventions: A point is a pair of floats (x, y). A circle is a triple of floats (center x, center y, radius).
26 |
27 | # Returns the smallest circle that encloses all the given points. Runs in expected O(n) time, randomized.
28 | # Input: A sequence of pairs of floats or ints, e.g. [(0,5), (3.1,-2.7)].
29 | # Output: A triple of floats representing a circle.
30 | # Note: If 0 points are given, None is returned. If 1 point is given, a circle of radius 0 is returned.
31 | #
32 | # Initially: No boundary points known
33 | def make_circle(points):
34 | # Convert to float and randomize order
35 | shuffled = [(float(x), float(y)) for (x, y) in points]
36 | random.shuffle(shuffled)
37 |
38 | # Progressively add points to circle or recompute circle
39 | c = None
40 | for (i, p) in enumerate(shuffled):
41 | if c is None or not is_in_circle(c, p):
42 | c = _make_circle_one_point(shuffled[ : i + 1], p)
43 | return c
44 |
45 |
46 | # One boundary point known
47 | def _make_circle_one_point(points, p):
48 | c = (p[0], p[1], 0.0)
49 | for (i, q) in enumerate(points):
50 | if not is_in_circle(c, q):
51 | if c[2] == 0.0:
52 | c = make_diameter(p, q)
53 | else:
54 | c = _make_circle_two_points(points[ : i + 1], p, q)
55 | return c
56 |
57 |
58 | # Two boundary points known
59 | def _make_circle_two_points(points, p, q):
60 | circ = make_diameter(p, q)
61 | left = None
62 | right = None
63 | px, py = p
64 | qx, qy = q
65 |
66 | # For each point not in the two-point circle
67 | for r in points:
68 | if is_in_circle(circ, r):
69 | continue
70 |
71 | # Form a circumcircle and classify it on left or right side
72 | cross = _cross_product(px, py, qx, qy, r[0], r[1])
73 | c = make_circumcircle(p, q, r)
74 | if c is None:
75 | continue
76 | elif cross > 0.0 and (left is None or _cross_product(px, py, qx, qy, c[0], c[1]) > _cross_product(px, py, qx, qy, left[0], left[1])):
77 | left = c
78 | elif cross < 0.0 and (right is None or _cross_product(px, py, qx, qy, c[0], c[1]) < _cross_product(px, py, qx, qy, right[0], right[1])):
79 | right = c
80 |
81 | # Select which circle to return
82 | if left is None and right is None:
83 | return circ
84 | elif left is None:
85 | return right
86 | elif right is None:
87 | return left
88 | else:
89 | return left if (left[2] <= right[2]) else right
90 |
91 |
92 | def make_diameter(a, b):
93 | cx = (a[0] + b[0]) / 2
94 | cy = (a[1] + b[1]) / 2
95 | r0 = math.hypot(cx - a[0], cy - a[1])
96 | r1 = math.hypot(cx - b[0], cy - b[1])
97 | return (cx, cy, max(r0, r1))
98 |
99 |
100 | def make_circumcircle(a, b, c):
101 | # Mathematical algorithm from Wikipedia: Circumscribed circle
102 | ox = (min(a[0], b[0], c[0]) + max(a[0], b[0], c[0])) / 2
103 | oy = (min(a[1], b[1], c[1]) + max(a[1], b[1], c[1])) / 2
104 | ax = a[0] - ox; ay = a[1] - oy
105 | bx = b[0] - ox; by = b[1] - oy
106 | cx = c[0] - ox; cy = c[1] - oy
107 | d = (ax * (by - cy) + bx * (cy - ay) + cx * (ay - by)) * 2.0
108 | if d == 0.0:
109 | return None
110 | x = ox + ((ax*ax + ay*ay) * (by - cy) + (bx*bx + by*by) * (cy - ay) + (cx*cx + cy*cy) * (ay - by)) / d
111 | y = oy + ((ax*ax + ay*ay) * (cx - bx) + (bx*bx + by*by) * (ax - cx) + (cx*cx + cy*cy) * (bx - ax)) / d
112 | ra = math.hypot(x - a[0], y - a[1])
113 | rb = math.hypot(x - b[0], y - b[1])
114 | rc = math.hypot(x - c[0], y - c[1])
115 | return (x, y, max(ra, rb, rc))
116 |
117 |
118 | _MULTIPLICATIVE_EPSILON = 1 + 1e-14
119 |
120 | def is_in_circle(c, p):
121 | return c is not None and math.hypot(p[0] - c[0], p[1] - c[1]) <= c[2] * _MULTIPLICATIVE_EPSILON
122 |
123 |
124 | # Returns twice the signed area of the triangle defined by (x0, y0), (x1, y1), (x2, y2).
125 | def _cross_product(x0, y0, x1, y1, x2, y2):
126 | return (x1 - x0) * (y2 - y0) - (y1 - y0) * (x2 - x0)
127 |
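A small usage example (points chosen by hand): the circle with (0, 0) and (2, 0) as a diameter already contains (1, 1), so it is the smallest enclosing circle.

    cx, cy, r = make_circle([(0, 0), (2, 0), (1, 1)])
    print(cx, cy, r)  # expected: 1.0 0.0 1.0
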
--------------------------------------------------------------------------------
/notebook.sh:
--------------------------------------------------------------------------------
1 | export DISPLAY=:99.0;
2 | # export VTKI_OFF_SCREEN=True;
3 | export PYVISTA_OFF_SCREEN=true;
4 | export PYVISTA_USE_IPYVTK=true
5 | Xvfb :99 -screen 0 1024x768x24 > /dev/null 2>&1 &
6 | ~/pymesh_new/bin/jupyter-notebook
7 |
--------------------------------------------------------------------------------
/pyvista_tests.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "attachments": {},
5 | "cell_type": "markdown",
6 | "id": "7f3186d1",
7 | "metadata": {},
8 | "source": [
9 | "# Playing with CityJSON and pyvista\n",
10 | "\n",
11 | "This is a notebook with experiments and demonstration of what we can do with `pyvista` and `pymesh`\n",
12 | "\n",
13 | "First we do some imports:"
14 | ]
15 | },
16 | {
17 | "cell_type": "code",
18 | "execution_count": null,
19 | "id": "ac5dd1cb",
20 | "metadata": {},
21 | "outputs": [],
22 | "source": [
23 | "import json\n",
24 | "import numpy as np\n",
25 | "import pyvista as pv\n",
26 | "from pymeshfix import MeshFix\n",
27 | "import cityjson\n",
28 | "import shapely\n",
29 | "import math\n",
30 | "from tqdm import tqdm\n",
31 | "import os\n",
32 | "\n",
33 | "def rpath(path):\n",
34 | " return os.path.expanduser(path)"
35 | ]
36 | },
37 | {
38 | "attachments": {},
39 | "cell_type": "markdown",
40 | "id": "9eea4e4a",
41 | "metadata": {},
42 | "source": [
43 | "Let's load some CityJSON file and have some minor boilerplate code:"
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": null,
49 | "id": "b546fa93",
50 | "metadata": {},
51 | "outputs": [],
52 | "source": [
53 | "float_formatter = \"{:.3f}\".format\n",
54 | "np.set_printoptions(formatter={'float_kind':float_formatter})\n",
55 | "\n",
56 | "bag_tile = 7173\n",
57 | "\n",
58 | "models = {\n",
59 | " \"DenHaag\": \"\",\n",
60 | " \"Helsinki\": rpath(\"~/Dropbox/CityJSON/Helsinki/CityGML_BUILDINGS_LOD2_NOTEXTURES_672496x2.json\"),\n",
61 | " \"Vienna\": rpath(\"~/Dropbox/CityJSON/Vienna/Vienna_102081.json\"),\n",
62 | " \"Montreal\": rpath(\"~/Dropbox/CityJSON/Montreal/VM05_2009.json\"),\n",
63 | " \"random\": rpath(\"~/Downloads/random10_1.json\"),\n",
64 | " \"Delfshaven\": rpath(\"~/Dropbox/CityJSON/rotterdam/Version Old/3-20-DELFSHAVEN_uncompressed.json\"),\n",
65 | " \"NYC\": rpath(\"~/Dropbox/CityJSON/NewYork/NYCsubset.json\"),\n",
66 | " \"bag_tile\": rpath(f\"~/3DBAG_09/LoD2.2/{bag_tile}.json\")\n",
67 | "}\n",
68 | "\n",
69 | "filename = models[\"bag_tile\"]\n",
70 | "\n",
71 | "with open(filename) as file:\n",
72 | " cm = json.load(file)\n",
73 | "\n",
74 | "if \"transform\" in cm:\n",
75 | " s = cm[\"transform\"][\"scale\"]\n",
76 | " t = cm[\"transform\"][\"translate\"]\n",
77 | " verts = [[v[0] * s[0] + t[0], v[1] * s[1] + t[1], v[2] * s[2] + t[2]]\n",
78 | " for v in cm[\"vertices\"]]\n",
79 | "else:\n",
80 | " verts = cm[\"vertices\"]\n",
81 | "\n",
82 | "# mesh points\n",
83 | "vertices = np.array(verts)"
84 | ]
85 | },
86 | {
87 | "attachments": {},
88 | "cell_type": "markdown",
89 | "id": "989f82ae",
90 | "metadata": {},
91 | "source": [
92 | "## Load a city object in `pyvista`\n",
93 | "\n",
94 | "Pick a city object to play with:"
95 | ]
96 | },
97 | {
98 | "cell_type": "code",
99 | "execution_count": null,
100 | "id": "36cc657e",
101 | "metadata": {},
102 | "outputs": [],
103 | "source": [
104 | "# obj = \"GUID_816CA7F9-6357-447D-96E3-C74C5E47AABF_2\" # Den Haag\n",
105 | "# obj = list(cm[\"CityObjects\"].keys())[0]\n",
106 | "# obj = \"UUID_LOD2_012092-314dbb09-4cfb-41f1-b664\" # Vienna\n",
107 | "# obj = \"B-201391184551-8C37C51A0CFD\" # Montreal building with open edge\n",
108 | "# B-20139118400-95DC7A2E0FC0 Montreal\n",
109 | "# obj = \"B-201391183935-6B8EA1920647\" # Montreal fails\n",
110 | "# obj = \"BID_df39af63-fd91-4239-98d5-cd63def98167\" # Helsinki\n",
111 | "# obj = \"B-20131218142410-23B22495FF7A\" # random\n",
112 | "# obj = \"{D892C834-12D5-4122-A816-ED49E0609DFF}\" # Delfshaven\n",
113 | "# obj = \"UUID_LOD2_011530-cf0ab883-2798-437c-b530_1\" # Vienna\n",
114 | "# obj = \"NL.IMBAG.Pand.0599100000601918-0\" # 3338 - 102 roughness 3D\n",
115 | "# obj = \"NL.IMBAG.Pand.0344100000096859-0\" # 7108 - high roughness 3D\n",
116 | "# obj = \"NL.IMBAG.Pand.0363100012148630-4\" # 7108 - not that high elongation\n",
117 | "obj = \"NL.IMBAG.Pand.0363100012173535-0\" # 7173 - very low horizontal elongation\n",
118 | "\n",
119 | "building = cm[\"CityObjects\"][obj]"
120 | ]
121 | },
122 | {
123 | "attachments": {},
124 | "cell_type": "markdown",
125 | "id": "e8148e3d",
126 | "metadata": {},
127 | "source": [
128 | "Loading and plotting the first geometry as a pure `pyvista` (no triangulation done). This sometimes doesn't work because VTK doesn't play well with complex polygons (anything over a quad):"
129 | ]
130 | },
131 | {
132 | "cell_type": "code",
133 | "execution_count": null,
134 | "id": "62981075",
135 | "metadata": {},
136 | "outputs": [],
137 | "source": [
138 | "dataset = cityjson.to_polydata(building[\"geometry\"][0], vertices)\n",
139 | "dataset = dataset.clean()\n",
140 | "dataset.plot(show_edges=True)"
141 | ]
142 | },
143 | {
144 | "attachments": {},
145 | "cell_type": "markdown",
146 | "id": "c9a1cdeb",
147 | "metadata": {},
148 | "source": [
149 | "Now let's load the object as a triangulated mesh (the triangulation is handled by us):"
150 | ]
151 | },
152 | {
153 | "cell_type": "code",
154 | "execution_count": null,
155 | "id": "947b6dc9",
156 | "metadata": {},
157 | "outputs": [],
158 | "source": [
159 | "trimesh = cityjson.to_triangulated_polydata(building[\"geometry\"][0], vertices).clean()\n",
160 | "trimesh.plot(show_edges=True)"
161 | ]
162 | },
163 | {
164 | "cell_type": "code",
165 | "execution_count": null,
166 | "id": "fe044b6a",
167 | "metadata": {},
168 | "outputs": [],
169 | "source": [
170 | "trimesh.clean().plot()"
171 | ]
172 | },
173 | {
174 | "attachments": {},
175 | "cell_type": "markdown",
176 | "id": "8a6cf672",
177 | "metadata": {},
178 | "source": [
179 | "## Basic analysis of the mesh\n",
180 | "\n",
181 | "First, let's extract some feature edges:"
182 | ]
183 | },
184 | {
185 | "cell_type": "code",
186 | "execution_count": null,
187 | "id": "3558ad7a",
188 | "metadata": {},
189 | "outputs": [],
190 | "source": [
191 | "trimesh.clean().extract_feature_edges().plot()"
192 | ]
193 | },
194 | {
195 | "attachments": {},
196 | "cell_type": "markdown",
197 | "id": "2c9cdbdf",
198 | "metadata": {},
199 | "source": [
200 | "We can plot the edges alongside the mesh itself:"
201 | ]
202 | },
203 | {
204 | "cell_type": "code",
205 | "execution_count": null,
206 | "id": "a07663b9",
207 | "metadata": {},
208 | "outputs": [],
209 | "source": [
210 | "p = pv.Plotter()\n",
211 | "\n",
212 | "p.add_mesh(trimesh)\n",
213 | "p.add_mesh(trimesh.extract_feature_edges(), color='black')\n",
214 | "\n",
215 | "p.show()"
216 | ]
217 | },
218 | {
219 | "attachments": {},
220 | "cell_type": "markdown",
221 | "id": "eb44a5ac",
222 | "metadata": {},
223 | "source": [
224 | "Let's check whether there are any open edges, i.e. whether the volume is watertight. Any open edges will be shown as red lines:"
225 | ]
226 | },
227 | {
228 | "cell_type": "code",
229 | "execution_count": null,
230 | "id": "c0148343",
231 | "metadata": {},
232 | "outputs": [],
233 | "source": [
234 | "edges = trimesh.extract_feature_edges(boundary_edges=True,\n",
235 | " feature_edges=False,\n",
236 | " manifold_edges=False)\n",
237 | "\n",
238 | "p = pv.Plotter()\n",
239 | "\n",
240 | "p.add_mesh(trimesh, opacity=1.0, show_edges=True)\n",
241 | "if trimesh.n_open_edges:\n",
242 | " p.add_mesh(edges, color='red', line_width=10)\n",
243 | "\n",
244 | "p.add_title(f\"{obj} {'is watertight' if trimesh.n_open_edges == 0 else f'has {trimesh.n_open_edges} open edges'}\", 8) \n",
245 | "\n",
246 | "p.show()"
247 | ]
248 | },
249 | {
250 | "attachments": {},
251 | "cell_type": "markdown",
252 | "id": "a124920c",
253 | "metadata": {},
254 | "source": [
255 | "We can now voxelize the mesh. If the mesh is not watertight, we have to force the library to avoid checking for surfaces and the result might be slightly incorrect:"
256 | ]
257 | },
258 | {
259 | "cell_type": "code",
260 | "execution_count": null,
261 | "id": "f32358cb",
262 | "metadata": {},
263 | "outputs": [],
264 | "source": [
265 | "clean = dataset.clean()\n",
266 | "voxel = pv.voxelize(clean, density=clean.length/100, check_surface=False)\n",
267 | "voxel.plot(show_edges=True, text=f\"[{obj}] Voxelized\")"
268 | ]
269 | },
270 | {
271 | "attachments": {},
272 | "cell_type": "markdown",
273 | "id": "7e6d8326",
274 | "metadata": {},
275 | "source": [
276 | "Let's compare the voxelized volume against the actual one. This is an indication of the validity of the object:"
277 | ]
278 | },
279 | {
280 | "cell_type": "code",
281 | "execution_count": null,
282 | "id": "5ab5e495",
283 | "metadata": {},
284 | "outputs": [],
285 | "source": [
286 | "print(f\"Voxel: {voxel.volume}\")\n",
287 | "print(f\"Actual: {clean.volume}\")"
288 | ]
289 | },
290 | {
291 | "attachments": {},
292 | "cell_type": "markdown",
293 | "id": "4219b046",
294 | "metadata": {},
295 | "source": [
296 | "We can extract the voxels centers to produce a grid of points for the mesh. This is slightly inconsistent, as some points are in and some outside of the object. This can be further cleaned later by computing the implicit distance of the points:"
297 | ]
298 | },
299 | {
300 | "cell_type": "code",
301 | "execution_count": null,
302 | "id": "4b6eba9c",
303 | "metadata": {},
304 | "outputs": [],
305 | "source": [
306 | "p = pv.Plotter()\n",
307 | "\n",
308 | "p.add_mesh(voxel, opacity=0.2, show_edges=True, color='yellow')\n",
309 | "p.add_mesh(voxel.cell_centers(), color='black')\n",
310 | "p.add_mesh(clean, color='grey')\n",
311 | "p.add_mesh(pv.PolyData(np.mean(voxel.cell_centers().points, axis=0)), color='white')\n",
312 | "\n",
313 | "p.show()"
314 | ]
315 | },
316 | {
317 | "attachments": {},
318 | "cell_type": "markdown",
319 | "id": "56ecfce1",
320 | "metadata": {},
321 | "source": [
322 | "The number of voxels is:"
323 | ]
324 | },
325 | {
326 | "cell_type": "code",
327 | "execution_count": null,
328 | "id": "73fcb247",
329 | "metadata": {},
330 | "outputs": [],
331 | "source": [
332 | "clean = trimesh.clean()\n",
333 | "# clean.points -= np.mean(clean.points, axis=0)\n",
334 | "voxel = pv.voxelize(clean, density=clean.length/100, check_surface=False)\n",
335 | "voxel.n_cells"
336 | ]
337 | },
338 | {
339 | "attachments": {},
340 | "cell_type": "markdown",
341 | "id": "e08b416d",
342 | "metadata": {},
343 | "source": [
344 | "## Compute the oriented bounding box\n",
345 | "\n",
346 | "... or object-aligned bounding box, or minimum bounding rectangle etc.\n",
347 | "\n",
348 | "We can compute this with a library we found on the web. The idea is that for buildings we can assume that an extruded 2D bounding box would work. So, let's do it in 2D first. This is the oriented bounding box:"
349 | ]
350 | },
351 | {
352 | "cell_type": "code",
353 | "execution_count": null,
354 | "id": "a51e8b74",
355 | "metadata": {},
356 | "outputs": [],
357 | "source": [
358 | "from helpers.minimumBoundingBox import MinimumBoundingBox\n",
359 | "\n",
360 | "obb_2d = MinimumBoundingBox([(p[0], p[1]) for p in dataset.clean().points])\n",
361 | "obb_2d.area"
362 | ]
363 | },
364 | {
365 | "attachments": {},
366 | "cell_type": "markdown",
367 | "id": "2b7f5539",
368 | "metadata": {},
369 | "source": [
370 | "Now let's extrude this with `pyvista`:"
371 | ]
372 | },
373 | {
374 | "cell_type": "code",
375 | "execution_count": null,
376 | "id": "eecff725",
377 | "metadata": {},
378 | "outputs": [],
379 | "source": [
380 | "ground_z = np.min(dataset.clean().points[:, 2])\n",
381 | "height = np.max(dataset.clean().points[:, 2]) - ground_z\n",
382 | "box = np.array([[p[0], p[1], ground_z] for p in list(obb_2d.corner_points)])\n",
383 | "\n",
384 | "obb = pv.PolyData(box).delaunay_2d()\n",
385 | "pts = obb.points\n",
386 | "\n",
387 | "t = np.mean(pts, axis=0)\n",
388 | "\n",
389 | "# We need to move the points to the origin before extruding due to VTK's precision issues\n",
390 | "obb.points = obb.points - t\n",
391 | "obb = obb.extrude([0.0, 0.0, height])\n",
392 | "obb.points = obb.points + t"
393 | ]
394 | },
395 | {
396 | "attachments": {},
397 | "cell_type": "markdown",
398 | "id": "e9b98ef6",
399 | "metadata": {},
400 | "source": [
401 | "Let's see how the oriented bounding box fits with the object itself:"
402 | ]
403 | },
404 | {
405 | "cell_type": "code",
406 | "execution_count": null,
407 | "id": "7df0cdae",
408 | "metadata": {},
409 | "outputs": [],
410 | "source": [
411 | "p = pv.Plotter()\n",
412 | "\n",
413 | "p.add_mesh(obb, opacity=0.3)\n",
414 | "p.add_mesh(trimesh)\n",
415 | "\n",
416 | "p.show()"
417 | ]
418 | },
419 | {
420 | "attachments": {},
421 | "cell_type": "markdown",
422 | "id": "d10e4a0a",
423 | "metadata": {},
424 | "source": [
425 | "Sometimes the extrusion produces an invalid volume, so we have to repair it first:"
426 | ]
427 | },
428 | {
429 | "cell_type": "code",
430 | "execution_count": null,
431 | "id": "493ebb29",
432 | "metadata": {},
433 | "outputs": [],
434 | "source": [
435 | "m = MeshFix(obb.clean().triangulate())\n",
436 | "m.repair()\n",
437 | "fixed_obb = m.mesh\n",
438 | "\n",
439 | "fixed_obb.volume"
440 | ]
441 | },
442 | {
443 | "attachments": {},
444 | "cell_type": "markdown",
445 | "id": "30839016",
446 | "metadata": {},
447 | "source": [
448 | "### Fully 3D oriented bounding box\n",
449 | "\n",
450 | "There is a library to compute the bounding box in 3D, but it's not very reliable. This is a small test here:"
451 | ]
452 | },
453 | {
454 | "cell_type": "code",
455 | "execution_count": null,
456 | "id": "98932470",
457 | "metadata": {},
458 | "outputs": [],
459 | "source": [
460 | "from pyobb.obb import OBB\n",
461 | "\n",
462 | "obb_full_3d = OBB.build_from_points(dataset.clean().points)"
463 | ]
464 | },
465 | {
466 | "attachments": {},
467 | "cell_type": "markdown",
468 | "id": "e9cf6acc",
469 | "metadata": {},
470 | "source": [
471 | "### 2D oriented bounding box from `shapely`\n",
472 | "\n",
473 | "Similar to the previous library for 2D extrusion, we can compute the OBB using shapely. Let's extract a flattened version of the object and compute its *minimum rotated rectangle* (aka OBB):"
474 | ]
475 | },
476 | {
477 | "cell_type": "code",
478 | "execution_count": null,
479 | "id": "0cd53bda",
480 | "metadata": {},
481 | "outputs": [],
482 | "source": [
483 | "from cityjson import to_shapely\n",
484 | "\n",
485 | "obb_2d = to_shapely(building[\"geometry\"][0], vertices).minimum_rotated_rectangle\n",
486 | "obb_2d"
487 | ]
488 | },
489 | {
490 | "attachments": {},
491 | "cell_type": "markdown",
492 | "id": "7177ca57",
493 | "metadata": {},
494 | "source": [
495 | "Now we can extrude (we wrote a function about this):"
496 | ]
497 | },
498 | {
499 | "cell_type": "code",
500 | "execution_count": null,
501 | "id": "9d918661",
502 | "metadata": {},
503 | "outputs": [],
504 | "source": [
505 | "from geometry import extrude\n",
506 | "\n",
507 | "ground_z = np.min(dataset.clean().points[:, 2])\n",
508 | "roof_z = np.max(dataset.clean().points[:, 2])\n",
509 | "obb = extrude(obb_2d, ground_z, roof_z)\n",
510 | "\n",
511 | "# p = pv.Plotter()\n",
512 | "\n",
513 | "# p.add_mesh(obb, show_edges=True, opacity=0.3)\n",
514 | "# p.add_mesh(trimesh)\n",
515 | "\n",
516 | "# p.show()"
517 | ]
518 | },
519 | {
520 | "attachments": {},
521 | "cell_type": "markdown",
522 | "id": "b394693f",
523 | "metadata": {},
524 | "source": [
525 | "Let's compare the volumes:"
526 | ]
527 | },
528 | {
529 | "cell_type": "code",
530 | "execution_count": null,
531 | "id": "a359fedf",
532 | "metadata": {},
533 | "outputs": [],
534 | "source": [
535 | "m = MeshFix(obb.clean().triangulate())\n",
536 | "m.repair()\n",
537 | "fixed_obb = m.mesh\n",
538 | "\n",
539 | "print(f\"Volume: {clean.volume}\")\n",
540 | "print(f\"OBB: {obb.volume}\")\n",
541 | "\n",
542 | "# p = pv.Plotter()\n",
543 | "\n",
544 | "# p.add_mesh(obb.clean(), show_edges=True, opacity=0.3)\n",
545 | "# p.add_mesh(trimesh)\n",
546 | "\n",
547 | "# p.show()"
548 | ]
549 | },
550 | {
551 | "attachments": {},
552 | "cell_type": "markdown",
553 | "id": "9746e2fc",
554 | "metadata": {},
555 | "source": [
556 | "## Compute shape metrics\n",
557 | "\n",
558 | "This is a bunch of 2D and 3D metrics that can be used to describe a building:"
559 | ]
560 | },
561 | {
562 | "cell_type": "code",
563 | "execution_count": null,
564 | "id": "4a05088b",
565 | "metadata": {},
566 | "outputs": [],
567 | "source": [
568 | "from shapely.geometry import Point, MultiPoint, Polygon\n",
569 | "import math\n",
570 | "from tqdm.notebook import trange, tqdm\n",
571 | "import miniball\n",
572 | "\n",
573 | "def create_grid_2d(shape, density):\n",
574 | " \"\"\"Return the grid for a given polygon\"\"\"\n",
575 | " \n",
576 | " x_min, y_min, x_max, y_max = shape.bounds\n",
577 | " x = np.arange(x_min, x_max, density)\n",
578 | " y = np.arange(y_min, y_max, density)\n",
579 | " x, y = np.meshgrid(x, y)\n",
580 | " \n",
581 | " x = np.hstack(x)\n",
582 | " y = np.hstack(y)\n",
583 | " \n",
584 | " return [(x[i], y[i]) for i in range(len(x))]\n",
585 | "\n",
586 | "def create_grid_3d(mesh, density, check_surface=False):\n",
587 | " \"\"\"Returns the grid for a given mesh\"\"\"\n",
588 | " voxel = pv.voxelize(mesh, density=density, check_surface=check_surface)\n",
589 | " \n",
590 | " return voxel.cell_centers().points\n",
591 | "\n",
592 | "def distance(x, y):\n",
593 | " \"\"\"Returns the euclidean distance between two points\"\"\"\n",
594 | " \n",
595 | " return math.sqrt(sum([math.pow(x[c] - y[c], 2) for c in range(len(x))]))\n",
596 | "\n",
597 | "def cohesion_2d(shape, grid=None, density=1):\n",
598 | " \"\"\"Returns the cohesion index in 2D for a given polygon\"\"\"\n",
599 | " \n",
600 | " if grid is None:\n",
601 | " grid = create_grid_2d(shape, density)\n",
602 | " \n",
603 | " if isinstance(grid, list):\n",
604 | " grid = MultiPoint(grid).intersection(shape)\n",
605 | " \n",
606 | " d = 0\n",
607 | " for pi in tqdm(grid.geoms, desc=f\"Cohesion 2D (density={density})\"):\n",
608 | " for pj in grid.geoms:\n",
609 | " if pi == pj:\n",
610 | " continue\n",
611 | " \n",
612 | " d += pi.distance(pj)\n",
613 | "\n",
614 | " n = len(grid.geoms)\n",
615 | " return 0.9054 * math.sqrt(shape.area / math.pi) / (1 / (n * (n - 1)) * d)\n",
616 | "\n",
617 | "def cohesion_3d(mesh, grid=None, density=1, check_surface=False):\n",
618 | " \"\"\"Returns the cohesion index in 3D for a given mesh\"\"\"\n",
619 | " \n",
620 | " if grid is None:\n",
621 | "        grid = create_grid_3d(mesh, density=density, check_surface=check_surface)\n",
622 | " \n",
623 | " d = 0\n",
624 | " for pi in tqdm(grid, desc=f\"Cohesion 3D (density={density})\"):\n",
625 | " for pj in grid:\n",
626 | " d += distance(pi, pj)\n",
627 | " \n",
628 | " \n",
629 | " n = len(grid)\n",
630 | " return 36 / 35 * math.pow(3 * mesh.volume / (4 * math.pi), 1/3) / (1 / (n * (n - 1)) * d)\n",
631 | "\n",
632 | "def proximity_2d(shape, density=1, grid=None):\n",
633 | " \"\"\"Returns the proximity index in 2D for a given polygon\"\"\"\n",
634 | " \n",
635 | " if grid is None:\n",
636 | " grid = create_grid_2d(shape, density)\n",
637 | " \n",
638 | " if isinstance(grid, list):\n",
639 | " grid = MultiPoint(grid).intersection(shape)\n",
640 | " \n",
642 | " \n",
643 | " centroid = shape.centroid\n",
644 | " \n",
645 | " return 2 / 3 * math.sqrt(shape.area / math.pi) / np.mean([centroid.distance(p) for p in grid])\n",
646 | "\n",
647 | "def proximity_3d(mesh, grid=None, density=1, check_surface=False):\n",
648 | "    \"\"\"Returns the proximity index in 3D for a given mesh\"\"\"\n",
649 | " \n",
650 | " if grid is None:\n",
651 | " grid = create_grid_3d(mesh, density=density, check_surface=check_surface)\n",
652 | "\n",
653 | " centroid = np.mean(grid, axis=0)\n",
654 | " \n",
655 | " # TODO: Verify the formula here\n",
656 | " r = math.pow(3 * mesh.volume / (4 * math.pi), 1/3)\n",
657 | "\n",
658 | " return (3 * r / 4) / np.mean([distance(centroid, p) for p in grid])\n",
659 | "\n",
660 | "def equal_volume_radius(volume):\n",
661 | " \"\"\"Returns the radius of the equal volume sphere\"\"\"\n",
662 | " \n",
663 | " return math.pow(3 * volume / (4 * math.pi), 1/3)\n",
664 | "\n",
665 | "def equal_volume_sphere(mesh, position=(0, 0, 0)):\n",
666 | " \"\"\"Returns the sphere that has the same volume as the given mesh\"\"\"\n",
667 | " \n",
668 | " r = math.pow(3 * mesh.volume / (4 * math.pi), 1/3)\n",
669 | " \n",
670 | " return pv.Sphere(radius=r, center=position)\n",
671 | "\n",
672 | "def exchange_2d(shape):\n",
673 | " \"\"\"Returns the exchange index in 2D for a given polygon\"\"\"\n",
674 | " \n",
675 | " r = math.sqrt(shape.area / math.pi)\n",
676 | " \n",
677 | " eac = shape.centroid.buffer(r)\n",
678 | " \n",
679 | " return shape.intersection(eac).area / shape.area\n",
680 | "\n",
681 | "def exchange_3d(mesh, evs=None, density=0.25, engine=\"igl\"):\n",
682 | "    \"\"\"Returns the exchange index in 3D for a given mesh\n",
683 | " \n",
684 | " mesh: The pyvista mesh to evaluate\n",
685 | " evs: The equal volume sphere (if provided speeds up the calculation)\n",
686 | " density: If no evs is provided, it is used to create a grid to compute the center of mass\n",
687 | "    engine: The engine for the boolean operations\n",
688 | " \"\"\"\n",
689 | " \n",
690 | " if evs is None:\n",
691 | " voxel = pv.voxelize(mesh, density=density)\n",
692 | " grid = voxel.cell_centers().points\n",
693 | "\n",
694 | " centroid = np.mean(grid, axis=0)\n",
695 | " evs = equal_volume_sphere(mesh, centroid)\n",
696 | " \n",
697 | " pm_mesh = to_pymesh(mesh)\n",
698 | " pm_evs = to_pymesh(evs)\n",
699 | " \n",
700 | " inter = pymesh.boolean(pm_mesh, pm_evs, operation=\"intersection\", engine=engine)\n",
701 | " \n",
702 | " return inter.volume / mesh.volume\n",
703 | "\n",
704 | "def spin_2d(shape, grid=None, density=1):\n",
705 | " if grid is None:\n",
706 | " grid = create_grid_2d(shape, density)\n",
707 | " \n",
708 | " if isinstance(grid, list):\n",
709 | " grid = MultiPoint(grid).intersection(shape)\n",
710 | " \n",
711 | " centroid = shape.centroid\n",
712 | " \n",
713 | " return 0.5 * (shape.area / math.pi) / np.mean([math.pow(centroid.distance(p), 2) for p in grid])\n",
714 | "\n",
715 | "def spin_3d(mesh, grid=None, density=1, check_surface=False):\n",
716 | "    \"\"\"Returns the spin index in 3D for a given mesh\"\"\"\n",
717 | " \n",
718 | " if grid is None:\n",
719 | " voxel = pv.voxelize(mesh, density=density, check_surface=check_surface)\n",
720 | " grid = voxel.cell_centers().points\n",
721 | " \n",
722 | " centroid = np.mean(grid, axis=0)\n",
723 | " \n",
724 | " r = math.pow(3 * mesh.volume / (4 * math.pi), 1/3)\n",
725 | " # TODO: Calculate the actual formula here\n",
726 | " return 3 / 5 * math.pow(r, 2) / np.mean([math.pow(distance(centroid, p), 2) for p in grid])\n",
727 | "\n",
728 | "def perimeter_index(shape):\n",
729 | " return 2 * math.sqrt(math.pi * shape.area) / shape.length\n",
730 | "\n",
731 | "def circumference_index_3d(mesh):\n",
732 | " return 4 * math.pi * math.pow(3 * mesh.volume / (4 * math.pi), 2 / 3) / mesh.area\n",
733 | " \n",
734 | "def depth_2d(shape, grid=None, density=1):\n",
735 | " if grid is None:\n",
736 | " grid = create_grid_2d(shape, density)\n",
737 | " \n",
738 | " if isinstance(grid, list):\n",
739 | " grid = MultiPoint(grid).intersection(shape)\n",
740 | " \n",
741 | " return 3 * np.mean([p.distance(shape.boundary) for p in grid]) / math.sqrt(shape.area / math.pi)\n",
742 | "\n",
743 | "def depth_3d(mesh, grid=None, density=1, check_surface=False):\n",
744 | " \"\"\"Returns the depth index in 3D for a given mesh\"\"\"\n",
745 | " \n",
746 | " if grid is None:\n",
747 | " voxel = pv.voxelize(mesh, density=density, check_surface=check_surface)\n",
748 | " grid = voxel.cell_centers()\n",
749 | " \n",
750 | " dist = grid.compute_implicit_distance(mesh)\n",
751 | " \n",
752 | " r = math.pow(3 * mesh.volume / (4 * math.pi), 1/3)\n",
753 | " return 4 * np.mean(np.absolute(dist[\"implicit_distance\"])) / r\n",
754 | "\n",
755 | "from polylabel import polylabel\n",
756 | "\n",
757 | "def largest_inscribed_circle(shape):\n",
758 | " \"\"\"Returns the largest inscribed circle of a polygon in 2D\"\"\"\n",
759 | "\n",
760 | " centre, r = polylabel([list([list(c)[:2] for c in shape.boundary.coords])], with_distance=True) # ([0.5, 0.5], 0.5)\n",
761 | "\n",
762 | " lic = Point(centre).buffer(r)\n",
763 | " \n",
764 | " return lic\n",
765 | "\n",
766 | "def largest_inscribed_sphere(mesh, grid=None, density=1, check_surface=False):\n",
767 | " \"\"\"Returns the largest inscribed sphere of a mesh in 3D\"\"\"\n",
768 | " \n",
769 | " if grid is None:\n",
770 | " voxel = pv.voxelize(mesh, density=density, check_surface=check_surface)\n",
771 | " grid = voxel.cell_centers()\n",
772 | " \n",
773 | " if not isinstance(grid, pv.PolyData):\n",
774 | " grid = pv.PolyData(grid)\n",
775 | " \n",
776 | " dist = grid.compute_implicit_distance(mesh)\n",
777 | " \n",
778 | " # The largest inscribed circle's radius is the largest (internal) distance,\n",
779 | " # hence the lowest value (as internal distance is negative)\n",
780 | " lis_radius = np.min(dist[\"implicit_distance\"])\n",
781 | " lis_center = dist.points[np.where(dist[\"implicit_distance\"] == lis_radius)][0]\n",
782 | " \n",
783 | " return pv.Sphere(center=lis_center, radius=abs(lis_radius))\n",
784 | "\n",
785 | "def girth_2d(shape):\n",
786 | " \"\"\"Return the girth index in 2D for a given polygon\"\"\"\n",
787 | " \n",
788 | " lic = largest_inscribed_circle(shape)\n",
789 | " \n",
790 | " # Compute the radius as half the bounding box width\n",
791 | " r = (lic.bounds[2] - lic.bounds[0]) / 2\n",
792 | " \n",
793 | " return r / math.sqrt(shape.area / math.pi)\n",
794 | "\n",
795 | "def girth_3d(mesh, grid=None, density=1, check_surface=False):\n",
796 | " \"\"\"Return the girth index in 3D for a given mesh\"\"\"\n",
797 | " \n",
798 | " lis = largest_inscribed_sphere(mesh,\n",
799 | " grid=grid,\n",
800 | " density=density,\n",
801 | " check_surface=check_surface)\n",
802 | " \n",
803 | " r = (lis.bounds[1] - lis.bounds[0]) / 2\n",
804 | " r_evs = math.pow(3 * mesh.volume / (4 * math.pi), 1/3)\n",
805 | " \n",
806 | " return r / r_evs\n",
807 | "\n",
808 | "def range_2d(shape):\n",
809 | " \"\"\"Returns the range index in 2D for a given polygon\"\"\"\n",
810 | " \n",
811 | " from helpers.smallestenclosingcircle import make_circle\n",
812 | "\n",
813 | " x, y, r = make_circle([c[:2] for c in shape.boundary.coords])\n",
814 | " \n",
815 | " return math.sqrt(shape.area / math.pi) / r\n",
816 | "\n",
819 | "def range_3d(mesh):\n",
820 | " \"\"\"Returns the range index in 3D for a given mesh\"\"\"\n",
821 | " \n",
822 | " _, r2 = miniball.get_bounding_ball(mesh.points)\n",
823 | " \n",
824 | " r_scc = math.sqrt(r2)\n",
825 | " \n",
826 | " V = mesh.volume\n",
827 | " \n",
828 | " return math.pow(3 * V / (4 * math.pi), 1/3) / r_scc\n",
829 | "\n",
830 | "fp = to_shapely(building[\"geometry\"][0], vertices)\n",
831 | "\n",
832 | "def print_stats(shape, mesh, density_2d=1, density_3d=2, check_surface=True):\n",
833 | " \"\"\"Computes and prints all stats for a geometry\"\"\"\n",
834 | " \n",
835 | " # Compute the grid for the given mesh\n",
836 | " voxel = pv.voxelize(mesh, density=density_3d, check_surface=check_surface)\n",
837 | " grid = voxel.cell_centers().points\n",
838 | " \n",
839 | "# print(f\"Cohesion | {cohesion_2d(shape):.5f} | {cohesion_3d(mesh, grid):.5f}\")\n",
840 | " print(f\"Proximity | {proximity_2d(shape, density=density_2d):.5f} | {proximity_3d(mesh, grid):.5f}\")\n",
841 | " print(f\"Exchange | {exchange_2d(shape):.5f} | {exchange_3d(mesh, density=density_3d):.5f}\")\n",
842 | " print(f\"Spin | {spin_2d(shape, density=density_2d):.5f} | {spin_3d(mesh, grid):.5f}\")\n",
843 | " print(f\"Perimeter/Circumference | {perimeter_index(shape):.5f} | {circumference_index_3d(mesh):.5f} \")\n",
844 | " print(f\"Depth | {depth_2d(shape, density=density_2d):.5f} | {depth_3d(mesh, density=density_3d):.5f} \")\n",
845 | " print(f\"Girth | {girth_2d(shape):.5f} | {girth_3d(mesh, grid):.5f}\")\n",
846 | " print(f\"Dispersion | {dispersion_2d(shape, density=density_2d):.5f} | {dispersion_3d(mesh, grid, density=density_3d):0.5f}\")\n",
847 | " print(f\"Range | {range_2d(shape):.5f} | {range_3d(mesh):.5f}\")\n",
848 | " print(f\"Roughness index | {roughness_index_2d(shape, density_2d)} | {roughness_index_3d(mesh, grid, density_2d)}\")\n",
849 | "\n",
850 | "# print_stats(fp, clean)\n",
851 | "# print_stats(shapely.geometry.Point(0, 0).buffer(10), pv.Sphere(10), density_3d=pv.Sphere(10).length/50)\n",
852 | "# spin_3d(evs, density=0.25)\n",
853 | "# roughness_index_3d(dataset)\n",
854 | "# print_stats(fp, dataset)"
855 | ]
856 | },
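857 | {
858 | "attachments": {},
859 | "cell_type": "markdown",
860 | "id": "aa11bb22",
861 | "metadata": {},
862 | "source": [
863 | "As a quick sanity check (a small addition, not part of the original analysis), some of the 2D indices defined above can be evaluated on a simple 10 x 10 square, where the expected values are easy to verify by hand (e.g. the perimeter index of a square is 2 * sqrt(pi * A) / P, which is about 0.886):"
864 | ]
865 | },
866 | {
867 | "cell_type": "code",
868 | "execution_count": null,
869 | "id": "bb22cc33",
870 | "metadata": {},
871 | "outputs": [],
872 | "source": [
873 | "# Hypothetical example shape: a 10 x 10 square (not from the dataset)\n",
874 | "square = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])\n",
875 | "\n",
876 | "print(f\"Perimeter index | {perimeter_index(square):.5f}\")\n",
877 | "print(f\"Exchange index  | {exchange_2d(square):.5f}\")\n",
878 | "print(f\"Girth index     | {girth_2d(square):.5f}\")\n",
879 | "print(f\"Range index     | {range_2d(square):.5f}\")"
880 | ]
881 | },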
857 | {
858 | "cell_type": "code",
859 | "execution_count": null,
860 | "id": "68f0dbb5",
861 | "metadata": {},
862 | "outputs": [],
863 | "source": [
864 | "obb_pts = list(fp.minimum_rotated_rectangle.boundary.coords)\n",
865 | "\n",
866 | "S = Point(obb_pts[1]).distance(Point(obb_pts[0]))\n",
867 | "L = Point(obb_pts[2]).distance(Point(obb_pts[1]))\n",
868 | "\n",
869 | "if S > L:\n",
870 | " L, S = S, L\n",
871 | "\n",
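872 | "# Elongation of the footprint's oriented bounding box: 1 - S / L\n",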
872 | "1 - S / L"
873 | ]
874 | },
875 | {
876 | "attachments": {},
877 | "cell_type": "markdown",
878 | "id": "9818ab42",
879 | "metadata": {},
880 | "source": [
881 | "This is our own implementation for creating a grid that wraps the surface of a mesh:"
882 | ]
883 | },
884 | {
885 | "cell_type": "code",
886 | "execution_count": null,
887 | "id": "51a3c974",
888 | "metadata": {
889 | "scrolled": false
890 | },
891 | "outputs": [],
892 | "source": [
893 | "from helpers.geometry import surface_normal\n",
894 | "from shapely.geometry import Polygon\n",
895 | "\n",
896 | "def to_3d(points, normal, origin):\n",
897 | " \"\"\"Translate local 2D coordinates to 3D\"\"\"\n",
898 | " \n",
899 | " x_axis, y_axis = axes_of_normal(normal)\n",
900 | " \n",
901 | " return (np.repeat([origin], len(points), axis=0)\n",
902 | " + np.matmul(points, [x_axis, y_axis])) \n",
903 | "\n",
904 | "def axes_of_normal(normal):\n",
905 | " \"\"\"Returns an x-axis and y-axis on a plane of the given normal\"\"\"\n",
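906 | "    # Pick any vector orthogonal to the normal, avoiding division by a (near-)zero component\n",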
906 | " if normal[2] > 0.001 or normal[2] < -0.001:\n",
907 | " x_axis = [1, 0, -normal[0]/normal[2]];\n",
908 | " elif normal[1] > 0.001 or normal[1] < -0.001:\n",
909 | " x_axis = [1, -normal[0]/normal[1], 0];\n",
910 | " else:\n",
911 | " x_axis = [-normal[1] / normal[0], 1, 0];\n",
912 | " \n",
913 | " x_axis = x_axis / np.linalg.norm(x_axis)\n",
914 | " y_axis = np.cross(normal, x_axis)\n",
915 | "\n",
916 | " return x_axis, y_axis\n",
917 | "\n",
918 | "def project_2d(points, normal):\n",
919 | " origin = points[0]\n",
920 | "\n",
921 | " x_axis, y_axis = axes_of_normal(normal)\n",
922 | " \n",
923 | " return [[np.dot(p - origin, x_axis), np.dot(p - origin, y_axis)] for p in points]\n",
924 | "\n",
925 | "def create_surface_grid(mesh, density=1):\n",
926 | " \"\"\"Create a 2-dimensional grid along the surface of a 3D mesh\"\"\"\n",
927 | " \n",
928 | " result = []\n",
929 | " \n",
930 | " sized = mesh.compute_cell_sizes()\n",
931 | " \n",
932 | " for i in range(mesh.n_cells):\n",
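933 | "        # Only process these VTK cell types: 5=triangle, 6=triangle strip, 7=polygon, 9=quad, 10=tetra\n",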
933 | " if not mesh.cell_type(i) in [5, 6, 7, 9, 10]:\n",
934 | " continue\n",
935 | " \n",
936 | " pts = mesh.cell_points(i)\n",
937 | " \n",
938 | " normal = surface_normal(pts)\n",
939 | " \n",
940 | " pts_2d = project_2d(pts, normal)\n",
941 | " poly_2d = Polygon(pts_2d)\n",
942 | " \n",
943 | " grid = create_grid_2d(poly_2d, density)\n",
944 | " grid = MultiPoint(grid).intersection(poly_2d)\n",
945 | " \n",
946 | " if grid.is_empty:\n",
947 | " continue\n",
948 | " elif grid.geom_type == \"Point\":\n",
949 | " grid = np.array(grid.coords)\n",
950 | " else:\n",
951 | " grid = np.array([list(p.coords[0]) for p in grid.geoms])\n",
952 | " \n",
953 | " # TODO: Randomise the origin\n",
954 | " result.extend(list(to_3d(grid, normal, pts[0])))\n",
955 | " \n",
956 | " return result\n",
957 | "\n",
958 | "s_grid = pv.PolyData(create_surface_grid(trimesh, 1))\n",
959 | "\n",
960 | "p = pv.Plotter()\n",
961 | "\n",
962 | "p.add_mesh(dataset, opacity=0.9)\n",
963 | "# p.add_mesh(clean.extract_cells(82))\n",
964 | "p.add_mesh(s_grid)\n",
965 | "\n",
966 | "p.show()"
967 | ]
968 | },
969 | {
970 | "cell_type": "code",
971 | "execution_count": null,
972 | "id": "e2d9b73d",
973 | "metadata": {},
974 | "outputs": [],
975 | "source": [
976 | "pts = [[1138.221, 340727.568, 36.900],\n",
977 | " [1138.221, 340727.538, 36.900],\n",
978 | " [1138.221, 340727.508, 36.900]]\n",
979 | "\n",
980 | "def surface_normal(poly):\n",
981 | " n = [0.0, 0.0, 0.0]\n",
982 | "\n",
983 | " for i, v_curr in enumerate(poly):\n",
984 | " v_next = poly[(i+1) % len(poly)]\n",
985 | " n[0] += (v_curr[1] - v_next[1]) * (v_curr[2] + v_next[2])\n",
986 | " n[1] += (v_curr[2] - v_next[2]) * (v_curr[0] + v_next[0])\n",
987 | " n[2] += (v_curr[0] - v_next[0]) * (v_curr[1] + v_next[1])\n",
988 | " \n",
989 | " if all([c == 0 for c in n]):\n",
990 | "        raise ValueError(\"No normal. Possibly collinear points!\")\n",
991 | "\n",
992 | " normalised = [i/np.linalg.norm(n) for i in n]\n",
993 | "\n",
994 | " return normalised\n",
995 | "\n",
996 | "surface_normal(pts)"
997 | ]
998 | },
999 | {
1000 | "cell_type": "code",
1001 | "execution_count": null,
1002 | "id": "a351d1a9",
1003 | "metadata": {},
1004 | "outputs": [],
1005 | "source": [
1006 | "def dispersion_2d(shape, density=0.2):\n",
1007 | " \"\"\"Returns the dispersion index in 2d for a given polygon\"\"\"\n",
1008 | " \n",
1009 | " c = shape.centroid\n",
1010 | " b = shape.boundary\n",
1011 | " \n",
1012 | " r = math.sqrt(shape.area / math.pi)\n",
1013 | " \n",
1014 | " r_dev = 0\n",
1015 | " r_ibp = 0\n",
1016 | " for l in np.arange(0, b.length, density):\n",
1017 | " p = b.interpolate(l)\n",
1018 | " \n",
1019 | " r_dev += abs(p.distance(c) - r)\n",
1020 | " r_ibp += p.distance(c)\n",
1021 | " \n",
1022 | " return 1 - (r_dev / r_ibp)\n",
1023 | "\n",
1024 | "from handcalcs import handcalc\n",
1025 | "\n",
1026 | "# @handcalc()\n",
1027 | "def dispersion_3d(mesh, grid, density=0.5):\n",
1028 | " \"\"\"Returns the dispersion index in 3d for a given mesh\"\"\"\n",
1029 | " \n",
1030 | " centroid = np.mean(grid, axis=0)\n",
1031 | " \n",
1032 | " s_grid = create_surface_grid(mesh, density)\n",
1033 | " \n",
1034 | " r = equal_volume_radius(mesh.volume)\n",
1035 | " \n",
1036 | " r_dev = 0\n",
1037 | " r_ibp = 0\n",
1038 | " for p in s_grid:\n",
1039 | " d_i = distance(centroid, p)\n",
1040 | " r_dev += abs(d_i - r)\n",
1041 | " r_ibp += d_i\n",
1042 | "\n",
1043 | " return 1 - (r_dev / r_ibp)\n",
1044 | "\n",
1045 | "# voxel = pv.voxelize(dataset, density=0.5, check_surface=False)\n",
1046 | "# grid = voxel.cell_centers().points\n",
1047 | "# dispersion_3d(dataset, grid)"
1048 | ]
1049 | },
1050 | {
1051 | "cell_type": "code",
1052 | "execution_count": null,
1053 | "id": "868feaf9",
1054 | "metadata": {},
1055 | "outputs": [],
1056 | "source": [
1057 | "def roughness_index_2d(shape, density=0.2):\n",
1058 | " c = shape.centroid\n",
1059 | " b = shape.boundary\n",
1060 | " \n",
1061 | " r_ibp = 0\n",
1062 | " for l in np.arange(0, b.length, density):\n",
1063 | " p = b.interpolate(l)\n",
1064 | " \n",
1065 | " r_ibp += p.distance(c)\n",
1066 | " \n",
1067 | " m_r = r_ibp / math.floor(b.length / density)\n",
1068 | " \n",
1069 | " return 42.62 * math.pow(m_r, 2) / (shape.area + math.pow(shape.length, 2))\n",
1070 | "\n",
1071 | "def roughness_index_3d(mesh, grid, density=0.5):\n",
1072 | " centroid = np.mean(grid, axis=0)\n",
1073 | " \n",
1074 | " s_grid = create_surface_grid(mesh, density)\n",
1075 | " \n",
1076 | " r_ibp = 0\n",
1077 | " for p in s_grid:\n",
1078 | " d_i = distance(centroid, p)\n",
1079 | " r_ibp += d_i\n",
1080 | " \n",
1081 | " m_r = r_ibp / len(s_grid)\n",
1082 | " \n",
1083 | " return 48.735 * math.pow(m_r, 3) / (mesh.volume + math.pow(mesh.area, 3/2))\n",
1084 | "\n",
1085 | "mmm = dataset\n",
1086 | "voxel = pv.voxelize(mmm, density=0.5, check_surface=False)\n",
1087 | "grid = voxel.cell_centers().points\n",
1088 | "roughness_index_3d(mmm, grid, density=0.2)"
1089 | ]
1090 | },
1091 | {
1092 | "attachments": {},
1093 | "cell_type": "markdown",
1094 | "id": "afb64563",
1095 | "metadata": {},
1096 | "source": [
1097 | "Compute the minimum bounding sphere of the building:"
1098 | ]
1099 | },
1100 | {
1101 | "cell_type": "code",
1102 | "execution_count": null,
1103 | "id": "53a8de2d",
1104 | "metadata": {},
1105 | "outputs": [],
1106 | "source": [
1107 | "import miniball\n",
1108 | "C, r2 = miniball.get_bounding_ball(clean.clean().points)\n",
1109 | "\n",
1110 | "print(r2)\n",
1111 | "print(clean.bounds)\n",
1112 | "\n",
1113 | "p = pv.Plotter()\n",
1114 | "\n",
1115 | "p.add_mesh(clean)\n",
1116 | "# p.add_mesh(clean.extract_cells(82))\n",
1117 | "p.add_mesh(pv.Sphere(radius=math.sqrt(r2), center=C), opacity=0.2)\n",
1118 | "\n",
1119 | "p.show()"
1120 | ]
1121 | },
1122 | {
1123 | "attachments": {},
1124 | "cell_type": "markdown",
1125 | "id": "91596ce6",
1126 | "metadata": {},
1127 | "source": [
1128 | "Compute the largest inscribed sphere. This is based on creating a grid of a certain density and then picking the interior point with the largest distance from the boundary (found by computing the implicit distance):"
1129 | ]
1130 | },
1131 | {
1132 | "cell_type": "code",
1133 | "execution_count": null,
1134 | "id": "35f9047d",
1135 | "metadata": {},
1136 | "outputs": [],
1137 | "source": [
1138 | "p = pv.Plotter()\n",
1139 | "\n",
1140 | "p.add_mesh(clean, opacity=0.2)\n",
1141 | "p.add_mesh(largest_inscribed_sphere(clean, density=0.5))\n",
1142 | "\n",
1143 | "p.show()"
1144 | ]
1145 | },
1146 | {
1147 | "attachments": {},
1148 | "cell_type": "markdown",
1149 | "id": "4733446a",
1150 | "metadata": {},
1151 | "source": [
1152 | "## Playing with `pymesh`\n",
1153 | "\n",
1154 | "Since `pyvista` is unreliable regarding boolean operations, this is how to convert to/from `pymesh` to perform them. `pymesh` is harder to install (see [here](https://pymesh.readthedocs.io/en/latest/installation.html)), but the results seem to be robust:"
1155 | ]
1156 | },
1157 | {
1158 | "cell_type": "code",
1159 | "execution_count": null,
1160 | "id": "3200f83a",
1161 | "metadata": {},
1162 | "outputs": [],
1163 | "source": [
1164 | "import pymesh\n",
1165 | "\n",
1166 | "def to_pymesh(mesh):\n",
1167 | " \"\"\"Returns a pymesh from a pyvista PolyData\"\"\"\n",
1168 | " v = mesh.points\n",
1169 | " f = mesh.faces.reshape(-1, 4)[:, 1:]\n",
1170 | "\n",
1171 | " return pymesh.form_mesh(v, f)\n",
1172 | "\n",
1173 | "def to_pyvista(mesh):\n",
1174 | " \"\"\"Return a PolyData from a pymesh\"\"\"\n",
1175 | " if len(mesh.vertices) == 0:\n",
1176 | " return pv.PolyData()\n",
1177 | " \n",
1178 | " v = mesh.vertices\n",
1179 | "    f = np.hstack([[len(face)] + list(face) for face in mesh.faces])\n",
1180 | "    \n",
1182 | " \n",
1183 | " return pv.PolyData(v, f, len(mesh.faces))\n",
1184 | "\n",
1185 | "evs = equal_volume_sphere(clean, position=np.mean(clean.points, axis=0))\n",
1186 | "\n",
1187 | "clean_pm = to_pymesh(clean)\n",
1188 | "evs_pm = to_pymesh(evs)\n",
1189 | "\n",
1190 | "p = pv.Plotter()\n",
1191 | "\n",
1192 | "p.add_mesh(clean)\n",
1193 | "p.add_mesh(evs)\n",
1194 | "\n",
1195 | "p.show()"
1196 | ]
1197 | },
1198 | {
1199 | "cell_type": "code",
1200 | "execution_count": null,
1201 | "id": "486439d2",
1202 | "metadata": {},
1203 | "outputs": [],
1204 | "source": [
1205 | "inter = pymesh.boolean(clean_pm, evs_pm, operation=\"intersection\", engine=\"igl\")"
1206 | ]
1207 | },
1208 | {
1209 | "cell_type": "code",
1210 | "execution_count": null,
1211 | "id": "6c8ee86f",
1212 | "metadata": {},
1213 | "outputs": [],
1214 | "source": [
1215 | "p = pv.Plotter()\n",
1216 | "\n",
1217 | "p.add_mesh(evs, opacity=0.2, color='yellow')\n",
1218 | "p.add_mesh(to_pyvista(inter), color='blue', show_edges=True)\n",
1219 | "\n",
1220 | "p.show()"
1221 | ]
1222 | },
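1223 | {
1224 | "attachments": {},
1225 | "cell_type": "markdown",
1226 | "id": "cc33dd44",
1227 | "metadata": {},
1228 | "source": [
1229 | "For reference (a small addition, not part of the original run), the ratio of the intersection's volume to the building's volume gives the exchange index (cf. `exchange_3d` above):"
1230 | ]
1231 | },
1232 | {
1233 | "cell_type": "code",
1234 | "execution_count": null,
1235 | "id": "dd44ee55",
1236 | "metadata": {},
1237 | "outputs": [],
1238 | "source": [
1239 | "# The pymesh result of the boolean intersection exposes its volume directly\n",
1240 | "print(f\"Exchange index: {inter.volume / clean.volume:.5f}\")"
1241 | ]
1242 | },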
1223 | {
1224 | "attachments": {},
1225 | "cell_type": "markdown",
1226 | "id": "921b4f84",
1227 | "metadata": {},
1228 | "source": [
1229 | "# Checking common walls between buildings"
1230 | ]
1231 | },
1232 | {
1233 | "cell_type": "code",
1234 | "execution_count": null,
1235 | "id": "13effd13",
1236 | "metadata": {},
1237 | "outputs": [],
1238 | "source": [
1239 | "building1 = cm[\"CityObjects\"][\"B-201391182831-87DCB2B9B03A\"]\n",
1240 | "\n",
1241 | "trimesh1 = cityjson.to_triangulated_polydata(building1[\"geometry\"][0], vertices).clean()\n",
1242 | "\n",
1243 | "building2 = cm[\"CityObjects\"][\"B-201391182833-69022997F0C9\"]\n",
1244 | "\n",
1245 | "trimesh2 = cityjson.to_triangulated_polydata(building2[\"geometry\"][0], vertices).clean()\n",
1246 | "\n",
1247 | "p = pv.Plotter()\n",
1248 | "\n",
1249 | "p.add_mesh(trimesh1, color=\"yellow\")\n",
1250 | "p.add_mesh(trimesh2, color=\"red\")\n",
1251 | "\n",
1252 | "# trimesh1.extract_cells(range(27, 29))\n",
1253 | "\n",
1254 | "p.show()"
1255 | ]
1256 | },
1257 | {
1258 | "cell_type": "code",
1259 | "execution_count": null,
1260 | "id": "fbbb4cc2",
1261 | "metadata": {
1262 | "scrolled": false
1263 | },
1264 | "outputs": [],
1265 | "source": [
1266 | "m1 = to_pymesh(trimesh1)\n",
1267 | "m2 = to_pymesh(trimesh2)\n",
1268 | "\n",
1269 | "wall = pymesh.boolean(m1, m2, operation=\"intersection\", engine=\"igl\")\n",
1270 | "\n",
1271 | "to_pyvista(wall)"
1272 | ]
1273 | },
1274 | {
1275 | "cell_type": "code",
1276 | "execution_count": null,
1277 | "id": "4886e5f6",
1278 | "metadata": {},
1279 | "outputs": [],
1280 | "source": [
1281 | "t1 = pv.PolyData([[0,0,0],[0,1,0],[1,0,0]], faces=[3, 0, 1, 2])\n",
1282 | "t2 = pv.PolyData([[0,0,0],[0,1,0],[1,0,0]], faces=[3, 0, 1, 2])\n",
1283 | "\n",
1284 | "m1 = to_pymesh(trimesh1)\n",
1285 | "m2 = to_pymesh(trimesh2)\n",
1286 | "\n",
1287 | "wall = pymesh.boolean(m1, m2, operation=\"intersection\", engine=\"igl\")\n",
1288 | "\n",
1289 | "to_pyvista(wall)"
1290 | ]
1291 | },
1292 | {
1293 | "cell_type": "code",
1294 | "execution_count": null,
1295 | "id": "dc1c7769",
1296 | "metadata": {},
1297 | "outputs": [],
1298 | "source": [
1299 | "from helpers.geometry import surface_normal, project_2d, axes_of_normal\n",
1300 | "from shapely.geometry import MultiPolygon, Polygon\n",
1301 | "\n",
1302 | "def is_on_plane(point, normal, origin):\n",
1303 | "    a, b, c, d = plane_params(normal, origin)\n",
1304 | " \n",
1305 | " x, y, z = point\n",
1306 | " \n",
1307 | " return a * x + b * y + c * z + d == 0\n",
1308 | "\n",
1309 | "# intersect_surfaces(trimesh1, trimesh2)\n",
1310 | "\n",
1311 | "def plane_params(normal, origin, rounding=2, absolute=True):\n",
1312 | " \"\"\"Returns the params (a, b, c, d) of the plane equation\"\"\"\n",
1313 | " a, b, c = np.round_(normal, 3)\n",
1314 | " x0, y0, z0 = origin\n",
1315 | " \n",
1316 | " d = -(a * x0 + b * y0 + c * z0)\n",
1317 | " \n",
1318 | " if rounding >= 0:\n",
1319 | " d = round(d, rounding)\n",
1320 | " \n",
1321 | " return np.array([a, b, c, d])\n",
1322 | "\n",
1323 | "def face_planes(mesh):\n",
1324 | " return [plane_params(mesh.face_normals[i], mesh.cell_points(i)[0]) for i in range(mesh.n_cells)]\n",
1325 | "\n",
1326 | "def project_mesh(mesh, normal, origin):\n",
1327 | " p = []\n",
1328 | " for i in range(mesh.n_cells):\n",
1329 | " pts = mesh.cell_points(i)\n",
1330 | " \n",
1331 | " pts_2d = project_2d(pts, normal, origin)\n",
1332 | " \n",
1333 | " p.append(Polygon(pts_2d))\n",
1334 | " \n",
1335 | " return MultiPolygon(p).buffer(0)\n",
1336 | "\n",
1337 | "def to_3d(polygon, normal, origin):\n",
1338 | " xa, ya = axes_of_normal(normal)\n",
1339 | " \n",
1340 | " mat = np.array([xa, ya])\n",
1341 | " pts = np.array(polygon.boundary.coords)\n",
1342 | " \n",
1343 | " return np.dot(pts, mat) + origin\n",
1344 | "\n",
1345 | "def cluster_meshes(meshes, threshold=0.1):\n",
1346 | " \"\"\"Clusters the faces of the given meshes\"\"\"\n",
1347 | " \n",
1348 | " n_meshes = len(meshes)\n",
1349 | " \n",
1350 | " # Compute the \"absolute\" plane params for every face of the two meshes\n",
1351 | " planes = [face_planes(mesh) for mesh in meshes]\n",
1352 | " mesh_ids = [[m for _ in range(meshes[m].n_cells)] for m in range(n_meshes)]\n",
1353 | " \n",
1354 | " # Find the common planes between the two faces\n",
1355 | " all_planes = np.concatenate(planes)\n",
1356 | " all_labels, n_clusters = cluster_faces(all_planes, threshold)\n",
1357 | " areas = []\n",
1358 | " \n",
1359 | " labels = np.array_split(all_labels, [meshes[m].n_cells for m in range(n_meshes - 1)])\n",
1360 | " \n",
1361 | " return labels, n_clusters\n",
1362 | "\n",
1363 | "def intersect_surfaces(meshes):\n",
1364 | " \"\"\"Return the intersection between the surfaces of multiple meshes\"\"\"\n",
1365 | " \n",
1366 | " n_meshes = len(meshes)\n",
1367 | " \n",
1368 | " areas = []\n",
1369 | " \n",
1370 | " labels, n_clusters = cluster_meshes(meshes)\n",
1371 | " \n",
1372 | " for plane in range(n_clusters):\n",
1373 | " # For every common plane, extract the faces that belong to it\n",
1374 | " idxs = [[i for i, p in enumerate(labels[m]) if p == plane] for m in range(n_meshes)]\n",
1375 | " \n",
1376 | " if any([len(idx) == 0 for idx in idxs]):\n",
1377 | " continue\n",
1378 | " \n",
1379 | " msurfaces = [mesh.extract_cells(idxs[i]).extract_surface() for i, mesh in enumerate(meshes)]\n",
1380 | " \n",
1381 | " # Set the normal and origin point for a plane to project the faces\n",
1382 | " origin = msurfaces[0].clean().points[0]\n",
1383 | " normal = msurfaces[0].face_normals[0]\n",
1384 | " \n",
1385 | " # Create the two 2D polygons by projecting the faces\n",
1386 | " polys = [project_mesh(msurface, normal, origin) for msurface in msurfaces]\n",
1387 | " \n",
1388 | " # Intersect the 2D polygons\n",
1389 | " inter = polys[0]\n",
1390 | " for i in range(1, len(polys)):\n",
1391 | " inter = inter.intersection(polys[i])\n",
1392 | " \n",
1393 | " if inter.area > 0.001:\n",
1394 | " if inter.type == \"MultiPolygon\":\n",
1395 | " for geom in inter.geoms:\n",
1396 | " pts = to_3d(geom, normal, origin)\n",
1397 | " common_mesh = pv.PolyData(pts, faces=[len(pts)] + list(range(len(pts))))\n",
1398 | " common_mesh[\"area\"] = [geom.area]\n",
1399 | " areas.append(common_mesh)\n",
1400 | " else:\n",
1401 | " pts = to_3d(inter, normal, origin)\n",
1402 | " common_mesh = pv.PolyData(pts, faces=[len(pts)] + list(range(len(pts))))\n",
1403 | " common_mesh[\"area\"] = [inter.area]\n",
1404 | " areas.append(common_mesh)\n",
1405 | " \n",
1406 | " return areas\n",
1407 | "\n",
1408 | "def intersect_pairs(mesh, neighbours):\n",
1409 | " return np.hstack([intersect_surfaces([mesh, neighbour]) for neighbour in neighbours])\n",
1410 | "\n",
1411 | "t = np.mean(trimesh1.points, axis=0)\n",
1412 | "trimesh1.points -= t\n",
1413 | "trimesh2.points -= t\n",
1414 | "\n",
1415 | "labels, n_clusters = cluster_meshes([trimesh1, trimesh2])\n",
1416 | "labels\n",
1417 | "\n",
1418 | "# p = pv.Plotter()\n",
1419 | "\n",
1420 | "# p.add_mesh(trimesh1, scalars=labels[0])\n",
1421 | "# p.add_mesh(trimesh2, scalars=labels[1])\n",
1422 | "\n",
1423 | "# p.show()\n",
1424 | "\n",
1425 | "intersect_surfaces([trimesh1, trimesh2])[0].plot()\n",
1426 | "# trimesh1.extract_cells(range(27, 29)).extract_surface().area"
1427 | ]
1428 | },
1429 | {
1430 | "cell_type": "code",
1431 | "execution_count": null,
1432 | "id": "24a00ea2",
1433 | "metadata": {},
1434 | "outputs": [],
1435 | "source": [
1436 | "import rtree.index\n",
1437 | "import cityjson\n",
1438 | "\n",
1439 | "def get_bbox(geom, verts):\n",
1440 | " pts = np.array(cityjson.get_points(geom, verts))\n",
1441 | " \n",
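1442 | "    # Bounds per axis: (xmin, xmax, ymin, ymax, zmin, zmax)\n",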
1442 | " return np.hstack([[np.min(pts[:, i]), np.max(pts[:, i])] for i in range(np.shape(pts)[1])])\n",
1443 | "\n",
1444 | "def generator_function(cm, verts):\n",
1445 | " for i, objid in enumerate(cm[\"CityObjects\"]):\n",
1446 | " obj = cm[\"CityObjects\"][objid]\n",
1447 | " xmin, xmax, ymin, ymax, zmin, zmax = get_bbox(obj[\"geometry\"][0], verts)\n",
1448 | " yield (i, (xmin, ymin, zmin, xmax, ymax, zmax), objid)\n",
1449 | "\n",
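1450 | "# rtree expects interleaved bounds: (xmin, ymin, zmin, xmax, ymax, zmax)\n",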
1450 | "p = rtree.index.Property()\n",
1451 | "p.dimension = 3\n",
1452 | "r = rtree.index.Index(generator_function(cm, vertices), properties=p)"
1453 | ]
1454 | },
1455 | {
1456 | "cell_type": "code",
1457 | "execution_count": null,
1458 | "id": "96199100",
1459 | "metadata": {},
1460 | "outputs": [],
1461 | "source": [
1462 | "def plot_meshes(meshes, **kargs):\n",
1463 | " p = pv.Plotter(**kargs)\n",
1464 | " \n",
1465 | " p.add_mesh(meshes[0], color=\"red\")\n",
1466 | " for mesh in meshes[1:]:\n",
1467 | " p.add_mesh(mesh)\n",
1468 | " \n",
1469 | " p.show()"
1470 | ]
1471 | },
1472 | {
1473 | "cell_type": "code",
1474 | "execution_count": null,
1475 | "id": "6a6cb49d",
1476 | "metadata": {},
1477 | "outputs": [],
1478 | "source": [
1479 | "mainid = \"B-201391183614-E8E64E86E428\"\n",
1480 | "\n",
1481 | "xmin, xmax, ymin, ymax, zmin, zmax = get_bbox(cm[\"CityObjects\"][mainid][\"geometry\"][0], verts)\n",
1482 | "objids = [n.object for n in r.intersection((xmin, ymin, zmin, xmax, ymax, zmax), objects=True) if n.object != mainid]\n",
1483 | "\n",
1484 | "main_mesh = cityjson.to_triangulated_polydata(cm[\"CityObjects\"][mainid][\"geometry\"][0], vertices).clean()\n",
1485 | "meshes = [cityjson.to_triangulated_polydata(cm[\"CityObjects\"][objid][\"geometry\"][0], vertices).clean() for objid in objids]\n",
1486 | "\n",
1487 | "t = np.mean(main_mesh.points, axis=0)\n",
1488 | "\n",
1489 | "main_mesh.points -= t\n",
1490 | "for mesh in meshes:\n",
1491 | " mesh.points -= t\n",
1492 | "\n",
1493 | "plot_meshes([main_mesh] + meshes)"
1494 | ]
1495 | },
1496 | {
1497 | "cell_type": "code",
1498 | "execution_count": null,
1499 | "id": "d7900f75",
1500 | "metadata": {},
1501 | "outputs": [],
1502 | "source": [
1503 | "walls = intersect_pairs(main_mesh, meshes)"
1504 | ]
1505 | },
1506 | {
1507 | "cell_type": "code",
1508 | "execution_count": null,
1509 | "id": "ef79beab",
1510 | "metadata": {},
1511 | "outputs": [],
1512 | "source": [
1513 | "area_pv = sum([wall.triangulate().area for wall in walls])\n",
1514 | "area_2d = sum(wall[\"area\"][0] for wall in walls)\n",
1515 | "\n",
1516 | "print(f\"{area_pv} - {area_2d}\")"
1517 | ]
1518 | },
1519 | {
1520 | "cell_type": "code",
1521 | "execution_count": null,
1522 | "id": "dec1a10c",
1523 | "metadata": {},
1524 | "outputs": [],
1525 | "source": [
1526 | "main_mesh.plot(scalars=cluster_meshes([main_mesh])[0][0])"
1527 | ]
1528 | },
1529 | {
1530 | "cell_type": "code",
1531 | "execution_count": null,
1532 | "id": "3ad1f647",
1533 | "metadata": {},
1534 | "outputs": [],
1535 | "source": [
1536 | "from sklearn.cluster import AgglomerativeClustering\n",
1537 | "import numpy as np\n",
1538 | "import math\n",
1539 | "import scipy.spatial\n",
1540 | "def distance(x, y):\n",
1541 | " \"\"\"Returns the euclidean distance between two points\"\"\"\n",
1542 | " \n",
1543 | " return math.sqrt(sum([math.pow(x[c] - y[c], 2) for c in range(len(x))]))\n",
1544 | "\n",
1545 | "def abs_distance(x, y):\n",
1546 | " \"\"\"Returns the minimum absolute distance\"\"\"\n",
1547 | " \n",
1548 | " return min(distance(x, y), distance(x, [-e for e in y]))\n",
1549 | "\n",
1550 | "t = np.mean(main_mesh.points, axis=0)\n",
1551 | "main_mesh.points -= t\n",
1552 | "\n",
1553 | "def cluster_faces(data, threshold=0.1):\n",
1554 | "# distance_matrix = np.asarray([\n",
1555 | "# [abs_distance(data[first_index], data[second_index]) \n",
1556 | "# for first_index in range(len(data))] \n",
1557 | "# for second_index in range(len(data))])\n",
1558 | " ndata = np.array(data)\n",
1559 | " \n",
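1560 | "    # Compare each plane against both the other planes and their negations, so that\n",
1561 | "    # faces lying on the same plane with flipped normals end up in the same cluster\n",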
1560 | " dm1 = scipy.spatial.distance_matrix(ndata, ndata)\n",
1561 | " dm2 = scipy.spatial.distance_matrix(ndata, -ndata)\n",
1562 | "\n",
1563 | " distance_matrix = np.minimum(dm1, dm2)\n",
1564 | "\n",
1565 | " clustering = AgglomerativeClustering(n_clusters=None, distance_threshold=threshold, affinity='precomputed', linkage='average').fit(distance_matrix)\n",
1566 | " \n",
1567 | " return clustering.labels_, clustering.n_clusters_\n",
1568 | "\n",
1569 | "labels, n_clusters = cluster_faces(face_planes(main_mesh))\n",
1570 | "import matplotlib.pyplot as plt\n",
1571 | "boring_cmap = plt.cm.get_cmap(\"tab20b\", n_clusters)\n",
1572 | "main_mesh.plot(scalars=labels, show_edges=True, cmap=boring_cmap)\n",
1573 | "\n",
1574 | "main_mesh.save(\"cluster.vtk\")"
1575 | ]
1576 | },
1577 | {
1578 | "cell_type": "code",
1579 | "execution_count": null,
1580 | "id": "3e1918b3",
1581 | "metadata": {},
1582 | "outputs": [],
1583 | "source": [
1584 | "import time\n",
1585 | "\n",
1586 | "start = time.time()\n",
1587 | "\n",
1588 | "cluster_meshes(meshes)\n",
1589 | "\n",
1590 | "end = time.time()\n",
1591 | "print(end - start)"
1592 | ]
1593 | },
1594 | {
1595 | "cell_type": "code",
1596 | "execution_count": null,
1597 | "id": "a051fa9e",
1598 | "metadata": {},
1599 | "outputs": [],
1600 | "source": [
1601 | "start = time.time()\n",
1602 | "\n",
1603 | "labels, n_clusters = cluster_faces(face_planes(main_mesh))\n",
1604 | "\n",
1605 | "end = time.time()\n",
1606 | "print(end - start)"
1607 | ]
1608 | },
1609 | {
1610 | "cell_type": "code",
1611 | "execution_count": null,
1612 | "id": "1830d3d1",
1613 | "metadata": {},
1614 | "outputs": [],
1615 | "source": [
1616 | "data = np.array(face_planes(main_mesh))\n",
1617 | "\n",
1618 | "dm1 = scipy.spatial.distance_matrix(data, data)\n",
1619 | "dm2 = scipy.spatial.distance_matrix(data, -data)\n",
1620 | "\n",
1621 | "np.minimum(dm1, dm2)"
1622 | ]
1623 | }
1624 | ],
1625 | "metadata": {
1626 | "kernelspec": {
1627 | "display_name": "Python 3 (ipykernel)",
1628 | "language": "python",
1629 | "name": "python3"
1630 | },
1631 | "language_info": {
1632 | "codemirror_mode": {
1633 | "name": "ipython",
1634 | "version": 3
1635 | },
1636 | "file_extension": ".py",
1637 | "mimetype": "text/x-python",
1638 | "name": "python",
1639 | "nbconvert_exporter": "python",
1640 | "pygments_lexer": "ipython3",
1641 | "version": "3.10.11"
1642 | },
1643 | "toc-autonumbering": false,
1644 | "toc-showcode": false,
1645 | "toc-showmarkdowntxt": false
1646 | },
1647 | "nbformat": 4,
1648 | "nbformat_minor": 5
1649 | }
1650 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | pyvista===0.36.1
3 | scipy
4 | cython
5 | pymeshfix
6 | click
7 | pandas
8 | geopandas
9 | tqdm
10 | shapely===1.8.5
11 | jupyter
12 | mapbox-earcut
13 | python-polylabel
14 | miniball
15 | ipygany
16 | rtree
17 | scikit-learn===1.3.2
18 | pytest
19 | pytest-cov
--------------------------------------------------------------------------------
/shape_index.py:
--------------------------------------------------------------------------------
1 | """Module that computes indexes for shapely (2D) and polydata (3D) shapes"""
2 |
3 | import math
4 | from shapely.geometry import Point, MultiPoint, Polygon
5 | from helpers.geometry import surface_normal
6 | try:
7 | from helpers.mesh import to_pymesh, to_pyvista, intersect
8 | pymesh_exists = True
9 | except:
10 | print("WARNING: pymesh not found! Exchange index calculation will be omitted...")
11 | pymesh_exists = False
12 | import miniball
13 | import numpy as np
14 | import pyvista as pv
15 | # import pymesh
16 |
17 | def circularity(shape):
18 | """Returns circularity 2D for a given polygon"""
19 |
20 | return 4 * math.pi * shape.area / math.pow(shape.length, 2)
21 |
22 | def hemisphericality(mesh):
23 | """Returns hemisphericality for a given volume.
24 |
25 | Hemisphericality can be perceived as a similar metric
26 | to circularity in 2D. But in 3D no building is expected
27 | to be spherical, but can be relatively hemispherical
28 | (i.e. starting with a big footpring and narrowing towards
29 | the roof).
30 | """
31 |
32 | return 3 * math.sqrt(2) * math.sqrt(math.pi) * mesh.volume / math.pow(mesh.area, 3/2)
33 |
34 | def convexity_2d(shape):
35 | """Returns the convexity in 2D"""
36 |
37 | return shape.area / shape.convex_hull.area
38 |
39 | def fractality_2d(shape):
40 | """Returns the fractality in 2D for a given polygon"""
41 |
42 | return 1 - math.log(shape.area) / (2 * math.log(shape.length))
43 |
44 | def fractality_3d(mesh):
45 | """Returns the fractality in 3D for a given volume"""
46 |
47 | # TODO: Check this formula
48 | return 1 - math.log(mesh.volume) / (3/2 * math.log(mesh.area))
49 |
50 | def squareness(shape):
51 | """Returns the squareness in 2D for a given polygon"""
52 |
53 | return 4 * math.sqrt(shape.area) / shape.length
54 |
55 | def cubeness(mesh):
56 | """Returns the cubeness in 3D for a given volume"""
57 |
58 | return 6 * math.pow(mesh.volume, 2/3) / mesh.area
59 |
60 | def get_box_dimensions(box):
61 | """Given a box (as shapely polygon) returns its dimensions as a tuple
62 | (small, large)
63 | """
64 |
65 | obb_pts = list(box.boundary.coords)
66 |
67 | S = Point(obb_pts[1]).distance(Point(obb_pts[0]))
68 | L = Point(obb_pts[2]).distance(Point(obb_pts[1]))
69 |
70 | if S > L:
71 | L, S = S, L
72 |
73 | return S, L
74 |
75 | def elongation(S, L):
76 | """Returns the elongation for the given dimensions"""
77 |
78 | if S > L:
79 | return 1 - L / S
80 |
81 | return 1 - S / L
82 |
83 | def equivalent_rectangular_index(shape, obb_2d=None):
84 | """Returns the equivalent rectangular index"""
85 |
86 | if obb_2d is None:
87 | obb_2d = shape.minimum_rotated_rectangle
88 |
89 | k = math.sqrt(shape.area / obb_2d.area)
90 |
91 | return k * obb_2d.length / shape.length
92 |
93 | def equivalent_prism_index(mesh, obb):
94 | """Returns the equivalent rectangular prism index"""
95 |
96 | k = math.pow(mesh.volume / obb.volume, 2/3)
97 |
98 | # evrp: equal volume rectangular prism
99 | A_evrp = k * obb.area
100 |
101 | return A_evrp / mesh.area
102 |
103 | def create_grid_2d(shape, density):
104 | """Return the grid for a given polygon"""
105 |
106 | x_min, y_min, x_max, y_max = shape.bounds
107 | x = np.arange(x_min, x_max, density)
108 | y = np.arange(y_min, y_max, density)
109 | x, y = np.meshgrid(x, y)
110 |
111 | x = np.hstack(x)
112 | y = np.hstack(y)
113 |
114 | return [(x[i], y[i]) for i in range(len(x))]
115 |
116 | def create_grid_3d(mesh, density, check_surface=False):
117 | """Returns the grid for a given mesh"""
118 | voxel = pv.voxelize(mesh, density=density, check_surface=check_surface)
119 |
120 | return voxel.cell_centers().points
121 |
122 | def to_3d(points, normal, origin):
123 | """Translate local 2D coordinates to 3D"""
124 |
125 | x_axis, y_axis = axes_of_normal(normal)
126 |
127 | return (np.repeat([origin], len(points), axis=0)
128 | + np.matmul(points, [x_axis, y_axis]))
129 |
130 | def axes_of_normal(normal):
131 | """Returns an x-axis and y-axis on a plane of the given normal"""
132 | if normal[2] > 0.001 or normal[2] < -0.001:
133 | x_axis = [1, 0, -normal[0]/normal[2]];
134 | elif normal[1] > 0.001 or normal[1] < -0.001:
135 | x_axis = [1, -normal[0]/normal[1], 0];
136 | else:
137 | x_axis = [-normal[1] / normal[0], 1, 0];
138 |
139 | x_axis = x_axis / np.linalg.norm(x_axis)
140 | y_axis = np.cross(normal, x_axis)
141 |
142 | return x_axis, y_axis
143 |
144 | def project_2d(points, normal):
145 | origin = points[0]
146 |
147 | x_axis, y_axis = axes_of_normal(normal)
148 |
149 | return [[np.dot(p - origin, x_axis), np.dot(p - origin, y_axis)] for p in points]
150 |
151 | def create_surface_grid(mesh, density=1):
152 | """Create a 2-dimensional grid along the surface of a 3D mesh"""
153 |
154 | result = []
155 |
156 | sized = mesh.compute_cell_sizes()
157 |
158 | for i in range(mesh.n_cells):
159 | if not mesh.cell_type(i) in [5, 6, 7, 9, 10]:
160 | continue
161 |
162 | pts = mesh.cell_points(i)
163 |
164 | try:
165 | normal = surface_normal(pts)
166 | except:
167 | continue
168 |
169 | pts_2d = project_2d(pts, normal)
170 | poly_2d = Polygon(pts_2d)
171 |
172 | if not poly_2d.is_valid:
173 | continue
174 |
175 | grid = create_grid_2d(poly_2d, density)
176 | grid = MultiPoint(grid).intersection(poly_2d)
177 |
178 | if grid.is_empty:
179 | continue
180 | elif grid.geom_type == "Point":
181 | grid = np.array(grid.coords)
182 | else:
183 | grid = np.array([list(p.coords[0]) for p in grid.geoms])
184 |
185 | # TODO: Randomise the origin
186 | result.extend(list(to_3d(grid, normal, pts[0])))
187 |
188 | return result
189 |
190 | def distance(x, y):
191 | """Returns the euclidean distance between two points"""
192 |
193 | return math.sqrt(sum([math.pow(x[c] - y[c], 2) for c in range(len(x))]))
194 |
195 | def cohesion_2d(shape, grid=None, density=1):
196 | """Returns the cohesion index in 2D for a given polygon"""
197 |
198 | if grid is None:
199 | grid = create_grid_2d(shape, density)
200 |
201 | if isinstance(grid, list):
202 | grid = MultiPoint(grid).intersection(shape)
203 |
204 | d = 0
205 | for pi in grid.geoms:
206 | for pj in grid.geoms:
207 | if pi == pj:
208 | continue
209 |
210 | d += pi.distance(pj)
211 |
212 | n = len(grid.geoms)
213 | return 0.9054 * math.sqrt(shape.area / math.pi) / (1 / (n * (n - 1)) * d)
214 |
215 | def cohesion_3d(mesh, grid=None, density=1, check_surface=False):
216 | """Returns the cohesion index in 3D for a given mesh"""
217 |
218 | if grid is None:
219 |         grid = create_grid_3d(mesh, density=density, check_surface=check_surface)
220 |
221 | d = 0
222 | for pi in grid:
223 | for pj in grid:
224 | d += distance(pi, pj)
225 |
226 |
227 | n = len(grid)
228 | return 36 / 35 * math.pow(3 * mesh.volume / (4 * math.pi), 1/3) / (1 / (n * (n - 1)) * d)
229 |
230 | def proximity_2d(shape, density=1, grid=None):
231 | """Returns the proximity index in 2D for a given polygon"""
232 |
233 | if grid is None:
234 | grid = create_grid_2d(shape, density)
235 |
236 | if isinstance(grid, list):
237 | grid = MultiPoint(grid).intersection(shape)
238 |
239 | if grid.is_empty:
240 | return -1
241 |
242 | if grid.geom_type == "Point":
243 | grid = MultiPoint([grid])
244 |
245 | centroid = shape.centroid
246 |
247 | return 2 / 3 * math.sqrt(shape.area / math.pi) / np.mean([centroid.distance(p) for p in grid.geoms])
248 |
249 | def proximity_3d(mesh, grid=None, density=1, check_surface=False):
250 |     """Returns the proximity index in 3D for a given mesh"""
251 |
252 | if grid is None:
253 | grid = create_grid_3d(mesh, density=density, check_surface=check_surface)
254 |
255 | centroid = np.mean(grid, axis=0)
256 |
257 | # TODO: Verify the formula here
258 | r = math.pow(3 * mesh.volume / (4 * math.pi), 1/3)
259 |
260 | return (3 * r / 4) / np.mean([distance(centroid, p) for p in grid])
261 |
262 | def equal_volume_radius(volume):
263 | """Returns the radius of the equal volume sphere"""
264 |
265 | return math.pow(3 * volume / (4 * math.pi), 1/3)
266 |
267 | def equal_volume_sphere(mesh, position=(0, 0, 0)):
268 | """Returns the sphere that has the same volume as the given mesh"""
269 |
270 | r = math.pow(3 * mesh.volume / (4 * math.pi), 1/3)
271 |
272 | return pv.Sphere(radius=r, center=position)
273 |
274 | def exchange_2d(shape):
275 | """Returns the exchange index in 2D for a given polygon"""
276 |
277 | r = math.sqrt(shape.area / math.pi)
278 |
279 | eac = shape.centroid.buffer(r)
280 |
281 | return shape.intersection(eac).area / shape.area
282 |
283 | def exchange_3d(mesh, evs=None, density=0.25, engine="igl"):
284 |     """Returns the exchange index in 3D for a given mesh
285 |
286 | mesh: The pyvista mesh to evaluate
287 | evs: The equal volume sphere (if provided speeds up the calculation)
288 | density: If no evs is provided, it is used to create a grid to compute the center of mass
289 |     engine: The engine for the boolean operations
290 | """
291 |
292 | if not pymesh_exists:
293 | return -1
294 |
295 | if evs is None:
296 | voxel = pv.voxelize(mesh, density=density, check_surface=False)
297 | grid = voxel.cell_centers().points
298 |
299 | if len(grid) == 0:
300 | centroid = mesh.center
301 | else:
302 | centroid = np.mean(grid, axis=0)
303 |
304 | evs = equal_volume_sphere(mesh, centroid)
305 |
306 | if mesh.n_open_edges > 0:
307 | return -1
308 |
309 | pm_mesh = to_pymesh(mesh)
310 | pm_evs = to_pymesh(evs)
311 |
312 | try:
313 | inter = intersect(pm_mesh, pm_evs, engine)
314 | except:
315 | return -1
316 |
317 | return inter.volume / mesh.volume
318 |
319 | def spin_2d(shape, grid=None, density=1):
320 | if grid is None:
321 | grid = create_grid_2d(shape, density)
322 |
323 | if isinstance(grid, list):
324 | grid = MultiPoint(grid).intersection(shape)
325 |
326 | if grid.is_empty:
327 | return -1
328 |
329 | if grid.geom_type == "Point":
330 | grid = MultiPoint([grid])
331 |
332 | centroid = shape.centroid
333 |
334 | return 0.5 * (shape.area / math.pi) / np.mean([math.pow(centroid.distance(p), 2) for p in grid])
335 |
336 | def spin_3d(mesh, grid=None, density=1, check_surface=False):
337 |     """Returns the spin index in 3D for a given mesh"""
338 |
339 | if grid is None:
340 | voxel = pv.voxelize(mesh, density=density, check_surface=check_surface)
341 | grid = voxel.cell_centers().points
342 |
343 | centroid = np.mean(grid, axis=0)
344 |
345 | r = math.pow(3 * mesh.volume / (4 * math.pi), 1/3)
346 | # TODO: Calculate the actual formula here
347 | return 3 / 5 * math.pow(r, 2) / np.mean([math.pow(distance(centroid, p), 2) for p in grid])
348 |
349 | def perimeter_index(shape):
350 | return 2 * math.sqrt(math.pi * shape.area) / shape.length
351 |
352 | def circumference_index_3d(mesh):
353 | return 4 * math.pi * math.pow(3 * mesh.volume / (4 * math.pi), 2 / 3) / mesh.area
354 |
355 | def depth_2d(shape, grid=None, density=1):
356 | if grid is None:
357 | grid = create_grid_2d(shape, density)
358 |
359 | if isinstance(grid, list):
360 | grid = MultiPoint(grid).intersection(shape)
361 |
362 | if grid.is_empty:
363 | return -1
364 |
365 | if grid.geom_type == "Point":
366 | grid = MultiPoint([grid])
367 |
368 | return 3 * np.mean([p.distance(shape.boundary) for p in grid]) / math.sqrt(shape.area / math.pi)
369 |
370 | def depth_3d(mesh, grid=None, density=1, check_surface=False):
371 | """Returns the depth index in 3D for a given mesh"""
372 |
373 | if grid is None:
374 | voxel = pv.voxelize(mesh, density=density, check_surface=check_surface)
375 | grid = voxel.cell_centers()
376 |
377 | dist = grid.compute_implicit_distance(mesh)
378 |
379 | r = math.pow(3 * mesh.volume / (4 * math.pi), 1/3)
380 | return 4 * np.mean(np.absolute(dist["implicit_distance"])) / r
381 |
382 | from polylabel import polylabel
383 |
384 | def largest_inscribed_circle(shape):
385 | """Returns the largest inscribed circle of a polygon in 2D"""
386 |
387 | centre, r = polylabel([list([list(c)[:2] for c in shape.boundary.coords])], with_distance=True) # ([0.5, 0.5], 0.5)
388 |
389 | lic = Point(centre).buffer(r)
390 |
391 | return lic
392 |
393 | def largest_inscribed_sphere(mesh, grid=None, density=1, check_surface=False):
394 | """Returns the largest inscribed sphere of a mesh in 3D"""
395 |
396 | if grid is None:
397 | voxel = pv.voxelize(mesh, density=density, check_surface=check_surface)
398 | grid = voxel.cell_centers()
399 |
400 | if not isinstance(grid, pv.PolyData):
401 | grid = pv.PolyData(grid)
402 |
403 | dist = grid.compute_implicit_distance(mesh)
404 |
405 | if grid.n_points == 0:
406 | return pv.Sphere(center=(0, 0, 0), radius=(mesh.bounds[2] - mesh.bounds[0]) / 2)
407 |
408 | # The largest inscribed circle's radius is the largest (internal) distance,
409 | # hence the lowest value (as internal distance is negative)
410 | lis_radius = np.min(dist["implicit_distance"])
411 | lis_center = dist.points[np.where(dist["implicit_distance"] == lis_radius)][0]
412 |
413 | return pv.Sphere(center=lis_center, radius=abs(lis_radius))
414 |
415 | def girth_2d(shape):
416 | """Return the girth index in 2D for a given polygon"""
417 |
418 | lic = largest_inscribed_circle(shape)
419 |
420 | if lic.is_empty:
421 | return -1
422 |
423 | # Compute the radius as half the bounding box width
424 | r = (lic.bounds[2] - lic.bounds[0]) / 2
425 |
426 | return r / math.sqrt(shape.area / math.pi)
427 |
428 | def girth_3d(mesh, grid=None, density=1, check_surface=False):
429 | """Return the girth index in 3D for a given mesh"""
430 |
431 | lis = largest_inscribed_sphere(mesh,
432 | grid=grid,
433 | density=density,
434 | check_surface=check_surface)
435 |
436 | r = (lis.bounds[1] - lis.bounds[0]) / 2
437 | r_evs = math.pow(3 * mesh.volume / (4 * math.pi), 1/3)
438 |
439 | return r / r_evs
440 |
441 | def range_2d(shape):
442 | """Returns the range index in 2D for a given polygon"""
443 |
444 | from helpers.smallestenclosingcircle import make_circle
445 |
446 | x, y, r = make_circle([c[:2] for c in shape.boundary.coords])
447 |
448 | return math.sqrt(shape.area / math.pi) / r
449 |
450 | def get_bounding_ball_radius(points):
451 | """Returns the bounding ball for a set of points"""
452 |
453 | try:
454 | _, r2 = miniball.get_bounding_ball(points)
455 | except:
456 | return -1
457 |
458 | return r2
459 |
460 | def range_3d(mesh):
461 | """Returns the range index in 3D for a given mesh"""
462 |
463 | r2 = -1
464 |
465 | pts = mesh.clean().points
466 | t = np.mean(pts, axis=0)
467 | pts = pts - t
468 |
469 | count = 0
470 |
471 | while r2 < 0:
472 | r2 = get_bounding_ball_radius(pts)
473 | count += 1
474 |
475 | if count > 10:
476 | return -1
477 |
478 | r_scc = math.sqrt(r2)
479 |
480 | return math.pow(3 * mesh.volume / (4 * math.pi), 1/3) / r_scc
481 |
482 | def dispersion_2d(shape, density=0.2):
483 | """Returns the dispersion index in 2d for a given polygon"""
484 |
485 | c = shape.centroid
486 | b = shape.boundary
487 |
488 | r = math.sqrt(shape.area / math.pi)
489 |
490 | r_dev = 0
491 | r_ibp = 0
492 | for l in np.arange(0, b.length, density):
493 | p = b.interpolate(l)
494 |
495 | r_dev += abs(p.distance(c) - r)
496 | r_ibp += p.distance(c)
497 |
498 | return 1 - (r_dev / r_ibp)
499 |
500 | def dispersion_3d(mesh, grid, density=0.5):
501 | """Returns the dispersion index in 3d for a given mesh"""
502 |
503 | centroid = np.mean(grid, axis=0)
504 |
505 | s_grid = create_surface_grid(mesh, density)
506 |
507 | r = equal_volume_radius(mesh.volume)
508 |
509 | r_dev = 0
510 | r_ibp = 0
511 | for p in s_grid:
512 | d_i = distance(centroid, p)
513 | r_dev += abs(d_i - r)
514 | r_ibp += d_i
515 |
516 | return 1 - (r_dev / r_ibp)
517 |
518 | def roughness_index_2d(shape, density=0.2):
519 | c = shape.centroid
520 | b = shape.boundary
521 |
522 | if b.length < 1:
523 | return -1
524 |
525 | r_ibp = 0
526 | for l in np.arange(0, b.length, density):
527 | p = b.interpolate(l)
528 |
529 | r_ibp += p.distance(c)
530 |
531 | m_r = r_ibp / math.floor(b.length / density)
532 |
533 | return 42.62 * math.pow(m_r, 2) / (shape.area + math.pow(shape.length, 2))
534 |
535 | def roughness_index_3d(mesh, grid, density=0.5):
536 | centroid = np.mean(grid, axis=0)
537 |
538 | s_grid = create_surface_grid(mesh, density)
539 |
540 | if len(s_grid) == 0:
541 | return -1
542 |
543 | r_ibp = 0
544 | for p in s_grid:
545 | d_i = distance(centroid, p)
546 | r_ibp += d_i
547 |
548 | m_r = r_ibp / len(s_grid)
549 |
550 | return 48.735 * math.pow(m_r, 3) / (mesh.volume + math.pow(mesh.area, 3/2))
551 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tudelft3d/3d-building-metrics/ca50dae866c66d51845ac383f2b64f190409d092/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test_shape_index.py:
--------------------------------------------------------------------------------
1 | import shape_index
2 | import pyvista as pv
3 | from pytest import approx
4 | from shapely.geometry import Point
5 |
6 | def test_hemisphericality():
7 | hemisphere = pv.Sphere(radius=10).clip()
8 | index_value = shape_index.hemisphericality(hemisphere)
9 |
10 | assert index_value == approx(1.0, abs=1e-2)
11 |
12 | def test_fractality_2d():
13 | circle = Point(0,0).buffer(10)
14 | index_value = shape_index.fractality_2d(circle)
15 |
16 | assert index_value == approx(0.3, abs=1e-2)
17 |
18 | def test_fractality_3d():
19 |     sphere = pv.Sphere(radius=10)
20 |     index_value = shape_index.fractality_3d(sphere)
21 |
22 | assert index_value == approx(0.22, abs=1e-2)
23 |
24 | def test_cubeness_3d():
25 | cube = pv.Box().scale(10, inplace=False)
26 | index_value = shape_index.cubeness(cube)
27 |
28 | assert index_value == approx(1.0, abs=1e-2)
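29 | 
30 | # The following tests are additions (not in the original suite); they use shapes
31 | # with analytically known index values.
32 | def test_circularity():
33 |     circle = Point(0, 0).buffer(10)
34 |     index_value = shape_index.circularity(circle)
35 | 
36 |     assert index_value == approx(1.0, abs=1e-2)
37 | 
38 | def test_convexity_2d():
39 |     circle = Point(0, 0).buffer(10)
40 |     index_value = shape_index.convexity_2d(circle)
41 | 
42 |     assert index_value == approx(1.0, abs=1e-3)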
--------------------------------------------------------------------------------
/volume.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pyvista as pv
3 | import scipy.spatial as ss
4 | import sys
5 | from pymeshfix import MeshFix
6 |
7 | import json
8 |
9 | if len(sys.argv) < 2:
10 |     print("Gimme a file, or I'll skip you!")
11 |     sys.exit(1)
12 | filename = sys.argv[1]
13 |
14 | with open(filename, 'r') as f:
15 | cm = json.load(f)
16 |
17 | if "transform" in cm:
18 | s = cm["transform"]["scale"]
19 | t = cm["transform"]["translate"]
20 | verts = [[v[0] * s[0] + t[0], v[1] * s[1] + t[1], v[2] * s[2] + t[2]]
21 | for v in cm["vertices"]]
22 | else:
23 | verts = cm["vertices"]
24 |
25 | # mesh points
26 | vertices = np.array(verts)
27 |
28 | epointsListSemantics = {}
29 |
30 | print("id, type, actual volume, convex hull volume, area, ground area, wall area, roof area")
31 | for obj in cm["CityObjects"]:
32 | building = cm["CityObjects"][obj]
33 |
34 | if len(sys.argv) > 2 and obj != sys.argv[2]:
35 | continue
36 |
37 | # TODO: Add options for all skip conditions below
38 |
39 | # Skip if type is not Building or Building part
40 | if not building["type"] in ["Building", "BuildingPart"]:
41 | continue
42 |
43 | # Skip if no geometry
44 | if not "geometry" in building or len(building["geometry"]) == 0:
45 | continue
46 |
47 | geom = building["geometry"][0]
48 |
49 | # Skip if the geometry type is not supported
50 | if geom["type"] == "MultiSurface":
51 | boundaries = geom["boundaries"]
52 | elif geom["type"] == "Solid":
53 | boundaries = geom["boundaries"][0]
54 | else:
55 | continue
56 |
57 |     f = [[len(r[0])] + r[0] for r in boundaries]
58 | faces = np.hstack(f)
59 |
60 | # Create the pyvista object
61 | dataset = pv.PolyData(vertices, faces)
62 |
63 | mfix = MeshFix(dataset)
64 | # mfix.repair()
65 |
66 | holes = mfix.extract_holes()
67 |
68 | # plotter = pv.Plotter()
69 | # plotter.add_mesh(dataset, color=True)
70 | # plotter.add_mesh(holes, color='r', line_width=5)
71 | # plotter.enable_eye_dome_lighting() # helps depth perception
72 | # _ = plotter.show()
73 |
74 | fixed = mfix.mesh
75 |
76 | # Compute the convex hull volume
77 | f = [v for ring in boundaries for v in ring[0]]
78 | points = [verts[i] for i in f]
79 | try:
80 | ch_volume = ss.ConvexHull(points).volume
81 | except:
82 | ch_volume = 0
83 |
84 | area = {
85 | "GroundSurface": 0,
86 | "WallSurface": 0,
87 | "RoofSurface": 0
88 | }
89 |
90 | epointsListSemantics[obj] = {"G": [], "R": []}
91 |
92 | if "semantics" in geom:
93 | # Compute area per surface type
94 | sized = dataset.compute_cell_sizes()
95 | surface_areas = sized.cell_data["Area"]
96 |
97 | semantics = geom["semantics"]
98 | for i in range(len(surface_areas)):
99 | if geom["type"] == "MultiSurface":
100 | t = semantics["surfaces"][semantics["values"][i]]["type"]
101 | elif geom["type"] == "Solid":
102 | t = semantics["surfaces"][semantics["values"][0][i]]["type"]
103 |
104 | if t in area:
105 | area[t] = area[t] + surface_areas[i]
106 | else:
107 | area[t] = surface_areas[i]
108 |
109 | if t == "GroundSurface":
110 | epointsListSemantics[obj]["G"].append([verts[v] for v in boundaries[i][0]])
111 | elif t == "RoofSurface":
112 | epointsListSemantics[obj]["R"].append([verts[v] for v in boundaries[i][0]])
113 |
114 | print(f"{obj}, {building['type']}, {fixed.volume}, {ch_volume}, {dataset.area}, {area['GroundSurface']}, {area['WallSurface']}, {area['RoofSurface']}")
--------------------------------------------------------------------------------