├── .gitignore
├── LICENSE
├── README.md
├── __init__.py
├── clients
│   ├── QtMapServiceClient.py
│   └── QtMapServiceClient.ui
├── core
│   ├── __init__.py
│   ├── basemaps
│   │   ├── __init__.py
│   │   ├── gpkg.py
│   │   ├── mapservice.py
│   │   └── servicesDefs.py
│   ├── checkdeps.py
│   ├── errors.py
│   ├── georaster
│   │   ├── __init__.py
│   │   ├── bigtiffwriter.py
│   │   ├── georaster.py
│   │   ├── georef.py
│   │   ├── img_utils.py
│   │   └── npimg.py
│   ├── lib
│   │   ├── Tyf
│   │   │   ├── VERSION
│   │   │   ├── __init__.py
│   │   │   ├── decoders.py
│   │   │   ├── encoders.py
│   │   │   ├── gkd.py
│   │   │   ├── ifd.py
│   │   │   ├── tags.py
│   │   │   └── values.py
│   │   ├── imageio
│   │   │   ├── README.md
│   │   │   ├── __init__.py
│   │   │   ├── core
│   │   │   │   ├── __init__.py
│   │   │   │   ├── fetching.py
│   │   │   │   ├── findlib.py
│   │   │   │   ├── format.py
│   │   │   │   ├── functions.py
│   │   │   │   ├── request.py
│   │   │   │   └── util.py
│   │   │   ├── freeze.py
│   │   │   ├── plugins
│   │   │   │   ├── __init__.py
│   │   │   │   ├── _freeimage.py
│   │   │   │   └── freeimage.py
│   │   │   ├── resources
│   │   │   │   └── shipped_resources_go_here
│   │   │   └── testing.py
│   │   ├── shapefile.py
│   │   └── shapefile123.py
│   ├── maths
│   │   ├── __init__.py
│   │   ├── akima.py
│   │   ├── fillnodata.py
│   │   ├── interpo.py
│   │   └── kmeans1D.py
│   ├── proj
│   │   ├── __init__.py
│   │   ├── ellps.py
│   │   ├── reproj.py
│   │   ├── srs.py
│   │   ├── srv.py
│   │   └── utm.py
│   ├── settings.json
│   ├── settings.py
│   └── utils
│       ├── __init__.py
│       ├── bbox.py
│       ├── gradient.py
│       ├── timing.py
│       └── xy.py
├── geoscene.py
├── icons
│   ├── asc.png
│   ├── curve.png
│   ├── delaunay.png
│   ├── drop.png
│   ├── exifCam.png
│   ├── georefCam.png
│   ├── layers.png
│   ├── lidar.png
│   ├── osm.png
│   ├── osm_xml.png
│   ├── raster.png
│   ├── shp.png
│   ├── terrain.png
│   └── voronoi.png
├── issue_template.md
├── operators
│   ├── __init__.py
│   ├── add_camera_exif.py
│   ├── add_camera_georef.py
│   ├── io_export_shp.py
│   ├── io_get_dem.py
│   ├── io_import_asc.py
│   ├── io_import_georaster.py
│   ├── io_import_osm.py
│   ├── io_import_shp.py
│   ├── lib
│   │   └── osm
│   │       ├── nominatim.py
│   │       └── overpy
│   │           ├── __about__.py
│   │           ├── __init__.py
│   │           ├── exception.py
│   │           └── helper.py
│   ├── mesh_delaunay_voronoi.py
│   ├── mesh_earth_sphere.py
│   ├── nodes_terrain_analysis_builder.py
│   ├── nodes_terrain_analysis_reclassify.py
│   ├── object_drop.py
│   ├── rsrc
│   │   └── gradients
│   │       ├── GMT_dem4.svg
│   │       ├── GMT_panoply.svg
│   │       ├── Gummy-Kids.svg
│   │       ├── Horizon_1.svg
│   │       ├── Ribbon-Colors.svg
│   │       ├── Spectral_11.svg
│   │       ├── Sunrise.svg
│   │       ├── abyss.svg
│   │       ├── alarm.p1.0.5.svg
│   │       ├── bath_114.svg
│   │       ├── bhw3_05.svg
│   │       ├── ch05p151010.svg
│   │       ├── cyanotype-sodableach_01.svg
│   │       ├── esri-bolivia.svg
│   │       ├── esri-ecuador.svg
│   │       ├── esri-europe_7.svg
│   │       ├── esri-italy.svg
│   │       ├── esri-mojave.svg
│   │       ├── esri-utah_1.svg
│   │       ├── fs2009.svg
│   │       ├── gem-16.svg
│   │       ├── heat.svg
│   │       ├── nrwc.svg
│   │       ├── pm3d01.svg
│   │       ├── precip_11lev.svg
│   │       ├── reds_01.svg
│   │       ├── sepiared_01.svg
│   │       ├── smart.svg
│   │       ├── stern.svg
│   │       ├── temp_19lev.svg
│   │       ├── temperature.svg
│   │       └── wiki-plumbago.svg
│   ├── utils
│   │   ├── __init__.py
│   │   ├── bgis_utils.py
│   │   ├── delaunay_voronoi.py
│   │   └── georaster_utils.py
│   └── view3d_mapviewer.py
└── prefs.py
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/
2 | *.py[cod]
3 | *$py.class
4 |
5 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Blender GIS
2 | ==========
3 | Minimum Blender version required: v2.83
4 |
5 | Note: since 2022, the OpenTopography web service requires an API key. Please register at opentopography.org and request a key; the service is still free.
6 |
7 |
8 | [Wiki](https://github.com/domlysz/BlenderGIS/wiki/Home) - [FAQ](https://github.com/domlysz/BlenderGIS/wiki/FAQ) - [Quick start guide](https://github.com/domlysz/BlenderGIS/wiki/Quick-start) - [Flowchart](https://raw.githubusercontent.com/wiki/domlysz/blenderGIS/flowchart.jpg)
9 | --------------------
10 |
11 | ## Functionalities overview
12 |
13 | **GIS data file import:** import the most common GIS data formats into Blender: Shapefile vectors, raster images, GeoTIFF DEMs and OpenStreetMap XML.
14 |
15 | BlenderGIS offers many ways to build a 3D terrain from geographic data; check the [Flowchart](https://raw.githubusercontent.com/wiki/domlysz/blenderGIS/flowchart.jpg) for an overview.
16 |
17 | Example: import vector contour lines, create faces by triangulation and apply a topographic raster texture.
18 |
19 | 
20 |
21 | **Grab geodata directly from the web:** display dynamic web maps inside the Blender 3D view, request OpenStreetMap data (buildings, roads ...), and get true elevation data from the NASA SRTM mission.
22 |
23 | 
24 |
25 | **And more:** manage the georeferencing information of a scene, compute a terrain mesh by Delaunay triangulation, drop objects onto a terrain mesh, run terrain analysis with shader nodes, set up new cameras from geotagged photos, and set up a camera to render a new georeferenced raster with Blender.
26 |
--------------------------------------------------------------------------------
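A minimal sketch of enabling the add-on from Blender's Python console once the repository has been copied into Blender's addons folder; the module name "BlenderGIS" is an assumption and must match the installed folder name (Blender >= 2.83, as stated above):

    import bpy

    # Enable the add-on; "BlenderGIS" is assumed to be the folder name under the addons directory.
    bpy.ops.preferences.addon_enable(module="BlenderGIS")
    bpy.ops.wm.save_userpref()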
/core/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 | logging.basicConfig(level=logging.getLevelName('INFO'))
3 |
4 | from .checkdeps import HAS_GDAL, HAS_PYPROJ, HAS_IMGIO, HAS_PIL
5 | from .settings import settings
6 | from .errors import OverlapError
7 |
8 | from .utils import XY, BBOX
9 |
10 | from .proj import SRS, Reproj, reprojPt, reprojPts, reprojBbox, reprojImg
11 |
12 | from .georaster import GeoRef, GeoRaster, NpImage
13 |
14 | from .basemaps import GRIDS, SOURCES, MapService, GeoPackage, TileMatrix
15 |
16 | from .lib import shapefile
17 |
--------------------------------------------------------------------------------
/core/basemaps/__init__.py:
--------------------------------------------------------------------------------
1 | from .servicesDefs import GRIDS, SOURCES
2 | from .mapservice import MapService, TileMatrix, BBoxRequest, BBoxRequestMZ
3 | from .gpkg import GeoPackage
4 |
--------------------------------------------------------------------------------
/core/checkdeps.py:
--------------------------------------------------------------------------------
1 | import logging
2 | log = logging.getLogger(__name__)
3 |
4 | #GDAL
5 | try:
6 | from osgeo import gdal
7 | except Exception:
8 | HAS_GDAL = False
9 | log.debug('GDAL Python binding unavailable')
10 | else:
11 | HAS_GDAL = True
12 | log.debug('GDAL Python binding available')
13 |
14 |
15 | #PyProj
16 | try:
17 | import pyproj
18 | except Exception:
19 | HAS_PYPROJ = False
20 | log.debug('PyProj unavailable')
21 | else:
22 | HAS_PYPROJ = True
23 | log.debug('PyProj available')
24 |
25 |
26 | #PIL/Pillow
27 | try:
28 | from PIL import Image
29 | except Exception:
30 | HAS_PIL = False
31 | log.debug('Pillow unavailable')
32 | else:
33 | HAS_PIL = True
34 | log.debug('Pillow available')
35 |
36 |
37 | #Imageio freeimage plugin
38 | try:
39 | from .lib import imageio
40 | imageio.plugins._freeimage.get_freeimage_lib() #try to download freeimage lib
41 | except Exception as e:
42 | log.error("Cannot install ImageIO's Freeimage plugin", exc_info=True)
43 | HAS_IMGIO = False
44 | else:
45 | HAS_IMGIO = True
46 | log.debug('ImageIO Freeimage plugin available')
47 |
--------------------------------------------------------------------------------
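checkdeps.py only sets the HAS_* flags; a minimal sketch of how such a flag is meant to be consumed (the same guard pattern appears in bigtiffwriter.py and georaster.py below), using a hypothetical helper function:

    from .checkdeps import HAS_GDAL

    if HAS_GDAL:
        from osgeo import gdal

    def open_readonly(path):
        '''Open a raster with GDAL, or fail clearly when the binding is missing.'''
        if not HAS_GDAL:
            raise ImportError("GDAL interface unavailable")
        return gdal.Open(path, gdal.GA_ReadOnly)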
/core/errors.py:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | class OverlapError(Exception):
5 | def __init__(self):
6 | pass
7 | def __str__(self):
8 | return "Non overlap data"
9 |
10 | class ReprojError(Exception):
11 | def __init__(self, value):
12 | self.value = value
13 | def __str__(self):
14 | return repr(self.value)
15 |
--------------------------------------------------------------------------------
/core/georaster/__init__.py:
--------------------------------------------------------------------------------
1 | from .georef import GeoRef
2 | from .georaster import GeoRaster
3 | from .npimg import NpImage
4 | from .bigtiffwriter import BigTiffWriter
5 | from .img_utils import getImgFormat, getImgDim, isValidStream
6 |
--------------------------------------------------------------------------------
/core/georaster/bigtiffwriter.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | # This file is part of BlenderGIS
4 |
5 | # ***** GPL LICENSE BLOCK *****
6 | #
7 | # This program is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 | # All rights reserved.
20 | # ***** GPL LICENSE BLOCK *****
21 |
22 |
23 | import os
24 | import numpy as np
25 | from .npimg import NpImage
26 |
27 |
28 | from ..checkdeps import HAS_GDAL, HAS_PIL, HAS_IMGIO
29 |
30 | if HAS_GDAL:
31 | from osgeo import gdal
32 |
33 |
34 | class BigTiffWriter():
35 | '''
36 | This class is designed to write a BigTIFF file with JPEG compression
37 | writing a large tiff file without triggering a memory overflow is possible with the help of the GDAL library
38 | jpeg compression keeps the file size reasonable
39 | transparency or nodata is stored in an internal tiff mask because an alpha channel cannot be used together with jpg compression
40 | '''
41 |
42 |
43 | def __del__(self):
44 | # properly close gdal dataset
45 | self.ds = None
46 |
47 |
48 | def __init__(self, path, w, h, georef, geoTiffOptions={'TFW':'YES', 'TILED':'YES', 'BIGTIFF':'YES', 'COMPRESS':'JPEG', 'JPEG_QUALITY':80, 'PHOTOMETRIC':'YCBCR'}):
49 | '''
50 | path = file system path for the output tiff
51 | w, h = width and height in pixels
52 | georef : a GeoRef object used to set the georeferencing information, optional
53 | geoTiffOptions : GDAL creation options for the GTiff format
54 | '''
55 |
56 | if not HAS_GDAL:
57 | raise ImportError("GDAL interface unavailable")
58 |
59 |
60 | #control path validity
61 |
62 | self.w = w
63 | self.h = h
64 | self.size = (w, h)
65 |
66 | self.path = path
67 | self.georef = georef
68 |
69 | if geoTiffOptions.get('COMPRESS', None) == 'JPEG':
70 | #JPEG in tiff cannot have an alpha band, workaround is to use internal tiff mask
71 | self.useMask = True
72 | gdal.SetConfigOption('GDAL_TIFF_INTERNAL_MASK', 'YES')
73 | n = 3 #RGB
74 | else:
75 | self.useMask = False
76 | n = 4 #RGBA
77 | self.nbBands = n
78 |
79 | options = [str(k) + '=' + str(v) for k, v in geoTiffOptions.items()]
80 |
81 | driver = gdal.GetDriverByName("GTiff")
82 | gdtype = gdal.GDT_Byte #GDT_UInt16, GDT_Int16, GDT_UInt32, GDT_Int32
83 | self.dtype = 'uint8'
84 |
85 | self.ds = driver.Create(path, w, h, n, gdtype, options)
86 | if self.useMask:
87 | self.ds.CreateMaskBand(gdal.GMF_PER_DATASET)#The mask band is shared between all bands on the dataset
88 | self.mask = self.ds.GetRasterBand(1).GetMaskBand()
89 | self.mask.Fill(255)
90 | elif n == 4:
91 | self.ds.GetRasterBand(4).Fill(255)
92 |
93 | #Write georef infos
94 | self.ds.SetGeoTransform(self.georef.toGDAL())
95 | if self.georef.crs is not None:
96 | self.ds.SetProjection(self.georef.crs.getOgrSpatialRef().ExportToWkt())
97 | #self.georef.toWorldFile(os.path.splitext(path)[0] + '.tfw')
98 |
99 |
100 | def paste(self, data, x, y):
101 | '''data = numpy array or NpImg'''
102 | img = NpImage(data)
103 | data = img.data
104 | #Write RGB
105 | for bandIdx in range(3): #writearray is available only at band level
106 | bandArray = data[:,:,bandIdx]
107 | self.ds.GetRasterBand(bandIdx+1).WriteArray(bandArray, x, y)
108 | #Process alpha
109 | hasAlpha = data.shape[2] == 4
110 | if hasAlpha:
111 | alpha = data[:,:,3]
112 | if self.useMask:
113 | self.mask.WriteArray(alpha, x, y)
114 | else:
115 | self.ds.GetRasterBand(4).WriteArray(alpha, x, y)
116 | else:
117 | pass # replaced by fill method
118 | '''
119 | #make alpha band or internal mask fully opaque
120 | h, w = data.shape[0], data.shape[1]
121 | alpha = np.full((h, w), 255, np.uint8)
122 | if self.useMask:
123 | self.mask.WriteArray(alpha, x, y)
124 | else:
125 | self.ds.GetRasterBand(4).WriteArray(alpha, x, y)
126 | '''
127 |
128 |
129 |
130 | def __repr__(self):
131 | return '\n'.join([
132 | "* Data infos :",
133 | " size {}".format(self.size),
134 | " type {}".format(self.dtype),
135 | " number of bands {}".format(self.nbBands),
136 | "* Georef & Geometry : \n{}".format(self.georef)
137 | ])
138 |
--------------------------------------------------------------------------------
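A minimal usage sketch for BigTiffWriter, assuming GDAL is available and that a GeoRef instance (defined in georef.py, not shown in this section) is at hand; the paths, sizes and GeoRef.fromWorldFile() call are illustrative:

    import numpy as np
    from .georef import GeoRef
    from .bigtiffwriter import BigTiffWriter

    georef = GeoRef.fromWorldFile('mosaic.tfw', (8192, 8192))  # assumed inputs
    writer = BigTiffWriter('mosaic.tif', 8192, 8192, georef)

    tile = np.zeros((256, 256, 4), dtype=np.uint8)  # one RGBA tile
    writer.paste(tile, 0, 0)   # with JPEG compression the alpha goes to the internal TIFF mask
    del writer                 # __del__ closes the GDAL dataset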
/core/georaster/georaster.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | # This file is part of BlenderGIS
4 |
5 | # ***** GPL LICENSE BLOCK *****
6 | #
7 | # This program is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 | # All rights reserved.
20 | # ***** GPL LICENSE BLOCK *****
21 |
22 | import os
23 |
24 | import logging
25 | log = logging.getLogger(__name__)
26 |
27 | from ..lib import Tyf #geotags reader
28 |
29 | from .georef import GeoRef
30 | from .npimg import NpImage
31 | from .img_utils import getImgFormat, getImgDim
32 |
33 | from ..utils import XY as xy
34 | from ..errors import OverlapError
35 | from ..checkdeps import HAS_GDAL
36 |
37 | if HAS_GDAL:
38 | from osgeo import gdal
39 |
40 |
41 | class GeoRaster():
42 | '''A class to represent a georaster file'''
43 |
44 |
45 | def __init__(self, path, subBoxGeo=None, useGDAL=False):
46 | '''
47 | subBoxGeo : a BBOX object in CRS coordinate space
48 | useGDAL : use GDAL (if available) to extract raster information
49 | '''
50 | self.path = path
51 | self.wfPath = self._getWfPath()
52 |
53 | self.format = None #image file format (jpeg, tiff, png ...)
54 | self.size = None #raster dimension (width, height) in pixel
55 | self.depth = None #8, 16, 32
56 | self.dtype = None #int, uint, float
57 | self.nbBands = None #number of bands
58 | self.noData = None
59 |
60 | self.georef = None
61 |
62 | if not useGDAL or not HAS_GDAL:
63 |
64 | self.format = getImgFormat(path)
65 | if self.format not in ['TIFF', 'BMP', 'PNG', 'JPEG', 'JPEG2000']:
66 | raise IOError("Unsupported format {}".format(self.format))
67 |
68 | if self.isTiff:
69 | self._fromTIFF()
70 | if not self.isGeoref and self.hasWorldFile:
71 | self.georef = GeoRef.fromWorldFile(self.wfPath, self.size)
72 | else:
73 | pass
74 | else:
75 | # Try to read file header
76 | w, h = getImgDim(self.path)
77 | if w is None or h is None:
78 | raise IOError("Unable to read raster size")
79 | else:
80 | self.size = xy(w, h)
81 | #georef
82 | if self.hasWorldFile:
83 | self.georef = GeoRef.fromWorldFile(self.wfPath, self.size)
84 | #TODO add function to extract dtype, nBands & depth from jpg, png, bmp or jpeg2000
85 |
86 | else:
87 | self._fromGDAL()
88 |
89 | if not self.isGeoref:
90 | raise IOError("Unable to read georef infos from worldfile or geotiff tags")
91 |
92 | if subBoxGeo is not None:
93 | self.georef.setSubBoxGeo(subBoxGeo)
94 |
95 |
96 | #GeoRef delegation by composition instead of inheritance
97 | #this special method is called whenever the requested attribute or method is not found in the object
98 | def __getattr__(self, attr):
99 | return getattr(self.georef, attr)
100 |
101 |
102 | ############################################
103 | # Initialization Helpers
104 | ############################################
105 |
106 | def _getWfPath(self):
107 | '''Try to find a worldfile path for this raster'''
108 | ext = self.path[-3:].lower()
109 | extTest = []
110 | extTest.append(ext[0] + ext[2] +'w')# tfw, jgw, pgw ...
111 | extTest.append(extTest[0]+'x')# tfwx
112 | extTest.append(ext+'w')# tifw
113 | extTest.append('wld')#*.wld
114 | extTest.extend( [ext.upper() for ext in extTest] )
115 | for wfExt in extTest:
116 | pathTest = self.path[0:len(self.path)-3] + wfExt
117 | if os.path.isfile(pathTest):
118 | return pathTest
119 | return None
120 |
121 | def _fromTIFF(self):
122 | '''Use Tyf to extract raster infos from geotiff tags'''
123 | if not self.isTiff or not self.fileExists:
124 | return
125 | tif = Tyf.open(self.path)[0]
126 | #Warning : Tyf object does not support k in dict test syntax nor get() method, use try block instead
127 | self.size = xy(tif['ImageWidth'], tif['ImageLength'])
128 | self.nbBands = tif['SamplesPerPixel']
129 | self.depth = tif['BitsPerSample']
130 | if self.nbBands > 1:
131 | self.depth = self.depth[0]
132 | sampleFormatMap = {1:'uint', 2:'int', 3:'float', None:'uint', 6:'complex'}
133 | try:
134 | self.dtype = sampleFormatMap[tif['SampleFormat']]
135 | except KeyError:
136 | self.dtype = 'uint'
137 | try:
138 | self.noData = float(tif['GDAL_NODATA'])
139 | except KeyError:
140 | self.noData = None
141 | #Get Georef
142 | try:
143 | self.georef = GeoRef.fromTyf(tif)
144 | except Exception as e:
145 | log.warning('Cannot extract georeferencing information from tif tags')#, exc_info=True)
146 | pass
147 |
148 |
149 | def _fromGDAL(self):
150 | '''Use GDAL to extract raster infos and init'''
151 | if self.path is None or not self.fileExists:
152 | raise IOError("Cannot find file on disk")
153 | ds = gdal.Open(self.path, gdal.GA_ReadOnly)
154 | self.size = xy(ds.RasterXSize, ds.RasterYSize)
155 | self.format = ds.GetDriver().ShortName
156 | if self.format in ['JP2OpenJPEG', 'JP2ECW', 'JP2KAK', 'JP2MrSID'] :
157 | self.format = 'JPEG2000'
158 | self.nbBands = ds.RasterCount
159 | b1 = ds.GetRasterBand(1) #first band (band index does not count from 0)
160 | self.noData = b1.GetNoDataValue()
161 | ddtype = gdal.GetDataTypeName(b1.DataType)#Byte, UInt16, Int16, UInt32, Int32, Float32, Float64
162 | if ddtype == "Byte":
163 | self.dtype = 'uint'
164 | self.depth = 8
165 | else:
166 | self.dtype = ddtype[0:len(ddtype)-2].lower()
167 | self.depth = int(ddtype[-2:])
168 | #Get Georef
169 | self.georef = GeoRef.fromGDAL(ds)
170 | #Close (gdal has no garbage collector)
171 | ds, b1 = None, None
172 |
173 | #######################################
174 | # Dynamic properties
175 | #######################################
176 | @property
177 | def fileExists(self):
178 | '''Test if the file exists on disk'''
179 | return os.path.isfile(self.path)
180 | @property
181 | def baseName(self):
182 | if self.path is not None:
183 | folder, fileName = os.path.split(self.path)
184 | baseName, ext = os.path.splitext(fileName)
185 | return baseName
186 | @property
187 | def isTiff(self):
188 | '''Flag if the image format is TIFF'''
189 | if self.format in ['TIFF', 'GTiff']:
190 | return True
191 | else:
192 | return False
193 | @property
194 | def hasWorldFile(self):
195 | return self.wfPath is not None
196 | @property
197 | def isGeoref(self):
198 | '''Flag if georef parameters have been extracted'''
199 | if self.georef is not None:
200 | if self.origin is not None and self.pxSize is not None and self.rotation is not None:
201 | return True
202 | else:
203 | return False
204 | else:
205 | return False
206 | @property
207 | def isOneBand(self):
208 | return self.nbBands == 1
209 | @property
210 | def isFloat(self):
211 | return self.dtype in ['Float', 'float']
212 | @property
213 | def ddtype(self):
214 | '''
215 | Get data type and depth in a concatenate string like
216 | 'int8', 'int16', 'uint16', 'int32', 'uint32', 'float32' ...
217 | Can be used to define numpy or gdal data type
218 | '''
219 | if self.dtype is None or self.depth is None:
220 | return None
221 | else:
222 | return self.dtype + str(self.depth)
223 |
224 |
225 | def __repr__(self):
226 | return '\n'.join([
227 | '* Paths infos :',
228 | ' path {}'.format(self.path),
229 | ' worldfile {}'.format(self.wfPath),
230 | ' format {}'.format(self.format),
231 | "* Data infos :",
232 | " size {}".format(self.size),
233 | " bit depth {}".format(self.depth),
234 | " data type {}".format(self.dtype),
235 | " number of bands {}".format(self.nbBands),
236 | " nodata value {}".format(self.noData),
237 | "* Georef & Geometry : \n{}".format(self.georef)
238 | ])
239 |
240 | #######################################
241 | # Methods
242 | #######################################
243 |
244 | def toGDAL(self):
245 | '''Get GDAL dataset'''
246 | return gdal.Open(self.path, gdal.GA_ReadOnly)
247 |
248 | def readAsNpArray(self, subset=True):
249 | '''Read raster pixels values as Numpy Array'''
250 |
251 | if subset and self.subBoxGeo is not None:
252 | #georef = GeoRef(self.size, self.pxSize, self.subBoxGeoOrigin, rot=self.rotation, pxCenter=True)
253 | img = NpImage(self.path, subBoxPx=self.subBoxPx, noData=self.noData, georef=self.georef, adjustGeoref=True)
254 | else:
255 | img = NpImage(self.path, noData=self.noData, georef=self.georef)
256 | return img
257 |
--------------------------------------------------------------------------------
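A short usage sketch for GeoRaster, assuming a georeferenced file (the 'dem.tif' path is illustrative) that carries GeoTIFF tags or sits next to its worldfile:

    from .georaster import GeoRaster

    rast = GeoRaster('dem.tif')    # reads the header, the geotiff tags and/or the worldfile
    print(rast)                    # size, depth, dtype, nodata and georef summary
    img = rast.readAsNpArray()     # NpImage wrapping the pixel values
    print(img.data.shape)          # the underlying numpy array is exposed as .data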
/core/georaster/img_utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | # This file is part of BlenderGIS
4 |
5 | # ***** GPL LICENSE BLOCK *****
6 | #
7 | # This program is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 | # All rights reserved.
20 | # ***** GPL LICENSE BLOCK *****
21 |
22 |
23 | import struct
24 | import imghdr
25 |
26 |
27 | def isValidStream(data):
28 | if data is None:
29 | return False
30 | format = imghdr.what(None, data)
31 | if format is None:
32 | return False
33 | return True
34 |
35 |
36 | def getImgFormat(filepath):
37 | """
38 | Read the header of an image file and try to determine its format
39 | no external dependencies; supports JPEG, JPEG2000, PNG, GIF, BMP, TIFF, EXR
40 | """
41 | format = None
42 | with open(filepath, 'rb') as fhandle:
43 | head = fhandle.read(32)
44 | # handle GIFs
45 | if head[:6] in (b'GIF87a', b'GIF89a'):
46 | format = 'GIF'
47 | # handle PNG
48 | elif head.startswith(b'\211PNG\r\n\032\n'):
49 | format = 'PNG'
50 | # handle JPEGs
51 | #elif head[6:10] in (b'JFIF', b'Exif')
52 | elif (b'JFIF' in head or b'Exif' in head or b'8BIM' in head) or head.startswith(b'\xff\xd8'):
53 | format = 'JPEG'
54 | # handle JPEG2000s
55 | elif head.startswith(b'\x00\x00\x00\x0cjP \r\n\x87\n'):
56 | format = 'JPEG2000'
57 | # handle BMP
58 | elif head.startswith(b'BM'):
59 | format = 'BMP'
60 | # handle TIFF
61 | elif head[:2] in (b'MM', b'II'):
62 | format = 'TIFF'
63 | # handle EXR
64 | elif head.startswith(b'\x76\x2f\x31\x01'):
65 | format = 'EXR'
66 | return format
67 |
68 |
69 |
70 | def getImgDim(filepath):
71 | """
72 | Return (width, height) for a given image file
73 | no external dependencies; supports JPEG, JPEG2000, PNG, GIF, BMP
74 | """
75 | width, height = None, None
76 |
77 | with open(filepath, 'rb') as fhandle:
78 | head = fhandle.read(32)
79 | # handle GIFs
80 | if head[:6] in (b'GIF87a', b'GIF89a'):
81 | try:
82 | width, height = struct.unpack("LL", head[16:24])
89 | except struct.error:
90 | # Maybe this is for an older PNG version.
91 | try:
92 | width, height = struct.unpack(">LL", head[8:16])
93 | except struct.error:
94 | raise ValueError("Invalid PNG file")
95 | # handle JPEGs
96 | elif (b'JFIF' in head or b'Exif' in head or b'8BIM' in head) or head.startswith(b'\xff\xd8'):
97 | try:
98 | fhandle.seek(0) # Read 0xff next
99 | size = 2
100 | ftype = 0
101 | while not 0xc0 <= ftype <= 0xcf:
102 | fhandle.seek(size, 1)
103 | byte = fhandle.read(1)
104 | while ord(byte) == 0xff:
105 | byte = fhandle.read(1)
106 | ftype = ord(byte)
107 | size = struct.unpack('>H', fhandle.read(2))[0] - 2
108 | # We are at a SOFn block
109 | fhandle.seek(1, 1) # Skip `precision' byte.
110 | height, width = struct.unpack('>HH', fhandle.read(4))
111 | except struct.error:
112 | raise ValueError("Invalid JPEG file")
113 | # handle JPEG2000s
114 | elif head.startswith(b'\x00\x00\x00\x0cjP \r\n\x87\n'):
115 | fhandle.seek(48)
116 | try:
117 | height, width = struct.unpack('>LL', fhandle.read(8))
118 | except struct.error:
119 | raise ValueError("Invalid JPEG2000 file")
120 | # handle BMP
121 | elif head.startswith(b'BM'):
122 | imgtype = 'BMP'
123 | try:
124 | width, height = struct.unpack(" _M_short else \
14 | (value, )
15 |
16 | def _2(value):
17 | if not isinstance(value, bytes):
18 | value = value.encode()
19 | value += b"" if value.endswith(b"\x00") else b"\x00"
20 | return value
21 |
22 | _m_byte = 0
23 | _M_byte = 2**16
24 | def _3(value):
25 | value = int(value)
26 | return (_m_byte, ) if value < _m_byte else \
27 | (_M_byte, ) if value > _M_byte else \
28 | (value, )
29 |
30 | _m_long = 0
31 | _M_long = 2**32
32 | def _4(value):
33 | value = int(value)
34 | return (_m_long, ) if value < _m_long else \
35 | (_M_long, ) if value > _M_long else \
36 | (value, )
37 |
38 | def _5(value):
39 | if not isinstance(value, tuple): value = (value, )
40 | return reduce(tuple.__add__, [(f.numerator, f.denominator) for f in [fractions.Fraction(str(v)).limit_denominator(10000000) for v in value]])
41 |
42 | _m_s_short = -_M_short/2
43 | _M_s_short = _M_short/2-1
44 | def _6(value):
45 | value = int(value)
46 | return (_m_s_short, ) if value < _m_s_short else \
47 | (_M_s_short, ) if value > _M_s_short else \
48 | (value, )
49 |
50 | def _7(value):
51 | if not isinstance(value, bytes):
52 | value = value.encode()
53 | return value
54 |
55 | _m_s_byte = -_M_byte/2
56 | _M_s_byte = _M_byte/2-1
57 | def _8(value):
58 | value = int(value)
59 | return (_m_s_byte, ) if value < _m_s_byte else \
60 | (_M_s_byte, ) if value > _M_s_byte else \
61 | (value, )
62 |
63 | _m_s_long = -_M_long/2
64 | _M_s_long = _M_long/2-1
65 | def _9(value):
66 | value = int(value)
67 | return (_m_s_long, ) if value < _m_s_long else \
68 | (_M_s_long, ) if value > _M_s_long else \
69 | (value, )
70 |
71 | _10 = _5
72 |
73 | def _11(value):
74 | return (float(value), )
75 |
76 | _12 = _11
77 |
78 |
79 | #######################
80 | # Tag-specific encoders
81 |
82 | # XPTitle XPComment XPAuthor
83 | _0x9c9b = _0x9c9c = _0x9c9d = lambda value : reduce(tuple.__add__, [(ord(e), 0) for e in value])
84 | # UserComment GPSProcessingMethod
85 | _0x9286 = _0x1b = lambda value: b"ASCII\x00\x00\x00" + (value.encode() if not isinstance(value, bytes) else value)
86 | # GPSLatitudeRef
87 | _0x1 = lambda value: b"N\x00" if bool(value >= 0) == True else b"S\x00"
88 | # GPSLatitude
89 | def _0x2(value):
90 | value = abs(value)
91 |
92 | degrees = math.floor(value)
93 | minutes = (value - degrees) * 60
94 | seconds = (minutes - math.floor(minutes)) * 60
95 | minutes = math.floor(minutes)
96 |
97 | if seconds >= (60.-0.0001):
98 | seconds = 0.
99 | minutes += 1
100 |
101 | if minutes >= (60.-0.0001):
102 | minutes = 0.
103 | degrees += 1
104 |
105 | return _5((degrees, minutes, seconds))
106 | #GPSLongitudeRef
107 | _0x3 = lambda value: b"E\x00" if bool(value >= 0) == True else b"W\x00"
108 | #GPSLongitude
109 | _0x4 = _0x2
110 | #GPSAltitudeRef
111 | _0x5 = lambda value: _3(1 if value < 0 else 0)
112 | #GPSAltitude
113 | _0x6 = lambda value: _5(abs(value))
114 | # GPSTimeStamp
115 | _0x7 = lambda value: _5(tuple(float(e) for e in [value.hour, value.minute, value.second]))
116 | # GPSDateStamp
117 | _0x1d = lambda value: _2(value.strftime("%Y:%m:%d"))
118 | # DateTime DateTimeOriginal DateTimeDigitized
119 | _0x132 = _0x9003 = _0x9004 = lambda value: _2(value.strftime("%Y:%m:%d %H:%M:%S"))
120 |
--------------------------------------------------------------------------------
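A worked check of the GPS degree-to-DMS encoding implemented by _0x2 and _5 above, run from within this module; 45.5 is chosen because it is exact in binary floating point, so the result is deterministic:

    # 45.5 deg -> 45 deg, 30 min, 0.0 sec; _5 packs each value as (numerator, denominator)
    assert _0x2(45.5) == (45, 1, 30, 1, 0, 1)
    # the hemisphere is carried separately by the *Ref encoders
    assert _0x1(45.5) == b"N\x00" and _0x1(-45.5) == b"S\x00"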
/core/lib/Tyf/gkd.py:
--------------------------------------------------------------------------------
1 | # -*- encoding: utf-8 -*-
2 | # Copyright 2012-2015, THOORENS Bruno - http://bruno.thoorens.free.fr/licences/tyf.html
3 | # ~ http://www.remotesensing.org/geotiff/spec/geotiffhome.html
4 |
5 | from . import ifd, tags, values, __geotiff__, __PY3__
6 | import collections
7 |
8 | GeoKeyModel = {
9 | 33550: collections.namedtuple("ModelPixelScale", "ScaleX, ScaleY, ScaleZ"),
10 | 33922: collections.namedtuple("ModelTiepoint", "I,J,K,X,Y,Z"),
11 | 34264: collections.namedtuple("ModelTransformation", "a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p")
12 | }
13 |
14 | def Transform(obj, x=0., y=0., z1=0.,z2=1.):
15 | return (
16 | obj[0] * x + obj[1] * y + obj[2] * z1 + obj[3] * z2,
17 | obj[4] * x + obj[5] * y + obj[6] * z1 + obj[7] * z2,
18 | obj[8] * x + obj[9] * y + obj[10] * z1 + obj[11] * z2,
19 | obj[12] * x + obj[13] * y + obj[14] * z1 + obj[15] * z2
20 | )
21 |
22 | _TAGS = {
23 | # GeoTIFF Configuration GeoKeys
24 | 1024: ("GTModelTypeGeoKey", [3], 0, None),
25 | 1025: ("GTRasterTypeGeoKey", [3], 1, None),
26 | 1026: ("GTCitationGeoKey", [2], None, None), # ASCII text
27 |
28 | # Geographic CS Parameter GeoKeys
29 | 2048: ("GeographicTypeGeoKey", [3], 4326, None), # epsg datum code [4001 - 4999]
30 | 2049: ("GeogCitationGeoKey", [2], None, None), # ASCII text
31 | 2050: ("GeogGeodeticDatumGeoKey", [3], None, None), # use 2048 !
32 | 2051: ("GeogPrimeMeridianGeoKey", [3], 8901, None), # epsg prime meridian code [8001 - 8999]
33 | 2052: ("GeogLinearUnitsGeoKey", [3], 9001, None), # epsg linear unit code [9000 - 9099]
34 | 2053: ("GeogLinearUnitSizeGeoKey", [12], None, None), # custom unit in meters
35 | 2054: ("GeogAngularUnitsGeoKey", [3], 9101, None),
36 | 2055: ("GeogAngularUnitsSizeGeoKey", [12], None, None), # custom unit in radians
37 | 2056: ("GeogEllipsoidGeoKey", [3], None, None), # epsg ellipsoid code [7000 - 7999]
38 | 2057: ("GeogSemiMajorAxisGeoKey", [12], None, None),
39 | 2058: ("GeogSemiMinorAxisGeoKey", [12], None, None),
40 | 2059: ("GeogInvFlatteningGeoKey", [12], None, None),
41 | 2060: ("GeogAzimuthUnitsGeoKey",[3], None, None),
42 | 2061: ("GeogPrimeMeridianLongGeoKey", [12], None, None), # custom prime meridian value in GeogAngularUnits
43 |
44 | # Projected CS Parameter GeoKeys
45 | 3072: ("ProjectedCSTypeGeoKey", [3], None, None), # epsg grid code [20000 - 32760]
46 | 3073: ("PCSCitationGeoKey", [2], None, None), # ASCII text
47 | 3074: ("ProjectionGeoKey", [3], None, None), # [10000 - 19999]
48 | 3075: ("ProjCoordTransGeoKey", [3], None, None),
49 | 3076: ("ProjLinearUnitsGeoKey", [3], None, None),
50 | 3077: ("ProjLinearUnitSizeGeoKey", [12], None, None), # custom unit in meters
51 | 3078: ("ProjStdParallel1GeoKey", [12], None, None),
52 | 3079: ("ProjStdParallel2GeoKey", [12], None, None),
53 | 3080: ("ProjNatOriginLongGeoKey", [12], None, None),
54 | 3081: ("ProjNatOriginLatGeoKey", [12], None, None),
55 | 3082: ("ProjFalseEastingGeoKey", [12], None, None),
56 | 3083: ("ProjFalseNorthingGeoKey", [12], None, None),
57 | 3084: ("ProjFalseOriginLongGeoKey", [12], None, None),
58 | 3085: ("ProjFalseOriginLatGeoKey", [12], None, None),
59 | 3086: ("ProjFalseOriginEastingGeoKey", [12], None, None),
60 | 3087: ("ProjFalseOriginNorthingGeoKey", [12], None, None),
61 | 3088: ("ProjCenterLongGeoKey", [12], None, None),
62 | 3089: ("ProjCenterLatGeoKey", [12], None, None),
63 | 3090: ("ProjCenterEastingGeoKey", [12], None, None),
64 | 3091: ("ProjFalseOriginNorthingGeoKey", [12], None, None),
65 | 3092: ("ProjScaleAtNatOriginGeoKey", [12], None, None),
66 | 3093: ("ProjScaleAtCenterGeoKey", [12], None, None),
67 | 3094: ("ProjAzimuthAngleGeoKey", [12], None, None),
68 | 3095: ("ProjStraightVertPoleLongGeoKey", [12], None, None),
69 |
70 | # Vertical CS Parameter Keys
71 | 4096: ("VerticalCSTypeGeoKey", [3], None, None),
72 | 4097: ("VerticalCitationGeoKey", [2], None, None),
73 | 4098: ("VerticalDatumGeoKey", [3], None, None),
74 | 4099: ("VerticalUnitsGeoKey", [3], None, None),
75 | }
76 |
77 | _2TAG = dict((v[0], t) for t,v in _TAGS.items())
78 | _2KEY = dict((v, k) for k,v in _2TAG.items())
79 |
80 | if __PY3__:
81 | import functools
82 | reduce = functools.reduce
83 | long = int
84 |
85 | class GkdTag(ifd.TiffTag):
86 | strict = True
87 |
88 | def __init__(self, tag=0x0, value=None, name="GeoTiff Tag"):
89 | self.name = name
90 | if tag == 0: return
91 | self.key, types, default, self.comment = _TAGS.get(tag, ("Unknown", [0,], None, "Undefined tag"))
92 | value = default if value == None else value
93 |
94 | self.tag = tag
95 | restricted = getattr(values, self.key, {})
96 |
97 | if restricted:
98 | reverse = dict((v,k) for k,v in restricted.items())
99 | if value in restricted:
100 | self.meaning = restricted.get(value)
101 | elif value in reverse:
102 | value = reverse[value]
103 | self.meaning = value
104 | elif GkdTag.strict:
105 | raise ValueError('"%s" value must be one of %s, get %s instead' % (self.key, list(restricted.keys()), value))
106 |
107 | self.type, self.count, self.value = self._encode(value, types)
108 |
109 | def __setattr__(self, attr, value):
110 | object.__setattr__(self, attr, value)
111 |
112 | def _encode(self, value, types):
113 | if isinstance(value, str): value = value.encode()
114 | elif not hasattr(value, "__len__"): value = (value, )
115 | typ = 0
116 | if 2 in types: typ = 34737
117 | elif 12 in types: typ = 34736
118 | return typ, len(value), value
119 |
120 | def _decode(self):
121 | if self.count == 1: return self.value[0]
122 | else: return self.value
123 |
124 |
125 | class Gkd(dict):
126 | tagname = "Geotiff Tag"
127 | version = __geotiff__[0]
128 | revision = __geotiff__[1:]
129 |
130 | def __init__(self, value={}, **pairs):
131 | dict.__init__(self)
132 | self.from_ifd(value, **pairs)
133 |
134 | def __getitem__(self, tag):
135 | if isinstance(tag, str): tag = _2TAG[tag]
136 | return dict.__getitem__(self, tag)._decode()
137 |
138 | def __setitem__(self, tag, value):
139 | if isinstance(tag, str): tag = _2TAG[tag]
140 | dict.__setitem__(self, tag, GkdTag(tag, value, name=self.tagname))
141 |
142 | def get(self, tag, error=None):
143 | if hasattr(self, "_%s" % tag): return getattr(self, "_%s" % tag)
144 | else: return dict.get(self, tag, error)
145 |
146 | def to_ifd(self):
147 | _34735, _34736, _34737, nbkey, _ifd = (), (), b"", 0, {}
148 | for key,tag in sorted(self.items(), key = lambda a: a[0]):
149 | if tag.type == 0:
150 | _34735 += (key, 0, 1) + tag.value
151 | nbkey += 1
152 | elif tag.type == 34736: # GeoDoubleParamsTag
153 | _34735 += (key, 34736, 1, len(_34736))
154 | _34736 += tag.value
155 | nbkey += 1
156 | elif tag.type == 34737: # GeoAsciiParamsTag
157 | _34735 += (key, 34737, tag.count+1, len(_34737))
158 | _34737 += tag.value + b"|"
159 | nbkey += 1
160 |
161 | result = ifd.Ifd()
162 | result.set(33922, 12, reduce(tuple.__add__, [tuple(e) for e in self.get(33922, ([0.,0.,0.,0.,0.,0.],))]))
163 | result.set(33550, 12, tuple(self.get(33550, (1.,1.,1.))))
164 | result.set(34264, 12, tuple(self.get(34264, (1.,0.,0.,0.,0.,-1.,0.,0.,0.,0.,1.,0.,0.,0.,0.,1.))))
165 | result.set(34735, 3, (self.version,) + self.revision + (nbkey,) + _34735)
166 | result.set(34736, 12, _34736)
167 | result.set(34737, 2, _34737)
168 | return result
169 |
170 | def from_ifd(self, ifd = {}, **kw):
171 | pairs = dict(ifd, **kw)
172 | for tag in [t for t in [33922, 33550, 34264] if t in pairs]: # ModelTiepointTag, ModelPixelScaleTag, ModelTransformationTag
173 | nt = GeoKeyModel[tag]
174 | if tag == 33922: # can be more than one TiePoint
175 | n = len(nt._fields)
176 | seq = ifd[tag]
177 | setattr(self, "_%s" % tag, tuple(nt(*seq[i:i+n]) for i in range(0, len(seq), n)))
178 | else:
179 | setattr(self, "_%s" % tag, nt(*ifd[tag]))
180 | if 34736 in pairs: # GeoDoubleParamsTag
181 | _34736 = ifd[34736]
182 | if 34737 in pairs: # GeoAsciiParamsTag
183 | _34737 = ifd[34737]
184 | if 34735 in pairs: # GeoKeyDirectoryTag
185 | _34735 = ifd[34735]
186 | self.version = _34735[0]
187 | self.revision = _34735[1:3]
188 | for (tag, typ, count, value) in zip(_34735[4::4],_34735[5::4],_34735[6::4],_34735[7::4]):
189 | if typ == 0: self[tag] = value
190 | elif typ == 34736: self[tag] = _34736[value]
191 | elif typ == 34737: self[tag] = _34737[value:value+count-1]
192 |
193 | def getModelTransformation(self, tie_index=0):
194 | if hasattr(self, "_34264"):
195 | matrix = GeoKeyModel[34264](*getattr(self, "_34264"))
196 | elif hasattr(self, "_33922") and hasattr(self, "_33550"):
197 | Sx, Sy, Sz = getattr(self, "_33550")
198 | I, J, K, X, Y, Z = getattr(self, "_33922")[tie_index]
199 | matrix = GeoKeyModel[34264](
200 | Sx, 0., 0., X - I*Sx,
201 | 0., -Sy, 0., Y + J*Sy,
202 | 0., 0. , Sz, Z - K*Sz,
203 | 0., 0. , 0., 1.
204 | )
205 | else:
206 | matrix = GeoKeyModel[34264](
207 | 1., 0. , 0., 0.,
208 | 0., -1., 0., 0.,
209 | 0., 0. , 1., 0.,
210 | 0., 0. , 0., 1.
211 | )
212 | return lambda x,y,z1=0.,z2=1.,m=matrix: Transform(m, x,y,z1,z2)
213 |
214 | def tags(self):
215 | for v in sorted(dict.values(self), key=lambda e:e.tag):
216 | yield v
217 |
--------------------------------------------------------------------------------
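A worked check of the tiepoint + pixel-scale branch of getModelTransformation() above, with illustrative numbers: a single tiepoint (I, J, K, X, Y, Z) = (0, 0, 0, 1000.0, 2000.0, 0.0) and a scale (Sx, Sy, Sz) = (10.0, 10.0, 1.0) produce the matrix

    # [ 10    0   0   1000 ]
    # [  0  -10   0   2000 ]
    # [  0    0   1      0 ]
    # [  0    0   0      1 ]
    #
    # so pixel column 5, row 3 maps to model coordinates
    #   x = 10*5 + 1000 = 1050.0
    #   y = -10*3 + 2000 = 1970.0
    # i.e. transform = gkd_instance.getModelTransformation(); transform(5, 3)[:2] == (1050.0, 1970.0)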
/core/lib/Tyf/ifd.py:
--------------------------------------------------------------------------------
1 | # -*- encoding:utf-8 -*-
2 | # Copyright 2012-2015, THOORENS Bruno - http://bruno.thoorens.free.fr/licences/tyf.html
3 |
4 | from . import io, os, tags, encoders, decoders, reduce, values, TYPES, urllib, StringIO
5 | import struct, fractions
6 |
7 | class TiffTag(object):
8 |
9 | # IFD entries values
10 | tag = 0x0
11 | type = 0
12 | count = 0
13 | value = None
14 |
15 | # end user side values
16 | key = "Undefined"
17 | name = "Undefined tag"
18 | comment = "Nothing about this tag"
19 | meaning = None
20 |
21 | def __init__(self, tag, type=None, value=None, name="Tiff tag"):
22 | self.key, _typ, default, self.comment = tags.get(tag)
23 | self.tag = tag
24 | self.name = name
25 | self.type = _typ[-1] if type == None else type
26 |
27 | if value != None: self._encode(value)
28 | elif default != None: self.value = (default,) if not hasattr(default, "len") else default
29 |
30 | def __setattr__(self, attr, value):
31 | if attr == "type":
32 | try: object.__setattr__(self, "_encoder", getattr(encoders, "_%s"%hex(self.tag)))
33 | except AttributeError: object.__setattr__(self, "_encoder", getattr(encoders, "_%s"%value))
34 | try: object.__setattr__(self, "_decoder", getattr(decoders, "_%s"%hex(self.tag)))
35 | except AttributeError: object.__setattr__(self, "_decoder", getattr(decoders, "_%s"%value))
36 | elif attr == "value":
37 | restricted = getattr(values, self.key, None)
38 | if restricted != None:
39 | v = value[0] if isinstance(value, tuple) else value
40 | self.meaning = restricted.get(v, "no description found [%r]" % (v,))
41 | self.count = len(value) // (1 if self.type not in [5,10] else 2)
42 | self._determine_if_offset()
43 | object.__setattr__(self, attr, value)
44 |
45 | def __repr__(self):
46 | return "<%s 0x%x: %s = %r>" % (self.name, self.tag, self.key, self.value) + ("" if not self.meaning else ' := %r'%self.meaning)
47 |
48 | def _encode(self, value):
49 | self.value = self._encoder(value)
50 |
51 | def _decode(self):
52 | return self._decoder(self.value)
53 |
54 | def _determine_if_offset(self):
55 | if self.count == 1 and self.type in [1, 2, 3, 4, 6, 7, 8, 9]: setattr(self, "value_is_offset", False)
56 | elif self.count <= 2 and self.type in [3, 8]: setattr(self, "value_is_offset", False)
57 | elif self.count <= 4 and self.type in [1, 2, 6, 7]: setattr(self, "value_is_offset", False)
58 | else: setattr(self, "value_is_offset", True)
59 |
60 | def _fill(self):
61 | s = struct.calcsize("="+TYPES[self.type][0])
62 | voidspace = (struct.calcsize("=L") - self.count*s)//s
63 | if self.type in [2, 7]: return self.value + b"\x00"*voidspace
64 | elif self.type in [1, 3, 6, 8]: return self.value + ((0,)*voidspace)
65 | return self.value
66 |
67 | def calcsize(self):
68 | return struct.calcsize("=" + TYPES[self.type][0] * (self.count*(2 if self.type in [5,10] else 1))) if self.value_is_offset else 0
69 |
70 |
71 | class Ifd(dict):
72 | tagname = "Tiff Tag"
73 |
74 | exif_ifd = property(lambda obj: obj.sub_ifd.get(34665, {}), None, None, "shortcut to EXIF sub ifd")
75 | gps_ifd = property(lambda obj: obj.sub_ifd.get(34853, {}), None, None, "shortcut to GPS sub ifd")
76 | has_raster = property(lambda obj: 273 in obj or 288 in obj or 324 in obj or 513 in obj, None, None, "return true if it contains raster data")
77 | raster_loaded = property(lambda obj: not(obj.has_raster) or bool(len(obj.stripes+obj.tiles+obj.free)+len(obj.jpegIF)), None, None, "")
78 | size = property(
79 | lambda obj: {
80 | "ifd": struct.calcsize("=H" + (len(obj)*"HHLL") + "L"),
81 | "data": reduce(int.__add__, [t.calcsize() for t in dict.values(obj)])
82 | }, None, None, "return ifd-packed size and data-packed size")
83 |
84 | def __init__(self, sub_ifd={}, **kwargs):
85 | self._sub_ifd = sub_ifd
86 | setattr(self, "tagname", kwargs.pop("tagname", "Tiff tag"))
87 | dict.__init__(self)
88 |
89 | self.sub_ifd = {}
90 | self.stripes = ()
91 | self.tiles = ()
92 | self.free = ()
93 | self.jpegIF = b""
94 |
95 | def __setitem__(self, tag, value):
96 | for t,(ts,tname) in self._sub_ifd.items():
97 | tag = tags._2tag(tag, family=ts)
98 | if tag in ts:
99 | if not t in self.sub_ifd:
100 | self.sub_ifd[t] = Ifd(sub_ifd={}, tagname=tname)
101 | self.sub_ifd[t].addtag(TiffTag(tag, value=value))
102 | return
103 | else:
104 | tag = tags._2tag(tag)
105 | dict.__setitem__(self, tag, TiffTag(tag, value=value, name=self.tagname))
106 |
107 | def __getitem__(self, tag):
108 | for i in self.sub_ifd.values():
109 | try: return i[tag]
110 | except KeyError: pass
111 | return dict.__getitem__(self, tags._2tag(tag))._decode()
112 |
113 | def _check(self):
114 | for key in self.sub_ifd:
115 | if key not in self:
116 | self.addtag(TiffTag(key, 4, 0, name=self.tagname))
117 |
118 | def set(self, tag, typ, value):
119 | for t,(ts,tname) in self._sub_ifd.items():
120 | if tag in ts:
121 | if not t in self.sub_ifd:
122 | self.sub_ifd[t] = Ifd(sub_ifd={}, tagname=tname)
123 | self.sub_ifd[t].set(tag, typ, value)
124 | return
125 | tifftag = TiffTag(tag=tag, type=typ, name=self.tagname)
126 | tifftag.value = (value,) if not hasattr(value, "__len__") else value
127 | tifftag.name = self.tagname
128 | dict.__setitem__(self, tag, tifftag)
129 |
130 | def get(self, tag):
131 | for i in self.sub_ifd.values():
132 | if tag in i: return i.get(tag)
133 | return dict.get(self, tags._2tag(tag))
134 |
135 | def addtag(self, tifftag):
136 | if isinstance(tifftag, TiffTag):
137 | tifftag.name = self.tagname
138 | dict.__setitem__(self, tifftag.tag, tifftag)
139 |
140 | def tags(self):
141 | for v in sorted(dict.values(self), key=lambda e:e.tag):
142 | yield v
143 | for i in self.sub_ifd.values():
144 | for v in sorted(dict.values(i), key=lambda e:e.tag):
145 | yield v
146 |
147 | def set_location(self, longitude, latitude, altitude=0.):
148 | if 34853 not in self._sub_ifd:
149 | self._sub_ifd[34853] = [tags.gpsT, "GPS tag"]
150 | self[1] = self[2] = latitude
151 | self[3] = self[4] = longitude
152 | self[5] = self[6] = altitude
153 |
154 | def get_location(self):
155 | if set([1,2,3,4,5,6]) <= set(self.gps_ifd.keys()):
156 | return (
157 | self[3] * self[4],
158 | self[1] * self[2],
159 | self[5] * self[6]
160 | )
161 |
162 | def load_location(self, zoom=15, size="256x256", mcolor="0xff00ff", format="png", scale=1):
163 | if set([1,2,3,4]) <= set(self.gps_ifd.keys()):
164 | gps_ifd = self.gps_ifd
165 | latitude = gps_ifd[1] * gps_ifd[2]
166 | longitude = gps_ifd[3] * gps_ifd[4]
167 | try:
168 | opener = urllib.urlopen("https://maps.googleapis.com/maps/api/staticmap?center=%s,%s&zoom=%s&size=%s&markers=color:%s%%7C%s,%s&format=%s&scale=%s" % (
169 | latitude, longitude,
170 | zoom, size, mcolor,
171 | latitude, longitude,
172 | format, scale
173 | ))
174 | except:
175 | print("googleapis connection error")
176 | return StringIO()
177 | else:
178 | return StringIO(opener.read())
179 | else:
180 | return StringIO()
181 |
182 | def dump_location(self, tilename, zoom=15, size="256x256", mcolor="0xff00ff", format="png", scale=1):
183 | if set([1,2,3,4]) <= set(self.gps_ifd.keys()):
184 | gps_ifd = self.gps_ifd
185 | latitude = gps_ifd[1] * gps_ifd[2]
186 | longitude = gps_ifd[3] * gps_ifd[4]
187 | try:
188 | urllib.urlretrieve("https://maps.googleapis.com/maps/api/staticmap?center=%s,%s&zoom=%s&size=%s&markers=color:%s%%7C%s,%s&format=%s&scale=%s" % (
189 | latitude, longitude,
190 | zoom, size, mcolor,
191 | latitude, longitude,
192 | format, scale
193 | ),
194 | os.path.splitext(tilename)[0] + "."+format
195 | )
196 | except:
197 | print("googleapis connexion error")
198 |
--------------------------------------------------------------------------------
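A hypothetical sketch (coordinates are illustrative) of the GPS helpers above: set_location() creates the GPS sub-IFD on the fly and get_location() folds the N/S and E/W reference tags back into signed decimal degrees:

    gps = Ifd()
    gps.set_location(longitude=2.2945, latitude=48.8584, altitude=35.0)
    lon, lat, alt = gps.get_location()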
/core/lib/imageio/README.md:
--------------------------------------------------------------------------------
1 | This is the Python package that is installed on the user's system.
2 |
3 | It consists of a `core` module, which implements the basis of imageio.
4 | The `plugins` module contains the code to actually import/export images,
5 | organised in plugins.
6 |
7 | The `freeze` module provides functionality for freezing apps that make
8 | use of imageio.
9 |
--------------------------------------------------------------------------------
/core/lib/imageio/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2015, imageio contributors
3 | # imageio is distributed under the terms of the (new) BSD License.
4 |
5 | # This docstring is used at the index of the documentation pages, and
6 | # gets inserted into a slightly larger description (in setup.py) for
7 | # the page on Pypi:
8 | """
9 | Imageio is a Python library that provides an easy interface to read and
10 | write a wide range of image data, including animated images, volumetric
11 | data, and scientific formats. It is cross-platform, runs on Python 2.x
12 | and 3.x, and is easy to install.
13 |
14 | Main website: http://imageio.github.io
15 | """
16 |
17 | __version__ = '1.5'
18 |
19 | # Load some bits from core
20 | from .core import FormatManager, RETURN_BYTES # noqa
21 |
22 | # Instantiate format manager
23 | formats = FormatManager()
24 |
25 | # Load the functions
26 | from .core.functions import help # noqa
27 | from .core.functions import get_reader, get_writer # noqa
28 | from .core.functions import imread, mimread, volread, mvolread # noqa
29 | from .core.functions import imwrite, mimwrite, volwrite, mvolwrite # noqa
30 |
31 | # Load function aliases
32 | from .core.functions import read, save # noqa
33 | from .core.functions import imsave, mimsave, volsave, mvolsave # noqa
34 |
35 | # Load all the plugins
36 | from . import plugins # noqa
37 |
38 | # expose the show method of formats
39 | show_formats = formats.show
40 |
41 | # Clean up some names
42 | del FormatManager
43 |
--------------------------------------------------------------------------------
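A minimal sketch of the functional API re-exported above (file paths are illustrative); inside BlenderGIS this package is vendored, so it is normally reached through a relative import (from .lib import imageio, as in core/checkdeps.py):

    import imageio

    imageio.show_formats()              # list the registered plugins/formats
    img = imageio.imread('input.png')   # numpy array produced by the matching plugin
    imageio.imwrite('output.png', img)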
/core/lib/imageio/core/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2015, imageio contributors
3 | # Distributed under the (new) BSD License. See LICENSE.txt for more info.
4 |
5 | """ This subpackage provides the core functionality of imageio
6 | (everything but the plugins).
7 | """
8 |
9 | from .util import Image, Dict, asarray, image_as_uint, urlopen # noqa
10 | from .util import BaseProgressIndicator, StdoutProgressIndicator # noqa
11 | from .util import string_types, text_type, binary_type, IS_PYPY # noqa
12 | from .util import get_platform, appdata_dir, resource_dirs, has_module # noqa
13 | from .findlib import load_lib # noqa
14 | from .fetching import get_remote_file, InternetNotAllowedError # noqa
15 | from .request import Request, read_n_bytes, RETURN_BYTES # noqa
16 | from .format import Format, FormatManager # noqa
17 |
--------------------------------------------------------------------------------
/core/lib/imageio/core/fetching.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2015, imageio contributors
3 | # Based on code from the vispy project
4 | # Distributed under the (new) BSD License. See LICENSE.txt for more info.
5 |
6 | """Data downloading and reading functions
7 | """
8 |
9 | from __future__ import absolute_import, print_function, division
10 |
11 | from math import log
12 | import os
13 | from os import path as op
14 | import sys
15 | import shutil
16 | import time
17 |
18 | from . import appdata_dir, resource_dirs
19 | from . import StdoutProgressIndicator, string_types, urlopen
20 |
21 |
22 | class InternetNotAllowedError(IOError):
23 | """ Plugins that need resources can just use get_remote_file(), but
24 | should catch this error and silently ignore it.
25 | """
26 | pass
27 |
28 |
29 | def get_remote_file(fname, directory=None, force_download=False):
30 | """ Get a the filename for the local version of a file from the web
31 |
32 | Parameters
33 | ----------
34 | fname : str
35 | The relative filename on the remote data repository to download.
36 | These correspond to paths on
37 | ``https://github.com/imageio/imageio-binaries/``.
38 | directory : str | None
39 | The directory where the file will be cached if a download was
40 | required to obtain the file. By default, the appdata directory
41 | is used. This is also the first directory that is checked for
42 | a local version of the file.
43 | force_download : bool | str
44 | If True, the file will be downloaded even if a local copy exists
45 | (and this copy will be overwritten). Can also be a YYYY-MM-DD date
46 | to ensure a file is up-to-date (modified date of a file on disk,
47 | if present, is checked).
48 |
49 | Returns
50 | -------
51 | fname : str
52 | The path to the file on the local system.
53 | """
54 | _url_root = 'https://github.com/imageio/imageio-binaries/raw/master/'
55 | url = _url_root + fname
56 | fname = op.normcase(fname) # convert to native
57 | # Get dirs to look for the resource
58 | directory = directory or appdata_dir('imageio')
59 | dirs = resource_dirs()
60 | dirs.insert(0, directory) # Given dir has preference
61 | # Try to find the resource locally
62 | for dir in dirs:
63 | filename = op.join(dir, fname)
64 | if op.isfile(filename):
65 | if not force_download: # we're done
66 | return filename
67 | if isinstance(force_download, string_types):
68 | ntime = time.strptime(force_download, '%Y-%m-%d')
69 | ftime = time.gmtime(op.getctime(filename))
70 | if ftime >= ntime:
71 | return filename
72 | else:
73 | print('File older than %s, updating...' % force_download)
74 | break
75 |
76 | # If we get here, we're going to try to download the file
77 | if os.getenv('IMAGEIO_NO_INTERNET', '').lower() in ('1', 'true', 'yes'):
78 | raise InternetNotAllowedError('Will not download resource from the '
79 | 'internet because environment variable '
80 | 'IMAGEIO_NO_INTERNET is set.')
81 | # Get filename to store to and make sure the dir exists
82 | filename = op.join(directory, fname)
83 | if not op.isdir(op.dirname(filename)):
84 | os.makedirs(op.abspath(op.dirname(filename)))
85 | # let's go get the file
86 | if os.getenv('CONTINUOUS_INTEGRATION', False): # pragma: no cover
87 | # On Travis, we retry a few times ...
88 | for i in range(2):
89 | try:
90 | _fetch_file(url, filename)
91 | return filename
92 | except IOError:
93 | time.sleep(0.5)
94 | else:
95 | _fetch_file(url, filename)
96 | return filename
97 | else: # pragma: no cover
98 | _fetch_file(url, filename)
99 | return filename
100 |
101 |
102 | def _fetch_file(url, file_name, print_destination=True):
103 | """Load requested file, downloading it if needed or requested
104 |
105 | Parameters
106 | ----------
107 | url: string
108 | The url of file to be downloaded.
109 | file_name: string
110 | Name, along with the path, of where downloaded file will be saved.
111 | print_destination: bool, optional
112 | If true, destination of where file was saved will be printed after
113 | download finishes.
114 | resume: bool, optional
115 | If true, try to resume partially downloaded files.
116 | """
117 | # Adapted from NISL:
118 | # https://github.com/nisl/tutorial/blob/master/nisl/datasets.py
119 |
120 | print('Imageio: %r was not found on your computer; '
121 | 'downloading it now.' % os.path.basename(file_name))
122 |
123 | temp_file_name = file_name + ".part"
124 | local_file = None
125 | initial_size = 0
126 | errors = []
127 | for tries in range(4):
128 | try:
129 | # Checking file size and displaying it alongside the download url
130 | remote_file = urlopen(url, timeout=5.)
131 | file_size = int(remote_file.headers['Content-Length'].strip())
132 | size_str = _sizeof_fmt(file_size)
133 | print('Try %i. Download from %s (%s)' % (tries+1, url, size_str))
134 | # Downloading data (can be extended to resume if need be)
135 | local_file = open(temp_file_name, "wb")
136 | _chunk_read(remote_file, local_file, initial_size=initial_size)
137 | # temp file must be closed prior to the move
138 | if not local_file.closed:
139 | local_file.close()
140 | shutil.move(temp_file_name, file_name)
141 | if print_destination is True:
142 | sys.stdout.write('File saved as %s.\n' % file_name)
143 | break
144 | except Exception as e:
145 | errors.append(e)
146 | print('Error while fetching file: %s.' % str(e))
147 | finally:
148 | if local_file is not None:
149 | if not local_file.closed:
150 | local_file.close()
151 | else:
152 | raise IOError('Unable to download %r. Perhaps there is no internet '
153 | 'connection? If there is, please report this problem.' %
154 | os.path.basename(file_name))
155 |
156 |
157 | def _chunk_read(response, local_file, chunk_size=8192, initial_size=0):
158 | """Download a file chunk by chunk and show advancement
159 |
160 | Can also be used when resuming downloads over http.
161 |
162 | Parameters
163 | ----------
164 | response: urllib.response.addinfourl
165 | Response to the download request in order to get file size.
166 | local_file: file
167 | Hard disk file where data should be written.
168 | chunk_size: integer, optional
169 | Size of downloaded chunks. Default: 8192
170 | initial_size: int, optional
171 | If resuming, indicate the initial size of the file.
172 | """
173 | # Adapted from NISL:
174 | # https://github.com/nisl/tutorial/blob/master/nisl/datasets.py
175 |
176 | bytes_so_far = initial_size
177 | # Returns only amount left to download when resuming, not the size of the
178 | # entire file
179 | total_size = int(response.headers['Content-Length'].strip())
180 | total_size += initial_size
181 |
182 | progress = StdoutProgressIndicator('Downloading')
183 | progress.start('', 'bytes', total_size)
184 |
185 | while True:
186 | chunk = response.read(chunk_size)
187 | bytes_so_far += len(chunk)
188 | if not chunk:
189 | break
190 | _chunk_write(chunk, local_file, progress)
191 | progress.finish('Done')
192 |
193 |
194 | def _chunk_write(chunk, local_file, progress):
195 | """Write a chunk to file and update the progress bar"""
196 | local_file.write(chunk)
197 | progress.increase_progress(len(chunk))
198 | time.sleep(0.0001)
199 |
200 |
201 | def _sizeof_fmt(num):
202 | """Turn number of bytes into human-readable str"""
203 | units = ['bytes', 'kB', 'MB', 'GB', 'TB', 'PB']
204 | decimals = [0, 0, 1, 2, 2, 2]
205 | """Human friendly file size"""
206 | if num > 1:
207 | exponent = min(int(log(num, 1024)), len(units) - 1)
208 | quotient = float(num) / 1024 ** exponent
209 | unit = units[exponent]
210 | num_decimals = decimals[exponent]
211 | format_string = '{0:.%sf} {1}' % (num_decimals)
212 | return format_string.format(quotient, unit)
213 | return '0 bytes' if num == 0 else '1 byte'
214 |
--------------------------------------------------------------------------------
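A hypothetical sketch of get_remote_file(), which is what the freeimage plugin relies on when checkdeps.py calls get_freeimage_lib(); the relative file name below is a placeholder, not a path verified against the imageio-binaries repository:

    from imageio.core.fetching import get_remote_file

    local = get_remote_file('some-plugin/some-binary.dat')   # placeholder name
    print(local)   # cached under appdata_dir('imageio') and downloaded only once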
/core/lib/imageio/core/findlib.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2015, imageio contributors
3 | # Copyright (C) 2013, Zach Pincus, Almar Klein and others
4 |
5 | """ This module contains generic code to find and load a dynamic library.
6 | """
7 |
8 | from __future__ import absolute_import, print_function, division
9 |
10 | import os
11 | import sys
12 | import ctypes
13 |
14 |
15 | LOCALDIR = os.path.abspath(os.path.dirname(__file__))
16 |
17 |
18 | # More generic:
19 | # def get_local_lib_dirs(*libdirs):
20 | # """ Get a list of existing directories that end with one of the given
21 | # subdirs, and that are in the (sub)package that this modules is part of.
22 | # """
23 | # dirs = []
24 | # parts = __name__.split('.')
25 | # for i in reversed(range(len(parts))):
26 | # package_name = '.'.join(parts[:i])
27 | # package = sys.modules.get(package_name, None)
28 | # if package:
29 | # dirs.append(os.path.abspath(os.path.dirname(package.__file__)))
30 | # dirs = [os.path.join(d, sub) for sub in libdirs for d in dirs]
31 | # return [d for d in dirs if os.path.isdir(d)]
32 |
33 |
34 | def looks_lib(fname):
35 | """ Returns True if the given filename looks like a dynamic library.
36 | Based on extension, but cross-platform and more flexible.
37 | """
38 | fname = fname.lower()
39 | if sys.platform.startswith('win'):
40 | return fname.endswith('.dll')
41 | elif sys.platform.startswith('darwin'):
42 | return fname.endswith('.dylib')
43 | else:
44 | return fname.endswith('.so') or '.so.' in fname
45 |
46 |
47 | def generate_candidate_libs(lib_names, lib_dirs=None):
48 | """ Generate a list of candidate filenames of what might be the dynamic
49 | library corresponding with the given list of names.
50 | Returns (lib_dirs, lib_paths)
51 | """
52 | lib_dirs = lib_dirs or []
53 |
54 | # Get system dirs to search
55 | sys_lib_dirs = ['/lib',
56 | '/usr/lib',
57 | '/usr/lib/x86_64-linux-gnu',
58 | '/usr/local/lib',
59 | '/opt/local/lib', ]
60 |
61 | # Get Python dirs to search ('shared' is for Pyzo)
62 | py_sub_dirs = ['lib', 'DLLs', 'Library/bin', 'shared']
63 | py_lib_dirs = [os.path.join(sys.prefix, d) for d in py_sub_dirs]
64 | if hasattr(sys, 'base_prefix'):
65 | py_lib_dirs += [os.path.join(sys.base_prefix, d) for d in py_sub_dirs]
66 |
67 | # Get user dirs to search (i.e. HOME)
68 | home_dir = os.path.expanduser('~')
69 | user_lib_dirs = [os.path.join(home_dir, d) for d in ['lib']]
70 |
71 | # Select only the dirs for which a directory exists, and remove duplicates
72 | potential_lib_dirs = lib_dirs + sys_lib_dirs + py_lib_dirs + user_lib_dirs
73 | lib_dirs = []
74 | for ld in potential_lib_dirs:
75 | if os.path.isdir(ld) and ld not in lib_dirs:
76 | lib_dirs.append(ld)
77 |
78 | # Now attempt to find libraries of that name in the given directory
79 | # (case-insensitive)
80 | lib_paths = []
81 | for lib_dir in lib_dirs:
82 | # Get files, prefer short names, last version
83 | files = os.listdir(lib_dir)
84 | files = reversed(sorted(files))
85 | files = sorted(files, key=len)
86 | for lib_name in lib_names:
87 | # Test all filenames for name and ext
88 | for fname in files:
89 | if fname.lower().startswith(lib_name) and looks_lib(fname):
90 | lib_paths.append(os.path.join(lib_dir, fname))
91 |
92 | # Return (only the items which are files)
93 | lib_paths = [lp for lp in lib_paths if os.path.isfile(lp)]
94 | return lib_dirs, lib_paths
95 |
96 |
97 | def load_lib(exact_lib_names, lib_names, lib_dirs=None):
98 | """ load_lib(exact_lib_names, lib_names, lib_dirs=None)
99 |
100 | Load a dynamic library.
101 |
102 | This function first tries to load the library from the given exact
103 | names. When that fails, it tries to find the library in common
104 | locations. It searches for files that start with one of the names
105 | given in lib_names (case insensitive). The search is performed in
106 | the given lib_dirs and a set of common library dirs.
107 |
108 | Returns ``(ctypes_library, library_path)``
109 | """
110 |
111 | # Checks
112 | assert isinstance(exact_lib_names, list)
113 | assert isinstance(lib_names, list)
114 | if lib_dirs is not None:
115 | assert isinstance(lib_dirs, list)
116 | exact_lib_names = [n for n in exact_lib_names if n]
117 | lib_names = [n for n in lib_names if n]
118 |
119 | # Get reference name (for better messages)
120 | if lib_names:
121 | the_lib_name = lib_names[0]
122 | elif exact_lib_names:
123 | the_lib_name = exact_lib_names[0]
124 | else:
125 | raise ValueError("No library name given.")
126 |
127 | # Collect filenames of potential libraries
128 | # First try a few bare library names that ctypes might be able to find
129 | # in the default locations for each platform.
130 | lib_dirs, lib_paths = generate_candidate_libs(lib_names, lib_dirs)
131 | lib_paths = exact_lib_names + lib_paths
132 |
133 | # Select loader
134 | if sys.platform.startswith('win'):
135 | loader = ctypes.windll
136 | else:
137 | loader = ctypes.cdll
138 |
139 | # Try to load until success
140 | the_lib = None
141 | errors = []
142 | for fname in lib_paths:
143 | try:
144 | the_lib = loader.LoadLibrary(fname)
145 | break
146 | except Exception:
147 | # Don't record errors when it couldn't load the library from an
148 | # exact name -- this fails often, and doesn't provide any useful
149 | # debugging information anyway, beyond "couldn't find library..."
150 | if fname not in exact_lib_names:
151 | # Get exception instance in Python 2.x/3.x compatible manner
152 | e_type, e_value, e_tb = sys.exc_info()
153 | del e_tb
154 | errors.append((fname, e_value))
155 |
156 | # No success ...
157 | if the_lib is None:
158 | if errors:
159 | # No library loaded, and load-errors reported for some
160 | # candidate libs
161 | err_txt = ['%s:\n%s' % (l, str(e)) for l, e in errors]
162 | msg = ('One or more %s libraries were found, but ' +
163 | 'could not be loaded due to the following errors:\n%s')
164 | raise OSError(msg % (the_lib_name, '\n\n'.join(err_txt)))
165 | else:
166 | # No errors, because no potential libraries found at all!
167 | msg = 'Could not find a %s library in any of:\n%s'
168 | raise OSError(msg % (the_lib_name, '\n'.join(lib_dirs)))
169 |
170 | # Done
171 | return the_lib, fname
172 |
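# Editor's note -- a hypothetical sketch of calling load_lib() as documented above.
# The library names are illustrative; which candidate resolves depends on the platform.
if __name__ == '__main__':
    try:
        zlib, path = load_lib([], ['z', 'zlib', 'zlib1'])
        print('loaded %s from %s' % (zlib, path))
    except OSError as err:
        print('could not locate or load a zlib library:', err)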
--------------------------------------------------------------------------------
/core/lib/imageio/freeze.py:
--------------------------------------------------------------------------------
1 | """
2 | Helper functions for freezing imageio.
3 | """
4 |
5 | import sys
6 |
7 |
8 | def get_includes():
9 | if sys.version_info[0] == 3:
10 | urllib = ['email', 'urllib.request', ]
11 | else:
12 | urllib = ['urllib2']
13 | return urllib + ['numpy', 'zipfile', 'io']
14 |
15 |
16 | def get_excludes():
17 | return []
18 |
--------------------------------------------------------------------------------
/core/lib/imageio/plugins/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2015, imageio contributors
3 | # imageio is distributed under the terms of the (new) BSD License.
4 |
5 | """
6 |
7 | Imageio is plugin-based. Every supported format is provided with a
8 | plugin. You can write your own plugins to make imageio support
9 | additional formats. And we would be interested in adding such code to the
10 | imageio codebase!
11 |
12 |
13 | What is a plugin
14 | ----------------
15 |
16 | In imageio, a plugin provides one or more :class:`.Format` objects, and
17 | corresponding :class:`.Reader` and :class:`.Writer` classes.
18 | Each Format object represents an implementation to read/write a
19 | particular file format. Its Reader and Writer classes do the actual
20 | reading/saving.
21 |
22 | The reader and writer objects have a ``request`` attribute that can be
23 | used to obtain information about the read or write :class:`.Request`, such as
24 | user-provided keyword arguments, as well as get access to the raw image
25 | data.
26 |
27 |
28 | Registering
29 | -----------
30 |
31 | Strictly speaking a format can be used stand alone. However, to allow
32 | imageio to automatically select it for a specific file, the format must
33 | be registered using ``imageio.formats.add_format()``.
34 |
35 | Note that a plugin is not required to be part of the imageio package; as
36 | long as a format is registered, imageio can use it. This makes imageio very
37 | easy to extend.
38 |
39 |
40 | What methods to implement
41 | --------------------------
42 |
43 | Imageio is designed such that plugins only need to implement a few
44 | private methods. The public API is implemented by the base classes.
45 | In effect, the public methods can be given a decent docstring which
46 | does not have to be repeated in the plugins.
47 |
48 | For the Format class, the following needs to be implemented/specified:
49 |
50 | * The format needs a short name, a description, and a list of file
51 | extensions that are common for the file-format in question.
52 | These are set when instantiating the Format object.
53 | * Use a docstring to provide more detailed information about the
54 | format/plugin, such as parameters for reading and saving that the user
55 | can supply via keyword arguments.
56 | * Implement ``_can_read(request)``, return a bool.
57 | See also the :class:`.Request` class.
58 | * Implement ``_can_write(request)``, ditto.
59 |
60 | For the Format.Reader class:
61 |
62 | * Implement ``_open(**kwargs)`` to initialize the reader. Deal with the
63 | user-provided keyword arguments here.
64 | * Implement ``_close()`` to clean up.
65 | * Implement ``_get_length()`` to provide a suitable length based on what
66 | the user expects. Can be ``inf`` for streaming data.
67 | * Implement ``_get_data(index)`` to return an array and a meta-data dict.
68 | * Implement ``_get_meta_data(index)`` to return a meta-data dict. If index
69 | is None, it should return the 'global' meta-data.
70 |
71 | For the Format.Writer class:
72 |
73 | * Implement ``_open(**kwargs)`` to initialize the writer. Deal with the
74 | user-provided keyword arguments here.
75 | * Implement ``_close()`` to clean up.
76 | * Implement ``_append_data(im, meta)`` to add data (and meta-data).
77 | * Implement ``_set_meta_data(meta)`` to set the global meta-data.
78 |
79 | """
80 |
81 | # First import plugins that we want to take precedence over freeimage
82 | from . import freeimage # noqa
83 |
84 |
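# Editor's note -- a condensed, hypothetical skeleton following the private-method
# list in the docstring above. The DummyFormat class and the '.dummy' extension are
# invented for illustration, and the imports assume imageio's package layout.
import numpy as np
from ..core import Format

class DummyFormat(Format):
    """Toy format reading/writing raw bytes as a flat uint8 array (example only)."""

    def _can_read(self, request):
        return request.filename.lower().endswith('.dummy')

    def _can_write(self, request):
        return request.filename.lower().endswith('.dummy')

    class Reader(Format.Reader):
        def _open(self, **kwargs):
            self._bytes = self.request.get_file().read()
        def _close(self):
            pass
        def _get_length(self):
            return 1
        def _get_data(self, index):
            # return (array, meta) as required by the base class
            return np.frombuffer(self._bytes, 'uint8'), {}
        def _get_meta_data(self, index):
            return {}  # no per-image or global meta data in this toy format

    class Writer(Format.Writer):
        def _open(self, **kwargs):
            self._fp = self.request.get_file()
        def _close(self):
            pass
        def _append_data(self, im, meta):
            self._fp.write(np.asarray(im, 'uint8').tobytes())
        def _set_meta_data(self, meta):
            pass

# Registering the format (see the 'Registering' section of the docstring above):
# from .. import formats
# formats.add_format(DummyFormat('dummy', 'A toy raw-bytes format', '.dummy'))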
--------------------------------------------------------------------------------
/core/lib/imageio/resources/shipped_resources_go_here:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/core/lib/imageio/resources/shipped_resources_go_here
--------------------------------------------------------------------------------
/core/lib/imageio/testing.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2015, imageio contributors
3 | # Distributed under the (new) BSD License. See LICENSE.txt for more info.
4 |
5 | """ Functionality used for testing. This code itself is not covered in tests.
6 | """
7 |
8 | from __future__ import absolute_import, print_function, division
9 |
10 | import os
11 | import sys
12 | import inspect
13 | import shutil
14 | import atexit
15 |
16 | import pytest
17 |
18 | # Get root dir
19 | THIS_DIR = os.path.abspath(os.path.dirname(__file__))
20 | ROOT_DIR = THIS_DIR
21 | for i in range(9):
22 | ROOT_DIR = os.path.dirname(ROOT_DIR)
23 | if os.path.isfile(os.path.join(ROOT_DIR, '.gitignore')):
24 | break
25 |
26 |
27 | STYLE_IGNORES = ['E226',
28 | 'E241',
29 | 'E265',
30 | 'E266', # too many leading '#' for block comment
31 | 'E402', # module level import not at top of file
32 | 'E731', # do not assign a lambda expression, use a def
33 | 'W291',
34 | 'W293',
35 | 'W503', # line break before binary operator
36 | ]
37 |
38 |
39 | ## Functions to use in tests
40 |
41 | def run_tests_if_main(show_coverage=False):
42 | """ Run tests in a given file if it is run as a script
43 |
44 | Coverage is reported for running this single test. Set show_coverage to
45 | launch the report in the web browser.
46 | """
47 | local_vars = inspect.currentframe().f_back.f_locals
48 | if not local_vars.get('__name__', '') == '__main__':
49 | return
50 | # we are in a "__main__"
51 | os.chdir(ROOT_DIR)
52 | fname = str(local_vars['__file__'])
53 | _clear_imageio()
54 | _enable_faulthandler()
55 | pytest.main('-v -x --color=yes --cov imageio '
56 | '--cov-config .coveragerc --cov-report html %s' % repr(fname))
57 | if show_coverage:
58 | import webbrowser
59 | fname = os.path.join(ROOT_DIR, 'htmlcov', 'index.html')
60 | webbrowser.open_new_tab(fname)
61 |
62 |
63 | _the_test_dir = None
64 |
65 |
66 | def get_test_dir():
67 | global _the_test_dir
68 | if _the_test_dir is None:
69 | # Define dir
70 | from imageio.core import appdata_dir
71 | _the_test_dir = os.path.join(appdata_dir('imageio'), 'testdir')
72 | # Clear and create it now
73 | clean_test_dir(True)
74 | os.makedirs(_the_test_dir)
75 | os.makedirs(os.path.join(_the_test_dir, 'images'))
76 | # And later
77 | atexit.register(clean_test_dir)
78 | return _the_test_dir
79 |
80 |
81 | def clean_test_dir(strict=False):
82 | if os.path.isdir(_the_test_dir):
83 | try:
84 | shutil.rmtree(_the_test_dir)
85 | except Exception:
86 | if strict:
87 | raise
88 |
89 |
90 | def need_internet():
91 | if os.getenv('IMAGEIO_NO_INTERNET', '').lower() in ('1', 'true', 'yes'):
92 | pytest.skip('No internet')
93 |
94 |
95 | ## Functions to use from make
96 |
97 | def test_unit(cov_report='term'):
98 | """ Run all unit tests. Returns exit code.
99 | """
100 | orig_dir = os.getcwd()
101 | os.chdir(ROOT_DIR)
102 | try:
103 | _clear_imageio()
104 | _enable_faulthandler()
105 | return pytest.main('-v --cov imageio --cov-config .coveragerc '
106 | '--cov-report %s tests' % cov_report)
107 | finally:
108 | os.chdir(orig_dir)
109 | import imageio
110 | print('Tests were performed on', str(imageio))
111 |
112 |
113 | def test_style():
114 | """ Test style using flake8
115 | """
116 | # Test if flake is there
117 | try:
118 | from flake8.main import main # noqa
119 | except ImportError:
120 | print('Skipping flake8 test, flake8 not installed')
121 | return
122 |
123 | # Reporting
124 | print('Running flake8 on %s' % ROOT_DIR)
125 | sys.stdout = FileForTesting(sys.stdout)
126 |
127 | # Init
128 | ignores = STYLE_IGNORES.copy()
129 | fail = False
130 | count = 0
131 |
132 | # Iterate over files
133 | for dir, dirnames, filenames in os.walk(ROOT_DIR):
134 | dir = os.path.relpath(dir, ROOT_DIR)
135 | # Skip this dir?
136 | exclude_dirs = set(['.git', 'docs', 'build', 'dist', '__pycache__'])
137 | if exclude_dirs.intersection(dir.split(os.path.sep)):
138 | continue
139 | # Check all files ...
140 | for fname in filenames:
141 | if fname.endswith('.py'):
142 | # Get test options for this file
143 | filename = os.path.join(ROOT_DIR, dir, fname)
144 | skip, extra_ignores = _get_style_test_options(filename)
145 | if skip:
146 | continue
147 | # Test
148 | count += 1
149 | thisfail = _test_style(filename, ignores + extra_ignores)
150 | if thisfail:
151 | fail = True
152 | print('----')
153 | sys.stdout.flush()
154 |
155 | # Report result
156 | sys.stdout.revert()
157 | if not count:
158 | raise RuntimeError(' Arg! flake8 did not check any files')
159 | elif fail:
160 | raise RuntimeError(' Arg! flake8 failed (checked %i files)' % count)
161 | else:
162 | print(' Hooray! flake8 passed (checked %i files)' % count)
163 |
164 |
165 | ## Requirements
166 |
167 | def _enable_faulthandler():
168 | """ Enable faulthandler (if we can), so that we get tracebacks
169 | on segfaults.
170 | """
171 | try:
172 | import faulthandler
173 | faulthandler.enable()
174 | print('Faulthandler enabled')
175 | except Exception:
176 | print('Could not enable faulthandler')
177 |
178 |
179 | def _clear_imageio():
180 | # Remove ourselves from sys.modules to force an import
181 | for key in list(sys.modules.keys()):
182 | if key.startswith('imageio'):
183 | del sys.modules[key]
184 |
185 |
186 | class FileForTesting(object):
187 | """ Alternative to stdout that makes path relative to ROOT_DIR
188 | """
189 | def __init__(self, original):
190 | self._original = original
191 |
192 | def write(self, msg):
193 | if msg.startswith(ROOT_DIR):
194 | msg = os.path.relpath(msg, ROOT_DIR)
195 | self._original.write(msg)
196 | self._original.flush()
197 |
198 | def flush(self):
199 | self._original.flush()
200 |
201 | def revert(self):
202 | sys.stdout = self._original
203 |
204 |
205 | def _get_style_test_options(filename):
206 | """ Returns (skip, ignores) for the specifies source file.
207 | """
208 | skip = False
209 | ignores = []
210 | text = open(filename, 'rb').read().decode('utf-8')
211 | # Iterate over lines
212 | for i, line in enumerate(text.splitlines()):
213 | if i > 20:
214 | break
215 | if line.startswith('# styletest:'):
216 | if 'skip' in line:
217 | skip = True
218 | elif 'ignore' in line:
219 | words = line.replace(',', ' ').split(' ')
220 | words = [w.strip() for w in words if w.strip()]
221 | words = [w for w in words if
222 | (w[1:].isnumeric() and w[0] in 'EWFCN')]
223 | ignores.extend(words)
224 | return skip, ignores
225 |
226 |
227 | def _test_style(filename, ignore):
228 | """ Test style for a certain file.
229 | """
230 | if isinstance(ignore, (list, tuple)):
231 | ignore = ','.join(ignore)
232 |
233 | orig_dir = os.getcwd()
234 | orig_argv = sys.argv
235 |
236 | os.chdir(ROOT_DIR)
237 | sys.argv[1:] = [filename]
238 | sys.argv.append('--ignore=' + ignore)
239 | try:
240 | from flake8.main import main
241 | main()
242 | except SystemExit as ex:
243 | if ex.code in (None, 0):
244 | return False
245 | else:
246 | return True
247 | finally:
248 | os.chdir(orig_dir)
249 | sys.argv[:] = orig_argv
250 |
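# Editor's note -- illustration of the per-file options parsed by
# _get_style_test_options() above: a module can opt out of the flake8 pass, or
# silence extra codes, by placing a marker within its first ~20 lines, e.g.
#
#     # styletest: skip
#     # styletest: ignore E501 W291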
--------------------------------------------------------------------------------
/core/maths/__init__.py:
--------------------------------------------------------------------------------
1 | from .interpo import scale, linearInterpo
2 | '''
3 | from .maths.kmeans1D import kmeans1d, getBreaks
4 | from . import akima
5 | from fillnodata import replace_nans
6 | '''
7 |
--------------------------------------------------------------------------------
/core/maths/akima.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # akima.py
3 |
4 | # Copyright (c) 2007-2015, Christoph Gohlke
5 | # Copyright (c) 2007-2015, The Regents of the University of California
6 | # Produced at the Laboratory for Fluorescence Dynamics
7 | # All rights reserved.
8 | #
9 | # Redistribution and use in source and binary forms, with or without
10 | # modification, are permitted provided that the following conditions are met:
11 | #
12 | # * Redistributions of source code must retain the above copyright
13 | # notice, this list of conditions and the following disclaimer.
14 | # * Redistributions in binary form must reproduce the above copyright
15 | # notice, this list of conditions and the following disclaimer in the
16 | # documentation and/or other materials provided with the distribution.
17 | # * Neither the name of the copyright holders nor the names of any
18 | # contributors may be used to endorse or promote products derived
19 | # from this software without specific prior written permission.
20 | #
21 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
24 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
25 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
27 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
28 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
29 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
31 | # POSSIBILITY OF SUCH DAMAGE.
32 |
33 | """Interpolation of data points in a plane based on Akima's method.
34 |
35 | Akima's interpolation method uses a continuously differentiable sub-spline
36 | built from piecewise cubic polynomials. The resultant curve passes through
37 | the given data points and will appear smooth and natural.
38 |
39 | :Author:
40 | `Christoph Gohlke `_
41 |
42 | :Organization:
43 | Laboratory for Fluorescence Dynamics, University of California, Irvine
44 |
45 | :Version: 2015.01.29
46 |
47 | Requirements
48 | ------------
49 | * `CPython 2.7 or 3.4 `_
50 | * `Numpy 1.8 `_
51 | * `Akima.c 2015.01.29 `_ (optional speedup)
52 | * `Matplotlib 1.4 `_ (optional for plotting)
53 |
54 | Notes
55 | -----
56 | Consider using `scipy.interpolate.Akima1DInterpolator
57 | `_.
58 |
59 | References
60 | ----------
61 | (1) A new method of interpolation and smooth curve fitting based
62 | on local procedures. Hiroshi Akima, J. ACM, October 1970, 17(4), 589-602.
63 |
64 | Examples
65 | --------
66 | >>> def example():
67 | ... '''Plot interpolated Gaussian noise.'''
68 | ... x = numpy.sort(numpy.random.random(10) * 100)
69 | ... y = numpy.random.normal(0.0, 0.1, size=len(x))
70 | ... x2 = numpy.arange(x[0], x[-1], 0.05)
71 | ... y2 = interpolate(x, y, x2)
72 | ... from matplotlib import pyplot
73 | ... pyplot.title("Akima interpolation of Gaussian noise")
74 | ... pyplot.plot(x2, y2, "b-")
75 | ... pyplot.plot(x, y, "ro")
76 | ... pyplot.show()
77 | >>> example()
78 |
79 | """
80 |
81 | import numpy
82 |
83 | __version__ = '2015.01.29'
84 | __docformat__ = 'restructuredtext en'
85 | __all__ = 'interpolate',
86 |
87 |
88 | def interpolate(x, y, x_new, axis=-1, out=None):
89 | """Return interpolated data using Akima's method.
90 |
91 | This Python implementation is inspired by the Matlab(r) code by
92 | N. Shamsundar. It lacks certain capabilities of the C implementation
93 | such as the output array argument and interpolation along an axis of a
94 | multidimensional data array.
95 |
96 | Parameters
97 | ----------
98 | x : array like
99 | 1D array of monotonically increasing real values.
100 | y : array like
101 | N-D array of real values. y's length along the interpolation
102 | axis must be equal to the length of x.
103 | x_new : array like
104 | New independent variables.
105 | axis : int
106 | Specifies axis of y along which to interpolate. Interpolation
107 | defaults to last axis of y.
108 | out : array
109 | Optional array to receive results. Dimension at axis must equal
110 | length of x.
111 |
112 | Examples
113 | --------
114 | >>> interpolate([0, 1, 2], [0, 0, 1], [0.5, 1.5])
115 | array([-0.125, 0.375])
116 | >>> x = numpy.sort(numpy.random.random(10) * 10)
117 | >>> y = numpy.random.normal(0.0, 0.1, size=len(x))
118 | >>> z = interpolate(x, y, x)
119 | >>> numpy.allclose(y, z)
120 | True
121 | >>> x = x[:10]
122 | >>> y = numpy.reshape(y, (10, -1))
123 | >>> z = numpy.reshape(y, (10, -1))
124 | >>> interpolate(x, y, x, axis=0, out=z)
125 | >>> numpy.allclose(y, z)
126 | True
127 |
128 | """
129 | x = numpy.array(x, dtype=numpy.float64, copy=True)
130 | y = numpy.array(y, dtype=numpy.float64, copy=True)
131 | xi = numpy.array(x_new, dtype=numpy.float64, copy=True)
132 |
133 | if axis != -1 or out is not None or y.ndim != 1:
134 | raise NotImplementedError("implemented in C extension module")
135 |
136 | if x.ndim != 1 or xi.ndim != 1:
137 | raise ValueError("x-arrays must be one dimensional")
138 |
139 | n = len(x)
140 | if n < 2:
141 | raise ValueError("array too small")
142 | if n != y.shape[axis]:
143 | raise ValueError("size of x-array must match data shape")
144 |
145 | dx = numpy.diff(x)
146 | if any(dx <= 0.0):
147 | raise ValueError("x-axis not valid")
148 |
149 | if any(xi < x[0]) or any(xi > x[-1]):
150 | raise ValueError("interpolation x-axis out of bounds")
151 |
152 | m = numpy.diff(y) / dx
153 | mm = 2.0 * m[0] - m[1]
154 | mmm = 2.0 * mm - m[0]
155 | mp = 2.0 * m[n - 2] - m[n - 3]
156 | mpp = 2.0 * mp - m[n - 2]
157 |
158 | m1 = numpy.concatenate(([mmm], [mm], m, [mp], [mpp]))
159 |
160 | dm = numpy.abs(numpy.diff(m1))
161 | f1 = dm[2:n + 2]
162 | f2 = dm[0:n]
163 | f12 = f1 + f2
164 |
165 | ids = numpy.nonzero(f12 > 1e-9 * numpy.max(f12))[0]
166 | b = m1[1:n + 1]
167 |
168 | b[ids] = (f1[ids] * m1[ids + 1] + f2[ids] * m1[ids + 2]) / f12[ids]
169 | c = (3.0 * m - 2.0 * b[0:n - 1] - b[1:n]) / dx
170 | d = (b[0:n - 1] + b[1:n] - 2.0 * m) / dx ** 2
171 |
172 | bins = numpy.digitize(xi, x)
173 | bins = numpy.minimum(bins, n - 1) - 1
174 | bb = bins[0:len(xi)]
175 | wj = xi - x[bb]
176 |
177 | return ((wj * d[bb] + c[bb]) * wj + b[bb]) * wj + y[bb]
178 |
179 |
--------------------------------------------------------------------------------
/core/maths/fillnodata.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | # This file is part of BlenderGIS
4 |
5 | # ***** GPL LICENSE BLOCK *****
6 | #
7 | # This program is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 | # All rights reserved.
20 | # ***** GPL LICENSE BLOCK *****
21 |
22 |
23 |
24 |
25 | ########################################
26 | # Inpainting function
27 | # http://astrolitterbox.blogspot.fr/2012/03/healing-holes-in-arrays-in-python.html
28 | # https://github.com/gasagna/openpiv-python/blob/master/openpiv/src/lib.pyx
29 |
30 |
31 | import numpy as np
32 |
33 | DTYPEf = np.float32
34 | #DTYPEi = np.int32
35 |
36 |
37 | def replace_nans(array, max_iter, tolerance, kernel_size=1, method='localmean'):
38 | """
39 | Replace NaN elements in an array using an iterative image inpainting algorithm.
40 | The algorithm is the following:
41 | 1) For each element in the input array, replace it by a weighted average
42 | of the neighbouring elements which are not NaN themselves. The weights depend
43 | on the method type. If ``method=localmean``, weights are equal to 1/( (2*kernel_size+1)**2 -1 )
44 | 2) Several iterations are needed if there are adjacent NaN elements.
45 | If this is the case, information is "spread" from the edges of the missing
46 | regions iteratively, until the variation is below a certain threshold.
47 |
48 | Parameters
49 | ----------
50 | array : 2d np.ndarray
51 | an array containing NaN elements that have to be replaced
52 |
53 | max_iter : int
54 | the maximum number of iterations; iteration stops early once the mean square change of the replaced values drops below ``tolerance``
55 |
56 | kernel_size : int
57 | the size of the kernel, default is 1
58 |
59 | method : str
60 | the method used to replace invalid values. Valid options are 'localmean', 'idw'.
61 |
62 | Returns
63 | -------
64 | filled : 2d np.ndarray
65 | a copy of the input array, where NaN elements have been replaced.
66 | """
67 |
68 | filled = np.empty( [array.shape[0], array.shape[1]], dtype=DTYPEf)
69 | kernel = np.empty( (2*kernel_size+1, 2*kernel_size+1), dtype=DTYPEf )
70 |
71 | # indices where array is NaN
72 | inans, jnans = np.nonzero( np.isnan(array) )
73 |
74 | # number of NaN elements
75 | n_nans = len(inans)
76 |
77 | # arrays which contain replaced values to check for convergence
78 | replaced_new = np.zeros( n_nans, dtype=DTYPEf)
79 | replaced_old = np.zeros( n_nans, dtype=DTYPEf)
80 |
81 | # depending on kernel type, fill kernel array
82 | if method == 'localmean':
83 | # weight are equal to 1/( (2*kernel_size+1)**2 -1 )
84 | for i in range(2*kernel_size+1):
85 | for j in range(2*kernel_size+1):
86 | kernel[i,j] = 1
87 | #print(kernel, 'kernel')
88 | elif method == 'idw':
89 | kernel = np.array([[0, 0.5, 0.5, 0.5,0],
90 | [0.5,0.75,0.75,0.75,0.5],
91 | [0.5,0.75,1,0.75,0.5],
92 | [0.5,0.75,0.75,0.75,0.5],
93 | [0, 0.5, 0.5 ,0.5 ,0]])
94 | #print(kernel, 'kernel')
95 | else:
96 | raise ValueError("method not valid. Should be one of 'localmean', 'idw'.")
97 |
98 | # fill new array with input elements
99 | for i in range(array.shape[0]):
100 | for j in range(array.shape[1]):
101 | filled[i,j] = array[i,j]
102 |
103 | # make several passes
104 | # until we reach convergence
105 | for it in range(max_iter):
106 | #print('Fill NaN iteration', it)
107 | # for each NaN element
108 | for k in range(n_nans):
109 | i = inans[k]
110 | j = jnans[k]
111 |
112 | # initialize to zero
113 | filled[i,j] = 0.0
114 | n = 0
115 |
116 | # loop over the kernel
117 | for I in range(2*kernel_size+1):
118 | for J in range(2*kernel_size+1):
119 |
120 | # if we are not out of the boundaries
121 | if i+I-kernel_size < array.shape[0] and i+I-kernel_size >= 0:
122 | if j+J-kernel_size < array.shape[1] and j+J-kernel_size >= 0:
123 |
124 | # if the neighbour element is not NaN itself.
125 | if filled[i+I-kernel_size, j+J-kernel_size] == filled[i+I-kernel_size, j+J-kernel_size] :
126 |
127 | # do not sum itself
128 | if I-kernel_size != 0 or J-kernel_size != 0: #skip only the central cell itself
129 |
130 | # convolve kernel with original array
131 | filled[i,j] = filled[i,j] + filled[i+I-kernel_size, j+J-kernel_size]*kernel[I, J]
132 | n = n + 1*kernel[I,J]
133 | # divide value by effective number of added elements
134 | if n != 0:
135 | filled[i,j] = filled[i,j] / n
136 | replaced_new[k] = filled[i,j]
137 | else:
138 | filled[i,j] = np.nan
139 |
140 | # check if mean square difference between values of replaced
141 | # elements is below a certain tolerance
142 | #print('tolerance', np.mean( (replaced_new-replaced_old)**2 ))
143 | if np.mean( (replaced_new-replaced_old)**2 ) < tolerance:
144 | break
145 | else:
146 | for l in range(n_nans):
147 | replaced_old[l] = replaced_new[l]
148 |
149 | return filled
150 |
151 |
152 | def sincinterp(image, x, y, kernel_size=3 ):
153 | """
154 | Re-sample an image at intermediate positions between pixels.
155 | This function uses a cardinal interpolation formula which limits
156 | the loss of information in the resampling process. It uses a limited
157 | number of neighbouring pixels.
158 |
159 | The new image :math:`im^+` at fractional locations :math:`x` and :math:`y` is computed as:
160 | .. math::
161 | im^+(x,y) = \sum_{i=-\mathtt{kernel\_size}}^{i=\mathtt{kernel\_size}} \sum_{j=-\mathtt{kernel\_size}}^{j=\mathtt{kernel\_size}} \mathtt{image}(i,j) sin[\pi(i-\mathtt{x})] sin[\pi(j-\mathtt{y})] / \pi(i-\mathtt{x}) / \pi(j-\mathtt{y})
162 |
163 | Parameters
164 | ----------
165 | image : np.ndarray, dtype np.int32
166 | the image array.
167 |
168 | x : two dimensions np.ndarray of floats
169 | an array containing fractional pixel row
170 | positions at which to interpolate the image
171 |
172 | y : two dimensions np.ndarray of floats
173 | an array containing fractional pixel column
174 | positions at which to interpolate the image
175 |
176 | kernel_size : int
177 | interpolation is performed over a ``(2*kernel_size+1)*(2*kernel_size+1)``
178 | submatrix in the neighbourhood of each interpolation point.
179 |
180 | Returns
181 | -------
182 | im : np.ndarray, dtype np.float64
183 | the interpolated value of ``image`` at the points specified by ``x`` and ``y``
184 | """
185 |
186 | # the output array
187 | r = np.zeros( [x.shape[0], x.shape[1]], dtype=DTYPEf)
188 |
189 | # pi constant
190 | pi = np.pi
191 |
192 | # for each point of the output array
193 | for I in range(x.shape[0]):
194 | for J in range(x.shape[1]):
195 |
196 | #loop over all neighbouring grid points
197 | for i in range( int(x[I,J])-kernel_size, int(x[I,J])+kernel_size+1 ):
198 | for j in range( int(y[I,J])-kernel_size, int(y[I,J])+kernel_size+1 ):
199 | # check that we are in the boundaries
200 | if i >= 0 and i < image.shape[0] and j >= 0 and j < image.shape[1]:
201 | if (i-x[I,J]) == 0.0 and (j-y[I,J]) == 0.0:
202 | r[I,J] = r[I,J] + image[i,j]
203 | elif (i-x[I,J]) == 0.0:
204 | r[I,J] = r[I,J] + image[i,j] * np.sin( pi*(j-y[I,J]) )/( pi*(j-y[I,J]) )
205 | elif (j-y[I,J]) == 0.0:
206 | r[I,J] = r[I,J] + image[i,j] * np.sin( pi*(i-x[I,J]) )/( pi*(i-x[I,J]) )
207 | else:
208 | r[I,J] = r[I,J] + image[i,j] * np.sin( pi*(i-x[I,J]) )*np.sin( pi*(j-y[I,J]) )/( pi*pi*(i-x[I,J])*(j-y[I,J]))
209 | return r
210 |
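# Editor's note -- a small usage sketch of replace_nans(); the array values are
# arbitrary and only meant to show the expected call signature.
if __name__ == '__main__':
    a = np.array([[1, 2, 3],
                  [4, np.nan, 6],
                  [7, 8, 9]], dtype=DTYPEf)
    print(replace_nans(a, max_iter=10, tolerance=1e-6, kernel_size=1, method='localmean'))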
--------------------------------------------------------------------------------
/core/maths/interpo.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | #Scale/normalize function : linear stretch from lowest value to highest value
4 | #########################################
5 | def scale(inVal, inMin, inMax, outMin, outMax):
6 | return (inVal - inMin) * (outMax - outMin) / (inMax - inMin) + outMin
7 |
8 |
9 |
10 | def linearInterpo(x1, x2, y1, y2, x):
11 | #Linear interpolation = y1 + slope * tx
12 | dx = x2 - x1
13 | dy = y2-y1
14 | slope = dy/dx
15 | tx = x - x1 #position from x1 (target x)
16 | return y1 + slope * tx
17 |
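# Editor's note -- quick sanity checks for the two helpers above.
if __name__ == '__main__':
    assert scale(5, 0, 10, 0, 100) == 50          # linear stretch of 5 from [0,10] to [0,100]
    assert linearInterpo(0, 10, 0, 20, 5) == 10   # halfway between (0,0) and (10,20)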
--------------------------------------------------------------------------------
/core/maths/kmeans1D.py:
--------------------------------------------------------------------------------
1 | """
2 | kmeans1D.py
3 | Author : domlysz@gmail.com
4 | Date : february 2016
5 | License : GPL
6 |
7 | This file is part of BlenderGIS.
8 | This is a kmeans implementation optimized for 1D data.
9 |
10 | Original kmeans code :
11 | https://gist.github.com/iandanforth/5862470
12 |
13 | 1D optimizations are inspired by this discussion :
14 | http://stats.stackexchange.com/questions/40454/determine-different-clusters-of-1d-data-from-database
15 |
16 | The optimizations consist of :
17 | -sorting the data and initializing clusters with a quantile classification
18 | -computing distances in 1D instead of euclidean distances
19 | -optimizing only the borders of the clusters instead of testing each cluster's values
20 |
21 | Clustering results are similar to those of the Jenks natural breaks and ckmeans algorithms.
22 | There are Python implementations of these algorithms based on javascript code from the simple-statistics library :
23 | * Jenks : https://gist.github.com/llimllib/4974446 (https://gist.github.com/tmcw/4977508)
24 | * Ckmeans : https://github.com/llimllib/ckmeans (https://github.com/simple-statistics/simple-statistics/blob/master/src/ckmeans.js)
25 |
26 | But both are terribly slow because there is a lot of exponential-time looping, which these algorithms make somewhat inevitable.
27 | In contrast, this script works in a reasonable time, but keep in mind it's not Jenks. We just use the clusters' centroids (means) as
28 | a reference to distribute the values, while Jenks tries to minimize within-class variance and maximize between-class variance.
29 | """
30 |
31 | from ..utils.timing import perf_clock
32 |
33 |
34 | def kmeans1d(data, k, cutoff=False, maxIter=False):
35 | '''
36 | Compute natural breaks of a one-dimensional list through an optimized kmeans algorithm
37 | Inputs:
38 | * data = input list, must be sorted beforehand
39 | * k = number of expected classes
40 | * cutoff (optional) = stop the algorithm when centroid shifts are under this value
41 | * maxIter (optional) = stop the algorithm when the iteration count reaches this value
42 | Output:
43 | * A list of k clusters. A cluster is represented by a pair containing the first and last index of the cluster's values.
44 | Use these indices on the input data list to retrieve the values contained in a cluster.
45 | '''
46 |
47 | def getClusterValues(cluster):
48 | i, j = cluster
49 | return data[i:j+1]
50 |
51 | def getClusterCentroid(cluster):
52 | values = getClusterValues(cluster)
53 | return sum(values) / len(values)
54 |
55 | n = len(data)
56 | if k >= n:
57 | raise ValueError('Too many expected classes')
58 | if k == 1:
59 | return [ [0, n-1] ]
60 |
61 | # Step 1: Create k clusters with quantile classification
62 | # quantile = number of value per clusters
63 | q = int(n // k) #with floor, the last cluster will be bigger than the others; with ceil it would be smaller
64 | if q == 1:
65 | raise ValueError('Too many expected classes')
66 | # define a cluster with its first and last index
67 | clusters = [ [i, i+q-1] for i in range(0, q*k, q)]
68 | # adjust the last index of the last cluster to the effective number of value
69 | clusters[-1][1] = n-1
70 |
71 | # Get centroids before first iter
72 | centroids = [getClusterCentroid(c) for c in clusters]
73 |
74 | # Loop through the dataset until the clusters stabilize
75 | loopCounter = 0
76 | changeOccured = True
77 |
78 | while changeOccured:
79 | loopCounter += 1
80 |
81 | # Will be set to true if at least one border has been adjusted
82 | changeOccured = False
83 |
84 | # Step 2 : for each border...
85 | for i in range(k-1):
86 | c1 = clusters[i] #current cluster
87 | c2 = clusters[i+1] #next cluster
88 |
89 | #tag if this border has been adjusted or not
90 | adjusted = False
91 |
92 | # Test the distance between the right border of the current cluster and the neighboring centroids
93 | # Move the value if it's closer to the next cluster's centroid.
94 | # Then, test the new right border or stop if no more moves are needed.
95 | while True:
96 | if c1[0] == c1[1]:
97 | # only one value remaining in the current cluster
98 | # stop moving values to avoid creating an empty cluster
99 | break
100 | breakValue = data[c1[1]]
101 | dst1 = abs(breakValue - centroids[i])
102 | dst2 = abs(breakValue - centroids[i+1])
103 | if dst1 > dst2:
104 | # Adjust border : move last value of the current cluster to the next cluster
105 | c1[1] -= 1 #decrease right border index of current cluster
106 | c2[0] -= 1 #decrease left border index of the next cluster
107 | adjusted = True
108 | else:
109 | break
110 |
111 | # Test the left border of the next cluster only if we haven't adjusted the right border of the current cluster
112 | if not adjusted:
113 | # Test the distance between the left border of the next cluster and the neighboring centroids
114 | # Move the value if it's closer to the current cluster's centroid.
115 | # Then, test the new left border or stop if no more moves are needed.
116 | while True:
117 | if c2[0] == c2[1]:
118 | # only one value remaining in the next cluster
119 | # stop moving values to avoid creating an empty cluster
120 | break
121 | breakValue = data[c2[0]]
122 | dst1 = abs(breakValue - centroids[i])
123 | dst2 = abs(breakValue - centroids[i+1])
124 | if dst2 > dst1:
125 | # Adjust border : move first value of the next cluster to the current cluster
126 | c2[0] += 1 #increase left border index of the next cluster
127 | c1[1] += 1 #increase right border index of current cluster
128 | adjusted = True
129 | else:
130 | break
131 |
132 | # Loop again if some borders were adjusted
133 | # or stop looping if no more moves are possible
134 | if adjusted:
135 | changeOccured = True
136 |
137 | # Update centroids and compute the biggest shift
138 | newCentroids = [getClusterCentroid(c) for c in clusters]
139 | biggest_shift = max([abs(newCentroids[i] - centroids[i]) for i in range(k)])
140 | centroids = newCentroids
141 |
142 | # Force stopping the main loop ...
143 | # > if the centroids have stopped moving much (in the case we set a cutoff value)
144 | # > or if we reach max iteration value (in the case we set a maxIter value)
145 | if (cutoff and biggest_shift < cutoff) or (maxIter and loopCounter == maxIter):
146 | break
147 |
148 | #print("Converged after %s iterations" % loopCounter)
149 | return clusters
150 |
151 |
152 | #-----------------
153 | #Helpers to get values from the clusters' index list returned by the kmeans1d function
154 |
155 | def getClustersValues(data, clusters):
156 | return [data[i:j+1] for i, j in clusters]
157 |
158 | def getBreaks(data, clusters, includeBounds=False):
159 | if includeBounds:
160 | return [data[0]] + [data[j] for i, j in clusters]
161 | else:
162 | return [data[j] for i, j in clusters[:-1]]
163 |
164 |
165 |
166 | if __name__ == '__main__':
167 | import random, time
168 |
169 | #make data with a gap between 1000 and 2000
170 | data = [random.uniform(0, 1000) for i in range(10000)]
171 | data.extend([random.uniform(2000, 4000) for i in range(10000)])
172 | data.sort()
173 |
174 | k = 4
175 |
176 | print('---------------')
177 | print('%i values, %i classes' %(len(data),k))
178 | t1 = perf_clock()
179 | clusters = kmeans1d(data, k)
180 | t2 = perf_clock()
181 | print('Completed in %f seconds' %(t2-t1))
182 |
183 | print('Breaks :')
184 | print(getBreaks(data, clusters))
185 |
186 | print('Clusters details (nb values, min, max) :')
187 | for clusterValues in getClustersValues(data, clusters):
188 | print( len(clusterValues), clusterValues[0], clusterValues[-1] )
189 |
--------------------------------------------------------------------------------
/core/proj/__init__.py:
--------------------------------------------------------------------------------
1 | from .srs import SRS
2 | from .reproj import Reproj, reprojPt, reprojPts, reprojBbox, reprojImg
3 | from .srv import EPSGIO, TWCC
4 | from .ellps import dd2meters, meters2dd, Ellps, GRS80
5 |
--------------------------------------------------------------------------------
/core/proj/ellps.py:
--------------------------------------------------------------------------------
1 | import math
2 |
3 |
4 | class Ellps():
5 | """ellipsoid"""
6 | def __init__(self, a, b):
7 | self.a = a#equatorial radius in meters
8 | self.b = b#polar radius in meters
9 | self.f = (self.a-self.b)/self.a #flattening
10 | self.perimeter = (2*math.pi*self.a)#perimeter at equator
11 |
12 | GRS80 = Ellps(6378137, 6356752.314245)
13 |
14 | def dd2meters(dst):
15 | """
16 | Basic function to approximately convert a short distance in decimal degrees to meters
17 | Only accurate at the equator and along the horizontal axis
18 | """
19 | k = GRS80.perimeter/360
20 | return dst * k
21 |
22 | def meters2dd(dst):
23 | k = GRS80.perimeter/360
24 | return dst / k
25 |
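# Editor's note -- worked example: one degree along the equator spans
# GRS80.perimeter / 360 = 2 * pi * 6378137 / 360, i.e. about 111319.49 m.
if __name__ == '__main__':
    print(dd2meters(1))          # ~111319.49
    print(meters2dd(111319.49))  # ~1.0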
--------------------------------------------------------------------------------
/core/proj/srs.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | # ***** GPL LICENSE BLOCK *****
4 | #
5 | # This program is free software: you can redistribute it and/or modify
6 | # it under the terms of the GNU General Public License as published by
7 | # the Free Software Foundation, either version 3 of the License, or
8 | # (at your option) any later version.
9 | #
10 | # This program is distributed in the hope that it will be useful,
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 | # GNU General Public License for more details.
14 | #
15 | # You should have received a copy of the GNU General Public License
16 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 | # All rights reserved.
18 | # ***** GPL LICENSE BLOCK *****
19 | import logging
20 | log = logging.getLogger(__name__)
21 |
22 | from .utm import UTM, UTM_EPSG_CODES
23 | from .srv import EPSGIO
24 |
25 | from ..checkdeps import HAS_GDAL, HAS_PYPROJ
26 |
27 | if HAS_GDAL:
28 | from osgeo import osr, gdal
29 |
30 | if HAS_PYPROJ:
31 | import pyproj
32 |
33 | class SRS():
34 |
35 | '''
36 | A simple class to handle Spatial Ref System inputs
37 | '''
38 |
39 | @classmethod
40 | def validate(cls, crs):
41 | try:
42 | cls(crs)
43 | return True
44 | except Exception as e:
45 | log.error('Cannot initialize crs', exc_info=True)
46 | return False
47 |
48 | def __init__(self, crs):
49 | '''
50 | Valid crs input can be :
51 | > an epsg code (integer or string)
52 | > a SRID string (AUTH:CODE)
53 | > a proj4 string
54 | '''
55 |
56 | #force cast to string
57 | crs = str(crs)
58 |
59 | #case 1 : crs is just a code
60 | if crs.isdigit():
61 | self.auth = 'EPSG' #assume authority is EPSG
62 | self.code = int(crs)
63 | self.proj4 = '+init=epsg:'+str(self.code)
64 | #note : 'epsg' must be lower case to be compatible with gdal osr
65 |
66 | #case 2 crs is in the form AUTH:CODE
67 | elif ':' in crs:
68 | self.auth, self.code = crs.split(':')
69 | if self.code.isdigit(): #what about non integer code ??? (IGNF:LAMB93)
70 | self.code = int(self.code)
71 | if self.auth.startswith('+init='):
72 | _, self.auth = self.auth.split('=')
73 | self.auth = self.auth.upper()
74 | self.proj4 = '+init=' + self.auth.lower() + ':' + str(self.code)
75 | else:
76 | raise ValueError('Invalid CRS : '+crs)
77 |
78 | #case 3 : crs is proj4 string
79 | elif all([param.startswith('+') for param in crs.split(' ') if param]):
80 | self.auth = None
81 | self.code = None
82 | self.proj4 = crs
83 |
84 | else:
85 | raise ValueError('Invalid CRS : '+crs)
86 |
87 | @classmethod
88 | def fromGDAL(cls, ds):
89 | if not HAS_GDAL:
90 | raise ImportError('GDAL not available')
91 | wkt = ds.GetProjection()
92 | if not wkt: #empty string
93 | raise ImportError('This raster has no projection')
94 | crs = osr.SpatialReference()
95 | crs.ImportFromWkt(wkt)
96 | return cls(crs.ExportToProj4())
97 |
98 | @property
99 | def SRID(self):
100 | if self.isSRID:
101 | return self.auth + ':' + str(self.code)
102 | else:
103 | return None
104 |
105 | @property
106 | def hasCode(self):
107 | return self.code is not None
108 |
109 | @property
110 | def hasAuth(self):
111 | return self.auth is not None
112 |
113 | @property
114 | def isSRID(self):
115 | return self.hasAuth and self.hasCode
116 |
117 | @property
118 | def isEPSG(self):
119 | return self.auth == 'EPSG' and self.code is not None
120 |
121 | @property
122 | def isWM(self):
123 | return self.auth == 'EPSG' and self.code == 3857
124 |
125 | @property
126 | def isWGS84(self):
127 | return self.auth == 'EPSG' and self.code == 4326
128 |
129 | @property
130 | def isUTM(self):
131 | return self.auth == 'EPSG' and self.code in UTM_EPSG_CODES
132 |
133 | def __str__(self):
134 | '''Return the best string representation for this crs'''
135 | if self.isSRID:
136 | return self.SRID
137 | else:
138 | return self.proj4
139 |
140 | def __eq__(self, srs2):
141 | return self.__str__() == srs2.__str__()
142 |
143 | def getOgrSpatialRef(self):
144 | '''Build gdal osr spatial ref object'''
145 | if not HAS_GDAL:
146 | raise ImportError('GDAL not available')
147 |
148 | prj = osr.SpatialReference()
149 |
150 | if self.isEPSG:
151 | r = prj.ImportFromEPSG(self.code)
152 | else:
153 | r = prj.ImportFromProj4(self.proj4)
154 |
155 | #ImportFromEPSG and ImportFromProj4 do not raise any exception
156 | #but return zero if the projection is valid
157 | if r > 0:
158 | raise ValueError('Cannot initialize osr : ' + self.proj4)
159 |
160 | return prj
161 |
162 |
163 | def getPyProj(self):
164 | '''Build pyproj object'''
165 | if not HAS_PYPROJ:
166 | raise ImportError('PYPROJ not available')
167 | if self.isSRID:
168 | return pyproj.Proj(self.SRID)
169 | else:
170 | try:
171 | return pyproj.Proj(self.proj4)
172 | except Exception as e:
173 | raise ValueError('Cannot initialize pyproj object for projection {}. Error : {}'.format(self.proj4, e))
174 |
175 |
176 | def loadProj4(self):
177 | '''Return a Python dict of proj4 parameters'''
178 | dc = {}
179 | if self.proj4 is None:
180 | return dc
181 | for param in self.proj4.split(' '):
182 | if param.count('=') == 1:
183 | k, v = param.split('=')
184 | try:
185 | v = float(v)
186 | except ValueError:
187 | pass
188 | dc[k] = v
189 | else:
190 | pass
191 | return dc
192 |
193 | @property
194 | def isGeo(self):
195 | if self.code == 4326:
196 | return True
197 | elif HAS_GDAL:
198 | prj = self.getOgrSpatialRef()
199 | isGeo = prj.IsGeographic()
200 | return isGeo == 1
201 | elif HAS_PYPROJ:
202 | prj = self.getPyProj()
203 | return prj.crs.is_geographic
204 | else:
205 | return None
206 |
207 | def getWKT(self):
208 | if HAS_GDAL:
209 | prj = self.getOgrSpatialRef()
210 | return prj.ExportToWkt()
211 | elif self.isEPSG:
212 | return EPSGIO.getEsriWkt(self.code)
213 | else:
214 | raise NotImplementedError
215 |
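# Editor's note -- a minimal sketch of the three input forms accepted by
# SRS.__init__() as documented above (parsing itself needs neither GDAL nor pyproj).
if __name__ == '__main__':
    print(SRS(4326).SRID)                           # 'EPSG:4326' (bare EPSG code)
    print(SRS('EPSG:3857').isWM)                    # True (SRID string)
    print(SRS('+proj=longlat +datum=WGS84').proj4)  # proj4 string kept as-is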
--------------------------------------------------------------------------------
/core/proj/srv.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | # ***** GPL LICENSE BLOCK *****
4 | #
5 | # This program is free software: you can redistribute it and/or modify
6 | # it under the terms of the GNU General Public License as published by
7 | # the Free Software Foundation, either version 3 of the License, or
8 | # (at your option) any later version.
9 | #
10 | # This program is distributed in the hope that it will be useful,
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 | # GNU General Public License for more details.
14 | #
15 | # You should have received a copy of the GNU General Public License
16 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 | # All rights reserved.
18 | # ***** GPL LICENSE BLOCK *****
19 | import logging
20 | log = logging.getLogger(__name__)
21 |
22 |
23 | from urllib.request import Request, urlopen
24 | from urllib.error import URLError, HTTPError
25 | import json
26 |
27 | from .. import settings
28 |
29 | USER_AGENT = settings.user_agent
30 |
31 | DEFAULT_TIMEOUT = 2
32 | REPROJ_TIMEOUT = 60
33 |
34 | ######################################
35 | # EPSG.io
36 | # https://github.com/klokantech/epsg.io
37 |
38 |
39 | class EPSGIO():
40 |
41 | @staticmethod
42 | def ping():
43 | url = "http://epsg.io"
44 | try:
45 | rq = Request(url, headers={'User-Agent': USER_AGENT})
46 | urlopen(rq, timeout=DEFAULT_TIMEOUT)
47 | return True
48 | except HTTPError as e: #HTTPError is a subclass of URLError, so it must be caught first
49 | log.error('Cannot ping {} web service, http error {}'.format(url, e.code))
50 | return False
51 | except URLError as e:
52 | log.error('Cannot ping {} web service, {}'.format(url, e.reason))
53 | return False
54 | except:
55 | raise
56 |
57 |
58 | @staticmethod
59 | def reprojPt(epsg1, epsg2, x1, y1):
60 |
61 | url = "http://epsg.io/trans?x={X}&y={Y}&z={Z}&s_srs={CRS1}&t_srs={CRS2}"
62 |
63 | url = url.replace("{X}", str(x1))
64 | url = url.replace("{Y}", str(y1))
65 | url = url.replace("{Z}", '0')
66 | url = url.replace("{CRS1}", str(epsg1))
67 | url = url.replace("{CRS2}", str(epsg2))
68 |
69 | log.debug(url)
70 |
71 | try:
72 | rq = Request(url, headers={'User-Agent': USER_AGENT})
73 | response = urlopen(rq, timeout=REPROJ_TIMEOUT).read().decode('utf8')
74 | except (URLError, HTTPError) as err:
75 | log.error('HTTP request failed url:{}, code:{}, error:{}'.format(url, getattr(err, 'code', None), getattr(err, 'reason', err))) #a plain URLError has no 'code' attribute
76 | raise
77 |
78 | obj = json.loads(response)
79 |
80 | return (float(obj['x']), float(obj['y']))
81 |
82 | @staticmethod
83 | def reprojPts(epsg1, epsg2, points):
84 |
85 | if len(points) == 1:
86 | x, y = points[0]
87 | return [EPSGIO.reprojPt(epsg1, epsg2, x, y)]
88 |
89 | urlTemplate = "http://epsg.io/trans?data={POINTS}&s_srs={CRS1}&t_srs={CRS2}"
90 |
91 | urlTemplate = urlTemplate.replace("{CRS1}", str(epsg1))
92 | urlTemplate = urlTemplate.replace("{CRS2}", str(epsg2))
93 |
94 | #data = ';'.join([','.join(map(str, p)) for p in points])
95 |
96 | precision = 4
97 | data = [','.join( [str(round(v, precision)) for v in p] ) for p in points ]
98 | part, parts = [], []
99 | for i,p in enumerate(data):
100 | l = sum([len(p) for p in part]) + len(';'*len(part))
101 | if l + len(p) < 4000: #limit is 4094
102 | part.append(p)
103 | else:
104 | parts.append(part)
105 | part = [p]
106 | if i == len(data)-1:
107 | parts.append(part)
108 | parts = [';'.join(part) for part in parts]
109 |
110 | result = []
111 | for part in parts:
112 | url = urlTemplate.replace("{POINTS}", part)
113 | log.debug(url)
114 |
115 | try:
116 | rq = Request(url, headers={'User-Agent': USER_AGENT})
117 | response = urlopen(rq, timeout=REPROJ_TIMEOUT).read().decode('utf8')
118 | except (URLError, HTTPError) as err:
119 | log.error('HTTP request failed url:{}, code:{}, error:{}'.format(url, getattr(err, 'code', None), getattr(err, 'reason', err))) #a plain URLError has no 'code' attribute
120 | raise
121 |
122 | obj = json.loads(response)
123 | result.extend( [(float(p['x']), float(p['y'])) for p in obj] )
124 |
125 | return result
126 |
127 | @staticmethod
128 | def search(query):
129 | query = str(query).replace(' ', '+')
130 | url = "http://epsg.io/?q={QUERY}&format=json"
131 | url = url.replace("{QUERY}", query)
132 | log.debug('Search crs : {}'.format(url))
133 | rq = Request(url, headers={'User-Agent': USER_AGENT})
134 | response = urlopen(rq, timeout=DEFAULT_TIMEOUT).read().decode('utf8')
135 | obj = json.loads(response)
136 | log.debug('Search results : {}'.format([ (r['code'], r['name']) for r in obj['results'] ]))
137 | return obj['results']
138 |
139 | @staticmethod
140 | def getEsriWkt(epsg):
141 | url = "http://epsg.io/{CODE}.esriwkt"
142 | url = url.replace("{CODE}", str(epsg))
143 | log.debug(url)
144 | rq = Request(url, headers={'User-Agent': USER_AGENT})
145 | wkt = urlopen(rq, timeout=DEFAULT_TIMEOUT).read().decode('utf8')
146 | return wkt
147 |
148 |
149 |
150 |
151 | ######################################
152 | # World Coordinate Converter
153 | # https://github.com/ClemRz/TWCC
154 |
155 | class TWCC():
156 |
157 | @staticmethod
158 | def reprojPt(epsg1, epsg2, x1, y1):
159 |
160 | url = "http://twcc.fr/en/ws/?fmt=json&x={X}&y={Y}&in=EPSG:{CRS1}&out=EPSG:{CRS2}"
161 |
162 | url = url.replace("{X}", str(x1))
163 | url = url.replace("{Y}", str(y1))
164 | url = url.replace("{Z}", '0')
165 | url = url.replace("{CRS1}", str(epsg1))
166 | url = url.replace("{CRS2}", str(epsg2))
167 |
168 | rq = Request(url, headers={'User-Agent': USER_AGENT})
169 | response = urlopen(rq, timeout=REPROJ_TIMEOUT).read().decode('utf8')
170 | obj = json.loads(response)
171 |
172 | return (float(obj['point']['x']), float(obj['point']['y']))
173 |
174 |
175 | ######################################
176 | #http://spatialreference.org/ref/epsg/2154/esriwkt/
177 |
178 | #class SpatialRefOrg():
179 |
180 |
181 |
182 | ######################################
183 | #http://prj2epsg.org/search
184 |
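# Editor's note -- usage sketch (requires network access to the epsg.io service):
# EPSGIO.reprojPt(4326, 3857, 2.35, 48.85) returns an (x, y) tuple expressed in the
# target CRS, and EPSGIO.reprojPts() batches many points into URLs kept under the
# ~4000 character limit handled above.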
--------------------------------------------------------------------------------
/core/proj/utm.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | #Original code from https://github.com/Turbo87/utm
4 | #>simplified version that only handles utm zones (and not latitude bands from the MGRS grid)
5 | #>reversed coord order : latlon --> lonlat
6 | #>added support for UTM EPSG codes
7 |
8 | # more infos : http://geokov.com/education/utm.aspx
9 | # formulas : https://en.wikipedia.org/wiki/Universal_Transverse_Mercator_coordinate_system
10 |
11 | import math
12 |
13 |
14 | K0 = 0.9996
15 |
16 | E = 0.00669438
17 | E2 = E * E
18 | E3 = E2 * E
19 | E_P2 = E / (1.0 - E)
20 |
21 | SQRT_E = math.sqrt(1 - E)
22 | _E = (1 - SQRT_E) / (1 + SQRT_E)
23 | _E2 = _E * _E
24 | _E3 = _E2 * _E
25 | _E4 = _E3 * _E
26 | _E5 = _E4 * _E
27 |
28 | M1 = (1 - E / 4 - 3 * E2 / 64 - 5 * E3 / 256)
29 | M2 = (3 * E / 8 + 3 * E2 / 32 + 45 * E3 / 1024)
30 | M3 = (15 * E2 / 256 + 45 * E3 / 1024)
31 | M4 = (35 * E3 / 3072)
32 |
33 | P2 = (3. / 2 * _E - 27. / 32 * _E3 + 269. / 512 * _E5)
34 | P3 = (21. / 16 * _E2 - 55. / 32 * _E4)
35 | P4 = (151. / 96 * _E3 - 417. / 128 * _E5)
36 | P5 = (1097. / 512 * _E4)
37 |
38 | R = 6378137
39 |
40 |
41 | class OutOfRangeError(ValueError):
42 | pass
43 |
44 |
45 | def longitude_to_zone_number(longitude):
46 | return int((longitude + 180) / 6) + 1
47 |
48 | def latitude_to_northern(latitude):
49 | return latitude >= 0
50 |
51 | def lonlat_to_zone_northern(lon, lat):
52 | zone = longitude_to_zone_number(lon)
53 | north = latitude_to_northern(lat)
54 | return zone, north
55 |
56 | def zone_number_to_central_longitude(zone_number):
57 | return (zone_number - 1) * 6 - 180 + 3
58 |
59 |
60 | # Each UTM zone on WGS84 datum has a dedicated EPSG code : 326xx for north hemisphere and 327xx for south
61 | # where xx is the zone number from 1 to 60
62 |
63 | #UTM_EPSG_CODES = ['326' + str(i).zfill(2) for i in range(1,61)] + ['327' + str(i).zfill(2) for i in range(1,61)]
64 | UTM_EPSG_CODES = [32600 + i for i in range(1,61)] + [32700 + i for i in range(1,61)]
65 |
66 | def _code_from_epsg(epsg):
67 | '''Return & validate EPSG code str from user input'''
68 | epsg = str(epsg)
69 | if epsg.isdigit():
70 | code = epsg
71 | elif ':' in epsg:
72 | auth, code = epsg.split(':')
73 | else:
74 | raise ValueError('Invalid UTM EPSG code')
75 | if code in map(str, UTM_EPSG_CODES):
76 | return code
77 | else:
78 | raise ValueError('Invalid UTM EPSG code')
79 |
80 | def epsg_to_zone_northern(epsg):
81 | code = _code_from_epsg(epsg)
82 | zone = int(code[-2:])
83 | if code[2] == '6':
84 | northern = True
85 | else:
86 | northern = False
87 | return zone, northern
88 |
89 | def lonlat_to_epsg(longitude, latitude):
90 | zone = longitude_to_zone_number(longitude)
91 | if latitude_to_northern(latitude):
92 | return 'EPSG:326' + str(zone).zfill(2)
93 | else:
94 | return 'EPSG:327' + str(zone).zfill(2)
95 |
96 | def zone_northern_to_epsg(zone, northern):
97 | if northern:
98 | return 'EPSG:326' + str(zone).zfill(2)
99 | else:
100 | return 'EPSG:327' + str(zone).zfill(2)
101 |
102 |
103 | ######
104 |
105 | class UTM():
106 |
107 | def __init__(self, zone, north):
108 | '''
109 | zone : UTM zone number
110 | north : True if northern hemisphere, False if southern
111 | '''
112 | if not 1 <= zone <= 60:
113 | raise OutOfRangeError('zone number out of range (must be between 1 and 60)')
114 | self.zone_number = zone
115 | self.northern = north
116 |
117 | @classmethod
118 | def init_from_epsg(cls, epsg):
119 | zone, north = epsg_to_zone_northern(epsg)
120 | return cls(zone, north)
121 |
122 | @classmethod
123 | def init_from_lonlat(cls, lon, lat):
124 | zone, north = lonlat_to_zone_northern(lon, lat)
125 | return cls(zone, north)
126 |
127 |
128 | def utm_to_lonlat(self, easting, northing):
129 |
130 | if not 100000 <= easting < 1000000:
131 | raise OutOfRangeError('easting out of range (must be between 100,000 m and 999,999 m)')
132 | if not 0 <= northing <= 10000000:
133 | raise OutOfRangeError('northing out of range (must be between 0 m and 10,000,000 m)')
134 |
135 | x = easting - 500000
136 | y = northing
137 |
138 | if not self.northern:
139 | y -= 10000000
140 |
141 | m = y / K0
142 | mu = m / (R * M1)
143 |
144 | p_rad = (mu +
145 | P2 * math.sin(2 * mu) +
146 | P3 * math.sin(4 * mu) +
147 | P4 * math.sin(6 * mu) +
148 | P5 * math.sin(8 * mu))
149 |
150 | p_sin = math.sin(p_rad)
151 | p_sin2 = p_sin * p_sin
152 |
153 | p_cos = math.cos(p_rad)
154 |
155 | p_tan = p_sin / p_cos
156 | p_tan2 = p_tan * p_tan
157 | p_tan4 = p_tan2 * p_tan2
158 |
159 | ep_sin = 1 - E * p_sin2
160 | ep_sin_sqrt = math.sqrt(1 - E * p_sin2)
161 |
162 | n = R / ep_sin_sqrt
163 | r = (1 - E) / ep_sin
164 |
165 | c = _E * p_cos**2
166 | c2 = c * c
167 |
168 | d = x / (n * K0)
169 | d2 = d * d
170 | d3 = d2 * d
171 | d4 = d3 * d
172 | d5 = d4 * d
173 | d6 = d5 * d
174 |
175 | latitude = (p_rad - (p_tan / r) *
176 | (d2 / 2 -
177 | d4 / 24 * (5 + 3 * p_tan2 + 10 * c - 4 * c2 - 9 * E_P2)) +
178 | d6 / 720 * (61 + 90 * p_tan2 + 298 * c + 45 * p_tan4 - 252 * E_P2 - 3 * c2))
179 |
180 | longitude = (d -
181 | d3 / 6 * (1 + 2 * p_tan2 + c) +
182 | d5 / 120 * (5 - 2 * c + 28 * p_tan2 - 3 * c2 + 8 * E_P2 + 24 * p_tan4)) / p_cos
183 |
184 | return (math.degrees(longitude) + zone_number_to_central_longitude(self.zone_number),
185 | math.degrees(latitude))
186 |
187 |
188 | def lonlat_to_utm(self, longitude, latitude):
189 | if not -80.0 <= latitude <= 84.0:
190 | raise OutOfRangeError('latitude out of range (must be between 80 deg S and 84 deg N)')
191 | if not -180.0 <= longitude <= 180.0:
192 | raise OutOfRangeError('longitude out of range (must be between 180 deg W and 180 deg E)')
193 |
194 | lat_rad = math.radians(latitude)
195 | lat_sin = math.sin(lat_rad)
196 | lat_cos = math.cos(lat_rad)
197 |
198 | lat_tan = lat_sin / lat_cos
199 | lat_tan2 = lat_tan * lat_tan
200 | lat_tan4 = lat_tan2 * lat_tan2
201 |
202 | lon_rad = math.radians(longitude)
203 | central_lon = zone_number_to_central_longitude(self.zone_number)
204 | central_lon_rad = math.radians(central_lon)
205 |
206 | n = R / math.sqrt(1 - E * lat_sin**2)
207 | c = E_P2 * lat_cos**2
208 |
209 | a = lat_cos * (lon_rad - central_lon_rad)
210 | a2 = a * a
211 | a3 = a2 * a
212 | a4 = a3 * a
213 | a5 = a4 * a
214 | a6 = a5 * a
215 |
216 | m = R * (M1 * lat_rad -
217 | M2 * math.sin(2 * lat_rad) +
218 | M3 * math.sin(4 * lat_rad) -
219 | M4 * math.sin(6 * lat_rad))
220 |
221 | easting = K0 * n * (a +
222 | a3 / 6 * (1 - lat_tan2 + c) +
223 | a5 / 120 * (5 - 18 * lat_tan2 + lat_tan4 + 72 * c - 58 * E_P2)) + 500000
224 |
225 | northing = K0 * (m + n * lat_tan * (a2 / 2 +
226 | a4 / 24 * (5 - lat_tan2 + 9 * c + 4 * c**2) +
227 | a6 / 720 * (61 - 58 * lat_tan2 + lat_tan4 + 600 * c - 330 * E_P2)))
228 |
229 | if not self.northern:
230 | northing += 10000000
231 |
232 | return easting, northing
233 |
234 |
235 |
236 |
237 |
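A round-trip sketch for the UTM helpers above (pure math, no network; assumes the functions and class defined in this file are in scope):

    utm = UTM.init_from_lonlat(2.35, 48.85)          # Paris area -> zone 31, northern hemisphere
    e, n = utm.lonlat_to_utm(2.35, 48.85)            # forward projection
    lon, lat = utm.utm_to_lonlat(e, n)               # back to ~(2.35, 48.85)
    print(utm.zone_number, utm.northern)             # 31 True
    print(zone_northern_to_epsg(utm.zone_number, utm.northern))  # 'EPSG:32631'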
--------------------------------------------------------------------------------
/core/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "proj_engine": "AUTO",
3 | "img_engine": "AUTO",
4 | "user_agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0"
5 | }
6 |
--------------------------------------------------------------------------------
/core/settings.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 | import os
3 | import json
4 |
5 | from .checkdeps import HAS_GDAL, HAS_PYPROJ, HAS_IMGIO, HAS_PIL
6 |
7 | def getAvailableProjEngines():
8 | engines = ['AUTO', 'BUILTIN']
9 | #if EPSGIO.ping():
10 | engines.append('EPSGIO')
11 | if HAS_GDAL:
12 | engines.append('GDAL')
13 | if HAS_PYPROJ:
14 | engines.append('PYPROJ')
15 | return engines
16 |
17 | def getAvailableImgEngines():
18 | engines = ['AUTO']
19 | if HAS_GDAL:
20 | engines.append('GDAL')
21 | if HAS_IMGIO:
22 | engines.append('IMGIO')
23 | if HAS_PIL:
24 | engines.append('PIL')
25 | return engines
26 |
27 |
28 | class Settings():
29 |
30 | def __init__(self, **kwargs):
31 | self._proj_engine = kwargs['proj_engine']
32 | self._img_engine = kwargs['img_engine']
33 | self.user_agent = kwargs['user_agent']
34 |
35 | @property
36 | def proj_engine(self):
37 | return self._proj_engine
38 |
39 | @proj_engine.setter
40 | def proj_engine(self, engine):
41 | if engine not in getAvailableProjEngines():
42 | raise IOError
43 | else:
44 | self._proj_engine = engine
45 |
46 | @property
47 | def img_engine(self):
48 | return self._img_engine
49 |
50 | @img_engine.setter
51 | def img_engine(self, engine):
52 | if engine not in getAvailableImgEngines():
53 | raise IOError
54 | else:
55 | self._img_engine = engine
56 |
57 |
58 | cfgFile = os.path.join(os.path.dirname(__file__), "settings.json")
59 |
60 | with open(cfgFile, 'r') as cfg:
61 | prefs = json.load(cfg)
62 |
63 | settings = Settings(**prefs)
64 |
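A short sketch of how other modules might consume the settings singleton above; it assumes the names defined in this file (settings, getAvailableProjEngines) are in scope, and the outcome depends on which optional libraries checkdeps detects:

    print(getAvailableProjEngines())    # e.g. ['AUTO', 'BUILTIN', 'EPSGIO', 'GDAL', 'PYPROJ']
    print(settings.proj_engine)         # value loaded from settings.json ('AUTO' by default)
    try:
        settings.proj_engine = 'GDAL'   # only accepted when GDAL is installed
    except IOError:
        print('GDAL engine unavailable, keeping', settings.proj_engine)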
--------------------------------------------------------------------------------
/core/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .xy import XY
2 | from .bbox import BBOX
3 | from .gradient import Color, Stop, Gradient
4 | from .timing import perf_clock
5 |
--------------------------------------------------------------------------------
/core/utils/bbox.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | # This file is part of BlenderGIS
4 |
5 | # ***** GPL LICENSE BLOCK *****
6 | #
7 | # This program is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 | # All rights reserved.
20 | # ***** GPL LICENSE BLOCK *****
21 |
22 | from . import XY
23 | import logging
24 | log = logging.getLogger(__name__)
25 |
26 | class BBOX(dict):
27 | '''A class to represent a bounding box'''
28 |
29 | def __init__(self, *args, **kwargs):
30 | '''
31 | Three ways to init a BBOX instance:
32 | - from a list of values ordered from bottom left to upper right
33 | >> BBOX(xmin, ymin, xmax, ymax) or BBOX(xmin, ymin, zmin, xmax, ymax, zmax)
34 | - from a tuple containing the values ordered from bottom left to upper right
35 | >> BBOX( (xmin, ymin, xmax, ymax) ) or BBOX( (xmin, ymin, zmin, xmax, ymax, zmax) )
36 | - from keyword arguments with no particular order
37 | >> BBOX(xmin=, ymin=, xmax=, ymax=) or BBOX(xmin=, ymin=, zmin=, xmax=, ymax=, zmax=)
38 | '''
39 | if args:
40 | if len(args) == 1: #maybe a tuple was passed directly
41 | args = args[0]
42 | if len(args) == 4:
43 | self.xmin, self.ymin, self.xmax, self.ymax = args
44 | elif len(args) == 6:
45 | self.xmin, self.ymin, self.zmin, self.xmax, self.ymax, self.zmax = args
46 | else:
47 | raise ValueError('BBOX() initialization expects 4 or 6 arguments, got %g' % len(args))
48 | elif kwargs:
49 | if not all( [kw in kwargs for kw in ['xmin', 'ymin', 'xmax', 'ymax']] ):
50 | raise ValueError('invalid keyword arguments')
51 | self.xmin, self.xmax = kwargs['xmin'], kwargs['xmax']
52 | self.ymin, self.ymax = kwargs['ymin'], kwargs['ymax']
53 | if 'zmin' in kwargs and 'zmax' in kwargs:
54 | self.zmin, self.zmax = kwargs['zmin'], kwargs['zmax']
55 |
56 | def __str__(self):
57 | if self.hasZ:
58 | return 'xmin:%g, ymin:%g, zmin:%g, xmax:%g, ymax:%g, zmax:%g' % tuple(self)
59 | else:
60 | return 'xmin:%g, ymin:%g, xmax:%g, ymax:%g' % tuple(self)
61 |
62 | def __getitem__(self, attr):
63 | '''access attributes like a dictionary'''
64 | return getattr(self, attr)
65 |
66 | def __setitem__(self, key, value):
67 | '''set attributes like a dictionary'''
68 | setattr(self, key, value)
69 |
70 | def __iter__(self):
71 | '''iterate over values in bottom left to upper right order
72 | allows support of unpacking and conversion to tuple or list'''
73 | if self.hasZ:
74 | return iter([self.xmin, self.ymin, self.zmin, self.xmax, self.ymax, self.zmax])
75 | else:
76 | return iter([self.xmin, self.ymin, self.xmax, self.ymax])
77 |
78 | def keys(self):
79 | '''override dict keys() method'''
80 | return self.__dict__.keys()
81 |
82 | def items(self):
83 | '''override dict items() method'''
84 | return self.__dict__.items()
85 |
86 | def values(self):
87 | '''override dict values() method'''
88 | return self.__dict__.values()
89 |
90 | @classmethod
91 | def fromXYZ(cls, lst):
92 | '''Create a BBOX from a flat list of values ordered following XYZ axis
93 | --> (xmin, xmax, ymin, ymax) or (xmin, xmax, ymin, ymax, zmin, zmax)'''
94 | if len(lst) == 4:
95 | xmin, xmax, ymin, ymax = lst
96 | return cls(xmin=xmin, ymin=ymin, xmax=xmax, ymax=ymax)
97 | elif len(lst) == 6:
98 | xmin, xmax, ymin, ymax, zmin, zmax = lst
99 | return cls(xmin=xmin, ymin=ymin, zmin=zmin, xmax=xmax, ymax=ymax, zmax=zmax)
100 |
101 | def toXYZ(self):
102 | '''Export to simple tuple of values ordered following XYZ axis'''
103 | if self.hasZ:
104 | return (self.xmin, self.xmax, self.ymin, self.ymax, self.zmin, self.zmax)
105 | else:
106 | return (self.xmin, self.xmax, self.ymin, self.ymax)
107 |
108 | @classmethod
109 | def fromLatlon(cls, lst):
110 | '''Create a 2D BBOX from a list of values ordered as latlon format (latmin, lonmin, latmax, lonmax) <--> (ymin, xmin, ymax, xmax)'''
111 | ymin, xmin, ymax, xmax = lst
112 | return cls(xmin=xmin, ymin=ymin, xmax=xmax, ymax=ymax)
113 |
114 | def toLatlon(self):
115 | '''Export to simple tuple of values ordered as latlon format in 2D'''
116 | return (self.ymin, self.xmin, self.ymax, self.xmax)
117 |
118 | @property
119 | def hasZ(self):
120 | '''Check if this bbox is in 3D'''
121 | if hasattr(self, 'zmin') and hasattr(self, 'zmax'):
122 | return True
123 | else:
124 | return False
125 |
126 | def to2D(self):
127 | '''Cast 3d bbox to 2d >> discard zmin and zmax values'''
128 | return BBOX(self.xmin, self.ymin, self.xmax, self.ymax)
129 |
130 | def toGeo(self, geoscn):
131 | '''Convert the BBOX into Spatial Ref System space defined in Scene'''
132 | if geoscn.isBroken or not geoscn.isGeoref:
133 | log.warning('Cannot convert bbox, invalid georef')
134 | return None
135 | xmax = geoscn.crsx + (self.xmax * geoscn.scale)
136 | ymax = geoscn.crsy + (self.ymax * geoscn.scale)
137 | xmin = geoscn.crsx + (self.xmin * geoscn.scale)
138 | ymin = geoscn.crsy + (self.ymin * geoscn.scale)
139 | if self.hasZ:
140 | return BBOX(xmin, ymin, self.zmin, xmax, ymax, self.zmax)
141 | else:
142 | return BBOX(xmin, ymin, xmax, ymax)
143 |
144 | def __eq__(self, bb):
145 | '''Test if two bboxes are equal'''
146 | if self.xmin == bb.xmin and self.xmax == bb.xmax and self.ymin == bb.ymin and self.ymax == bb.ymax:
147 | if self.hasZ and bb.hasZ:
148 | if self.zmin == bb.zmin and self.zmax == bb.zmax:
149 | return True
150 | else:
151 | return True
152 |
153 | def overlap(self, bb):
154 | '''Test if 2 bbox objects have intersection areas (in 2D only)'''
155 | def test_overlap(a_min, a_max, b_min, b_max):
156 | return not ((a_min > b_max) or (b_min > a_max))
157 | return test_overlap(self.xmin, self.xmax, bb.xmin, bb.xmax) and test_overlap(self.ymin, self.ymax, bb.ymin, bb.ymax)
158 |
159 | def isWithin(self, bb):
160 | '''Test if this bbox is within another bbox'''
161 | if bb.xmin <= self.xmin and bb.xmax >= self.xmax and bb.ymin <= self.ymin and bb.ymax >= self.ymax:
162 | return True
163 | else:
164 | return False
165 |
166 | def contains(self, bb):
167 | '''Test if this bbox contains another bbox'''
168 | if bb.xmin > self.xmin and bb.xmax < self.xmax and bb.ymin > self.ymin and bb.ymax < self.ymax:
169 | return True
170 | else:
171 | return False
172 |
173 | def __add__(self, bb):
174 | '''Use '+' operator to perform the union of 2 bbox'''
175 | xmax = max(self.xmax, bb.xmax)
176 | xmin = min(self.xmin, bb.xmin)
177 | ymax = max(self.ymax, bb.ymax)
178 | ymin = min(self.ymin, bb.ymin)
179 | if self.hasZ and bb.hasZ:
180 | zmax = max(self.zmax, bb.zmax)
181 | zmin = min(self.zmin, bb.zmin)
182 | return BBOX(xmin, ymin, zmin, xmax, ymax, zmax)
183 | else:
184 | return BBOX(xmin, ymin, xmax, ymax)
185 |
186 | def shift(self, dx, dy):
187 | '''translate the bbox in 2D'''
188 | self.xmin += dx
189 | self.xmax += dx
190 | self.ymin += dy
191 | self.ymax += dy
192 |
193 | @property
194 | def center(self):
195 | x = (self.xmin + self.xmax) / 2
196 | y = (self.ymin + self.ymax) / 2
197 | if self.hasZ:
198 | z = (self.zmin + self.zmax) / 2
199 | return XY(x,y,z)
200 | else:
201 | return XY(x,y)
202 |
203 | @property
204 | def dimensions(self):
205 | dx = self.xmax - self.xmin
206 | dy = self.ymax - self.ymin
207 | if self.hasZ:
208 | dz = self.zmax - self.zmin
209 | return XY(dx,dy,dz)
210 | else:
211 | return XY(dx,dy)
212 |
213 | ################
214 | ## 2D properties
215 |
216 | @property
217 | def corners(self):
218 | '''Get the list of corner coords, starting from the upper left and ordered clockwise'''
219 | return [ self.ul, self.ur, self.br, self.bl ]
220 |
221 | @property
222 | def ul(self):
223 | '''upper left corner'''
224 | return XY(self.xmin, self.ymax)
225 | @property
226 | def ur(self):
227 | '''upper right corner'''
228 | return XY(self.xmax, self.ymax)
229 | @property
230 | def bl(self):
231 | '''bottom left corner'''
232 | return XY(self.xmin, self.ymin)
233 | @property
234 | def br(self):
235 | '''bottom right corner'''
236 | return XY(self.xmax, self.ymin)
237 |
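An illustrative sketch of the BBOX class above (arbitrary values, using only methods defined in this file):

    a = BBOX(0, 0, 10, 10)
    b = BBOX(xmin=5, ymin=5, xmax=20, ymax=20)
    print(a.overlap(b))             # True, the two boxes intersect
    print(a + b)                    # union -> xmin:0, ymin:0, xmax:20, ymax:20
    print(a.center, a.dimensions)   # (5.0, 5.0) (10, 10)
    print(BBOX.fromLatlon([48.8, 2.2, 48.9, 2.4]).toLatlon())  # (48.8, 2.2, 48.9, 2.4)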
--------------------------------------------------------------------------------
/core/utils/timing.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | def perf_clock():
4 | if hasattr(time, 'perf_counter'):
5 | return time.perf_counter()
6 | elif hasattr(time, 'clock'): #time.clock is deprecated and was removed in Python 3.8
7 | return time.clock()
8 | else:
9 | raise Exception("Python time lib doesn't contain a suitable clock function")
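A trivial usage sketch of perf_clock:

    t0 = perf_clock()
    sum(range(1_000_000))                        # some work to time
    print('elapsed: %.3f s' % (perf_clock() - t0))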
--------------------------------------------------------------------------------
/core/utils/xy.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | # This file is part of BlenderGIS
4 |
5 | # ***** GPL LICENSE BLOCK *****
6 | #
7 | # This program is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 | # All rights reserved.
20 | # ***** GPL LICENSE BLOCK *****
21 |
22 |
23 | class XY(object):
24 | '''A class to represent a 2D or 3D coordinate tuple'''
25 | def __init__(self, x, y, z=None):
26 | '''
27 | You can use the constructor in many ways:
28 | XY(0, 1) - passing two arguments
29 | XY(x=0, y=1) - passing keywords arguments
30 | XY(**{'x': 0, 'y': 1}) - unpacking a dictionary
31 | XY(*[0, 1]) - unpacking a list or a tuple (or a generic iterable)
32 | '''
33 | if z is None:
34 | self.data=[x, y]
35 | else:
36 | self.data=[x, y, z]
37 | def __str__(self):
38 | if self.z is not None:
39 | return "(%s, %s, %s)"%(self.x, self.y, self.z)
40 | else:
41 | return "(%s, %s)"%(self.x,self.y)
42 | def __repr__(self):
43 | return self.__str__()
44 | def __getitem__(self,item):
45 | return self.data[item]
46 | def __setitem__(self, idx, value):
47 | self.data[idx] = value
48 | def __iter__(self):
49 | return iter(self.data)
50 | def __len__(self):
51 | return len(self.data)
52 | @property
53 | def x(self):
54 | return self.data[0]
55 | @property
56 | def y(self):
57 | return self.data[1]
58 | @property
59 | def z(self):
60 | try:
61 | return self.data[2]
62 | except IndexError:
63 | return None
64 | @property
65 | def xy(self):
66 | return self.data[:2]
67 | @property
68 | def xyz(self):
69 | return self.data
70 |
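A short sketch of the XY container above:

    p = XY(3, 4)
    q = XY(1, 2, 5)
    x, y = p              # unpacking works through __iter__
    print(p, len(p))      # (3, 4) 2
    print(q.z, q.xyz)     # 5 [1, 2, 5]
    print(q[0], q.xy)     # 1 [1, 2]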
--------------------------------------------------------------------------------
/icons/asc.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/asc.png
--------------------------------------------------------------------------------
/icons/curve.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/curve.png
--------------------------------------------------------------------------------
/icons/delaunay.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/delaunay.png
--------------------------------------------------------------------------------
/icons/drop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/drop.png
--------------------------------------------------------------------------------
/icons/exifCam.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/exifCam.png
--------------------------------------------------------------------------------
/icons/georefCam.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/georefCam.png
--------------------------------------------------------------------------------
/icons/layers.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/layers.png
--------------------------------------------------------------------------------
/icons/lidar.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/lidar.png
--------------------------------------------------------------------------------
/icons/osm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/osm.png
--------------------------------------------------------------------------------
/icons/osm_xml.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/osm_xml.png
--------------------------------------------------------------------------------
/icons/raster.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/raster.png
--------------------------------------------------------------------------------
/icons/shp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/shp.png
--------------------------------------------------------------------------------
/icons/terrain.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/terrain.png
--------------------------------------------------------------------------------
/icons/voronoi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PivotStack/BlenderGIS/7acdaae88ddcb43345daca9e4b310fd8bc626ce4/icons/voronoi.png
--------------------------------------------------------------------------------
/issue_template.md:
--------------------------------------------------------------------------------
1 |
9 |
10 | # **Blender and OS versions**
11 |
12 |
13 |
14 | # **Describe the bug**
15 |
16 |
17 |
18 | # **How to Reproduce**
19 |
20 |
21 |
22 | # **Error message**
23 |
24 |
25 |
--------------------------------------------------------------------------------
/operators/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["add_camera_exif", "add_camera_georef", "io_export_shp", "io_get_dem", "io_import_georaster", "io_import_osm", "io_import_shp", "io_import_asc", "mesh_delaunay_voronoi", "nodes_terrain_analysis_builder", "nodes_terrain_analysis_reclassify", "view3d_mapviewer", "object_drop", "mesh_earth_sphere"]
2 |
--------------------------------------------------------------------------------
/operators/add_camera_georef.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | # ***** GPL LICENSE BLOCK *****
4 | #
5 | # This program is free software: you can redistribute it and/or modify
6 | # it under the terms of the GNU General Public License as published by
7 | # the Free Software Foundation, either version 3 of the License, or
8 | # (at your option) any later version.
9 | #
10 | # This program is distributed in the hope that it will be useful,
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 | # GNU General Public License for more details.
14 | #
15 | # You should have received a copy of the GNU General Public License
16 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 | # All rights reserved.
18 | # ***** GPL LICENSE BLOCK *****
19 | import logging
20 | log = logging.getLogger(__name__)
21 |
22 | import bpy
23 | from mathutils import Vector
24 | from bpy.props import StringProperty, BoolProperty, EnumProperty, FloatProperty
25 |
26 | from .utils import getBBOX
27 | from ..geoscene import GeoScene
28 |
29 |
30 |
31 | class CAMERA_OT_add_georender_cam(bpy.types.Operator):
32 | '''
33 | Add a new georef camera or update an existing one
34 | A georef camera is a top view orthographic camera that can be used to render a map
35 | The camera is set up to encompass the selected object; the output spatial resolution (meters/pixel) can be set by the user
36 | A world file is written in the Blender text editor; it can be used to georef the output render
37 | '''
38 | bl_idname = "camera.georender"
39 | bl_label = "Georef cam"
40 | bl_description = "Create or update a camera to render a georeferenced map"
41 | bl_options = {"REGISTER", "UNDO"}
42 |
43 |
44 | name: StringProperty(name = "Camera name", default="Georef cam", description="")
45 | target_res: FloatProperty(name = "Pixel size", default=5, description="Pixel size in map units/pixel", min=0.00001)
46 | zLocOffset: FloatProperty(name = "Z loc. off.", default=50, description="Camera z location offset, defined as a percentage of the z dimension of the target mesh", min=0)
47 |
48 | redo = 0
49 | bbox = None #global var used to avoid recomputing the bbox at each redo
50 |
51 | def check(self, context):
52 | return True
53 |
54 |
55 | def draw(self, context):
56 | layout = self.layout
57 | layout.prop(self, 'name')
58 | layout.prop(self, 'target_res')
59 | layout.prop(self, 'zLocOffset')
60 |
61 | @classmethod
62 | def poll(cls, context):
63 | return context.mode == 'OBJECT'
64 |
65 | def execute(self, context): #called every time operator redo options are modified
66 |
67 | #Operator redo count
68 | self.redo += 1
69 |
70 | #Check georef
71 | scn = context.scene
72 | geoscn = GeoScene(scn)
73 | if not geoscn.isGeoref:
74 | self.report({'ERROR'}, "Scene isn't georef")
75 | return {'CANCELLED'}
76 |
77 | #Validate selection
78 | objs = bpy.context.selected_objects
79 | if (not objs or len(objs) > 2) or \
80 | (len(objs) == 1 and not objs[0].type == 'MESH') or \
81 | (len(objs) == 2 and not set( (objs[0].type, objs[1].type )) == set( ('MESH','CAMERA') ) ):
82 | self.report({'ERROR'}, "Pre-selection is incorrect")
83 | return {'CANCELLED'}
84 |
85 | #Flag new camera creation
86 | if len(objs) == 2:
87 | newCam = False
88 | else:
89 | newCam = True
90 |
91 | #Get georef data
92 | dx, dy = geoscn.getOriginPrj()
93 |
94 | #Allocate obj
95 | for obj in objs:
96 | if obj.type == 'MESH':
97 | georefObj = obj
98 | elif obj.type == 'CAMERA':
99 | camObj = obj
100 | cam = camObj.data
101 |
102 | #do not recompute bbox at operator redo because zdim is mis-evaluated
103 | #when redoing the op on an obj that has a displace modifier on it
104 | #TODO find a less hacky fix
105 | if self.bbox is None:
106 | bbox = getBBOX.fromObj(georefObj, applyTransform = True)
107 | self.bbox = bbox
108 | else:
109 | bbox = self.bbox
110 |
111 | locx, locy, locz = bbox.center
112 | dimx, dimy, dimz = bbox.dimensions
113 | if dimz == 0:
114 | dimz = 1
115 | #dimx, dimy, dimz = georefObj.dimensions #dimensions property apply object transformations (scale and rot.)
116 |
117 | #Set active cam
118 | if newCam:
119 | cam = bpy.data.cameras.new(name=self.name)
120 | cam['mapRes'] = self.target_res #custom prop
121 | camObj = bpy.data.objects.new(name=self.name, object_data=cam)
122 | scn.collection.objects.link(camObj)
123 | scn.camera = camObj
124 | elif self.redo == 1: #first exec, get initial camera res
125 | scn.camera = camObj
126 | try:
127 | self.target_res = cam['mapRes']
128 | except KeyError:
129 | self.report({'ERROR'}, "This camera has no map resolution property")
130 | return {'CANCELLED'}
131 | else: #following exec, set camera res in redo panel
132 | try:
133 | cam['mapRes'] = self.target_res
134 | except KeyError:
135 | self.report({'ERROR'}, "This camera has no map resolution property")
136 | return {'CANCELLED'}
137 |
138 | #Set camera data
139 | cam.type = 'ORTHO'
140 | cam.ortho_scale = max((dimx, dimy)) #ratio = max((dimx, dimy)) / min((dimx, dimy))
141 |
142 | #General offset used to set cam z loc and clip end distance
143 | #needed to avoid clipping/black hole effects
144 | offset = dimz * self.zLocOffset/100
145 |
146 | #Set camera location
147 | camLocZ = bbox['zmin'] + dimz + offset
148 | camObj.location = (locx, locy, camLocZ)
149 |
150 | #Set camera clipping
151 | cam.clip_start = 0
152 | cam.clip_end = dimz + offset*2
153 | cam.show_limits = True
154 |
155 | if not newCam:
156 | if self.redo == 1:#first exec, get initial camera name
157 | self.name = camObj.name
158 | else:#following exec, set camera name in redo panel
159 | camObj.name = self.name
160 | camObj.data.name = self.name
161 |
162 | #Update selection
163 | bpy.ops.object.select_all(action='DESELECT')
164 | camObj.select_set(True)
165 | context.view_layer.objects.active = camObj
166 |
167 | #setup scene
168 | scn.camera = camObj
169 | scn.render.resolution_x = int(dimx / self.target_res)
170 | scn.render.resolution_y = int(dimy / self.target_res)
171 | scn.render.resolution_percentage = 100
172 |
173 | #Write wf
174 | res = self.target_res#dimx / scene.render.resolution_x
175 | rot = 0
176 | x = bbox['xmin'] + dx
177 | y = bbox['ymax'] + dy
178 | wf_data = '\n'.join(map(str, [res, rot, rot, -res, x+res/2, y-res/2]))
179 | wf_name = camObj.name + '.wld'
180 | if wf_name in bpy.data.texts:
181 | wfText = bpy.data.texts[wf_name]
182 | wfText.clear()
183 | else:
184 | wfText = bpy.data.texts.new(name=wf_name)
185 | wfText.write(wf_data)
186 |
187 | #Purge old wf text
188 | for wfText in bpy.data.texts:
189 | name, ext = wfText.name[:-4], wfText.name[-4:]
190 | if ext == '.wld' and name not in bpy.data.objects:
191 | bpy.data.texts.remove(wfText)
192 |
193 | return {'FINISHED'}
194 |
195 |
196 | def register():
197 | try:
198 | bpy.utils.register_class(CAMERA_OT_add_georender_cam)
199 | except ValueError as e:
200 | log.warning('{} is already registered, now unregister and retry... '.format(CAMERA_OT_add_georender_cam))
201 | unregister()
202 | bpy.utils.register_class(CAMERA_OT_add_georender_cam)
203 |
204 |
205 | def unregister():
206 | bpy.utils.unregister_class(CAMERA_OT_add_georender_cam)
207 |
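A hedged sketch of driving the operator above from a script; it assumes the add-on is enabled, the scene is already georeferenced, and a target mesh exists ('Terrain' is a placeholder name):

    import bpy

    bpy.ops.object.select_all(action='DESELECT')
    terrain = bpy.data.objects['Terrain']            # placeholder object name
    terrain.select_set(True)
    bpy.context.view_layer.objects.active = terrain
    bpy.ops.camera.georender('EXEC_DEFAULT', name='Georef cam', target_res=2.0)
    # a '<camera name>.wld' world file is written in the text editor, as described above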
--------------------------------------------------------------------------------
/operators/io_get_dem.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 |
4 | import logging
5 | log = logging.getLogger(__name__)
6 |
7 | from urllib.request import Request, urlopen
8 | from urllib.error import URLError, HTTPError
9 |
10 | import bpy
11 | import bmesh
12 | from bpy.types import Operator, Panel, AddonPreferences
13 | from bpy.props import StringProperty, IntProperty, FloatProperty, BoolProperty, EnumProperty, FloatVectorProperty
14 |
15 | from ..geoscene import GeoScene
16 | from .utils import adjust3Dview, getBBOX, isTopView
17 | from ..core.proj import SRS, reprojBbox
18 |
19 | from ..core import settings
20 | USER_AGENT = settings.user_agent
21 |
22 | PKG, SUBPKG = __package__.split('.', maxsplit=1)
23 |
24 | TIMEOUT = 120
25 |
26 | class IMPORTGIS_OT_dem_query(Operator):
27 | """Import elevation data from a web service"""
28 |
29 | bl_idname = "importgis.dem_query"
30 | bl_description = 'Query for elevation data from a web service'
31 | bl_label = "Get elevation (SRTM)"
32 | bl_options = {"UNDO"}
33 |
34 | def invoke(self, context, event):
35 |
36 | #check georef
37 | geoscn = GeoScene(context.scene)
38 | if not geoscn.isGeoref:
39 | self.report({'ERROR'}, "Scene is not georef")
40 | return {'CANCELLED'}
41 | if geoscn.isBroken:
42 | self.report({'ERROR'}, "Scene georef is broken, please fix it beforehand")
43 | return {'CANCELLED'}
44 |
45 | #return self.execute(context)
46 | return context.window_manager.invoke_props_dialog(self)#, width=350)
47 |
48 | def draw(self,context):
49 | prefs = context.preferences.addons[PKG].preferences
50 | layout = self.layout
51 | row = layout.row(align=True)
52 | row.prop(prefs, "demServer", text='Server')
53 | if 'opentopography' in prefs.demServer:
54 | row = layout.row(align=True)
55 | row.prop(prefs, "opentopography_api_key", text='Api Key')
56 |
57 | @classmethod
58 | def poll(cls, context):
59 | return context.mode == 'OBJECT'
60 |
61 | def execute(self, context):
62 |
63 | prefs = bpy.context.preferences.addons[PKG].preferences
64 | scn = context.scene
65 | geoscn = GeoScene(scn)
66 | crs = SRS(geoscn.crs)
67 |
68 | #Validate selection
69 | objs = bpy.context.selected_objects
70 | aObj = context.active_object
71 | if len(objs) == 1 and aObj.type == 'MESH':
72 | onMesh = True
73 | bbox = getBBOX.fromObj(aObj).toGeo(geoscn)
74 | elif isTopView(context):
75 | onMesh = False
76 | bbox = getBBOX.fromTopView(context).toGeo(geoscn)
77 | else:
78 | self.report({'ERROR'}, "Please define the query extent in orthographic top view or by selecting a reference object")
79 | return {'CANCELLED'}
80 |
81 | if bbox.dimensions.x > 1000000 or bbox.dimensions.y > 1000000:
82 | self.report({'ERROR'}, "Too large extent")
83 | return {'CANCELLED'}
84 |
85 | bbox = reprojBbox(geoscn.crs, 4326, bbox)
86 |
87 | if 'SRTM' in prefs.demServer:
88 | if bbox.ymin > 60:
89 | self.report({'ERROR'}, "SRTM is not available beyond 60 degrees north")
90 | return {'CANCELLED'}
91 | if bbox.ymax < -56:
92 | self.report({'ERROR'}, "SRTM is not available below 56 degrees south")
93 | return {'CANCELLED'}
94 |
95 | if 'opentopography' in prefs.demServer:
96 | if not prefs.opentopography_api_key:
97 | self.report({'ERROR'}, "Please register at opentopography.org and request an API key")
98 | return {'CANCELLED'}
99 |
100 | #Set cursor representation to 'loading' icon
101 | w = context.window
102 | w.cursor_set('WAIT')
103 |
104 | #url template
105 | #http://opentopo.sdsc.edu/otr/getdem?demtype=SRTMGL3&west=-120.168457&south=36.738884&east=-118.465576&north=38.091337&outputFormat=GTiff
106 | e = 0.002 #opentopo service does not always respect the entire bbox, so request a little more
107 | xmin, xmax = bbox.xmin - e, bbox.xmax + e
108 | ymin, ymax = bbox.ymin - e, bbox.ymax + e
109 |
110 | url = prefs.demServer.format(W=xmin, E=xmax, S=ymin, N=ymax, API_KEY=prefs.opentopography_api_key)
111 | log.debug(url)
112 |
113 | # Download the file from url and save it locally
114 | # opentopo returns a GeoTIFF in WGS84
115 | if bpy.data.is_saved:
116 | filePath = os.path.join(os.path.dirname(bpy.data.filepath), 'srtm.tif')
117 | else:
118 | filePath = os.path.join(bpy.app.tempdir, 'srtm.tif')
119 |
120 | #we can directly init NpImg from blob but if gdal is not used as image engine then georef will not be extracted
121 | #Alternatively, we can save on disk, open with GeoRaster class (will use tyf if gdal not available)
122 | rq = Request(url, headers={'User-Agent': USER_AGENT})
123 | try:
124 | with urlopen(rq, timeout=TIMEOUT) as response, open(filePath, 'wb') as outFile:
125 | data = response.read() # a `bytes` object
126 | outFile.write(data) #
127 | except (URLError, HTTPError) as err:
128 | log.error('Http request failed. url:{}, code:{}, error:{}'.format(url, getattr(err, 'code', None), err.reason))
129 | self.report({'ERROR'}, "Cannot reach OpenTopography web service, check logs for more info")
130 | return {'CANCELLED'}
131 | except TimeoutError:
132 | log.error('Http request timed out. url:{}'.format(url))
133 | info = "Cannot reach SRTM web service provider, server can be down or overloaded. Please retry later"
134 | log.info(info)
135 | self.report({'ERROR'}, info)
136 | return {'CANCELLED'}
137 |
138 | if not onMesh:
139 | bpy.ops.importgis.georaster(
140 | 'EXEC_DEFAULT',
141 | filepath = filePath,
142 | reprojection = True,
143 | rastCRS = 'EPSG:4326',
144 | importMode = 'DEM',
145 | subdivision = 'subsurf',
146 | demInterpolation = True)
147 | else:
148 | bpy.ops.importgis.georaster(
149 | 'EXEC_DEFAULT',
150 | filepath = filePath,
151 | reprojection = True,
152 | rastCRS = 'EPSG:4326',
153 | importMode = 'DEM',
154 | subdivision = 'subsurf',
155 | demInterpolation = True,
156 | demOnMesh = True,
157 | objectsLst = [str(i) for i, obj in enumerate(scn.collection.all_objects) if obj.name == bpy.context.active_object.name][0],
158 | clip = False,
159 | fillNodata = False)
160 |
161 | bbox = getBBOX.fromScn(scn)
162 | adjust3Dview(context, bbox, zoomToSelect=False)
163 |
164 | return {'FINISHED'}
165 |
166 |
167 | def register():
168 | try:
169 | bpy.utils.register_class(IMPORTGIS_OT_dem_query)
170 | except ValueError as e:
171 | log.warning('{} is already registered, now unregister and retry... '.format(IMPORTGIS_OT_dem_query))
172 | unregister()
173 | bpy.utils.register_class(IMPORTGIS_OT_dem_query)
174 |
175 | def unregister():
176 | bpy.utils.unregister_class(IMPORTGIS_OT_dem_query)
177 |
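A hedged sketch of triggering the DEM query above from the Python console; it assumes a georeferenced scene, an orthographic top view or a selected reference mesh to define the extent, and (for the OpenTopography servers) an API key saved in the add-on preferences:

    import bpy
    bpy.ops.importgis.dem_query('EXEC_DEFAULT')   # downloads the DEM and imports it as a georaster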
--------------------------------------------------------------------------------
/operators/lib/osm/nominatim.py:
--------------------------------------------------------------------------------
1 | import os, ssl
2 |
3 | import logging
4 | log = logging.getLogger(__name__)
5 |
6 | import json
7 |
8 | from urllib.request import urlopen
9 | from urllib.request import Request
10 | from urllib.parse import quote_plus
11 |
12 | TIMEOUT = 2
13 |
14 | def nominatimQuery(
15 | query,
16 | base_url = 'https://nominatim.openstreetmap.org/',
17 | referer = None,
18 | user_agent = None,
19 | format = 'json',
20 | limit = 10):
21 |
22 | url = base_url + 'search?'
23 | url += 'format=' + format
24 | url += '&q=' + quote_plus(query)
25 | url += '&limit=' + str(limit)
26 |
27 | log.debug('Nominatim search request : {}'.format(url))
28 |
29 | req = Request(url)
30 | if referer:
31 | req.add_header('Referer', referer)
32 | if user_agent:
33 | req.add_header('User-Agent', user_agent)
34 |
35 | response = urlopen(req, timeout=TIMEOUT)
36 |
37 | r = json.loads(response.read().decode('utf-8'))
38 |
39 | return r
40 |
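A minimal usage sketch of nominatimQuery above (network access required; Nominatim's usage policy asks clients to send an identifying User-Agent):

    results = nominatimQuery('Eiffel Tower, Paris', user_agent='BlenderGIS example', limit=1)
    if results:
        place = results[0]
        print(place['display_name'], place['lat'], place['lon'])   # fields of Nominatim's json output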
--------------------------------------------------------------------------------
/operators/lib/osm/overpy/__about__.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | "__author__",
3 | "__copyright__",
4 | "__email__",
5 | "__license__",
6 | "__summary__",
7 | "__title__",
8 | "__uri__",
9 | "__version__",
10 | ]
11 |
12 | __title__ = "overpy"
13 | __summary__ = "Python Wrapper to access the OpenStreetMap Overpass API"
14 | __uri__ = "https://github.com/DinoTools/python-overpy"
15 |
16 | __version__ = "0.3.1"
17 |
18 | __author__ = "PhiBo (DinoTools)"
19 | __email__ = ""
20 |
21 | __license__ = "MIT"
22 | __copyright__ = "Copyright 2014-2015 %s" % __author__
23 |
--------------------------------------------------------------------------------
/operators/lib/osm/overpy/exception.py:
--------------------------------------------------------------------------------
1 | class OverPyException(BaseException):
2 | """OverPy base exception"""
3 | pass
4 |
5 |
6 | class DataIncomplete(OverPyException):
7 | """
8 | Raised if the requested data isn't available in the result.
9 | Try to improve the query or to resolve the missing data.
10 | """
11 | def __init__(self, *args, **kwargs):
12 | OverPyException.__init__(
13 | self,
14 | "Data incomplete try to improve the query to resolve the missing data",
15 | *args,
16 | **kwargs
17 | )
18 |
19 |
20 | class ElementDataWrongType(OverPyException):
21 | """
22 | Raised if the provided element does not match the expected type.
23 |
24 | :param type_expected: The expected element type
25 | :type type_expected: String
26 | :param type_provided: The provided element type
27 | :type type_provided: String|None
28 | """
29 | def __init__(self, type_expected, type_provided=None):
30 | self.type_expected = type_expected
31 | self.type_provided = type_provided
32 |
33 | def __str__(self):
34 | return "Type expected '%s' but '%s' provided" % (
35 | self.type_expected,
36 | str(self.type_provided)
37 | )
38 |
39 |
40 | class OverpassBadRequest(OverPyException):
41 | """
42 | Raised if the Overpass API service returns a syntax error.
43 |
44 | :param query: The encoded query as it was sent to the server
45 | :type query: Bytes
46 | :param msgs: List of error messages
47 | :type msgs: List
48 | """
49 | def __init__(self, query, msgs=None):
50 | self.query = query
51 | if msgs is None:
52 | msgs = []
53 | self.msgs = msgs
54 |
55 | def __str__(self):
56 | tmp_msgs = []
57 | for tmp_msg in self.msgs:
58 | if not isinstance(tmp_msg, str):
59 | tmp_msg = str(tmp_msg)
60 | tmp_msgs.append(tmp_msg)
61 |
62 | return "\n".join(tmp_msgs)
63 |
64 |
65 | class OverpassGatewayTimeout(OverPyException):
66 | """
67 | Raised if load of the Overpass API service is too high and it can't handle the request.
68 | """
69 | def __init__(self):
70 | OverPyException.__init__(self, "Server load too high")
71 |
72 |
73 | class OverpassTooManyRequests(OverPyException):
74 | """
75 | Raised if the Overpass API service returns a 429 status code.
76 | """
77 | def __init__(self):
78 | OverPyException.__init__(self, "Too many requests")
79 |
80 |
81 | class OverpassUnknownContentType(OverPyException):
82 | """
83 | Raised if the reported content type isn't handled by OverPy.
84 |
85 | :param content_type: The reported content type
86 | :type content_type: None or String
87 | """
88 | def __init__(self, content_type):
89 | self.content_type = content_type
90 |
91 | def __str__(self):
92 | if self.content_type is None:
93 | return "No content type returned"
94 | return "Unknown content type: %s" % self.content_type
95 |
96 |
97 | class OverpassUnknownHTTPStatusCode(OverPyException):
98 | """
99 | Raised if the returned HTTP status code isn't handled by OverPy.
100 |
101 | :param code: The HTTP status code
102 | :type code: Integer
103 | """
104 | def __init__(self, code):
105 | self.code = code
106 |
107 | def __str__(self):
108 | return "Unknown/Unhandled status code: %d" % self.code
--------------------------------------------------------------------------------
/operators/lib/osm/overpy/helper.py:
--------------------------------------------------------------------------------
1 | __author__ = 'mjob'
2 |
3 | import overpy
4 |
5 |
6 | def get_street(street, areacode, api=None):
7 | """
8 | Retrieve streets in a given bounding area
9 |
10 | :param overpy.Overpass api: Overpass API instance (a new one is created if None)
11 | :param String street: Name of street
12 | :param String areacode: The OSM id of the bounding area
13 | :return: Parsed result
14 | :raises overpy.exception.OverPyException: If something bad happens.
15 | """
16 | if api is None:
17 | api = overpy.Overpass()
18 |
19 | query = """
20 | area(%s)->.location;
21 | (
22 | way[highway][name="%s"](area.location);
23 | - (
24 | way[highway=service](area.location);
25 | way[highway=track](area.location);
26 | );
27 | );
28 | out body;
29 | >;
30 | out skel qt;
31 | """
32 |
33 | data = api.query(query % (areacode, street))
34 |
35 | return data
36 |
37 |
38 | def get_intersection(street1, street2, areacode, api=None):
39 | """
40 | Retrieve intersection of two streets in a given bounding area
41 |
42 | :param overpy.Overpass api: Overpass API instance (a new one is created if None)
43 | :param String street1: Name of first street of intersection
44 | :param String street2: Name of second street of intersection
45 | :param String areacode: The OSM id of the bounding area
46 | :return: List of intersections
47 | :raises overpy.exception.OverPyException: If something bad happens.
48 | """
49 | if api is None:
50 | api = overpy.Overpass()
51 |
52 | query = """
53 | area(%s)->.location;
54 | (
55 | way[highway][name="%s"](area.location); node(w)->.n1;
56 | way[highway][name="%s"](area.location); node(w)->.n2;
57 | );
58 | node.n1.n2;
59 | out meta;
60 | """
61 |
62 | data = api.query(query % (areacode, street1, street2))
63 |
64 | return data.get_nodes()
65 |
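A usage sketch for the helpers above (requires the overpy package and network access; the street names are placeholders and the area id follows Overpass's 'relation id + 3600000000' convention):

    import overpy

    api = overpy.Overpass()
    area_id = "3600062422"   # placeholder OSM area id
    streets = get_street("Unter den Linden", area_id, api=api)
    for node in get_intersection("Unter den Linden", "Friedrichstraße", area_id, api=api):
        print(node.lat, node.lon)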
--------------------------------------------------------------------------------
/operators/mesh_earth_sphere.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | from bpy.types import Operator
3 | from bpy.props import IntProperty
4 |
5 | from math import cos, sin, radians, sqrt
6 | from mathutils import Vector
7 |
8 | import logging
9 | log = logging.getLogger(__name__)
10 |
11 |
12 | def lonlat2xyz(R, lon, lat):
13 | lon, lat = radians(lon), radians(lat)
14 | x = R * cos(lat) * cos(lon)
15 | y = R * cos(lat) * sin(lon)
16 | z = R *sin(lat)
17 | return Vector((x, y, z))
18 |
19 |
20 | class OBJECT_OT_earth_sphere(Operator):
21 | bl_idname = "earth.sphere"
22 | bl_label = "lonlat to sphere"
23 | bl_description = "Transform longitude/latitude data onto a sphere, like an Earth globe"
24 | bl_options = {"REGISTER", "UNDO"}
25 |
26 | radius: IntProperty(name = "Radius", default=100, description="Sphere radius", min=1)
27 |
28 | def execute(self, context):
29 | scn = bpy.context.scene
30 | objs = bpy.context.selected_objects
31 |
32 | if not objs:
33 | self.report({'INFO'}, "No selected object")
34 | return {'CANCELLED'}
35 |
36 | for obj in objs:
37 | if obj.type != 'MESH':
38 | log.warning("Object {} is not a mesh".format(obj.name))
39 | continue
40 |
41 | w, h, thick = obj.dimensions
42 | if w > 360:
43 | log.warning("Longitude of object {} exceeds 360°".format(obj.name))
44 | continue
45 | if h > 180:
46 | log.warning("Latitude of object {} exceeds 180°".format(obj.name))
47 | continue
48 |
49 | mesh = obj.data
50 | m = obj.matrix_world
51 | for vertex in mesh.vertices:
52 | co = m @ vertex.co
53 | lon, lat = co.x, co.y
54 | vertex.co = m.inverted() @ lonlat2xyz(self.radius, lon, lat)
55 |
56 | return {'FINISHED'}
57 |
58 | EARTH_RADIUS = 6378137 #meters
59 | def getZDelta(d):
60 | '''delta value for adjusting z across earth curvature
61 | http://webhelp.infovista.com/Planet/62/Subsystems/Raster/Content/help/analysis/viewshedanalysis.html'''
62 | return sqrt(EARTH_RADIUS**2 + d**2) - EARTH_RADIUS
63 |
64 |
65 | class OBJECT_OT_earth_curvature(Operator):
66 | bl_idname = "earth.curvature"
67 | bl_label = "Earth curvature correction"
68 | bl_description = "Apply earth curvature correction for viewshed analysis"
69 | bl_options = {"REGISTER", "UNDO"}
70 |
71 | def execute(self, context):
72 | scn = bpy.context.scene
73 | obj = bpy.context.view_layer.objects.active
74 |
75 | if not obj:
76 | self.report({'INFO'}, "No active object")
77 | return {'CANCELLED'}
78 |
79 | if obj.type != 'MESH':
80 | self.report({'INFO'}, "Selection isn't a mesh")
81 | return {'CANCELLED'}
82 |
83 | mesh = obj.data
84 | viewpt = scn.cursor.location
85 |
86 | for vertex in mesh.vertices:
87 | d = (viewpt.xy - vertex.co.xy).length
88 | vertex.co.z = vertex.co.z - getZDelta(d)
89 |
90 | return {'FINISHED'}
91 |
92 |
93 | classes = [
94 | OBJECT_OT_earth_sphere,
95 | OBJECT_OT_earth_curvature
96 | ]
97 |
98 | def register():
99 | for cls in classes:
100 | try:
101 | bpy.utils.register_class(cls)
102 | except ValueError as e:
103 | log.warning('{} is already registered, now unregister and retry... '.format(cls))
104 | bpy.utils.unregister_class(cls)
105 | bpy.utils.register_class(cls)
106 |
107 | def unregister():
108 | for cls in classes:
109 | bpy.utils.unregister_class(cls)
110 |
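A few illustrative values from the helpers above (run inside Blender, since lonlat2xyz relies on mathutils.Vector):

    print(lonlat2xyz(100, 0, 0))    # Vector((100.0, 0.0, 0.0)) -> equator / Greenwich meridian
    print(lonlat2xyz(100, 90, 0))   # ~Vector((0.0, 100.0, 0.0))
    print(lonlat2xyz(100, 0, 90))   # ~Vector((0.0, 0.0, 100.0)) -> north pole
    print(getZDelta(10000))         # ~7.8, curvature drop in meters over a 10 km distance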
--------------------------------------------------------------------------------
/operators/object_drop.py:
--------------------------------------------------------------------------------
1 | # ##### BEGIN GPL LICENSE BLOCK #####
2 | #
3 | # This program is free software; you can redistribute it and/or
4 | # modify it under the terms of the GNU General Public License
5 | # as published by the Free Software Foundation; either version 2
6 | # of the License, or (at your option) any later version.
7 | #
8 | # This program is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with this program; if not, write to the Free Software Foundation,
15 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
16 | #
17 | # ##### END GPL LICENSE BLOCK #####
18 |
19 | # Original Drop to Ground addon code from Unnikrishnan(kodemax), Florian Meyer(testscreenings)
20 |
21 | import logging
22 | log = logging.getLogger(__name__)
23 |
24 | import bpy
25 | import bmesh
26 |
27 | from .utils import DropToGround, getBBOX
28 |
29 | from mathutils import Vector, Matrix
30 | from bpy.types import Operator
31 | from bpy.props import BoolProperty, EnumProperty
32 |
33 |
34 | def get_align_matrix(location, normal):
35 | up = Vector((0, 0, 1))
36 | angle = normal.angle(up)
37 | axis = up.cross(normal)
38 | mat_rot = Matrix.Rotation(angle, 4, axis)
39 | mat_loc = Matrix.Translation(location)
40 | mat_align = mat_rot @ mat_loc
41 | return mat_align
42 |
43 | def get_lowest_world_co(ob, mat_parent=None):
44 | bme = bmesh.new()
45 | bme.from_mesh(ob.data)
46 | mat_to_world = ob.matrix_world.copy()
47 | if mat_parent:
48 | mat_to_world = mat_parent @ mat_to_world
49 | lowest = None
50 | for v in bme.verts:
51 | if not lowest:
52 | lowest = v
53 | if (mat_to_world @ v.co).z < (mat_to_world @ lowest.co).z:
54 | lowest = v
55 | lowest_co = mat_to_world @ lowest.co
56 | bme.free()
57 |
58 | return lowest_co
59 |
60 |
61 | class OBJECT_OT_drop_to_ground(Operator):
62 | bl_idname = "object.drop"
63 | bl_label = "Drop to Ground"
64 | bl_description = ("Drop selected objects on the Active object")
65 | bl_options = {"REGISTER", "UNDO"} #register needed to draw operator options/redo panel
66 |
67 | align: BoolProperty(
68 | name="Align to ground",
69 | description="Aligns the objects' rotation to the ground",
70 | default=False)
71 |
72 | axisAlign: EnumProperty(
73 | items = [("N", "Normal", "Ground normal"), ("X", "X", "Ground X normal"), ("Y", "Y", "Ground Y normal"), ("Z", "Z", "Ground Z normal")],
74 | name="Align axis",
75 | description="")
76 |
77 | useOrigin: BoolProperty(
78 | name="Use Origins",
79 | description="Drop to objects' origins\n"
80 | "Use this option for dropping all types of Objects",
81 | default=False)
82 |
83 | #this method will disable the button if the conditions are not respected
84 | @classmethod
85 | def poll(cls, context):
86 | act_obj = context.active_object
87 | return (context.mode == 'OBJECT'
88 | and len(context.selected_objects) >= 2
89 | and act_obj
90 | and act_obj.type in {'MESH', 'FONT', 'META', 'CURVE', 'SURFACE'}
91 | )
92 |
93 | def draw(self, context):
94 | layout = self.layout
95 | layout.prop(self, 'align')
96 | if self.align:
97 | layout.prop(self, 'axisAlign')
98 | layout.prop(self, 'useOrigin')
99 |
100 |
101 | def execute(self, context):
102 |
103 | bpy.context.view_layer.update() #needed to make raycast function redoable (evaluate objects)
104 | ground = context.active_object
105 | obs = context.selected_objects
106 | if ground in obs:
107 | obs.remove(ground)
108 | scn = context.scene
109 | rayCaster = DropToGround(scn, ground)
110 |
111 | for ob in obs:
112 | if self.useOrigin:
113 | minLoc = ob.location
114 | else:
115 | minLoc = get_lowest_world_co(ob)
116 | #minLoc = min([(ob.matrix_world * v.co).z for v in ob.data.vertices])
117 | #getBBOX.fromObj(ob).zmin #what xy coords ???
118 |
119 | if not minLoc:
120 | msg = "Object {} is of type {}, it works only with the 'Use Origins' option " \
121 | "checked".format(ob.name, ob.type)
122 | log.info(msg)
123 | continue
124 | x, y = minLoc.x, minLoc.y
125 | hit = rayCaster.rayCast(x, y)
126 |
127 | if not hit.hit:
128 | log.info(ob.name + " did not hit the Active Object")
129 | continue
130 |
131 | # simple drop down
132 | down = hit.loc - minLoc
133 | ob.location += down
134 | #ob.location = hit.loc
135 |
136 | # drop with align to hit normal
137 | if self.align:
138 | vect = ob.location - hit.loc
139 | # rotate object to align with face normal
140 | normal = get_align_matrix(hit.loc, hit.normal)
141 | rot = normal.to_euler()
142 | if self.axisAlign == "X":
143 | rot.y = 0
144 | rot.z = 0
145 | elif self.axisAlign == "Y":
146 | rot.x = 0
147 | rot.z = 0
148 | elif self.axisAlign == "Z":
149 | rot.x = 0
150 | rot.y = 0
151 | matrix = ob.matrix_world.copy().to_3x3()
152 | matrix.rotate(rot)
153 | matrix = matrix.to_4x4()
154 | ob.matrix_world = matrix
155 | # move_object to hit_location
156 | ob.location = hit.loc
157 | # move object above surface again
158 | vect.rotate(rot)
159 | ob.location += vect
160 |
161 |
162 | return {'FINISHED'}
163 |
164 | def register():
165 | try:
166 | bpy.utils.register_class(OBJECT_OT_drop_to_ground)
167 | except ValueError as e:
168 | log.warning('{} is already registered, now unregister and retry... '.format(OBJECT_OT_drop_to_ground))
169 | unregister()
170 | bpy.utils.register_class(OBJECT_OT_drop_to_ground)
171 |
172 |
173 | def unregister():
174 | bpy.utils.unregister_class(OBJECT_OT_drop_to_ground)
175 |
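A hedged sketch of calling the drop operator above from a script; the ground must be the active object and the objects to drop must be selected (object names are placeholders):

    import bpy

    ground = bpy.data.objects['Terrain']   # placeholder names
    cube = bpy.data.objects['Cube']
    bpy.ops.object.select_all(action='DESELECT')
    cube.select_set(True)
    ground.select_set(True)
    bpy.context.view_layer.objects.active = ground
    bpy.ops.object.drop('EXEC_DEFAULT', align=True, axisAlign='Z')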
--------------------------------------------------------------------------------
/operators/rsrc/gradients/ (SVG color-gradient presets; markup not captured in this dump):
--------------------------------------------------------------------------------
GMT_panoply.svg, Gummy-Kids.svg, Horizon_1.svg, Ribbon-Colors.svg,
Spectral_11.svg, Sunrise.svg, abyss.svg, alarm.p1.0.5.svg, bhw3_05.svg,
fs2009.svg, heat.svg, nrwc.svg, pm3d01.svg, precip_11lev.svg, temp_19lev.svg,
temperature.svg, wiki-plumbago.svg
--------------------------------------------------------------------------------
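These presets are ordinary SVG gradient definitions consumed by the add-on's gradient loader (core/utils/gradient.py, not shown in this section). As an illustration only, here is a minimal sketch of extracting the color stops from such a file with the standard library; it assumes the usual <linearGradient>/<stop> markup, and readSvgStops is a name invented for this example, not part of the add-on.

import xml.etree.ElementTree as ET

SVG_NS = '{http://www.w3.org/2000/svg}'

def readSvgStops(path):
	'''Return the gradient stops of an SVG file as a list of (offset, color) tuples.'''
	stops = []
	for stop in ET.parse(path).iter(SVG_NS + 'stop'):
		offset = stop.get('offset', '0')
		color = stop.get('stop-color')
		if color is None:
			# some editors store the color inside the style attribute instead
			for rule in stop.get('style', '').split(';'):
				key, _, val = rule.partition(':')
				if key.strip() == 'stop-color':
					color = val.strip()
		stops.append((offset, color))
	return stops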
/operators/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .bgis_utils import placeObj, adjust3Dview, showTextures, addTexture, getBBOX, DropToGround, mouseTo3d, isTopView
2 | from .georaster_utils import rasterExtentToMesh, geoRastUVmap, setDisplacer, bpyGeoRaster, exportAsMesh
3 | from .delaunay_voronoi import computeVoronoiDiagram, computeDelaunayTriangulation
4 |
--------------------------------------------------------------------------------
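The package re-exports these helpers so operator modules can pull them in with a single relative import. A hedged sketch of how a hypothetical module sitting next to the existing ones in /operators might use them (finalize is an invented name, not an add-on function):

from .utils import placeObj, getBBOX, adjust3Dview

def finalize(context, mesh, name):
	'''Link a freshly built mesh to the scene and frame it in the 3d views.'''
	obj = placeObj(mesh, name)  # creates and links the object, makes it active
	adjust3Dview(context, getBBOX.fromObj(obj))  # widen clip distances and zoom to the new object
	return obj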
/operators/utils/bgis_utils.py:
--------------------------------------------------------------------------------
1 |
2 | import bpy
3 | from mathutils import Vector, Matrix
4 | from mathutils.bvhtree import BVHTree
5 | from bpy_extras.view3d_utils import region_2d_to_location_3d, region_2d_to_vector_3d
6 |
7 | from ...core import BBOX
8 |
9 | def isTopView(context):
10 | if context.area.type == 'VIEW_3D':
11 | reg3d = context.region_data
12 | else:
13 | return False
14 | return reg3d.view_perspective == 'ORTHO' and tuple(reg3d.view_matrix.to_euler()) == (0,0,0)
15 |
16 | def mouseTo3d(context, x, y):
17 | '''Convert event.mouse_region to world coordinates'''
18 | if context.area.type != 'VIEW_3D':
19 | raise Exception('Wrong context')
20 | coords = (x, y)
21 | reg = context.region
22 | reg3d = context.region_data
23 | vec = region_2d_to_vector_3d(reg, reg3d, coords)
24 | 	loc = region_2d_to_location_3d(reg, reg3d, coords, vec) #WARNING: this function returns an indeterminate value when the view3d clip distance is too large
25 | return loc
26 |
27 |
28 | class DropToGround():
29 | 	'''A class to perform raycasting across the z axis'''
30 |
31 | def __init__(self, scn, ground, method='OBJ'):
32 | self.method = method # 'BVH' or 'OBJ'
33 | self.scn = scn
34 | self.ground = ground
35 | self.bbox = getBBOX.fromObj(ground, applyTransform=True)
36 | self.mw = self.ground.matrix_world
37 | self.mwi = self.mw.inverted()
38 | if self.method == 'BVH':
39 | self.bvh = BVHTree.FromObject(self.ground, bpy.context.evaluated_depsgraph_get(), deform=True)
40 |
41 | def rayCast(self, x, y):
42 | #Hit vector
43 | offset = 100
44 | orgWldSpace = Vector((x, y, self.bbox.zmax + offset))
45 | orgObjSpace = self.mwi @ orgWldSpace
46 | direction = Vector((0,0,-1)) #down
47 | #build ray cast hit namespace object
48 | class RayCastHit(): pass
49 | rcHit = RayCastHit()
50 | #raycast
51 | if self.method == 'OBJ':
52 | rcHit.hit, rcHit.loc, rcHit.normal, rcHit.faceIdx = self.ground.ray_cast(orgObjSpace, direction)
53 | elif self.method == 'BVH':
54 | rcHit.loc, rcHit.normal, rcHit.faceIdx, rcHit.dst = self.bvh.ray_cast(orgObjSpace, direction)
55 | if not rcHit.loc:
56 | rcHit.hit = False
57 | else:
58 | rcHit.hit = True
59 | #adjust values
60 | if not rcHit.hit:
61 | #return same original 2d point with z=0
62 | rcHit.loc = Vector((orgWldSpace.x, orgWldSpace.y, 0)) #elseZero
63 | else:
64 | rcHit.hit = True
65 |
66 | rcHit.loc = self.mw @ rcHit.loc
67 | return rcHit
68 |
69 | def placeObj(mesh, objName):
70 | '''Build and add a new object from a given mesh'''
71 | bpy.ops.object.select_all(action='DESELECT')
72 | #create an object with that mesh
73 | obj = bpy.data.objects.new(objName, mesh)
74 | # Link object to scene
75 | bpy.context.scene.collection.objects.link(obj)
76 | bpy.context.view_layer.objects.active = obj
77 | obj.select_set(True)
78 | #bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY')
79 | return obj
80 |
81 |
82 | def adjust3Dview(context, bbox, zoomToSelect=True):
83 | 	'''Adjust all 3d views' clip distances to match the submitted bbox'''
84 | dst = round(max(bbox.dimensions))
85 | k = 5 #increase factor
86 | dst = dst * k
87 | # set each 3d view
88 | areas = context.screen.areas
89 | for area in areas:
90 | if area.type == 'VIEW_3D':
91 | space = area.spaces.active
92 | if dst < 100:
93 | space.clip_start = 1
94 | elif dst < 1000:
95 | space.clip_start = 10
96 | else:
97 | space.clip_start = 100
98 | 			#Adjust the clip end distance if the new object is larger than the current setting
99 | if space.clip_end < dst:
100 | if dst > 10000000:
101 | 					dst = 10000000 #a clip distance that is too large breaks the 3d view
102 | space.clip_end = dst
103 | if zoomToSelect:
104 | overrideContext = context.copy()
105 | overrideContext['area'] = area
106 | overrideContext['region'] = area.regions[-1]
107 | bpy.ops.view3d.view_selected(overrideContext)
108 |
109 |
110 | def showTextures(context):
111 | '''Force view mode with textures'''
112 | scn = context.scene
113 | for area in context.screen.areas:
114 | if area.type == 'VIEW_3D':
115 | space = area.spaces.active
116 | if space.shading.type == 'SOLID':
117 | space.shading.color_type = 'TEXTURE'
118 |
119 |
120 | def addTexture(mat, img, uvLay, name='texture'):
121 | 	'''Add a new image texture to a given material, mapped through the given uv layer'''
122 | engine = bpy.context.scene.render.engine
123 | mat.use_nodes = True
124 | node_tree = mat.node_tree
125 | node_tree.nodes.clear()
126 | # create uv map node
127 | uvMapNode = node_tree.nodes.new('ShaderNodeUVMap')
128 | uvMapNode.uv_map = uvLay.name
129 | uvMapNode.location = (-800, 200)
130 | # create image texture node
131 | textureNode = node_tree.nodes.new('ShaderNodeTexImage')
132 | textureNode.image = img
133 | textureNode.extension = 'CLIP'
134 | textureNode.show_texture = True
135 | textureNode.location = (-400, 200)
136 | # Create BSDF diffuse node
137 | diffuseNode = node_tree.nodes.new('ShaderNodeBsdfPrincipled')#ShaderNodeBsdfDiffuse
138 | diffuseNode.location = (0, 200)
139 | # Create output node
140 | outputNode = node_tree.nodes.new('ShaderNodeOutputMaterial')
141 | outputNode.location = (400, 200)
142 | # Connect the nodes
143 | node_tree.links.new(uvMapNode.outputs['UV'] , textureNode.inputs['Vector'])
144 | node_tree.links.new(textureNode.outputs['Color'] , diffuseNode.inputs['Base Color'])#diffuseNode.inputs['Color'])
145 | node_tree.links.new(diffuseNode.outputs['BSDF'] , outputNode.inputs['Surface'])
146 |
147 |
148 | class getBBOX():
149 |
150 | 	'''Utilities to build a BBOX object from various Blender contexts'''
151 |
152 | @staticmethod
153 | def fromObj(obj, applyTransform = True):
154 | '''Create a 3D BBOX from Blender object'''
155 | if applyTransform:
156 | boundPts = [obj.matrix_world @ Vector(corner) for corner in obj.bound_box]
157 | else:
158 | boundPts = obj.bound_box
159 | xmin = min([pt[0] for pt in boundPts])
160 | xmax = max([pt[0] for pt in boundPts])
161 | ymin = min([pt[1] for pt in boundPts])
162 | ymax = max([pt[1] for pt in boundPts])
163 | zmin = min([pt[2] for pt in boundPts])
164 | zmax = max([pt[2] for pt in boundPts])
165 | return BBOX(xmin=xmin, ymin=ymin, zmin=zmin, xmax=xmax, ymax=ymax, zmax=zmax)
166 |
167 | @classmethod
168 | def fromScn(cls, scn):
169 | 		'''Create a 3D BBOX from a Blender Scene:
170 | 		the union of the bounding boxes of all objects contained in the scene'''
171 | #objs = scn.collection.objects
172 | objs = [obj for obj in scn.collection.all_objects if obj.empty_display_type != 'IMAGE']
173 | if len(objs) == 0:
174 | scnBbox = BBOX(0,0,0,0,0,0)
175 | else:
176 | scnBbox = cls.fromObj(objs[0])
177 | for obj in objs:
178 | bbox = cls.fromObj(obj)
179 | scnBbox += bbox
180 | return scnBbox
181 |
182 | @staticmethod
183 | def fromBmesh(bm):
184 | '''Create a 3D bounding box from a bmesh object'''
185 | xmin = min([pt.co.x for pt in bm.verts])
186 | xmax = max([pt.co.x for pt in bm.verts])
187 | ymin = min([pt.co.y for pt in bm.verts])
188 | ymax = max([pt.co.y for pt in bm.verts])
189 | zmin = min([pt.co.z for pt in bm.verts])
190 | zmax = max([pt.co.z for pt in bm.verts])
191 | #
192 | return BBOX(xmin=xmin, ymin=ymin, zmin=zmin, xmax=xmax, ymax=ymax, zmax=zmax)
193 |
194 | @staticmethod
195 | def fromTopView(context):
196 | 		'''Create a 2D BBOX from the Blender 3d view if the view is top ortho, else return None'''
197 | scn = context.scene
198 | area = context.area
199 | if area.type != 'VIEW_3D':
200 | return None
201 | reg = context.region
202 | reg3d = context.region_data
203 | if reg3d.view_perspective != 'ORTHO' or tuple(reg3d.view_matrix.to_euler()) != (0,0,0):
204 | print("View3d must be in top ortho")
205 | return None
206 | #
207 | loc = mouseTo3d(context, area.width, area.height)
208 | xmax, ymax = loc.x, loc.y
209 | #
210 | loc = mouseTo3d(context, 0, 0)
211 | xmin, ymin = loc.x, loc.y
212 | #
213 | return BBOX(xmin=xmin, ymin=ymin, xmax=xmax, ymax=ymax)
214 |
--------------------------------------------------------------------------------
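A hedged usage sketch, not part of the add-on, showing how DropToGround and mouseTo3d above are typically combined inside a modal operator running in the 3d view: the mouse position is projected into the scene, then snapped onto a terrain object. The function name snapMouseToTerrain and the default object name 'Terrain' are invented for the example; the relative import assumes the snippet lives as a module inside /operators.

from .utils import DropToGround, mouseTo3d

def snapMouseToTerrain(context, event, groundName='Terrain'):
	'''Return the ray cast hit under the mouse cursor, or None if the ground object is missing.'''
	ground = context.scene.objects.get(groundName)
	if ground is None:
		return None
	loc = mouseTo3d(context, event.mouse_region_x, event.mouse_region_y)  # requires a VIEW_3D context
	rc = DropToGround(context.scene, ground, method='BVH')
	hit = rc.rayCast(loc.x, loc.y)  # hit.hit is False when the ray misses; hit.loc then falls back to the input point
	return hit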