├── .gitignore ├── Contributing.md ├── License.txt ├── README.md ├── installReportTools.py └── solutionreporttools ├── __init__.py ├── common.py ├── csvexport.py ├── dataprep.py ├── gptools.py └── reporttools.py /.gitignore: -------------------------------------------------------------------------------- 1 | ################# 2 | ## Eclipse 3 | ################# 4 | 5 | *.pydevproject 6 | .project 7 | .metadata 8 | bin/ 9 | tmp/ 10 | *.tmp 11 | *.bak 12 | *.swp 13 | *~.nib 14 | local.properties 15 | .classpath 16 | .settings/ 17 | .loadpath 18 | 19 | # External tool builders 20 | .externalToolBuilders/ 21 | 22 | # Locally stored "Eclipse launch configurations" 23 | *.launch 24 | 25 | # CDT-specific 26 | .cproject 27 | 28 | # PDT-specific 29 | .buildpath 30 | 31 | 32 | ################# 33 | ## Visual Studio 34 | ################# 35 | 36 | ## Ignore Visual Studio temporary files, build results, and 37 | ## files generated by popular Visual Studio add-ons. 38 | 39 | # User-specific files 40 | *.suo 41 | *.user 42 | *.sln.docstates 43 | 44 | # Build results 45 | 46 | [Dd]ebug/ 47 | [Rr]elease/ 48 | x64/ 49 | build/ 50 | [Bb]in/ 51 | [Oo]bj/ 52 | 53 | # MSTest test Results 54 | [Tt]est[Rr]esult*/ 55 | [Bb]uild[Ll]og.* 56 | 57 | *_i.c 58 | *_p.c 59 | *.ilk 60 | *.meta 61 | *.obj 62 | *.pch 63 | *.pdb 64 | *.pgc 65 | *.pgd 66 | *.rsp 67 | *.sbr 68 | *.tlb 69 | *.tli 70 | *.tlh 71 | *.tmp 72 | *.tmp_proj 73 | *.log 74 | *.vspscc 75 | *.vssscc 76 | .builds 77 | *.pidb 78 | *.log 79 | *.scc 80 | 81 | # Visual C++ cache files 82 | ipch/ 83 | *.aps 84 | *.ncb 85 | *.opensdf 86 | *.sdf 87 | *.cachefile 88 | 89 | # Visual Studio profiler 90 | *.psess 91 | *.vsp 92 | *.vspx 93 | 94 | # Guidance Automation Toolkit 95 | *.gpState 96 | 97 | # ReSharper is a .NET coding add-in 98 | _ReSharper*/ 99 | *.[Rr]e[Ss]harper 100 | 101 | # TeamCity is a build add-in 102 | _TeamCity* 103 | 104 | # DotCover is a Code Coverage Tool 105 | *.dotCover 106 | 107 | # NCrunch 108 | *.ncrunch* 109 | .*crunch*.local.xml 110 | 111 | # Installshield output folder 112 | [Ee]xpress/ 113 | 114 | # DocProject is a documentation generator add-in 115 | DocProject/buildhelp/ 116 | DocProject/Help/*.HxT 117 | DocProject/Help/*.HxC 118 | DocProject/Help/*.hhc 119 | DocProject/Help/*.hhk 120 | DocProject/Help/*.hhp 121 | DocProject/Help/Html2 122 | DocProject/Help/html 123 | 124 | # Click-Once directory 125 | publish/ 126 | 127 | # Publish Web Output 128 | *.Publish.xml 129 | *.pubxml 130 | 131 | # NuGet Packages Directory 132 | ## TODO: If you have NuGet Package Restore enabled, uncomment the next line 133 | #packages/ 134 | 135 | # Windows Azure Build Output 136 | csx 137 | *.build.csdef 138 | 139 | # Windows Store app package directory 140 | AppPackages/ 141 | 142 | # Others 143 | sql/ 144 | *.Cache 145 | ClientBin/ 146 | [Ss]tyle[Cc]op.* 147 | ~$* 148 | *~ 149 | *.dbmdl 150 | *.[Pp]ublish.xml 151 | *.pfx 152 | *.publishsettings 153 | 154 | # RIA/Silverlight projects 155 | Generated_Code/ 156 | 157 | # Backup & report files from converting an old project file to a newer 158 | # Visual Studio version. Backup files are not needed, because we have git ;-) 159 | _UpgradeReport_Files/ 160 | Backup*/ 161 | UpgradeLog*.XML 162 | UpgradeLog*.htm 163 | 164 | # SQL Server files 165 | App_Data/*.mdf 166 | App_Data/*.ldf 167 | 168 | ############# 169 | ## Windows detritus 170 | ############# 171 | 172 | # Windows image file caches 173 | Thumbs.db 174 | ehthumbs.db 175 | 176 | # Folder config file 177 | Desktop.ini 178 | 179 | # Recycle Bin used on file shares 180 | $RECYCLE.BIN/ 181 | 182 | # Mac crap 183 | .DS_Store 184 | 185 | 186 | ############# 187 | ## Python 188 | ############# 189 | 190 | *.py[co] 191 | 192 | # Packages 193 | *.egg 194 | *.egg-info 195 | dist/ 196 | build/ 197 | eggs/ 198 | parts/ 199 | var/ 200 | sdist/ 201 | develop-eggs/ 202 | .installed.cfg 203 | 204 | # Installer logs 205 | pip-log.txt 206 | 207 | # Unit test / coverage reports 208 | .coverage 209 | .tox 210 | 211 | 212 | #Mr Developer 213 | .mr.developer.cfg 214 | 215 | # Tools, notes, outputs 216 | ___*.* 217 | ___* 218 | *.log 219 | /configs/___GlobalLoginInfo.json 220 | 221 | [Ss]ampleData/ 222 | source/[Ss]ampleData/ 223 | source/[Tt]est[Cc]onfigs/ 224 | /Maps and GDBs 225 | /*.wpr 226 | /*.wpu 227 | /*.wpu 228 | /*.wpr 229 | 230 | Maps and GDBs/ 231 | portal_item_defs/ 232 | Content/ 233 | Groups/ 234 | icons/ 235 | 236 | .idea/ -------------------------------------------------------------------------------- /Contributing.md: -------------------------------------------------------------------------------- 1 | Esri welcomes contributions from anyone and everyone. Please see our [guidelines for contributing](https://github.com/esri/contributing). -------------------------------------------------------------------------------- /License.txt: -------------------------------------------------------------------------------- 1 | Apache License - 2.0 2 | 3 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 4 | 5 | 1. Definitions. 6 | 7 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 8 | 9 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 10 | 11 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control 12 | with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management 13 | of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial 14 | ownership of such entity. 15 | 16 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 17 | 18 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, 19 | and configuration files. 20 | 21 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to 22 | compiled object code, generated documentation, and conversions to other media types. 23 | 24 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice 25 | that is included in or attached to the work (an example is provided in the Appendix below). 26 | 27 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the 28 | editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes 29 | of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, 30 | the Work and Derivative Works thereof. 31 | 32 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work 33 | or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual 34 | or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of 35 | electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on 36 | electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for 37 | the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing 38 | by the copyright owner as "Not a Contribution." 39 | 40 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and 41 | subsequently incorporated within the Work. 42 | 43 | 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, 44 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, 45 | publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 46 | 47 | 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, 48 | non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, 49 | sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are 50 | necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was 51 | submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work 52 | or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You 53 | under this License for that Work shall terminate as of the date such litigation is filed. 54 | 55 | 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, 56 | and in Source or Object form, provided that You meet the following conditions: 57 | 58 | 1. You must give any other recipients of the Work or Derivative Works a copy of this License; and 59 | 60 | 2. You must cause any modified files to carry prominent notices stating that You changed the files; and 61 | 62 | 3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices 63 | from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and 64 | 65 | 4. If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a 66 | readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the 67 | Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the 68 | Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever 69 | such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 70 | You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, 71 | provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to 72 | Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your 73 | modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with 74 | the conditions stated in this License. 75 | 76 | 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You 77 | to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, 78 | nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 79 | 80 | 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except 81 | as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 82 | 83 | 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides 84 | its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, 85 | any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for 86 | determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under 87 | this License. 88 | 89 | 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required 90 | by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, 91 | including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the 92 | use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or 93 | any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 94 | 95 | 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a 96 | fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting 97 | such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree 98 | to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your 99 | accepting any such warranty or additional liability. 100 | 101 | END OF TERMS AND CONDITIONS 102 | 103 | Copyright 2014 Esri Inc. 104 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | #Solution Data Automation Reporting Tools 2 | Collection of ArcGIS for Utilities and ArcGIS for Telecommunication solutions 3 | 4 | Collection of Python tools to quickly create reports about your organization's assets and service area. The prior version of these tools with the ArcRest Helper routine have been merged with [ArcRest](https://github.com/Esri/ArcREST). 5 | 6 | Click [here](https://github.com/Esri/utilities-solution-data-automation/releases/tag/12_18_14_Release) for the last release of the helper tools in this repo 7 | 8 | 9 | ## Features 10 | * Reclass report 11 | * Reporting Area Calculations 12 | * Summarize assets within a reporting area 13 | 14 | 15 | ## Instructions 16 | 17 | 1. Fork and then clone the repo. 18 | 2. Run and try the samples. 19 | 20 | ## Requirements 21 | 22 | * Python 2.7.x 23 | * ArcPy 24 | * ArcGIS Desktop 10.2.x, 10.3 25 | * [ArcRest](https://github.com/Esri/ArcREST) 26 | 27 | ## Installation 28 | 29 | To install the package, run the setup.py. This should copy it to your python's site-package folder. 30 | 31 | Running Installation Code: 32 | c:\src>python.exe installReportTools.py install 33 | 34 | ## Issues 35 | 36 | Find a bug or want to request a new feature? Please let us know by submitting an issue. 37 | 38 | ## Contributing 39 | 40 | Esri welcomes contributions from anyone and everyone. Please see our [guidelines for contributing](https://github.com/esri/contributing). 41 | 42 | ## Licensing 43 | Copyright 2013 Esri 44 | 45 | Licensed under the Apache License, Version 2.0 (the "License"); 46 | you may not use this file except in compliance with the License. 47 | You may obtain a copy of the License at 48 | 49 | http://www.apache.org/licenses/LICENSE-2.0 50 | 51 | Unless required by applicable law or agreed to in writing, software 52 | distributed under the License is distributed on an "AS IS" BASIS, 53 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 54 | See the License for the specific language governing permissions and 55 | limitations under the License. 56 | 57 | A copy of the license is available in the repository's [license.txt]( https://raw.github.com/Esri/quickstart-map-js/master/license.txt) file. 58 | 59 | [](Esri Tags: ArcGIS ArcGIS-Online Maps Solutions Utilities ArcGISSolutions Telecommunications) 60 | [](Esri Language: Python)​ 61 | 62 | -------------------------------------------------------------------------------- /installReportTools.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | setup( 3 | author="Mike Miller", 4 | author_email="mmiller@esri.com", 5 | description="ArcGIS solution reporting tools", 6 | license='Apache', 7 | url='www.github.com/MikeMillerGIS/solutions-data-automation', 8 | name='solutionreporttools', 9 | version='2.0.0', 10 | packages=['solutionreporttools'], 11 | package_dir={'':''} 12 | ) -------------------------------------------------------------------------------- /solutionreporttools/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.2" -------------------------------------------------------------------------------- /solutionreporttools/common.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import json 4 | import inspect 5 | import random 6 | import string 7 | import datetime 8 | import time 9 | import traceback 10 | from urlparse import urlparse 11 | import gc 12 | 13 | class CommonError(Exception): 14 | """ raised when error occurs in utility module functions """ 15 | pass 16 | 17 | #---------------------------------------------------------------------- 18 | def noneToValue(value,newValue): 19 | if value is None: 20 | return newValue 21 | else: 22 | return value 23 | #---------------------------------------------------------------------- 24 | def getLayerIndex(url): 25 | urlInfo = None 26 | urlSplit = None 27 | inx = None 28 | try: 29 | urlInfo = urlparse(url) 30 | urlSplit = str(urlInfo.path).split('/') 31 | inx = urlSplit[len(urlSplit)-1] 32 | 33 | if is_number(inx): 34 | return int(inx) 35 | 36 | except: 37 | return 0 38 | finally: 39 | urlInfo = None 40 | urlSplit = None 41 | 42 | del urlInfo 43 | del urlSplit 44 | 45 | gc.collect() 46 | #---------------------------------------------------------------------- 47 | def getLayerName(url): 48 | urlInfo = None 49 | urlSplit = None 50 | try: 51 | urlInfo = urlparse(url) 52 | urlSplit = str(urlInfo.path).split('/') 53 | name = urlSplit[len(urlSplit)-3] 54 | return name 55 | except: 56 | return url 57 | 58 | finally: 59 | urlInfo = None 60 | urlSplit = None 61 | 62 | del urlInfo 63 | del urlSplit 64 | 65 | gc.collect() 66 | #---------------------------------------------------------------------- 67 | def random_string_generator(size=6, chars=string.ascii_uppercase): 68 | try: 69 | return ''.join(random.choice(chars) for _ in range(size)) 70 | except: 71 | line, filename, synerror = trace() 72 | raise CommonError({ 73 | "function": "random_string_generator", 74 | "line": line, 75 | "filename": filename, 76 | "synerror": synerror, 77 | } 78 | ) 79 | finally: 80 | pass 81 | #---------------------------------------------------------------------- 82 | def random_int_generator(maxrange): 83 | try: 84 | return random.randint(0,maxrange) 85 | except: 86 | line, filename, synerror = trace() 87 | raise CommonError({ 88 | "function": "random_int_generator", 89 | "line": line, 90 | "filename": filename, 91 | "synerror": synerror, 92 | } 93 | ) 94 | finally: 95 | pass 96 | #---------------------------------------------------------------------- 97 | def local_time_to_online(dt=None): 98 | """ 99 | converts datetime object to a UTC timestamp for AGOL 100 | Inputs: 101 | dt - datetime object 102 | Output: 103 | Long value 104 | """ 105 | is_dst = None 106 | utc_offset = None 107 | try: 108 | if dt is None: 109 | dt = datetime.datetime.now() 110 | 111 | is_dst = time.daylight > 0 and time.localtime().tm_isdst > 0 112 | utc_offset = (time.altzone if is_dst else time.timezone) 113 | 114 | return (time.mktime(dt.timetuple()) * 1000) + (utc_offset * 1000) 115 | except: 116 | line, filename, synerror = trace() 117 | raise CommonError({ 118 | "function": "local_time_to_online", 119 | "line": line, 120 | "filename": filename, 121 | "synerror": synerror, 122 | } 123 | ) 124 | finally: 125 | is_dst = None 126 | utc_offset = None 127 | 128 | del is_dst 129 | del utc_offset 130 | 131 | #---------------------------------------------------------------------- 132 | def online_time_to_string(value,timeFormat): 133 | """ 134 | Converts a timestamp to date/time string 135 | Inputs: 136 | value - timestamp as long 137 | timeFormat - output date/time format 138 | Output: 139 | string 140 | """ 141 | try: 142 | return datetime.datetime.fromtimestamp(value /1000).strftime(timeFormat) 143 | except: 144 | line, filename, synerror = trace() 145 | raise CommonError({ 146 | "function": "online_time_to_string", 147 | "line": line, 148 | "filename": filename, 149 | "synerror": synerror, 150 | } 151 | ) 152 | finally: 153 | pass 154 | #---------------------------------------------------------------------- 155 | def is_number(s): 156 | try: 157 | float(s) 158 | return True 159 | except ValueError: 160 | pass 161 | 162 | try: 163 | import unicodedata 164 | unicodedata.numeric(s) 165 | return True 166 | except (TypeError, ValueError): 167 | pass 168 | 169 | return False 170 | #---------------------------------------------------------------------- 171 | def init_config_json(config_file): 172 | json_data = None 173 | try: 174 | if os.path.exists(config_file): 175 | #Load the config file 176 | 177 | with open(config_file) as json_file: 178 | json_data = json.load(json_file) 179 | return unicode_convert(json_data) 180 | else: 181 | return None 182 | except: 183 | line, filename, synerror = trace() 184 | raise CommonError({ 185 | "function": "init_config_json", 186 | "line": line, 187 | "filename": filename, 188 | "synerror": synerror, 189 | } 190 | ) 191 | finally: 192 | json_data = None 193 | 194 | del json_data 195 | 196 | gc.collect() 197 | 198 | #---------------------------------------------------------------------- 199 | def write_config_json(config_file, data): 200 | outfile = None 201 | try: 202 | with open(config_file, 'w') as outfile: 203 | json.dump(data, outfile) 204 | except: 205 | line, filename, synerror = trace() 206 | raise CommonError({ 207 | "function": "init_config_json", 208 | "line": line, 209 | "filename": filename, 210 | "synerror": synerror, 211 | } 212 | ) 213 | finally: 214 | outfile = None 215 | 216 | del outfile 217 | 218 | gc.collect() 219 | 220 | #---------------------------------------------------------------------- 221 | def unicode_convert(obj): 222 | try: 223 | """ converts unicode to anscii """ 224 | 225 | if isinstance(obj, dict): 226 | return {unicode_convert(key): unicode_convert(value) for key, value in obj.items()} 227 | elif isinstance(obj, list): 228 | return [unicode_convert(element) for element in obj] 229 | elif isinstance(obj, unicode): 230 | return obj.encode('utf-8') 231 | else: 232 | return obj 233 | except: 234 | return obj 235 | def find_replace_string(obj,find,replace): 236 | try: 237 | strobj = str(obj) 238 | newStr = string.replace(strobj,find, replace) 239 | if newStr == strobj: 240 | return obj 241 | else: 242 | return newStr 243 | 244 | except: 245 | line, filename, synerror = trace() 246 | raise CommonError({ 247 | "function": "find_replace_string", 248 | "line": line, 249 | "filename": filename, 250 | "synerror": synerror, 251 | } 252 | ) 253 | finally: 254 | pass 255 | def find_replace(obj,find,replace): 256 | 257 | """ searchs an object and does a find and replace """ 258 | try: 259 | if isinstance(obj, dict): 260 | return {find_replace(key,find,replace): find_replace(value,find,replace) for key, value in obj.items()} 261 | elif isinstance(obj, list): 262 | return [find_replace(element,find,replace) for element in obj] 263 | elif obj == find: 264 | return unicode_convert(replace) 265 | else: 266 | try: 267 | return unicode_convert(find_replace_string(obj, find, replace)) 268 | #obj = unicode_convert(json.loads(obj)) 269 | #return find_replace(obj,find,replace) 270 | except: 271 | return unicode_convert(obj) 272 | except: 273 | line, filename, synerror = trace() 274 | raise CommonError({ 275 | "function": "find_replace", 276 | "line": line, 277 | "filename": filename, 278 | "synerror": synerror, 279 | } 280 | ) 281 | finally: 282 | pass 283 | #---------------------------------------------------------------------- 284 | def init_log(log_file,): 285 | 286 | #Create the log file 287 | log = None 288 | try: 289 | log = open(log_file, 'a') 290 | 291 | #Change the output to both the windows and log file 292 | #original = sys.stdout 293 | sys.stdout = Tee(sys.stdout, log) 294 | except: 295 | pass 296 | return log 297 | 298 | #---------------------------------------------------------------------- 299 | class Tee(object): 300 | """ Combines standard output with a file for logging""" 301 | 302 | def __init__(self, *files): 303 | self.files = files 304 | def write(self, obj): 305 | for f in self.files: 306 | f.write(obj) 307 | 308 | #---------------------------------------------------------------------- 309 | def trace(): 310 | """ 311 | trace finds the line, the filename 312 | and error message and returns it 313 | to the user 314 | """ 315 | tb = sys.exc_info()[2] 316 | tbinfo = traceback.format_tb(tb)[0] 317 | filename = inspect.getfile( inspect.currentframe() ) 318 | # script name + line number 319 | line = tbinfo.split(", ")[1] 320 | # Get Python syntax error 321 | # 322 | synerror = traceback.format_exc().splitlines()[-1] 323 | return line, filename, synerror 324 | 325 | 326 | -------------------------------------------------------------------------------- /solutionreporttools/csvexport.py: -------------------------------------------------------------------------------- 1 | """ 2 | @author: ArcGIS for Gas Utilities 3 | @contact: ArcGISTeamUtilities@esri.com 4 | @company: Esri 5 | @version: 1.0 6 | @description: Class is used to export a feature into CSV using field alias 7 | @requirements: Python 2.7.x, ArcGIS 10.2 8 | @copyright: Esri, 2015 9 | @original source of script is from http://mappatondo.blogspot.com/2012/10/this-is-my-python-way-to-export-feature.html with modifications 10 | """ 11 | import sys, arcpy, csv 12 | from arcpy import env 13 | class ReportToolsError(Exception): 14 | """ raised when error occurs in utility module functions """ 15 | pass 16 | def trace(): 17 | """ 18 | trace finds the line, the filename 19 | and error message and returns it 20 | to the user 21 | """ 22 | import traceback, inspect 23 | tb = sys.exc_info()[2] 24 | tbinfo = traceback.format_tb(tb)[0] 25 | filename = inspect.getfile(inspect.currentframe()) 26 | # script name + line number 27 | line = tbinfo.split(", ")[1] 28 | # Get Python syntax error 29 | # 30 | synerror = traceback.format_exc().splitlines()[-1] 31 | return line, filename, synerror 32 | class CSVExport: 33 | _tempWorkspace = None 34 | _layers = None 35 | _CSVLocation = None 36 | 37 | def __init__(self, CSVLocation="", layer=None, workspace = None): 38 | # Gets the values of where the temp feature class resides and 39 | # the output location of the CSV. 40 | try: 41 | 42 | self._tempWorkspace = workspace 43 | self._layer = layer 44 | self._CSVLocation = CSVLocation 45 | 46 | except arcpy.ExecuteError: 47 | line, filename, synerror = trace() 48 | raise ReportToolsError({ 49 | "function": "create_report_layers_using_config", 50 | "line": line, 51 | "filename": filename, 52 | "synerror": synerror, 53 | "arcpyError": arcpy.GetMessages(2), 54 | } 55 | ) 56 | except: 57 | line, filename, synerror = trace() 58 | raise ReportToolsError({ 59 | "function": "create_report_layers_using_config", 60 | "line": line, 61 | "filename": filename, 62 | "synerror": synerror, 63 | } 64 | ) 65 | 66 | 67 | def WriteCSV(self): 68 | # This function writes the CSV. It writes the header then the rows. This script omits the SHAPE fields. 69 | try: 70 | env.workspace = self._tempWorkspace 71 | 72 | #fc = arcpy.ListFeatureClasses(self._layers) 73 | # for fcs in self._layer: 74 | fcs = self._layer 75 | if arcpy.Exists(fcs): 76 | with open(self._CSVLocation, 'wb') as outFile: 77 | print "%s create" % self._CSVLocation 78 | linewriter = csv.writer(outFile, delimiter = ',') 79 | 80 | fcdescribe = arcpy.Describe(fcs) 81 | flds = fcdescribe.Fields 82 | 83 | # skip shape fields and derivatives 84 | attrs = ("areaFieldName", "lengthFieldName", "shapeFieldName") 85 | resFields = [getattr(fcdescribe, attr) for attr in attrs 86 | if hasattr(fcdescribe, attr)] 87 | 88 | header,fldLst = zip(*((fld.AliasName, fld.name) for fld in flds 89 | if fld.name not in resFields)) 90 | 91 | linewriter.writerow([h.encode('utf8') if isinstance(h, unicode) else h for h in header]) 92 | linewriter.writerows([[r.encode('utf8') if isinstance(r, unicode) else r for r in row] 93 | for row in arcpy.da.SearchCursor(fcs, fldLst)]) 94 | 95 | print "CSV file complete" 96 | return True 97 | except arcpy.ExecuteError: 98 | line, filename, synerror = trace() 99 | raise ReportToolsError({ 100 | "function": "create_report_layers_using_config", 101 | "line": line, 102 | "filename": filename, 103 | "synerror": synerror, 104 | "arcpyError": arcpy.GetMessages(2), 105 | } 106 | ) 107 | except: 108 | line, filename, synerror = trace() 109 | raise ReportToolsError({ 110 | "function": "create_report_layers_using_config", 111 | "line": line, 112 | "filename": filename, 113 | "synerror": synerror, 114 | } 115 | ) 116 | -------------------------------------------------------------------------------- /solutionreporttools/dataprep.py: -------------------------------------------------------------------------------- 1 | """ 2 | @author: ArcGIS for Utilities 3 | @contact: ArcGISTeamUtilities@esri.com 4 | @company: Esri 5 | @version: 1.0 6 | @description: Used to prep data for reporting and other geoprocessing task by copying datasets from enterprise to 7 | local geodatabases. 8 | @requirements: Python 2.7.x, ArcGIS 10.2 9 | @copyright: Esri, 2015 10 | @Usage: temp = DataPrep.DataPrep(configFilePath="path to config file") to initialize 11 | (each of the below can be called independently) 12 | temp.CopyData() 13 | This can also be called from a different application. Just need to pass in the config as a DICT object 14 | """ 15 | import arcpy 16 | import os 17 | import sys 18 | import common as Common 19 | import subprocess 20 | 21 | class DataPrepError(Exception): 22 | """ raised when error occurs in utility module functions """ 23 | pass 24 | #---------------------------------------------------------------------- 25 | def trace(): 26 | """ 27 | trace finds the line, the filename 28 | and error message and returns it 29 | to the user 30 | """ 31 | import traceback, inspect 32 | tb = sys.exc_info()[2] 33 | tbinfo = traceback.format_tb(tb)[0] 34 | filename = inspect.getfile(inspect.currentframe()) 35 | # script name + line number 36 | line = tbinfo.split(", ")[1] 37 | # Get Python syntax error 38 | # 39 | synerror = traceback.format_exc().splitlines()[-1] 40 | return line, filename, synerror 41 | class DataPrep: 42 | 43 | overWrite = None 44 | databases = None 45 | start_db = None 46 | end_db = None 47 | datasetsToInclude = None 48 | standaloneFeatures = None 49 | calledFromApp = None 50 | postExtractGP = None 51 | 52 | def __init__(self,configFilePath=""): 53 | 54 | # This checks to see whether this Data Prep is ran as a standalone or as a sub process in another application 55 | if configFilePath and configFilePath != "": 56 | if type(configFilePath) is dict: 57 | configParams = configFilePath 58 | self.calledFromApp = True 59 | else: 60 | configParams = Common.init_config_json(config_file=configFilePath) 61 | self.calledFromApp = False 62 | 63 | if "Databases" in configParams: 64 | self.databases = configParams["Databases"] 65 | 66 | return None 67 | else: 68 | print "Error, no config file path specified." 69 | return False 70 | 71 | def CopyData(self): 72 | try: 73 | print "************ BEGIN Data Copy****************" 74 | # Read the config values and store in local variables then start extraction 75 | # It all depends on if it can create a GDB, if not, all other processes are bypassed. 76 | for database in self.databases: 77 | self.overWrite = None 78 | self.databases = None 79 | self.start_db = None 80 | self.end_db = None 81 | self.datasetsToInclude = None 82 | self.standaloneFeatures = None 83 | self.postExtractGP = None 84 | retVal = True 85 | 86 | if "GDBPath" in database and "SDEPath" in database: 87 | 88 | #workspaceProp = arcpy.Describe(database["GDBPath"]) 89 | if (database["GDBPath"].lower()).find(".sde") == -1: 90 | #if (workspaceProp.workspaceType == "LocalDatabase"): 91 | self.start_db = database["SDEPath"] 92 | self.end_db = database["GDBPath"] 93 | self.overWrite = database["Overwrite"] 94 | if self._CheckCreateGDBProcess(): 95 | if "DataSets" in database: 96 | if database["DataSets"]: 97 | self.datasetsToInclude = database["DataSets"] 98 | retVal = self._CopyDatasetsProcess() 99 | if "FeatureClasses" in database: 100 | if database["FeatureClasses"]: 101 | self.standaloneFeatures = database["FeatureClasses"] 102 | retVal = self._CopyDataTypeProcess(type="FeatureClasses") 103 | if "Tables" in database: 104 | if database["Tables"]: 105 | self.standaloneFeatures = database["Tables"] 106 | retVal = self._CopyDataTypeProcess(type="Tables") 107 | else: 108 | print "The output geodatabase must be a file geodatabase" 109 | retVal = False 110 | if "PostProcesses" in database: 111 | if database["PostProcesses"]: 112 | self.postExtractGP = database["PostProcesses"] 113 | retVal = self._executePostProcess() 114 | print "************ END Data Copy ****************" 115 | return retVal 116 | except arcpy.ExecuteError: 117 | line, filename, synerror = trace() 118 | raise DataPrepError({ 119 | "function": "CopyData", 120 | "line": line, 121 | "filename": filename, 122 | "synerror": synerror, 123 | "arcpyError": arcpy.GetMessages(2), 124 | }) 125 | except (DataPrepError),e: 126 | raise e 127 | except: 128 | line, filename, synerror = trace() 129 | raise DataPrepError({ 130 | "function": "CopyData", 131 | "line": line, 132 | "filename": filename, 133 | "synerror": synerror, 134 | }) 135 | def _CopyDatasetsProcess(self): 136 | try: 137 | if self.datasetsToInclude: 138 | #Set workspaces 139 | arcpy.env.workspace = self.start_db 140 | wk2 = self.end_db 141 | datasetList = arcpy.ListDatasets() 142 | 143 | #Check GDB if not created already, create it now 144 | if self._CheckCreateGDBProcess(): 145 | 146 | for dataset in datasetList: 147 | if arcpy.Exists(dataset = dataset): 148 | 149 | name = arcpy.Describe(dataset) 150 | new_data=name.name.split('.')[-1] 151 | 152 | # if user specified *, then user wants all datasets and child objects copied 153 | if "*" in self.datasetsToInclude and len(self.datasetsToInclude) == 1: 154 | #print "Reading: {0}".format(dataset) 155 | if arcpy.Exists(wk2 + os.sep + new_data)==False: 156 | arcpy.Copy_management(dataset, wk2 + os.sep + new_data) 157 | print "Created Dataset {0} and all childs in local gdb".format(new_data) 158 | else: 159 | # If a list of dataset names is stated, check to see if it iterating dataset is in that list 160 | for checkDS in self.datasetsToInclude: 161 | if new_data == checkDS["Name"]: 162 | print "Reading: {0}".format(dataset) 163 | if arcpy.Exists(wk2 + os.sep + new_data)==False: 164 | 165 | if "*" in checkDS["FeatureClasses"] and len(checkDS["FeatureClasses"]) == 1: 166 | arcpy.Copy_management(dataset, wk2 + os.sep + new_data) 167 | print "Created Dataset {0} and all childs in local gdb".format(new_data) 168 | else: 169 | #Create the dataset envelope. Creating and not copying because user might not want all 170 | #features copied into this dataset 171 | arcpy.CreateFeatureDataset_management(self.end_db, new_data, dataset) 172 | print "Created Dataset {0} in local gdb".format(new_data) 173 | 174 | #Handles child features of the datset. Can either copy all or user defined features 175 | if name.children: 176 | self._CheckChildFeatures(ds=name.name,childList=name.children,checkList=checkDS["FeatureClasses"]) 177 | else: 178 | #Handles child features of the datset if dataset already exist. Only copy new ones 179 | if name.children: 180 | self._CheckChildFeatures(ds=name.name,childList=name.children,checkList=checkDS["FeatureClasses"]) 181 | print "Dataset {0} already exists in the end_db checking for childs".format(new_data) 182 | else: 183 | raise DataPrepError({ 184 | "function": "_CopyDatasetsProcess", 185 | "line": 125, 186 | "filename": 'dataprep', 187 | "synerror": "%s does not exist" % dataset 188 | } ) 189 | #Clear memory 190 | del dataset 191 | return True 192 | except arcpy.ExecuteError: 193 | line, filename, synerror = trace() 194 | raise DataPrepError({ 195 | "function": "_CopyDatasetsProcess", 196 | "line": line, 197 | "filename": filename, 198 | "synerror": synerror, 199 | "arcpyError": arcpy.GetMessages(2), 200 | } 201 | ) 202 | except: 203 | line, filename, synerror = trace() 204 | raise DataPrepError({ 205 | "function": "_CopyDatasetsProcess", 206 | "line": line, 207 | "filename": filename, 208 | "synerror": synerror, 209 | } 210 | ) 211 | 212 | def _CopyDataTypeProcess(self,type="FeatureClasses",ds="",fc=""): 213 | try: 214 | #Set workspaces 215 | arcpy.env.workspace = self.start_db 216 | wk2 = self.end_db 217 | result = {} 218 | if(self.calledFromApp): 219 | if isinstance(self.standaloneFeatures,dict): 220 | for key,featClass in self.standaloneFeatures.items(): 221 | if arcpy.Exists(dataset=featClass): 222 | 223 | fcName = os.path.basename(featClass) 224 | if '.' in fcName: 225 | fcSplit = fcName.split('.') 226 | fcName = fcSplit[len(fcSplit) - 1] 227 | 228 | #fcDes = arcpy.Describe(featClass) 229 | #workspace =featClass.replace(featClassBase,"") 230 | #fullName = arcpy.ParseTableName(name=featClassBase,workspace=fcDes.workspace) 231 | #nameList = fullName.split(",") 232 | #databaseName = str(nameList[0].encode('utf-8')).strip() 233 | #ownerName = str(nameList[1].encode('utf-8')).strip() 234 | #fcName = str(nameList[2].encode('utf-8')).strip() 235 | 236 | 237 | fcRes = arcpy.FeatureClassToFeatureClass_conversion(featClass,wk2,fcName) 238 | result[key] = str(fcRes) 239 | 240 | print "Completed copy on {0}".format(fcName) 241 | else: 242 | result[key] = featClass 243 | 244 | else: 245 | for featClass in self.standaloneFeatures: 246 | if featClass.upper().find(".SDE") != -1: 247 | featName = featClass.split('.')[-1] 248 | else: 249 | featName = featClass.split('/')[-1] 250 | if arcpy.Exists(dataset=featClass): 251 | arcpy.FeatureClassToFeatureClass_conversion(featClass,wk2,featName) 252 | print "Completed copy on {0}".format(featName) 253 | else: 254 | 255 | # if ds passed value exist then this call came from a copy dataset child object request. 256 | if ds != "": 257 | if arcpy.Exists(wk2 + os.sep + ds.split('.')[-1] + os.sep + fc.split('.')[-1])==False: 258 | if type == "FeatureClasses": 259 | arcpy.FeatureClassToFeatureClass_conversion(self.start_db + os.sep + ds + os.sep + fc,wk2 + os.sep + ds.split('.')[-1],fc.split('.')[-1]) 260 | #arcpy.Copy_management(self.start_db + os.sep + ds + os.sep + fc, wk2 + os.sep + ds.split('.')[-1] + os.sep + fc.split('.')[-1]) 261 | print "Completed copy on {0}".format(fc) 262 | else: 263 | # This function was called independently 264 | #Check GDB if not created already, create it now 265 | if self._CheckCreateGDBProcess(): 266 | #Determine the object type and List out 267 | if type == "Tables": 268 | dataTypeList = arcpy.ListTables() 269 | else: 270 | dataTypeList = arcpy.ListFeatureClasses() 271 | 272 | for dtl in dataTypeList: 273 | name = arcpy.Describe(dtl) 274 | new_data=name.name.split('.')[-1] 275 | 276 | # Checks to see if user wants to copy all features or just the ones that match the supplied list. 277 | if "*" in self.standaloneFeatures and len(self.standaloneFeatures) == 1: 278 | #print "Reading: {0}".format(dtl) 279 | if arcpy.Exists(wk2 + os.sep + new_data)==False: 280 | if type == "Tables": 281 | arcpy.TableToTable_conversion(dtl,wk2,new_data) 282 | else: 283 | arcpy.FeatureClassToFeatureClass_conversion(dtl,wk2,new_data) 284 | print "Completed copy on {0}".format(new_data) 285 | else: 286 | if new_data in self.standaloneFeatures: 287 | print "Reading here: {0}".format(dtl) 288 | if arcpy.Exists(wk2 + os.sep + new_data)==False: 289 | if type == "Tables": 290 | arcpy.TableToTable_conversion(dtl,wk2,new_data) 291 | else: 292 | arcpy.FeatureClassToFeatureClass_conversion(dtl,wk2,new_data) 293 | print "Completed copy on {0}".format(new_data) 294 | else: 295 | print "Feature class {0} already exists in the end_db so skipping".format(new_data) 296 | #Clear memory 297 | del dtl 298 | return True 299 | except arcpy.ExecuteError: 300 | line, filename, synerror = trace() 301 | raise DataPrepError({ 302 | "function": "CopyData", 303 | "line": line, 304 | "filename": filename, 305 | "synerror": synerror, 306 | "arcpyError": arcpy.GetMessages(2), 307 | }) 308 | except: 309 | line, filename, synerror = trace() 310 | raise DataPrepError({ 311 | "function": "CopyData", 312 | "line": line, 313 | "filename": filename, 314 | "synerror": synerror, 315 | }) 316 | 317 | def _CheckChildFeatures(self,ds="",childList="",checkList=""): 318 | #Handles child features of the datset. Can either copy all or user defined features 319 | try: 320 | if checkList: 321 | children = childList 322 | for child in children: 323 | #Determines if all features will be copied. Sending to copy feature class function 324 | #to keep the handling of different data types separate. 325 | if "*" in checkList and len(checkList) == 1: 326 | self._CopyDataTypeProcess(ds=ds,fc=child.name) 327 | else: 328 | if child.name.split('.')[-1] in checkList: 329 | self._CopyDataTypeProcess(ds=ds,fc=child.name) 330 | except: 331 | print "Unexpected error checking for child features:", sys.exc_info()[0] 332 | return False 333 | 334 | def _CheckCreateGDBProcess(self): 335 | try: 336 | # If user param is to overwrite GDB, then delete it first 337 | if self.overWrite.upper() == "YES": 338 | if arcpy.Exists(self.end_db)==True: 339 | arcpy.Delete_management(self.end_db) 340 | self.overWrite = None 341 | print "Deleted previous GDB {0}".format(self.end_db) 342 | 343 | # if the local gdb doesn't exist, then create it using the path and name given in the end_db string 344 | if arcpy.Exists(self.end_db)==False: 345 | if self.end_db.rfind("\\") != -1: 346 | lastSlash = self.end_db.rfind("\\") 347 | else: 348 | lastSlash = self.end_db.rfind("/") 349 | arcpy.CreateFileGDB_management(self.end_db[:lastSlash], self.end_db[lastSlash+1:]) 350 | self.overWrite = None 351 | print "Created geodatabase {0}".format(self.end_db[lastSlash+1:]) 352 | else: 353 | self.overWrite = None 354 | #print "Geodatabase already exists" 355 | return True 356 | except: 357 | print "Unexpected error create geodatabase:", sys.exc_info()[0] 358 | return False 359 | 360 | 361 | def _executePostProcess(self): 362 | try: 363 | print "Running post process GP" 364 | for process in self.postExtractGP: 365 | if process["ToolType"].upper() == "MODEL": 366 | arcpy.ImportToolbox(process["ToolPath"]) 367 | arcpy.gp.toolbox = process["ToolPath"] 368 | tools = arcpy.ListTools() 369 | for tool in process["Tools"]: 370 | if tool in tools: 371 | customCode = "arcpy." + tool + "()" 372 | print eval(customCode) 373 | print "Finished executing model {0}".format(tool) 374 | elif process["ToolType"].upper() == "SCRIPT": 375 | for tool in process["Tools"]: 376 | scriptPath = process["ToolPath"] + "/" + tool 377 | subprocess.call([sys.executable, os.path.join(scriptPath)]) 378 | print "Finished executing script {0}".format(tool) 379 | else: 380 | print "Sorry, not a valid tool" 381 | return True 382 | except arcpy.ExecuteError: 383 | line, filename, synerror = trace() 384 | raise DataPrepError({ 385 | "function": "CopyData", 386 | "line": line, 387 | "filename": filename, 388 | "synerror": synerror, 389 | "arcpyError": arcpy.GetMessages(2), 390 | }) 391 | except: 392 | line, filename, synerror = trace() 393 | raise DataPrepError({ 394 | "function": "CopyData", 395 | "line": line, 396 | "filename": filename, 397 | "synerror": synerror, 398 | }) 399 | 400 | 401 | -------------------------------------------------------------------------------- /solutionreporttools/gptools.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | import os 3 | import time 4 | import datetime 5 | import arcpy 6 | import copy 7 | from . import common as Common 8 | from collections import defaultdict 9 | 10 | 11 | def speedyIntersect(fcToSplit, 12 | splitFC, 13 | fieldsToAssign, 14 | countField, 15 | onlyKeepLargest, 16 | outputFC, 17 | report_areas_overlap): 18 | #arcpy.AddMessage(time.ctime()) 19 | 20 | startProcessing = time.time() 21 | arcpy.env.overwriteOutput = True 22 | tempWorkspace = arcpy.env.scratchGDB 23 | tempFCName = Common.random_string_generator() 24 | tempFC= os.path.join(tempWorkspace, tempFCName) 25 | 26 | 27 | 28 | tempFCUnionName = Common.random_string_generator() 29 | tempFCUnion = os.path.join(tempWorkspace, tempFCUnionName) 30 | 31 | arcpy.Dissolve_management(in_features=splitFC, 32 | out_feature_class=tempFCUnion, 33 | dissolve_field=fieldsToAssign, 34 | statistics_fields=None, 35 | multi_part='SINGLE_PART', 36 | unsplit_lines=None) 37 | 38 | 39 | fc = splitByLayer(fcToSplit=fcToSplit, 40 | splitFC=tempFCUnion, 41 | fieldsToAssign=fieldsToAssign, 42 | countField=countField, 43 | onlyKeepLargest=onlyKeepLargest, 44 | outputFC=outputFC, 45 | report_areas_overlap=report_areas_overlap) 46 | if arcpy.Exists(tempFCUnion): 47 | arcpy.Delete_management(tempFCUnion) 48 | def assignFieldsByIntersect(sourceFC, assignFC, fieldsToAssign, outputFC,report_areas_overlap): 49 | tempWorkspace = arcpy.env.scratchGDB 50 | 51 | assignFields = arcpy.ListFields(dataset=assignFC) 52 | assignFieldsNames = [f.name for f in assignFields] 53 | 54 | sourceFields = arcpy.ListFields(dataset=sourceFC) 55 | sourceFieldNames = [f.name for f in sourceFields] 56 | newFields = [] 57 | 58 | fms = arcpy.FieldMappings() 59 | for fieldToAssign in fieldsToAssign: 60 | if fieldToAssign not in assignFieldsNames: 61 | raise ValueError("{0} does not exist in {1}".format(fieldToAssign,assignFC)) 62 | outputField = fieldToAssign 63 | if fieldToAssign in sourceFieldNames + newFields: 64 | outputField = Common.uniqueFieldName(fieldToAssign, sourceFieldNames + newFields) 65 | 66 | newFields.append(outputField) 67 | 68 | fm = arcpy.FieldMap() 69 | fm.addInputField(assignFC, fieldToAssign) 70 | type_name = fm.outputField 71 | type_name.name = outputField 72 | fm.outputField = type_name 73 | fms.addFieldMap(fm) 74 | 75 | 76 | 77 | fieldmappings = arcpy.FieldMappings() 78 | #fieldmappings.addTable(assignFC) 79 | #fieldmappings.removeAll() 80 | fieldmappings.addTable(sourceFC) 81 | for fm in fms.fieldMappings: 82 | fieldmappings.addFieldMap(fm) 83 | 84 | if report_areas_overlap: 85 | join_operation = "JOIN_ONE_TO_MANY" 86 | else: 87 | join_operation = "JOIN_ONE_TO_ONE" 88 | outputLayer = arcpy.SpatialJoin_analysis(target_features=sourceFC, 89 | join_features=assignFC, 90 | out_feature_class=outputFC, 91 | join_operation=join_operation, 92 | join_type="KEEP_COMMON", 93 | field_mapping=fieldmappings, 94 | match_option="HAVE_THEIR_CENTER_IN", 95 | search_radius=None, 96 | distance_field_name=None)[0] 97 | 98 | 99 | return outputLayer 100 | def splitByLayer(fcToSplit, splitFC, fieldsToAssign, countField, onlyKeepLargest, outputFC, report_areas_overlap): 101 | 102 | desc = arcpy.Describe(fcToSplit) 103 | path, fileName = os.path.split(outputFC) 104 | 105 | shapeLengthFieldName ="" 106 | if desc.shapeType == "Polygon": 107 | shapeLengthFieldName = desc.areaFieldName 108 | dimension = 4 109 | measure = "area" 110 | elif desc.shapeType == "Polyline": 111 | shapeLengthFieldName = desc.lengthFieldName 112 | dimension = 2 113 | measure = "length" 114 | else: 115 | #arcpy.FeatureClassToFeatureClass_conversion(in_features=fcToSplit, 116 | #out_path=path, 117 | #out_name=fileName, 118 | #where_clause=None, 119 | #field_mapping=None, 120 | #config_keyword=None) 121 | #TODO - verifiy this is the proper call on points 122 | assignFieldsByIntersect(sourceFC=fcToSplit, 123 | assignFC=splitFC, 124 | fieldsToAssign=fieldsToAssign, 125 | outputFC=outputFC, 126 | report_areas_overlap=report_areas_overlap) 127 | 128 | return outputFC 129 | 130 | arcpy.CreateFeatureclass_management(out_path=path, 131 | out_name=fileName, 132 | geometry_type=desc.shapeType, 133 | template=fcToSplit, 134 | has_m=None, 135 | has_z=None, 136 | spatial_reference=desc.spatialReference, 137 | config_keyword=None, 138 | spatial_grid_1=None, 139 | spatial_grid_2=None, 140 | spatial_grid_3=None) 141 | #Add the reporting name field to set in the split 142 | field_assign_object = arcpy.ListFields(dataset=splitFC, 143 | wild_card=fieldsToAssign[-1], 144 | field_type=None) 145 | #Find the freport label field and add it to the output line layer to store results in 146 | #field = [field for field in field_assign_object if field.name == fieldsToAssign[-1]][0] 147 | field = filter(lambda field:field.name == fieldsToAssign[-1], field_assign_object)[0] 148 | arcpy.AddField_management(in_table=outputFC, field_name=field.baseName, field_type=field.type, 149 | field_precision=field.precision, field_scale=field.scale, 150 | field_length=field.length, field_alias=field.aliasName, 151 | field_is_nullable=field.isNullable, field_is_required=field.required, 152 | field_domain=field.domain) 153 | 154 | fldsInput1 = [f.name for f in arcpy.ListFields(fcToSplit) if f.name not in (desc.shapeFieldName,desc.oidFieldName,shapeLengthFieldName)] + \ 155 | ["OID@","shape@"] 156 | fldsInsert = [arcpy.ValidateFieldName(f.name,path) for f in arcpy.ListFields(fcToSplit) if f.name not in (desc.shapeFieldName,desc.oidFieldName,shapeLengthFieldName)] + \ 157 | [fieldsToAssign[-1],"OID@","shape@"] 158 | 159 | iOID = -2 160 | iShape = -1 161 | iAssignField = -3 162 | iCountField = None 163 | fndField = None 164 | if countField is not None and countField in fldsInput1: 165 | for f in arcpy.ListFields(outputFC): 166 | if f.name == countField: 167 | fndField = f 168 | break 169 | if fndField is None: 170 | raise ValueError("Count field not found") 171 | if fndField.type != "Double" and fndField.type != "Single" and fndField.type != "Integer" and fndField.type != "SmallInteger": 172 | raise ValueError("Count is not numeric") 173 | iCountField = fldsInput1.index(countField) 174 | 175 | with arcpy.da.SearchCursor(splitFC, ["Shape@","OID@",fieldsToAssign[-1]],spatial_reference=desc.spatialReference) as scursor: 176 | reportingGeometries = {row[1]:{"Geometry":row[0],fieldsToAssign[-1]:row[2]} for row in scursor} 177 | 178 | tempWorkspace = arcpy.env.scratchGDB 179 | tempFCName = Common.random_string_generator() 180 | tempFC= os.path.join(tempWorkspace, tempFCName) 181 | 182 | 183 | #Hide all fields to eliminate and Target_id, Join_FID conflicts 184 | target_fi = arcpy.FieldInfo() 185 | for field in desc.fields: 186 | target_fi.addField(field.name,field.name,'HIDDEN','NONE') 187 | 188 | source_fi = arcpy.FieldInfo() 189 | for field in arcpy.Describe(splitFC).fields: 190 | source_fi.addField(field.name,field.name,'HIDDEN','NONE') 191 | 192 | target_sj_no_fields = arcpy.MakeFeatureLayer_management(fcToSplit,"target_sj_no_fields",field_info=target_fi) 193 | join_sj_no_fields = arcpy.MakeFeatureLayer_management(splitFC,"join_sj_no_fields",field_info=source_fi) 194 | 195 | geoToLayerMap = arcpy.SpatialJoin_analysis(target_features=target_sj_no_fields, 196 | join_features=join_sj_no_fields, 197 | out_feature_class=tempFC, 198 | join_operation="JOIN_ONE_TO_MANY", 199 | join_type="KEEP_COMMON", 200 | field_mapping=None, 201 | match_option="INTERSECT", 202 | search_radius=None, 203 | distance_field_name=None)[0] 204 | 205 | ddict = defaultdict(list) 206 | 207 | with arcpy.da.SearchCursor(geoToLayerMap, ("TARGET_FID", "JOIN_FID")) as sCursor: 208 | for row in sCursor: 209 | ddict[row[0]].append(reportingGeometries[row[1]]) 210 | 211 | layerToSplit = arcpy.MakeFeatureLayer_management(fcToSplit,"layerToSplit") 212 | result = arcpy.SelectLayerByLocation_management(layerToSplit, "CROSSED_BY_THE_OUTLINE_OF", splitFC) 213 | 214 | rowCount = int(arcpy.GetCount_management(layerToSplit)[0]) 215 | j = 0 216 | rowsInserted = 0 217 | totalDif = 0 218 | with arcpy.da.SearchCursor(layerToSplit, fldsInput1) as scursor: 219 | with arcpy.da.InsertCursor(outputFC, fldsInsert) as icursor: 220 | 221 | for j,row in enumerate(scursor,1): 222 | newRows = [] 223 | lens = [] 224 | row = list(row) 225 | rowGeo = row[iShape] 226 | origLength = getattr(rowGeo, measure) 227 | row[iShape] = None 228 | for geo in ddict[row[iOID]]: 229 | newRow = copy.copy(row) 230 | #if not row[iShape].disjoint(geo): 231 | splitGeo = rowGeo.intersect(geo['Geometry'], dimension) 232 | 233 | newRow[iShape] = splitGeo 234 | splitLength = getattr(splitGeo, measure) 235 | if iCountField is not None: 236 | if row[iCountField] is not None and splitLength is not None and origLength is not None and origLength !=0: 237 | newRow[iCountField] = float(row[iCountField]) * (float(splitLength) / float(origLength)) 238 | else: 239 | pass 240 | lens.append(float(splitLength)) 241 | #newRows.append(copy.copy(newRow)) 242 | newRow.insert(iAssignField + 1, geo[fieldsToAssign[-1]]) 243 | newRows.append(newRow) 244 | if onlyKeepLargest == True: 245 | result = icursor.insertRow(newRows[lens.index(max(lens))]) 246 | rowsInserted = rowsInserted + 1 247 | else: 248 | newOIDS = [] 249 | for newRow in newRows: 250 | result = icursor.insertRow(newRow) 251 | newOIDS.append(str(result)) 252 | rowsInserted = rowsInserted + 1 253 | #if rowsInserted % 250 == 0: 254 | #print (rowsInserted) 255 | dif = sum(lens) / origLength 256 | if (dif > 1.0001 or dif < .9999) and report_areas_overlap == False: 257 | totalDif = totalDif + (origLength - sum(lens)) 258 | print ("Original Row ID: {3} and new features with OIDs of {0} combined count field did not add up to the original: new combined {1}, original {2}. \n This can be caused by self overlapping lines or data falling outside the split areas. \n\tLayer: {4}".format(",".join(newOIDS),str(sum(lens)),str(origLength),row[iOID],desc.catalogPath)) 259 | 260 | if totalDif > 0 and report_areas_overlap == False: 261 | print ("Total difference from source to results: {0}".format(totalDif)) 262 | result = arcpy.SelectLayerByLocation_management(in_layer=layerToSplit, 263 | selection_type="SWITCH_SELECTION") 264 | rowCount = int(arcpy.GetCount_management(layerToSplit)[0]) 265 | if rowCount > 0: 266 | 267 | none_split_fc_name = Common.random_string_generator() 268 | none_split_fc= os.path.join(tempWorkspace, none_split_fc_name) 269 | 270 | assignFieldsByIntersect(sourceFC=layerToSplit, 271 | assignFC=splitFC, 272 | fieldsToAssign=fieldsToAssign, 273 | outputFC=none_split_fc, 274 | report_areas_overlap=report_areas_overlap) 275 | result = arcpy.Append_management(inputs=none_split_fc, 276 | target=outputFC, 277 | schema_type = "NO_TEST", 278 | field_mapping=None, 279 | subtype=None) 280 | if arcpy.Exists(none_split_fc): 281 | arcpy.Delete_management(none_split_fc) 282 | if arcpy.Exists(tempFC): 283 | arcpy.Delete_management(tempFC) 284 | return outputFC 285 | 286 | -------------------------------------------------------------------------------- /solutionreporttools/reporttools.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | import arcpy 4 | from arcpy import env 5 | from copy import deepcopy 6 | import gc 7 | import time 8 | # New CSV exporter class 9 | import csvexport as CSVExport 10 | import sys 11 | from dateutil.parser import parse 12 | from . import dataprep as DataPrep 13 | from . import common as Common 14 | from . import gptools 15 | from distutils.util import strtobool 16 | 17 | import subprocess 18 | 19 | dateTimeFormat = '%Y-%m-%d %H:%M' 20 | tempCSVName = "mergedreport" 21 | 22 | 23 | # ---------------------------------------------------------------------- 24 | def trace(): 25 | """ 26 | trace finds the line, the filename 27 | and error message and returns it 28 | to the user 29 | """ 30 | import traceback, inspect 31 | tb = sys.exc_info()[2] 32 | tbinfo = traceback.format_tb(tb)[0] 33 | filename = inspect.getfile(inspect.currentframe()) 34 | # script name + line number 35 | line = tbinfo.split(", ")[1] 36 | # Get Python syntax error 37 | # 38 | synerror = traceback.format_exc().splitlines()[-1] 39 | return line, filename, synerror 40 | 41 | 42 | class ReportToolsError(Exception): 43 | """ raised when error occurs in utility module functions """ 44 | pass 45 | 46 | 47 | def getLayerName(fc): 48 | fcName = os.path.basename(fc) 49 | if '.' in fcName: 50 | fcSplit = fcName.split('.') 51 | fcName = fcSplit[len(fcSplit) - 1] 52 | return fcName 53 | 54 | 55 | # ---------------------------------------------------------------------- 56 | def reportDataPrep(reportConfig): 57 | try: 58 | print " " 59 | print "-----Data Prep Section Starting-----" 60 | 61 | startTime = datetime.datetime.now() 62 | 63 | if 'ExportDataLocally' in reportConfig and (reportConfig['ExportDataLocally'].upper() == "YES" or reportConfig[ 64 | 'ExportDataLocally'].upper() == "TRUE"): 65 | if ('TempExportLocation' in reportConfig and reportConfig["TempExportLocation"] != ""): 66 | startSectTime = datetime.datetime.now() 67 | print "Data Copy starting: %s" % (startSectTime.strftime(dateTimeFormat)) 68 | 69 | outputWorkspace = reportConfig["TempExportLocation"] 70 | _CheckCreateGDBProcess(outputWorkspace) 71 | if 'ReportingAreas' in reportConfig: 72 | if arcpy.Exists(dataset=reportConfig['ReportingAreas']) == True: 73 | fcName = getLayerName(reportConfig['ReportingAreas']) 74 | reportConfig['ReportingAreas'] = str( 75 | arcpy.FeatureClassToFeatureClass_conversion(reportConfig['ReportingAreas'], outputWorkspace, 76 | fcName)) 77 | # arcpy.ClearWorkspaceCache_management(reportConfig['ReportingAreas']) 78 | print "Completed copy on {0}".format(fcName) 79 | else: 80 | raise ReportToolsError({ 81 | "function": "reportDataPrep", 82 | "line": 62, 83 | "filename": "reportTools.py", 84 | "synerror": reportConfig['ReportingAreas'] + " does not exist", 85 | }) 86 | 87 | 88 | else: 89 | print "Warning: ReportingAreas parameter is missing" 90 | if 'Data' in reportConfig and isinstance(reportConfig['Data'], dict): 91 | for key, featClass in reportConfig['Data'].items(): 92 | if arcpy.Exists(dataset=featClass) == True: 93 | fcName = getLayerName(fc=featClass) 94 | fcRes = arcpy.FeatureClassToFeatureClass_conversion(featClass, outputWorkspace, fcName) 95 | # arcpy.ClearWorkspaceCache_management(featClass) 96 | reportConfig['Data'][key] = str(fcRes) 97 | 98 | print "Completed copy on {0}".format(fcName) 99 | else: 100 | reportConfig['Data'][key] = featClass 101 | else: 102 | print "Warning: Data section is missing" 103 | print "Data Copy complete, time to complete: %s" % str(datetime.datetime.now() - startSectTime) 104 | if 'PreProcessingTasks' in reportConfig and reportConfig['PreProcessingTasks']: 105 | 106 | startSectTime = datetime.datetime.now() 107 | print " " 108 | print "Preprocessing: %s" % (startSectTime.strftime(dateTimeFormat)) 109 | 110 | for process in reportConfig['PreProcessingTasks']: 111 | if process["ToolType"].upper() == "MODEL": 112 | if arcpy.Exists(process["ToolPath"]): 113 | arcpy.ImportToolbox(process["ToolPath"]) 114 | arcpy.gp.AddToolbox(process["ToolPath"]) 115 | for tool in process["Tools"]: 116 | if hasattr(arcpy, tool): 117 | getattr(arcpy, tool)() 118 | print "Finished executing model {0}".format(tool) 119 | elif hasattr(arcpy.gp, "{}_{}".format(tool, arcpy.gp.toolbox)): 120 | getattr(arcpy.gp, "{}_{}".format(tool, arcpy.gp.toolbox))() 121 | print "Finished executing model {0}".format(tool) 122 | else: 123 | print "%s was not found, please verify the name" % tool 124 | else: 125 | print "%s was not found, please verify the path" % process["ToolPath"] 126 | elif process["ToolType"].upper() == "SCRIPT": 127 | for tool in process["Tools"]: 128 | scriptPath = process["ToolPath"] + "/" + tool 129 | subprocess.call([sys.executable, os.path.join(scriptPath)]) 130 | print "Finished executing script {0}".format(tool) 131 | else: 132 | print "Sorry, not a valid tool" 133 | print "PreProcess complete, time to complete: %s" % str(datetime.datetime.now() - startSectTime) 134 | arcpy.ClearWorkspaceCache_management() 135 | print "-----Data Prep Section complete, time to complete: %s-----" % str(datetime.datetime.now() - startTime) 136 | return reportConfig 137 | except arcpy.ExecuteError: 138 | line, filename, synerror = trace() 139 | raise ReportToolsError({ 140 | "function": "CopyData", 141 | "line": line, 142 | "filename": filename, 143 | "synerror": synerror, 144 | "arcpyError": arcpy.GetMessages(2), 145 | }) 146 | except (ReportToolsError), e: 147 | raise e 148 | except: 149 | line, filename, synerror = trace() 150 | raise ReportToolsError({ 151 | "function": "CopyData", 152 | "line": line, 153 | "filename": filename, 154 | "synerror": synerror, 155 | }) 156 | 157 | 158 | # ---------------------------------------------------------------------- 159 | def _CheckCreateGDBProcess(outputWorkspace): 160 | try: 161 | # If user param is to overwrite GDB, then delete it first 162 | if arcpy.Exists(outputWorkspace) == True: 163 | arcpy.Delete_management(outputWorkspace) 164 | print "Deleted previous GDB {0}".format(outputWorkspace) 165 | 166 | # if the local gdb doesn't exist, then create it using the path and name given in the end_db string 167 | if arcpy.Exists(outputWorkspace) == False: 168 | if outputWorkspace.rfind("\\") != -1: 169 | lastSlash = outputWorkspace.rfind("\\") 170 | else: 171 | lastSlash = outputWorkspace.rfind("/") 172 | arcpy.CreateFileGDB_management(outputWorkspace[:lastSlash], outputWorkspace[lastSlash + 1:]) 173 | 174 | print "Created geodatabase {0}".format(outputWorkspace[lastSlash + 1:]) 175 | 176 | 177 | except arcpy.ExecuteError: 178 | line, filename, synerror = trace() 179 | raise ReportToolsError({ 180 | "function": "_CheckCreateGDBProcess", 181 | "line": line, 182 | "filename": filename, 183 | "synerror": synerror, 184 | "arcpyError": arcpy.GetMessages(2), 185 | }) 186 | except: 187 | line, filename, synerror = trace() 188 | raise ReportToolsError({ 189 | "function": "_CheckCreateGDBProcess", 190 | "line": line, 191 | "filename": filename, 192 | "synerror": synerror, 193 | }) 194 | 195 | 196 | # ---------------------------------------------------------------------- 197 | def create_report_layers_using_config(config): 198 | arcpy.env.overwriteOutput = True 199 | 200 | reporting_areas_ID_field = None 201 | reporting_areas = None 202 | reports = None 203 | report_msg = None 204 | idRes = None 205 | try: 206 | 207 | if arcpy.Exists(config["ResultsGDB"]) == False: 208 | 209 | gdbName = os.path.basename(config["ResultsGDB"]) 210 | if not '.sde' in gdbName: 211 | path = os.path.dirname(config["ResultsGDB"]) 212 | if path == '': 213 | path = os.getcwd() 214 | config["ResultsGDB"] = os.path.join(path, gdbName) 215 | arcpy.CreateFileGDB_management(out_folder_path=path, 216 | out_name=gdbName, 217 | out_version=None) 218 | print "%s created in %s" % (gdbName, path) 219 | 220 | # To handle CSV export, a temp FC is created. This code just checks and deletes it, if it exist. 221 | 222 | _TempFC = os.path.join(config["ResultsGDB"], tempCSVName) 223 | deleteFC([_TempFC]) 224 | reports = config['Reports'] 225 | 226 | report_msg = [] 227 | 228 | if 'ReportingAreas' in config: 229 | reporting_areas = config['ReportingAreas'] 230 | 231 | reporting_areas_ID_field = config['ReportingAreasIDField'] 232 | 233 | if arcpy.Exists(reporting_areas) == False: 234 | raise ReportToolsError({ 235 | "function": "create_report_layers_using_config", 236 | "line": 61, 237 | "filename": 'reporttools', 238 | "synerror": 'Report data cannot be located' 239 | }) 240 | idRes = validate_id_field(reporting_areas=reporting_areas, report_ID_field=reporting_areas_ID_field) 241 | if not idRes is None: 242 | reporting_areas = idRes['ReportLayer'] 243 | reporting_areas_ID_field = idRes['IDField'] 244 | if arcpy.Exists(config['SchemaGDB']) == False: 245 | raise ReportToolsError({ 246 | "function": "create_report_layers_using_config", 247 | "line": 61, 248 | "filename": 'reporttools', 249 | "synerror": '%s is not valid in the SchemaGDB parameter' % config['SchemaGDB'] 250 | } 251 | ) 252 | 253 | # if not os.path.isabs(reporting_areas): 254 | # reporting_areas =os.path.abspath(reporting_areas) 255 | for i in reports: 256 | if not ('RunReport' in i): 257 | i['RunReport'] = 'yes' 258 | print "Report is missing the RunReport parameter: type string, values, True or False" 259 | if 'RunReport' in i and (i['RunReport'].upper() == "YES" or i['RunReport'].upper() == "TRUE"): 260 | datasetName = os.path.dirname(i['ReportResult']) 261 | layerName = os.path.basename(i['ReportResult']) 262 | datasetPath = os.path.join(config["ResultsGDB"], datasetName) 263 | reportSchema = os.path.join(config['SchemaGDB'], i['ReportResultSchema']) 264 | if arcpy.Exists(reportSchema) == False: 265 | raise ReportToolsError({ 266 | "function": "create_report_layers_using_config", 267 | "line": 61, 268 | "filename": 'reporttools', 269 | "synerror": 'Report Schema %s is not valid' % reportSchema 270 | } 271 | ) 272 | if arcpy.Exists(datasetPath) == False and (datasetName != layerName): 273 | 274 | datasetNameSch = os.path.dirname(i['ReportResultSchema']) 275 | layerNameSch = os.path.basename(i['ReportResultSchema']) 276 | if layerNameSch != datasetNameSch: 277 | reportSchema = os.path.join(config['SchemaGDB'], datasetNameSch) 278 | else: 279 | reportSchema = os.path.join(config['SchemaGDB'], layerNameSch) 280 | reportSpatRef = arcpy.Describe(reportSchema).spatialReference 281 | arcpy.CreateFeatureDataset_management(out_dataset_path=config["ResultsGDB"], out_name=datasetName, 282 | spatial_reference=reportSpatRef) 283 | print "%s feature dataset created in %s" % (datasetName, config["ResultsGDB"]) 284 | del datasetName 285 | del datasetPath 286 | del layerName 287 | if i['Type'].upper() == "JOINCALCANDLOAD": 288 | create_calcload_report(report_params=i, datasources=config) 289 | elif i['Type'].upper() == "RECLASS": 290 | if arcpy.Exists(reporting_areas) == False: 291 | raise ReportToolsError({ 292 | "function": "create_report_layers_using_config", 293 | "line": 61, 294 | "filename": 'reporttools', 295 | "synerror": 'Report data cannot be located' 296 | }) 297 | report_msg.append(create_reclass_report(reporting_areas=reporting_areas, 298 | reporting_areas_ID_field=reporting_areas_ID_field, 299 | report_params=i, datasources=config)) 300 | elif i['Type'].upper() in ["AVERAGE", "STATISTIC"]: 301 | if arcpy.Exists(reporting_areas) == False: 302 | raise ReportToolsError({ 303 | "function": "create_report_layers_using_config", 304 | "line": 61, 305 | "filename": 'reporttools', 306 | "synerror": 'Report data cannot be located' 307 | }) 308 | report_msg.append(create_average_report(reporting_areas=reporting_areas, 309 | reporting_areas_ID_field=reporting_areas_ID_field, 310 | report_params=i, datasources=config)) 311 | else: 312 | print "Unsupported report type" 313 | 314 | # After all the different reports have been run, export a single CSV of results. 315 | csvProcess = CSVExport.CSVExport(CSVLocation=config["CSVOutputLocation"], 316 | layer=_TempFC, 317 | workspace=config["ResultsGDB"]) 318 | report_msg.append(csvProcess.WriteCSV()) 319 | 320 | if not idRes is None: 321 | deleteFC(in_datasets=[idRes['ReportLayer']]) 322 | if 'error' in report_msg: 323 | return False 324 | else: 325 | return True 326 | 327 | 328 | except arcpy.ExecuteError: 329 | line, filename, synerror = trace() 330 | raise ReportToolsError({ 331 | "function": "create_report_layers_using_config", 332 | "line": line, 333 | "filename": filename, 334 | "synerror": synerror, 335 | "arcpyError": arcpy.GetMessages(2), 336 | }) 337 | except (ReportToolsError), e: 338 | raise e 339 | except: 340 | line, filename, synerror = trace() 341 | raise ReportToolsError({ 342 | "function": "create_report_layers_using_config", 343 | "line": line, 344 | "filename": filename, 345 | "synerror": synerror, 346 | } 347 | ) 348 | 349 | finally: 350 | reports = None 351 | report_msg = None 352 | reporting_areas_ID_field = None 353 | reporting_areas = None 354 | del reports 355 | del report_msg 356 | del reporting_areas_ID_field 357 | del reporting_areas 358 | 359 | gc.collect() 360 | 361 | 362 | # ---------------------------------------------------------------------- 363 | def create_calcload_report(report_params, datasources): 364 | filt_layer = None 365 | reporting_layer = None 366 | reporting_layer_id_field = None 367 | joinInfo = None 368 | field_map = None 369 | sql = None 370 | report_date = None 371 | report_schema = None 372 | report_result = None 373 | _tempWorkspace = None 374 | _tempTableName = None 375 | _tempTableFull = None 376 | _procData = None 377 | inputCnt = None 378 | try: 379 | 380 | filt_layer = "filter_layer" 381 | 382 | reporting_layer = datasources["Data"][report_params['Data']] 383 | reporting_layer_id_field = report_params['DataIDField'] 384 | joinInfo = report_params['JoinInfo'] 385 | reporting_areas = joinInfo['FeatureData'] 386 | reporting_areas_ID_field = joinInfo['FeatureDataIDField'] 387 | field_map = report_params['FieldMap'] 388 | 389 | sql = report_params['FilterSQL'] 390 | 391 | report_date = report_params["ReportDateField"] 392 | report_schema = datasources["SchemaGDB"] + "/" + report_params['ReportResultSchema'] 393 | report_result = datasources["ResultsGDB"] + "/" + report_params['ReportResult'] 394 | 395 | # if not os.path.isabs(report_result): 396 | # report_result = os.path.abspath( report_result) 397 | 398 | # if not os.path.isabs(report_schema): 399 | # report_schema = os.path.abspath( report_schema) 400 | 401 | _tempWorkspace = env.scratchGDB 402 | _tempTableName = Common.random_string_generator() 403 | _tempTableFull = os.path.join(_tempWorkspace, _tempTableName) 404 | 405 | if sql == '' or sql is None or sql == '1=1' or sql == '1==1': 406 | filt_layer = reporting_layer 407 | else: 408 | # arcpy.MakeQueryTable_management(in_table=reporting_layer,out_table=filt_layer,in_key_field_option="USE_KEY_FIELDS",in_key_field="#",in_field="#",where_clause=sql) 409 | try: 410 | arcpy.MakeFeatureLayer_management(reporting_layer, filt_layer, sql, "", "") 411 | 412 | except: 413 | try: 414 | arcpy.TableToTable_conversion(in_rows=reporting_layer, out_path=_tempWorkspace, 415 | out_name=_tempTableName) 416 | 417 | arcpy.MakeTableView_management(in_table=_tempTableFull, out_view=filt_layer, where_clause=sql, 418 | workspace="#", field_info="#") 419 | 420 | except: 421 | pass 422 | inputCnt = int(arcpy.GetCount_management(in_rows=filt_layer)[0]) 423 | className = os.path.basename(report_result) 424 | layerPath = os.path.dirname(report_result) 425 | arcpy.FeatureClassToFeatureClass_conversion(in_features=report_schema, 426 | out_path=layerPath, 427 | out_name=className, 428 | where_clause=None, 429 | field_mapping=None, 430 | config_keyword=None) 431 | # arcpy.Copy_management(report_schema,report_result,"FeatureClass") 432 | 433 | if inputCnt == 0: 434 | 435 | print "%s was created or updated" % report_result 436 | else: 437 | 438 | _procData = calculate_load_results(feature_data=reporting_areas, 439 | feature_data_id_field=reporting_areas_ID_field, 440 | join_table=filt_layer, 441 | join_table_id_field=reporting_layer_id_field, 442 | report_date_field=report_date, 443 | report_result=report_result, 444 | field_map=field_map 445 | ) 446 | 447 | deleteFC([_procData]) 448 | if arcpy.Exists(_tempTableFull): 449 | deleteFC([_tempTableFull]) 450 | 451 | print "%s was created or updated" % report_result 452 | 453 | except arcpy.ExecuteError: 454 | line, filename, synerror = trace() 455 | raise ReportToolsError({ 456 | "function": "create_calcload_report", 457 | "line": line, 458 | "filename": filename, 459 | "synerror": synerror, 460 | "arcpyError": arcpy.GetMessages(2), 461 | } 462 | ) 463 | except: 464 | line, filename, synerror = trace() 465 | raise ReportToolsError({ 466 | "function": "create_calcload_report", 467 | "line": line, 468 | "filename": filename, 469 | "synerror": synerror, 470 | } 471 | ) 472 | finally: 473 | filt_layer = None 474 | reporting_layer = None 475 | reporting_layer_id_field = None 476 | joinInfo = None 477 | field_map = None 478 | sql = None 479 | report_date = None 480 | report_schema = None 481 | report_result = None 482 | _tempWorkspace = None 483 | _tempTableName = None 484 | _tempTableFull = None 485 | _procData = None 486 | inputCnt = None 487 | 488 | del filt_layer 489 | del reporting_layer 490 | del reporting_layer_id_field 491 | del joinInfo 492 | del field_map 493 | del sql 494 | del report_date 495 | del report_schema 496 | del report_result 497 | del _tempWorkspace 498 | del _tempTableName 499 | del _tempTableFull 500 | del _procData 501 | del inputCnt 502 | 503 | gc.collect() 504 | 505 | 506 | # ---------------------------------------------------------------------- 507 | def create_reclass_report(reporting_areas, reporting_areas_ID_field, report_params, datasources): 508 | classified_layer_field_name = None 509 | filt_layer = None 510 | reporting_layer = None 511 | field_map = None 512 | sql = None 513 | count_field = None 514 | reclass_map = None 515 | report_schema = None 516 | report_result = None 517 | report_date_field = None 518 | report_ID_field = None 519 | result_exp = None 520 | inputCnt = None 521 | classified_layer = None 522 | pivot_layer = None 523 | report_copy = None 524 | try: 525 | 526 | classified_layer_field_name = "reclass" 527 | 528 | filt_layer = "filter_layer" 529 | 530 | # reporting_layer = report_params['Data'] 531 | if not report_params['Data'] in datasources["Data"]: 532 | print "ERROR: missing %s in the data section of the config" % report_params['Data'] 533 | raise ReportToolsError({ 534 | "function": "create_reclass_report", 535 | "line": 285, 536 | "filename": "reportTools", 537 | "synerror": 'A report has an invalid data section' 538 | } 539 | ) 540 | reporting_layer = datasources["Data"][report_params['Data']] 541 | 542 | matching_fields = arcpy.ListFields(dataset=reporting_layer, 543 | wild_card=reporting_areas_ID_field, 544 | field_type=None) 545 | 546 | matched_field = filter(lambda field: field.name == reporting_areas_ID_field, matching_fields) 547 | if len(matched_field) > 0: 548 | raise ReportToolsError({ 549 | "function": "create_reclass_report", 550 | "line": 285, 551 | "filename": "reportTools", 552 | "synerror": 'The data cannot contain the reporting areas id field.' 553 | } 554 | ) 555 | sql = report_params['FilterSQL'] 556 | 557 | count_field = report_params['CountField'] 558 | reclass_map = report_params['ReclassMap'] 559 | adjust_count = report_params.get("AdjustCountField", False) 560 | if isinstance(adjust_count, str): 561 | if adjust_count.upper() == 'TRUE': 562 | adjust_count = True 563 | else: 564 | adjust_count = False 565 | if 'ReclassType' in report_params: 566 | reclass_type = report_params['ReclassType'] 567 | else: 568 | reclass_type = 'split' 569 | report_schema = os.path.join(datasources["SchemaGDB"], report_params['ReportResultSchema']) 570 | if arcpy.Exists(report_schema) == False: 571 | raise ReportToolsError({ 572 | "function": "create_reclass_report", 573 | "line": 285, 574 | "filename": "reportTools", 575 | "synerror": '%s does not exist' % report_schema 576 | } 577 | ) 578 | report_result = os.path.join(datasources["ResultsGDB"], report_params['ReportResult']) 579 | 580 | if 'FieldMap' in report_params: 581 | field_map = report_params['FieldMap'] 582 | else: 583 | field_map = [] 584 | 585 | # if not os.path.isabs(report_result): 586 | # report_result =os.path.abspath( report_result) 587 | 588 | # if not os.path.isabs(report_schema): 589 | # report_schema =os.path.abspath( report_schema) 590 | if 'ReportOutputType' in report_params: 591 | report_output_type = report_params['ReportOutputType'] 592 | 593 | else: 594 | print "Report is missing the ReportOutputType parameter: type string, values: Overwrite, Append, Update" 595 | report_output_type = 'Overwrite' 596 | if 'ReportAreasOverlap' in report_params: 597 | report_areas_overlap = bool(strtobool(report_params['ReportAreasOverlap'])) 598 | else: 599 | print "Report is missing the ReportAreasOverlap parameter: type string, values: True, False" 600 | report_areas_overlap = True 601 | 602 | if not report_areas_overlap: 603 | intersect_name = os.path.join("in_memory", Common.random_string_generator()) 604 | 605 | arcpy.Intersect_analysis(in_features="'{}' #".format(reporting_areas), 606 | out_feature_class=intersect_name, 607 | join_attributes='ONLY_FID', 608 | cluster_tolerance=None, 609 | output_type='INPUT') 610 | overlap_count = int(arcpy.GetCount_management(intersect_name)[0]) 611 | if overlap_count: 612 | print "Warning: The reporting areas have overlapping features and specified ReportAreasOverlap is false in your configuration." 613 | 614 | reporting_areas_filter = None 615 | if 'ReportingAreasFilter' in report_params and report_params['ReportingAreasFilter'] != "": 616 | filt_report_layer = 'filteredReportingAreas' 617 | reporting_areas_filter = report_params['ReportingAreasFilter'] 618 | arcpy.MakeFeatureLayer_management(reporting_areas, filt_report_layer, reporting_areas_filter, "", "") 619 | else: 620 | filt_report_layer = reporting_areas 621 | reporting_areas_filter = '' 622 | report_date_field = report_params['ReportDateField'] 623 | report_ID_field = report_params['ReportIDField'] 624 | result_exp = report_params['FinalResultExpression'] 625 | 626 | if 'UseArcMapExpression' in report_params: 627 | useArcMapExpression = report_params['UseArcMapExpression'] 628 | else: 629 | useArcMapExpression = False 630 | 631 | # if type(value_field) is tuple: 632 | # average_value = value_field[1] 633 | # value_field = value_field[0] 634 | 635 | validate_schema_map(report_schema=report_schema, 636 | reclass_map=reclass_map, 637 | report_date_field=report_date_field, 638 | report_ID_field=report_ID_field) 639 | 640 | if sql == '' or sql is None or sql == '1=1' or sql == '1==1': 641 | filt_layer = reporting_layer 642 | else: 643 | # arcpy.MakeQueryTable_management(in_table=reporting_layer,out_table=filt_layer,in_key_field_option="USE_KEY_FIELDS",in_key_field="#",in_field="#",where_clause=sql) 644 | try: 645 | arcpy.MakeFeatureLayer_management(reporting_layer, filt_layer, sql, "", "") 646 | except arcpy.ExecuteError: 647 | line, filename, synerror = trace() 648 | if 'Description' in report_params: 649 | errorString = "Report Description: %s" % report_params['Description'] 650 | else: 651 | errorString = synerror 652 | raise ReportToolsError({ 653 | "function": "create_reclass_report: %s" % reporting_layer, 654 | "line": line, 655 | "filename": filename, 656 | "synerror": errorString, 657 | "arcpyError": arcpy.GetMessages(2), 658 | }) 659 | 660 | inputCnt = int(arcpy.GetCount_management(in_rows=filt_layer)[0]) 661 | if inputCnt == 0: 662 | copy_empty_report(reporting_areas=filt_report_layer, reporting_areas_ID_field=reporting_areas_ID_field, 663 | report_schema=report_schema, 664 | report_result=report_result, 665 | reclass_map=reclass_map, 666 | report_date_field=report_date_field, 667 | report_ID_field=report_ID_field, 668 | reportParam=report_params, 669 | config=datasources, report_output_type=report_output_type) 670 | print "%s was created or updated" % report_result 671 | else: 672 | # print "at split_reclass" 673 | classified_layer = split_reclass(reporting_areas=filt_report_layer, 674 | reporting_areas_ID_field=reporting_areas_ID_field, 675 | reporting_layer=filt_layer, 676 | field_map=field_map, 677 | reclass_map=reclass_map, 678 | classified_layer_field_name=classified_layer_field_name, 679 | count_field_name=count_field, 680 | use_arcmap_expression=useArcMapExpression, 681 | reclass_type=reclass_type, 682 | adjust_count=adjust_count, 683 | report_areas_overlap=report_areas_overlap) 684 | 685 | # print "at classified_pivot" 686 | pivot_layer = classified_pivot(classified_layer=classified_layer, 687 | classified_layer_field_name=classified_layer_field_name, 688 | reporting_areas_ID_field=reporting_areas_ID_field, count_field=count_field) 689 | # print "at copy_report_data_schema" 690 | report_copy = copy_report_data_schema(reporting_areas=filt_report_layer, 691 | reporting_areas_ID_field=reporting_areas_ID_field, 692 | report_schema=report_schema, 693 | report_result=report_result, join_layer=pivot_layer, 694 | report_output_type=report_output_type) 695 | 696 | # print "at calculate_report_results" 697 | calculate_report_results(report_result=report_result, reporting_areas_ID_field=reporting_areas_ID_field, 698 | report_copy=report_copy, 699 | reclass_map=reclass_map, report_date_field=report_date_field, 700 | report_ID_field=report_ID_field, 701 | exp=result_exp, reportParam=report_params, config=datasources, 702 | use_arcmap_expression=useArcMapExpression, 703 | report_output_type=report_output_type, delete_append_sql=reporting_areas_filter) 704 | # print "at deleteFC" 705 | deleteFC([classified_layer, pivot_layer, report_copy]) 706 | 707 | print "%s was created or updated" % report_result 708 | 709 | except arcpy.ExecuteError: 710 | line, filename, synerror = trace() 711 | raise ReportToolsError({ 712 | "function": "create_reclass_report", 713 | "line": line, 714 | "filename": filename, 715 | "synerror": synerror, 716 | "arcpyError": arcpy.GetMessages(2), 717 | }) 718 | except (ReportToolsError), e: 719 | raise e 720 | except: 721 | line, filename, synerror = trace() 722 | raise ReportToolsError({ 723 | "function": "create_reclass_report", 724 | "line": line, 725 | "filename": filename, 726 | "synerror": synerror, 727 | } 728 | ) 729 | finally: 730 | classified_layer_field_name = None 731 | filt_layer = None 732 | reporting_layer = None 733 | field_map = None 734 | sql = None 735 | count_field = None 736 | reclass_map = None 737 | report_schema = None 738 | report_result = None 739 | report_date_field = None 740 | report_ID_field = None 741 | result_exp = None 742 | inputCnt = None 743 | classified_layer = None 744 | pivot_layer = None 745 | report_copy = None 746 | 747 | del classified_layer_field_name 748 | del filt_layer 749 | del reporting_layer 750 | del field_map 751 | del sql 752 | del count_field 753 | del reclass_map 754 | del report_schema 755 | del report_result 756 | del report_date_field 757 | del report_ID_field 758 | del result_exp 759 | del inputCnt 760 | del classified_layer 761 | del pivot_layer 762 | del report_copy 763 | gc.collect() 764 | 765 | 766 | # ---------------------------------------------------------------------- 767 | def create_average_report(reporting_areas, reporting_areas_ID_field, report_params, datasources): 768 | filt_layer = None 769 | reporting_layer = None 770 | field_map = None 771 | sql = None 772 | code_exp = None 773 | report_schema = None 774 | report_result = None 775 | report_date_field = None 776 | report_ID_field = None 777 | avg_field_map = None 778 | inputCnt = None 779 | result = None 780 | report_copy = None 781 | 782 | try: 783 | 784 | filt_layer = "filter_layer" 785 | reporting_layer = datasources["Data"][report_params['Data']] 786 | if 'FieldMap' in report_params: 787 | field_map = report_params['FieldMap'] 788 | else: 789 | field_map = [] 790 | sql = report_params['FilterSQL'] 791 | 792 | code_exp = report_params['PreCalcExpression'] 793 | 794 | report_schema = os.path.join(datasources["SchemaGDB"], report_params['ReportResultSchema']) 795 | report_result = os.path.join(datasources["ResultsGDB"], report_params['ReportResult']) 796 | 797 | # if not os.path.isabs(report_result): 798 | # report_result =os.path.abspath( report_result) 799 | 800 | # if not os.path.isabs(report_schema): 801 | # report_schema =os.path.abspath( report_schema) 802 | 803 | if 'ReportOutputType' in report_params: 804 | report_output_type = report_params['ReportOutputType'] 805 | 806 | else: 807 | print "Report is missing the ReportOutputType parameter: type string, values: Overwrite, Append, Update" 808 | report_output_type = 'Overwrite' 809 | 810 | if 'ReportAreasOverlap' in report_params: 811 | report_areas_overlap = bool(strtobool(report_params['ReportAreasOverlap'])) 812 | 813 | else: 814 | print "Report is missing the ReportAreasOverlap parameter: type string, values: True, False" 815 | report_areas_overlap = True 816 | 817 | report_date_field = report_params['ReportDateField'] 818 | report_ID_field = report_params['ReportIDField'] 819 | 820 | avg_field_map = report_params['AverageToResultFieldMap'] 821 | 822 | if 'UseArcMapExpression' in report_params: 823 | useArcMapExpression = report_params['UseArcMapExpression'] 824 | else: 825 | useArcMapExpression = False 826 | adjust_count = report_params.get("AdjustCountField", False) 827 | 828 | if sql == '' or sql is None or sql == '1=1' or sql == '1==1': 829 | filt_layer = reporting_layer 830 | else: 831 | # arcpy.MakeQueryTable_management(in_table=reporting_layer,out_table=filt_layer,in_key_field_option="USE_KEY_FIELDS",in_key_field="#",in_field="#",where_clause=sql) 832 | arcpy.MakeFeatureLayer_management(reporting_layer, filt_layer, sql, "", "") 833 | 834 | inputCnt = int(arcpy.GetCount_management(in_rows=filt_layer).getOutput(0)) 835 | if inputCnt == 0: 836 | pass 837 | else: 838 | 839 | if report_params['Type'].upper() == "AVERAGE": 840 | 'If we want Speedy Intersect to adjust all the average fields, we need to enhance it to support multi count fields' 841 | result = split_average(reporting_areas=reporting_areas, 842 | reporting_areas_ID_field=reporting_areas_ID_field, 843 | reporting_layer=filt_layer, 844 | reporting_layer_field_map=field_map, 845 | code_exp=code_exp, 846 | use_arcmap_expression=useArcMapExpression, 847 | adjust_count=False, 848 | count_field_name='', 849 | report_areas_overlap=report_areas_overlap) 850 | else: 851 | 'If we want Speedy Intersect to adjust all the average fields, we need to enhance it to support multi count fields' 852 | result = split_statistic(reporting_areas=reporting_areas, 853 | reporting_areas_ID_field=reporting_areas_ID_field, 854 | reporting_layer=filt_layer, 855 | reporting_layer_field_map=field_map, 856 | code_exp=code_exp, 857 | use_arcmap_expression=useArcMapExpression, 858 | adjust_count=False, 859 | count_field_name='', 860 | report_areas_overlap=report_areas_overlap) 861 | 862 | if 'layer' in result: 863 | 864 | report_copy = copy_report_data_schema(reporting_areas=reporting_areas, 865 | reporting_areas_ID_field=reporting_areas_ID_field, 866 | report_schema=report_schema, 867 | report_result=report_result, 868 | join_layer=result['layer'], 869 | report_output_type=report_output_type) 870 | 871 | if report_params['Type'].upper() == "AVERAGE": 872 | report_result = calculate_average_report_results(report_result=report_result, 873 | reporting_areas_ID_field=reporting_areas_ID_field, 874 | report_copy=report_copy, 875 | field_map=avg_field_map, 876 | report_date_field=report_date_field, 877 | report_ID_field=report_ID_field, 878 | average_field=result['field'], 879 | reportParam=report_params, 880 | config=datasources) 881 | else: 882 | report_result = calculate_statistic_report_results(report_result=report_result, 883 | reporting_areas_ID_field=reporting_areas_ID_field, 884 | report_copy=report_copy, 885 | field_map=avg_field_map, 886 | report_date_field=report_date_field, 887 | report_ID_field=report_ID_field, 888 | statistic_field=result['field'], 889 | reportParam=report_params, 890 | config=datasources) 891 | print "%s was created or updated" % report_result 892 | deleteFC(in_datasets=[report_copy, result['layer']]) 893 | 894 | 895 | except arcpy.ExecuteError: 896 | line, filename, synerror = trace() 897 | raise ReportToolsError({ 898 | "function": "create_average_report", 899 | "line": line, 900 | "filename": filename, 901 | "synerror": synerror, 902 | "arcpyError": arcpy.GetMessages(2), 903 | } 904 | ) 905 | except (ReportToolsError), e: 906 | raise e 907 | except: 908 | line, filename, synerror = trace() 909 | raise ReportToolsError({ 910 | "function": "create_average_report", 911 | "line": line, 912 | "filename": filename, 913 | "synerror": synerror, 914 | } 915 | ) 916 | finally: 917 | filt_layer = None 918 | reporting_layer = None 919 | field_map = None 920 | sql = None 921 | code_exp = None 922 | report_schema = None 923 | report_result = None 924 | report_date_field = None 925 | report_ID_field = None 926 | avg_field_map = None 927 | inputCnt = None 928 | result = None 929 | report_copy = None 930 | report_result = None 931 | 932 | del filt_layer 933 | del reporting_layer 934 | del field_map 935 | del sql 936 | del code_exp 937 | del report_schema 938 | del report_result 939 | del report_date_field 940 | del report_ID_field 941 | del avg_field_map 942 | del inputCnt 943 | del result 944 | del report_copy 945 | 946 | gc.collect() 947 | 948 | 949 | # ---------------------------------------------------------------------- 950 | def calculate_load_results(feature_data, 951 | feature_data_id_field, 952 | join_table, 953 | join_table_id_field, 954 | report_date_field, 955 | report_result, 956 | field_map 957 | ): 958 | _tempWorkspace = None 959 | _feature_data_layer = None 960 | _join_table_copy = None 961 | _joinedDataFull = None 962 | _pointsJoinedData = None 963 | _pointsJoinedDataFull = None 964 | joinTableDesc = None 965 | joinName = None 966 | featureDataDesc = None 967 | featureDataName = None 968 | fields = None 969 | tFields = None 970 | layerFlds = None 971 | new_row = None 972 | dt = None 973 | onlTm = None 974 | timeStr = None 975 | strOnlineTime = None 976 | try: 977 | _tempWorkspace = env.scratchGDB 978 | 979 | _feature_data_layer = Common.random_string_generator() 980 | _join_table_copy = Common.random_string_generator() 981 | _joinedDataFull = os.path.join(_tempWorkspace, _join_table_copy) 982 | 983 | _pointsJoinedData = Common.random_string_generator() 984 | _pointsJoinedDataFull = os.path.join(_tempWorkspace, _pointsJoinedData) 985 | # Process: Make Feature Layer 986 | if arcpy.Exists(dataset=feature_data) == False: 987 | return {"Error": feature_data + " Does not exist"} 988 | 989 | if arcpy.Exists(dataset=join_table) == False: 990 | return {"Error": join_table + " Does not exist"} 991 | 992 | arcpy.MakeFeatureLayer_management(in_features=feature_data, out_layer=_feature_data_layer, where_clause=None, 993 | workspace=None, field_info=None) 994 | 995 | # Process: Table to Table 996 | arcpy.TableToTable_conversion(join_table, _tempWorkspace, _join_table_copy, "", "#", "") 997 | 998 | # Process: Add Join 999 | arcpy.AddJoin_management(_feature_data_layer, feature_data_id_field, _joinedDataFull, join_table_id_field, 1000 | "KEEP_COMMON") 1001 | 1002 | arcpy.FeatureClassToFeatureClass_conversion(_feature_data_layer, _tempWorkspace, _pointsJoinedData, "", "", "") 1003 | 1004 | joinTableDesc = arcpy.Describe(_joinedDataFull) 1005 | joinName = str(joinTableDesc.name) 1006 | 1007 | featureDataDesc = arcpy.Describe(feature_data) 1008 | featureDataName = str(featureDataDesc.name) 1009 | 1010 | try: 1011 | arcpy.RemoveJoin_management(_feature_data_layer, joinName) 1012 | except: 1013 | pass 1014 | 1015 | fields = [] 1016 | tFields = [] 1017 | layerFlds = fieldsToFieldArray(featureclass=_pointsJoinedDataFull) 1018 | 1019 | for fld in field_map: 1020 | if fld['FieldName'] in layerFlds: 1021 | 1022 | fields.append(fld['FieldName']) 1023 | elif joinName + "_" + fld['FieldName'] in layerFlds: 1024 | fld['FieldName'] = joinName + "_" + fld['FieldName'] 1025 | fields.append(fld['FieldName']) 1026 | elif featureDataName + "_" + fld['FieldName'] in layerFlds: 1027 | fld['FieldName'] = featureDataName + "_" + fld['FieldName'] 1028 | fields.append(fld['FieldName']) 1029 | 1030 | if len(fields) != len(field_map): 1031 | print "Field Map length does not match fields in layer, exiting" 1032 | return 1033 | 1034 | for fld in field_map: 1035 | tFields.append(fld['TargetField']) 1036 | 1037 | tFields.append("SHAPE@") 1038 | 1039 | fields.append("SHAPE@") 1040 | 1041 | datefld = -1 1042 | if report_date_field in tFields: 1043 | datefld = tFields.index(report_date_field) 1044 | elif report_date_field != '': 1045 | tFields.append(report_date_field) 1046 | 1047 | with arcpy.da.InsertCursor(report_result, tFields) as icursor: 1048 | strOnlineTime = Common.online_time_to_string(Common.local_time_to_online(), dateTimeFormat) 1049 | 1050 | with arcpy.da.SearchCursor(_pointsJoinedDataFull, fields) as scursor: 1051 | for row in scursor: 1052 | new_row = list(row) 1053 | if datefld > -1: 1054 | try: 1055 | if isinstance(new_row[datefld], basestring): 1056 | dt = parse(new_row[datefld]) 1057 | else: 1058 | dt = new_row[datefld] 1059 | 1060 | onlTm = Common.local_time_to_online(dt) 1061 | timeStr = Common.online_time_to_string(onlTm, dateTimeFormat) 1062 | new_row[datefld] = timeStr 1063 | except: 1064 | new_row[datefld] = strOnlineTime 1065 | elif report_date_field != '': 1066 | new_row.append(strOnlineTime) 1067 | icursor.insertRow(new_row) 1068 | del row 1069 | del scursor 1070 | del icursor 1071 | 1072 | except arcpy.ExecuteError: 1073 | line, filename, synerror = trace() 1074 | raise ReportToolsError({ 1075 | "function": "calculate_load_results", 1076 | "line": line, 1077 | "filename": filename, 1078 | "synerror": synerror, 1079 | "arcpyError": arcpy.GetMessages(2), 1080 | } 1081 | ) 1082 | except: 1083 | line, filename, synerror = trace() 1084 | raise ReportToolsError({ 1085 | "function": "calculate_load_results", 1086 | "line": line, 1087 | "filename": filename, 1088 | "synerror": synerror, 1089 | } 1090 | ) 1091 | finally: 1092 | _tempWorkspace = None 1093 | _feature_data_layer = None 1094 | _join_table_copy = None 1095 | _joinedDataFull = None 1096 | _pointsJoinedData = None 1097 | _pointsJoinedDataFull = None 1098 | joinTableDesc = None 1099 | joinName = None 1100 | featureDataDesc = None 1101 | featureDataName = None 1102 | fields = None 1103 | tFields = None 1104 | layerFlds = None 1105 | new_row = None 1106 | dt = None 1107 | onlTm = None 1108 | timeStr = None 1109 | strOnlineTime = None 1110 | 1111 | del _tempWorkspace 1112 | del _feature_data_layer 1113 | del _join_table_copy 1114 | del _joinedDataFull 1115 | del _pointsJoinedData 1116 | del _pointsJoinedDataFull 1117 | del joinTableDesc 1118 | del joinName 1119 | del featureDataDesc 1120 | del featureDataName 1121 | del fields 1122 | del tFields 1123 | del layerFlds 1124 | del new_row 1125 | del dt 1126 | del onlTm 1127 | del timeStr 1128 | del strOnlineTime 1129 | gc.collect() 1130 | 1131 | 1132 | # ---------------------------------------------------------------------- 1133 | def split_average(reporting_areas, 1134 | reporting_areas_ID_field, 1135 | reporting_layer, 1136 | reporting_layer_field_map, 1137 | code_exp, 1138 | use_arcmap_expression=False, 1139 | adjust_count=False, 1140 | count_field_name='', 1141 | report_areas_overlap=True): 1142 | _tempWorkspace = None 1143 | _intersect = None 1144 | sumstats = None 1145 | age_field = None 1146 | try: 1147 | _tempWorkspace = env.scratchGDB 1148 | 1149 | _intersect = os.path.join(_tempWorkspace, Common.random_string_generator()) 1150 | sumstats = os.path.join(_tempWorkspace, Common.random_string_generator()) 1151 | 1152 | # Process: Intersect Reporting Areas with Reporting Data to split them for accurate measurements 1153 | # arcpy.Intersect_analysis(in_features="'"+ reporting_areas + "' #;'" + reporting_layer+ "' #",out_feature_class= _intersect,join_attributes="ALL",cluster_tolerance="#",output_type="INPUT") 1154 | count_field_adjust = count_field_name if adjust_count == True else None 1155 | gptools.speedyIntersect(fcToSplit=reporting_layer, 1156 | splitFC=reporting_areas, 1157 | fieldsToAssign=[reporting_areas_ID_field], 1158 | countField=count_field_adjust, 1159 | onlyKeepLargest=False, 1160 | outputFC=_intersect, 1161 | report_areas_overlap=report_areas_overlap) 1162 | 1163 | age_field = "statsfield" 1164 | # Process: Add a field and calculate it with the groupings required for reporting. 1165 | arcpy.AddField_management(in_table=_intersect, field_name=age_field, field_type="LONG", field_precision="", 1166 | field_scale="", field_length="", 1167 | field_alias="", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", 1168 | field_domain="") 1169 | 1170 | if use_arcmap_expression: 1171 | 1172 | arcpy.CalculateField_management(in_table=_intersect, field=age_field, 1173 | expression=code_exp, 1174 | expression_type='PYTHON_9.3', 1175 | code_block=None) 1176 | else: 1177 | calc_field(inputDataset=_intersect, field_map=reporting_layer_field_map, code_exp=code_exp, 1178 | result_field=age_field) 1179 | 1180 | arcpy.Statistics_analysis(_intersect, out_table=sumstats, statistics_fields=age_field + " MEAN", 1181 | case_field=reporting_areas_ID_field) 1182 | 1183 | deleteFC([_intersect]) 1184 | return {"layer": sumstats, "field": "MEAN_" + age_field} 1185 | 1186 | except arcpy.ExecuteError: 1187 | line, filename, synerror = trace() 1188 | raise ReportToolsError({ 1189 | "function": "split_average", 1190 | "line": line, 1191 | "filename": filename, 1192 | "synerror": synerror, 1193 | "arcpyError": arcpy.GetMessages(2), 1194 | } 1195 | ) 1196 | except: 1197 | line, filename, synerror = trace() 1198 | raise ReportToolsError({ 1199 | "function": "split_average", 1200 | "line": line, 1201 | "filename": filename, 1202 | "synerror": synerror, 1203 | } 1204 | ) 1205 | finally: 1206 | _tempWorkspace = None 1207 | _intersect = None 1208 | 1209 | del _tempWorkspace 1210 | del _intersect 1211 | 1212 | gc.collect() 1213 | 1214 | 1215 | # ---------------------------------------------------------------------- 1216 | 1217 | def split_statistic(reporting_areas, 1218 | reporting_areas_ID_field, 1219 | reporting_layer, 1220 | reporting_layer_field_map, 1221 | code_exp, 1222 | use_arcmap_expression=False, 1223 | adjust_count=False, 1224 | count_field_name='', 1225 | report_areas_overlap=True): 1226 | _tempWorkspace = None 1227 | _intersect = None 1228 | sumstats = None 1229 | statsfield = None 1230 | try: 1231 | _tempWorkspace = env.scratchGDB 1232 | 1233 | _intersect = os.path.join(_tempWorkspace, Common.random_string_generator()) 1234 | sumstats = os.path.join(_tempWorkspace, Common.random_string_generator()) 1235 | 1236 | # Process: Intersect Reporting Areas with Reporting Data to split them for accurate measurements 1237 | # arcpy.Intersect_analysis(in_features=[reporting_areas, reporting_layer], out_feature_class=_intersect, join_attributes="ALL", cluster_tolerance="#", output_type="INPUT") 1238 | count_field_adjust = count_field_name if adjust_count == True else None 1239 | gptools.speedyIntersect(fcToSplit=reporting_layer, 1240 | splitFC=reporting_areas, 1241 | fieldsToAssign=[reporting_areas_ID_field], 1242 | countField=count_field_adjust, 1243 | onlyKeepLargest=False, 1244 | outputFC=_intersect, 1245 | report_areas_overlap=report_areas_overlap) 1246 | 1247 | statsfield = "statsfield" 1248 | # Process: Add a field and calculate it with the groupings required for reporting. 1249 | arcpy.AddField_management(in_table=_intersect, field_name=statsfield, field_type="LONG", field_precision="", 1250 | field_scale="", field_length="", 1251 | field_alias="", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", 1252 | field_domain="") 1253 | 1254 | if use_arcmap_expression: 1255 | 1256 | arcpy.CalculateField_management(in_table=_intersect, field=statsfield, 1257 | expression=code_exp, 1258 | expression_type='PYTHON_9.3', 1259 | code_block=None) 1260 | else: 1261 | calc_field(inputDataset=_intersect, field_map=reporting_layer_field_map, code_exp=code_exp, 1262 | result_field=statsfield) 1263 | 1264 | statistics_fields = [[statsfield, s] for s in 1265 | ["SUM", "MEAN", "MIN", "MAX", "RANGE", "STD", "COUNT", "FIRST", "LAST"]] 1266 | arcpy.Statistics_analysis(_intersect, out_table=sumstats, statistics_fields=statistics_fields, 1267 | case_field=reporting_areas_ID_field) 1268 | 1269 | deleteFC([_intersect]) 1270 | return {"layer": sumstats, "field": statsfield} 1271 | 1272 | except arcpy.ExecuteError: 1273 | line, filename, synerror = trace() 1274 | raise ReportToolsError({ 1275 | "function": "split_statistic", 1276 | "line": line, 1277 | "filename": filename, 1278 | "synerror": synerror, 1279 | "arcpyError": arcpy.GetMessages(2), 1280 | } 1281 | ) 1282 | except: 1283 | line, filename, synerror = trace() 1284 | raise ReportToolsError({ 1285 | "function": "split_statistics", 1286 | "line": line, 1287 | "filename": filename, 1288 | "synerror": synerror, 1289 | } 1290 | ) 1291 | finally: 1292 | _tempWorkspace = None 1293 | _intersect = None 1294 | 1295 | del _tempWorkspace 1296 | del _intersect 1297 | 1298 | gc.collect() 1299 | 1300 | 1301 | # ---------------------------------------------------------------------- 1302 | def split_reclass(reporting_areas, reporting_areas_ID_field, reporting_layer, field_map, reclass_map, 1303 | classified_layer_field_name, 1304 | count_field_name, use_arcmap_expression=False, reclass_type='split', 1305 | adjust_count=False, 1306 | report_areas_overlap=True): 1307 | _tempWorkspace = None 1308 | _intersect = None 1309 | flds = None 1310 | val_fnd = None 1311 | sql_state = None 1312 | 1313 | try: 1314 | _tempWorkspace = env.scratchGDB 1315 | 1316 | _intersect = os.path.join(_tempWorkspace, Common.random_string_generator()) 1317 | _unique_name = Common.random_string_generator() 1318 | 1319 | reclassLayer = os.path.join(_tempWorkspace, _unique_name) 1320 | 1321 | if reclass_type == 'single': 1322 | keep_largest = True 1323 | # shapeBasedSpatialJoin(TargetLayer=reporting_layer, JoinLayer=reporting_areas, JoinResult=_intersect) 1324 | count_field_adjust = count_field_name if adjust_count == True else None 1325 | gptools.speedyIntersect(fcToSplit=reporting_layer, 1326 | splitFC=reporting_areas, 1327 | fieldsToAssign=[reporting_areas_ID_field], 1328 | countField=count_field_adjust, 1329 | onlyKeepLargest=keep_largest, 1330 | outputFC=_intersect, 1331 | report_areas_overlap=report_areas_overlap) 1332 | else: 1333 | keep_largest = False 1334 | 1335 | # Process: Intersect Reporting Areas with Reporting Data to split them for accurate measurements 1336 | count_field_adjust = count_field_name if adjust_count == True else None 1337 | gptools.speedyIntersect(fcToSplit=reporting_layer, 1338 | splitFC=reporting_areas, 1339 | fieldsToAssign=[reporting_areas_ID_field], 1340 | countField=count_field_adjust, 1341 | onlyKeepLargest=keep_largest, 1342 | outputFC=_intersect, 1343 | report_areas_overlap=report_areas_overlap) 1344 | # arcpy.Intersect_analysis(in_features="'"+ reporting_areas + "' #;'" + reporting_layer+ "' #",out_feature_class= _intersect,join_attributes="ALL",cluster_tolerance="#",output_type="INPUT") 1345 | # Process: Add a field and calculate it with the groupings required for reporting. . 1346 | # Process: Create Reclass Feature Class 1347 | desc = arcpy.Describe(_intersect) 1348 | 1349 | arcpy.CreateFeatureclass_management(_tempWorkspace, _unique_name, str(desc.shapeType).upper(), "", "DISABLED", 1350 | "DISABLED", _intersect, "", "0", "0", "0") 1351 | del desc 1352 | arcpy.AddField_management(in_table=reclassLayer, field_name=classified_layer_field_name, field_type="TEXT", 1353 | field_precision="", field_scale="", field_length="", 1354 | field_alias="", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", 1355 | field_domain="") 1356 | idFld = arcpy.ListFields(dataset=reporting_areas, wild_card=reporting_areas_ID_field) 1357 | if len(idFld) > 0: 1358 | arcpy.AddField_management(in_table=reclassLayer, field_name=reporting_areas_ID_field, 1359 | field_type=idFld[0].type, 1360 | field_precision=idFld[0].precision, 1361 | field_scale=idFld[0].scale, 1362 | field_length=idFld[0].length, 1363 | field_alias=idFld[0].aliasName, 1364 | field_is_nullable=idFld[0].isNullable, 1365 | field_is_required=idFld[0].required, 1366 | field_domain=idFld[0].domain) 1367 | 1368 | else: 1369 | arcpy.AddField_management(in_table=reclassLayer, field_name=reporting_areas_ID_field, field_type="TEXT", 1370 | field_precision="", field_scale="", field_length="", 1371 | field_alias="", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", 1372 | field_domain="") 1373 | 1374 | reclassFlds = [] 1375 | reclassFlds.append(reporting_areas_ID_field) 1376 | reclassFlds.append("SHAPE@") 1377 | reclassFlds.append(classified_layer_field_name) 1378 | flds = [] 1379 | 1380 | for fld in field_map: 1381 | flds.append(fld['FieldName']) 1382 | flds.append(reporting_areas_ID_field) 1383 | flds.append("SHAPE@") 1384 | 1385 | countFieldAdded = False 1386 | countField = arcpy.ListFields(reporting_layer, count_field_name) 1387 | 1388 | if len(countField) > 0: 1389 | arcpy.AddField_management(in_table=reclassLayer, field_name=countField[0].name, 1390 | field_type=countField[0].type, 1391 | field_precision=countField[0].precision, field_scale=countField[0].scale, 1392 | field_length=countField[0].length, 1393 | field_alias=countField[0].aliasName, field_is_nullable=countField[0].isNullable, 1394 | field_is_required=countField[0].required, field_domain="") 1395 | countFieldAdded = True 1396 | reclassFlds.append(countField[0].name) 1397 | flds.append(countField[0].name) 1398 | 1399 | selectLayer = "selectLayer" 1400 | if use_arcmap_expression: 1401 | for field in reclass_map: 1402 | sql_state = field['Expression'] 1403 | try: 1404 | arcpy.MakeFeatureLayer_management(in_features=_intersect, 1405 | out_layer=selectLayer, 1406 | where_clause=sql_state, 1407 | workspace=None, 1408 | field_info=None) 1409 | except arcpy.ExecuteError: 1410 | line, filename, synerror = trace() 1411 | raise ReportToolsError({ 1412 | "function": "create_reclass_report: %s" % split_reclass, 1413 | "line": line, 1414 | "filename": filename, 1415 | "synerror": synerror, 1416 | "arcpyError": arcpy.GetMessages(2), 1417 | }) 1418 | reccount = int(arcpy.GetCount_management(selectLayer)[0]) 1419 | # print "%s records found to match %s" % (field["FieldName"],str(reccount)) 1420 | if reccount > 0: 1421 | with arcpy.da.InsertCursor(reclassLayer, reclassFlds) as irows: 1422 | with arcpy.da.SearchCursor(selectLayer, flds) as srows: 1423 | for row in srows: 1424 | if countFieldAdded: 1425 | irows.insertRow((row[len(flds) - 3], row[len(flds) - 2], field["FieldName"], 1426 | row[len(flds) - 1])) 1427 | else: 1428 | irows.insertRow((row[len(flds) - 2], row[len(flds) - 1], field["FieldName"])) 1429 | del row 1430 | del srows 1431 | 1432 | del irows 1433 | else: 1434 | 1435 | with arcpy.da.InsertCursor(reclassLayer, reclassFlds) as irows: 1436 | with arcpy.da.SearchCursor(_intersect, flds) as srows: 1437 | for row in srows: 1438 | 1439 | for field in reclass_map: 1440 | sql_state = field['Expression'] 1441 | try: 1442 | 1443 | for i in range(len(field_map)): 1444 | sql_state = sql_state.replace(field_map[i]['Expression'], str(row[i])) 1445 | 1446 | if eval(sql_state) == True: 1447 | if countFieldAdded: 1448 | irows.insertRow((row[len(flds) - 3], row[len(flds) - 2], field["FieldName"], 1449 | row[len(flds) - 1])) 1450 | else: 1451 | irows.insertRow((row[len(flds) - 2], row[len(flds) - 1], field["FieldName"])) 1452 | 1453 | 1454 | except Exception, e: 1455 | print "Warning: %s is not valid" % str(sql_state) 1456 | 1457 | del row 1458 | del srows 1459 | 1460 | del irows 1461 | # print "done update cursors" 1462 | deleteFC([_intersect]) 1463 | return reclassLayer 1464 | 1465 | except arcpy.ExecuteError: 1466 | line, filename, synerror = trace() 1467 | raise ReportToolsError({ 1468 | "function": "split_reclass", 1469 | "line": line, 1470 | "filename": filename, 1471 | "synerror": synerror, 1472 | "arcpyError": arcpy.GetMessages(2), 1473 | } 1474 | ) 1475 | except ReportToolsError, e: 1476 | raise e 1477 | 1478 | except: 1479 | line, filename, synerror = trace() 1480 | raise ReportToolsError({ 1481 | "function": "split_reclass", 1482 | "line": line, 1483 | "filename": filename, 1484 | "synerror": synerror, 1485 | } 1486 | ) 1487 | finally: 1488 | _tempWorkspace = None 1489 | flds = None 1490 | val_fnd = None 1491 | sql_state = None 1492 | 1493 | del _tempWorkspace 1494 | del flds 1495 | del val_fnd 1496 | del sql_state 1497 | 1498 | gc.collect() 1499 | 1500 | 1501 | # ---------------------------------------------------------------------- 1502 | def classified_pivot(classified_layer, classified_layer_field_name, reporting_areas_ID_field, count_field, 1503 | summary_fields=''): 1504 | _tempWorkspace = None 1505 | _freq = None 1506 | _pivot = None 1507 | 1508 | try: 1509 | _tempWorkspace = env.scratchGDB 1510 | 1511 | _freq = os.path.join(_tempWorkspace, Common.random_string_generator()) 1512 | _pivot = os.path.join(_tempWorkspace, Common.random_string_generator()) 1513 | 1514 | if not count_field in summary_fields and count_field != 'FREQUENCY': 1515 | summary_fields = count_field if summary_fields == '' else summary_fields + ";" + count_field 1516 | 1517 | arcpy.Frequency_analysis(in_table=classified_layer, out_table=_freq, 1518 | frequency_fields=reporting_areas_ID_field + ';' + classified_layer_field_name, 1519 | summary_fields=summary_fields) 1520 | 1521 | arcpy.PivotTable_management(_freq, reporting_areas_ID_field, classified_layer_field_name, count_field, _pivot) 1522 | deleteFC([_freq]) 1523 | return _pivot 1524 | except arcpy.ExecuteError: 1525 | line, filename, synerror = trace() 1526 | raise ReportToolsError({ 1527 | "function": "classified_pivot", 1528 | "line": line, 1529 | "filename": filename, 1530 | "synerror": synerror, 1531 | "arcpyError": arcpy.GetMessages(2), 1532 | } 1533 | ) 1534 | except: 1535 | line, filename, synerror = trace() 1536 | raise ReportToolsError({ 1537 | "function": "classified_pivot", 1538 | "line": line, 1539 | "filename": filename, 1540 | "synerror": synerror, 1541 | } 1542 | ) 1543 | finally: 1544 | _tempWorkspace = None 1545 | _freq = None 1546 | 1547 | del _tempWorkspace 1548 | del _freq 1549 | 1550 | gc.collect() 1551 | 1552 | 1553 | # ---------------------------------------------------------------------- 1554 | def copy_report_data_schema(reporting_areas, 1555 | reporting_areas_ID_field, 1556 | report_schema, 1557 | report_result, 1558 | join_layer, 1559 | report_output_type): 1560 | _tempWorkspace = None 1561 | _reportCopy = None 1562 | final_report = None 1563 | 1564 | try: 1565 | _tempWorkspace = env.scratchGDB 1566 | _reportCopy = Common.random_string_generator() 1567 | final_report = os.path.join(_tempWorkspace, _reportCopy) 1568 | 1569 | # Process: Create a copy of the Reporting Areas for the summary info join 1570 | fm_id = arcpy.FieldMap() 1571 | fms = arcpy.FieldMappings() 1572 | 1573 | # Add fields to their corresponding FieldMap objects 1574 | fm_id.addInputField(reporting_areas, reporting_areas_ID_field) 1575 | 1576 | # Set the output field properties for both FieldMap objects 1577 | type_name = fm_id.outputField 1578 | type_name.name = reporting_areas_ID_field 1579 | fm_id.outputField = type_name 1580 | fms.addFieldMap(fm_id) 1581 | arcpy.FeatureClassToFeatureClass_conversion(in_features=reporting_areas, 1582 | out_path=_tempWorkspace, 1583 | out_name=_reportCopy, 1584 | field_mapping=fms) 1585 | 1586 | # Process: Join the Areas to the pivot table to get a count by area 1587 | arcpy.JoinField_management(final_report, reporting_areas_ID_field, join_layer, reporting_areas_ID_field, "#") 1588 | 1589 | # Process: Create a copy of the report layer 1590 | # if not os.path.isabs(report_result): 1591 | # report_result =os.path.abspath( report_result) 1592 | 1593 | # if not os.path.isabs(report_schema): 1594 | # report_schema =os.path.abspath( report_schema) 1595 | 1596 | # New flag for weather or not to append data to exist results or clear it out. 1597 | if report_output_type.upper() == "APPEND": 1598 | if arcpy.Exists(report_result) == False: 1599 | if arcpy.Exists(report_schema) == False: 1600 | raise ReportToolsError({ 1601 | "function": "copy_report_data_schema", 1602 | "line": 990, 1603 | "filename": 'reporttools', 1604 | "synerror": "%s could not be located" % report_schema 1605 | } 1606 | ) 1607 | className = os.path.basename(report_result) 1608 | layerPath = os.path.dirname(report_result) 1609 | arcpy.FeatureClassToFeatureClass_conversion(in_features=report_schema, 1610 | out_path=layerPath, 1611 | out_name=className, 1612 | where_clause=None, 1613 | field_mapping=None, 1614 | config_keyword=None) 1615 | # arcpy.Copy_management(report_schema,report_result,"FeatureClass") 1616 | elif report_output_type.upper() == "UPDATE": 1617 | if not arcpy.Exists(dataset=report_result): 1618 | if arcpy.Exists(report_schema) == False: 1619 | raise ReportToolsError({ 1620 | "function": "copy_report_data_schema", 1621 | "line": 990, 1622 | "filename": 'reporttools', 1623 | "synerror": "%s could not be located" % report_schema 1624 | } 1625 | ) 1626 | className = os.path.basename(report_result) 1627 | layerPath = os.path.dirname(report_result) 1628 | 1629 | arcpy.FeatureClassToFeatureClass_conversion(in_features=report_schema, 1630 | out_path=layerPath, 1631 | out_name=className, 1632 | where_clause=None, 1633 | field_mapping=None, 1634 | config_keyword=None) 1635 | else: 1636 | # arcpy.Copy_management(report_schema,report_result,"FeatureClass") 1637 | className = os.path.basename(report_result) 1638 | layerPath = os.path.dirname(report_result) 1639 | 1640 | arcpy.FeatureClassToFeatureClass_conversion(in_features=report_schema, 1641 | out_path=layerPath, 1642 | out_name=className, 1643 | where_clause=None, 1644 | field_mapping=None, 1645 | config_keyword=None) 1646 | return final_report 1647 | except arcpy.ExecuteError: 1648 | line, filename, synerror = trace() 1649 | raise ReportToolsError({ 1650 | "function": "copy_report_data_schema", 1651 | "line": line, 1652 | "filename": filename, 1653 | "synerror": synerror, 1654 | "arcpyError": arcpy.GetMessages(2), 1655 | } 1656 | ) 1657 | except: 1658 | line, filename, synerror = trace() 1659 | raise ReportToolsError({ 1660 | "function": "copy_report_data_schema", 1661 | "line": line, 1662 | "filename": filename, 1663 | "synerror": synerror, 1664 | } 1665 | ) 1666 | finally: 1667 | _tempWorkspace = None 1668 | _reportCopy = None 1669 | 1670 | del _tempWorkspace 1671 | del _reportCopy 1672 | 1673 | gc.collect() 1674 | 1675 | 1676 | # ---------------------------------------------------------------------- 1677 | def calculate_average_report_results(report_result, reporting_areas_ID_field, report_copy, field_map, 1678 | report_date_field, report_ID_field, average_field, reportParam, config): 1679 | fields = None 1680 | strOnlineTime = None 1681 | search_fields = None 1682 | newrow = None 1683 | try: 1684 | fields = [] 1685 | for fld in field_map: 1686 | fields.append(fld['FieldName']) 1687 | 1688 | fields.append(report_ID_field) 1689 | fields.append(report_date_field) 1690 | fields.append("SHAPE@") 1691 | 1692 | # strOnlineTime = Common.online_time_to_string(Common.local_time_to_online(),dateTimeFormat) 1693 | strLocalTime = datetime.datetime.now().strftime(dateTimeFormat) 1694 | search_fields = [average_field, reporting_areas_ID_field, "SHAPE@"] 1695 | 1696 | with arcpy.da.InsertCursor(report_result, fields) as irows: 1697 | with arcpy.da.SearchCursor(report_copy, search_fields) as srows: 1698 | for row in srows: 1699 | newrow = [] 1700 | for fld in field_map: 1701 | try: 1702 | if '{Value}' in fld['Expression']: 1703 | newrow.append( 1704 | eval(fld["Expression"].replace("{Value}", str(Common.noneToValue(row[0], 0.0))))) 1705 | else: 1706 | newrow.append(eval(fld["Expression"].replace("{PreCalcExpressionValue}", 1707 | str(Common.noneToValue(row[0], 0.0))))) 1708 | except Exception: 1709 | newrow.append(None) 1710 | newrow.append(row[1]) 1711 | newrow.append(strLocalTime) 1712 | newrow.append(row[2]) 1713 | 1714 | irows.insertRow(tuple(newrow)) 1715 | del srows 1716 | del irows 1717 | 1718 | # The current recordset that is completed, send it to a merged feature for CSV export process layer. 1719 | mergeAllReports(reportLayer=report_result, report=reportParam, config=config) 1720 | 1721 | return report_result 1722 | except arcpy.ExecuteError: 1723 | line, filename, synerror = trace() 1724 | raise ReportToolsError({ 1725 | "function": "calculate_average_report_results", 1726 | "line": line, 1727 | "filename": filename, 1728 | "synerror": synerror, 1729 | "arcpyError": arcpy.GetMessages(2), 1730 | } 1731 | ) 1732 | except: 1733 | line, filename, synerror = trace() 1734 | raise ReportToolsError({ 1735 | "function": "calculate_average_report_results", 1736 | "line": line, 1737 | "filename": filename, 1738 | "synerror": synerror, 1739 | } 1740 | ) 1741 | finally: 1742 | fields = None 1743 | strOnlineTime = None 1744 | search_fields = None 1745 | newrow = None 1746 | 1747 | del fields 1748 | del strOnlineTime 1749 | del search_fields 1750 | del newrow 1751 | 1752 | gc.collect() 1753 | 1754 | 1755 | # ---------------------------------------------------------------------- 1756 | def calculate_statistic_report_results(report_result, reporting_areas_ID_field, report_copy, field_map, 1757 | report_date_field, report_ID_field, statistic_field, reportParam, config): 1758 | fields = None 1759 | strOnlineTime = None 1760 | search_fields = None 1761 | newrow = None 1762 | try: 1763 | 1764 | def dictLookup(expr, d): 1765 | """replace values in string surrounded in {} with a dictionary key,value pair 1766 | attempt to evaluate expression 1767 | expr - str to replace 1768 | d - dict 1769 | """ 1770 | for k, v in d.items(): 1771 | expr = expr.replace("{{{}}}".format(k), str(Common.noneToValue(v, 0.0))) 1772 | try: 1773 | return eval(expr) 1774 | except: 1775 | line, filename, synerror = trace() 1776 | raise ReportToolsError({ 1777 | "function": "dictLookup", 1778 | "line": line, 1779 | "filename": filename, 1780 | "synerror": synerror, 1781 | "expr": expr}) 1782 | 1783 | # Summary statistics creates a table with fields named {statistic}_{fname}, e.g., "MEAN_age" 1784 | stats = ["SUM", "MEAN", "MIN", "MAX", "RANGE", "STD", "COUNT", "FIRST", "LAST"] 1785 | stats_fields = ["{}_{}".format(s, statistic_field) for s in stats] + [reporting_areas_ID_field, "SHAPE@"] 1786 | keys = stats + ["ID", "SHAPE"] 1787 | 1788 | # srows is a list of dictionaries mapping statistics to that row's field value 1789 | srows = [{s: r for s, r in zip(keys, row)} for row in arcpy.da.SearchCursor(report_copy, stats_fields)] 1790 | 1791 | # strOnlineTime = Common.online_time_to_string(Common.local_time_to_online(),dateTimeFormat) 1792 | strLocalTime = datetime.datetime.now().strftime(dateTimeFormat) 1793 | 1794 | fields = [fld['FieldName'] for fld in field_map] 1795 | fields.append(report_ID_field) 1796 | fields.append(report_date_field) 1797 | fields.append("SHAPE@") 1798 | 1799 | with arcpy.da.InsertCursor(report_result, fields) as irows: 1800 | for row in srows: 1801 | newrow = [] 1802 | for fld in field_map: 1803 | try: 1804 | newrow.append(dictLookup(fld["Expression"], row)) 1805 | except: 1806 | newrow.append(None) 1807 | newrow.append(row["ID"]) 1808 | newrow.append(strLocalTime) 1809 | newrow.append(row["SHAPE"]) 1810 | 1811 | irows.insertRow(tuple(newrow)) 1812 | 1813 | # The current recordset that is completed, send it to a merged feature for CSV export process layer. 1814 | mergeAllReports(reportLayer=report_result, report=reportParam, config=config) 1815 | 1816 | return report_result 1817 | except arcpy.ExecuteError: 1818 | line, filename, synerror = trace() 1819 | raise ReportToolsError({ 1820 | "function": "calculate_statistic_report_results", 1821 | "line": line, 1822 | "filename": filename, 1823 | "synerror": synerror, 1824 | "arcpyError": arcpy.GetMessages(2), 1825 | } 1826 | ) 1827 | except: 1828 | line, filename, synerror = trace() 1829 | raise ReportToolsError({ 1830 | "function": "calculate_statistic_report_results", 1831 | "line": line, 1832 | "filename": filename, 1833 | "synerror": synerror, 1834 | } 1835 | ) 1836 | finally: 1837 | fields = None 1838 | strOnlineTime = None 1839 | search_fields = None 1840 | newrow = None 1841 | 1842 | del fields 1843 | del strOnlineTime 1844 | del search_fields 1845 | del newrow 1846 | 1847 | gc.collect() 1848 | 1849 | 1850 | # ---------------------------------------------------------------------- 1851 | def calculate_report_results(report_result, 1852 | reporting_areas_ID_field, report_copy, 1853 | reclass_map, 1854 | report_date_field, 1855 | report_ID_field, 1856 | exp, reportParam, 1857 | config, use_arcmap_expression=False, 1858 | report_output_type='Overwrite', 1859 | delete_append_sql=None): 1860 | try: 1861 | 1862 | reporting_areas_Date_field = 'tempreportdate1' 1863 | arcpy.AddField_management(in_table=report_copy, field_name=reporting_areas_Date_field, field_type="DATE", 1864 | field_precision="", field_scale="", field_length="", field_alias="", 1865 | field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="") 1866 | 1867 | # strOnlineTime = Common.online_time_to_string(Common.local_time_to_online(),dateTimeFormat) 1868 | strLocalTime = datetime.datetime.now().strftime(dateTimeFormat) 1869 | 1870 | reclass_fields = [] 1871 | copyFields = [] 1872 | 1873 | fieldBaseFinalExp = {} 1874 | for fld in reclass_map: 1875 | reclass_fields.append(fld['FieldName']) 1876 | if "FinalResultExpression" in fld: 1877 | fieldBaseFinalExp[fld['FieldName']] = fld['FinalResultExpression'] 1878 | 1879 | reportDataFieldNames = [f.name for f in arcpy.ListFields(report_copy)] 1880 | for reclass_field in reclass_fields: 1881 | if not reclass_field in reportDataFieldNames: 1882 | arcpy.AddField_management(in_table=report_copy, field_name=reclass_field, field_type="TEXT", 1883 | field_precision="", field_scale="", field_length="", field_alias="", 1884 | field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", 1885 | field_domain="") 1886 | 1887 | reportDataFields = arcpy.ListFields(report_copy) 1888 | 1889 | resultFields = [f.name for f in arcpy.ListFields(report_result)] 1890 | 1891 | fms = arcpy.FieldMappings() 1892 | for fld in reportDataFields: 1893 | 1894 | if fld.name == reporting_areas_Date_field and report_date_field != '': 1895 | fm = arcpy.FieldMap() 1896 | 1897 | fm.addInputField(report_copy, fld.name) 1898 | outField = fm.outputField 1899 | outField.name = report_date_field 1900 | fm.outputField = outField 1901 | fms.addFieldMap(fm) 1902 | 1903 | copyFields.append((fld.name, report_date_field)) 1904 | elif fld.name == reporting_areas_ID_field: 1905 | fm = arcpy.FieldMap() 1906 | fm.addInputField(report_copy, fld.name) 1907 | outField = fm.outputField 1908 | outField.name = report_ID_field 1909 | fm.outputField = outField 1910 | fms.addFieldMap(fm) 1911 | # copyFields.append((fld.name,reporting_areas_ID_field )) 1912 | elif fld.name in resultFields and fld.type != 'OID' and fld.type != 'Geometry': 1913 | fm = arcpy.FieldMap() 1914 | fm.addInputField(report_copy, fld.name) 1915 | outField = fm.outputField 1916 | outField.name = fld.name 1917 | fm.outputField = outField 1918 | fms.addFieldMap(fm) 1919 | if fld.name in reclass_fields: 1920 | copyFields.append((fld.name, fld.name)) 1921 | 1922 | # className = os.path.basename(report_result) 1923 | # layerPath = os.path.dirname(report_result) 1924 | 1925 | # with arcpy.da.Editor(layerPath): 1926 | # for fld in reclass_fields: 1927 | 1928 | # outLayer = "outLayer%s" % fld 1929 | # where = '%s IS NULL' % fld 1930 | # arcpy.MakeFeatureLayer_management(report_result, outLayer, where) 1931 | # arcpy.CalculateField_management(outLayer, fld['FieldName'], 1932 | # '0', "PYTHON_9.3") 1933 | 1934 | # arcpy.CalculateField_management(report_result, reporting_areas_Date_field, 1935 | # 'time.strftime(\'%d/%m/%Y %H:%M\')', "PYTHON_9.3") 1936 | 1937 | reclass_fields.append(reporting_areas_Date_field) 1938 | with arcpy.da.UpdateCursor(report_copy, reclass_fields) as urows: 1939 | for row in urows: 1940 | for u in range(len(reclass_fields) - 1): 1941 | if reclass_fields[u] in fieldBaseFinalExp: 1942 | finalExp = fieldBaseFinalExp[reclass_fields[u]] 1943 | finalExpOrig = finalExp 1944 | for k in range(len(reclass_fields) - 1): 1945 | finalFieldVal = reclass_fields[k] 1946 | if "{" + finalFieldVal + "}" in finalExp: 1947 | finalExp = finalExp.replace("{" + finalFieldVal + "}", 1948 | str(Common.noneToValue(row[k], 0))) 1949 | try: 1950 | finalExp = finalExp.replace("{Value}", str(Common.noneToValue(row[u], 0))) 1951 | finalExp = finalExp.replace("{ReclassField}", str(Common.noneToValue(row[u], 0))) 1952 | row[u] = eval(finalExp) 1953 | except: 1954 | line, filename, synerror = trace() 1955 | # print "Warning: Evaluating Final Expression {3} with values {0} for the field {1} failed with the following error: {2}".format(finalExp,reclass_fields[u],synerror,finalExpOrig) 1956 | row[u] = Common.noneToValue(row[u], 0) 1957 | 1958 | else: 1959 | if '{Value}' in exp: 1960 | row[u] = eval(exp.replace("{Value}", str(Common.noneToValue(row[u], 0)))) 1961 | else: 1962 | row[u] = eval(exp.replace("{ReclassField}", str(Common.noneToValue(row[u], 0)))) 1963 | 1964 | row[len(reclass_fields) - 1] = strLocalTime 1965 | 1966 | urows.updateRow(row) 1967 | del urows 1968 | 1969 | if report_output_type.upper() == "UPDATE": 1970 | 1971 | JoinAndCalc(inputDataset=report_result, 1972 | inputJoinField=report_ID_field, 1973 | joinTable=report_copy, 1974 | joinTableJoinField=reporting_areas_ID_field, 1975 | copyFields=copyFields, 1976 | joinType="KEEP_COMMON", 1977 | inputFilter=delete_append_sql) 1978 | # arcpy.MakeFeatureLayer_management(in_features=report_result, 1979 | # out_layer=delfromreport, 1980 | # where_clause=delete_append_sql, 1981 | # workspace=None, 1982 | # field_info=None) 1983 | # arcpy.DeleteFeatures_management(in_features=delfromreport) 1984 | else: 1985 | arcpy.Append_management(report_copy, report_result, "NO_TEST", fms, "") 1986 | 1987 | # The current recordset that is completed, send it to a merged feature for CSV export process layer. 1988 | mergeAllReports(reportLayer=report_result, report=reportParam, config=config) 1989 | 1990 | except arcpy.ExecuteError: 1991 | line, filename, synerror = trace() 1992 | raise ReportToolsError({ 1993 | "function": "calculate_report_results", 1994 | "line": line, 1995 | "filename": filename, 1996 | "synerror": synerror, 1997 | "arcpyError": arcpy.GetMessages(2), 1998 | } 1999 | ) 2000 | except ReportToolsError, e: 2001 | raise e 2002 | except: 2003 | line, filename, synerror = trace() 2004 | raise ReportToolsError({ 2005 | "function": "calculate_report_results", 2006 | "line": line, 2007 | "filename": filename, 2008 | "synerror": synerror, 2009 | } 2010 | ) 2011 | 2012 | finally: 2013 | 2014 | gc.collect() 2015 | 2016 | 2017 | # ---------------------------------------------------------------------- 2018 | def validate_schema_map(report_schema, reclass_map, report_date_field, report_ID_field): 2019 | try: 2020 | valid = True 2021 | fieldList = arcpy.ListFields(report_schema) 2022 | 2023 | layer_fields = [] 2024 | for field in fieldList: 2025 | layer_fields.append(field.name) 2026 | 2027 | for fld in reclass_map: 2028 | if not fld['FieldName'] in layer_fields: 2029 | print "%s does not exist in %s" % (fld['FieldName'], report_schema) 2030 | valid = False 2031 | if report_date_field == '': 2032 | print "Warning: Report Date not set in %s" % (report_schema) 2033 | elif not report_date_field in layer_fields: 2034 | print "%s (Report Date Field) does not exist in %s" % (report_date_field, report_schema) 2035 | valid = False 2036 | if not report_ID_field in layer_fields: 2037 | print "%s (ID Field) does not exist in %s" % (report_ID_field, report_schema) 2038 | valid = False 2039 | 2040 | if valid == False: 2041 | raise ReportToolsError({ 2042 | "function": "validate_schema_map", 2043 | "line": 1454, 2044 | "filename": 'reporttools', 2045 | "synerror": "%s does not contain all the fields contained in the config" % report_schema 2046 | }) 2047 | except arcpy.ExecuteError: 2048 | line, filename, synerror = trace() 2049 | raise ReportToolsError({ 2050 | "function": "validate_schema_map", 2051 | "line": line, 2052 | "filename": filename, 2053 | "synerror": synerror, 2054 | "arcpyError": arcpy.GetMessages(2), 2055 | }) 2056 | except ReportToolsError, e: 2057 | raise e 2058 | except: 2059 | line, filename, synerror = trace() 2060 | raise ReportToolsError({ 2061 | "function": "validate_schema_map", 2062 | "line": line, 2063 | "filename": filename, 2064 | "synerror": synerror, 2065 | }) 2066 | 2067 | 2068 | # ---------------------------------------------------------------------- 2069 | def validate_id_field(reporting_areas, report_ID_field): 2070 | try: 2071 | 2072 | OIDField = arcpy.ListFields(dataset=reporting_areas, field_type='OID') 2073 | if len(OIDField) > 0 and OIDField[0].name == report_ID_field: 2074 | line, filename, synerror = trace() 2075 | raise ReportToolsError({ 2076 | "function": "validate_id_field", 2077 | "line": line, 2078 | "filename": filename, 2079 | "synerror": "OBJECTID cannot be used for ID field", 2080 | }) 2081 | globalIDField = arcpy.ListFields(dataset=reporting_areas, field_type='GlobalID') 2082 | _tempWorkspace = env.scratchGDB 2083 | if len(globalIDField) > 0 and globalIDField[0].name == report_ID_field: 2084 | desc = arcpy.Describe(value=reporting_areas) 2085 | _globalIDCopy_name = Common.random_string_generator() 2086 | globalIDCopy = os.path.join(_tempWorkspace, _globalIDCopy_name) 2087 | reportCopy = arcpy.CreateFeatureclass_management(out_path=_tempWorkspace, 2088 | out_name=_globalIDCopy_name, 2089 | geometry_type=desc.shapeType, 2090 | template=reporting_areas, 2091 | has_m="DISABLED", 2092 | has_z="DISABLED", 2093 | spatial_reference=desc.spatialReference) 2094 | 2095 | # arcpy.FeatureClassToFeatureClass_conversion(reporting_areas, _tempWorkspace, _globalIDCopy_name) 2096 | globalIDFldName = "GLOBALIDCopy_12_1" 2097 | arcpy.AddField_management(in_table=globalIDCopy, field_name=globalIDFldName, field_type="GUID", 2098 | field_precision="", field_scale="", field_length="", 2099 | field_alias="", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", 2100 | field_domain="") 2101 | 2102 | fms = arcpy.FieldMappings() 2103 | reportDataFields = arcpy.ListFields(dataset=reporting_areas) 2104 | for fld in reportDataFields: 2105 | if (fld.type <> "OID" and fld.type <> "GlobalID" and fld.type <> "Geometry"): 2106 | try: 2107 | if (fld.name <> desc.areaFieldName and fld.name <> desc.lengthFieldName): 2108 | fm = arcpy.FieldMap() 2109 | 2110 | fm.addInputField(globalIDCopy, fld.name) 2111 | outField = fm.outputField 2112 | outField.name = fld.name 2113 | fm.outputField = outField 2114 | fms.addFieldMap(fm) 2115 | except: 2116 | pass 2117 | 2118 | fm.addInputField(globalIDCopy, report_ID_field) 2119 | outField = fm.outputField 2120 | outField.name = globalIDFldName 2121 | fm.outputField = outField 2122 | fms.addFieldMap(fm) 2123 | arcpy.Append_management(reporting_areas, globalIDCopy, "NO_TEST", fms, "") 2124 | 2125 | # arcpy.CalculateField_management(globalIDCopy, globalIDFldName, 2126 | # "!" + report_ID_field + "!", "PYTHON_9.3") 2127 | 2128 | return {"ReportLayer": globalIDCopy, 2129 | "IDField": globalIDFldName} 2130 | 2131 | else: 2132 | return None 2133 | except arcpy.ExecuteError: 2134 | line, filename, synerror = trace() 2135 | raise ReportToolsError({ 2136 | "function": "validate_id_field", 2137 | "line": line, 2138 | "filename": filename, 2139 | "synerror": synerror, 2140 | "arcpyError": arcpy.GetMessages(2), 2141 | }) 2142 | except ReportToolsError, e: 2143 | raise e 2144 | except: 2145 | line, filename, synerror = trace() 2146 | raise ReportToolsError({ 2147 | "function": "validate_id_field", 2148 | "line": line, 2149 | "filename": filename, 2150 | "synerror": synerror, 2151 | }) 2152 | 2153 | 2154 | # ---------------------------------------------------------------------- 2155 | def copy_empty_report(reporting_areas, reporting_areas_ID_field, report_schema, report_result, reclass_map, 2156 | report_date_field, report_ID_field, reportParam, config, report_output_type): 2157 | _tempWorkspace = None 2158 | _reportCopy = None 2159 | _final_report = None 2160 | appendString = None 2161 | strOnlineTime = None 2162 | fields = None 2163 | 2164 | try: 2165 | _tempWorkspace = env.scratchGDB 2166 | 2167 | _reportCopy = Common.random_string_generator() 2168 | 2169 | _final_report = os.path.join(_tempWorkspace, _reportCopy) 2170 | 2171 | # Process: Create a copy of the Reporting Areas for the summary info join 2172 | arcpy.FeatureClassToFeatureClass_conversion(reporting_areas, _tempWorkspace, _reportCopy, "", 2173 | reporting_areas_ID_field + ' "' + reporting_areas_ID_field + '" true true false 50 Text 0 0 ,First,#,' + reporting_areas + ',' + reporting_areas_ID_field + ',-1,-1', 2174 | "") 2175 | 2176 | className = os.path.basename(report_result) 2177 | layerPath = os.path.dirname(report_result) 2178 | bUseWhere = True 2179 | 2180 | if report_output_type.upper() == "APPEND": 2181 | if arcpy.Exists(report_result) == False: 2182 | if arcpy.Exists(report_schema) == False: 2183 | raise ReportToolsError({ 2184 | "function": "copy_report_data_schema", 2185 | "line": 990, 2186 | "filename": 'reporttools', 2187 | "synerror": "%s could not be located" % report_schema 2188 | } 2189 | ) 2190 | arcpy.FeatureClassToFeatureClass_conversion(in_features=report_schema, 2191 | out_path=layerPath, 2192 | out_name=className, 2193 | where_clause=None, 2194 | field_mapping=None, 2195 | config_keyword=None) 2196 | appendString = report_ID_field + " \"\" true true false 80 string 0 0 ,First,#," + _final_report + "," + reporting_areas_ID_field + ",-1,-1;" 2197 | 2198 | arcpy.Append_management(_final_report, report_result, "NO_TEST", appendString, "") 2199 | # arcpy.Copy_management(report_schema,report_result,"FeatureClass") 2200 | elif report_output_type.upper() == "UPDATE": 2201 | bUseWhere = False 2202 | 2203 | 2204 | else: 2205 | 2206 | arcpy.FeatureClassToFeatureClass_conversion(in_features=report_schema, 2207 | out_path=layerPath, 2208 | out_name=className, 2209 | where_clause=None, 2210 | field_mapping=None, 2211 | config_keyword=None) 2212 | appendString = report_ID_field + " \"\" true true false 80 string 0 0 ,First,#," + _final_report + "," + reporting_areas_ID_field + ",-1,-1;" 2213 | 2214 | arcpy.Append_management(_final_report, report_result, "NO_TEST", appendString, "") 2215 | 2216 | # arcpy.Copy_management(report_schema,report_result,"FeatureClass") 2217 | 2218 | 2219 | 2220 | # strOnlineTime = Common.online_time_to_string(Common.local_time_to_online(),dateTimeFormat) 2221 | # strLocalTime = datetime.datetime.now().strftime(dateTimeFormat) 2222 | 2223 | fields = [] 2224 | with arcpy.da.Editor(layerPath): 2225 | for fld in reclass_map: 2226 | # fields.append(fld['FieldName']) 2227 | outLayer = "outLayer%s" % fld['FieldName'] 2228 | if bUseWhere: 2229 | where = '%s IS NULL' % fld['FieldName'] 2230 | else: 2231 | where = None 2232 | arcpy.MakeFeatureLayer_management(report_result, outLayer, where) 2233 | arcpy.CalculateField_management(outLayer, fld['FieldName'], 2234 | '0', "PYTHON_9.3") 2235 | 2236 | arcpy.CalculateField_management(report_result, report_date_field, 2237 | 'time.strftime(\'%d/%m/%Y %H:%M\')', "PYTHON_9.3") 2238 | # fields.append(report_date_field) 2239 | 2240 | # Start an edit session. Must provide the worksapce. 2241 | # edit = arcpy.da.Editor(layerPath) 2242 | 2243 | # Edit session is started without an undo/redo stack for versioned data 2244 | # (for second argument, use False for unversioned data) 2245 | # edit.startEditing(False, False) 2246 | 2247 | # Start an edit operation 2248 | # edit.startOperation() 2249 | 2250 | # with arcpy.da.UpdateCursor(report_result,fields) as urows: 2251 | # for row in urows: 2252 | # for u in range(len(fields) - 1): 2253 | # row[u]= str(Common.noneToValue( row[u],0.0)) 2254 | 2255 | 2256 | # row[len(fields)-1] = strOnlineTime 2257 | # urows.updateRow(row) 2258 | # del urows 2259 | # Stop the edit operation. 2260 | # edit.stopOperation() 2261 | 2262 | # Stop the edit session and save the changes 2263 | # edit.stopEditing(True) 2264 | # The current recordset that is completed, send it to a merged feature for CSV export process layer. 2265 | mergeAllReports(reportLayer=report_result, report=reportParam, config=config) 2266 | 2267 | deleteFC([_final_report]) 2268 | except arcpy.ExecuteError: 2269 | line, filename, synerror = trace() 2270 | raise ReportToolsError({ 2271 | "function": "copy_empty_report", 2272 | "line": line, 2273 | "filename": filename, 2274 | "synerror": synerror, 2275 | "arcpyError": arcpy.GetMessages(2), 2276 | } 2277 | ) 2278 | except: 2279 | line, filename, synerror = trace() 2280 | raise ReportToolsError({ 2281 | "function": "copy_empty_report", 2282 | "line": line, 2283 | "filename": filename, 2284 | "synerror": synerror, 2285 | } 2286 | ) 2287 | finally: 2288 | _tempWorkspace = None 2289 | _reportCopy = None 2290 | _final_report = None 2291 | appendString = None 2292 | strOnlineTime = None 2293 | fields = None 2294 | 2295 | del _tempWorkspace 2296 | del _reportCopy 2297 | del _final_report 2298 | del appendString 2299 | del strOnlineTime 2300 | del fields 2301 | 2302 | gc.collect() 2303 | 2304 | 2305 | # ---------------------------------------------------------------------- 2306 | def shapeBasedSpatialJoin(TargetLayer, JoinLayer, JoinResult): 2307 | _tempWorkspace = None 2308 | _targetCopyName = None 2309 | _targetCopy = None 2310 | _areaFieldName = None 2311 | _idenResultLayer = None 2312 | 2313 | layDetails = None 2314 | _lenAreaFld = None 2315 | 2316 | try: 2317 | if not arcpy.Exists(TargetLayer): 2318 | raise ValueError(TargetLayer + " does not exist") 2319 | if not arcpy.Exists(JoinLayer): 2320 | raise ValueError(JoinLayer + " does not exist") 2321 | # Local variables: 2322 | _tempWorkspace = env.scratchGDB 2323 | 2324 | _targetCopyName = Common.random_string_generator() 2325 | _targetCopy = os.path.join(_tempWorkspace, _targetCopyName) 2326 | # JoinResult = os.path.join(_tempWorkspace ,random_string_generator()) 2327 | _areaFieldName = Common.random_string_generator(size=12) 2328 | _idenResultLayer = "polyIdenLayer" 2329 | 2330 | _lenAreaFld = "SHAPE_Area" 2331 | 2332 | layDetails = arcpy.Describe(TargetLayer) 2333 | if layDetails.shapeType == "Polygon": 2334 | _lenAreaFld = "Shape_Area" 2335 | elif layDetails.shapeType == "Polyline": 2336 | _lenAreaFld = "Shape_Length" 2337 | else: 2338 | return "" 2339 | arcpy.FeatureClassToFeatureClass_conversion(TargetLayer, _tempWorkspace, _targetCopyName, "#", "", "#") 2340 | # Process: Copy 2341 | # Process: Add Field 2342 | arcpy.AddField_management(_targetCopy, _areaFieldName, "DOUBLE", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") 2343 | 2344 | # Process: Calculate Field 2345 | arcpy.CalculateField_management(_targetCopy, _areaFieldName, "!" + _lenAreaFld + "!", "PYTHON_9.3", "") 2346 | 2347 | # Process: Identity 2348 | arcpy.Identity_analysis(_targetCopy, JoinLayer, JoinResult, "ALL", "", "NO_RELATIONSHIPS") 2349 | 2350 | # Process: Make Feature Layer 2351 | arcpy.MakeFeatureLayer_management(JoinResult, _idenResultLayer, "", "", "") 2352 | 2353 | # Process: Select Layer By Attribute 2354 | arcpy.SelectLayerByAttribute_management(_idenResultLayer, "NEW_SELECTION", 2355 | _lenAreaFld + " < .5 * " + _areaFieldName) 2356 | 2357 | # Process: Delete Features 2358 | arcpy.DeleteFeatures_management(_idenResultLayer) 2359 | 2360 | deleteFC([_targetCopy]) 2361 | 2362 | return JoinResult 2363 | except arcpy.ExecuteError: 2364 | line, filename, synerror = trace() 2365 | raise ReportToolsError({ 2366 | "function": "shapeBasedSpatialJoin", 2367 | "line": line, 2368 | "filename": filename, 2369 | "synerror": synerror, 2370 | "arcpyError": arcpy.GetMessages(2), 2371 | } 2372 | ) 2373 | except: 2374 | line, filename, synerror = trace() 2375 | raise ReportToolsError({ 2376 | "function": "shapeBasedSpatialJoin", 2377 | "line": line, 2378 | "filename": filename, 2379 | "synerror": synerror, 2380 | } 2381 | ) 2382 | finally: 2383 | _tempWorkspace = None 2384 | _targetCopyName = None 2385 | _targetCopy = None 2386 | _areaFieldName = None 2387 | _idenResultLayer = None 2388 | 2389 | layDetails = None 2390 | _lenAreaFld = None 2391 | 2392 | del _tempWorkspace 2393 | del _targetCopyName 2394 | del _targetCopy 2395 | del _areaFieldName 2396 | del _idenResultLayer 2397 | 2398 | del layDetails 2399 | del _lenAreaFld 2400 | 2401 | gc.collect() 2402 | 2403 | 2404 | # ---------------------------------------------------------------------- 2405 | def JoinAndCalc(inputDataset, inputJoinField, joinTable, joinTableJoinField, copyFields, joinType="KEEP_ALL", 2406 | inputFilter="None"): 2407 | inputLayer = None 2408 | joinTableDesc = None 2409 | joinName = None 2410 | removeJoin = None 2411 | tz = None 2412 | dateExp = None 2413 | exp = None 2414 | 2415 | try: 2416 | 2417 | inputLayer = "inputLayer" 2418 | if inputFilter == '': 2419 | inputFilter = None 2420 | arcpy.MakeFeatureLayer_management(in_features=inputDataset, 2421 | out_layer=inputLayer, 2422 | where_clause=inputFilter) 2423 | 2424 | joinTableDesc = arcpy.Describe(joinTable) 2425 | joinName = str(joinTableDesc.name) 2426 | arcpy.AddJoin_management(inputLayer, inputJoinField, joinTable, joinTableJoinField, joinType) 2427 | removeJoin = True 2428 | 2429 | tz = time.timezone # num of seconds to add to GMT based on current TimeZone 2430 | workspace = os.path.dirname(inputDataset) 2431 | # edit = arcpy.da.Editor(workspace) 2432 | # edit.startEditing(False, True) 2433 | # edit.startOperation() 2434 | with arcpy.da.Editor(workspace) as edit: 2435 | for copyField in copyFields: 2436 | if len(copyField) == 3: 2437 | dateExp = "import time\\nimport datetime\\nfrom time import mktime\\nfrom datetime import datetime\\ndef calc(dt):\\n return datetime.fromtimestamp(mktime(time.strptime(str(dt), '" + str( 2438 | copyField[2]) + "')) + time.timezone)" 2439 | exp = 'calc(!' + joinName + '.' + copyField[0] + '!)' 2440 | arcpy.CalculateField_management(inputLayer, copyField[1], exp, 'PYTHON_9.3', dateExp) 2441 | 2442 | else: 2443 | arcpy.CalculateField_management(inputLayer, copyField[1], '!' + joinName + '.' + copyField[0] + '!', 2444 | "PYTHON_9.3", "") 2445 | 2446 | print copyField[1] + " Calculated from " + copyField[0] 2447 | 2448 | arcpy.RemoveJoin_management(inputLayer, joinName) 2449 | except arcpy.ExecuteError: 2450 | line, filename, synerror = trace() 2451 | raise ReportToolsError({ 2452 | "function": "JoinAndCalc", 2453 | "line": line, 2454 | "filename": filename, 2455 | "synerror": synerror, 2456 | "arcpyError": arcpy.GetMessages(2), 2457 | } 2458 | ) 2459 | except: 2460 | line, filename, synerror = trace() 2461 | raise ReportToolsError({ 2462 | "function": "JoinAndCalc", 2463 | "line": line, 2464 | "filename": filename, 2465 | "synerror": synerror, 2466 | } 2467 | ) 2468 | finally: 2469 | inputLayer = None 2470 | joinTableDesc = None 2471 | joinName = None 2472 | removeJoin = None 2473 | tz = None 2474 | dateExp = None 2475 | exp = None 2476 | 2477 | del inputLayer 2478 | del joinTableDesc 2479 | del joinName 2480 | del removeJoin 2481 | del tz 2482 | del dateExp 2483 | del exp 2484 | 2485 | 2486 | # ---------------------------------------------------------------------- 2487 | def fieldsToFieldArray(featureclass): 2488 | """fieldsToFieldArray(featureclass) 2489 | 2490 | Converts fields to a list 2491 | 2492 | featureclass(String): 2493 | The specified feature class or table whose fields will be returned. 2494 | 2495 | """ 2496 | fieldList = None 2497 | try: 2498 | fieldList = arcpy.ListFields(featureclass) 2499 | returnFields = [] 2500 | for field in fieldList: 2501 | returnFields.append(field.name) 2502 | 2503 | return returnFields 2504 | except: 2505 | line, filename, synerror = trace() 2506 | raise ReportToolsError({ 2507 | "function": "fieldsToFieldArray", 2508 | "line": line, 2509 | "filename": filename, 2510 | "synerror": synerror, 2511 | } 2512 | ) 2513 | finally: 2514 | fieldList = None 2515 | 2516 | del fieldList 2517 | 2518 | gc.collect() 2519 | 2520 | 2521 | # ---------------------------------------------------------------------- 2522 | def FieldExist(featureclass, fieldNames): 2523 | """FieldExist(dataset, [fieldNames]) 2524 | 2525 | Determines if the array of fields exist in the dataset 2526 | 2527 | dataset(String): 2528 | The specified feature class or table whose indexes will be returned. 2529 | 2530 | fieldNames{Array}: 2531 | The the array of field name to verify existance.""" 2532 | fieldList = None 2533 | fndCnt = None 2534 | 2535 | try: 2536 | fieldList = arcpy.ListFields(featureclass) 2537 | fndCnt = 0 2538 | for field in fieldList: 2539 | if field.name in fieldNames: 2540 | fndCnt = fndCnt + 1 2541 | 2542 | if fndCnt > 0: 2543 | return True 2544 | del field 2545 | if fndCnt != len(fieldNames): 2546 | return False 2547 | 2548 | except: 2549 | line, filename, synerror = trace() 2550 | raise ReportToolsError({ 2551 | "function": "FieldExist", 2552 | "line": line, 2553 | "filename": filename, 2554 | "synerror": synerror, 2555 | } 2556 | ) 2557 | finally: 2558 | fieldList = None 2559 | fndCnt = None 2560 | 2561 | del fieldList 2562 | del fndCnt 2563 | 2564 | gc.collect() 2565 | 2566 | 2567 | # ---------------------------------------------------------------------- 2568 | def calc_field(inputDataset, field_map, code_exp, result_field): 2569 | res = None 2570 | sqlState = None 2571 | replaceValList = None 2572 | newList = None 2573 | 2574 | try: 2575 | 2576 | replaceValList = [] 2577 | newList = [] 2578 | for fld in field_map: 2579 | newList.append(fld['FieldName']) 2580 | replaceValList.append(fld['ReplaceValue']) 2581 | newList.append(result_field) 2582 | with arcpy.da.UpdateCursor(inputDataset, newList) as cursor: 2583 | 2584 | for row in cursor: 2585 | sqlState = code_exp 2586 | try: 2587 | for i in range(0, len(replaceValList)): 2588 | sqlState = sqlState.replace(replaceValList[i], str(row[i])) 2589 | 2590 | res = eval(sqlState) 2591 | row[len(newList) - 1] = res 2592 | cursor.updateRow(row) 2593 | 2594 | except Exception: 2595 | cursor.deleteRow() 2596 | del row 2597 | del cursor 2598 | except arcpy.ExecuteError: 2599 | line, filename, synerror = trace() 2600 | raise ReportToolsError({ 2601 | "function": "JoinAndCalc", 2602 | "line": line, 2603 | "filename": filename, 2604 | "synerror": synerror, 2605 | "arcpyError": arcpy.GetMessages(2), 2606 | } 2607 | ) 2608 | except: 2609 | line, filename, synerror = trace() 2610 | raise ReportToolsError({ 2611 | "function": "calc_field", 2612 | "line": line, 2613 | "filename": filename, 2614 | "synerror": synerror, 2615 | }) 2616 | finally: 2617 | res = None 2618 | sqlState = None 2619 | replaceValList = None 2620 | newList = None 2621 | 2622 | del res 2623 | del sqlState 2624 | del replaceValList 2625 | del newList 2626 | 2627 | gc.collect() 2628 | 2629 | 2630 | # ---------------------------------------------------------------------- 2631 | def calculate_age_field(inputDataset, field, result_field): 2632 | newList = None 2633 | try: 2634 | 2635 | newList = [field, result_field] 2636 | with arcpy.da.UpdateCursor(inputDataset, newList) as cursor: 2637 | 2638 | for row in cursor: 2639 | if row[0] == None: 2640 | cursor.deleteRow() 2641 | else: 2642 | row[1] = datetime.datetime.now().year - row[0].year 2643 | cursor.updateRow(row) 2644 | del row 2645 | del cursor 2646 | except arcpy.ExecuteError: 2647 | line, filename, synerror = trace() 2648 | raise ReportToolsError({ 2649 | "function": "JoinAndCalc", 2650 | "line": line, 2651 | "filename": filename, 2652 | "synerror": synerror, 2653 | "arcpyError": arcpy.GetMessages(2), 2654 | } 2655 | ) 2656 | except: 2657 | line, filename, synerror = trace() 2658 | raise ReportToolsError({ 2659 | "function": "calculate_age_field", 2660 | "line": line, 2661 | "filename": filename, 2662 | "synerror": synerror, 2663 | }) 2664 | finally: 2665 | newList = None 2666 | 2667 | del newList 2668 | 2669 | gc.collect() 2670 | 2671 | 2672 | # ---------------------------------------------------------------------- 2673 | def calculate_inline_stats(inputDataset, fields, result_field, stats_method): 2674 | """calculate_inline_stats(inputDataset,(field1, field2,..),resultField,) 2675 | 2676 | Calculates stats on the input table 2677 | 2678 | dataset(String): 2679 | The specified feature class or table 2680 | 2681 | fields(field1,field2,..): 2682 | List of fields to perform stats on 2683 | 2684 | result_field: 2685 | Field to store the results on 2686 | 2687 | stats_method: 2688 | Type of stats to perform 2689 | """ 2690 | lstLen = None 2691 | newList = None 2692 | cnt = None 2693 | val = None 2694 | minVal = None 2695 | maxVal = None 2696 | try: 2697 | 2698 | lstLen = len(fields) 2699 | newList = deepcopy(fields) 2700 | newList.append(result_field) 2701 | with arcpy.da.UpdateCursor(inputDataset, tuple(newList)) as cursor: 2702 | 2703 | for row in cursor: 2704 | row.pop(lstLen) 2705 | if stats_method.upper() == "AVERAGE" or stats_method.upper() == "AVG" or stats_method.upper() == "MEAN": 2706 | cnt = 0 2707 | val = 0 2708 | for i in row: 2709 | if i is not None: 2710 | cnt += 1 2711 | val += i 2712 | row.append(val / cnt) 2713 | elif stats_method.upper() == "MIN" or stats_method.upper() == "MINIMUM": 2714 | minVal = min(i for i in row if i is not None) 2715 | row.append(minVal) 2716 | elif stats_method.upper() == "MAX" or stats_method.upper() == "MAXIMUM": 2717 | maxVal = max(i for i in row if i is not None) 2718 | row.append(maxVal) 2719 | cursor.updateRow(row) 2720 | del row 2721 | del cursor 2722 | 2723 | except: 2724 | line, filename, synerror = trace() 2725 | raise ReportToolsError({ 2726 | "function": "calculate_inline_stats", 2727 | "line": line, 2728 | "filename": filename, 2729 | "synerror": synerror, 2730 | }) 2731 | finally: 2732 | lstLen = None 2733 | newList = None 2734 | cnt = None 2735 | val = None 2736 | minVal = None 2737 | maxVal = None 2738 | 2739 | del lstLen 2740 | del newList 2741 | del cnt 2742 | del val 2743 | del minVal 2744 | del maxVal 2745 | 2746 | gc.collect() 2747 | 2748 | 2749 | # ---------------------------------------------------------------------- 2750 | def deleteFC(in_datasets): 2751 | for in_data in in_datasets: 2752 | try: 2753 | if in_data is not None: 2754 | if arcpy.Exists(dataset=in_data): 2755 | arcpy.Delete_management(in_data=in_data) 2756 | 2757 | except Exception: 2758 | print "Unable to delete %s" % in_data 2759 | 2760 | 2761 | # ---------------------------------------------------------------------- 2762 | # Function to merge run time reports into a temp feature class to export to CSV. 2763 | # This create a new FC with the first report, then just append fields for subsequent reports. 2764 | def mergeAllReports(reportLayer, report, config): 2765 | fieldList = None 2766 | fieldNames = None 2767 | _tempWorkspace = None 2768 | _mergedFeature = None 2769 | _mergedFeaturePath = None 2770 | spatRef = arcpy.Describe(reportLayer).spatialReference 2771 | try: 2772 | if not 'ReportMerge' in report: 2773 | report['ReportMerge'] = 'TRUE' 2774 | print "Report is missing the ReportMerge parameter: type string, values, True or False" 2775 | if report['ReportMerge'].upper() == "YES" or report['ReportMerge'].upper() == "TRUE": 2776 | _tempWorkspace = config["ResultsGDB"] 2777 | _mergedFeature = tempCSVName 2778 | _mergedFeaturePath = os.path.join(_tempWorkspace, _mergedFeature) 2779 | 2780 | if arcpy.Exists(_mergedFeaturePath) == False: 2781 | print "%s created" % _mergedFeaturePath 2782 | arcpy.CreateFeatureclass_management(_tempWorkspace, _mergedFeature, "POLYGON", reportLayer, "DISABLED", 2783 | "DISABLED", spatRef, "", "0", "0", "0") 2784 | arcpy.Append_management(reportLayer, _mergedFeaturePath, "TEST", "", "") 2785 | # arcpy.DeleteField_management(_mergedFeaturePath, ["SHAPE_LENGTH", "SHAPE_AREA"]) 2786 | else: 2787 | fieldNames = [] 2788 | fieldList = arcpy.ListFields(reportLayer) 2789 | for field in fieldList: 2790 | # print field.name 2791 | if (field.name.upper() != "SHAPE") & (field.name.upper() != "SHAPE_LENGTH") & ( 2792 | field.name.upper() != "SHAPE_AREA") & (field.name != report["ReportDateField"]) & ( 2793 | field.name != report["ReportIDField"]): 2794 | fieldNames.append(field.name) 2795 | 2796 | fieldNames = ';'.join(fieldNames) 2797 | 2798 | arcpy.JoinField_management(_mergedFeaturePath, report["ReportIDField"], reportLayer, 2799 | report["ReportIDField"], fieldNames) 2800 | 2801 | except arcpy.ExecuteError: 2802 | line, filename, synerror = trace() 2803 | raise ReportToolsError({ 2804 | "function": "mergeAllReports", 2805 | "line": line, 2806 | "filename": filename, 2807 | "synerror": synerror, 2808 | "arcpyError": arcpy.GetMessages(2), 2809 | } 2810 | ) 2811 | except: 2812 | line, filename, synerror = trace() 2813 | raise ReportToolsError({ 2814 | "function": "mergeAllReports", 2815 | "line": line, 2816 | "filename": filename, 2817 | "synerror": synerror, 2818 | }) 2819 | finally: 2820 | 2821 | del _tempWorkspace 2822 | del _mergedFeature 2823 | del _mergedFeaturePath 2824 | del fieldList 2825 | del fieldNames 2826 | gc.collect() 2827 | --------------------------------------------------------------------------------