]},
234 | 'Topology': {},
235 | 'Workspace': {}}}
236 |
--------------------------------------------------------------------------------
/DescribeObjectReport/README.md:
--------------------------------------------------------------------------------
1 | # Describe-Object-Report
2 |
3 | **Demonstrates**
4 |
5 | * Use of decorator
6 | * Command-line argument processing
7 | * Use of arcpy's extensive collection of Describe objects
8 |
9 | **Description:**
10 |
11 | Creates a report of all arcpy describe-object properties for an object.
12 |
13 | **Usage:**
14 |
Command line arguments must include a verbose or terse flag (-v or -t) as
the first argument. The difference between the two is that verbose will
show you the attributes that have no value, whereas terse will not.
Subsequent arguments are each of the describe properties being asked for
in the report. These are optional, and all properties will be reported if
these arguments are not provided.
21 |
22 | **Input:**
23 |
24 | NOTE: Be sure to update file_list.py before running!
25 | > python describe_reporter.py -v
26 | > python describe_reporter.py -t
27 | > python describe_reporter.py -v "General Describe" "Layer" "Table"
28 | > python describe_reporter.py -t "General Describe" "Workspace"
29 |
30 | **Caveats**
31 |
32 | * The output will be overwritten unless you change the output's path in the source code
33 |
34 | **Output Format (in terse mode)**
35 |
36 | {
37 | Filename 1: {
38 | Describe Object Class 1: {
39 | Property 1: output,
            Property 2: {},
41 | }
42 | }
43 | Filename 2: 'FILE NOT FOUND'
44 | }
45 |
--------------------------------------------------------------------------------
/DescribeObjectReport/describe_reporter.py:
--------------------------------------------------------------------------------
1 | """ describe_reporter.py
2 | Andrew Ortego
3 | 8/24/2016
4 |
5 | TODO
6 | * Alter output with pprint so that it's all aligned (see output formatting)
7 | * Web-scrape script which gathering all Desc. object properties
8 | * Handle properties that have no attributes (currently commented)
9 |
10 | DESCRIPTION
11 | Creates a report of all arcpy describe-object properties for an object.
12 |
13 | INPUT
14 | Command line arguments must include a verbose or terse flag (-v or -t) as
    the first argument. The difference between the two is that verbose will
    show you the attributes that have no value, whereas terse will not.
    Subsequent arguments are each of the describe properties being asked for
    in the report. These are optional, and all properties will be reported if
    these arguments are not provided.
20 |
21 | CAVEATS
22 | * The output will be overwritten unless you change the output's path.
23 |
24 | SAMPLE USAGE
25 | > python describe_reporter.py -v
26 | > python describe_reporter.py -t
27 | > python describe_reporter.py -v "General Describe" "Layer" "Table"
28 | > python describe_reporter.py -t "General Describe" "Workspace"
29 |
30 | SAMPLE OUTPUT (in terse mode)
31 | {
32 | Filename 1: {
33 | Describe Object Class 1: {
34 | Property 1: output,
            Property 2: {},
36 | }
37 | }
38 | Filename 2: 'FILE NOT FOUND'
39 | }
40 | """
41 |
42 | import arcpy
43 | import pickle, os, sys, time
44 | from pprint import pprint
45 | from functools import wraps
46 | from collections import OrderedDict as od
47 | try:
48 | from file_list import user_files
49 | except ImportError as e:
50 | print("ERROR: Could not find list of files to be scanned. Please verify")
51 | print("that file_list.py is in the same directory as this script, and")
52 | print("that it contains a list called user_files which holds each path")
53 | print("to your files.")
54 | print("EXAMPLE: user_files = [u'C:\\Users\\andr7495\\Desktop\\KML.kml']")
55 | raise SystemExit
56 |
# Describe object classes and the arcpy Describe properties each exposes,
# keyed by the class name the user may pass on the command line.  Classes
# that are commented out currently contribute no properties of interest.
# NOTE(review): 'RepresentationClass' currently lists the same properties as
# 'RelationshipClass' (likely a copy-paste) -- verify against the arcpy
# Describe documentation before relying on it.
properties = {
    'General Describe': {
        'baseName',
        'catalogPath',
        'children',
        'childrenExpanded',
        'dataElementType',
        'dataType',
        'extension',
        'file',
        'fullPropsRetrieved',
        'metadataRetrieved',
        'name',
        'path',
    },

    'ArcInfo Workstation Item': {
        'alternateName',
        'isIndexed',
        'isPseudo',
        'isRedefined',
        'itemType',
        'numberDecimals',
        'outputWidth',
        'startPosition',
        'width',
    },

    'ArcInfo Workstation Table': {
        'itemSet',
    },

    'CAD Drawing Dataset': {
        'is2D',
        'is3D',
        'isAutoCAD',
        'isDGN',
    },

    #'CAD FeatureClass': {},

    'Cadastral Fabric': {
        'bufferDistanceForAdjustment',
        'compiledAccuracyCategory',
        'defaultAccuracyCategory',
        'maximumShiftThreshold',
        'multiGenerationEditing',
        'multiLevelReconcile',
        'pinAdjustmentBoundary',
        'pinAdjustmentPointsWithinBoundary',
        'surrogateVersion',
        'type',
        'version',
        'writeAdjustmentVectors',
    },

    'Coverage FeatureClass': {
        'featureClassType',
        'hasFAT',
        'topology',
    },

    'Coverage': {
        'tolerances',
    },

    'Dataset': {
        'canVersion',
        'changeTracked',
        'datasetType',
        'DSID',
        'extent',
        'isArchived',
        'isVersioned',
        'MExtent',
        'spatialReference',
        'ZExtent',
    },

    #'dBase': {},

    'FeatureClass': {
        'featureType',
        'hasM',
        'hasZ',
        'hasSpatialIndex',
        'shapeFieldName',
        'shapeType',
    },

    'GDB FeatureClass': {
        'areaFieldName',
        'geometryStorage',
        'lengthFieldName',
        'representations',
    },

    'GDB Table': {
        'aliasName',
        'defaultSubtypeCode',
        'extensionProperties',
        'globalIDFieldName',
        'hasGlobalID',
        'modelName',
        'rasterFieldName',
        'relationshipClassNames',
        'subtypeFieldName',
        'versionedView',
    },

    'Geometric Network': {
        'featureClassNames',
        'networkType',
        'orphanJunctionFeatureClassName',
    },

    'LAS Dataset': {
        'constraintCount',
        'fileCount',
        'hasStatistics',
        'needsUpdateStatistics',
        'pointCount',
        'usesRelativePath',
    },

    'Layer': {
        'dataElement',
        'featureClass',
        'FIDSet',
        'fieldInfo',
        'layer',
        'nameString',
        'table',
        'whereClause',
    },

    #'Map Document': {},

    # Fixed key: was misspelled 'Moscaic Dataset'; the arcpy Describe class
    # is "Mosaic Dataset".
    'Mosaic Dataset': {
        'allowedCompressionMethods',
        'allowedFields',
        'allowedMensurationCapabilities',
        'allowedMosaicMethods',
        'applyColorCorrection',
        'blendWidth',
        'blendWidthUnits',
        'cellSizeToleranceFactor',
        'childrenNames',
        'clipToBoundary',
        'clipToFootprint',
        'defaultCompressionMethod',
        'defaultMensurationCapability',
        'defaultMosaicMethod',
        'MosaicOperator',
        'defaultResamplingMethod',
        'SortAscending',
        'endTimeField',
        'footprintMayContainNoData',
        'GCSTransforms',
        'JPEGQuality',
        'LERCTolerance',
        'maxDownloadImageCount',
        'maxDownloadSizeLimit',
        'maxRastersPerMosaic',
        'maxRecordsReturned',
        'maxRequestSizeX',
        'maxRequestSizeY',
        'minimumPixelContribution',
        'orderBaseValue',
        'orderField',
        'rasterMetadataLevel',
        'referenced',
        'startTimeField',
        'timeValueFormat',
        'useTime',
        'viewpointSpacingX',
        'viewpointSpacingY',
    },

    'Network Analyst Layer': {
        'network',
        'nameString',
        'solverName',
        'impedance',
        'accumulators',
        'restrictions',
        'ignoreInvalidLocations',
        'uTurns',
        'useHierarchy',
        'hierarchyAttribute',
        'hierarchyLevelCount',
        'maxValueForHierarchyX',
        'locatorCount',
        'locators',
        'findClosest',
        'searchTolerance',
        'excludeRestrictedElements',
        'solverProperties',
        'children',
        'parameterCount',
        'parameters',
    },

    'Prj File': {
        'spatialReference',
    },


    'Raster Band': {
        'height',
        'isInteger',
        'meanCellHeight',
        'meanCellWidth',
        'noDataValue',
        'pixelType',
        'primaryField',
        'tableType',
        'width',
    },


    'Raster Catalog': {
        'rasterFieldName',
    },

    'Raster Dataset': {
        'bandCount',
        'compressionType',
        'format',
        'permanent',
        'sensorType',
    },

    'RecordSet and FeatureSet': {
        'json',
        'pjson',
    },

    'RelationshipClass': {
        'backwardPathLabel',
        'cardinality',
        'classKey',
        'destinationClassKeys',
        'destinationClassNames',
        'forwardPathLabel',
        'isAttachmentRelationship',
        'isAttributed',
        'isComposite',
        'isReflexive',
        'keyType',
        'notification',
        'originClassNames',
        'originClassKeys',
        'relationshipRules',
    },

    'RepresentationClass': {
        'backwardPathLabel',
        'cardinality',
        'classKey',
        'destinationClassKeys',
        'destinationClassNames',
        'forwardPathLabel',
        'isAttachmentRelationship',
        'isAttributed',
        'isComposite',
        'isReflexive',
        'keyType',
        'notification',
        'originClassNames',
        'originClassKeys',
        'relationshipRules',
    },

    #'Schematic Dataset': {},

    'Schematic Diagram': {
        'diagramClassName',
    },

    #'Schematic Folder': {},

    #'SDC FeatureClass': {},

    #'Shapefile FeatureClass': {},

    'Table': {
        'hasOID',
        'OIDFieldName',
        'fields',
        'indexes',
    },

    'TableView': {
        'table',
        'FIDSet',
        'fieldInfo',
        'whereClause',
        'nameString',
    },

    #'Text File': {},

    'Tin': {
        'fields',
        'hasEdgeTagValues',
        'hasNodeTagValues',
        'hasTriangleTagValues',
        'isDelaunay',
        'ZFactor',
    },

    #'Tool': {},

    #'Toolbox': {},

    'Topology': {
        'clusterTolerance',
        'featureClassNames',
        'maximumGeneratedErrorCount',
        'ZClusterTolerance',
    },

    #'VPF Coverage': {},

    #'VPF FeatureClass': {},

    # Fixed comment: the original read "#VPF Table': {}" (missing quote).
    #'VPF Table': {},

    'Workspace': {
        'connectionProperties',
        'connectionString',
        'currentRelease',
        'domains',
        'release',
        'workspaceFactoryProgID',
        'workspaceType',
    },
}
396 |
def timethis(func):
    """Decorator that reports how long the wrapped call took.

    Prints the output-report path (cwd/'Describe Report.txt') together with
    the elapsed wall-clock time, then returns the wrapped function's result.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        start = time.time()
        result = func(*args, **kwargs)
        elapsed = time.time() - start
        # Round to two decimals instead of whole seconds so that sub-second
        # runs do not report "0s".
        print("Created {0} in {1}s".format(
            os.path.join(os.getcwd(), u'Describe Report.txt'),
            round(elapsed, 2)))
        return result
    return wrapper
410 |
def set_mode(user_input):
    """Return True for verbose mode, False for terse.

    The mode comes from the first command-line argument, which must be -v
    (verbose) or -t (terse).  A missing or unrecognized flag prints usage
    help and exits; the original duplicated the help text in two branches,
    so both failure cases are folded into a single check here.

    user_input: argv-style list; index 0 is the program name.
    """
    if len(user_input) < 2 or user_input[1] not in ("-v", "-t"):
        print("ERROR : Report mode not selected.")
        print("SOLUTION: Use -v or -t (verbose or terse) as first argument.")
        print("EXAMPLE : > python describe_reporter.py -v Layer Table Workspace")
        raise SystemExit
    return user_input[1] == "-v"
427 |
428 |
def check_prop_list(user_types):
    """Return the Describe classes whose properties should be reported.

    user_types: Describe class names supplied on the command line.  Names
    not found in the module-level ``properties`` table are printed as a
    warning and dropped.  An empty/absent list selects every known class.

    The original built ``queried_types`` via a list comprehension executed
    purely for its side effects; this uses plain filtering instead.
    """
    if not user_types:
        return list(properties)
    queried_types = [t for t in user_types if t in properties]
    invalid_types = [t for t in user_types if t not in properties]
    if invalid_types:
        print("WARNING! Describe Types will not be included in report:")
        for t in invalid_types:
            print(t)
    return queried_types
446 |
447 |
@timethis
def generate_report(verbose_mode, property_list, user_files):
    """Write 'Describe Report.txt' (in the cwd) describing each user file.

    verbose_mode  -- when True, properties a file lacks are reported with an
                     'ATTRIBUTE ERROR' marker instead of being omitted
    property_list -- Describe class names (keys of ``properties``) to report
    user_files    -- paths to inspect; missing ones map to 'FILE NOT FOUND'

    The report is a pprint'ed dict: {path: {class: {property: value}}}.
    """
    report_results = {}
    report_file = os.path.join(os.getcwd(), u'Describe Report.txt')
    # 'with' guarantees the report file is closed even if Describe raises.
    with open(report_file, 'wt') as report_path:
        for f in user_files:
            if not arcpy.Exists(f):
                report_results[f] = 'FILE NOT FOUND'
                continue
            # Describe once per file instead of once per property.
            desc = arcpy.Describe(f)
            desc_dict = od()
            for d_class in sorted(property_list):
                desc_dict[d_class] = {}
                for p in properties[d_class]:
                    try:
                        # getattr replaces the original eval() -- same
                        # attribute lookup, without executing code built
                        # from data.
                        desc_dict[d_class][p] = getattr(desc, p)
                    except AttributeError:
                        if verbose_mode:
                            desc_dict[d_class][p] = 'ATTRIBUTE ERROR: Method not found'
            report_results[f] = desc_dict
        pprint(report_results, report_path, width=400)
473 |
474 |
if __name__ == "__main__":
    # Collect user input, check report mode, clean the list of requested
    # Describe classes, and generate the report.
    user_input = list(sys.argv)
    verbose_mode = set_mode(user_input)
    cleaned_user_types = check_prop_list(user_input[2:])
    generate_report(verbose_mode, cleaned_user_types, user_files)
    print('\nfin.')
484 |
485 |
--------------------------------------------------------------------------------
/DescribeObjectReport/file_list.py:
--------------------------------------------------------------------------------
# Files to be scanned by describe_reporter.py -- edit this list before running.
user_files = [
    u'C:\\arcgis\\ArcTutor\\Editing\\Zion.gdb\\Research_areas',
    u'C:\\arcgis\\ArcTutor\\Editing\\Zion.gdb',
    u'C:\\arcgis\\ArcTutor\\Editing\\Hillshade.png',
    # The original wrote "CAD\ManhattanKS" with a single backslash -- an
    # invalid escape sequence Python only tolerates by accident; the value
    # is unchanged with the backslash escaped explicitly.
    u'C:\\arcgis\\ArcTutor\\CAD\\ManhattanKS\\MillerRanch.dwg',
]
--------------------------------------------------------------------------------
/FeaturesToGPX/FeaturesToGPX.py:
--------------------------------------------------------------------------------
1 | '''
2 | Tool Name: Features to GPX
3 | Source Name: FeaturesToGPX.py
4 | Version: ArcGIS 10.1+ or ArcGIS Pro 1.0+
5 | Author: Esri
6 | Contributors: Matt Wilkie
7 | (https://github.com/maphew/arcgiscom_tools/blob/master/Features_to_GPX/FeaturesToGPX.py)
8 |
9 | Required Arguments:
10 | Input Features (features): path to layer or featureclass on disk
11 | Output Feature Class (file): path to GPX which will be created
Optional Arguments:
    Zero date (boolean): If no date exists, use this option to force dates to epoch
14 | start, 1970-Jan-01. This will allow GPX files to open in Garmin Basecamp
15 | Pretty (boolean): Output gpx file will be "pretty", or easier to read.
16 |
17 | Description:
18 | This tool takes input features (layers or featureclass) with either point or
19 | line geometry and converts into a .GPX file. Points and multipoint features
20 | are converted in to WPTs, lines are converted into TRKS. If the features conform
21 | to a known schema, the output GPX file will honor those fields.
22 | '''
23 |
24 | try:
25 | from xml.etree import cElementTree as ET
26 | except:
27 | from xml.etree import ElementTree as ET
28 | import arcpy
29 | import time
30 | import datetime
31 | unicode = str
32 |
# Root <gpx> element shared by the whole module; wpt/trk children are
# appended to it as features are processed.
# NOTE(review): the 'xalan' and 'xsi' keyword arguments emit plain
# attributes named "xalan"/"xsi", not namespaced "xmlns:xalan"/"xmlns:xsi"
# declarations -- confirm whether any consumer relies on this before changing.
gpx = ET.Element("gpx", xmlns="http://www.topografix.com/GPX/1/1",
                 xalan="http://xml.apache.org/xalan",
                 xsi="http://www.w3.org/2001/XMLSchema-instance",
                 creator="Esri",
                 version="1.1")
38 |
39 |
def prettify(elem):
    """Serialize *elem* and return it as an indented, human-readable XML
    string (includes the XML declaration).
    """
    from xml.dom import minidom
    raw_bytes = ET.tostring(elem, 'utf-8')
    return minidom.parseString(raw_bytes).toprettyxml(indent="  ")
47 |
48 |
49 |
def featuresToGPX(inputFC, outGPX, zerodate, pretty):
    """Convert *inputFC* (point/multipoint/polyline features) to the GPX
    file *outGPX*.  Called by __main__ when run as a tool or script.

    inputFC  -- path to a feature layer/class on disk
    outGPX   -- path of the GPX file to create (overwritten)
    zerodate -- stamp features with 1970-Jan-01 when no date field exists
    pretty   -- write indented, human-readable XML instead of compact bytes
    """
    descInput = arcpy.Describe(inputFC)
    if descInput.spatialReference.factoryCode != 4326:
        arcpy.AddWarning("Input data is not projected in WGS84,"
                         " features were reprojected on the fly to create the GPX.")

    # Populate the module-level 'gpx' tree from the features.
    generatePointsFromFeatures(inputFC, descInput, zerodate)

    # Write the output GPX file.  'gpxFile' starts as None so the finally
    # clause cannot raise NameError when open() itself fails (the original
    # referenced an unbound local in that case).
    gpxFile = None
    try:
        if pretty:
            gpxFile = open(outGPX, "w")
            gpxFile.write(prettify(gpx))
        else:
            gpxFile = open(outGPX, "wb")
            ET.ElementTree(gpx).write(gpxFile, encoding="UTF-8", xml_declaration=True)
    except TypeError as e:
        arcpy.AddError("Error serializing GPX into the file.")
    finally:
        if gpxFile is not None:
            gpxFile.close()
73 |
74 |
75 |
def generatePointsFromFeatures(inputFC, descInput, zerodate=False):
    """Walk every feature (exploded to points) of *inputFC* and append
    wpt/trk elements to the module-level ``gpx`` root element.

    inputFC   -- feature class/layer to read
    descInput -- arcpy.Describe result for inputFC (shapeType and hasZ read)
    zerodate  -- when True and no DATETIMES field exists, stamp points with
                 the epoch date (1970-01-01T00:00:00Z)
    """

    def attHelper(row):
        # helper function to get/set field attributes for output gpx file

        pnt = row[1].getPart()
        valuesDict["PNTX"] = str(pnt.X)
        valuesDict["PNTY"] = str(pnt.Y)

        # Prefer the geometry Z; otherwise fall back to the ELEVATION field
        # or "0".  NOTE(review): a genuine Z of exactly 0 fails the truthy
        # test and falls through to the field/"0" branch -- confirm intended.
        Z = pnt.Z if descInput.hasZ else None
        if Z or ("ELEVATION" in cursorFields):
            valuesDict["ELEVATION"] = str(Z) if Z else str(row[fieldNameDict["ELEVATION"]])
        else:
            valuesDict["ELEVATION"] = str(0)

        valuesDict["NAME"] = row[fieldNameDict["NAME"]] if "NAME" in fields else " "
        valuesDict["DESCRIPT"] = row[fieldNameDict["DESCRIPT"]] if "DESCRIPT" in fields else " "


        # Time comes from the DATETIMES field when present; otherwise the
        # epoch date when zerodate is set, else a blank placeholder.
        if "DATETIMES" in fields:
            row_time = row[fieldNameDict["DATETIMES"]]
            formatted_time = row_time if row_time else " "
        elif zerodate and "DATETIMES" not in fields:
            formatted_time = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(0))
        else:
            formatted_time = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(0)) if zerodate else " "

        valuesDict["DATETIMES"] = formatted_time

        return
    #-------------end helper function-----------------


    def getValuesFromFC(inputFC, cursorFields ):
        # Generator: loads each point's attributes into valuesDict via
        # attHelper, then yields ("trk"|"wpt", newPart-flag).

        previousPartNum = 0
        startTrack = True

        # Loop through all features and parts
        with arcpy.da.SearchCursor(inputFC, cursorFields, spatial_reference="4326", explode_to_points=True) as searchCur:
            for row in searchCur:
                if descInput.shapeType == "Polyline":
                    for part in row:
                        try:
                            newPart = False
                            # A new OID (or the very first row) starts a new
                            # track part.
                            if not row[0] == previousPartNum or startTrack is True:
                                startTrack = False
                                newPart = True
                            previousPartNum = row[0]

                            attHelper(row)
                            yield "trk", newPart
                        except:
                            arcpy.AddWarning("Problem reading values for row: {}. Skipping.".format(row[0]))

                elif descInput.shapeType == "Multipoint" or descInput.shapeType == "Point":
                    # check to see if data was original GPX with "Type" of "TRKPT" or "WPT"
                    trkType = row[fieldNameDict["TYPE"]].upper() if "TYPE" in fields else None
                    try:
                        attHelper(row)

                        if trkType == "TRKPT":
                            newPart = False
                            if previousPartNum == 0:
                                newPart = True
                                previousPartNum = 1

                            yield "trk", newPart

                        else:
                            yield "wpt", None
                    except:
                        arcpy.AddWarning("Problem reading values for row: {}. Skipping.".format(row[0]))

    # ---------end get values function-------------


    # Get list of available fields (upper-cased for case-insensitive match).
    fields = [f.name.upper() for f in arcpy.ListFields(inputFC)]
    valuesDict = {"ELEVATION": 0, "NAME": "", "DESCRIPT": "", "DATETIMES": "", "TYPE": "", "PNTX": 0, "PNTY": 0}
    fieldNameDict = {"ELEVATION": 0, "NAME": 1, "DESCRIPT": 2, "DATETIMES": 3, "TYPE": 4, "PNTX": 5, "PNTY": 6}

    cursorFields = ["OID@", "SHAPE@"]

    # Map each recognized field name to its index in the cursor row.
    for key, item in valuesDict.items():
        if key in fields:
            fieldNameDict[key] = len(cursorFields) # assign current index
            cursorFields.append(key) # build up list of fields for cursor
        else:
            fieldNameDict[key] = None

    for index, gpxValues in enumerate(getValuesFromFC(inputFC, cursorFields)):

        if gpxValues[0] == "wpt":
            # Waypoint: one <wpt> element per point.
            wpt = ET.SubElement(gpx, 'wpt', {'lon':valuesDict["PNTX"], 'lat':valuesDict["PNTY"]})
            wptEle = ET.SubElement(wpt, "ele")
            wptEle.text = valuesDict["ELEVATION"]
            wptTime = ET.SubElement(wpt, "time")
            wptTime.text = valuesDict["DATETIMES"]
            wptName = ET.SubElement(wpt, "name")
            wptName.text = valuesDict["NAME"]
            wptDesc = ET.SubElement(wpt, "desc")
            wptDesc.text = valuesDict["DESCRIPT"]

        else: #TRKS
            # Track: open a new <trk>/<trkseg> when a new part starts, then
            # append a <trkpt> for every point.
            if gpxValues[1]:
                # Elements for the start of a new track
                trk = ET.SubElement(gpx, "trk")
                trkName = ET.SubElement(trk, "name")
                trkName.text = valuesDict["NAME"]
                trkDesc = ET.SubElement(trk, "desc")
                trkDesc.text = valuesDict["DESCRIPT"]
                trkSeg = ET.SubElement(trk, "trkseg")

            trkPt = ET.SubElement(trkSeg, "trkpt", {'lon':valuesDict["PNTX"], 'lat':valuesDict["PNTY"]})
            trkPtEle = ET.SubElement(trkPt, "ele")
            trkPtEle.text = valuesDict["ELEVATION"]
            trkPtTime = ET.SubElement(trkPt, "time")
            trkPtTime.text = valuesDict["DATETIMES"]
196 |
197 |
if __name__ == "__main__":
    ''' Gather tool inputs and pass them to featuresToGPX
    '''

    # Tool parameters: 0 = input features, 1 = output GPX path,
    # 2 = zero-date flag (boolean), 3 = pretty-print flag (boolean).
    inputFC = arcpy.GetParameterAsText(0)
    outGPX = arcpy.GetParameterAsText(1)
    zerodate = arcpy.GetParameter(2)
    pretty = arcpy.GetParameter(3)
    featuresToGPX(inputFC, outGPX, zerodate, pretty)
207 |
--------------------------------------------------------------------------------
/FeaturesToGPX/GPX.tbx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arcpy/sample-gp-tools/f0bb1fe0ae359216a633739388251b3be9949ca5/FeaturesToGPX/GPX.tbx
--------------------------------------------------------------------------------
/FeaturesToGPX/README.md:
--------------------------------------------------------------------------------
1 | ## GPX to Features
2 |
3 | Converts features (layers and feature classes of schema; point, multipoint and polyline) into GPX files.
4 |
Originally posted to [ArcGIS.com as a sample](http://www.arcgis.com/home/item.html?id=067d6ab392b24497b8466eb8447ea7eb), this tool is the sibling to the [GPX to Features](https://pro.arcgis.com/en/pro-app/tool-reference/conversion/gpx-to-features.htm) tool available in both ArcGIS Pro and ArcMap.
6 |
7 | ### Parameters
8 |
9 | **Input Features** | *feature layer* | required input
10 | * Input featureclass or feature layer
11 |
12 | **Output GPX** | *file* | required output
13 | * Output GPX file to be created
14 |
15 | **Zero dates (support Garmin Basecamp)** | *boolean* | optional input
16 | * Create 0 date (JAN-1-1970). If a string field named 'DateTimeS' exists, the values from this field will be used to populate the output GPX file. If this field does not exist, an empty string is used for the date. Garmin Basecamp software requires a valid date. Select this option to insert the JAN-1-1970 (epoch) date into your output GPX file if your features do not have a date field.
17 |
18 | **Pretty output** | *boolean* | optional input
* Format the output GPX file in a nicer, human-readable way. This does not impact hardware and software devices' ability to read the output file.
20 |
21 | ### General Usage
22 |
23 | The tool takes both points and line feature classes as input.
24 |
25 | Line features will be turned into Tracks (TRKS)
26 |
27 | Point features will be turned into WayPoints (WPT)
28 |
29 | **Note**: GPX uses the WGS84 coordinate system. If the input data is not in WGS84, the conversion to GPX will reproject the data. If a transformation is required the best match possible is used. For complete reprojection control you should run the Project tool, converting your data into WGS84 and choosing the correct transformation prior to creating a GPX file.
30 |
31 | Note: Features with the following fields will be used in creating output GPX. Output from the GPX to Features tool (v. 10.1+) creates features with these fields.
32 |
33 | * Name
34 |
35 | * Descript
36 |
37 | * Type
38 |
39 | * Elevation
40 |
41 | * DateTimeS (of type String)
42 |
43 | Point features with the field "Type" and a value of "TRKPT" will be turned into Tracks (TRKS)
44 |
45 |
46 |
--------------------------------------------------------------------------------
/GetLayoutTemplatesInfo/GetLayoutTemplateInfo.tbx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arcpy/sample-gp-tools/f0bb1fe0ae359216a633739388251b3be9949ca5/GetLayoutTemplatesInfo/GetLayoutTemplateInfo.tbx
--------------------------------------------------------------------------------
/GetLayoutTemplatesInfo/GetLayoutTemplatesInfo.py:
--------------------------------------------------------------------------------
1 | # Retrieve metadata for all .mxd available in the specified folder
2 | # each .mxd is considered as a layout template in a printing service.
3 | #
4 |
5 |
6 | # Import required modules
7 | #
8 | import sys
9 | import os
10 | import arcpy
11 | import json
12 | import glob
13 | import xml.dom.minidom as DOM
14 |
# Default layout-template location, matching the folder the ExportWebMap
# tool uses: <ArcGIS install dir>\Templates\ExportWebMapTemplates.
#
_defTmpltFolder = os.path.join(arcpy.GetInstallInfo()['InstallDir'], r"Templates\ExportWebMapTemplates")
18 |
# Custom JSONEncoder that serializes arcpy.mapping.MapDocument objects.
#
class MxdEncoder(json.JSONEncoder):
    """JSON encoder emitting layout-template metadata for MapDocument
    objects; all other types defer to the base encoder."""

    def default(self, obj):
        # Defer anything that is not a MapDocument to the base class.
        if not isinstance(obj, arcpy.mapping.MapDocument):
            return json.JSONEncoder.default(self, obj)

        d = {}

        # Layout_Template name: the .mxd file name without its extension.
        d["layoutTemplate"] = os.path.splitext(os.path.basename(obj.filePath))[0]

        # Page size
        ps = obj.pageSize
        d["pageSize"] = [ps.width, ps.height]

        # Size of the active dataframe element on the layout
        adf = obj.activeDataFrame
        d["activeDataFrameSize"] = [adf.elementWidth, adf.elementHeight]

        # Layout options containing information about layout elements
        lo = {}
        d["layoutOptions"] = lo
        lo["hasTitleText"] = False
        lo["hasAuthorText"] = False
        lo["hasCopyrightText"] = False
        lo["hasLegend"] = False

        # A legend counts only when its parent dataframe is the active one.
        for l in arcpy.mapping.ListLayoutElements(obj, "LEGEND_ELEMENT"):
            if (l.parentDataFrameName == adf.name):
                lo["hasLegend"] = True
                break

        # Availability of text elements - both predefined and user-defined
        ct = [] #an array contains custom text elements - each as a separate dictionary
        lo["customTextElements"] = ct
        for t in arcpy.mapping.ListLayoutElements(obj, "TEXT_ELEMENT"):
            try: #processing dynamic-text-elements with xml tags
                x = DOM.parseString(t.text)
                r = x.childNodes[0]
                if (r.tagName == "dyn") and (r.getAttribute("type") == "document"): #predefined with specific dynamic-text (i.e. xml tag)
                    if (r.getAttribute("property") == "title"):
                        lo["hasTitleText"] = True
                    if (r.getAttribute("property") == "author"):
                        lo["hasAuthorText"] = True
                    if (r.getAttribute("property") == "credits"):
                        lo["hasCopyrightText"] = True
            except Exception:
                # Not dynamic text (parse failed): keep named plain-text
                # elements as custom text.  'except Exception' replaces the
                # original bare 'except' so KeyboardInterrupt/SystemExit
                # still propagate.
                if (len(t.name.strip()) > 0):
                    ct.append({t.name: t.text})

        return d
71 |
72 |
# Main module
#
def main():
    """Collect every .mxd layout template in the input folder and emit its
    metadata as a JSON string through output parameter 1."""
    # Get the value of the input parameter
    #
    tmpltFolder = arcpy.GetParameterAsText(0)

    # When empty, it falls back to the default template location like ExportWebMap tool does
    #
    if (len(tmpltFolder) == 0):
        tmpltFolder = _defTmpltFolder

    # Getting a list of all file paths with .mxd extensions,
    # creating MapDocument objects and putting them in an array
    #
    mxds = []
    for f in glob.glob(os.path.join(tmpltFolder, "*.mxd")):
        try: #throws when MapDocument is corrupted
            mxds.append(arcpy.mapping.MapDocument(f))
        except Exception:
            # 'except Exception' replaces the original bare 'except' so
            # KeyboardInterrupt/SystemExit still propagate.
            arcpy.AddWarning("Unable to open map document named {0}".format(os.path.basename(f)))


    # Encoding the array of MapDocument to JSON using a custom JSONEncoder class
    #
    outJSON = json.dumps(mxds, cls=MxdEncoder, indent=2)

    # Set output parameter
    #
    arcpy.SetParameterAsText(1, outJSON)

    # Clean up: release the MapDocument references explicitly.
    #
    del mxds
107 |
108 |
# Script entry point (also used when published as a script tool).
if __name__ == "__main__":
    main()
--------------------------------------------------------------------------------
/GetLayoutTemplatesInfo/README.md:
--------------------------------------------------------------------------------
## Print Service Templates
2 |
3 | This script tool is designed to work with ExportWebMap geoprocessing tool in order to retrieve metadata for all layout templates available to a printing service.
4 |
This tool takes a folder location of map documents (.mxd files). The map files are layout templates and will be returned as JSON (JavaScript Object Notation).
6 |
7 | For more information about using this tool with the Printing Service, see the following help topics:
8 | * [Get Layout Template tool](http://desktop.arcgis.com/en/arcmap/latest/tools/server-toolbox/get-layout-templates-info.htm)
9 | * [Printing service tutorial](http://server.arcgis.com/en/server/latest/create-web-apps/windows/tutorial-publishing-additional-services-for-printing.htm)
10 | Note: step #8 in 'Preparing and publishing the service' section, use this tool instead of the system toolset.
11 |
12 |
13 | ### Parameters
14 |
15 | **Layout Templates Folder** | *folder* | optional input
16 | * The directory of map documents (mxd) to be used as layouts. If no input folder is given, the tool will use the ExportWebMapTemplates folder in the ArcGIS installation directory.
17 |
18 | **Output JSON** | *string* | derived output
19 | * The output JSON representing the map document (mxd).
20 |
21 | ### General Usage
22 |
23 | Requires ArcMap/ArcGIS Server 10.1+
24 |
25 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
203 |
--------------------------------------------------------------------------------
/LayerToKML_attachments/KML.tbx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arcpy/sample-gp-tools/f0bb1fe0ae359216a633739388251b3be9949ca5/LayerToKML_attachments/KML.tbx
--------------------------------------------------------------------------------
/LayerToKML_attachments/Layer_to_KML_attachment.py:
--------------------------------------------------------------------------------
1 | # Layer to KML - With Attachments (Layer_to_KML_attachment.py)
2 | # Kevin Hibma, Esri
3 | # As found on ArcGIS.com: http://www.arcgis.com/home/item.html?id=5d8704c938ea4715b59eebabcd96c1d9
4 | # Last updated: November 27, 2015
5 | # Version: ArcGIS 10.1+ or ArcGIS Pro 1.0+
6 | #
7 | # Required Arguments:
8 | # Input layer (features layer): path to layer
9 | # Output KML (file): output path to KMZ file to be created
10 | # Optional Arguments:
11 | # Output scale (long): scale to create output KMZ file
12 | # Clamped to ground (boolean): Clamp features to the ground (override their elevation)
13 | # Allow Unique ID Field (boolean): allow a temporary ID field to be added to the input data
14 | # Height (long): set the height to display image attachments in the KML popup
15 | # Width (long): set the width to display image attachments in the KML popup
16 | # ==================================================================================================
17 |
18 | import arcpy
19 | import os
20 | import sys
21 | import zipfile
22 | import shutil
23 | from distutils.version import StrictVersion
24 | try:
25 | from xml.etree import cElementTree as ElementTree
26 | except:
27 | from xml.etree import ElementTree
28 |
29 | # These "supported" items determine what HTML to put into the HTML popup.
30 | # If this list is enhanced, the IFSTATEMENT writing HTML needs to be updated.
   | # Maps a popup category key ('IMG', 'PDF') to the file extensions it covers.
   | # Extensions are matched case-insensitively (attachments() compares with
   | # ext.lower()).
31 | fileTypes = {'IMG' : ['.jpg', '.png', '.gif'],
32 |              'PDF' : ['.pdf']
33 |              }
34 |
35 |
36 | def checks(inputFeatures):
37 | """ Pre checks to make sure we can run """
38 |
39 | def hasAttachments(inputFeatures):
40 |
41 | d = arcpy.Describe(inputFeatures)
42 | rc_names = d.relationshipClassNames
43 |
44 | if len(rc_names) > 0:
45 | for rc_name in rc_names:
46 | # relationship class is always beside the input features
47 | rc = os.path.join(d.path, rc_name)
48 | rcDesc = arcpy.Describe(rc)
49 |
50 | if rcDesc.isAttachmentRelationship:
51 | attachTables = rcDesc.destinationClassNames
52 | if len(attachTables) > 0:
53 | for att_tableName in attachTables:
54 | if arcpy.Exists(os.path.join(d.path, att_tableName)):
55 | # assume the attachment table resides beside the input feature
56 | return os.path.join(d.path, att_tableName)
57 | else:
58 | # if the attachment table is not found, walk through the workspace looking for it
59 | for dirpath, dirnames, filenames in arcpy.da.Walk(ws, datatype="Table"):
60 | for f in filenames:
61 | if f == att_tableName:
62 | if arcpy.Exists(os.path.join(dirpath, att_tableName)):
63 | return os.path.join(dirpath, att_tableName)
64 |
65 | return None
66 |
67 | ## find the attachment table
68 | attachTable = hasAttachments(inputFeatures)
69 |
70 | ## check for sequential OIDs
71 | seq = True
72 | if max([row[0] for row in arcpy.da.SearchCursor(inputFeatures,["OID@"])]) != \
73 | int(arcpy.GetCount_management(inputFeatures).getOutput(0)):
74 | seq = False
75 |
76 | return attachTable, seq
77 |
78 |
79 | def attachments(KMLfiles, KMLdir, attachTable, seq=True, uniqueID=False, height=None, width=None):
80 |     """ Take attachments, extract to disk, update the KML and put them into the KMZ.
   | 
   |     KMLfiles    -- directory the attachment files are extracted into
   |     KMLdir      -- directory holding the extracted doc.kml
   |     attachTable -- path to the geodatabase attachment table
   |     seq         -- True when the input OIDs are sequential
   |     uniqueID    -- True when a temporary ID field was added to the input
   |     height / width -- optional pixel size used for IMG popup entries
   | 
   |     NOTE(review): this function reads the module-level global
   |     'inputFeatures' (set in the __main__ block) instead of receiving it
   |     as a parameter -- it can only be called after __main__ defines it.
   |     """
81 | 
82 |     docKML = os.path.join(KMLdir, "doc.kml")
83 |     ElementTree.register_namespace('', "http://www.opengis.net/kml/2.2")
84 |     tree = ElementTree.parse(docKML)
85 | 
86 |     KML_NS = ".//{http://www.opengis.net/kml/2.2}"
87 |     for node in tree.findall(KML_NS + 'Placemark'):
88 |         idTxt = node.attrib['id']
89 |         idVal = int(idTxt.replace('ID_', '')) + 1 # add 1 because its 0 indexed.
   |         # NOTE(review): the inner loop deliberately reuses the name 'node';
   |         # the Placemark node is not needed after idVal is computed.
90 |         for node in node.findall(KML_NS + 'description') :
91 |             html = node.text
92 | 
93 |             # Special handling for the addition of the tempID field
94 |             if not seq and uniqueID:
   |                 # NOTE(review): the empty-string find/replace targets and
   |                 # fixed offsets (+4/+20/+25) below look like HTML tag text
   |                 # that was stripped from this listing -- verify against the
   |                 # original script before relying on them.
95 |                 gidTD = html.find("tempIDField")
96 |                 gidStart = html.find("", gidTD)
97 |                 GID = html[gidStart+4 : gidStart+20]
98 | 
99 |                 # Remove the GUID field from the HTML.
100 |                 html = html[:gidTD-4] + html[gidStart+25:]
101 | 
102 |                 # Take guid and match it to find the OID to use in the attachment table
103 |                 expression = "tempIDField = '{0}'".format(GID)
104 |                 with arcpy.da.SearchCursor(inputFeatures, ['OID@','tempIDField'], expression) as cursor:
105 |                     for row in cursor:
106 |                         tableMatchOID = row[0]
107 | 
108 |             # Extract the images and add HTML into the KML
109 |             try:
110 |                 string2Inject = ''
111 |                 if not seq and uniqueID: # Use the field that was inserted
112 |                     exp = "REL_OBJECTID = {0}".format(tableMatchOID)
113 |                 else: # Otherwise, use the ID value from KML to match
114 |                     exp = "REL_OBJECTID = {0}".format(idVal)
115 | 
116 |                 with arcpy.da.SearchCursor(attachTable,['DATA', 'ATT_NAME', 'REL_OBJECTID'], exp) as cursor:
117 |                     for row in cursor:
118 |                         binaryRep = row[0]
119 |                         fileName = row[1]
120 |                         # save to disk
121 |                         open(os.path.join(KMLfiles, fileName), 'wb').write(binaryRep.tobytes())
122 |                         fname, ext = os.path.splitext(fileName)
123 | 
   |                         # rename on disk to lower-case so the name matches
   |                         # the lower-cased reference injected into the HTML
124 |                         os.rename(os.path.join(KMLfiles, fileName), os.path.join(KMLfiles, fileName.lower()))
125 |                         fileName = fileName.lower()
126 | 
127 |                         filetype = "unknown"
128 |                         for k, v in fileTypes.items():
129 |                             if ext.lower() in v:
130 |                                 filetype = k
131 | 
132 |                         # Add new items here if the 'fileTypes' dictionary has been updated.
133 |                         if filetype == 'IMG':
134 |                             if height or width:
135 |                                 string2Inject += " ".format( fileName, height, width )
136 |                             else:
137 |                                 string2Inject += " ".format( fileName )
138 |                         elif filetype == 'PDF':
139 |                             string2Inject += " PDF: {1} ".format(fileName, fileName)
140 |                         else: # unknown
141 |                             arcpy.AddWarning("Unknown or unsupported file type for OBJECTID: {}.".format(row[2]))
142 |                             arcpy.AddWarning("{} will not be accessible in the popup.".format(fileName))
143 | 
144 |                 string2Inject += ' | '
145 |                 newHTML = html.replace("", string2Inject, 1)
146 |                 node.text = newHTML
147 | 
148 |             except:
   |                 # best effort: a Placemark without a matching attachment row
   |                 # is skipped with a warning rather than aborting the tool
149 |                 arcpy.AddWarning("No attachment match for ID: {}".format(idVal))
150 | 
151 |     tree.write(docKML)
152 |     del tree
153 |     del docKML
154 |
155 |
156 | if __name__ == '__main__':
157 |
158 | prodInfo = arcpy.GetInstallInfo()
159 | if prodInfo['ProductName'] == "Desktop":
160 | if StrictVersion(prodInfo['Version']) >= StrictVersion('10.5'):
161 | arcpy.AddWarning("The KML to Layer tool was enhanced to automatically include attachments \
162 | at the 10.5 release, effectively making this tool obsolete.")
163 | elif prodInfo['ProductName'] == "ArcGISPro":
164 | if StrictVersion(prodInfo['Version']) >= StrictVersion('1.4'):
165 | arcpy.AddWarning("The KML to Layer tool was enhanced to automatically include attachments \
166 | at the 1.4 release, effectively making this tool obsolete.")
167 |
168 | inputFeatures = arcpy.GetParameterAsText(0)
169 | outputKML = arcpy.GetParameterAsText(1)
170 | outputScale = arcpy.GetParameterAsText(2)
171 | clamped = arcpy.GetParameterAsText(3)
172 | uniqueID = arcpy.GetParameterAsText(4)
173 | height = arcpy.GetParameterAsText(5)
174 | width = arcpy.GetParameterAsText(6)
175 |
176 | # Check the input and make sure
177 | # 1) the data has sequential OIDs
178 | # 2) an attachment table can be found
179 | attachTable, seq = checks(inputFeatures)
180 |
181 | if attachTable is None:
182 | arcpy.AddError("Could not find an attachment table. Ensure the attachment table is properly")
183 | arcpy.AddError("referenced through a relationship class in the same workspace as the input features.")
184 | sys.exit()
185 |
186 | if not seq:
187 | arcpy.AddWarning("It appears the OIDs for the input featureclass are NOT sequential.")
188 | arcpy.AddWarning("Attachment logic depends on sequential OIDs.")
189 | arcpy.AddWarning("A temporary ID field needs to be added to your data to attempt to reconcile this.")
190 |
191 | # Can only proceed if we're permitted to add a new field to the input data.
192 | if not uniqueID:
193 | arcpy.AddError("You need to check the Allow Unique ID parameter (re-run tool and set to True).")
194 | arcpy.AddError("Note: This will add a field to your data, calc, and eventually remove it.")
195 | arcpy.AddError("To maintain the integrity of your data, make a copy of your data and provide this as input.")
196 | sys.exit()
197 | else: # Add the new ID field to the data
198 | import uuid
199 | arcpy.AddField_management(inputFeatures, "tempIDField", "TEXT")
200 | edit = arcpy.da.Editor(arcpy.Describe(inputFeatures).path)
201 | edit.startEditing(False, False)
202 |
203 | with arcpy.da.UpdateCursor(inputFeatures, ["tempIDField"]) as cursor:
204 | for row in cursor:
205 | row[0] = str(uuid.uuid4().hex.upper()[0:16])
206 | cursor.updateRow(row)
207 | edit.stopEditing(True)
208 | arcpy.AddMessage("A temporary field was added to your data and will be removed when tool completes.")
209 |
210 | # Create KML file
211 | arcpy.LayerToKML_conversion(inputFeatures, outputKML, outputScale, ignore_zvalue=clamped)
212 |
213 | # Make new files directory, copy all images inside
214 | KMLdir = os.path.join(os.path.dirname(outputKML), "kml_extracted")
215 | if not os.path.exists(KMLdir):
216 | os.mkdir(KMLdir)
217 | KMLfiles = os.path.join(KMLdir, "files")
218 | if not os.path.exists(KMLfiles):
219 | os.mkdir(KMLfiles)
220 |
221 | # Rename the KML to ZIP and extract it
222 | root, kmlext = os.path.splitext(outputKML)
223 | os.rename(outputKML, root + ".zip")
224 |
225 | with zipfile.ZipFile(root + ".zip", "r") as z:
226 | z.extractall(KMLdir)
227 |
228 | # Inject images into .kmz and save
229 | docKML = os.path.join(KMLdir, "doc.kml")
230 |
231 | # Place the attachments inside the KMZ
232 | attachments(KMLfiles, KMLdir, attachTable, seq, uniqueID, height, width)
233 | if uniqueID:
234 | arcpy.DeleteField_management(inputFeatures, "tempIDField")
235 |
236 | # Remove the original KMZ (zip) as it'll be made new again
237 | os.remove(root + ".zip")
238 |
239 | # zip everything back up
240 | zipf = zipfile.ZipFile(root + ".zip", 'w')
241 | for rootdir, dirs, files in os.walk(KMLdir):
242 | for f in files:
243 | zipf.write(os.path.join(rootdir, f), os.path.relpath(os.path.join(rootdir, f), KMLdir))
244 | zipf.close()
245 |
246 | # Rename ZIP back to KMZ
247 | os.rename(root + ".zip", outputKML)
248 |
249 | # Clean up the KML dir
250 | shutil.rmtree(KMLdir)
251 |
--------------------------------------------------------------------------------
/LayerToKML_attachments/README.md:
--------------------------------------------------------------------------------
1 | ## Layer to KML with attachments
2 |
3 | Converts a layer featureclass into a KMZ file and inserts images from attachments into the output KMZ popup.
4 |
5 | Originally posted to [ArcGIS.com as a sample](http://www.arcgis.com/home/item.html?id=5d8704c938ea4715b59eebabcd96c1d9). Original idea from a [GIS.SE question](http://gis.stackexchange.com/questions/119341/error-exporting-arcmap-feature-to-kml-retain-attached-photo)
6 |
7 | ### Parameters
8 |
9 | **Input Layer** | *feature layer* | required input
10 | * Input feature layer (must be a layer, not featureclass reference from disk)
11 |
12 | **Output KMZ** | *file* | required output
13 | * Output KMZ file to be created
14 |
15 | **Output scale** | *long* | optional input
16 | * The scale at which to export the layer. This parameter is used with any scale dependency, such as layer visibility or scale-dependent rendering. Any value, such as 0, can be used if there are no scale dependencies.
17 |
18 | **Clamped to ground** | *boolean* | optional input
19 | * Checked — You can override the Z-values inside your features or force them to be clamped to the ground. You should use this setting if you are not working with 3D features or have features with Z-values that might not honor values relative to sea level.
20 |
21 | **Allow Unique ID Field** | *boolean* | optional input
22 | * Checked — Allows a new ID (ObjectID) field to be added to the input features. This field is only necessary if the input features do not maintain sequential IDs (OID = 1,2,3,4, etc). The field will be removed from your data when the tool completes. If your data has sequential IDs, this setting will not do anything. Unchecked (false) is the default.
23 |
24 | **Height** | *long* | optional input
25 | * Any numeric value will be used to set the *IMG* height within the KML PopUp. Use this value to force all image attachments to be a certain size.
26 |
27 | **Width** | *long* | optional input
28 | * Any numeric value will be used to set the *IMG* width within the KML PopUp. Use this value to force all image attachments to be a certain size.
29 |
30 | ### General Usage
31 |
32 | This tool creates a KMZ file from input features and inserts any attachments found into the output KMZ file. The current implementation of the Layer to KML tool does not export attachments. This tool works by first creating the KML file, then modifying this new KML by adding references to the attachments. The exported attachments are saved into the KMZ file.
33 |
34 |
35 |
--------------------------------------------------------------------------------
/NearByGroup/MoreProximityTools.tbx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arcpy/sample-gp-tools/f0bb1fe0ae359216a633739388251b3be9949ca5/NearByGroup/MoreProximityTools.tbx
--------------------------------------------------------------------------------
/NearByGroup/README.md:
--------------------------------------------------------------------------------
1 | ## Near By Group
2 |
3 | Determines the distance from each feature in the Input Features to the nearest feature with the same attributes in the Near Features.
4 |
5 | [Downloadable ArcGIS.com sample](http://www.arcgis.com/home/item.html?id=37dbbaa29baa467d9da8e27d87d8ad45)
6 |
7 |
8 | ### Parameters
9 |
10 | **Input Features** | *feature layer* | required input
11 | * The input layer or feature class.
12 |
13 | **Group Field(s)** | *Fields* | required input
14 | * The field(s) containing the key attributes for how groups are defined in the input and near features. This process finds for each input feature the nearest near feature with the attributes matching in these field(s).
15 | The group field(s) must exist in the input features and near features datasets.
16 | One or more fields can be specified.
17 |
18 | **Near Features** | *Feature Layer* | required input
19 | * The features that will be evaluated to find the nearest feature with attributes matching each input feature.
20 | One or more layers or feature classes can be specified.
21 | The near features can be the same layer or feature class as the input features.
22 |
23 | **Search Radius** | *Linear Unit* | optional input
24 | * Specifies the maximum distance used to search for near features. If there is no matching near feature within this distance of an input feature, the NEAR_OID, etc. fields in the output will be NULL.
25 |
26 |
27 | ### General Usage
28 |
29 | Determines the distance from each feature in the Input Features to the nearest feature with the same attributes in the Near Features.
30 |
31 | ### Software Requirements:
32 |
33 | * ArcGIS 10.0 or later
34 |
35 | * ArcInfo (Advanced) license
36 |
--------------------------------------------------------------------------------
/NearByGroup/nearbygroup.py:
--------------------------------------------------------------------------------
1 | '''----------------------------------------------------------------------------------
2 | Tool Name: Near By Group
3 | Source Name: nearbygroup.py
4 | Version: ArcGIS 10
5 | Author: Drew Flater, Esri, Inc.
6 | Required Arguments:
7 | Input Features (Feature Layer)
8 | Group Field(s) (Fields)
9 | Near Features (Feature Layer)
10 | Optional Arguments:
11 | Search Radius (Linear Unit)
12 |
13 | Description: Determines the distance from each feature in the Input Features to
14 | the nearest feature in the same attribute group.
15 |
16 | ----------------------------------------------------------------------------------'''
17 |
18 | # Import system modules
19 | import arcpy
20 | import os
21 |
   | # Allow tool outputs (added fields, temp layers) to replace existing
   | # results so the tool can be re-run in place.
22 | arcpy.env.overwriteOutput = True
23 |
24 | # Main function, all functions run in NearByGroup
25 | def NearByGroup(in_features, group_fields, near_features, search_radius=""):
26 |
27 | # Error if sufficient license is not available
28 | if arcpy.ProductInfo().lower() not in ['arcinfo']:
29 | arcpy.AddError("An ArcGIS for Desktop Advanced license is required.")
30 | sys.exit()
31 |
32 | # Read field values from input features
33 | uniq_values = set()
34 | scur = arcpy.SearchCursor(in_features, "", "", ";".join(group_fields))
35 | try:
36 | for row in scur:
37 | value = tuple()
38 | for field in group_fields:
39 | value += (row.getValue(field),)
40 | uniq_values.add(value)
41 | except:""
42 | finally:
43 | if scur:
44 | del scur
45 |
46 | # Add fields to Input
47 | arcpy.management.AddField(in_features, "NEAR_OID", "LONG")
48 | arcpy.management.AddField(in_features, "NEAR_DISTN", "DOUBLE")
49 | arcpy.management.AddField(in_features, "NEAR_FCLS", "TEXT")
50 |
51 | # Make a selection based on the values
52 | arcpy.management.MakeFeatureLayer(in_features, "input_lyr")
53 | near_features_list = []
54 | for each in near_features:
55 | arcpy.management.MakeFeatureLayer(each, "{0}_lyr".format(os.path.splitext(os.path.basename(each))[0]))
56 | near_features_list.append("{0}_lyr".format(os.path.splitext(os.path.basename(each))[0]))
57 |
58 | # Set the progress bar
59 | arcpy.SetProgressor("step", "Processing...", 0, len(uniq_values), 1)
60 | for uniq_value in uniq_values:
61 | expr = ""
62 | for combo in zip(uniq_value, group_fields):
63 | val = "'{0}'".format(combo[0]) if type(combo[0]) == str or type(combo[0]) == unicode else combo[0]
64 | expr += """{0} = {1} AND """.format(combo[1], val)
65 | expr = expr[:-5]
66 | # Select the input features
67 | arcpy.management.SelectLayerByAttribute("input_lyr", "", expr)
68 | for each in near_features_list:
69 | arcpy.management.SelectLayerByAttribute(each, "", expr)
70 |
71 | # Run the Near process
72 | arcpy.analysis.Near("input_lyr", near_features_list, search_radius)
73 |
74 | # Calculate the values into the NEAR_FID and NEAR_DISTN fields
75 | arcpy.management.CalculateField("input_lyr", "NEAR_OID", "!NEAR_FID!", "PYTHON")
76 | arcpy.management.CalculateField("input_lyr", "NEAR_DISTN", "!NEAR_DIST!", "PYTHON")
77 | if len(near_features) > 1:
78 | arcpy.management.CalculateField("input_lyr", "NEAR_FCLS", """getpath(!NEAR_FC!)""", "PYTHON", """def getpath(layer):\n try:\n return arcpy.Describe(str(layer)).catalogPath\n except:\n return 'None'""")
79 | else:
80 | arcpy.management.CalculateField("input_lyr", "NEAR_FCLS", """r'{0}'""".format(arcpy.Describe(near_features[0]).catalogPath), "PYTHON")
81 | arcpy.SetProgressorPosition()
82 |
83 | # Clean up
84 | arcpy.management.DeleteField("input_lyr", "NEAR_FID;NEAR_DIST;NEAR_FC")
85 | for each in ["input_lyr"] + near_features_list:
86 | try:
87 | arcpy.management.Delete(each)
88 | except:
89 | ""
90 |
91 | # Run the script
92 | if __name__ == '__main__':
93 | # Get Parameters
94 | in_features = arcpy.GetParameterAsText(0)
95 | group_fields = arcpy.GetParameterAsText(1).split(";") if arcpy.GetParameterAsText(1).find(";") > -1 else [arcpy.GetParameterAsText(1)]
96 | near_features = arcpy.GetParameterAsText(2).split(";") if arcpy.GetParameterAsText(2).find(";") > -1 else [arcpy.GetParameterAsText(2)]
97 | search_radius = arcpy.GetParameterAsText(3)
98 |
99 | NearByGroup(in_features, group_fields, near_features, search_radius)
100 | arcpy.SetParameterAsText(4, in_features)
101 | print ("finished")
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # SampleTools
2 |
3 | These tools are provided by Esri as samples to be used with ArcGIS Desktop (ArcMap, ArcCatalog, ArcGIS Pro, etc). No support is expressed or implied. Each tool has been documented with individual help in its given folder. Download an individual tool or clone the entire repository and use the **SampleTools.tbx**.
4 |
5 | ## Tools
6 | * [Dataset Extent To Features](DatasetExtentToFeatures)
7 | * Creates a polygon for the extent of each input geodataset.
8 | * [DescribeObjectReport](DescribeObjectReport)
9 | * Creates a report of all arcpy describe-object properties for an object.
10 | * [Features to GPX](FeaturesToGPX)
11 | * Convert features into a GPX file.
12 | * [GetLayoutTemplatesInfo](GetLayoutTemplatesInfo)
13 | * Gets and provides JSON metadata from layouts for use with a printing service.
14 | * [Layer To KML with Attachments](LayerToKML_attachments)
15 |   * Converts a layer featureclass into a KMZ file and inserts images from attachments into the output KMZ popup.
16 | * [Near By Group](NearByGroup)
17 | * Determines the distance from each feature in the Input Features to the nearest feature with the same attributes in the Near Features.
18 | * [Share Package 2](SharePackage2)
19 | * Uploads a package file to arcgis.com or your local portal.
20 | * [To Attachments](ToAttachment)
21 | * Geoprocessing tool and script that converts the files stored or referenced in a dataset to geodatabase attachments. Files to be added as attachments can come from a Raster field, BLOB field, or text field containing a hyperlink or path.
22 |
23 |
24 | ### Contributing
25 |
26 | Suggestions, fixes, and enhancements are welcome and encouraged. Use the ISSUES link to report problems. Please see our [guidelines for contributing](https://github.com/esri/contributing). Please use one branch per tool update.
27 |
28 |
29 | ### Requirements
30 |
31 | * ArcGIS 10.0+, ArcGIS Pro 1.0+ (unless otherwise noted)
32 |
33 | ### Deployment
34 |
35 | After downloading the entire repo, the _SampleTools.tbx_ can be deployed to your system toolbox for quick access by using the setup.py file. Run the following code from the directory the `setup.py` file exists at, using a proper reference to your _python.exe_
36 |
37 | `C:\downloads\sample-gp-tools>c:\Python27\ArcGIS10.3\python.exe setup.py install `
38 |
39 |
40 | ### Licensing
41 |
42 | Copyright 2015 Esri
43 |
44 | Licensed under the Apache License, Version 2.0 (the "License");
45 | you may not use this file except in compliance with the License.
46 | You may obtain a copy of the License at
47 |
48 | http://www.apache.org/licenses/LICENSE-2.0
49 |
50 | Unless required by applicable law or agreed to in writing, software
51 | distributed under the License is distributed on an "AS IS" BASIS,
52 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
53 | See the License for the specific language governing permissions and
54 | limitations under the License.
55 |
56 | A copy of the license is available in the repository's [license.txt](LICENSE) file.
57 |
58 | [](Esri Tags: arcpy sample tool python script)
59 | [](Esri Language: Python)
60 |
--------------------------------------------------------------------------------
/SampleTools.tbx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arcpy/sample-gp-tools/f0bb1fe0ae359216a633739388251b3be9949ca5/SampleTools.tbx
--------------------------------------------------------------------------------
/SharePackage2/README.md:
--------------------------------------------------------------------------------
1 | ## Share Package 2
2 |
3 | Uploads a package to ArcGIS.com or your portal.
4 | This tool, an enhancement to the original [Share Package](http://desktop.arcgis.com/en/desktop/latest/tools/data-management-toolbox/share-package.htm) tool works both inside the app (ArcMap/ArcGIS Pro) as well as command line. If you are inside the application and signed in, it will use that account. Else you can provide a username and password.
5 |
6 | March 2015 - This tool has been enhanced with additional parameters. The parameter order has changed since the original version. Existing scripts calling this tool may need to be updated.
7 |
8 |
9 | ### Parameters
10 |
11 | **Input Package** | *file* | required input
12 | * Input package. Can be any of layer (.lpk, .lpkx), map (.mpk, .mpkx), geoprocessing (.gpk, .gpkx), map tile (.tpk), address locator (.gcpk) project (.ppkx, .aptx) or other type of ArcGIS package file.
13 |
14 | **Folder** | *string* | required input
15 | * Name of the folder to upload the package to. If the folder does not exist, it will be created. Leaving this value blank will upload the package to the root directory.
16 |
17 | **Username** | *string* | required input
18 | * Username for the portal. If using the tool inside ArcMap or ArcGIS Pro and signed in, this option will be unavailable.
19 |
20 | **Password** | *hidden string* | required input
21 | * Password for the portal. If using the tool inside ArcMap or ArcGIS Pro and signed in, this option will be unavailable.
22 |
23 | **Maintain item's metadata** | *boolean* | required input
24 | * Use this option to maintain the metadata when overwriting an existing package. The tool's behavior is to overwrite a package if it already exists on the portal. This option will save the original metadata (description, tags, credits, etc.) and apply it to the updated package.
25 |
26 | **Summary** | *string* | required input
27 | * Summary of the package
28 |
29 | **Tags** | *string* | required input
30 | * Tags to help make the package searchable
31 |
32 | **Credits** | *string* | optional input
33 | * Package credits
34 |
35 | **Everyone** | *boolean* | optional input
36 | * Share with 'everybody' in the portal.
37 |
38 | **Organization** | *boolean* | optional input
39 | * Share within your organization.
40 |
41 | **Groups** | *string* | optional input
42 | * Share with specific groups in the portal
43 |
44 | **output** | *boolean* | derived output
45 | * Boolean flag set to True if upload succeeded.
46 |
47 | ### General Usage
48 |
49 | Requires 10.3.1+ (uses the [arcpy.GetActivePortalURL](http://desktop.arcgis.com/en/desktop/latest/analyze/arcpy-functions/getactiveportalurl.htm) to obtain the active portal)
50 |
51 |
--------------------------------------------------------------------------------
/SharePackage2/Share.tbx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arcpy/sample-gp-tools/f0bb1fe0ae359216a633739388251b3be9949ca5/SharePackage2/Share.tbx
--------------------------------------------------------------------------------
/SharePackage2/SharePackage2.py:
--------------------------------------------------------------------------------
1 |
2 | import arcpy
3 | import os
4 | import time
5 | import ago
6 |
# Force HTTP/1.0 so uploads are not sent with chunked transfer-encoding.
# NOTE(review): this mutates private attributes of http.client globally
# (for the whole process) -- confirm it is still required.
ago.client.HTTPConnection._http_vsn= 10
ago.client.HTTPConnection._http_vsn_str='HTTP/1.0'

# Valid package types on portal: maps upper-cased file extension to the
# portal item-type string expected by the sharing REST API.
pkgTypes = {".LPK": "Layer Package",
            ".LPKX": "Layer Package",
            ".MPK": "Map Package",
            ".MPKX": "Map Package",
            ".TPK": "Tile Package",
            ".GPK": "Geoprocessing Package",
            ".GPKX": "Geoprocessing Package",
            ".RPK": "Rule Package",
            ".GCPK": "Locator Package",
            ".PPKX": "Project Package",
            ".APTX": "Project Template",
            ".MMPK": "Mobile Map Package",
            ".VTPK": "Vector Tile Package"
            }
25 |
26 |
def sharePackage2(in_package, folder, username, password, maintain, summary, tags, credits, everyone, org, groups):
    """Upload a package file to ArcGIS Online or a portal, overwriting any
    existing package of the same name.

    Arguments:
        in_package -- path to the package file (.lpk, .mpk, .tpk, ...)
        folder     -- portal folder to upload into; created if missing.
                      None or '' means the root folder.
        username   -- portal user name (ignored when signed in via the app)
        password   -- portal password (ignored when signed in via the app)
        maintain   -- keep the existing item's metadata and sharing settings
        summary    -- item summary (ignored when maintain is True)
        tags       -- item tags (ignored when maintain is True)
        credits    -- item credits (ignored when maintain is True)
        everyone   -- share publicly
        org        -- share with the organization
        groups     -- group names to share with

    Returns:
        None.  Raises Exception on any failure so the caller can report it.
    """
    try:
        active_url = arcpy.GetActivePortalURL()
    except Exception:
        # No active portal configured; fall back to ArcGIS Online.
        active_url = 'https://www.arcgis.com/'
    agol_helper = ago.AGOLHelper(portal_url=active_url)

    # If not app-signed in, and have user/pass, sign in the old manual way
    if username and password and "Signed in through app" not in username:
        agol_helper.login(username, password)
    elif arcpy.GetSigninToken() is not None:
        # Sign in using info from the app
        agol_helper.token_login()
    else:
        arcpy.AddIDMessage("Error", 1561)
        return

    # Make sure file exists (size itself is not needed, just the stat call)
    try:
        os.stat(in_package)
    except FileNotFoundError:
        raise Exception("The file {0} was not found".format(in_package))

    fileName, fileExt = os.path.splitext(os.path.basename(in_package))
    try:
        uploadType = pkgTypes[fileExt.upper()]
    except KeyError:
        raise Exception("Unknown/unsupported package type extension: {0}".format(fileExt))


    portalFolders = agol_helper.list_folders()
    # BUG FIX: original test was `folder == "" or None`, which is always
    # equivalent to `folder == ""` because `or None` is falsy; normalize
    # every falsy folder value (None, '') to '' meaning "root".
    if not folder:
        folder = ''
    folderID = ""
    moveFolder = False

    if folder:
        if folder not in portalFolders.keys():
            # Create a new folder
            folderID = agol_helper.create_folder(folder)
            arcpy.AddMessage("Created: {}".format(folderID))
            # refresh the folder list
            portalFolders = agol_helper.list_folders()

    previousPkgId = agol_helper.search(item_type=uploadType, name=fileName)

    if len(previousPkgId) == 0:
        # Pkg does not exist
        if maintain:
            # No pkg + maintain meta == quit
            raise Exception("Existing package not found. Check to make sure it exists or disable maintain metadata.")

        if folder:
            if folder in portalFolders.keys():
                folderID = portalFolders[folder]
                moveFolder = True

    else:
        # Pkg exists
        newItemID = previousPkgId[0]
        itemInfo = agol_helper.item(newItemID)

        # original pkg lives here.
        pkgFolderID = itemInfo['ownerFolder'] if itemInfo['ownerFolder'] else ""

        if folder:
            if folder in portalFolders.keys():
                if maintain and portalFolders[folder] != pkgFolderID:
                    raise Exception("Existing package to update not found in folder {}. Check the folder or disable maintain metadata.".format(folder))
                else:
                    # Existing pkg lives in supplied folder. It'll be updated.
                    folderID = portalFolders[folder]
                    if folderID != pkgFolderID:
                        # Package of same name exists but uploading to a different folder
                        moveFolder = True

            # no else here - this is case where folder needs to be created, covered previously

        else:
            if maintain and pkgFolderID:
                # pkg lives in folder, but root was specified.
                # BUG FIX: original message was garbled ("...update in Does it...").
                raise Exception("Did not find package to update in the root folder. Does it exist in a folder?")

            # no else here - covered previously with folderID variable initialize


    # Set metadata by getting original metadata or adding new
    if not maintain:
        try:
            # Only available in Pro 1.2 or 10.4
            metaFromPkg = arcpy.GetPackageInfo(in_package)
            description = metaFromPkg['description']
            if not summary: summary = metaFromPkg['summary']
            if not tags: tags = metaFromPkg['tags']
            if not credits: credits = metaFromPkg['credits']
        except AttributeError:
            description = ''
        pkgMetadata = (summary, description, tags, credits, '')

    else:
        # Pull the existing item's metadata so it survives the overwrite.
        metadataURL = "{}/content/users/{}/{}/items/{}".format(
            agol_helper.base_url, agol_helper.username, folderID, newItemID)
        metadata = agol_helper.url_request(metadataURL, {'token': agol_helper.token, 'f':'json'} )

        #re-set everyone if necessary from original share options
        everyone = True if metadata['sharing']['access'] == 'public' else everyone
        org = True if everyone else True if metadata['sharing']['access'] == 'org' else org
        groups = metadata['sharing']['groups'] if metadata['sharing']['groups'] else groups
        snippet = metadata['item']['snippet'] if metadata['item']['snippet'] else ''
        description = metadata['item']['description'] if metadata['item']['description'] else ''
        tags = ','.join(metadata['item']['tags'])
        accessInfo = metadata['item']['accessInformation'] if metadata['item']['accessInformation'] else ''
        licenseInfo = metadata['item']['licenseInfo'] if metadata['item']['licenseInfo'] else ''
        pkgMetadata = (snippet, description, tags, accessInfo, licenseInfo)

        # Save original thumbnail to update with metadata (best-effort:
        # a missing thumbnail must not stop the upload)
        try:
            thumbnailURL = "{}/content/items/{}/info/{}".format(
                agol_helper.base_url, newItemID, metadata['item']['thumbnail'])
            saveThumb = os.path.join(arcpy.env.scratchFolder, "thumbnail.png")
            agol_helper.save_file(thumbnailURL, saveThumb)
            pkgMetadata += (saveThumb,)
        except Exception:
            arcpy.AddWarning("Problem getting thumbnail")

        arcpy.AddMessage("Using existing metadata")


    # Behavior is to always overwrite a package if it exists
    extraParams = {'overwrite':'true'}

    # Upload the package
    arcpy.AddMessage("Beginning file upload")
    newItemIDres = agol_helper.add_item(in_package, agol_helper.username, folderID, uploadType, params=extraParams)

    if 'success' in newItemIDres:
        if newItemIDres['success']:
            newItemID = newItemIDres['id']
        else:
            raise Exception("(returned msg) {}".format(newItemIDres))
    # NOTE(review): when the response lacks a 'success' key and no previous
    # package existed, newItemID is unbound below -- confirm add_item cannot
    # return such a response.

    # Commit the file
    arcpy.AddMessage("Committing the file on the portal")
    resCom = agol_helper.commit(newItemID, agol_helper.username)

    # Poll until the portal finishes processing the committed file.
    status = 'processing'  # partial | processing | failed | completed
    while status == 'processing' or status == 'partial':
        status = agol_helper.item_status(newItemID, agol_helper.username)['status']
        time.sleep(1)
        if status == 'failed':
            raise Exception("Failed in processing the file on the portal")

    if moveFolder:
        #move new package into folder
        moveResp = agol_helper.move_items(folderID, [newItemID])
        if not moveResp['results'][0]['success']:
            arcpy.AddMessage("Failed to move item to folder: '{}'. Item will be created in root".format(folder))
            folderID = ""

    # Set or Update the metadata
    arcpy.AddMessage("Setting metadata and sharing settings")
    uresp = agol_helper.update_item(newItemID, pkgMetadata, folder_id=folderID, title=fileName)
    try:
        if not uresp['success']:
            arcpy.AddWarning("Could not set sharing properties")
    except Exception:
        arcpy.AddWarning("Problem setting metadata values:")
        arcpy.AddError("  {0}".format(uresp['error']))

    # Clean up thumbnail (saveThumb only exists on the maintain path)
    try:
        os.remove(saveThumb)
    except (NameError, IOError):
        pass


    # Set Sharing options
    if not maintain:
        if everyone or groups or org:
            groupIDs = []
            if groups:
                # Translate the requested group names into portal group ids.
                userGroups = agol_helper.list_groups(agol_helper.username)
                for group in userGroups.keys():
                    arcpy.AddMessage(group)
                    for selectedgroup in groups:
                        if group == selectedgroup:
                            groupIDs.append(userGroups[group])

            gresp = agol_helper.share_items(groupIDs, everyone, org, [newItemID])
            try:
                if not gresp['results'][0]['success']:
                    arcpy.AddWarning("Could not set sharing properties")
                    arcpy.AddError("  {0}".format(gresp['results'][0]['error']['message']))
            except Exception:
                arcpy.AddWarning("Problem sharing item:")
                arcpy.AddError("  {0}".format(gresp))
225 |
226 |
if __name__ == '__main__':

    # Script-tool entry point: read the eleven tool parameters, run the
    # upload, and report success via derived boolean output parameter 11.
    try:
        sharePackage2(arcpy.GetParameterAsText(0), #input pkg
                      arcpy.GetParameterAsText(1), #folder
                      arcpy.GetParameterAsText(2), #username
                      arcpy.GetParameterAsText(3), #password
                      arcpy.GetParameter(4), #maintain
                      arcpy.GetParameterAsText(5), #summary
                      arcpy.GetParameterAsText(6), #tags
                      arcpy.GetParameterAsText(7), #credits
                      arcpy.GetParameter(8), #everybody
                      arcpy.GetParameter(9), #org
                      arcpy.GetParameterAsText(10)) #groups

        arcpy.SetParameter(11, True)
        arcpy.AddMessage("done!")

    except Exception as e:
        # Any failure is surfaced as a tool error; output flag set False.
        arcpy.AddError("Failed to upload, error: {}".format(e))
        arcpy.SetParameter(11, False)
248 |
--------------------------------------------------------------------------------
/SharePackage2/ago.py:
--------------------------------------------------------------------------------
1 | #! -*- coding: utf-8; mode: python -*-
2 | """
3 | ago.py: interact with an ArcGIS Portal instance
4 | """
5 | import arcpy
6 | import json
7 | import time
8 | import datetime
9 | import mimetypes
10 | import gzip
11 | import random
12 | import string
13 | import getpass
14 | import sys
15 | import os
16 | from io import BytesIO
17 | import codecs
18 | import uuid
19 | import shutil
20 |
21 | try:
22 | import http.client as client
23 | import urllib.parse as parse
24 | from urllib.request import urlopen as urlopen
25 | from urllib.request import Request as request
26 | from urllib.request import HTTPError, URLError
27 | from urllib.parse import urlencode as encode
28 | # py2
29 | except ImportError:
30 | import httplib as client
31 | from urllib2 import urlparse as parse
32 | from urllib2 import urlopen as urlopen
33 | from urllib2 import Request as request
34 | from urllib2 import HTTPError, URLError
35 | from urllib import urlencode as encode
36 | unicode = str
37 |
# Valid package types on portal: maps upper-cased file extension to the
# portal item-type string used by the sharing REST API.
ITEM_TYPES = {
    ".LPK": "Layer Package",
    ".LPKX": "Layer Package",
    ".MPK": "Map Package",
    ".MPKX": "Map Package",
    ".GPK": "Geoprocessing Package",
    ".GPKX": "Geoprocessing Package",
    ".RPK": "Rule Package",
    ".GCPK": "Locator Package",
    ".PPKX": "Project Package",
    ".APTX": "Project Template",
    ".TPK": "Tile Package",
    ".MMPK": "Mobile Map Package",
    ".VTPK": "Vector Tile Package"
}
54 |
55 |
class MultipartFormdataEncoder(object):
    """
    Encode form fields and file payloads as a multipart/form-data body.

    Usage: request_headers, request_data =
               MultipartFormdataEncoder().encodeForm(params, files)
    Inputs:
       params = {"f": "json", "token": token, "type": item_type,
                 "title": title, "tags": tags, "description": description}
       files = {"file": {"filename": "some_file.sd", "content": content}}
       Note: content = open(file_path, "rb").read()
    """

    def __init__(self):
        # Random boundary string separating the parts of the body.
        self.boundary = uuid.uuid4().hex
        self.content_type = {
            "Content-Type": "multipart/form-data; boundary={}".format(self.boundary)
        }

    @classmethod
    def u(cls, s):
        """Coerce *s* to unicode text on both Python 2 and Python 3."""
        if sys.hexversion < 0x03000000 and isinstance(s, str):
            s = s.decode('utf-8')
        if sys.hexversion >= 0x03000000 and isinstance(s, bytes):
            s = s.decode('utf-8')
        return s

    def iter(self, fields, files):
        """
        Yield (bytes, length) tuples for the body. See class description
        for usage.
        """
        encoder = codecs.getencoder('utf-8')
        # Simple form fields: boundary, disposition header, blank line, value.
        for key, value in fields.items():
            yield encoder('--{}\r\n'.format(self.boundary))
            yield encoder(
                self.u('Content-Disposition: form-data; name="{}"\r\n').format(key))
            yield encoder('\r\n')
            if isinstance(value, int) or isinstance(value, float):
                value = str(value)
            yield encoder(self.u(value))
            yield encoder('\r\n')

        # File parts: disposition + content-type headers, blank line, raw bytes.
        for key, value in files.items():
            if "filename" in value:
                filename = value.get("filename")
                content_disp = 'Content-Disposition: form-data;name=' + \
                    '"{}"; filename="{}"\r\n'.format(key, filename)
                content_type = 'Content-Type: {}\r\n'.format(
                    mimetypes.guess_type(filename)[0] or 'application/octet-stream')
                yield encoder('--{}\r\n'.format(self.boundary))
                yield encoder(content_disp)
                yield encoder(content_type)
                yield encoder('\r\n')
            if "content" in value:
                buff = value.get("content")
                # Raw bytes pass through untouched (no re-encoding).
                yield (buff, len(buff))
            yield encoder('\r\n')

        # Closing boundary ends the multipart body.
        yield encoder('--{}--\r\n'.format(self.boundary))

    def encodeForm(self, fields, files):
        """Build the complete body; return (headers_dict, body_bytes)."""
        body = BytesIO()
        for chunk, chunk_len in self.iter(fields, files):
            body.write(chunk)
        self.content_type["Content-Length"] = str(len(body.getvalue()))
        return self.content_type, body.getvalue()
120 |
121 |
122 | class AGOLHelper(object):
123 | """
124 | Interact with an ArcGIS Portal instance, such as ArcGIS Online. Must be
125 | initialized with either the login() method, or by reusing an existing
126 | OAuth token via token_login(). Covers approximately 1/3 of the complete
127 | API, primarily focused on the common operations around uploading and
128 | managing services and web maps.
129 | """
130 |
    def __init__(self, portal_url=None, token=None, debug=False):
        """
        Arguments:
            portal_url -- portal to connect to (default: arcpy's active portal)
            token -- reuse an existing token instead of logging in (default: None)
            debug -- emit verbose arcpy messages (default: False)
        """
        if portal_url is None:
            self.portal_url = arcpy.GetActivePortalURL()
        else:
            self.portal_url = portal_url
        # Default scheme; may be replaced by the one parsed from the URL below.
        self.protocol = 'https'
        self.is_arcgis_online = False
        url_parts = self._parse_url(self.portal_url)
        if url_parts:
            if url_parts.scheme:
                self.protocol = url_parts.scheme
            self.host = self._normalize_host_url(url_parts)
            # arcgis.com is always accessed over HTTPS regardless of the URL.
            if url_parts.netloc == 'www.arcgis.com':
                self.is_arcgis_online = True
                self.protocol = 'https'

        else:
            # NOTE(review): NO_PORTAL_URL_MSG is not defined in the visible
            # part of this module -- confirm it exists or this raises NameError.
            arcpy.AddError(NO_PORTAL_URL_MSG)
            sys.exit()
        self.base_url = '{}://{}/sharing/rest'.format(self.protocol, self.host)
        # Token generation must always go over HTTPS.
        self.secure_url = 'https://{}/sharing/rest'.format(self.host)

        self.token = token
        self.debug = debug

        self.headers = {
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'User-Agent': ('ago.py -- ArcGIS portal module 0.1')
        }

        # Populated by information() after a successful login.
        self.portal_name = None
        self.portal_info = {}
        self.username = None
        self.login_method = None
        self.expiration = None
        self._password = None
168 |
    def login(self, username=None, password=None, repeat=None):
        """
        Get a sign-in token from provided credentials.

        Arguments:
            username -- user to sign in with (required)
            password -- password for user (default: prompt via getpass)
            repeat -- retry count forwarded to url_request (default: None)

        Returns:
            None.  On success, sets self.token/self.expiration and refreshes
            portal info; on failure reports an arcpy error and returns.
        """

        if username:
            self.username = username
        else:
            arcpy.AddError("Expected user name. None given.")
            return
        if password is None:
            # Interactive fallback: prompt on the console.
            self._password = getpass.getpass()
        else:
            self._password = password

        # Tokens are always requested over the HTTPS endpoint.
        token_url = '{}/generateToken?'.format(self.secure_url)
        token_parameters = {
            'username': username,
            'password': self._password,
            'referer': "http://maps.esri.com",
            'expiration': 600,  # requested token lifetime (minutes)
        }
        token_response = self.url_request(
            token_url, token_parameters, 'POST', repeat=repeat)

        if token_response and 'token' in token_response:
            self.token = token_response['token']
            # 'expires' is epoch milliseconds here; keep a 1s safety margin.
            self.expiration = datetime.datetime.fromtimestamp(
                token_response['expires'] / 1000) - datetime.timedelta(seconds=1)

            # Honor the portal's ssl flag for subsequent requests.
            if 'ssl' in token_response:
                if token_response['ssl']:
                    self.protocol = 'https'
                else:
                    self.protocol = 'http'

            # update base information with token
            self.information()
            self.login_method = 'password'
        else:
            arcpy.AddError("Unable to get signin token.")
            return
218 |
    def token_login(self):
        """
        Get a sign-in token generated from ArcPy.

        Arguments:
            None

        Returns:
            None.  On success, sets self.token/self.expiration and refreshes
            portal info; on failure reports an arcpy error and returns.
        """
        # NOTE side-effects
        token_response = arcpy.GetSigninToken()
        if token_response and 'token' in token_response:
            self.token = token_response['token']
            # Unlike login(), 'expires' here is epoch seconds (not ms);
            # keep a 1s safety margin.
            self.expiration = datetime.datetime.fromtimestamp(
                token_response['expires']) - datetime.timedelta(seconds=1)

            if self.debug:
                msg = 'Received token starting with ' + \
                    '"{}", valid for {} minutes.'.format(
                        self.token[0:10], self.valid_for)
                arcpy.AddMessage(msg)

            # update base information with token
            self.information()
            self.login_method = 'token'
        else:
            arcpy.AddError("Unable to get signin token.")
            return
248 |
249 | @property
250 | def valid_for(self):
251 | """
252 | Length the current token is valid for, in minutes.
253 |
254 | Returns:
255 | An integer of minutes token remains valid
256 | """
257 | valid = False
258 | if self.expiration and isinstance(self.expiration, datetime.datetime):
259 | valid = (self.expiration - datetime.datetime.now()).seconds / 60
260 | return valid
261 |
    def information(self):
        """
        Get portal 'self' information and cache it on the instance.

        Arguments:
            None

        Returns:
            A dictionary returned from portals/self.
        """

        # NOTE side-effects; do separately
        url = '{}/portals/self'.format(self.base_url)

        portal_info = self.url_request(url)
        self.portal_info = portal_info
        self.portal_name = portal_info['portalName']

        # Also resolve the signed-in user name from community/self.
        url = '{}/community/self'.format(self.base_url)
        user_info = self.url_request(url)
        self.username = user_info['username']

        return self.portal_info
285 |
286 | def random_string(self, length):
287 | """
288 | Generate a random string of ASCII letters.
289 |
290 | Arguments:
291 | length = number of characters
292 |
293 | Returns:
294 | random string
295 | """
296 | alpha = string.ascii_letters
297 | return ''.join(random.choice(alpha) for ii in range(length + 1))
298 |
    def encode_multipart_data(self, data, files):
        """
        Create multipart boundaries between file streams.

        Arguments:
            data -- input data
            files -- input files

        Returns:
            A tuple containing response -- (body, headers)

        NOTE(review): encode_file() mixes str header lines with the bytes
        from open(..., 'rb').read(); the '\\r\\n'.join() below will raise
        TypeError on Python 3 whenever *files* is non-empty.  Confirm
        whether this legacy path is still used (add_item appears to use
        MultipartFormdataEncoder instead).
        """
        boundary = self.random_string(30)

        def get_content_type(filename):
            """ Try to determine content type based on file extension."""
            return mimetypes.guess_type(filename)[0] or 'application/octet-stream'

        def encode_field(field_name):
            """ Encode fields using multipart specification."""
            return('--' + boundary,
                   'Content-Disposition: form-data; name="%s"' % field_name,
                   '', str(data[field_name]))

        def encode_file(field_name):
            """ Encode file data using multipart specification."""
            filename = str(files[field_name])

            return('--' + boundary,
                   'Content-Disposition: form-data;'
                   'name="{}"; filename="{}"'.format(field_name, filename),
                   'Content-Type: %s' % get_content_type(filename),
                   '', open(filename, 'rb').read())

        lines = []
        for name in data:
            lines.extend(encode_field(name))
        for name in files:
            lines.extend(encode_file(name))
        # Closing boundary plus trailing CRLF terminate the body.
        lines.extend(('--%s--' % boundary, ''))
        body = '\r\n'.join(lines)

        headers = {
            'content-type': 'multipart/form-data; boundary=' + boundary,
            'content-length': str(len(body))
        }

        return body, headers
346 |
347 | def list_folders(self):
348 | """
349 | List available user folders.
350 |
351 | Returns:
352 | A dictionary of folder titles to ids.
353 | """
354 |
355 | folders = {}
356 |
357 | folder_request = self.user_content()['folders']
358 | for folder in folder_request:
359 | folders[folder['title']] = folder['id']
360 |
361 | return folders
362 |
363 | def create_folder(self, name):
364 | """
365 | Create a folder item.
366 | property to the created folder.
367 | Arguments:
368 | name -- folder name to create
369 | Returns:
370 | folder item id.
371 | """
372 | folder = None
373 | url = '{}/content/users/{}/createFolder'.format(
374 | self.base_url, self.username)
375 |
376 | parameters = {'title': name}
377 | response = self.url_request(url, parameters, 'POST')
378 |
379 | if response is not None and 'folder' in response:
380 | folder = response['folder']['id']
381 |
382 | return folder
383 |
384 | def item(self, item_id=None, repeat=None):
385 | """
386 | Get back information about a particular item. Must have read
387 | access to the item requested.
388 |
389 | Arguments:
390 | item_id: the portal id of the desired item.
391 |
392 | Returns:
393 | Dictionary from item response.
394 | """
395 | results = {}
396 | if item_id:
397 | url = '{}/content/items/{}'.format(self.base_url, item_id)
398 | results = self.url_request(url, repeat=repeat)
399 | return results
400 |
401 | def move_items(self, target_folder_id, items):
402 | """
403 | Move items to a target folder.
404 |
405 | Arguments:
406 | target_folder_id: folder id to move items to
407 | items: list of one or more item ids to move
408 |
409 | Returns:
410 | None
411 | """
412 | # Test if we have a None object somewhere
413 | # This could potentially be the case if one of the previous
414 | # portal responses was not successful.
415 | if None in items:
416 | arcpy.AddError(EMPTY_ITEM_MSG)
417 | return
418 |
419 | url = '{}/content/users/{}/moveItems'.format(
420 | self.base_url, self.username)
421 |
422 | parameters = {
423 | 'folder': target_folder_id,
424 | 'items': ','.join(map(str, items))
425 | }
426 |
427 | move_response = self.url_request(url, parameters, request_type='POST')
428 | if self.debug:
429 | msg = "Moving items, using {} with parameters {}, got {}".format(
430 | url, parameters, move_response)
431 | arcpy.AddMessage(msg)
432 |
433 | return move_response
434 |
435 | def move_items(self, target_folder_id, items):
436 | """
437 | Move items to a target folder.
438 | Arguments:
439 | target_folder_id: folder id to move items to
440 | items: list of one or more item ids to move
441 | Returns:
442 | None
443 | """
444 | # Test if we have a None object somewhere
445 | # This could potentially be the case if one of the previous
446 | # portal responses was not successful.
447 |
448 | url = '{}/content/users/{}/moveItems'.format(
449 | self.base_url, self.username)
450 |
451 | parameters = {
452 | 'folder': target_folder_id,
453 | 'items': ','.join(map(str, items))
454 | }
455 |
456 | move_response = self.url_request(url, parameters, request_type='POST')
457 |
458 | return move_response
459 |
460 | def share_items(self, groups=None, everyone=False, org=False, items=None):
461 | """
462 | Shares one or more items with the specified groups. Can only share
463 | items with groups the user belongs to. Can also share with
464 | the users' current organization, and the public.
465 |
466 | Arguments:
467 | groups -- a list of group IDs to share items with
468 | everyone -- publicly share the item (default: False)
469 | org -- share with the users' organization (default: False)
470 | items -- a list of item IDs to update sharing properties on
471 |
472 | Returns:
473 | A dictionary of JSON objects, one per item containing the item,
474 | whether sharing was successful, any groups sharing failed with,
475 | and any errors.
476 | """
477 | if (groups is None and not everyone and not org) or not items:
478 | if self.debug:
479 | arcpy.AddWarning("Invalid sharing options set.")
480 | return
481 |
482 | # If shared with everyone, have to share with Org as well
483 | if everyone:
484 | org = True
485 |
486 | url = '{}/content/users/{}/shareItems'.format(
487 | self.base_url, self.username)
488 |
489 | parameters = {
490 | 'everyone': everyone,
491 | 'org': org,
492 | 'items': ','.join(map(str, items))
493 | }
494 | # sharing with specific groups is optional
495 | if groups:
496 | parameters['groups'] = ','.join(map(str, groups))
497 |
498 | sharing_response = self.url_request(url, parameters, 'POST')
499 | if self.debug:
500 | msg = "Sharing items, using {} with parameters {}, got {}".format(
501 | url, parameters, sharing_response)
502 | arcpy.AddMessage(msg)
503 |
504 | return sharing_response
505 |
    def search(self, title=None, item_type=None, group=None,
               owner=None, item_id=None, repeat=None, num=10, id_only=True, name=None):
        """
        Search for items, a partial implementation of the
        search operation of the ArcGIS REST API. Requires one of:
        title, item_type, group, owner.

        Arguments:
            title -- item title
            item_type -- item type
            group -- item group
            owner -- username of item owner.
                     NOTE(review): currently ignored -- the query always
                     uses self.username (see query_types below); confirm
                     whether restricting results to the signed-in user is
                     intentional.
            item_id -- item id
            name -- item (file) name
            repeat -- retry the search, up to this number of times (default: None)
            num -- number of results (default: 10)
            id_only -- return only IDs of results. If False, will return
                full JSON results. (default: True)

        Returns:
            A list of search results item ids (or full JSON items when
            id_only is False); None when no query terms were supplied or
            all retries were exhausted.
        """

        query_types = {
            'title': title,
            'type': item_type,
            'group': group,
            'owner': self.username, #owner,
            'id': item_id,
            'name': name
        }

        # Build a 'label: "value"' clause for every supplied term.
        query_parts = []
        for (label, value) in list(query_types.items()):
            if value:
                query_parts.append('{}: "{}"'.format(label, value))

        if len(query_parts) == 0:
            return
        elif len(query_parts) == 1:
            query = query_parts[0]
        else:
            query = " AND ".join(query_parts)

        if self.debug:
            arcpy.AddMessage("Searching for '{}'".format(query))

        url = '{}/search'.format(self.base_url)
        parameters = {
            'num': num,
            'q': query
        }
        response_info = self.url_request(url, parameters)
        results = []

        if response_info and 'results' in response_info:
            if response_info['total'] > 0:
                for item in response_info['results']:
                    if 'id' in item:
                        if id_only:
                            results.append(item['id'])
                        else:
                            results.append(item)
        if self.debug:
            if results:
                arcpy.AddMessage("Got results! Found items: {}".format(results))
            else:
                arcpy.AddMessage("No results found.")

        # occasional timing conflicts are happening; repeat search until we
        # can continue -- the result should be empty since we just deleted it.
        if repeat and not results:
            repeat -= 1
            if repeat <= 0:
                return

            time.sleep(1)

            # NOTE(review): the retry does not forward the 'name' argument
            # -- confirm whether it should.
            results = self.search(
                title=title, item_type=item_type, group=group, owner=owner,
                item_id=item_id, repeat=repeat, num=num, id_only=id_only)

        return results
589 |
590 | def user(self, username=None):
591 | """
592 | A user resource representing a registered user of the portal.
593 |
594 | Arguments:
595 | username -- user of interest
596 |
597 | Returns:
598 | A dictionary of the JSON response.
599 |
600 | """
601 | if username is None:
602 | username = self.username
603 |
604 | url = '{}/community/users/{}'.format(self.base_url, username)
605 | return self.url_request(url)
606 |
607 | def user_content(self, username=None):
608 | """
609 | User items and folders.
610 |
611 | Arguments:
612 | username -- user of interest
613 |
614 | Returns:
615 | A dictionary of user items and folders.
616 | """
617 | if username is None:
618 | username = self.username
619 |
620 | url = '{}/content/users/{}'.format(self.base_url, username)
621 | return self.url_request(url)
622 |
623 | def list_groups(self, username=None):
624 | """
625 | List users' groups.
626 |
627 | Returns:
628 | A dictionary of group titles to ids.
629 | """
630 | groups = {}
631 |
632 | if username is None:
633 | username = self.username
634 |
635 | groups_request = self.user(username)['groups']
636 | for group in groups_request:
637 | groups[group['title']] = group['id']
638 |
639 | return groups
640 |
641 | def add_item(self, file_to_upload, username=None, folder_id=None, itemtype=None, params=None):
642 | """
643 | Adds an item to the portal.
644 | All items are added as multipart. Once the item is added,
645 | Add Part will be called.
646 |
647 | Returns:
648 | The response/item_id of the item added.
649 | """
650 | if username is None:
651 | username = self.username
652 |
653 | url = '{}/content/users/{}/{}/addItem'.format(self.base_url, username, folder_id)
654 | parameters = {
655 | 'multipart': 'true',
656 | 'filename': file_to_upload,
657 | }
658 | if params:
659 | parameters.update(params)
660 |
661 | if itemtype:
662 | parameters['type'] = itemtype
663 | else:
664 | try:
665 | file_name, file_ext = os.path.splitext(os.path.basename(file_to_upload))
666 | itemtype = ITEM_TYPES[file_ext.upper()]
667 | except KeyError:
668 | msg = "Unable to upload file: {}, unknown type".format(
669 | file_to_upload)
670 | arcpy.AddError(msg)
671 | return
672 |
673 | details = {'filename': file_to_upload}
674 | add_item_res = self.url_request(
675 | url, parameters, request_type="POST", files=details)
676 |
677 | return self._add_part(file_to_upload, add_item_res['id'], itemtype)
678 |
679 | def _add_part(self, file_to_upload, item_id, upload_type=None):
680 | """ Add item part to an item being uploaded."""
681 |
682 | def read_in_chunks(file_object, chunk_size=10000000):
683 | """Generate file chunks (default: 10MB)"""
684 | while True:
685 | data = file_object.read(chunk_size)
686 | if not data:
687 | break
688 | yield data
689 |
690 | url = '{}/content/users/{}/items/{}/addPart'.format(
691 | self.base_url, self.username, item_id)
692 |
693 | with open(file_to_upload, 'rb') as f:
694 | for part_num, piece in enumerate(read_in_chunks(f), start=1):
695 | title = os.path.splitext(os.path.basename(file_to_upload))[0]
696 | files = {"file": {"filename": file_to_upload, "content": piece}}
697 | params = {
698 | 'f': "json",
699 | 'token': self.token,
700 | 'partNum': part_num,
701 | 'title': title,
702 | 'itemType': 'file',
703 | 'type': upload_type
704 | }
705 | headers, data = MultipartFormdataEncoder().encodeForm(params, files)
706 | resp = self.url_request(url, data, "MULTIPART", headers, repeat=1)
707 |
708 | return resp
709 |
710 | def item_status(self, item_id, username=None):
711 | """
712 | Gets the status of an item.
713 |
714 | Returns:
715 | The item's status. (partial | processing | failed | completed)
716 | """
717 | if username is None:
718 | username = self.username
719 |
720 | url = '{}/content/users/{}/items/{}/status'.format(
721 | self.base_url, username, item_id)
722 |
723 | return self.url_request(url)
724 |
725 | def commit(self, item_id, username=None):
726 | """
727 | Commits an item that was uploaded as multipart
728 |
729 | Returns:
730 | Result of calling commit. (success: true| false)
731 | """
732 | if username is None:
733 | username = self.username
734 |
735 | url = '{}/content/users/{}/items/{}/commit'.format(
736 | self.base_url, username, item_id)
737 |
738 | return self.url_request(url)
739 |
740 | def update_item(self, item_id, metadata, username=None, folder_id=None, title=None):
741 | """
742 | Updates metadata parts of an item.
743 | Metadata expected as a tuple
744 |
745 | Returns:
746 | Result of calling update. (success: true | false)
747 | """
748 | if username is None:
749 | username = self.username
750 |
751 | url = "{}/content/users/{}/{}/items/{}/update".format(
752 | self.base_url, username, folder_id, item_id)
753 |
754 | parameters = {
755 | 'snippet': metadata[0],
756 | 'description': metadata[1],
757 | 'tags': metadata[2],
758 | 'accessInformation': metadata[3],
759 | 'licenseInfo': metadata[4],
760 | 'token': self.token,
761 | 'f': 'json'
762 | }
763 | if title:
764 | parameters['title'] = title
765 |
766 | if len(metadata) > 5:
767 | parameters['thumbnail'] = metadata[5]
768 |
769 | with open(metadata[5], 'rb') as f:
770 | d = f.read()
771 | files = {"thumbnail": {"filename": metadata[5], "content": d }}
772 | headers, data = MultipartFormdataEncoder().encodeForm(parameters, files)
773 | resp = self.url_request(url, data, "MULTIPART", headers, repeat=1)
774 |
775 | return resp
776 |
777 | else:
778 | return self.url_request(url, parameters, 'POST')
779 |
780 |
781 | def url_request(self, in_url, request_parameters=None, request_type='GET',
782 | additional_headers=None, files=None, repeat=0):
783 | """
784 | Make a request to the portal, provided a portal URL
785 | and request parameters, returns portal response. By default,
786 | returns a JSON response, and reuses the current token.
787 |
788 | Arguments:
789 | in_url -- portal url
790 | request_parameters -- dictionary of request parameters.
791 | request_type -- HTTP verb (default: GET)
792 | additional_headers -- any headers to pass along with the request.
793 | files -- any files to send.
794 | repeat -- repeat the request up to this number of times.
795 |
796 | Returns:
797 | dictionary of response from portal instance.
798 | """
799 |
800 | # multipart requests pre-encode the parameters
801 | if request_type == 'MULTIPART':
802 | parameters = request_parameters
803 | else:
804 | parameters = {'f': 'json'}
805 | # if we haven't logged in yet, won't have a valid token
806 | if self.token:
807 | parameters['token'] = self.token
808 | if request_parameters:
809 | parameters.update(request_parameters)
810 |
811 | if request_type == 'GET':
812 | req = request('?'.join((in_url, encode(parameters))))
813 | elif request_type == 'MULTIPART':
814 | req = request(in_url, parameters)
815 | elif request_type == 'WEBMAP':
816 | if files:
817 | req = request(in_url, *self.encode_multipart_data(parameters, files))
818 | else:
819 | arcpy.AddWarning("Multipart request made, but no files provided.")
820 | return
821 | else:
822 | req = request(
823 | in_url, encode(parameters).encode('UTF-8'), self.headers)
824 |
825 | if additional_headers:
826 | for key, value in list(additional_headers.items()):
827 | req.add_header(key, value)
828 | req.add_header('Accept-encoding', 'gzip')
829 | try:
830 | response = urlopen(req)
831 | except HTTPError as e:
832 | arcpy.AddWarning("{} {} -- {}".format(
833 | HTTP_ERROR_MSG, in_url, e.code))
834 | return
835 | except URLError as e:
836 | arcpy.AddWarning("{} {} -- {}".format(
837 | URL_ERROR_MSG, in_url, e.reason))
838 | return
839 |
840 | if response.info().get('Content-Encoding') == 'gzip':
841 | buf = BytesIO(response.read())
842 | with gzip.GzipFile(fileobj=buf) as gzip_file:
843 | response_bytes = gzip_file.read()
844 | else:
845 | response_bytes = response.read()
846 |
847 | response_text = response_bytes.decode('UTF-8')
848 |
849 | # occasional timing conflicts; repeat until we get back a valid response.
850 | response_json = json.loads(response_text)
851 |
852 | # Check that data returned is not an error object
853 | if not response_json or "error" in response_json:
854 | rerun = False
855 | if repeat > 0:
856 | repeat -= 1
857 | rerun = True
858 |
859 | # token has expired. Revalidate, then rerun request
860 | if response_json['error']['code'] == 498:
861 | if self.debug:
862 | arcpy.AddWarning("token invalid, retrying.")
863 | if self.login_method is 'token':
864 | # regenerate the token if we're logged in via the application
865 | self.token_login()
866 | else:
867 | self.login(self.username, self._password, repeat=0)
868 |
869 | # after regenerating token, we should have something long-lived
870 | if not self.token or self.valid_for < 5:
871 | arcpy.AddError("Unable to get signin token.")
872 | return
873 | rerun = True
874 |
875 | if rerun:
876 | time.sleep(2)
877 | response_json = self.url_request(
878 | in_url, request_parameters, request_type,
879 | additional_headers, files, repeat)
880 |
881 | return response_json
882 |
883 | def save_file(self, url, saveFile):
884 | """Saves a file to a given location"""
885 |
886 | if self.token:
887 | url += "?token={}".format(self.token)
888 |
889 | data = urlopen(url).read()
890 | with open(saveFile, "wb") as out_file:
891 | out_file.write(data)
892 |
893 | return saveFile
894 |
895 | def assert_json_success(self, data):
896 | """A function that checks that the input JSON object
897 | is not an error object."""
898 | success = False
899 | obj = json.loads(data)
900 | if 'status' in obj and obj['status'] == "error":
901 | arcpy.AddWarning("{} {}".format("JSON object returned an error.", str(obj)))
902 | elif 'error' in obj:
903 | err = obj['error']
904 | # format the error message
905 | if 'messageCode' in err:
906 | code = err['messageCode']
907 | elif 'code' in err:
908 | code = err['code']
909 | else:
910 | code = "No code provided."
911 |
912 | msg = "Portal error: {}: {}".format(err['message'], code)
913 | if 'details' in err and err['details']:
914 | details = []
915 | for detail in err['details']:
916 | # only use unique detail messages
917 | if detail is not err['message']:
918 | details.append(detail)
919 | if details:
920 | msg += ". Details: {}".format("\n".join(details))
921 | arcpy.AddWarning(msg)
922 | else:
923 | success = True
924 | return success
925 |
926 | def _parse_url(self, url=None):
927 | """ Parse a url into components."""
928 | results = None
929 | if url:
930 | results = parse.urlparse(url)
931 | return results
932 |
933 | def _normalize_host_url(self, parse_result):
934 | """ Normalize a hostname to include just the validated
935 | location and path."""
936 | host_url = parse_result.netloc
937 | if parse_result.path:
938 | path = parse_result.path
939 | if path[-1] == '/':
940 | path = path[:-1]
941 | host_url += path
942 | return host_url
943 |
--------------------------------------------------------------------------------
/ToAttachment/README.md:
--------------------------------------------------------------------------------
1 | ## Raster Field, BLOB, Or Hyperlink To Attachment
2 |
3 | Geoprocessing tool and script that converts the files stored or referenced in a dataset to geodatabase attachments. Files to be added as attachments can come from a Raster field, BLOB field, or text field containing a hyperlink or path.
4 |
5 | Originally posted to [ArcGIS.com as a sample](http://www.arcgis.com/home/item.html?id=473c510504f445d5a6d593cf1a7f1133).
6 |
7 | ### Parameters
8 |
9 | **Input Dataset** | *Table View* | required input
10 | * The input dataset containing a raster field, blob field, or path or hyperlink to a file to add to the input dataset as a geodatabase attachment. This can be a geodatabase feature class or table.
11 |
12 | **Field** | *Field* | required input
13 | * The attribute field from the input dataset containing an image/raster (Raster field), file (Blob field), or file path or hyperlink to be added to the input dataset as a geodatabase attachment.
14 |
15 | **File Type** | *String* | optional input
16 | * The file type of the files contained in the BLOB field. When the input field is a Blob field this must be accurately specified for files to be written correctly as geodatabase attachments. For Raster or Text fields, this parameter is managed automatically and can be left blank.
17 |
18 | Common file types include: JPG, TIF, PNG, BMP, PDF, XML, TXT, DOC, XLS.
19 |
20 | **Output Dataset** | *Dataset* | derived output
21 |
22 | ### General Usage
23 |
24 | Converts the files stored or referenced in a dataset to geodatabase attachments. Files to be added as attachments can come from a Raster field, BLOB field, or text field containing a hyperlink or path.
25 |
26 |
--------------------------------------------------------------------------------
/ToAttachment/ToAttachment.tbx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arcpy/sample-gp-tools/f0bb1fe0ae359216a633739388251b3be9949ca5/ToAttachment/ToAttachment.tbx
--------------------------------------------------------------------------------
/ToAttachment/ToAttachments.py:
--------------------------------------------------------------------------------
1 | '''----------------------------------------------------------------------------------
2 | Tool Name: Raster Field, Blob, Or Hyperlink To Attachment
3 | Source Name: ToAttachments.py
4 | Version: ArcGIS 10.1
5 | Author: dlfater@esri.com - Esri, Inc.
6 | Required Arguments:
7 | Input Dataset (Feature Class or Table)
8 | Field (Raster, Blob, or Text field)
9 | Optional Arguments:
10 | File Type (String)
11 | Derived output:
12 | Output Dataset (Feature Class or Table)
13 |
14 | Description: Adds geodatabase attachments to input dataset, based on files stored
15 | in a raster or blob field, or hyperlinked to.
16 | ----------------------------------------------------------------------------------'''
17 |
# Import system modules
import arcpy
import re
import os
import csv
import sys  # used below for sys.exit(); was missing from the original imports
import datetime
# Python 3 moved urlopen into urllib.request; fall back to urllib2 on
# Python 2. (The original unconditional `import urllib2` broke Python 3.)
try:
    from urllib.request import urlopen as urlopen
except ImportError:
    from urllib2 import urlopen as urlopen

arcpy.env.overwriteOutput = True
31 |
# Main entry point: converts the files stored or referenced in `field`
# into geodatabase attachments on `in_dataset`.
def ToAttachments(in_dataset, field, ftype="", hyperlinkDir=""):
    """Add geodatabase attachments to in_dataset from a Raster, Blob, or
    Text (file path / hyperlink) field.

    Arguments:
        in_dataset -- geodatabase feature class or table
        field -- name of the Raster, Blob, or Text field holding the files
        ftype -- file extension used for files extracted from a Blob field
        hyperlinkDir -- hyperlink base used to resolve relative text paths
    """
    try:
        # Error if sufficient license is not available
        if arcpy.ProductInfo().lower() not in ['arcinfo', 'arceditor']:
            arcpy.AddError("An ArcGIS for Desktop Standard or Advanced license is required.")
            sys.exit()

        arcpy.SetProgressor("default", "Setting up process")
        # Determine the type of the field being analyzed
        # NOTE(review): `type` shadows the builtin and stays unbound when no
        # field name matches `field`, which would make the branches below
        # raise NameError -- confirm a match is guaranteed by the tool UI.
        for fld in arcpy.Describe(in_dataset).fields:
            if fld.name.lower() == field.lower():
                type = fld.type.lower()
                break
        oidfield = arcpy.Describe(in_dataset).OIDFieldName
        count = int(arcpy.management.GetCount(in_dataset).getOutput(0))

        # Create a folder to store intermediate files on disk
        filedir = arcpy.management.CreateFolder("%scratchfolder%", "files_{0}".format(datetime.datetime.strftime(datetime.datetime.now(), "%d%m%Y%H%M%S")))
        # Write ObjectIDs to matching file
        matchtable = os.path.join(str(filedir), "match.txt")
        # NOTE(review): `writer` is used after this with-block exits (handle
        # closed on exit), and 'wb' with csv.writer is Python 2 specific --
        # confirm intended indentation / Python version.
        with open(matchtable, 'wb') as f:
            writer = csv.writer(f)
            writer.writerow(["OID", "FILE"])

        # If working with a blob field
        if type == "blob":
            arcpy.SetProgressor("step", "Processing BLOBs in field {0}".format(field), 0, count, 1)
            with arcpy.da.SearchCursor(in_dataset, ["OID@", field]) as scur:
                # Read through the dataset, harvest files from blob, write to folder, then keep track of OIDs
                for row in scur:
                    try:
                        path = os.path.join(str(filedir), "file_{0}.{1}".format(row[0], ftype))
                        open(path, "wb").write(row[1].tobytes())
                        writer.writerow([str(row[0]), path])
                    except:
                        arcpy.AddWarning("Cannot process BLOB for OID {0}.".format(row[0]))
                    finally:
                        arcpy.SetProgressorPosition()
            # Enable geodatabase attachments and write intermediate files to gdb
            f.close()
            arcpy.management.EnableAttachments(in_dataset)
            arcpy.management.AddAttachments(in_dataset, oidfield, matchtable, "OID", "FILE")

        # If working with a raster field
        elif type == "raster":
            with arcpy.da.SearchCursor(in_dataset, ["OID@"]) as scur:
                i = 1
                for row in scur:
                    try:
                        arcpy.SetProgressorLabel("Processing record {0}/{1}".format(i, count))
                        # address the raster value by its row's OBJECTID
                        inraster = r'{0}\{1}.OBJECTID = {2}'.format(arcpy.Describe(in_dataset).catalogPath, field, row[0])
                        newname = os.path.join(str(filedir), "image_{0}.jpg".format(row[0]))
                        arcpy.management.CopyRaster(inraster, newname)
                        writer.writerow([str(row[0]), newname])
                    except:
                        arcpy.AddWarning("Cannot process raster field for OID {0}.".format(row[0]))
                    finally:
                        i+=1
            f.close()
            arcpy.management.EnableAttachments(in_dataset)
            arcpy.management.AddAttachments(in_dataset, oidfield, matchtable, "OID", "FILE")

        # If working with a file path or hyperlink
        elif type == "string":
            arcpy.SetProgressor("step", "Processing files in field {0}".format(field), 0, count, 1)
            with arcpy.da.SearchCursor(in_dataset, ["OID@", field]) as scur:
                # Need to read the first value of the input field to see if it is a full path, weblink, or path relative to the hyperlink base
                for row in scur:
                    path = row[1]
                    scur.reset()
                    break
                # If the path 'exists', it is a file on disk or network location
                if os.path.exists(path):
                    arcpy.management.EnableAttachments(in_dataset)
                    arcpy.management.AddAttachments(in_dataset, oidfield, in_dataset, oidfield, field)
                    arcpy.AddWarning(arcpy.GetMessages(1))
                    arcpy.SetProgressorPosition(count)
                # If the path doesn't exist, check if it is on the web or relative to the hyperlink base
                else:
                    # On the web
                    if str(path).lower().find("http") > -1 or str(path).lower().find("www") > -1:
                        for row in scur:
                            path = row[1]
                            try:
                                # Go through search cur, download file and write oid and new path
                                u = urlopen(path)
                                newname = os.path.join(str(filedir), os.path.basename(path))
                                localFile = open(newname, "wb")
                                localFile.write(u.read())
                                localFile.close()
                                writer.writerow([str(row[0]), newname])
                            except:
                                arcpy.AddWarning("Cannot process file {0} for OID {1}".format(row[1], row[0]))
                            finally:
                                arcpy.SetProgressorPosition()
                        # Enable geodatabase attachments and write intermediate files to gdb
                        f.close()
                        arcpy.management.EnableAttachments(in_dataset)
                        arcpy.management.AddAttachments(in_dataset, oidfield, matchtable, "OID", "FILE")
                    # Relative to hyperlink base?
                    else:
                        if hyperlinkDir:
                            # If the hyperlinked path exists it is a file on disk or network location
                            if os.path.exists(os.path.join(hyperlinkDir, path)):
                                arcpy.management.EnableAttachments(in_dataset)
                                arcpy.management.AddAttachments(in_dataset, oidfield, in_dataset, oidfield, field, hyperlinkDir)
                                arcpy.AddWarning(arcpy.GetMessages(1))
                                arcpy.SetProgressorPosition(count)
                            # Else, the hyperlink path might be to web
                            elif str(hyperlinkDir).lower().find("http") > -1 or str(hyperlinkDir).lower().find("www") > -1:
                                for row in scur:
                                    path = str(hyperlinkDir) + "/" + str(row[1])
                                    try:
                                        # Go through search cur, download file and write oid and new path
                                        u = urlopen(path)
                                        newname = os.path.join(str(filedir), os.path.basename(path))
                                        localFile = open(newname, "wb")
                                        localFile.write(u.read())
                                        localFile.close()
                                        writer.writerow([str(row[0]), newname])
                                    except:
                                        arcpy.AddWarning("Cannot process file {0} for OID {1}".format(row[0], row[1]))
                                    finally:
                                        arcpy.SetProgressorPosition()
                                # Enable geodatabase attachments and write intermediate files to gdb
                                f.close()
                                arcpy.management.EnableAttachments(in_dataset)
                                arcpy.management.AddAttachments(in_dataset, oidfield, matchtable, "OID", "FILE")
                            else:
                                arcpy.AddWarning("The first record in field '{0}' does not contain a valid path. Processing will not continue.".format(field))
                        else:
                            arcpy.AddWarning("The first record in field '{0}' does not contain a valid path. Processing will not continue.".format(field))

    except:
        raise
    finally:
        # NOTE(review): `filedir` is unbound here if an exception fires
        # before CreateFolder above, masking the original error -- confirm.
        if filedir:
            # Delete the temporary folder
            arcpy.management.Delete(filedir)
172 |
# Run the script
if __name__ == '__main__':
    # Get Parameters
    in_dataset = arcpy.GetParameterAsText(0)
    field = arcpy.GetParameterAsText(1)
    # strip non-word characters from the user-supplied file type (".jpg" -> "JPG")
    ftype = re.sub(r'\W+', '', arcpy.GetParameterAsText(2).upper())
    try:
        # a hyperlink base is only available when run inside an open map document
        hyperlinkDir = arcpy.mapping.MapDocument("current").hyperlinkBase
    except:
        hyperlinkDir = ""

    ToAttachments(in_dataset, field, ftype, hyperlinkDir)
    # return the (now attachment-enabled) input dataset as the derived output
    arcpy.SetParameterAsText(3, in_dataset)
    print ("finished")
--------------------------------------------------------------------------------
/distribute_setup.py:
--------------------------------------------------------------------------------
1 | #!python
2 | """Bootstrap distribute installation
3 | If you want to use setuptools in your package's setup.py, just include this
4 | file in the same directory with it, and add this to the top of your setup.py::
5 | from distribute_setup import use_setuptools
6 | use_setuptools()
7 | If you want to require a specific version of setuptools, set a download
8 | mirror, or use an alternate download directory, you can do so by supplying
9 | the appropriate options to ``use_setuptools()``.
10 | This file can also be run as a script to install or upgrade setuptools.
11 | """
12 | import os
13 | import sys
14 | import time
15 | import fnmatch
16 | import tempfile
17 | import tarfile
18 | from distutils import log
19 |
20 | try:
21 | from site import USER_SITE
22 | except ImportError:
23 | USER_SITE = None
24 |
# Prefer subprocess for spawning the child interpreter; fall back to
# os.spawnl on ancient interpreters that lack the subprocess module.
try:
    import subprocess

    def _python_cmd(*args):
        # Run the current interpreter with *args; True on exit code 0.
        args = (sys.executable,) + args
        return subprocess.call(args) == 0

except ImportError:
    # will be used for python 2.3
    def _python_cmd(*args):
        # Run the current interpreter with *args; True on exit code 0.
        args = (sys.executable,) + args
        # quoting arguments if windows
        if sys.platform == 'win32':
            def quote(arg):
                if ' ' in arg:
                    return '"%s"' % arg
                return arg
            args = [quote(arg) for arg in args]
        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
44 |
# Version of distribute to bootstrap and the mirror to fetch it from.
DEFAULT_VERSION = "0.6.8"
DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
# Fake PKG-INFO content used to mask a real setuptools installation so
# that distribute can be installed over it.
SETUPTOOLS_PKG_INFO = """\
Metadata-Version: 1.0
Name: setuptools
Version: 0.6c9
Summary: xxxx
Home-page: xxx
Author: xxx
Author-email: xxx
License: xxx
Description: xxx
"""
58 |
59 |
def _install(tarball):
    """Extract `tarball` into a temp dir and run `setup.py install` there."""
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        # close the tar even if extraction fails (the original leaked the
        # handle when _extractall raised)
        tar = tarfile.open(tarball)
        try:
            _extractall(tar)
        finally:
            tar.close()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # installing
        # NOTE(review): assert is stripped under -O, so an install failure
        # would go unnoticed there.
        log.warn('Installing Distribute')
        assert _python_cmd('setup.py', 'install')
    finally:
        os.chdir(old_wd)
81 |
82 |
def _build_egg(egg, tarball, to_dir):
    """Extract `tarball`, build a bdist_egg into `to_dir`, and verify `egg` exists.

    Raises:
        IOError -- when the expected egg file was not produced.
    """
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        # close the tar even if extraction fails (the original leaked the
        # handle when _extractall raised)
        tar = tarfile.open(tarball)
        try:
            _extractall(tar)
        finally:
            tar.close()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # building an egg
        log.warn('Building a Distribute egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)

    finally:
        os.chdir(old_wd)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')
109 |
110 |
def _do_download(version, download_base, to_dir, download_delay):
    """Ensure a distribute egg for this interpreter exists in `to_dir`,
    building it from the downloaded tarball when missing, then import it."""
    egg_name = 'distribute-%s-py%d.%d.egg' % (
        version, sys.version_info[0], sys.version_info[1])
    egg = os.path.join(to_dir, egg_name)
    if not os.path.exists(egg):
        tarball = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, tarball, to_dir)
    sys.path.insert(0, egg)
    import setuptools
    setuptools.bootstrap_install_from = egg
121 |
122 |
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                   to_dir=os.curdir, download_delay=15, no_fake=True):
    """Ensure distribute >= `version` is importable, bootstrapping it when
    missing or too old.

    Arguments:
        version -- minimum distribute version required
        download_base -- mirror URL the tarball is fetched from
        to_dir -- directory the egg/tarball are placed in
        download_delay -- seconds to wait before downloading
        no_fake -- when False, mask a real setuptools install first
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    was_imported = 'pkg_resources' in sys.modules or \
        'setuptools' in sys.modules
    try:
        try:
            import pkg_resources
            # a real setuptools (no _distribute marker) must be replaced
            if not hasattr(pkg_resources, '_distribute'):
                if not no_fake:
                    _fake_setuptools()
                raise ImportError
        except ImportError:
            # nothing usable installed: download and bootstrap
            return _do_download(version, download_base, to_dir, download_delay)
        try:
            pkg_resources.require("distribute>=" + version)
            return
        except pkg_resources.VersionConflict:
            e = sys.exc_info()[1]
            if was_imported:
                # too late to swap the module in this process; tell the user
                sys.stderr.write(
                "The required version of distribute (>=%s) is not available,\n"
                "and can't be installed while this script is running. Please\n"
                "install a more recent version first, using\n"
                "'easy_install -U distribute'."
                "\n\n(Currently using %r)\n" % (version, e.args[0]))
                sys.exit(2)
            else:
                del pkg_resources, sys.modules['pkg_resources']  # reload ok
                return _do_download(version, download_base, to_dir,
                                    download_delay)
        except pkg_resources.DistributionNotFound:
            return _do_download(version, download_base, to_dir,
                                download_delay)
    finally:
        if not no_fake:
            _create_fake_setuptools_pkg_info(to_dir)
161 |
162 |
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                        to_dir=os.curdir, delay=15):
    """Download distribute from a specified location and return its filename
    `version` should be a valid distribute version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen
    archive = "distribute-%s.tar.gz" % version
    url = download_base + archive
    saveto = os.path.join(to_dir, archive)
    if not os.path.exists(saveto):  # Avoid repeated downloads
        source = sink = None
        try:
            log.warn("Downloading %s", url)
            source = urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            payload = source.read()
            sink = open(saveto, "wb")
            sink.write(payload)
        finally:
            if source:
                source.close()
            if sink:
                sink.close()
    return os.path.realpath(saveto)
197 |
198 |
def _patch_file(path, content):
    """Back up the file at `path`, then overwrite it with `content`.

    Returns False when the file already holds `content`, True otherwise.
    """
    if open(path).read() == content:
        # already patched
        log.warn('Already patched.')
        return False
    log.warn('Patching...')
    _rename_path(path)
    handle = open(path, 'w')
    try:
        handle.write(content)
    finally:
        handle.close()
    return True
214 |
215 |
216 | def _same_content(path, content):
217 | return open(path).read() == content
218 |
219 |
def _rename_path(path):
    """Rename `path` to `path + '.OLD.<timestamp>'` and return the new name."""
    new_name = path + '.OLD.%s' % time.time()
    log.warn('Renaming %s into %s', path, new_name)
    try:
        # Disable the setuptools sandbox (if present) so the rename is not
        # blocked when running inside an easy_install sandbox.
        from setuptools.sandbox import DirectorySandbox
        def _violation(*args):
            pass
        DirectorySandbox._violation = _violation
    except ImportError:
        pass

    os.rename(path, new_name)
    return new_name
233 |
234 |
def _remove_flat_installation(placeholder):
    """Patch a flat (non-egg) setuptools installation out of the way.

    Arguments:
        placeholder -- directory containing the setuptools install

    Returns:
        True when the installation was patched and its files renamed;
        False (or None) when there was nothing to do.
    """
    if not os.path.isdir(placeholder):
        log.warn('Unknown installation at %s', placeholder)
        return False
    found = False
    # `entry` replaces the original loop variable `file`, which shadowed
    # the builtin of the same name.
    entry = None
    for entry in os.listdir(placeholder):
        if fnmatch.fnmatch(entry, 'setuptools*.egg-info'):
            found = True
            break
    if not found:
        log.warn('Could not locate setuptools*.egg-info')
        return

    log.warn('Removing elements out of the way...')
    pkg_info = os.path.join(placeholder, entry)
    if os.path.isdir(pkg_info):
        patched = _patch_egg_dir(pkg_info)
    else:
        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)

    if not patched:
        log.warn('%s already patched.', pkg_info)
        return False
    # now let's move the files out of the way
    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
        element = os.path.join(placeholder, element)
        if os.path.exists(element):
            _rename_path(element)
        else:
            log.warn('Could not find the %s element of the '
                     'Setuptools distribution', element)
    return True
267 |
268 |
def _after_install(dist):
    """Post-install hook: drop a fake setuptools egg-info next to the install."""
    log.warn('After install bootstrap.')
    _create_fake_setuptools_pkg_info(
        dist.get_command_obj('install').install_purelib)
273 |
274 |
def _create_fake_setuptools_pkg_info(placeholder):
    """Write a fake setuptools egg-info plus a .pth file into `placeholder`.

    Does nothing when `placeholder` is missing or the egg-info already exists.
    """
    if not placeholder or not os.path.exists(placeholder):
        log.warn('Could not find the install location')
        return
    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
    setuptools_file = 'setuptools-0.6c9-py%s.egg-info' % pyver
    pkg_info = os.path.join(placeholder, setuptools_file)
    if os.path.exists(pkg_info):
        log.warn('%s already exists', pkg_info)
        return
    log.warn('Creating %s', pkg_info)
    # with-blocks replace the original open/try/finally boilerplate
    with open(pkg_info, 'w') as f:
        f.write(SETUPTOOLS_PKG_INFO)
    pth_file = os.path.join(placeholder, 'setuptools.pth')
    log.warn('Creating %s', pth_file)
    with open(pth_file, 'w') as f:
        f.write(os.path.join(os.curdir, setuptools_file))
298 |
299 |
def _patch_egg_dir(path):
    """Replace the egg dir at `path` with one holding a faked PKG-INFO.

    Returns:
        True when the directory was patched; False when it already was.
    """
    # let's check if it's already patched
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    if os.path.exists(pkg_info):
        if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
            log.warn('%s already patched.', pkg_info)
            return False
    _rename_path(path)
    os.mkdir(path)
    os.mkdir(os.path.join(path, 'EGG-INFO'))
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    # with-block replaces the original open/try/finally boilerplate
    with open(pkg_info, 'w') as f:
        f.write(SETUPTOOLS_PKG_INFO)
    return True
317 |
318 |
def _before_install():
    """Pre-install hook: hide any real setuptools installation first."""
    log.warn('Before install bootstrap.')
    _fake_setuptools()
322 |
323 |
324 | def _under_prefix(location):
325 | if 'install' not in sys.argv:
326 | return True
327 | args = sys.argv[sys.argv.index('install') + 1:]
328 | for index, arg in enumerate(args):
329 | for option in ('--root', '--prefix'):
330 | if arg.startswith('%s=' % option):
331 | top_dir = arg.split('root=')[-1]
332 | return location.startswith(top_dir)
333 | elif arg == option:
334 | if len(args) > index:
335 | top_dir = args[index + 1]
336 | return location.startswith(top_dir)
337 | elif option == '--user' and USER_SITE is not None:
338 | return location.startswith(USER_SITE)
339 | return True
340 |
341 |
def _fake_setuptools():
    """Locate an installed setuptools and replace it with a fake egg.

    Scans the pkg_resources working set; when a genuine setuptools
    distribution is found under the install prefix it is either removed
    (flat installation) or patched in place (egg installation), after
    which the current process is relaunched so the change takes effect.

    Bug fix vs. the original: ``pkg_resources.Requirement`` has no
    ``load_from_buffer`` method — that call raised AttributeError, which
    the ``except TypeError`` fallback never caught.  The correct API
    (as in upstream distribute_setup.py) is ``Requirement.parse``.
    """
    log.warn('Scanning installed packages')
    try:
        import pkg_resources
    except ImportError:
        # we're cool
        log.warn('Setuptools or Distribute does not seem to be installed.')
        return
    ws = pkg_resources.working_set
    try:
        # distribute's pkg_resources accepts replacement=False here
        setuptools_dist = ws.find(
            pkg_resources.Requirement.parse('setuptools', replacement=False))
    except TypeError:
        # old distribute API
        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))

    if setuptools_dist is None:
        log.warn('No setuptools distribution found')
        return
    # detecting if it was already faked
    setuptools_location = setuptools_dist.location
    log.warn('Setuptools installation detected at %s', setuptools_location)

    # if --root or --prefix was provided, and if
    # setuptools is not located in them, we don't patch it
    if not _under_prefix(setuptools_location):
        log.warn('Not patching, --root or --prefix is installing Distribute'
                 ' in another location')
        return

    # let's see if its an egg
    if not setuptools_location.endswith('.egg'):
        log.warn('Non-egg installation')
        res = _remove_flat_installation(setuptools_location)
        if not res:
            return
    else:
        log.warn('Egg installation')
        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
        if (os.path.exists(pkg_info) and
            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
            log.warn('Already patched.')
            return
        log.warn('Patching...')
        # let's create a fake egg replacing setuptools one
        res = _patch_egg_dir(setuptools_location)
        if not res:
            return
    log.warn('Patched done.')
    _relaunch()
392 |
393 |
def _relaunch():
    """Re-execute the current command line in a fresh interpreter.

    Exits the current process with the child's return code.
    """
    log.warn('Relaunching...')
    # we have to relaunch the process
    cmd = [sys.executable] + sys.argv
    sys.exit(subprocess.call(cmd))
399 |
400 |
401 | def _extractall(self, path=".", members=None):
402 | """Extract all members from the archive to the current working
403 | directory and set owner, modification time and permissions on
404 | directories afterwards. `path' specifies a different directory
405 | to extract to. `members' is optional and must be a subset of the
406 | list returned by getmembers().
407 | """
408 | import copy
409 | import operator
410 | from tarfile import ExtractError
411 | directories = []
412 |
413 | if members is None:
414 | members = self
415 |
416 | for tarinfo in members:
417 | if tarinfo.isdir():
418 | # Extract directories with a safe mode.
419 | directories.append(tarinfo)
420 | tarinfo = copy.copy(tarinfo)
421 | tarinfo.mode = 448 # decimal for oct 0700
422 | self.extract(tarinfo, path)
423 |
424 | # Reverse sort directories.
425 | if sys.version_info < (2, 4):
426 | def sorter(dir1, dir2):
427 | return cmp(dir1.name, dir2.name)
428 | directories.sort(sorter)
429 | directories.reverse()
430 | else:
431 | directories.sort(key=operator.attrgetter('name'), reverse=True)
432 |
433 | # Set correct owner, mtime and filemode on directories.
434 | for tarinfo in directories:
435 | dirpath = os.path.join(path, tarinfo.name)
436 | try:
437 | self.chown(tarinfo, dirpath)
438 | self.utime(tarinfo, dirpath)
439 | self.chmod(tarinfo, dirpath)
440 | except ExtractError:
441 | e = sys.exc_info()[1]
442 | if self.errorlevel > 1:
443 | raise
444 | else:
445 | self._dbg(1, "tarfile: %s" % e)
446 |
447 |
def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    # Download the bootstrap tarball, then install it in one step.
    # (argv/version are accepted for interface compatibility but unused,
    # exactly as in the original.)
    _install(download_setuptools())
452 |
453 |
# Script entry point: forward the command-line args (minus the program name).
if __name__ == '__main__':
    main(sys.argv[1:])
456 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
try:
    from setuptools import setup
except ImportError:
    # setuptools missing: bootstrap it via the bundled distribute installer.
    # (Narrowed from a bare ``except:`` — only an import failure should
    # trigger the bootstrap path; any other error must propagate.)
    import distribute_setup
    distribute_setup.use_setuptools()
    from setuptools import setup
7 | import glob
8 | import os
9 |
# Build the data_files manifest: one entry per sample tool script,
# mirrored under esri/toolboxes/<subdir>.
stuff = [
    (os.path.join("esri/toolboxes", os.path.dirname(f)),
     [os.path.join(os.path.dirname(f), os.path.basename(f))])
    for f in glob.glob("*/*.py")
]
# The toolbox file itself is installed at the toolboxes root.
stuff.append(('esri/toolboxes', ["SampleTools.tbx"]))
15 |
16 |
# Package metadata for the sample geoprocessing tools.
# NOTE(review): packages = [""] with package_dir {"": "."} registers the
# repository root itself as a package — presumably intentional for this
# flat layout, but verify against the install result.
setup(name = "sample-gp-tools",
      version = "0.0.1",
      description = "",
      long_description = "",
      author = "Esri",
      url = "https://github.com/arcpy/sample-gp-tools",
      license = "Apache Software License",
      zip_safe = False,
      package_dir = {"": "."},
      packages = ["",],
      package_data = {"": ["*/*.py",
                           "SampleTools.tbx",
                           ] },
      data_files = stuff,  # computed above from the */*.py glob
      classifiers = [
          "Development Status :: 5 - Production/Stable",
          "Intended Audience :: Developers",
          "License :: Apache Software License",
          "Programming Language :: Python :: 2.7",
          "Programming Language :: Python :: 3.4",
          "Topic :: Software Development :: Libraries :: Python Modules",
          ],
      )
40 |
--------------------------------------------------------------------------------