├── Images
├── Globe16.png
├── Overture16.png
├── Overture32.png
├── AddInDesktop16.png
└── AddInDesktop32.png
├── DarkImages
├── Globe16.png
├── Globe32.png
├── Overture16.png
├── Overture32.png
├── AddInDesktop16.png
└── AddInDesktop32.png
├── Properties
└── launchSettings.json
├── DuckDBGeoparquet.sln
├── Extensions
└── README.txt
├── DuckDBGeoparquetModule.cs
├── Views
├── WizardDockpaneShowButton.cs
├── WizardDockpane.xaml.cs
├── CustomExtentTool.cs
└── WizardDockpane.xaml
├── .gitattributes
├── .github
└── workflows
│ ├── update-version.ps1
│ ├── publish-agol.ps1
│ ├── build.yaml
│ └── publish-agol.py
├── Config.daml
├── DuckDBGeoparquet.csproj
├── .gitignore
├── AGOL_PUBLISHING_SETUP.md
├── README.md
├── LICENSE.txt
├── AUTOMATED_VERSIONING.md
└── Services
└── MfcUtility.cs
/Images/Globe16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/Images/Globe16.png
--------------------------------------------------------------------------------
/DarkImages/Globe16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/DarkImages/Globe16.png
--------------------------------------------------------------------------------
/DarkImages/Globe32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/DarkImages/Globe32.png
--------------------------------------------------------------------------------
/Images/Overture16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/Images/Overture16.png
--------------------------------------------------------------------------------
/Images/Overture32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/Images/Overture32.png
--------------------------------------------------------------------------------
/DarkImages/Overture16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/DarkImages/Overture16.png
--------------------------------------------------------------------------------
/DarkImages/Overture32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/DarkImages/Overture32.png
--------------------------------------------------------------------------------
/Images/AddInDesktop16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/Images/AddInDesktop16.png
--------------------------------------------------------------------------------
/Images/AddInDesktop32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/Images/AddInDesktop32.png
--------------------------------------------------------------------------------
/DarkImages/AddInDesktop16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/DarkImages/AddInDesktop16.png
--------------------------------------------------------------------------------
/DarkImages/AddInDesktop32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/HEAD/DarkImages/AddInDesktop32.png
--------------------------------------------------------------------------------
/Properties/launchSettings.json:
--------------------------------------------------------------------------------
1 | {
2 | "profiles": {
3 | "DuckDBGeoparquet": {
4 | "commandName": "Executable",
5 | "executablePath": "C:\\Program Files\\ArcGIS\\Pro\\bin\\ArcGISPro.exe"
6 | }
7 | }
8 | }
--------------------------------------------------------------------------------
/DuckDBGeoparquet.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio Version 17
4 | VisualStudioVersion = 17.12.35527.113
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DuckDBGeoparquet", "DuckDBGeoparquet.csproj", "{29132FF5-265A-4F58-8F1E-DABEBD1CCA60}"
7 | EndProject
8 | Global
9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
10 | Debug|Any CPU = Debug|Any CPU
11 | Release|Any CPU = Release|Any CPU
12 | EndGlobalSection
13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
14 | {29132FF5-265A-4F58-8F1E-DABEBD1CCA60}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
15 | {29132FF5-265A-4F58-8F1E-DABEBD1CCA60}.Debug|Any CPU.Build.0 = Debug|Any CPU
16 | {29132FF5-265A-4F58-8F1E-DABEBD1CCA60}.Release|Any CPU.ActiveCfg = Release|Any CPU
17 | {29132FF5-265A-4F58-8F1E-DABEBD1CCA60}.Release|Any CPU.Build.0 = Release|Any CPU
18 | EndGlobalSection
19 | GlobalSection(SolutionProperties) = preSolution
20 | HideSolutionNode = FALSE
21 | EndGlobalSection
22 | EndGlobal
23 |
--------------------------------------------------------------------------------
/Extensions/README.txt:
--------------------------------------------------------------------------------
1 | DuckDB Extensions for GeoParquet Add-in
2 | ============================================
3 |
4 | This folder MUST contain the following DuckDB extension files:
5 | 1. spatial.duckdb_extension - For spatial functions and GeoParquet support
6 | 2. httpfs.duckdb_extension - For HTTP/Cloud storage access
7 |
8 | How to obtain these files (CRITICAL):
9 | -------------------------------------
10 | The extensions MUST match version 1.2.0 of DuckDB to work with this add-in.
11 |
12 | OPTION 1: Download pre-built extensions (RECOMMENDED)
13 | ----------------------------------------------------
14 | 1. Go to: https://github.com/duckdb/duckdb/releases/tag/v1.2.0
15 | 2. Download the Windows x64 version: duckdb_cli-windows-amd64.zip
16 | 3. Extract the zip file
17 | 4. Run these commands in a terminal:
18 | ```
19 | ./duckdb.exe -c "INSTALL spatial; INSTALL httpfs;"
20 | ```
21 | 5. The extensions will be created in a .duckdb/extensions folder
22 | 6. Copy these files to this Extensions folder:
23 | - .duckdb/extensions/v1.2.0/windows_amd64/spatial.duckdb_extension
24 | - .duckdb/extensions/v1.2.0/windows_amd64/httpfs.duckdb_extension
25 |
26 | OPTION 2: Direct download links (ALTERNATIVE)
27 | ---------------------------------------------
28 | Direct links to extension files for v1.2.0:
29 | - spatial: https://github.com/duckdb/duckdb-spatial
30 | - httpfs: https://github.com/duckdb/duckdb-httpfs
31 |
32 | Download these files and rename them to:
33 | - spatial.duckdb_extension
34 | - httpfs.duckdb_extension
35 |
36 | CRITICAL:
37 | ---------
38 | 1. The extensions MUST match version 1.2.0 of DuckDB
39 | 2. The files MUST be named exactly "spatial.duckdb_extension" and "httpfs.duckdb_extension"
40 | 3. The files MUST be directly in this Extensions folder
41 | 4. These extensions will be automatically copied to the output directory during build
42 | and loaded from that location at runtime, eliminating the need for internet access
43 | or admin privileges when running the add-in
--------------------------------------------------------------------------------
/DuckDBGeoparquetModule.cs:
--------------------------------------------------------------------------------
1 | using ArcGIS.Desktop.Framework;
2 | using ArcGIS.Desktop.Framework.Contracts;
3 | using System;
4 | using System.Windows;
5 | using System.Windows.Media;
6 |
namespace DuckDBGeoparquet
{
    /// <summary>
    /// Main module for the Overture Maps GeoParquet add-in. Exposes the
    /// framework-resolved singleton and a helper for detecting the active theme.
    /// </summary>
    internal class DuckDBGeoparquetModule : Module
    {
        private static DuckDBGeoparquetModule _this;

        /// <summary>
        /// Gets the singleton instance of this module, resolving it through the
        /// ArcGIS Pro framework on first access.
        /// </summary>
        public static DuckDBGeoparquetModule Current =>
            _this ??= (DuckDBGeoparquetModule)FrameworkApplication.FindModule("DuckDBGeoparquet_Module");

        /// <summary>
        /// Gets whether ArcGIS Pro is currently using the dark theme.
        /// Falls back to false when the framework is unreachable (e.g. design time).
        /// </summary>
        public static bool IsDarkTheme
        {
            get
            {
                try
                {
                    return FrameworkApplication.ApplicationTheme == ApplicationTheme.Dark;
                }
                catch
                {
                    // Framework unavailable (e.g. design time) — assume light theme.
                    return false;
                }
            }
        }

        /// <summary>
        /// Initializes the module; logs the active theme for diagnostics.
        /// </summary>
        protected override bool Initialize()
        {
            System.Diagnostics.Debug.WriteLine($"Initializing with ArcGIS Pro theme: {(IsDarkTheme ? "Dark" : "Light")}");
            return true;
        }

        /// <summary>
        /// Called before unload; returning true allows ArcGIS Pro to close.
        /// </summary>
        protected override bool CanUnload() => true;
    }
}
65 |
--------------------------------------------------------------------------------
/Views/WizardDockpaneShowButton.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 | using ArcGIS.Core.CIM;
7 | using ArcGIS.Core.Data;
8 | using ArcGIS.Core.Geometry;
9 | using ArcGIS.Desktop.Catalog;
10 | using ArcGIS.Desktop.Core;
11 | using ArcGIS.Desktop.Editing;
12 | using ArcGIS.Desktop.Extensions;
13 | using ArcGIS.Desktop.Framework;
14 | using ArcGIS.Desktop.Framework.Contracts;
15 | using ArcGIS.Desktop.Framework.Dialogs;
16 | using ArcGIS.Desktop.Framework.Threading.Tasks;
17 | using ArcGIS.Desktop.KnowledgeGraph;
18 | using ArcGIS.Desktop.Layouts;
19 | using ArcGIS.Desktop.Mapping;
20 |
namespace DuckDBGeoparquet.Views
{
    /// <summary>
    /// Ribbon button that opens the Overture Maps Wizard dockpane.
    /// </summary>
    internal class WizardDockpaneShowButton : Button
    {
        /// <summary>
        /// Shows the dockpane. Any failure is logged and reported to the user
        /// via a message box instead of crashing the ribbon.
        /// </summary>
        protected override void OnClick()
        {
            try
            {
                System.Diagnostics.Debug.WriteLine("Overture Maps wizard button clicked");
                WizardDockpaneViewModel.Show();
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine($"Error showing Overture Maps wizard: {ex.Message}");

                ArcGIS.Desktop.Framework.Dialogs.MessageBox.Show(
                    "Unable to open the Overture Maps Data Loader. Please check ArcGIS Pro logs for details.",
                    "Error Opening Overture Maps",
                    System.Windows.MessageBoxButton.OK,
                    System.Windows.MessageBoxImage.Error);
            }
        }

        /// <summary>
        /// Framework enablement hook — this button is always available.
        /// </summary>
        protected override void OnUpdate() => Enabled = true;
    }
}
65 |
--------------------------------------------------------------------------------
/Views/WizardDockpane.xaml.cs:
--------------------------------------------------------------------------------
1 | using System.Windows;
2 | using System.Windows.Controls;
3 | using System.Windows.Navigation;
4 | using System.Diagnostics;
5 |
namespace DuckDBGeoparquet.Views
{
    /// <summary>
    /// Code-behind for the Overture Maps wizard dockpane view.
    /// </summary>
    public partial class WizardDockpaneView : UserControl
    {
        public WizardDockpaneView() => InitializeComponent();

        /// <summary>
        /// Forwards the selected theme item from the tree to the ViewModel so
        /// the preview pane updates.
        /// </summary>
        private void OnTreeViewItemSelected(object sender, RoutedEventArgs e)
        {
            if (DataContext is WizardDockpaneViewModel viewModel
                && e.OriginalSource is TreeViewItem { DataContext: SelectableThemeItem selectedThemeItem })
            {
                viewModel.SelectedItemForPreview = selectedThemeItem;
            }
        }

        // Placeholder handlers kept so the XAML event wiring stays valid.
        private void BrowseButton_Click(object sender, RoutedEventArgs e)
        {
        }

        private void IngestButton_Click(object sender, RoutedEventArgs e)
        {
        }

        private void TransformButton_Click(object sender, RoutedEventArgs e)
        {
        }

        private void ExportButton_Click(object sender, RoutedEventArgs e)
        {
        }

        /// <summary>
        /// Keeps the log text box scrolled to the newest entry on every change.
        /// </summary>
        private void LogTextBox_TextChanged(object sender, TextChangedEventArgs e)
        {
            if (sender is TextBox textBox)
            {
                textBox.ScrollToEnd();
            }
        }

        /// <summary>
        /// Opens clicked hyperlinks in the default system browser.
        /// </summary>
        private void Hyperlink_RequestNavigate(object sender, RequestNavigateEventArgs e)
        {
            Process.Start(new ProcessStartInfo(e.Uri.AbsoluteUri) { UseShellExecute = true });
            e.Handled = true;
        }
    }
}
68 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Set default behavior to automatically normalize line endings.
3 | ###############################################################################
4 | * text=auto
5 |
6 | ###############################################################################
7 | # Set default behavior for command prompt diff.
8 | #
9 | # This is needed for earlier builds of msysgit that do not have it on by
10 | # default for csharp files.
11 | # Note: This is only used by command line
12 | ###############################################################################
13 | #*.cs diff=csharp
14 |
15 | ###############################################################################
16 | # Set the merge driver for project and solution files
17 | #
18 | # Merging from the command prompt will add diff markers to the files if there
19 | # are conflicts (Merging from VS is not affected by the settings below, in VS
20 | # the diff markers are never inserted). Diff markers may cause the following
21 | # file extensions to fail to load in VS. An alternative would be to treat
22 | # these files as binary and thus will always conflict and require user
23 | # intervention with every merge. To do so, just uncomment the entries below
24 | ###############################################################################
25 | #*.sln merge=binary
26 | #*.csproj merge=binary
27 | #*.vbproj merge=binary
28 | #*.vcxproj merge=binary
29 | #*.vcproj merge=binary
30 | #*.dbproj merge=binary
31 | #*.fsproj merge=binary
32 | #*.lsproj merge=binary
33 | #*.wixproj merge=binary
34 | #*.modelproj merge=binary
35 | #*.sqlproj merge=binary
36 | #*.wwaproj merge=binary
37 |
38 | ###############################################################################
39 | # behavior for image files
40 | #
41 | # image files are treated as binary by default.
42 | ###############################################################################
43 | #*.jpg binary
44 | #*.png binary
45 | #*.gif binary
46 |
47 | ###############################################################################
48 | # diff behavior for common document formats
49 | #
50 | # Convert binary document formats to text before diffing them. This feature
51 | # is only available from the command line. Turn it on by uncommenting the
52 | # entries below.
53 | ###############################################################################
54 | #*.doc diff=astextplain
55 | #*.DOC diff=astextplain
56 | #*.docx diff=astextplain
57 | #*.DOCX diff=astextplain
58 | #*.dot diff=astextplain
59 | #*.DOT diff=astextplain
60 | #*.pdf diff=astextplain
61 | #*.PDF diff=astextplain
62 | #*.rtf diff=astextplain
63 | #*.RTF diff=astextplain
64 |
--------------------------------------------------------------------------------
/.github/workflows/update-version.ps1:
--------------------------------------------------------------------------------
#!/usr/bin/env pwsh
<#
.SYNOPSIS
Updates version in Config.daml from git tag
.DESCRIPTION
This script updates the version attribute in Config.daml based on a git tag version.
It uses XML manipulation to ensure proper handling of the DAML file.
.PARAMETER Version
The version string to set (e.g., "0.1.2")
.PARAMETER ConfigPath
Path to the Config.daml file (default: "Config.daml")
.EXAMPLE
.\update-version.ps1 -Version "0.1.2" -ConfigPath "Config.daml"
#>

param(
    [Parameter(Mandatory=$true)]
    [string]$Version,

    [Parameter(Mandatory=$false)]
    [string]$ConfigPath = "Config.daml"
)

Write-Host "================================================"
Write-Host " Version Update Script for Config.daml"
Write-Host "================================================"
Write-Host ""

# Validate inputs — fail fast (exit 1) if the DAML file does not exist
if (-not (Test-Path $ConfigPath)) {
    Write-Error "Config.daml file not found at: $ConfigPath"
    exit 1
}

# Validate version format (basic semantic version check).
# Non-matching versions only produce a warning, not a hard failure,
# so pre-release tags (e.g. "0.1.2-rc1") still go through.
if ($Version -notmatch '^\d+\.\d+\.\d+$') {
    Write-Warning "Version format may be non-standard: $Version"
    Write-Warning "Expected format: X.Y.Z (e.g., 0.1.2)"
}

Write-Host "Current file: $ConfigPath"
Write-Host "Target version: $Version"
Write-Host ""

try {
    # Load the XML file (DAML root is <ArcGIS> with an <AddInInfo> child,
    # per the property navigation below)
    [xml]$xml = Get-Content -Path $ConfigPath -Raw -Encoding UTF8

    # Store original version for comparison
    $originalVersion = $xml.ArcGIS.AddInInfo.version
    Write-Host "Original version: $originalVersion"

    # Update the version attribute
    $xml.ArcGIS.AddInInfo.SetAttribute("version", $Version)

    # Verify the update took effect before writing anything to disk
    $newVersion = $xml.ArcGIS.AddInInfo.version
    if ($newVersion -ne $Version) {
        Write-Error "Version update verification failed. Expected: $Version, Got: $newVersion"
        exit 1
    }

    # Save the XML file with UTF-8 encoding and preserve formatting.
    # NOTE(review): XmlWriter re-serializes the whole document (tab indent,
    # CRLF newlines), so whitespace may differ from the original file.
    $xmlSettings = New-Object System.Xml.XmlWriterSettings
    $xmlSettings.Indent = $true
    $xmlSettings.IndentChars = "`t"
    $xmlSettings.NewLineChars = "`r`n"
    $xmlSettings.Encoding = [System.Text.UTF8Encoding]::new($true) # UTF-8 with BOM

    $writer = [System.Xml.XmlWriter]::Create($ConfigPath, $xmlSettings)
    $xml.Save($writer)
    $writer.Close()

    Write-Host ""
    Write-Host "✓ Successfully updated version: $originalVersion → $Version" -ForegroundColor Green
    Write-Host ""
    Write-Host "Changes made to: $ConfigPath"

    # Show the updated line for verification (matches every line containing
    # "version=", which may include more than the AddInInfo attribute)
    $updatedContent = Get-Content -Path $ConfigPath | Select-String -Pattern "version="
    Write-Host ""
    Write-Host "Updated line:" -ForegroundColor Cyan
    Write-Host " $updatedContent"

    exit 0
}
catch {
    # Any XML/IO failure lands here; surface both the record and the message
    Write-Error "Failed to update Config.daml: $_"
    Write-Error $_.Exception.Message
    exit 1
}
92 |
--------------------------------------------------------------------------------
/Config.daml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | DuckDBGeoparquet
5 | Export Overture Maps data to GeoParquet format for use in ArcGIS Pro 3.5
6 | Images\AddinDesktop32.png
7 | rylopez
8 | County of Fresno
9 | 5/16/2025
10 | Framework, Geoprocessing, Geodatabase
11 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
34 |
35 |
36 |
37 | Draw a rectangle on the map to define a custom data extent
38 | A map must be open to use this tool
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
--------------------------------------------------------------------------------
/Views/CustomExtentTool.cs:
--------------------------------------------------------------------------------
1 | using ArcGIS.Core.Geometry;
2 | using ArcGIS.Desktop.Framework;
3 | using ArcGIS.Desktop.Framework.Contracts;
4 | using ArcGIS.Desktop.Framework.Threading.Tasks;
5 | using ArcGIS.Desktop.Mapping;
6 | using System;
7 | using System.Threading.Tasks;
8 |
namespace DuckDBGeoparquet.Views
{
    /// <summary>
    /// Custom map tool for drawing an extent rectangle on the active map.
    /// Publishes the sketched envelope through events so the wizard can adopt it.
    /// </summary>
    internal class CustomExtentTool : MapTool
    {
        // Static event so subscribers receive the extent regardless of which
        // tool instance the framework activates.
        // NOTE: restored generic argument — the event is invoked with an Envelope below.
        public static event Action<Envelope> ExtentCreatedStatic;

        // Instance event kept for backward compatibility with older subscribers.
        public event Action<Envelope> ExtentCreated;

        public CustomExtentTool()
        {
            // Standard rectangle sketch drawn in map coordinates; the sketch
            // itself is not kept on the map (no symbol, plain cursor).
            SketchType = SketchGeometryType.Rectangle;
            SketchOutputMode = SketchOutputMode.Map;
            IsSketchTool = true;
            SketchSymbol = null;

            System.Diagnostics.Debug.WriteLine("CustomExtentTool constructor called");
        }

        /// <summary>
        /// Logs activation/deactivation and defers to the base implementation.
        /// </summary>
        protected override Task OnToolActivateAsync(bool active)
        {
            System.Diagnostics.Debug.WriteLine($"CustomExtentTool activated: {active}");
            return base.OnToolActivateAsync(active);
        }

        /// <summary>
        /// Called when the rectangle sketch is finished. Extracts the envelope
        /// and raises both the instance and static events. Tool deactivation is
        /// the subscriber's (WizardDockpaneViewModel's) responsibility.
        /// </summary>
        /// <param name="geometry">The sketched geometry; may be null.</param>
        /// <returns>Always a completed Task&lt;bool&gt; with true.</returns>
        protected override Task<bool> OnSketchCompleteAsync(Geometry geometry)
        {
            System.Diagnostics.Debug.WriteLine("OnSketchCompleteAsync called with geometry type: " +
                (geometry != null ? geometry.GeometryType.ToString() : "null"));

            try
            {
                if (geometry != null)
                {
                    System.Diagnostics.Debug.WriteLine($"Geometry created: {geometry.GeometryType}");

                    // Bounding envelope of whatever was sketched.
                    Envelope extent = geometry.Extent;
                    System.Diagnostics.Debug.WriteLine($"Extent: {extent.XMin}, {extent.YMin}, {extent.XMax}, {extent.YMax}");

                    // Raise the instance event (legacy subscribers).
                    if (ExtentCreated != null)
                    {
                        System.Diagnostics.Debug.WriteLine("Instance ExtentCreated event has subscribers, invoking");
                        ExtentCreated?.Invoke(extent);
                    }
                    else
                    {
                        System.Diagnostics.Debug.WriteLine("Instance ExtentCreated event has no subscribers");
                    }

                    // Raise the static event (primary path — survives re-instantiation).
                    if (ExtentCreatedStatic != null)
                    {
                        System.Diagnostics.Debug.WriteLine("Static ExtentCreatedStatic event has subscribers, invoking");
                        ExtentCreatedStatic?.Invoke(extent);
                    }
                    else
                    {
                        System.Diagnostics.Debug.WriteLine("WARNING: ExtentCreatedStatic event has no subscribers!");
                    }
                }
                else
                {
                    System.Diagnostics.Debug.WriteLine("WARNING: Geometry is null in OnSketchCompleteAsync");
                }
            }
            catch (Exception ex)
            {
                // Never let a sketch-handling failure crash the tool pipeline.
                System.Diagnostics.Debug.WriteLine($"Error in OnSketchCompleteAsync: {ex.Message}");
                System.Diagnostics.Debug.WriteLine($"Stack trace: {ex.StackTrace}");
            }

            return Task.FromResult(true);
        }
    }
}
--------------------------------------------------------------------------------
/DuckDBGeoparquet.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 | net8.0-windows
4 | true
5 | true
6 | win-x64
7 | false
8 | true
9 | CA1416
10 | x64
11 | true
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 | C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Core.dll
37 | False
38 | False
39 |
40 |
41 | C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Desktop.Framework.Contracts.dll
42 | False
43 | False
44 |
45 |
46 | C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Desktop.Framework.dll
47 | False
48 | False
49 |
50 |
51 | C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Desktop.Framework.Threading.Tasks.dll
52 | False
53 | False
54 |
55 |
56 | C:\Program Files\ArcGIS\Pro\bin\Extensions\Core\ArcGIS.Desktop.Core.dll
57 | False
58 | False
59 |
60 |
61 | C:\Program Files\ArcGIS\Pro\bin\Extensions\Mapping\ArcGIS.Desktop.Mapping.dll
62 | False
63 | False
64 |
65 |
66 | C:\Program Files\ArcGIS\Pro\bin\Extensions\Catalog\ArcGIS.Desktop.Catalog.dll
67 | False
68 | False
69 |
70 |
71 | C:\Program Files\ArcGIS\Pro\bin\Extensions\Editing\ArcGIS.Desktop.Editing.dll
72 | False
73 | False
74 |
75 |
76 | C:\Program Files\ArcGIS\Pro\bin\Extensions\DesktopExtensions\ArcGIS.Desktop.Extensions.dll
77 | False
78 | False
79 |
80 |
81 | C:\Program Files\ArcGIS\Pro\bin\Extensions\GeoProcessing\ArcGIS.Desktop.GeoProcessing.dll
82 | False
83 | False
84 |
85 |
86 | C:\Program Files\ArcGIS\Pro\bin\Extensions\Layout\ArcGIS.Desktop.Layouts.dll
87 | False
88 | False
89 |
90 |
91 | C:\Program Files\ArcGIS\Pro\bin\Extensions\KnowledgeGraph\ArcGIS.Desktop.KnowledgeGraph.dll
92 | False
93 | False
94 |
95 |
96 | C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Desktop.Shared.Wpf.dll
97 | False
98 | False
99 |
100 |
101 | C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Desktop.Ribbon.Wpf.dll
102 | False
103 | False
104 |
105 |
106 | C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Desktop.DataGrid.Contrib.Wpf.dll
107 | False
108 | False
109 |
110 |
111 | C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Desktop.Resources.dll
112 | False
113 | False
114 |
115 |
116 | C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Desktop.Bars.Wpf.dll
117 | False
118 | False
119 |
120 |
121 | C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Desktop.Charts.Wpf.dll
122 | False
123 | False
124 |
125 |
126 | C:\Program Files\ArcGIS\Pro\bin\ESRI.ArcGIS.ItemIndex.dll
127 | False
128 | False
129 |
130 |
131 |
132 |
133 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.rsuser
8 | *.suo
9 | *.user
10 | *.userosscache
11 | *.sln.docstates
12 |
13 | # User-specific files (MonoDevelop/Xamarin Studio)
14 | *.userprefs
15 |
16 | # Mono auto generated files
17 | mono_crash.*
18 |
19 | # Build results
20 | [Dd]ebug/
21 | [Dd]ebugPublic/
22 | [Rr]elease/
23 | [Rr]eleases/
24 | x64/
25 | x86/
26 | [Ww][Ii][Nn]32/
27 | [Aa][Rr][Mm]/
28 | [Aa][Rr][Mm]64/
29 | bld/
30 | [Bb]in/
31 | [Oo]bj/
32 | [Oo]ut/
33 | [Ll]og/
34 | [Ll]ogs/
35 |
36 | # Visual Studio 2015/2017 cache/options directory
37 | .vs/
38 | # Uncomment if you have tasks that create the project's static files in wwwroot
39 | #wwwroot/
40 |
41 | # Visual Studio 2017 auto generated files
42 | Generated\ Files/
43 |
44 | # MSTest test Results
45 | [Tt]est[Rr]esult*/
46 | [Bb]uild[Ll]og.*
47 |
48 | # NUnit
49 | *.VisualState.xml
50 | TestResult.xml
51 | nunit-*.xml
52 |
53 | # Build Results of an ATL Project
54 | [Dd]ebugPS/
55 | [Rr]eleasePS/
56 | dlldata.c
57 |
58 | # Benchmark Results
59 | BenchmarkDotNet.Artifacts/
60 |
61 | # .NET Core
62 | project.lock.json
63 | project.fragment.lock.json
64 | artifacts/
65 |
66 | # ASP.NET Scaffolding
67 | ScaffoldingReadMe.txt
68 |
69 | # StyleCop
70 | StyleCopReport.xml
71 |
72 | # Files built by Visual Studio
73 | *_i.c
74 | *_p.c
75 | *_h.h
76 | *.ilk
77 | *.meta
78 | *.obj
79 | *.iobj
80 | *.pch
81 | *.pdb
82 | *.ipdb
83 | *.pgc
84 | *.pgd
85 | *.rsp
86 | *.sbr
87 | *.tlb
88 | *.tli
89 | *.tlh
90 | *.tmp
91 | *.tmp_proj
92 | *_wpftmp.csproj
93 | *.log
94 | *.vspscc
95 | *.vssscc
96 | .builds
97 | *.pidb
98 | *.svclog
99 | *.scc
100 |
101 | # Chutzpah Test files
102 | _Chutzpah*
103 |
104 | # Visual C++ cache files
105 | ipch/
106 | *.aps
107 | *.ncb
108 | *.opendb
109 | *.opensdf
110 | *.sdf
111 | *.cachefile
112 | *.VC.db
113 | *.VC.VC.opendb
114 |
115 | # Visual Studio profiler
116 | *.psess
117 | *.vsp
118 | *.vspx
119 | *.sap
120 |
121 | # Visual Studio Trace Files
122 | *.e2e
123 |
124 | # TFS 2012 Local Workspace
125 | $tf/
126 |
127 | # Guidance Automation Toolkit
128 | *.gpState
129 |
130 | # ReSharper is a .NET coding add-in
131 | _ReSharper*/
132 | *.[Rr]e[Ss]harper
133 | *.DotSettings.user
134 |
135 | # TeamCity is a build add-in
136 | _TeamCity*
137 |
138 | # DotCover is a Code Coverage Tool
139 | *.dotCover
140 |
141 | # AxoCover is a Code Coverage Tool
142 | .axoCover/*
143 | !.axoCover/settings.json
144 |
145 | # Coverlet is a free, cross platform Code Coverage Tool
146 | coverage*.json
147 | coverage*.xml
148 | coverage*.info
149 |
150 | # Visual Studio code coverage results
151 | *.coverage
152 | *.coveragexml
153 |
154 | # NCrunch
155 | _NCrunch_*
156 | .*crunch*.local.xml
157 | nCrunchTemp_*
158 |
159 | # MightyMoose
160 | *.mm.*
161 | AutoTest.Net/
162 |
163 | # Web workbench (sass)
164 | .sass-cache/
165 |
166 | # Installshield output folder
167 | [Ee]xpress/
168 |
169 | # DocProject is a documentation generator add-in
170 | DocProject/buildhelp/
171 | DocProject/Help/*.HxT
172 | DocProject/Help/*.HxC
173 | DocProject/Help/*.hhc
174 | DocProject/Help/*.hhk
175 | DocProject/Help/*.hhp
176 | DocProject/Help/Html2
177 | DocProject/Help/html
178 |
179 | # Click-Once directory
180 | publish/
181 |
182 | # Publish Web Output
183 | *.[Pp]ublish.xml
184 | *.azurePubxml
185 | # Note: Comment the next line if you want to checkin your web deploy settings,
186 | # but database connection strings (with potential passwords) will be unencrypted
187 | *.pubxml
188 | *.publishproj
189 |
190 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
191 | # checkin your Azure Web App publish settings, but sensitive information contained
192 | # in these scripts will be unencrypted
193 | PublishScripts/
194 |
195 | # NuGet Packages
196 | *.nupkg
197 | # NuGet Symbol Packages
198 | *.snupkg
199 | # The packages folder can be ignored because of Package Restore
200 | **/[Pp]ackages/*
201 | # except build/, which is used as an MSBuild target.
202 | !**/[Pp]ackages/build/
203 | # Uncomment if necessary however generally it will be regenerated when needed
204 | #!**/[Pp]ackages/repositories.config
205 | # NuGet v3's project.json files produces more ignorable files
206 | *.nuget.props
207 | *.nuget.targets
208 |
209 | # Microsoft Azure Build Output
210 | csx/
211 | *.build.csdef
212 |
213 | # Microsoft Azure Emulator
214 | ecf/
215 | rcf/
216 |
217 | # Windows Store app package directories and files
218 | AppPackages/
219 | BundleArtifacts/
220 | Package.StoreAssociation.xml
221 | _pkginfo.txt
222 | *.appx
223 | *.appxbundle
224 | *.appxupload
225 |
226 | # Visual Studio cache files
227 | # files ending in .cache can be ignored
228 | *.[Cc]ache
229 | # but keep track of directories ending in .cache
230 | !?*.[Cc]ache/
231 |
232 | # Others
233 | ClientBin/
234 | ~$*
235 | *~
236 | *.dbmdl
237 | *.dbproj.schemaview
238 | *.jfm
239 | *.pfx
240 | *.publishsettings
241 | orleans.codegen.cs
242 |
243 | # Including strong name files can present a security risk
244 | # (https://github.com/github/gitignore/pull/2483#issue-259490424)
245 | #*.snk
246 |
247 | # Since there are multiple workflows, uncomment next line to ignore bower_components
248 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
249 | #bower_components/
250 |
251 | # RIA/Silverlight projects
252 | Generated_Code/
253 |
254 | # Backup & report files from converting an old project file
255 | # to a newer Visual Studio version. Backup files are not needed,
256 | # because we have git ;-)
257 | _UpgradeReport_Files/
258 | Backup*/
259 | UpgradeLog*.XML
260 | UpgradeLog*.htm
261 | ServiceFabricBackup/
262 | *.rptproj.bak
263 |
264 | # SQL Server files
265 | *.mdf
266 | *.ldf
267 | *.ndf
268 |
269 | # Business Intelligence projects
270 | *.rdl.data
271 | *.bim.layout
272 | *.bim_*.settings
273 | *.rptproj.rsuser
274 | *- [Bb]ackup.rdl
275 | *- [Bb]ackup ([0-9]).rdl
276 | *- [Bb]ackup ([0-9][0-9]).rdl
277 |
278 | # Microsoft Fakes
279 | FakesAssemblies/
280 |
281 | # GhostDoc plugin setting file
282 | *.GhostDoc.xml
283 |
284 | # Node.js Tools for Visual Studio
285 | .ntvs_analysis.dat
286 | node_modules/
287 |
288 | # Visual Studio 6 build log
289 | *.plg
290 |
291 | # Visual Studio 6 workspace options file
292 | *.opt
293 |
294 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
295 | *.vbw
296 |
297 | # Visual Studio LightSwitch build output
298 | **/*.HTMLClient/GeneratedArtifacts
299 | **/*.DesktopClient/GeneratedArtifacts
300 | **/*.DesktopClient/ModelManifest.xml
301 | **/*.Server/GeneratedArtifacts
302 | **/*.Server/ModelManifest.xml
303 | _Pvt_Extensions
304 |
305 | # Paket dependency manager
306 | .paket/paket.exe
307 | paket-files/
308 |
309 | # FAKE - F# Make
310 | .fake/
311 |
312 | # CodeRush personal settings
313 | .cr/personal
314 |
315 | # Python Tools for Visual Studio (PTVS)
316 | __pycache__/
317 | *.pyc
318 |
319 | # Cake - Uncomment if you are using it
320 | # tools/**
321 | # !tools/packages.config
322 |
323 | # Tabs Studio
324 | *.tss
325 |
326 | # Telerik's JustMock configuration file
327 | *.jmconfig
328 |
329 | # BizTalk build output
330 | *.btp.cs
331 | *.btm.cs
332 | *.odx.cs
333 | *.xsd.cs
334 |
335 | # OpenCover UI analysis results
336 | OpenCover/
337 |
338 | # Azure Stream Analytics local run output
339 | ASALocalRun/
340 |
341 | # MSBuild Binary and Structured Log
342 | *.binlog
343 |
344 | # NVidia Nsight GPU debugger configuration file
345 | *.nvuser
346 |
347 | # MFractors (Xamarin productivity tool) working folder
348 | .mfractor/
349 |
350 | # Local History for Visual Studio
351 | .localhistory/
352 |
353 | # BeatPulse healthcheck temp database
354 | healthchecksdb
355 |
356 | # Backup folder for Package Reference Convert tool in Visual Studio 2017
357 | MigrationBackup/
358 |
359 | # Ionide (cross platform F# VS Code tools) working folder
360 | .ionide/
361 |
362 | # Fody - auto-generated XML schema
363 | FodyWeavers.xsd
364 |
365 | Extensions/httpfs.duckdb_extension*
366 |
367 | Extensions/spatial.duckdb*
368 |
369 |
370 | .vscode/settings.json
371 |
--------------------------------------------------------------------------------
/AGOL_PUBLISHING_SETUP.md:
--------------------------------------------------------------------------------
1 | # ArcGIS Online Publishing Setup Guide
2 |
3 | This guide explains how to set up automatic publishing to ArcGIS Online (AGOL) when creating releases.
4 |
5 | ## Overview
6 |
7 | The automated publishing system uploads your ArcGIS Pro add-in to ArcGIS Online automatically when you create a production release tag. This eliminates the need to manually upload files after each release.
8 |
9 | ## Authentication Methods
10 |
11 | **Important**: For automated publishing from GitHub Actions, you need **app authentication** (client credentials flow), NOT user authentication. The guide at https://developers.arcgis.com/documentation/security-and-authentication/user-authentication/oauth-credentials-user/ is for user authentication, which requires interactive sign-in. For automated publishing, follow the steps below for app authentication.
12 |
13 | You can use one of two authentication methods:
14 |
15 | ### Option 1: OAuth2 App Client Token (Recommended)
16 |
17 | This is the recommended method as it's more secure and doesn't require storing your personal password. This uses **app authentication** (client credentials flow), not user authentication.
18 |
19 | **Steps:**
20 |
21 | 1. **Create OAuth Credentials for App Authentication in ArcGIS Online:**
22 | - Log in to ArcGIS Online: https://www.arcgis.com
23 | - Go to **Content** → **My content** → **New item**
24 | - In the "New item" modal, click **Developer credentials** (the option with the key/code icon)
25 | - You'll see a "Create developer credentials" screen with "Select credential type"
26 | - Click on the **OAuth 2.0 credentials** card (or click "Next" if it's the only option)
27 | - **Important**: On the next screen, you'll see a comparison between "For user authentication" and "For app authentication"
28 | - Choose **App authentication** (not user authentication) - this uses the client credentials flow
29 | - The comparison table shows: "For app authentication" is for "public applications that do not require your users to sign in and generate access tokens server-side" - this is what you need!
30 | - On the configuration screen that follows:
31 | - **Redirect URLs**: Add `urn:ietf:wg:oauth:2.0:oob` (required by UI, but won't be used for app authentication/client credentials flow)
32 | - Type the URL in the field and click "+ Add"
33 | - This is a placeholder URL for out-of-band authentication flows
34 | - **Referrer URLs**: Leave blank (optional)
35 | - **Application environment**: Select **"Server"** (this is for server-side automation from GitHub Actions)
36 | - **URL**: Leave as default (`https://`) or blank
37 | - On the "Item details" screen:
38 | - **Title** (required): Enter a name like `GitHub Actions Publisher` or `AGOL Publishing Credentials`
39 | - **Folder**: Leave as default (your user folder)
40 | - **Tags** (optional): Add tags like `GitHub Actions`, `Automation`, `CI/CD`
41 | - **Summary** (optional): Add a brief description like "OAuth credentials for automated publishing of ArcGIS Pro add-in to ArcGIS Online from GitHub Actions"
42 | - Click **Create**
43 | - After creation, you'll be taken to the item Overview page
44 | - **Important**: Click the **"Settings"** tab, then click **"Register application"** button
45 | - Fill in the registration form:
46 | - **Redirect URLs**: Add `urn:ietf:wg:oauth:2.0:oob` or `https://localhost` (required by UI)
47 | - **Application environment**: Select **"Server"**
48 | - **URL**: Leave as default (`https://`) or blank
49 | - Click **"Register"**
50 | - **Important**: After registering, you'll see the **Client ID** and **Client Secret** on the Application page - copy both immediately!
51 | - Click the eye icon to reveal the Client Secret
52 | - Use the clipboard icons to copy both values
53 |
54 | **Note**: If you don't see "Developer credentials" as an option, you may need to:
55 | - Check that your account has the "Generate API keys" privilege
56 | - Contact your ArcGIS Online administrator to enable developer credentials
57 |
58 | **Reference**: For more details on app authentication, see: https://developers.arcgis.com/documentation/security-and-authentication/app-authentication/
59 |
60 | 2. **Add GitHub Secrets:**
61 | - Go to your GitHub repository
62 | - Navigate to **Settings** → **Secrets and variables** → **Actions**
63 | - Click **New repository secret** and add:
64 | - `AGOL_CLIENT_ID` = Your OAuth2 Client ID
65 | - `AGOL_CLIENT_SECRET` = Your OAuth2 Client Secret
66 | - `AGOL_ITEM_ID` = The item ID of your add-in in AGOL (see below)
67 |
68 | ### Option 2: Username/Password
69 |
70 | If you prefer to use your ArcGIS Online username and password:
71 |
72 | 1. **Add GitHub Secrets:**
73 | - Go to your GitHub repository
74 | - Navigate to **Settings** → **Secrets and variables** → **Actions**
75 | - Click **New repository secret** and add:
76 | - `AGOL_USERNAME` = Your ArcGIS Online username
77 | - `AGOL_PASSWORD` = Your ArcGIS Online password
78 | - `AGOL_ITEM_ID` = The item ID of your add-in in AGOL (see below)
79 |
80 | ## Finding Your Item ID
81 |
82 | The Item ID is the unique identifier for your add-in in ArcGIS Online:
83 |
84 | 1. Go to your add-in item page in ArcGIS Online
85 | 2. Look at the URL: `https://www.arcgis.com/home/item.html?id=**8293d1220b7848848ce316b4fa3263b5**`
86 | 3. The Item ID is the part after `id=` (e.g., `8293d1220b7848848ce316b4fa3263b5`)
87 |
88 | ## Optional Configuration
89 |
90 | You can also set these optional secrets for more control:
91 |
92 | - `AGOL_PORTAL_URL` - Custom portal URL (default: `https://www.arcgis.com`)
93 | - `AGOL_TITLE` - Custom title for the add-in item
94 | - `AGOL_DESCRIPTION` - Custom description for the add-in item
95 | - `AGOL_TAGS` - Comma-separated tags (default: `ArcGIS Pro,Add-in,GeoParquet`)
96 |
97 | ## How It Works
98 |
99 | 1. **Create a production release tag** (e.g., `v0.1.4`)
100 | 2. **GitHub Actions automatically:**
101 | - Updates Config.daml version
102 | - Builds the add-in
103 | - Creates GitHub release
104 | - **Publishes to ArcGIS Online** (if credentials are configured)
105 |
106 | ## Testing
107 |
108 | To test the AGOL publishing:
109 |
110 | 1. Ensure all required secrets are set
111 | 2. Create a test production tag: `git tag v0.1.4-test && git push origin v0.1.4-test`
112 | 3. Monitor the workflow in GitHub Actions
113 | 4. Check that the add-in was updated in ArcGIS Online
114 |
115 | ## Troubleshooting
116 |
117 | ### Publishing Step is Skipped
118 |
119 | If the publishing step is skipped, check:
120 | - Are the required secrets set? (Either OAuth2 credentials OR username/password)
121 | - Is it a production tag? (Dev tags with `-dev` suffix won't publish)
122 | - Check the workflow logs for error messages
123 |
124 | ### Authentication Fails
125 |
126 | - **OAuth2**: Verify Client ID and Secret are correct
127 | - **Username/Password**: Verify credentials are correct and account is active
128 | - Check that the app/client has permission to update items
129 |
130 | ### Upload Fails (403 Permission Error)
131 |
132 | If you see a "403 - You do not have permissions" error:
133 |
134 | **For OAuth2 App Client Credentials:**
135 | - OAuth2 app client credentials may not have permission to update items by default
136 | - **Solution 1**: Grant permissions to the OAuth2 app:
137 | 1. Go to ArcGIS Online → Content → Your OAuth credentials item
138 | 2. Check the Settings tab for permission/scopes configuration
139 | 3. Ensure the app has "Content: Update" or similar permissions
140 | - **Solution 2**: Use username/password authentication instead (recommended):
141 | - OAuth2 app client credentials have limited permissions
142 | - Username/password authentication has full user permissions
143 | - Set `AGOL_USERNAME` and `AGOL_PASSWORD` in GitHub Secrets
144 | - Remove `AGOL_CLIENT_ID` and `AGOL_CLIENT_SECRET` (or leave them empty)
145 | - **Solution 3**: Ensure the item is owned/shared with the app:
146 | - The add-in item must be accessible to the OAuth2 app
147 | - Check item sharing settings in ArcGIS Online
148 |
149 | **For Username/Password:**
150 | - Verify the Item ID is correct
151 | - Check that you have permission to update the item
152 | - Ensure the item exists in ArcGIS Online
153 | - Check workflow logs for specific error messages
154 |
155 | ## Security Notes
156 |
157 | - **Never commit secrets to the repository**
158 | - Use GitHub Secrets for all sensitive information
159 | - OAuth2 app client tokens are preferred over username/password
160 | - Consider using a dedicated service account for automation
161 |
162 | ## Example Workflow
163 |
164 | ```bash
165 | # 1. Set up secrets in GitHub (one-time setup)
166 | # 2. Create a release tag
167 | git tag v0.1.4
168 | git push origin v0.1.4
169 |
170 | # 3. GitHub Actions automatically:
171 | # - Updates version
172 | # - Builds add-in
173 | # - Creates GitHub release
174 | # - Publishes to AGOL ✅
175 | ```
176 |
177 | ## Support
178 |
179 | For issues or questions:
180 | - Check the workflow logs in GitHub Actions
181 | - Review the PowerShell script: `.github/workflows/publish-agol.ps1`
182 | - Open an issue in the repository
183 |
184 |
--------------------------------------------------------------------------------
/.github/workflows/publish-agol.ps1:
--------------------------------------------------------------------------------
#!/usr/bin/env pwsh
<#
.SYNOPSIS
    Publishes ArcGIS Pro add-in to ArcGIS Online using Portal REST API
.DESCRIPTION
    This script uploads and updates an add-in item in ArcGIS Online using the Portal REST API.
    Supports both OAuth2 app client token (client credentials flow) and
    username/password authentication. Intended to run non-interactively from
    CI (GitHub Actions); all failures exit with code 1.
.PARAMETER AddInFile
    Path to the .esriAddInX file to publish
.PARAMETER PortalUrl
    ArcGIS Online portal URL (default: https://www.arcgis.com)
.PARAMETER ItemId
    The item ID of the existing add-in in AGOL (required for updates)
.PARAMETER AuthMethod
    Authentication method: 'token' or 'username' (default: 'token')
.PARAMETER ClientId
    OAuth2 app client ID (required if AuthMethod is 'token')
.PARAMETER ClientSecret
    OAuth2 app client secret (required if AuthMethod is 'token')
.PARAMETER Username
    Portal username (required if AuthMethod is 'username')
.PARAMETER Password
    Portal password (required if AuthMethod is 'username')
.PARAMETER Title
    Title for the add-in item (optional, uses filename if not provided)
.PARAMETER Description
    Description for the add-in item (optional)
.PARAMETER Tags
    Comma-separated tags for the add-in (optional)
.EXAMPLE
    .\publish-agol.ps1 -AddInFile "addin.esriAddInX" -ItemId "abc123" -ClientId "myclientid" -ClientSecret "mysecret"
#>

param(
    # --- Target item and package ---
    [Parameter(Mandatory=$true)]
    [string]$AddInFile,

    [Parameter(Mandatory=$false)]
    [string]$PortalUrl = "https://www.arcgis.com",

    [Parameter(Mandatory=$true)]
    [string]$ItemId,

    # --- Authentication selection; the matching credential pair is
    #     validated after parameter binding, not here ---
    [Parameter(Mandatory=$false)]
    [ValidateSet('token', 'username')]
    [string]$AuthMethod = 'token',

    [Parameter(Mandatory=$false)]
    [string]$ClientId,

    [Parameter(Mandatory=$false)]
    [string]$ClientSecret,

    [Parameter(Mandatory=$false)]
    [string]$Username,

    [Parameter(Mandatory=$false)]
    [string]$Password,

    # --- Optional item metadata ---
    [Parameter(Mandatory=$false)]
    [string]$Title,

    [Parameter(Mandatory=$false)]
    [string]$Description,

    [Parameter(Mandatory=$false)]
    [string]$Tags = "ArcGIS Pro,Add-in,GeoParquet"
)
69 |
# Banner
$rule = "================================================"
foreach ($line in @($rule, " ArcGIS Online Publishing Script", $rule, "")) {
    Write-Host $line
}

# Fail fast when the package is missing or the credential pair required by
# the chosen authentication method was not supplied.
if (!(Test-Path $AddInFile)) {
    Write-Error "Add-in file not found at: $AddInFile"
    exit 1
}

switch ($AuthMethod) {
    'token' {
        if (-not $ClientId -or -not $ClientSecret) {
            Write-Error "ClientId and ClientSecret are required when using token authentication"
            exit 1
        }
    }
    'username' {
        if (-not $Username -or -not $Password) {
            Write-Error "Username and Password are required when using username authentication"
            exit 1
        }
    }
}

# Default the item title to the package file name when none was supplied.
if (-not $Title) {
    $Title = [System.IO.Path]::GetFileNameWithoutExtension($AddInFile)
}

# Echo the effective configuration (no secrets) for the CI log.
Write-Host "Portal URL: $PortalUrl"
Write-Host "Item ID: $ItemId"
Write-Host "Add-in file: $AddInFile"
Write-Host "Authentication: $AuthMethod"
Write-Host ""
101 |
try {
    # ------------------------------------------------------------------
    # Step 1: Obtain an authentication token
    # ------------------------------------------------------------------
    Write-Host "🔐 Authenticating with ArcGIS Online..."

    $token = $null
    if ($AuthMethod -eq 'token') {
        # OAuth2 app authentication (client credentials flow).
        $tokenUrl = "$PortalUrl/sharing/rest/oauth2/token"
        $tokenBody = @{
            client_id = $ClientId
            client_secret = $ClientSecret
            grant_type = "client_credentials"
            # Be explicit about the response format rather than relying on
            # the endpoint's default.
            f = "json"
        }

        try {
            $tokenResponse = Invoke-RestMethod -Uri $tokenUrl -Method Post -Body $tokenBody -ContentType "application/x-www-form-urlencoded"
            $token = $tokenResponse.access_token

            if (-not $token) {
                # The portal can report failures in a 200 response carrying an
                # "error" object, so inspect the payload as well as exceptions.
                if ($tokenResponse.error) {
                    Write-Error "Failed to obtain access token: $($tokenResponse.error.message) - $($tokenResponse.error.details)"
                } else {
                    Write-Error "Failed to obtain access token: No access_token in response"
                }
                exit 1
            }
        }
        catch {
            Write-Error "Authentication request failed: $_"
            if ($_.ErrorDetails.Message) {
                Write-Error "Response: $($_.ErrorDetails.Message)"
            }
            exit 1
        }
        Write-Host "✅ Authentication successful (OAuth2)"
    }
    else {
        # Username/password authentication via generateToken.
        $tokenUrl = "$PortalUrl/sharing/rest/generateToken"
        $tokenBody = @{
            username = $Username
            password = $Password
            # Use the target portal as the referer so the token is honored on
            # custom portals passed via -PortalUrl, not only www.arcgis.com.
            referer = $PortalUrl
            f = "json"
        }

        $tokenResponse = Invoke-RestMethod -Uri $tokenUrl -Method Post -Body $tokenBody -ContentType "application/x-www-form-urlencoded"
        $token = $tokenResponse.token

        if (-not $token) {
            Write-Error "Failed to obtain access token: $($tokenResponse.error.message)"
            exit 1
        }
        Write-Host "✅ Authentication successful (Username/Password)"
    }

    # ------------------------------------------------------------------
    # Step 2: Upload the add-in file via the item "update" operation
    # ------------------------------------------------------------------
    Write-Host ""
    Write-Host "📤 Uploading add-in file..."

    $uploadUrl = "$PortalUrl/sharing/rest/content/users/self/items/$ItemId/update"
    $fileName = [System.IO.Path]::GetFileName($AddInFile)

    # The update operation requires multipart/form-data when a file is sent.
    $boundary = [System.Guid]::NewGuid().ToString()
    $fileBytes = [System.IO.File]::ReadAllBytes($AddInFile)

    # Build the multipart body manually: text fields first, then the raw
    # file bytes, then the closing boundary.
    $LF = "`r`n"
    $bodyParts = @()

    # token field
    $bodyParts += "--$boundary"
    $bodyParts += "Content-Disposition: form-data; name=`"token`""
    $bodyParts += ""
    $bodyParts += $token

    # f=json field: without it the portal answers with HTML, which
    # Invoke-RestMethod cannot parse into an object, so the
    # $uploadResponse.error check below would never detect a failure.
    $bodyParts += "--$boundary"
    $bodyParts += "Content-Disposition: form-data; name=`"f`""
    $bodyParts += ""
    $bodyParts += "json"

    # file field header; the binary payload follows outside the text join
    $bodyParts += "--$boundary"
    $bodyParts += "Content-Disposition: form-data; name=`"file`"; filename=`"$fileName`""
    $bodyParts += "Content-Type: application/octet-stream"
    $bodyParts += ""

    # Combine text parts and binary file
    $textBody = $bodyParts -join $LF
    $textBytes = [System.Text.Encoding]::UTF8.GetBytes($textBody + $LF)
    $endBoundary = [System.Text.Encoding]::UTF8.GetBytes($LF + "--$boundary--" + $LF)

    $bodyStream = New-Object System.IO.MemoryStream
    $bodyStream.Write($textBytes, 0, $textBytes.Length)
    $bodyStream.Write($fileBytes, 0, $fileBytes.Length)
    $bodyStream.Write($endBoundary, 0, $endBoundary.Length)
    $bodyBytes = $bodyStream.ToArray()
    $bodyStream.Close()

    $headers = @{
        "Authorization" = "Bearer $token"
    }

    try {
        $uploadResponse = Invoke-RestMethod -Uri $uploadUrl -Method Post -Body $bodyBytes -ContentType "multipart/form-data; boundary=$boundary" -Headers $headers

        if ($uploadResponse.error) {
            Write-Error "Upload failed: $($uploadResponse.error.message)"
            exit 1
        }

        Write-Host "✅ File uploaded successfully"
    }
    catch {
        # HTTP-level failures surface here; try to extract the portal's JSON
        # error body for a clearer message.
        $errorDetails = $_.ErrorDetails.Message | ConvertFrom-Json -ErrorAction SilentlyContinue
        if ($errorDetails -and $errorDetails.error) {
            Write-Error "Upload failed: $($errorDetails.error.message)"
        } else {
            Write-Error "Upload failed: $_"
        }
        exit 1
    }

    # ------------------------------------------------------------------
    # Step 3: Update item metadata (if provided)
    # NOTE(review): $Title is only used as a local default and is never sent
    # to the portal here — confirm whether the item title should be updated.
    # ------------------------------------------------------------------
    if ($Description -or $Tags) {
        Write-Host ""
        Write-Host "📝 Updating item metadata..."

        $updateUrl = "$PortalUrl/sharing/rest/content/users/self/items/$ItemId/update"
        $updateParams = @{
            token = $token
            f = "json"
        }

        if ($Description) {
            $updateParams.description = $Description
        }

        if ($Tags) {
            $updateParams.tags = $Tags
        }

        $updateResponse = Invoke-RestMethod -Uri $updateUrl -Method Post -Body $updateParams -ContentType "application/x-www-form-urlencoded"

        # Metadata failure is non-fatal: the binary was already uploaded.
        if ($updateResponse.error) {
            Write-Warning "Metadata update failed: $($updateResponse.error.message)"
        } else {
            Write-Host "✅ Metadata updated successfully"
        }
    }

    Write-Host ""
    Write-Host "✅ Successfully published to ArcGIS Online!" -ForegroundColor Green
    Write-Host " Item URL: $PortalUrl/home/item.html?id=$ItemId"
    Write-Host ""

    exit 0
}
catch {
    # Catch-all for anything not handled above (e.g. file read errors).
    Write-Error "Failed to publish to ArcGIS Online: $_"
    Write-Error $_.Exception.Message
    exit 1
}
262 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ArcGIS Pro GeoParquet Add‑in
2 |
3 | A powerful ArcGIS Pro add-in that simplifies working with cloud-native geospatial data formats, especially GeoParquet files. This tool uses [DuckDB](https://duckdb.org/) to deliver high-performance data processing directly within ArcGIS Pro, making it ideal for both GIS professionals and newcomers alike.
4 |
5 | 
6 |
7 | ## 🎥 See It In Action
8 |
9 | Watch the add-in in action loading Overture Maps data with incredible performance:
10 |
11 | [](https://www.linkedin.com/posts/ryan-lopez-fresnocounty_now-the-arcgis-pro-35-brings-native-geoparquet-activity-7333914884641890307-sYOY)
12 |
13 | 📖 **Comprehensive Guide**: Mark Litwintschik wrote an excellent [step-by-step guide](https://tech.marksblogg.com/overture-maps-esri-arcgis-pro.html) covering installation and usage.
14 |
15 | ## Features
16 |
17 | - **Native GeoParquet Support**: Fully leverages ArcGIS Pro 3.5's native GeoParquet capabilities for optimal performance
18 | - **High-Performance Processing**: Optimized data pipeline with 5-15% performance improvements and smart empty dataset handling
19 | - **Complex Data Preservation**: Maintains original data structure including nested types
20 | - **Cloud-Native Integration**: Direct access to data in S3, Azure, and other cloud storage
21 | - **Wizard-Driven Interface**: Simple step-by-step process with clean, focused progress reporting
22 | - **Overture Maps Integration**: Specialized support for Overture Maps Foundation data
23 | - **In-Memory Processing**: Uses DuckDB for high-performance operations
24 | - **Spatial Filtering**: Filter data by map extent before loading
25 | - **Multi-File Feature Connections**: Automatically creates MFCs for efficient multi-dataset workflows
26 | - **Incredible Performance**: Experience blazing-fast map redraws thanks to Parquet's optimized format
27 |
28 | ## Requirements
29 |
30 | - **ArcGIS Pro**: Version 3.5 or later
31 | - **.NET SDK**: Version 8.0 or later
32 | - **Storage**: Minimum 4GB free disk space for temporary data
33 | - **Memory**: Minimum 8GB RAM (16GB recommended for large datasets)
34 | - **For Developers**:
35 | - Visual Studio 2022 or newer
36 | - ArcGIS Pro SDK for .NET
37 |
38 | ## Installation
39 |
40 | ### Option 1: Quick Installation
41 |
42 | 1. Navigate to [ArcGIS Marketplace](https://cofgisonline.maps.arcgis.com/home/item.html?id=8293d1220b7848848ce316b4fa3263b5)
43 | 2. Click **Download** to get the add-in
44 | 3. Once downloaded, double-click the `ArcGISPro-GeoParquet-Addin.esriAddInX` file
45 | 4. Follow the installation prompts
46 | 5. Restart ArcGIS Pro if it's already running
47 |
48 | ### Option 2: Build from Source
49 |
50 | 1. **Clone the Repository**
51 | ```bash
52 | git clone https://github.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin.git
53 | cd ArcGISPro-GeoParquet-Addin
54 | ```
55 |
56 | 2. **Open in Visual Studio**
57 | - Launch Visual Studio 2022
58 | - Go to **File → Open → Project/Solution**
59 | - Select `DuckDBGeoparquet.sln`
60 |
61 | 3. **Build the Add-in**
62 | - Set configuration to Debug or Release
63 | - Right-click the project in Solution Explorer
64 | - Select **Build**
65 | - Find the generated `.esriAddInX` file in the bin folder
66 |
67 | 4. **Install the Add-in**
68 | - Double-click the `.esriAddInX` file
 69 |    - Or copy it to: `C:\Users\<username>\Documents\ArcGIS\AddIns\ArcGISPro`
70 | - Restart ArcGIS Pro if it's already running
71 |
72 | ## Usage
73 |
74 | ### Basic Workflow
75 |
76 | 1. **Start ArcGIS Pro** and open a project
77 | 2. Navigate to the **Add-In** tab on the ribbon
78 | 3. Click the **Launch Overture** button to open the dockpane
79 | 4. Follow the wizard steps:
80 | - Select data source
81 | - Preview the data
82 | - Apply transformations (if needed)
83 | - Export to GeoParquet format
84 |
85 | ### Working with Overture Maps Data
86 |
87 | #### Step 1: Select Overture Maps Theme
88 | 1. Open the Wizard Dockpane
89 | 2. Choose a theme from the dropdown (addresses, base, buildings, etc.)
90 | 3. Review the estimated data volume information
91 |
92 | #### Step 2: Define Your Area of Interest
93 | 1. Select "Use Current Map Extent" to use your current view
94 | 2. Alternatively, use "Custom Extent" to define a specific area
95 |
96 | #### Step 3: Load and Transform
97 | 1. Click "Load Data" to begin the download process
98 | 2. Monitor the progress through the clean, focused status reporting
99 | 3. The add-in handles all processing automatically with optimized performance:
100 | - Skips empty datasets for faster processing
101 | - Preserves complex data types
102 | - Maintains spatial relationships
103 | - Ensures proper georeferencing
104 |
105 | #### Step 4: Visualization
106 | 1. Data is automatically added to your map once processed
107 | 2. Layers are named based on the Overture Maps theme and geometry type
108 | 3. All attributes are preserved in their original structure
109 |
110 | #### Step 5: Multi-File Feature Connection (Optional)
111 | 1. After loading multiple datasets, you'll be prompted to create an MFC
112 | 2. Choose your preferred location for the MFC file
113 | 3. This enables more efficient workflows when working with multiple related datasets
114 | 4. The MFC appears in your Catalog for easy access and management
115 |
116 | ### Advanced Features
117 |
118 | - **Spatial Filtering**: Reduces data volume by filtering to your area of interest
119 | - **Geometry-Based Layers**: Data is separated by geometry type for optimal display
120 | - **Release Selection**: Choose specific Overture Maps data releases
121 | - **Progress Tracking**: Detailed logging of each processing step
122 |
123 | ## Project Structure
124 |
125 | - **Views/**
126 | - `WizardDockpane.xaml` / `WizardDockpane.xaml.cs`: The UI components
127 | - `WizardDockpaneViewModel.cs`: ViewModel controlling the UI logic
128 | - **Services/**
129 | - `DataProcessor.cs`: Core data handling using DuckDB
130 | - **DuckDBGeoparquetModule.cs**: Main module class
131 | - **Config.daml**: Add-in manifest defining components
132 |
133 | ## Troubleshooting
134 |
135 | ### Common Issues
136 |
137 | #### Installation Problems
138 | - **Missing SDK Templates**: Ensure you've installed the [ArcGIS Pro SDK for .NET](https://pro.arcgis.com/en/pro-app/latest/sdk/)
139 | - **Access Denied Errors**: Right-click the .esriAddInX file → Properties → Unblock
140 | - **Add-in Not Appearing**: Verify installation path and restart ArcGIS Pro completely
141 |
142 | #### Data Loading Issues
143 | - **Timeout Errors**: Large Overture datasets may require multiple attempts
144 | - **Memory Errors**: Try reducing your area of interest or closing other applications
145 | - **Missing Data**: Verify internet connectivity and cloud storage permissions
146 |
147 | #### Performance Optimization
148 | - **Slow Processing**:
149 | - The add-in now automatically skips empty datasets for faster processing
150 | - Reduce the area of interest for very large regions
151 | - Close other resource-intensive applications
152 | - Ensure you have at least 8GB of available RAM
153 | - **Large Files**:
154 | - Recent optimizations provide 5-15% performance improvements
155 | - Consider using a machine with more RAM for very large areas
156 | - Process data in smaller geographic chunks
157 | - **Improved Logging**:
158 | - Cleaner, more focused progress reporting reduces visual noise
159 | - Essential debugging information is still preserved
160 |
161 | ### Debug Logging
162 |
163 | The add-in maintains detailed logs that can help diagnose issues:
164 | - Check the log output in the dockpane
165 | - Review system logs for DuckDB-related errors
166 | - Optimized logging reduces noise while preserving essential debugging information
167 |
168 | ## Recent Improvements
169 |
170 | 📦 **Latest Release**: See [GitHub Releases](https://github.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/releases) for complete release notes and changelog.
171 |
172 | Release notes are automatically generated from git commits, ensuring you always have the most up-to-date information about what's new in each version.
173 |
174 | ### Key Recent Features
175 | - **Automated Releases**: Fully automated CI/CD pipeline for seamless version management
176 | - **ArcGIS Online Publishing**: Automatic publishing to ArcGIS Marketplace with each release
177 | - **Performance Optimizations**: 5-15% faster processing through optimized file operations
178 | - **Smart Dataset Handling**: Automatically skips empty datasets for faster processing
179 | - **Native GeoParquet**: Fully utilizes ArcGIS Pro 3.5's built-in GeoParquet support
180 |
181 | ## Contributing
182 |
183 | Contributions to this project are welcome! Here's how to get started:
184 |
185 | 1. Fork the repository
186 | 2. Create a feature branch: `git checkout -b new-feature`
187 | 3. Make your changes
188 | 4. Submit a pull request
189 |
190 | Please adhere to the existing code style and include appropriate tests.
191 |
192 | ## License
193 |
194 | This project is licensed under the Apache License 2.0 - see the [LICENSE.txt](LICENSE.txt) file for details.
195 |
196 | ## Media Coverage & Community
197 |
198 | - 📺 **Demo Video**: [LinkedIn demonstration](https://www.linkedin.com/posts/ryan-lopez-fresnocounty_now-the-arcgis-pro-35-brings-native-geoparquet-activity-7333914884641890307-sYOY) by Ryan Lopez
199 | - 📖 **Technical Guide**: [Comprehensive walkthrough](https://tech.marksblogg.com/overture-maps-esri-arcgis-pro.html) by Mark Litwintschik
200 | - 🚀 **CI/CD Implementation**: [Automated release pipeline](https://www.linkedin.com/feed/update/urn:li:activity:7338847609953947648/) built by Youssef Harby
201 | - 🏆 **Inspiration**: Built upon the excellent work of [Chris Holmes](https://github.com/cholmes) and his QGIS Overture plugin
202 |
203 | ## Acknowledgments
204 |
205 | - [Overture Maps Foundation](https://overturemaps.org/) for providing open map data
206 | - [DuckDB](https://duckdb.org/) for the powerful embedded database engine
207 | - [ArcGIS Pro SDK](https://pro.arcgis.com/en/pro-app/latest/sdk/) for development tools
208 | - [Chris Holmes](https://github.com/cholmes) for pioneering the DuckDB + Overture approach in QGIS
209 | - [Mark Litwintschik](https://tech.marksblogg.com/) for the excellent technical documentation
210 | - [Youssef Harby](https://www.linkedin.com/in/yharby/) for implementing the professional CI/CD pipeline
211 | - All contributors who have helped improve this add-in
212 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/.github/workflows/build.yaml:
--------------------------------------------------------------------------------
1 | name: Build ArcGIS Pro GeoParquet Add-in
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | - dev # Test workflow changes on dev branch
8 | tags:
9 | - 'v*' # Production release tags, e.g. v1.0.0
10 | - 'v*-dev' # Dev/test release tags, e.g. v0.1.3-dev
11 | pull_request:
12 | branches: [ main, dev ]
13 | workflow_dispatch: # Allows manual triggering
14 |
15 | permissions:
16 | contents: write # Needed for creating releases
17 |
18 | jobs:
19 | build:
20 | runs-on: windows-2022 # Using Windows Server 2022 with Visual Studio 2022
21 |
22 | steps:
23 | - name: Checkout repository
24 | uses: actions/checkout@v4
25 | with:
26 | fetch-depth: 0 # Fetch all history for proper git operations
27 |
28 | - name: Add MSBuild to PATH
29 | uses: microsoft/setup-msbuild@v2
30 |
31 | - name: Setup .NET
32 | uses: actions/setup-dotnet@v4
33 | with:
34 | dotnet-version: '8.0.x'
35 |
36 | - name: Update Config.daml version from tag
37 | if: startsWith(github.ref, 'refs/tags/v')
38 | shell: powershell
39 | run: |
40 | $version = "${{ github.ref }}".Replace('refs/tags/v', '')
41 | Write-Host "Updating Config.daml to version: $version"
42 |
43 | # Remove -dev suffix for version number in Config.daml
44 | $cleanVersion = $version -replace '-dev$', ''
45 | pwsh ./.github/workflows/update-version.ps1 -Version $cleanVersion -ConfigPath "Config.daml"
46 |
47 | - name: Restore NuGet packages
48 | run: nuget restore DuckDBGeoparquet.sln
49 |
50 | - name: Build solution with MSBuild
51 | run: msbuild DuckDBGeoparquet.sln /p:Configuration=Release /p:Platform="Any CPU"
52 |
53 | - name: Commit version update back to main branch
54 | if: startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, '-dev')
55 | shell: bash
56 | run: |
57 | VERSION=${GITHUB_REF#refs/tags/v}
58 |
59 | echo "📝 Committing version $VERSION back to main branch..."
60 |
61 | # Configure git with GitHub Actions bot identity
62 | git config user.name "github-actions[bot]"
63 | git config user.email "github-actions[bot]@users.noreply.github.com"
64 |
65 | # Check if Config.daml has changes
66 | if git diff --quiet Config.daml; then
67 | echo "⚠️ No version changes to commit (Config.daml unchanged)"
68 | exit 0
69 | fi
70 |
71 | # Fetch the main branch and switch to it
72 | git fetch origin main
73 | git checkout main
74 |
75 | # Add and commit the version update
76 | git add Config.daml
77 | git commit -m "chore: bump version to $VERSION [skip ci]
78 |
79 | Automated version update from release tag v$VERSION
80 |
81 | Co-Authored-By: github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
82 |
83 | # Push to main branch
84 | git push origin main
85 |
86 | echo "✅ Version $VERSION committed back to main branch"
87 |
88 | - name: Get version from tag
89 | id: get_version
90 | shell: bash
91 | run: |
92 | # Extract version from tag (if present), otherwise use today's date
93 | if [[ $GITHUB_REF == refs/tags/v* ]]; then
94 | echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
95 | else
96 | echo "VERSION=$(date +'%Y.%m.%d')" >> $GITHUB_OUTPUT
97 | fi
98 |
99 | - name: Create Release Zip
100 | shell: powershell
101 | run: |
102 | $version = "${{ steps.get_version.outputs.VERSION }}"
103 | $zipName = "ArcGISPro-GeoParquet-Addin-v${version}.zip"
104 | $addinFile = Get-ChildItem -Path "bin/Release/net8.0-windows" -Filter "*.esriAddInX" | Select-Object -First 1
105 |
106 | if ($addinFile) {
107 | # Create a directory for the release
108 | New-Item -Path "release" -ItemType Directory -Force
109 |
110 | # Create a temporary directory to properly structure the zip content
111 | $tempDir = "temp-release"
112 | New-Item -Path $tempDir -ItemType Directory -Force
113 |
114 | # Copy the add-in file to the temp directory
115 | Copy-Item -Path $addinFile.FullName -Destination "$tempDir/"
116 |
117 | # Create the zip file containing only the add-in
118 | Compress-Archive -Path "$tempDir/*" -DestinationPath "release/$zipName" -Force
119 |
120 | # Clean up the temporary directory
121 | Remove-Item -Path $tempDir -Recurse -Force
122 |
123 | echo "Created release zip: $zipName containing the .esriAddInX file"
124 | } else {
125 | echo "::error::No .esriAddInX file found"
126 | exit 1
127 | }
128 |
129 | - name: Upload build artifacts
130 | uses: actions/upload-artifact@v4
131 | with:
132 | name: addin-package
133 | path: "bin/Release/net8.0-windows/*.esriAddInX"
134 | if-no-files-found: error
135 |
136 | - name: Upload release zip
137 | uses: actions/upload-artifact@v4
138 | with:
139 | name: release-zip
140 | path: "release/*.zip"
141 | if-no-files-found: error
142 |
143 | # Create GitHub Release when a production version tag is pushed (not -dev tags)
144 | - name: Create GitHub Release
145 | if: startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, '-dev')
146 | uses: softprops/action-gh-release@v2.3.2
147 | with:
148 | files: release/*.zip
149 | name: v${{ steps.get_version.outputs.VERSION }}
150 | generate_release_notes: true
151 |
152 | # For dev tags, mark as pre-release
153 | - name: Create GitHub Pre-Release (Dev)
154 | if: startsWith(github.ref, 'refs/tags/v') && contains(github.ref, '-dev')
155 | uses: softprops/action-gh-release@v2.3.2
156 | with:
157 | files: release/*.zip
158 | name: v${{ steps.get_version.outputs.VERSION }}
159 | generate_release_notes: true
160 | prerelease: true
161 | draft: false
162 |
163 | # Publish to ArcGIS Online (only for production releases)
164 | - name: Setup Python
165 | if: startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, '-dev')
166 | uses: actions/setup-python@v5
167 | with:
168 | python-version: '3.11'
169 |
170 | - name: Install arcgis-python-api
171 | if: startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, '-dev')
172 | run: pip install arcgis
173 |
174 | - name: Generate AGOL description
175 | if: startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, '-dev')
176 | id: generate_description
177 | shell: bash
178 | run: |
179 | set +e # Don't exit on errors for this step
180 | VERSION=${GITHUB_REF#refs/tags/v}
181 |
182 | # Get all tags sorted by version, find the one before current
183 | # Get current tag name
184 | CURRENT_TAG=$(git describe --tags --exact-match HEAD 2>/dev/null || echo "")
185 |
186 | # Get previous tag (excluding -dev and -test tags)
187 | PREV_TAG=$(git tag --sort=-version:refname | grep -E "^v[0-9]+\.[0-9]+\.[0-9]+$" | grep -v "$CURRENT_TAG" | head -1 || echo "")
188 |
189 | # Get commits - handle errors gracefully, filter out CI/CD noise and technical commits
190 | COMMITS=""
191 | if [ -z "$PREV_TAG" ]; then
192 | # If no previous tag, get last 10 commits
193 | COMMITS=$(git log --oneline --pretty=format:"%s" -10 2>/dev/null || echo "")
194 | else
195 | # Get commits since previous release (excluding merge commits, version bumps, debug, and CI/CD commits)
196 | # Filter out: version bumps, CI/CD, workflow fixes, technical improvements, regex, metadata, description updates
197 | COMMITS=$(git log ${PREV_TAG}..HEAD --oneline --pretty=format:"%s" --no-merges 2>/dev/null | \
198 | grep -v -iE "(bump version|version to|chore:.*version|debug:|fix:.*workflow|feat:.*workflow|ci:|skip ci|\[skip ci\]|\[ci skip\]|regex|metadata|description|AGOL|publish|separate file|improve.*update|update.*reliability)" | \
199 | grep -v -iE "(fix:.*description|fix:.*AGOL|improve.*AGOL|improve.*description|separate.*upload)" | \
200 | head -3 || echo "")
201 | fi
202 |
203 | # Build a clean version line with meaningful, user-facing improvements
204 | VERSION_LINE="Latest Version: v${VERSION}"
205 | if [ -n "$COMMITS" ] && [ "$COMMITS" != "" ]; then
206 | # Extract meaningful improvements (remove prefixes, make user-friendly, filter technical terms)
207 | # Capitalize first letter and make it readable
208 | IMPROVEMENTS=$(echo "$COMMITS" | \
209 | sed 's/^fix: //' | sed 's/^feat: //' | sed 's/^chore: //' | \
210 | sed 's/^docs: //' | sed 's/^refactor: //' | \
211 | grep -v -iE "(regex|metadata|description|AGOL|publish|workflow|ci|separate|upload|reliability|improve.*update)" | \
212 | head -2 | \
213 | sed 's/^\(.\)/\U\1/' | \
214 | sed 's/$/./' | \
215 | tr '\n' ' ' | sed 's/ */ /g' | sed 's/\. /\. /g')
216 |
217 | if [ -n "$IMPROVEMENTS" ] && [ "$IMPROVEMENTS" != "" ]; then
218 | # Format as clean text with proper capitalization
219 | VERSION_LINE="${VERSION_LINE} - ${IMPROVEMENTS}"
220 | fi
221 | fi
222 |
223 | # Build description - only the version line to update existing description
224 | {
225 | echo 'DESCRIPTION<<EOF'
226 | echo "$VERSION_LINE"
227 | echo 'EOF'
228 | } >> $GITHUB_OUTPUT
229 |
230 | echo "Generated description for v${VERSION}"
231 | set -e # Re-enable error exit
232 |
233 | - name: Find add-in file
234 | if: startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, '-dev')
235 | id: find_addin
236 | shell: powershell
237 | run: |
238 | $addinFile = Get-ChildItem -Path "bin/Release/net8.0-windows" -Filter "*.esriAddInX" -Recurse | Select-Object -First 1
239 | if ($addinFile) {
240 | $fullPath = $addinFile.FullName
241 | Write-Host "Found add-in file: $fullPath"
242 | echo "ADDIN_FILE=$fullPath" >> $env:GITHUB_OUTPUT
243 | } else {
244 | Write-Host "ERROR: No .esriAddInX file found in bin/Release/net8.0-windows"
245 | Get-ChildItem -Path "bin/Release" -Recurse | Select-Object FullName | Format-Table
246 | exit 1
247 | }
248 |
249 | - name: Publish to ArcGIS Online
250 | if: startsWith(github.ref, 'refs/tags/v') && !contains(github.ref, '-dev')
251 | shell: bash
252 | run: |
253 | ADDIN_FILE="${{ steps.find_addin.outputs.ADDIN_FILE }}"
254 |
255 | if [ -z "$ADDIN_FILE" ]; then
256 | echo "ERROR: No .esriAddInX file found"
257 | exit 1
258 | fi
259 |
260 | echo "Using add-in file: $ADDIN_FILE"
261 |
262 | # Determine authentication method and build Python command
263 | # Note: Description updates disabled - just uploading the file
264 | if [ -n "$AGOL_CLIENT_ID" ] && [ -n "$AGOL_CLIENT_SECRET" ]; then
265 | echo "✅ Using OAuth2 token authentication"
266 | python ./.github/workflows/publish-agol.py \
267 | --auth-method token \
268 | --client-id "$AGOL_CLIENT_ID" \
269 | --client-secret "$AGOL_CLIENT_SECRET" \
270 | --addin-file "$ADDIN_FILE" \
271 | --item-id "$AGOL_ITEM_ID" \
272 | ${AGOL_PORTAL_URL:+--portal-url "$AGOL_PORTAL_URL"} \
273 | ${AGOL_TITLE:+--title "$AGOL_TITLE"} \
274 | ${AGOL_TAGS:+--tags "$AGOL_TAGS"}
275 | elif [ -n "$AGOL_USERNAME" ] && [ -n "$AGOL_PASSWORD" ]; then
276 | echo "✅ Using username/password authentication"
277 | python ./.github/workflows/publish-agol.py \
278 | --auth-method username \
279 | --username "$AGOL_USERNAME" \
280 | --password "$AGOL_PASSWORD" \
281 | --addin-file "$ADDIN_FILE" \
282 | --item-id "$AGOL_ITEM_ID" \
283 | ${AGOL_PORTAL_URL:+--portal-url "$AGOL_PORTAL_URL"} \
284 | ${AGOL_TITLE:+--title "$AGOL_TITLE"} \
285 | ${AGOL_TAGS:+--tags "$AGOL_TAGS"}
286 | else
287 | echo "WARNING: No AGOL credentials found. Skipping AGOL publishing."
288 | echo "Please ensure AGOL_CLIENT_ID and AGOL_CLIENT_SECRET are set in GitHub Secrets"
289 | exit 0
290 | fi
291 | env:
292 | AGOL_CLIENT_ID: ${{ secrets.AGOL_CLIENT_ID }}
293 | AGOL_CLIENT_SECRET: ${{ secrets.AGOL_CLIENT_SECRET }}
294 | AGOL_USERNAME: ${{ secrets.AGOL_USERNAME }}
295 | AGOL_PASSWORD: ${{ secrets.AGOL_PASSWORD }}
296 | AGOL_ITEM_ID: ${{ secrets.AGOL_ITEM_ID }}
297 | AGOL_PORTAL_URL: ${{ secrets.AGOL_PORTAL_URL }}
298 | AGOL_TITLE: ${{ secrets.AGOL_TITLE }}
299 | AGOL_DESCRIPTION: ${{ secrets.AGOL_DESCRIPTION }}
300 | AGOL_TAGS: ${{ secrets.AGOL_TAGS }}
--------------------------------------------------------------------------------
/AUTOMATED_VERSIONING.md:
--------------------------------------------------------------------------------
1 | # Automated Versioning & Release Guide
2 |
3 | **Quick Summary**: This project now has fully automated version management. Just create a git tag and push it - everything else happens automatically!
4 |
5 | ---
6 |
7 | ## 📋 Table of Contents
8 |
9 | 1. [What Was Fixed](#what-was-fixed)
10 | 2. [How It Works Now](#how-it-works-now)
11 | 3. [Creating a Release](#creating-a-release)
12 | 4. [Testing with Dev Branch](#testing-with-dev-branch)
13 | 5. [Build System Improvements](#build-system-improvements)
14 | 6. [Testing](#testing)
15 | 7. [Troubleshooting](#troubleshooting)
16 |
17 | ---
18 |
19 | ## What Was Fixed
20 |
21 | ### The Problem (Issue #5)
22 |
23 | - **Config.daml** showed version `0.1.0`
24 | - **GitHub Release** showed `v0.1.2`
25 | - **Users couldn't tell** which version they had installed
26 |
27 | **Root Cause**: Version in Config.daml had to be manually updated, leading to version drift.
28 |
29 | ### The Solution
30 |
31 | ✅ **Automated Version Management**
32 | - Config.daml is automatically updated from git tags
33 | - Version is automatically committed back to main branch
34 | - All versions stay synchronized
35 |
36 | ✅ **Build System Improvements**
37 | - Smart conditional references that detect the environment
38 | - Local development uses installed ArcGIS Pro
39 | - CI/CD automatically uses NuGet package
40 | - No more fragile csproj modification scripts
41 |
42 | ✅ **Current Status**: Config.daml updated to `0.1.2` and synchronized with latest release
43 |
44 | ---
45 |
46 | ## How It Works Now
47 |
48 | ### Automated Workflow
49 |
50 | ```
51 | Developer Creates Tag (v0.1.3)
52 | ↓
53 | Push to GitHub
54 | ↓
55 | GitHub Actions Automatically:
56 | 1. Updates Config.daml to 0.1.3
57 | 2. Builds add-in (.esriAddInX)
58 | 3. Commits version back to main [skip ci]
59 | 4. Creates GitHub Release
60 | 5. Uploads release package
61 | ↓
62 | All versions match!
63 | ```
64 |
65 | ### Key Features
66 |
67 | - **Zero manual version management** - Just create and push a tag
68 | - **Main branch stays synchronized** - Version changes committed automatically
69 | - **No infinite loops** - Uses `[skip ci]` in commit message
70 | - **Smart builds** - Automatically detects local vs CI/CD environment
71 |
72 | ---
73 |
74 | ## Creating a Release
75 |
76 | ### Prerequisites
77 |
78 | - All code changes committed and pushed to `main`
79 | - Code tested locally in ArcGIS Pro
80 | - Decide on version number (see [Semantic Versioning](#semantic-versioning))
81 |
82 | ### Step-by-Step Process
83 |
84 | #### 1. Create and Push a Version Tag
85 |
86 | ```bash
87 | # For a patch release (bug fixes): 0.1.2 → 0.1.3
88 | git tag v0.1.3
89 |
90 | # For a minor release (new features): 0.1.x → 0.2.0
91 | git tag v0.2.0
92 |
93 | # For a major release (breaking changes): 0.x.y → 1.0.0
94 | git tag v1.0.0
95 |
96 | # Push the tag to trigger the release
97 | git push origin v0.1.3
98 | ```
99 |
100 | #### 2. Monitor GitHub Actions
101 |
102 | Go to: https://github.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/actions
103 |
104 | Watch the "Build ArcGIS Pro GeoParquet Add-in" workflow. It will:
105 | - ✅ Update Config.daml with the version
106 | - ✅ Build the add-in
107 | - ✅ Commit the version back to main
108 | - ✅ Create the release
109 |
110 | #### 3. Review and Publish the Release
111 |
112 | 1. Go to: https://github.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/releases
113 | 2. Find your new release (will be in "Draft" mode)
114 | 3. Edit the release notes if desired
115 | 4. Click **Publish Release**
116 |
117 | #### 4. Verify Everything Worked
118 |
119 | ```bash
120 | # Pull the version update that was committed back
121 | git pull origin main
122 |
123 | # Check that Config.daml has the correct version
124 | grep 'version=' Config.daml
125 | # Should show: version="0.1.3"
126 | ```
127 |
128 | **Done!** Your release is complete and all versions are synchronized.
129 |
130 | ### Semantic Versioning
131 |
132 | Follow [Semantic Versioning](https://semver.org/) format: `MAJOR.MINOR.PATCH`
133 |
134 | - **PATCH** (0.1.2 → 0.1.3): Bug fixes, small improvements
135 | - **MINOR** (0.1.x → 0.2.0): New features, backward compatible
136 | - **MAJOR** (0.x.y → 1.0.0): Breaking changes
137 |
138 | ### Pre-Release Checklist
139 |
140 | Before creating a release tag:
141 |
142 | - [ ] All tests pass locally
143 | - [ ] Code builds successfully in Release configuration
144 | - [ ] Add-in works correctly in ArcGIS Pro 3.5+
145 | - [ ] README.md is up to date (if needed)
146 | - [ ] No sensitive data (keys, passwords) in code
147 | - [ ] Breaking changes are documented (if any)
148 |
149 | ---
150 |
151 | ## Testing with Dev Branch
152 |
153 | ### Why Use a Dev Branch?
154 |
155 | The `dev` branch allows you to **test workflow changes** before they go to production:
156 |
157 | - ✅ Test new workflow features
158 | - ✅ Verify versioning logic
159 | - ✅ Check build system changes
160 | - ✅ No risk to production releases
161 | - ✅ No commits back to main
162 |
163 | ### How to Use Dev Branch
164 |
165 | #### 1. Create and Push Dev Branch (First Time Only)
166 |
167 | ```bash
168 | # Create dev branch from main
169 | git checkout -b dev main
170 |
171 | # Push to GitHub
172 | git push -u origin dev
173 | ```
174 |
175 | #### 2. Make Changes and Test
176 |
177 | ```bash
178 | # Make your workflow changes
179 | git add .github/workflows/build.yaml
180 | git commit -m "test: experiment with workflow changes"
181 |
182 | # Push to dev branch
183 | git push origin dev
184 |
185 | # This triggers the workflow on dev branch
186 | ```
187 |
188 | #### 3. Test with Dev Tags
189 |
190 | ```bash
191 | # Create a dev tag for testing
192 | git tag v0.1.3-dev
193 |
194 | # Push the dev tag
195 | git push origin v0.1.3-dev
196 | ```
197 |
198 | **What happens with dev tags:**
199 | - ✅ Config.daml gets updated (without -dev suffix)
200 | - ✅ Build runs and creates artifacts
201 | - ✅ Creates a **pre-release** on GitHub (marked as dev)
202 | - ❌ Does NOT commit version back to main
203 | - ❌ Does NOT create a production release
204 |
205 | #### 4. Verify the Dev Build
206 |
207 | 1. Go to: https://github.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/actions
208 | 2. Watch the workflow run on dev branch
209 | 3. Check the pre-release: https://github.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/releases
210 | 4. Download and test the dev build
211 |
212 | #### 5. Clean Up Dev Releases
213 |
214 | ```bash
215 | # Delete the dev tag when done testing
216 | git push origin --delete v0.1.3-dev
217 | git tag -d v0.1.3-dev
218 |
219 | # Delete the pre-release on GitHub (via releases page)
220 | ```
221 |
222 | #### 6. Merge to Main When Ready
223 |
224 | ```bash
225 | # Switch back to main
226 | git checkout main
227 |
228 | # Merge dev changes
229 | git merge dev
230 |
231 | # Push to main
232 | git push origin main
233 |
234 | # Now create a production release
235 | git tag v0.1.3
236 | git push origin v0.1.3
237 | ```
238 |
239 | ### Dev vs Production Comparison
240 |
241 | | Aspect | Dev Branch/Tags | Production (Main) |
242 | |--------|----------------|-------------------|
243 | | **Trigger** | Push to `dev` or `v*-dev` tags | Push to `main` or `v*` tags |
244 | | **Version Update** | ✅ Yes (without -dev) | ✅ Yes |
245 | | **Build** | ✅ Yes | ✅ Yes |
246 | | **Commit Back** | ❌ No | ✅ Yes (to main) |
247 | | **Release Type** | Pre-release | Production release |
248 | | **Safe to Test** | ✅ Yes | ⚠️ Creates production release |
249 |
250 | ### Best Practices
251 |
252 | 1. **Always test workflow changes on dev first**
253 | - Prevents breaking production releases
254 | - Validates changes before merging
255 |
256 | 2. **Use descriptive dev tag names**
257 | ```bash
258 | v0.1.3-dev # Good
259 | v0.1.3-dev-test # Better (shows it's a test)
260 | ```
261 |
262 | 3. **Clean up dev releases**
263 | - Delete dev tags after testing
264 | - Remove pre-releases from GitHub
265 |
266 | 4. **Merge to main only when confident**
267 | - All tests pass
268 | - Workflow behaves as expected
269 | - Ready for production
270 |
271 | ---
272 |
273 | ## Build System Improvements
274 |
275 | ### Conditional References
276 |
277 | The project now uses smart conditional references that automatically detect the environment:
278 |
279 | **When ArcGIS Pro is installed locally** (development):
280 | ```xml
<Reference Include="ArcGIS.Core">
  <HintPath>C:\Program Files\ArcGIS\Pro\bin\ArcGIS.Core.dll</HintPath>
  <Private>False</Private>
</Reference>
286 | ```
287 |
288 | **When ArcGIS Pro is NOT installed** (CI/CD):
289 | ```xml
<PackageReference Include="Esri.ArcGISPro.Extensions30" Version="3.5.*" />
293 |
294 | ### Benefits
295 |
296 | - ✅ **For Developers**: Use your local ArcGIS Pro installation (no changes needed)
297 | - ✅ **For CI/CD**: Automatically uses the official Esri NuGet package
298 | - ✅ **No manual modification**: Project file automatically adapts to the environment
299 | - ✅ **Follows best practices**: Uses Esri's official recommendations
300 |
301 | ### What Was Removed
302 |
303 | - ❌ `.github/workflows/update-csproj.ps1` - No longer needed
304 | - ❌ Manual NuGet installation step in workflow
305 | - ❌ Fragile string replacement hacks
306 |
307 | ---
308 |
309 | ## Testing
310 |
311 | ### Test the Version Update Script Locally
312 |
313 | ```bash
314 | # Update Config.daml to a test version
315 | pwsh .github/workflows/update-version.ps1 -Version "0.1.3" -ConfigPath "Config.daml"
316 |
317 | # Verify the change
grep 'version=' Config.daml
319 | ```
320 |
321 | ### Test a Complete Release (Recommended Before v0.1.3)
322 |
323 | ```bash
324 | # 1. Create a test tag
325 | git tag v0.1.3-test
326 |
327 | # 2. Push the tag
328 | git push origin v0.1.3-test
329 |
330 | # 3. Monitor GitHub Actions
331 | # Watch the workflow at: https://github.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/actions
332 |
333 | # 4. Verify everything:
334 | # - Config.daml was updated in the workflow logs
335 | # - Build succeeded
336 | # - Version was committed back to main branch
337 | # - Commit message includes [skip ci]
338 | # - No infinite workflow loops
339 | # - GitHub release was created
340 |
341 | # 5. Check the main branch
342 | git pull origin main
343 | grep 'version=' Config.daml
344 | # Should show: version="0.1.3-test"
345 |
346 | # 6. Clean up the test
347 | # - Delete the test release on GitHub (releases page)
348 | # - Delete the remote tag: git push origin --delete v0.1.3-test
349 | # - Delete the local tag: git tag -d v0.1.3-test
350 | ```
351 |
352 | ---
353 |
354 | ## Troubleshooting
355 |
356 | ### Issue: Workflow fails at "Update Config.daml version from tag"
357 |
358 | **Cause**: PowerShell script error or XML parsing issue
359 |
360 | **Solution**:
361 | ```bash
362 | # Test the script locally to see the error
363 | pwsh .github/workflows/update-version.ps1 -Version "0.1.3" -ConfigPath "Config.daml"
364 |
365 | # Check Config.daml for XML syntax errors
366 | cat Config.daml
367 | ```
368 |
369 | ### Issue: Version in ArcGIS Pro doesn't match release
370 |
371 | **Cause**: Old add-in cached or not fully uninstalled
372 |
373 | **Solution**:
374 | 1. Completely uninstall the add-in in ArcGIS Pro
375 | 2. Delete add-in cache:
   - Path: `C:\Users\<username>\AppData\Local\ESRI\ArcGISPro\AssemblyCache`
377 | 3. Restart ArcGIS Pro completely
378 | 4. Reinstall the new version from the release `.zip` file
379 |
380 | ### Issue: Version wasn't committed back to main
381 |
382 | **Cause**: Permission issue or git configuration error
383 |
384 | **Solution**:
385 | 1. Check that the workflow has `contents: write` permission (already set in `build.yaml`)
386 | 2. Check GitHub Actions logs for error messages
387 | 3. Verify that `fetch-depth: 0` is set in checkout step (already configured)
388 |
389 | ### Issue: Release created but version is wrong
390 |
391 | **Cause**: Tag doesn't match expected format
392 |
393 | **Solution**:
394 | ```bash
395 | # Delete the incorrect tag
396 | git push origin --delete v0.1.3
397 | git tag -d v0.1.3
398 |
399 | # Recreate with correct format (must start with 'v')
400 | git tag v0.1.3
401 |
402 | # Push again
403 | git push origin v0.1.3
404 | ```
405 |
406 | ### Issue: Infinite workflow loop
407 |
408 | **Cause**: Commit message doesn't include `[skip ci]`
409 |
410 | **Solution**: This is already handled in the workflow script. If you see this:
411 | 1. Check the workflow file for the commit message format
412 | 2. Verify `[skip ci]` is in the commit message template
413 | 3. Stop the running workflows manually in GitHub Actions
414 |
415 | ### Issue: Build fails with reference errors
416 |
417 | **Cause**: Conditional references not working or NuGet package missing
418 |
419 | **Solution**:
420 | 1. For local builds: Verify ArcGIS Pro 3.5 is installed
421 | 2. For CI/CD: Check that NuGet restore completed successfully
422 | 3. Check GitHub Actions logs for specific error messages
423 |
424 | ---
425 |
426 | ## File Reference
427 |
428 | ### Files Modified
429 |
430 | - **`.github/workflows/build.yaml`** - Automated versioning and build workflow
431 | - **`DuckDBGeoparquet.csproj`** - Conditional ArcGIS Pro references
432 | - **`Config.daml`** - Version updated to 0.1.2
433 | - **`README.md`** - Version history updated
434 |
435 | ### Files Created
436 |
437 | - **`.github/workflows/update-version.ps1`** - PowerShell script to update Config.daml
438 | - **`AUTOMATED_VERSIONING.md`** - This documentation file
439 |
440 | ### Files Removed
441 |
442 | - **`.github/workflows/update-csproj.ps1`** - Obsolete (replaced by conditional references)
443 |
444 | ---
445 |
446 | ## Quick Reference
447 |
448 | ### Production Release
449 |
450 | ```bash
451 | # Create and push production release
452 | git tag v0.1.3 && git push origin v0.1.3
453 | ```
454 |
455 | That's it! Everything else is automatic.
456 |
457 | ### Dev/Test Release
458 |
459 | ```bash
460 | # Test workflow changes on dev branch
461 | git checkout dev
462 | git push origin dev
463 |
464 | # Or create a dev tag for testing
465 | git tag v0.1.3-dev && git push origin v0.1.3-dev
466 | ```
467 |
468 | Safe testing - no commits to main, creates pre-release only.
469 |
470 | ### Check Current Version
471 |
472 | ```bash
473 | # In Config.daml
474 | grep 'version=' Config.daml
475 |
476 | # Latest git tag
477 | git describe --tags --abbrev=0
478 |
479 | # Latest GitHub release
480 | gh release list | head -n 1
481 | ```
482 |
483 | ### Version Update Flow
484 |
485 | ```
486 | Tag → GitHub Actions → Update Config.daml → Build → Commit to main → Create Release
487 | ```
488 |
489 | ---
490 |
491 | ## Future Enhancements (Optional)
492 |
493 | ### 1. Automatic AGOL Publishing
494 | Automatically publish releases to ArcGIS Online using Portal REST API.
495 |
496 | **Requirements**:
497 | - Add AGOL credentials to GitHub Secrets
498 | - Create PowerShell functions for AGOL API
499 | - Add publishing step to workflow
500 |
501 | **Benefit**: Fully automated release from code to marketplace
502 |
503 | ### 2. CHANGELOG.md Generation
504 | Automatically generate changelog from commit messages and PRs.
505 |
506 | **Tools**:
507 | - `github-changelog-generator`
508 | - `conventional-changelog`
509 | - GitHub API for PR notes
510 |
511 | **Benefit**: Automatic, consistent release notes
512 |
513 | ### 3. Version in AssemblyInfo
514 | Embed version in compiled DLL metadata.
515 |
516 | ```xml
<PropertyGroup>
  <Version>0.1.3</Version>
  <AssemblyVersion>0.1.3.0</AssemblyVersion>
  <FileVersion>0.1.3.0</FileVersion>
</PropertyGroup>
522 | ```
523 |
524 | **Benefit**: Programmatically check version at runtime
525 |
526 | ---
527 |
528 | ## Summary
529 |
530 | ### What Changed
531 |
532 | **Before**: Manual version updates → version drift → user confusion
533 |
534 | **After**: Automatic version management → everything synchronized → professional releases
535 |
536 | ### Key Benefits
537 |
538 | ✅ **For Developers**:
539 | - Release process: 6 manual steps → 2 commands
540 | - Zero manual version management
541 | - Main branch always synchronized
542 |
543 | ✅ **For CI/CD**:
544 | - Robust, maintainable workflow
545 | - Follows Esri's best practices
546 | - No fragile hacks
547 |
548 | ✅ **For Users**:
549 | - Version shown in ArcGIS Pro matches GitHub release
550 | - Clear release history
551 | - Professional experience
552 |
553 | ### Next Steps
554 |
555 | 1. Test the solution with the next release (v0.1.3)
556 | 2. Consider optional enhancements (AGOL publishing, etc.)
557 | 3. Keep this documentation updated
558 |
559 | ---
560 |
561 | **Last Updated**: 2025-11-08
562 | **Status**: ✅ Production Ready
563 | **Issue**: [#5 - Incorrect version shown in Add-in](https://github.com/COF-RyLopez/ArcGISPro-GeoParquet-Addin/issues/5)
564 |
565 | For questions or issues, open a GitHub issue with:
566 | - Steps to reproduce
567 | - Error messages
568 | - GitHub Actions workflow logs
569 |
--------------------------------------------------------------------------------
/Views/WizardDockpane.xaml:
--------------------------------------------------------------------------------
1 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
63 |
64 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
129 |
130 |
131 | Files will be saved as GeoParquet format. After loading data, you can optionally create a Multifile Feature Connection (MFC) from the "Create MFC" tab.
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
225 |
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
242 |
243 |
244 |
250 |
251 | A Multifile Feature Connection (MFC) allows ArcGIS Pro to work efficiently with multiple GeoParquet files as a single dataset. This makes it easier to visualize and analyze the Overture Maps data.
252 |
253 | Note: Creating an MFC can take some time as it indexes all of your data files.
254 |
255 |
256 |
257 |
258 |
264 |
265 |
266 |
267 |
269 |
271 |
272 |
274 |
275 |
276 |
277 |
278 |
279 |
280 |
281 |
282 |
283 |
284 |
285 |
286 |
288 |
289 |
290 |
291 |
292 |
293 |
295 |
297 |
298 |
299 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 |
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 |
--------------------------------------------------------------------------------
/.github/workflows/publish-agol.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Publishes ArcGIS Pro add-in to ArcGIS Online using arcgis-python-api
4 |
5 | This script uploads and updates an add-in item in ArcGIS Online using the
6 | official Esri arcgis-python-api library, which provides a more reliable
7 | interface than raw REST API calls.
8 | """
9 |
import os
import sys
import argparse
from pathlib import Path

# Fix Windows console encoding issues.
# This must run BEFORE any printing: the script prints emoji/check-mark
# characters that crash on the default Windows console code page.
if sys.platform == "win32":
    # Set UTF-8 encoding for stdout/stderr on Windows
    if hasattr(sys.stdout, 'reconfigure'):
        sys.stdout.reconfigure(encoding='utf-8', errors='replace')
    if hasattr(sys.stderr, 'reconfigure'):
        sys.stderr.reconfigure(encoding='utf-8', errors='replace')
    # Also set environment variable for subprocess calls
    os.environ['PYTHONIOENCODING'] = 'utf-8'

# Guarded import: arcgis is a heavy third-party dependency installed by the
# CI workflow; fail with an actionable message instead of a raw traceback.
try:
    from arcgis.gis import GIS
    from arcgis import __version__ as arcgis_version
except ImportError:
    print("ERROR: arcgis-python-api is not installed")
    print("Install it with: pip install arcgis")
    sys.exit(1)
32 |
def main():
    """Upload a new .esriAddInX file to an existing ArcGIS Online item.

    Flow: parse/validate CLI args -> authenticate (OAuth2 app or
    username/password) -> fetch the target item -> compute metadata
    updates -> upload the file -> update metadata -> verify, with
    several retry/fallback paths for the description update.

    Exits the process: 0 on success, 1 on any fatal error.
    """
    parser = argparse.ArgumentParser(
        description="Publish ArcGIS Pro add-in to ArcGIS Online"
    )
    parser.add_argument(
        "--addin-file",
        required=True,
        help="Path to the .esriAddInX file to publish"
    )
    parser.add_argument(
        "--item-id",
        required=True,
        help="The item ID of the existing add-in in AGOL"
    )
    parser.add_argument(
        "--portal-url",
        default="https://www.arcgis.com",
        help="ArcGIS Online portal URL (default: https://www.arcgis.com)"
    )
    parser.add_argument(
        "--auth-method",
        choices=["token", "username"],
        default="token",
        help="Authentication method: 'token' (OAuth2) or 'username' (default: token)"
    )
    parser.add_argument(
        "--client-id",
        help="OAuth2 app client ID (required if auth-method is 'token')"
    )
    parser.add_argument(
        "--client-secret",
        help="OAuth2 app client secret (required if auth-method is 'token')"
    )
    parser.add_argument(
        "--username",
        help="Portal username (required if auth-method is 'username')"
    )
    parser.add_argument(
        "--password",
        help="Portal password (required if auth-method is 'username')"
    )
    parser.add_argument(
        "--title",
        help="Title for the add-in item (optional)"
    )
    parser.add_argument(
        "--description",
        help="Description for the add-in item (optional)"
    )
    parser.add_argument(
        "--tags",
        help="Comma-separated tags for the add-in (optional)"
    )

    args = parser.parse_args()

    # Validate inputs
    addin_path = Path(args.addin_file)
    if not addin_path.exists():
        print(f"ERROR: Add-in file not found at: {addin_path}")
        sys.exit(1)

    # Each auth method requires its own credential pair; fail fast before
    # touching the network.
    if args.auth_method == "token":
        if not args.client_id or not args.client_secret:
            print("ERROR: Client ID and Client Secret are required when using token authentication")
            sys.exit(1)
    else:
        if not args.username or not args.password:
            print("ERROR: Username and Password are required when using username authentication")
            sys.exit(1)

    print("=" * 50)
    print(" ArcGIS Online Publishing Script (Python)")
    print("=" * 50)
    print()
    print(f"Portal URL: {args.portal_url}")
    print(f"Item ID: {args.item_id}")
    print(f"Add-in file: {addin_path}")
    print(f"Authentication: {args.auth_method}")
    print(f"arcgis-python-api version: {arcgis_version}")
    print()

    try:
        # Step 1: Authenticate
        print("[AUTH] Authenticating with ArcGIS Online...")

        try:
            if args.auth_method == "token":
                # OAuth2 app client authentication
                gis = GIS(
                    url=args.portal_url,
                    client_id=args.client_id,
                    client_secret=args.client_secret
                )
            else:
                # Username/password authentication
                gis = GIS(
                    url=args.portal_url,
                    username=args.username,
                    password=args.password
                )
        except Exception as e:
            print(f"ERROR: Authentication failed: {e}")
            import traceback
            traceback.print_exc()
            sys.exit(1)

        print(f"[OK] Authentication successful")
        # With OAuth2 app client authentication, there may not be a user object
        if gis.users.me:
            print(f" Logged in as: {gis.users.me.username}")
        else:
            print(f" Authenticated as application (OAuth2 client credentials)")
        print()

        # Step 2: Get the item
        print("[GET] Retrieving add-in item...")
        try:
            item = gis.content.get(args.item_id)
        except Exception as e:
            print(f"ERROR: Failed to retrieve item: {e}")
            import traceback
            traceback.print_exc()
            sys.exit(1)

        # content.get returns None (rather than raising) for unknown/forbidden ids.
        if not item:
            print(f"ERROR: Item {args.item_id} not found or you don't have access to it")
            sys.exit(1)

        print(f"[OK] Found item: {item.title}")
        print()

        # Step 3: Prepare metadata updates (if provided)
        metadata_updates = {}
        if args.title:
            metadata_updates["title"] = args.title
        if args.description:
            # If description is just a version line, update only that part of existing description
            # Otherwise, replace entire description
            if args.description.startswith("Latest Version:"):
                # Get current description and update just the version line
                current_desc = item.description or ""
                import re

                # Debug: Print current description snippet
                print(f"[DEBUG] Current description length: {len(current_desc)} chars")
                if current_desc:
                    # Find the Latest Version line
                    version_match = re.search(r'Latest Version:.*', current_desc, re.MULTILINE)
                    if version_match:
                        print(f"[DEBUG] Found existing version line: {version_match.group()[:100]}")
                    else:
                        print("[DEBUG] No existing 'Latest Version:' line found")

                # Try multiple regex patterns to match the version line and ALL following content until next section
                # Pattern 1: Match from "Latest Version:" until next section marker (🏆, 📦, 🔄) or section header
                # This should match everything including "with the following improvements" and bullet points
                pattern1 = r'Latest Version:.*?(?=\n\n|\n🏆|\n📦|\n🔄|🏆|📦|🔄|$)'
                # Pattern 2: Match from "Latest Version:" until we hit a line starting with emoji or section header
                pattern2 = r'Latest Version:.*?(?=\n[🏆📦🔄]|\nSource|\nReady|\nTerms|🏆|📦|🔄|$)'
                # Pattern 3: Match everything from Latest Version until double newline (more aggressive)
                pattern3 = r'Latest Version:.*?(?=\n\n|$)'
                # Pattern 4: Match entire line from "Latest Version:" to end of line (handles \r\n and \n)
                pattern4 = r'Latest Version:.*?(?=\n|$)'
                # Pattern 5: Match everything from Latest Version until we hit a line starting with certain markers (DOTALL)
                pattern5 = r'Latest Version:.*?(?=\n(?:🏆|📦|🔄|Source|Ready|Terms)|$)'

                # Patterns are tried broadest-first; the first match wins.
                updated_desc = None
                for i, pattern in enumerate([pattern1, pattern2, pattern3, pattern4, pattern5], 1):
                    match = re.search(pattern, current_desc, re.MULTILINE | re.DOTALL)
                    if match:
                        # Use a more precise replacement - match the exact text found
                        matched_text = match.group()
                        print(f"[DEBUG] Pattern {i} matched: {matched_text[:150]}...")
                        # Replace the matched text with just the new version line
                        updated_desc = current_desc.replace(matched_text, args.description.strip(), 1)
                        print(f"[DEBUG] Replaced version section using pattern {i}")
                        print(f"[DEBUG] Matched length: {len(matched_text)} chars, Replacement length: {len(args.description.strip())} chars")
                        break

                if updated_desc is None:
                    # Fallback: Try simple string replacement
                    print("[DEBUG] Regex patterns didn't match, trying string-based replacement")
                    lines = current_desc.split('\n')
                    updated_lines = []
                    replaced = False
                    # Handle HTML content - might span multiple lines
                    i = 0
                    while i < len(lines):
                        line = lines[i]
                        if line.strip().startswith('Latest Version:'):
                            # Replace this line and any following lines until we hit a section marker or empty line
                            updated_lines.append(args.description.strip())
                            replaced = True
                            print(f"[DEBUG] Found and replacing: {line[:80]}...")
                            # Skip following lines that are part of the version section (HTML content, list items, etc.)
                            i += 1
                            # Skip all lines until we hit a section marker or empty line followed by section marker
                            while i < len(lines):
                                line_stripped = lines[i].strip()
                                # Stop if we hit a section marker
                                if line_stripped.startswith(('🏆', '📦', '🔄', 'Source', 'Ready', 'Terms')):
                                    break
                                # Stop if we hit an empty line (might be end of version section)
                                if line_stripped == '':
                                    # Check if next non-empty line is a section marker
                                    j = i + 1
                                    while j < len(lines) and lines[j].strip() == '':
                                        j += 1
                                    if j < len(lines) and lines[j].strip().startswith(('🏆', '📦', '🔄', 'Source', 'Ready', 'Terms')):
                                        break
                                # Skip this line (it's part of the version section)
                                i += 1
                                continue
                        else:
                            updated_lines.append(line)
                            i += 1

                    if replaced:
                        updated_desc = '\n'.join(updated_lines)
                    else:
                        # No match found, prepend version line
                        print("[DEBUG] No 'Latest Version:' line found, prepending")
                        updated_desc = args.description.strip() + "\n\n" + current_desc

                metadata_updates["description"] = updated_desc
                print(f"[DEBUG] Updated description length: {len(updated_desc)} chars")
                print(f"[DEBUG] New version line: {args.description.strip()[:100]}")
            else:
                # Full description replacement
                metadata_updates["description"] = args.description
        if args.tags:
            # Tags should be a list
            tag_list = [tag.strip() for tag in args.tags.split(",")]
            metadata_updates["tags"] = tag_list

        # Step 4: Update the file first, then metadata separately for better reliability
        print("[UPLOAD] Uploading add-in file...")
        print(f" File path: {addin_path}")
        print(f" File exists: {addin_path.exists()}")
        print(f" File size: {addin_path.stat().st_size / 1024 / 1024:.2f} MB")
        print()

        try:
            # Upload file first (separate from metadata for better reliability)
            update_result = item.update(
                data=str(addin_path),
                thumbnail=None  # Keep existing thumbnail
            )

            if update_result:
                print("[OK] File uploaded successfully")
            else:
                print("WARNING: Update returned False, but file may have been uploaded")
        except Exception as e:
            error_msg = str(e)
            print(f"ERROR: File upload failed: {error_msg}")

            # Provide helpful guidance for common errors
            if "403" in error_msg or "permissions" in error_msg.lower():
                print()
                print("=" * 70)
                print("PERMISSION ERROR - Troubleshooting Steps:")
                print("=" * 70)
                print()
                if args.auth_method == "token":
                    print("OAuth2 App Client Credentials may not have permission to update items.")
                    print()
                    print("SOLUTION 1: Grant permissions to the OAuth2 app:")
                    print(" 1. Go to ArcGIS Online → Content → Your OAuth credentials item")
                    print(" 2. Check the Settings tab for permission/scopes configuration")
                    print(" 3. Ensure the app has 'Content: Update' or similar permissions")
                    print()
                    print("SOLUTION 2: Use username/password authentication instead:")
                    print(" - Set AGOL_USERNAME and AGOL_PASSWORD in GitHub Secrets")
                    print(" - Remove AGOL_CLIENT_ID and AGOL_CLIENT_SECRET")
                    print(" - Username/password auth has full user permissions")
                    print()
                    print("SOLUTION 3: Ensure the item is owned/shared with the app:")
                    print(" - The add-in item must be accessible to the OAuth2 app")
                    print(" - Check item sharing settings in ArcGIS Online")
                else:
                    print("Username/password authentication should have full permissions.")
                    print("Check that:")
                    print(" 1. The username/password are correct")
                    print(" 2. The account has permission to update the item")
                    print(" 3. The item ID is correct")
                print("=" * 70)
                print()

            import traceback
            traceback.print_exc()
            sys.exit(1)

        # Step 4b: Update metadata separately (more reliable for description updates)
        if metadata_updates:
            print()
            print("[METADATA] Updating item metadata...")
            print(f" Updating: {', '.join(metadata_updates.keys())}")
            if 'description' in metadata_updates:
                desc_preview = metadata_updates['description'][:200] if len(metadata_updates['description']) > 200 else metadata_updates['description']
                print(f" Description length: {len(metadata_updates['description'])} characters")
                print(f" Description preview: {desc_preview}...")

            try:
                # Refresh the item to get latest state after file upload
                print(" Refreshing item before metadata update...")
                item = gis.content.get(args.item_id)

                # Log current state
                print(f" Current description length: {len(item.description or '')} chars")
                if 'description' in metadata_updates:
                    print(f" New description length: {len(metadata_updates['description'])} chars")
                    print(f" New description preview: {metadata_updates['description'][:150]}...")

                # Update metadata separately - this is more reliable for description updates
                print(" Calling item.update() with metadata...")
                print(f" Metadata keys to update: {list(metadata_updates.keys())}")
                metadata_result = item.update(item_properties=metadata_updates)

                if metadata_result:
                    print("[OK] Metadata update returned True")
                    # Wait a moment for AGOL to process the update
                    import time
                    time.sleep(2)

                    # Verify immediately
                    print(" Verifying update...")
                    updated_item_check = gis.content.get(args.item_id)
                    if 'description' in metadata_updates:
                        if updated_item_check.description:
                            print(f" Verified: Description exists ({len(updated_item_check.description)} chars)")
                            # Check if the version line was actually updated
                            import re
                            current_version_match = re.search(r'Latest Version:.*', updated_item_check.description, re.MULTILINE)
                            expected_version_match = re.search(r'Latest Version:.*', metadata_updates['description'], re.MULTILINE)

                            if current_version_match and expected_version_match:
                                current_line = current_version_match.group().strip()
                                expected_line = expected_version_match.group().strip()
                                print(f" Current version line: {current_line[:100]}...")
                                print(f" Expected version line: {expected_line[:100]}...")

                                if current_line == expected_line:
                                    print(" ✅ Description update verified - version line matches!")
                                else:
                                    print(f" ⚠️ WARNING: Version line doesn't match - forcing update...")
                                    # Force update the description
                                    item.update(item_properties={"description": metadata_updates['description']})
                                    print(" ✅ Forced update completed")
                            elif current_version_match:
                                print(f" ⚠️ WARNING: Found version line but expected format not found")
                                print(f" Current: {current_version_match.group()[:100]}...")
                                # Try to update just the version line
                                current_desc = updated_item_check.description
                                lines = current_desc.split('\n')
                                updated_lines = []
                                replaced = False
                                for line in lines:
                                    if line.strip().startswith('Latest Version:'):
                                        updated_lines.append(expected_version_match.group().strip() if expected_version_match else metadata_updates['description'])
                                        replaced = True
                                    else:
                                        updated_lines.append(line)
                                if replaced:
                                    final_desc = '\n'.join(updated_lines)
                                    item.update(item_properties={"description": final_desc})
                                    print(" ✅ Version line updated")
                            else:
                                print(f" ⚠️ WARNING: No version line found in description")
                                print(f" Description preview: {updated_item_check.description[:200]}...")
                        else:
                            print(" ⚠️ WARNING: Description appears empty after update")
                else:
                    print(" ⚠️ WARNING: Metadata update returned False - update may have failed")
                    print(" Attempting direct update via item_properties...")
                    # Try alternative approach
                    try:
                        item.update(item_properties=metadata_updates)
                        print(" ✅ Retry successful")
                    except Exception as retry_error:
                        print(f" ❌ Retry also failed: {retry_error}")
            except Exception as metadata_error:
                print(f" ❌ ERROR: Metadata update failed: {metadata_error}")
                import traceback
                traceback.print_exc()
                # Try one more time with a fresh item fetch
                print(" Attempting final retry with fresh item fetch...")
                try:
                    fresh_item = gis.content.get(args.item_id)
                    fresh_item.update(item_properties=metadata_updates)
                    print(" ✅ Final retry successful")
                except Exception as final_error:
                    print(f" ❌ Final retry also failed: {final_error}")
                    # Metadata failure is deliberately non-fatal: the file upload
                    # already succeeded, so the publish is still useful.
                    print(" Continuing despite metadata update failure...")

        print()

        # Step 5: Verify metadata update (if description was provided) and force update if needed
        if args.description:
            print("[VERIFY] Verifying description update...")
            try:
                # Refresh the item to get latest data
                updated_item = gis.content.get(args.item_id)
                if updated_item.description:
                    # Check if the version line was actually updated
                    import re
                    expected_version = args.description.strip()
                    if expected_version.startswith("Latest Version:"):
                        # Check if the version line matches what we sent
                        current_version_line = re.search(r'Latest Version:.*', updated_item.description, re.MULTILINE)
                        if current_version_line:
                            current_line = current_version_line.group().strip()
                            expected_line = expected_version.strip()
                            if current_line == expected_line:
                                print(f" ✅ Verified: Version line updated correctly")
                                print(f" Verified: Description updated ({len(updated_item.description)} chars)")
                                print(f" Preview: {updated_item.description[:200]}...")
                            else:
                                print(f" ⚠️ WARNING: Version line doesn't match expected value")
                                print(f" Expected: {expected_line[:100]}")
                                print(f" Actual: {current_line[:100]}")
                                print(" Attempting separate description update with string replacement...")
                                try:
                                    # Re-fetch item and update description separately using string replacement
                                    item_to_update = gis.content.get(args.item_id)
                                    current_desc = item_to_update.description or ""

                                    # Use string-based line replacement (most reliable)
                                    lines = current_desc.split('\n')
                                    updated_lines = []
                                    replaced = False
                                    for line in lines:
                                        if line.strip().startswith('Latest Version:'):
                                            updated_lines.append(expected_line)
                                            replaced = True
                                            print(f" Found and replacing: {line[:80]}...")
                                        else:
                                            updated_lines.append(line)

                                    if replaced:
                                        updated_desc = '\n'.join(updated_lines)
                                        item_to_update.update(item_properties={"description": updated_desc})
                                        print(" ✅ Description updated via separate call (string replacement)")
                                    else:
                                        # Fallback: prepend if not found
                                        updated_desc = expected_line + "\n\n" + current_desc
                                        item_to_update.update(item_properties={"description": updated_desc})
                                        print(" ✅ Description updated via separate call (prepended)")
                                except Exception as e2:
                                    print(f" ❌ ERROR: Separate update also failed: {e2}")
                                    import traceback
                                    traceback.print_exc()
                        else:
                            print(" ⚠️ WARNING: No 'Latest Version:' line found in description")
                            print(f" Description preview: {updated_item.description[:200]}...")
                    else:
                        print(f" Verified: Description updated ({len(updated_item.description)} chars)")
                        print(f" Preview: {updated_item.description[:200]}...")
                else:
                    print(" WARNING: Description appears empty after update")
                    # Try updating description separately as fallback
                    print(" Attempting separate description update...")
                    try:
                        item.update(item_properties={"description": args.description})
                        print(" ✅ Description updated via separate call")
                    except Exception as e2:
                        print(f" ❌ ERROR: Separate update also failed: {e2}")
            except Exception as e:
                # Verification is best-effort; don't fail the publish over it.
                print(f" WARNING: Verification failed: {e}")
                import traceback
                traceback.print_exc()
        print()

        # Step 6: Final verification
        print("[VERIFY] Verifying update...")
        updated_item = gis.content.get(args.item_id)
        print(f"[OK] Item updated successfully")
        print(f" Item URL: {args.portal_url}/home/item.html?id={args.item_id}")
        print(f" Modified: {updated_item.modified}")
        print()

        print("[SUCCESS] Successfully published to ArcGIS Online!")
        print()

        sys.exit(0)

    except Exception as e:
        # Catch-all so any unexpected failure yields a non-zero exit for CI.
        print(f"ERROR: Failed to publish to ArcGIS Online: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)

# Script entry point: run the ArcGIS Online publish workflow when executed directly.
if __name__ == "__main__":
    main()
528 |
--------------------------------------------------------------------------------
/Services/MfcUtility.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.IO;
4 | using System.Linq;
5 | using System.Text.Json;
6 | using System.Text.Json.Serialization;
7 | using System.Threading.Tasks;
8 | using DuckDB.NET.Data;
9 |
10 | namespace DuckDBGeoparquet.Services
11 | {
    /// <summary>
    /// Utility class for creating and managing Multifile Feature Connections (MFC)
    /// for Overture Maps data
    /// </summary>
16 | public class MfcUtility
17 | {
        // Name of the geometry column expected in Overture Parquet files.
        private const string GEOMETRY_COLUMN = "geometry"; // Added class constant
19 |
20 | // C# Models for MFC JSON Structure
        /// <summary>
        /// Properties for the MFC "connection" node: the root folder path the
        /// Multifile Feature Connection reads its datasets from.
        /// </summary>
        public class MfcConnectionProps
        {
            // Windows-style path to the folder containing dataset subfolders.
            [JsonPropertyName("path")]
            public string Path { get; set; }
        }
26 |
        /// <summary>
        /// The "connection" node of the MFC JSON document.
        /// </summary>
        public class MfcConnectionInfo // Renamed from MfcConnection to avoid conflict
        {
            // This utility only emits filesystem-backed connections.
            [JsonPropertyName("type")]
            public string Type { get; set; } = "filesystem";

            [JsonPropertyName("properties")]
            public MfcConnectionProps Properties { get; set; }
        }
35 |
        /// <summary>
        /// Per-dataset properties node; the file format is always Parquet here.
        /// </summary>
        public class MfcDatasetProperties
        {
            [JsonPropertyName("fileformat")]
            public string FileFormat { get; set; } = "parquet";
        }
41 |
42 | public class MfcField
43 | {
44 | [JsonPropertyName("name")]
45 | public string Name { get; set; }
46 |
47 | [JsonPropertyName("type")]
48 | public string Type { get; set; }
49 |
50 | // Make Visible nullable. It will only be serialized if it has a value.
51 | // We'll typically only set this to false for the main geometry field.
52 | [JsonPropertyName("visible")]
53 | [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
54 | public bool? Visible { get; set; }
55 |
56 | // Add SourceType, to be serialized only if it has a value.
57 | [JsonPropertyName("sourceType")]
58 | [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
59 | public string SourceType { get; set; }
60 |
61 | // Constructor to simplify creation
62 | public MfcField(string name, string type, bool? visible = null, string sourceType = null)
63 | {
64 | Name = name;
65 | Type = type;
66 | Visible = visible;
67 | SourceType = sourceType;
68 | }
69 | }
70 |
71 | public class MfcGeometryField
72 | {
73 | [JsonPropertyName("name")]
74 | public string Name { get; set; }
75 |
76 | [JsonPropertyName("formats")]
77 | public List Formats { get; set; }
78 | }
79 |
        /// <summary>
        /// Spatial reference for a dataset's geometry, identified by well-known ID
        /// (this utility always emits WGS84, wkid 4326).
        /// </summary>
        public class MfcSpatialReference
        {
            [JsonPropertyName("wkid")]
            public int Wkid { get; set; }
        }
85 |
86 | public class MfcGeometry
87 | {
88 | [JsonPropertyName("geometryType")]
89 | public string GeometryType { get; set; }
90 |
91 | [JsonPropertyName("spatialReference")]
92 | public MfcSpatialReference SpatialReference { get; set; }
93 |
94 | [JsonPropertyName("fields")]
95 | public List Fields { get; set; }
96 | }
97 |
98 | public class MfcDataset
99 | {
100 | [JsonPropertyName("name")]
101 | public string Name { get; set; }
102 |
103 | [JsonPropertyName("alias")]
104 | public string Alias { get; set; }
105 |
106 | [JsonPropertyName("properties")]
107 | public MfcDatasetProperties Properties { get; set; } = new MfcDatasetProperties();
108 |
109 | [JsonPropertyName("fields")]
110 | public List FieldsList { get; set; } // Renamed to avoid conflict with MfcGeometry.Fields
111 |
112 | [JsonPropertyName("geometry")]
113 | public MfcGeometry Geometry { get; set; }
114 | }
115 |
116 | public class MfcRoot
117 | {
118 | [JsonPropertyName("connection")]
119 | public MfcConnectionInfo Connection { get; set; } // Use renamed MfcConnectionInfo
120 |
121 | [JsonPropertyName("datasets")]
122 | public List Datasets { get; set; } = new List();
123 | }
124 |
125 | // Helper for sanitizing file names if needed (currently used by DataProcessor)
126 | public static string SanitizeFileName(string fileName)
127 | {
128 | // Basic sanitization, can be expanded
129 | return string.Join("_", fileName.Split(Path.GetInvalidFileNameChars()));
130 | }
131 |
132 | // Define field exclusion and renaming maps
133 | private static readonly Dictionary> FieldExclusionMap = new Dictionary>(StringComparer.OrdinalIgnoreCase)
134 | {
135 | { "address", new HashSet(StringComparer.OrdinalIgnoreCase) { "address_levels", "sources" } },
136 | { "building", new HashSet(StringComparer.OrdinalIgnoreCase) { "sources" } },
137 | { "building_part", new HashSet(StringComparer.OrdinalIgnoreCase) { "sources" } },
138 | { "connector", new HashSet(StringComparer.OrdinalIgnoreCase) { "sources" } },
139 | { "division", new HashSet(StringComparer.OrdinalIgnoreCase) { "sources", "local_type", "hierarchies", "capital_division_ids", "capital_of_divisions" } },
140 | { "division_area", new HashSet(StringComparer.OrdinalIgnoreCase) { "sources" } },
141 | { "infrastructure", new HashSet(StringComparer.OrdinalIgnoreCase) { "sources", "source_tags" } },
142 | { "land", new HashSet(StringComparer.OrdinalIgnoreCase) { "sources", "source_tags" } },
143 | { "land_cover", new HashSet(StringComparer.OrdinalIgnoreCase) { "sources" } },
144 | { "land_use", new HashSet(StringComparer.OrdinalIgnoreCase) { "sources", "source_tags" } },
145 | { "place", new HashSet(StringComparer.OrdinalIgnoreCase) { "addresses", "brand", "emails", "phones", "socials", "sources", "websites" } },
146 | { "segment", new HashSet(StringComparer.OrdinalIgnoreCase) {
147 | "access_restrictions", "connectors", "destinations", "level_rules",
148 | "prohibited_transitions", "road_flags", "road_surface", "routes", "sources",
149 | "speed_limits", "subclass_rules", "width_rules"
150 | }
151 | },
152 | { "water", new HashSet(StringComparer.OrdinalIgnoreCase) { "sources", "source_tags" } }
153 | };
154 |
155 | private static readonly Dictionary> FieldRenameMap = new Dictionary>(StringComparer.OrdinalIgnoreCase)
156 | {
157 | { "division", new Dictionary(StringComparer.OrdinalIgnoreCase)
158 | {
159 | { "perspectives", "perspectives_mode" },
160 | { "norms", "norms_driving_side" }
161 | }
162 | }
163 | // Add other dataset types and their renames as needed
164 | // For "place" and "brand_wikidata" / "brand_names_primary":
165 | // We are currently excluding the 'brand' struct. If flattened versions exist in Parquet,
166 | // they should be picked up automatically. If not, they can't be created by the MFC.
167 | };
168 |
169 | private static readonly Dictionary> DatasetFieldOrder = new Dictionary>(StringComparer.OrdinalIgnoreCase)
170 | {
171 | {
172 | "address", new List {
173 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
174 | "country", "postcode", "street", "number", "unit", "postal_city",
175 | "version", "filename", "theme", "type", "geometry"
176 | }
177 | },
178 | {
179 | "building", new List {
180 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
181 | "version", "level", "subtype", "class", "height", "names_primary",
182 | "has_parts", "is_underground", "num_floors", "num_floors_underground",
183 | "min_height", "min_floor", "facade_color", "facade_material",
184 | "roof_material", "roof_shape", "roof_direction", "roof_orientation",
185 | "roof_color", "roof_height", "filename", "theme", "type", "geometry"
186 | }
187 | },
188 | {
189 | "building_part", new List {
190 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
191 | "version", "level", "height", "names_primary", "is_underground",
192 | "num_floors", "num_floors_underground", "min_height", "min_floor",
193 | "facade_color", "facade_material", "roof_material", "roof_shape",
194 | "roof_direction", "roof_orientation", "roof_color", "roof_height",
195 | "building_id", "filename", "theme", "type", "geometry"
196 | }
197 | },
198 | {
199 | "connector", new List {
200 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
201 | "version", "filename", "theme", "type", "geometry"
202 | }
203 | },
204 | {
205 | "division", new List {
206 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
207 | "country", "version", "cartography_prominence", "cartography_min_zoom",
208 | "cartography_max_zoom", "cartography_sort_key", "subtype", "class",
209 | "names_primary", "wikidata", "region", "perspectives_mode",
210 | "parent_division_id", "norms_driving_side", "population",
211 | "filename", "theme", "type", "geometry"
212 | }
213 | },
214 | {
215 | "division_area", new List {
216 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
217 | "country", "version", "subtype", "class", "names_primary",
218 | "is_land", "is_territorial", "region", "division_id",
219 | "filename", "theme", "type", "geometry"
220 | }
221 | },
222 | {
223 | "infrastructure", new List {
224 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
225 | "version", "level", "subtype", "class", "height", "surface",
226 | "names_primary", "wikidata", "filename", "theme", "type", "geometry"
227 | }
228 | },
229 | {
230 | "land", new List {
231 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
232 | "version", "level", "subtype", "class", "surface", "names_primary",
233 | "wikidata", "elevation", "filename", "theme", "type", "geometry"
234 | }
235 | },
236 | {
237 | "land_cover", new List {
238 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
239 | "version", "cartography_prominence", "cartography_min_zoom",
240 | "cartography_max_zoom", "cartography_sort_key", "subtype",
241 | "filename", "theme", "type", "geometry"
242 | }
243 | },
244 | {
245 | "land_use", new List {
246 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
247 | "version", "level", "subtype", "class", "surface", "names_primary",
248 | "wikidata", "filename", "theme", "type", "geometry"
249 | }
250 | },
251 | {
252 | "place", new List {
253 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
254 | "version", "names_primary", "categories_primary", "confidence",
255 | "brand_wikidata", "brand_names_primary", "filename", "theme", "type", "geometry"
256 | }
257 | },
258 | {
259 | "segment", new List {
260 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
261 | "version", "subtype", "class", "names_primary", "subclass",
262 | "filename", "theme", "type", "geometry"
263 | }
264 | },
265 | {
266 | "water", new List {
267 | "id", "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax",
268 | "version", "level", "subtype", "class", "names_primary", "wikidata",
269 | "is_salt", "is_intermittent", "filename", "theme", "type", "geometry"
270 | }
271 | }
272 | };
273 |
274 | public static async Task GenerateMfcFileAsync(string sourceDataFolder, string outputMfcFilePath, string addinExecutingPath, Action logAction = null)
275 | {
276 | logAction ??= Console.WriteLine; // Default logger
277 |
278 | try
279 | {
280 | logAction($"Starting MFC generation. Source: {sourceDataFolder}, Output: {outputMfcFilePath}");
281 |
282 | var mfcRoot = new MfcRoot
283 | {
284 | Connection = new MfcConnectionInfo
285 | {
286 | Properties = new MfcConnectionProps
287 | {
288 | Path = sourceDataFolder.Replace('/', '\\')
289 | }
290 | }
291 | };
292 |
293 | var datasetDirectories = Directory.GetDirectories(sourceDataFolder);
294 | if (!datasetDirectories.Any())
295 | {
296 | logAction($"No dataset subfolders found in {sourceDataFolder}. Cannot generate MFC.");
297 | return false;
298 | }
299 |
300 | string extensionsPath = Path.Combine(addinExecutingPath, "Extensions");
301 | string normalizedExtensionsPath = extensionsPath.Replace('\\', '/');
302 |
303 | using (var duckDBConnection = new DuckDBConnection("DataSource=:memory:"))
304 | {
305 | await duckDBConnection.OpenAsync();
306 | using (var setupCmd = duckDBConnection.CreateCommand())
307 | {
308 | bool spatialLoaded = false;
309 | // 1. Prioritize Bundled Extension
310 | try
311 | {
312 | setupCmd.CommandText = $"SET extension_directory='{normalizedExtensionsPath}'; LOAD spatial;";
313 | await setupCmd.ExecuteNonQueryAsync();
314 | logAction("DuckDB spatial extension loaded successfully from local add-in directory.");
315 | spatialLoaded = true;
316 | }
317 | catch (Exception extEx)
318 | {
319 | logAction($"Info: Could not load DuckDB spatial extension from local directory '{normalizedExtensionsPath}'. Error: {extEx.Message}. Will try other methods.");
320 | }
321 |
322 | // 2. Attempt simple LOAD spatial (if Pro 3.5 makes it available globally to .NET DuckDB)
323 | if (!spatialLoaded)
324 | {
325 | try
326 | {
327 | setupCmd.CommandText = "LOAD spatial;";
328 | await setupCmd.ExecuteNonQueryAsync();
329 | logAction("DuckDB spatial extension loaded successfully using simple 'LOAD spatial'. (Potentially from ArcGIS Pro default environment)");
330 | spatialLoaded = true;
331 | }
332 | catch (Exception loadEx)
333 | {
334 | logAction($"Info: Simple 'LOAD spatial' failed. Error: {loadEx.Message}. Will try FORCE INSTALL as last resort.");
335 | }
336 | }
337 |
338 | // 3. Try Force Install and Load (if others fail)
339 | if (!spatialLoaded)
340 | {
341 | try
342 | {
343 | setupCmd.CommandText = "FORCE INSTALL spatial; LOAD spatial;";
344 | await setupCmd.ExecuteNonQueryAsync();
345 | logAction("DuckDB spatial extension FORCE INSTALLED and LOADED successfully.");
346 |
347 | // Diagnostic check for spatial functions
348 | using (var checkCmd = duckDBConnection.CreateCommand())
349 | {
350 | checkCmd.CommandText = "SELECT function_name FROM duckdb_functions() WHERE function_name ILIKE 'st_srid' OR function_name ILIKE 'st_geometrytype' ORDER BY function_name;";
351 | logAction($"Executing diagnostic query: {checkCmd.CommandText}");
352 | using (var reader = await checkCmd.ExecuteReaderAsync())
353 | {
354 | bool foundSrid = false;
355 | bool foundGeomType = false;
356 | while (await reader.ReadAsync())
357 | {
358 | string funcName = reader.GetString(0);
359 | logAction($"Found function via diagnostic query: {funcName}");
360 | if (funcName.ToLowerInvariant() == "st_srid") foundSrid = true;
361 | if (funcName.ToLowerInvariant() == "st_geometrytype") foundGeomType = true;
362 | }
363 | if (foundSrid && foundGeomType)
364 | {
365 | logAction("Diagnostic check: ST_SRID and ST_GeometryType ARE listed in duckdb_functions().");
366 | }
367 | else if (foundSrid)
368 | {
369 | logAction("Diagnostic check: ST_SRID IS listed, but ST_GeometryType IS NOT.");
370 | }
371 | else if (foundGeomType)
372 | {
373 | logAction("Diagnostic check: ST_GeometryType IS listed, but ST_SRID IS NOT.");
374 | }
375 | else
376 | {
377 | logAction("Diagnostic check: NEITHER ST_SRID NOR ST_GeometryType are listed in duckdb_functions(). This is the core issue.");
378 | }
379 | }
380 | }
381 | spatialLoaded = true;
382 | }
383 | catch (Exception forceEx)
384 | {
385 | logAction($"CRITICAL ERROR: All attempts to load DuckDB spatial extension failed (bundled, simple load, force install/load). Error during FORCE INSTALL/LOAD: {forceEx.Message}. MFC generation will likely fail or produce incorrect geometry types. Please ensure 'spatial.duckdb_extension' is in '{normalizedExtensionsPath}' or that network access allows DuckDB to download it.");
386 | spatialLoaded = false; // Explicitly false
387 | }
388 | }
389 |
390 | if (!spatialLoaded)
391 | {
392 | logAction("CRITICAL ERROR: Spatial extension could not be loaded after all attempts. Cannot proceed with MFC generation.");
393 | return false;
394 | }
395 | }
396 |
397 | foreach (var dirPath in datasetDirectories)
398 | {
399 | string datasetName = new DirectoryInfo(dirPath).Name;
400 | logAction($"Processing dataset: {datasetName}");
401 |
402 | string detectedGeometryType = null;
403 | string detectedWkid = "4326"; // Default SRID
404 | bool geometryColumnExistsInSchema = false;
405 | // Keep track of field names we've added to avoid duplicates if Parquet has both struct and flattened
406 | HashSet addedFieldNames = new HashSet(StringComparer.OrdinalIgnoreCase);
407 |
408 | var parquetFiles = Directory.GetFiles(dirPath, "*.parquet")
409 | .OrderBy(f => f) // Consistent order
410 | .ToList();
411 |
412 | if (!parquetFiles.Any())
413 | {
414 | logAction($"No .parquet files found in {dirPath} for dataset {datasetName}. Skipping.");
415 | continue;
416 | }
417 |
418 | string firstParquetFileForSchema = parquetFiles.First().Replace('\\', '/');
419 | logAction($"Using sample file for general schema: {firstParquetFileForSchema}");
420 |
421 | var dataset = new MfcDataset
422 | {
423 | Name = datasetName,
424 | Alias = datasetName,
425 | FieldsList = new List()
426 | };
427 |
428 | var columns = new List();
429 |
430 | try
431 | {
432 | using (var schemaCmd = duckDBConnection.CreateCommand())
433 | {
434 | schemaCmd.CommandText = $"DESCRIBE SELECT * FROM read_parquet('{firstParquetFileForSchema.Replace("'", "''")}') LIMIT 0;";
435 | using (var reader = await schemaCmd.ExecuteReaderAsync())
436 | {
437 | var tempFieldList = new List>();
438 | while (await reader.ReadAsync())
439 | {
440 | string colName = reader.GetString(0);
441 | string colType = reader.GetString(1).ToUpper();
442 | tempFieldList.Add(Tuple.Create(colName, colType));
443 | }
444 |
445 | foreach (var fieldTuple in tempFieldList)
446 | {
447 | string columnName = fieldTuple.Item1;
448 | string duckDbType = fieldTuple.Item2;
449 |
450 | logAction($"MFC Generation: Dataset '{datasetName}' - Schema Column: '{columnName}', DuckDB Type: '{duckDbType}'");
451 |
452 | if (columnName.StartsWith("__duckdb_internal")) continue;
453 |
454 | // Apply Exclusions
455 | if (FieldExclusionMap.TryGetValue(datasetName, out var exclusions) && exclusions.Contains(columnName))
456 | {
457 | logAction($"MFC Generation: Excluding field '{columnName}' for dataset '{datasetName}' as per exclusion rules.");
458 | continue;
459 | }
460 |
461 | // Apply Renames
462 | if (FieldRenameMap.TryGetValue(datasetName, out var renames) && renames.TryGetValue(columnName, out var newName))
463 | {
464 | logAction($"MFC Generation: Renaming field '{columnName}' to '{newName}' for dataset '{datasetName}'.");
465 | columnName = newName;
466 | }
467 |
468 | var knownBooleanFields = new HashSet {
469 | "has_parts", "is_underground", "is_land", "is_territorial", "is_salt", "is_intermittent"
470 | };
471 | string mfcType;
472 | string sourceType = null;
473 |
474 | if (columnName.ToLower() == GEOMETRY_COLUMN.ToLower())
475 | {
476 | geometryColumnExistsInSchema = true;
477 | // Add geometry to main fields list without "visible: false"
478 | if (addedFieldNames.Add(columnName))
479 | {
480 | columns.Add(new MfcField(columnName, "Binary"));
481 | }
482 | logAction($"MFC Generation: Found '{GEOMETRY_COLUMN}' field for '{datasetName}'. Will be added to main field list.");
483 | continue;
484 | }
485 |
486 | // Handle specific bbox_xmin, etc. fields if they exist directly
487 | if (columnName.ToLower() == "bbox_xmin" || columnName.ToLower() == "bbox_xmax" ||
488 | columnName.ToLower() == "bbox_ymin" || columnName.ToLower() == "bbox_ymax")
489 | {
490 | if (addedFieldNames.Add(columnName))
491 | {
492 | columns.Add(new MfcField(columnName, "Float32"));
493 | }
494 | continue;
495 | }
496 |
497 | // If we encounter a 'bbox' struct, we IGNORE it for the main field list.
498 | // The individual bbox_xmin, etc., fields will be added ensured later.
499 | if (columnName.ToLower() == "bbox")
500 | {
501 | logAction($"MFC Generation: Encountered 'bbox' struct for dataset '{datasetName}'. It will be skipped in main field list. Flattened versions will be ensured.");
502 | continue;
503 | }
504 |
505 | if ((columnName.ToLower() == "names" || columnName.ToLower() == "categories") && duckDbType.StartsWith("STRUCT"))
506 | {
507 | string primaryFieldName = $"{columnName}_primary";
508 | if (addedFieldNames.Add(primaryFieldName))
509 | {
510 | columns.Add(new MfcField(primaryFieldName, "String"));
511 | }
512 | continue;
513 | }
514 |
515 | if (columnName.ToLower() == "cartography" && duckDbType.StartsWith("STRUCT"))
516 | {
517 | var cartoSubFields = new Dictionary
518 | {
519 | { "prominence", "Int32" }, { "min_zoom", "Int32" },
520 | { "max_zoom", "Int32" }, { "sort_key", "Int32" }
521 | };
522 | foreach (var subField in cartoSubFields)
523 | {
524 | string fullSubFieldName = $"cartography_{subField.Key}";
525 | if (addedFieldNames.Add(fullSubFieldName))
526 | {
527 | columns.Add(new MfcField(fullSubFieldName, subField.Value));
528 | }
529 | }
530 | continue;
531 | }
532 |
533 | mfcType = ConvertDuckDbTypeToMfcType(duckDbType, columnName, logAction);
534 | if (knownBooleanFields.Contains(columnName.ToLower()))
535 | {
536 | mfcType = "String";
537 | sourceType = "Boolean";
538 | }
539 |
540 | if (addedFieldNames.Add(columnName))
541 | {
542 | columns.Add(new MfcField(columnName, mfcType, null, sourceType));
543 | }
544 | }
545 | }
546 |
547 | // Ensure the four specific bbox fields are present
548 | string[] requiredBboxFields = { "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax" };
549 | foreach (var bboxField in requiredBboxFields)
550 | {
551 | if (addedFieldNames.Add(bboxField)) // If not already added (e.g., directly from Parquet schema)
552 | {
553 | columns.Add(new MfcField(bboxField, "Float32"));
554 | logAction($"MFC Generation: Ensured '{bboxField}' (Float32) is added to dataset '{datasetName}'.");
555 | }
556 | }
557 |
558 | // For 'place' dataset, ensure brand_wikidata and brand_names_primary exist
559 | if (datasetName.Equals("place", StringComparison.OrdinalIgnoreCase))
560 | {
561 | if (addedFieldNames.Add("brand_wikidata"))
562 | {
563 | columns.Add(new MfcField("brand_wikidata", "String"));
564 | logAction($"MFC Generation: Ensured 'brand_wikidata' (String) is added to dataset 'place'.");
565 | }
566 | if (addedFieldNames.Add("brand_names_primary"))
567 | {
568 | columns.Add(new MfcField("brand_names_primary", "String"));
569 | logAction($"MFC Generation: Ensured 'brand_names_primary' (String) is added to dataset 'place'.");
570 | }
571 | }
572 |
573 | // Reorder fields based on DatasetFieldOrder or default if not specified
574 | List finalOrderedFieldsList;
575 | if (DatasetFieldOrder.TryGetValue(datasetName, out var specificOrder))
576 | {
577 | finalOrderedFieldsList = new List();
578 | var availableFields = columns.ToDictionary(f => f.Name, f => f, StringComparer.OrdinalIgnoreCase);
579 |
580 | // Add fields according to specificOrder
581 | foreach (var fieldNameInOrder in specificOrder)
582 | {
583 | if (availableFields.TryGetValue(fieldNameInOrder, out var field))
584 | {
585 | finalOrderedFieldsList.Add(field);
586 | availableFields.Remove(fieldNameInOrder);
587 | }
588 | else
589 | {
590 | logAction($"Warning: Field '{fieldNameInOrder}' specified in order for dataset '{datasetName}' was not found in available fields. It might be excluded, not present in Parquet, or not yet handled (e.g. complex structs).");
591 | }
592 | }
593 |
594 | // Add any remaining fields from 'columns' that were not in specificOrder.
595 | // These are fields present in the Parquet but not in the target MFC's defined order.
596 | // They will be added alphabetically, with 'geometry' (if remaining and not in specificOrder) last among them.
597 | var stillAvailableFields = availableFields.Values.ToList();
598 | MfcField geomFieldFromAvailable = stillAvailableFields.FirstOrDefault(f => f.Name.Equals(GEOMETRY_COLUMN, StringComparison.OrdinalIgnoreCase));
599 |
600 | foreach (var field in stillAvailableFields
601 | .Where(f => !f.Name.Equals(GEOMETRY_COLUMN, StringComparison.OrdinalIgnoreCase))
602 | .OrderBy(f => f.Name, StringComparer.OrdinalIgnoreCase))
603 | {
604 | finalOrderedFieldsList.Add(field);
605 | logAction($"MFC Generation: Adding field '{field.Name}' to dataset '{datasetName}' (was in Parquet but not in specific order).");
606 | }
607 |
608 | // Add geometry field if it was in availableFields and not already added by specificOrder
609 | if (geomFieldFromAvailable != null && !specificOrder.Contains(GEOMETRY_COLUMN, StringComparer.OrdinalIgnoreCase))
610 | {
611 | finalOrderedFieldsList.Add(geomFieldFromAvailable);
612 | logAction($"MFC Generation: Adding field '{GEOMETRY_COLUMN}' to dataset '{datasetName}' (was in Parquet but not in specific order, placed last among extras).");
613 | }
614 | }
615 | else
616 | {
617 | // Reorder fields: id, bbox_*, other fields alphabetically, geometry
618 | var defaultOrderedFields = new List();
619 | MfcField idField = columns.FirstOrDefault(f => f.Name.Equals("id", StringComparison.OrdinalIgnoreCase));
620 | if (idField != null)
621 | {
622 | defaultOrderedFields.Add(idField);
623 | columns.Remove(idField); // Remove to avoid re-adding
624 | }
625 |
626 | var bboxMfcFieldsSource = columns.Where(f => f.Name.StartsWith("bbox_", StringComparison.OrdinalIgnoreCase)).ToList();
627 | var bboxOrderedFields = new List();
628 | foreach (string bboxName in new[] { "bbox_xmin", "bbox_xmax", "bbox_ymin", "bbox_ymax" })
629 | {
630 | MfcField field = bboxMfcFieldsSource.FirstOrDefault(f => f.Name.Equals(bboxName, StringComparison.OrdinalIgnoreCase));
631 | if (field != null)
632 | {
633 | bboxOrderedFields.Add(field);
634 | columns.Remove(field); // Remove from original list to avoid re-adding
635 | }
636 | }
637 | defaultOrderedFields.AddRange(bboxOrderedFields);
638 |
639 | MfcField geomField = columns.FirstOrDefault(f => f.Name.Equals(GEOMETRY_COLUMN, StringComparison.OrdinalIgnoreCase));
640 | if (geomField != null) columns.Remove(geomField); // Remove to add last
641 |
642 | var otherFields = columns.OrderBy(f => f.Name, StringComparer.OrdinalIgnoreCase).ToList(); // Alphabetical for others
643 | defaultOrderedFields.AddRange(otherFields);
644 |
645 | if (geomField != null) defaultOrderedFields.Add(geomField); // Add geometry last
646 |
647 | finalOrderedFieldsList = defaultOrderedFields; // Assign the result of default ordering
648 | }
649 |
650 | dataset.FieldsList = finalOrderedFieldsList;
651 | }
652 | }
653 | catch (Exception ex)
654 | {
655 | logAction($"Error describing schema for {firstParquetFileForSchema} in dataset {datasetName}: {ex.Message}");
656 | continue;
657 | }
658 |
659 | if (geometryColumnExistsInSchema)
660 | {
661 | logAction($"Starting geometry type/SRID detection for dataset '{datasetName}'...");
662 | foreach (var parquetFile in parquetFiles)
663 | {
664 | string currentFileForGeomCheck = parquetFile.Replace('\\', '/');
665 | logAction($" Checking file: {currentFileForGeomCheck}");
666 | try
667 | {
668 | using (var geomCmd = duckDBConnection.CreateCommand())
669 | {
670 | // Query only for ST_GeometryType as ST_SRID is problematic
671 | string query = $"SELECT ST_GeometryType({GEOMETRY_COLUMN}) FROM read_parquet('{currentFileForGeomCheck.Replace("'", "''")}') WHERE {GEOMETRY_COLUMN} IS NOT NULL LIMIT 1;";
672 | geomCmd.CommandText = query;
673 | logAction($" Executing query: {query}");
674 |
675 | using (var geomReader = await geomCmd.ExecuteReaderAsync())
676 | {
677 | if (await geomReader.ReadAsync())
678 | {
679 | logAction(" Successfully read a row for geometry info.");
680 | object rawGeomTypeObj = geomReader.GetValue(0);
681 | // SRID will be assumed as 4326 (default)
682 |
683 | logAction($" Raw ST_GeometryType: {(rawGeomTypeObj == DBNull.Value ? "DBNull" : rawGeomTypeObj?.ToString())}");
684 | // No longer trying to read SRID from query
685 | // logAction($" Raw ST_SRID: {(rawSridObj == DBNull.Value ? "DBNull" : rawSridObj?.ToString())}");
686 |
687 | if (rawGeomTypeObj != DBNull.Value && rawGeomTypeObj != null)
688 | {
689 | detectedGeometryType = rawGeomTypeObj.ToString();
690 | // detectedWkid remains the default "4326"
691 | logAction($"MFC Generation: Detected geometry type '{detectedGeometryType}' for dataset '{datasetName}' using file '{currentFileForGeomCheck}'. SRID assumed as {detectedWkid}.");
692 | break;
693 | }
694 | else
695 | {
696 | logAction(" ST_GeometryType was DBNull or null. Trying next file.");
697 | }
698 | }
699 | else
700 | {
701 | logAction(" No rows returned for geometry info from this file. Trying next file.");
702 | }
703 | }
704 | }
705 | }
706 | catch (Exception geomEx)
707 | {
708 | logAction($" Warning: Error executing geometry detection query on '{currentFileForGeomCheck}' for dataset '{datasetName}': {geomEx.Message}. Trying next file if available.");
709 | }
710 | if (!string.IsNullOrEmpty(detectedGeometryType)) break;
711 | }
712 |
713 | if (string.IsNullOrEmpty(detectedGeometryType))
714 | {
715 | logAction($"Warning: Could not detect a specific geometry type for dataset '{datasetName}' after checking all files. Defaulting geometry definition.");
716 | }
717 | }
718 |
719 | // Populate dataset.Geometry section
720 | if (geometryColumnExistsInSchema && !string.IsNullOrEmpty(detectedGeometryType))
721 | {
722 | dataset.Geometry = new MfcGeometry
723 | {
724 | GeometryType = MapDuckDbGeomTypeToEsriGeomType(detectedGeometryType.ToUpperInvariant(), logAction),
725 | SpatialReference = new MfcSpatialReference { Wkid = int.Parse(detectedWkid) },
726 | Fields = new List { new MfcGeometryField { Name = GEOMETRY_COLUMN, Formats = new List { "WKB" } } }
727 | };
728 | logAction($"MFC Generation: Added geometry definition for '{datasetName}' with type '{dataset.Geometry.GeometryType}' and SRID '{detectedWkid}'.");
729 | }
730 | else if (geometryColumnExistsInSchema) // Geometry column was in schema, but type detection failed
731 | {
732 | logAction($"Warning: Dataset '{datasetName}' has a '{GEOMETRY_COLUMN}' field, but its type could not be robustly determined. Using default 'esriGeometryAny' and SRID '{detectedWkid}'.");
733 | dataset.Geometry = new MfcGeometry
734 | {
735 | GeometryType = "esriGeometryAny",
736 | SpatialReference = new MfcSpatialReference { Wkid = int.Parse(detectedWkid) },
737 | Fields = new List { new MfcGeometryField { Name = GEOMETRY_COLUMN, Formats = new List { "WKB" } } }
738 | };
739 | }
740 | // If no geometry column in schema, dataset.Geometry remains null, and will be omitted by JsonSerializerOptions if DefaultIgnoreCondition is WhenWritingNull.
741 |
742 | mfcRoot.Datasets.Add(dataset);
743 | }
744 | }
745 |
746 | var options = new JsonSerializerOptions
747 | {
748 | WriteIndented = true,
749 | DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull // Or WhenWritingDefault if prefer
750 | };
751 | string jsonString = JsonSerializer.Serialize(mfcRoot, options);
752 |
753 | await File.WriteAllTextAsync(outputMfcFilePath, jsonString);
754 | logAction($"MFC file successfully generated at {outputMfcFilePath}");
755 | return true;
756 | }
757 | catch (Exception ex)
758 | {
759 | logAction($"Error generating MFC file: {ex.Message}\n{ex.StackTrace}");
760 | return false;
761 | }
762 | }
763 |
764 | private static string ConvertDuckDbTypeToMfcType(string duckDbType, string columnNameForContext, Action logAction)
765 | {
766 | if (duckDbType.StartsWith("DECIMAL")) return "Float64";
767 | if (duckDbType.StartsWith("VARCHAR") || duckDbType.Contains("CHAR") || duckDbType == "TEXT") return "String";
768 |
769 | switch (duckDbType)
770 | {
771 | case "BOOLEAN":
772 | logAction($"Converting DuckDB BOOLEAN type for column '{columnNameForContext}' to String. Consider adding to knownBooleanFields for SourceType.");
773 | return "String";
774 | case "TINYINT": return "Int8";
775 | case "SMALLINT": return "Int16";
776 | case "INTEGER": return "Int32";
777 | case "BIGINT": return "Int64";
778 | case "HUGEINT": return "String";
779 | case "FLOAT4":
780 | case "REAL":
781 | case "FLOAT":
782 | return "Float32";
783 | case "FLOAT8":
784 | case "DOUBLE PRECISION":
785 | case "DOUBLE":
786 | return "Float64";
787 | case "DATE": return "Date";
788 | case "TIMESTAMP": return "String";
789 | case "TIMESTAMPTZ": return "String";
790 | case "TIME": return "String";
791 | case "INTERVAL": return "String";
792 | case "BLOB": return "Binary";
793 | case "BYTEA": return "Binary";
794 | default:
795 | if (duckDbType.StartsWith("STRUCT") || duckDbType.StartsWith("LIST") || duckDbType.StartsWith("ARRAY") || duckDbType.StartsWith("MAP"))
796 | {
797 | logAction($"Warning: Converting complex DuckDB type '{duckDbType}' for column '{columnNameForContext}' to String. Data may be stringified.");
798 | return "String";
799 | }
800 | logAction($"Warning: Unknown DuckDB type for column '{columnNameForContext}': '{duckDbType}'. Defaulting to String.");
801 | return "String";
802 | }
803 | }
804 |
805 | private static string MapDuckDbGeomTypeToEsriGeomType(string duckDbGeomType, Action logAction)
806 | {
807 | switch (duckDbGeomType)
808 | {
809 | case "POINT":
810 | case "MULTIPOINT":
811 | return "esriGeometryPoint";
812 | case "LINESTRING":
813 | case "MULTILINESTRING":
814 | return "esriGeometryPolyline";
815 | case "POLYGON":
816 | case "MULTIPOLYGON":
817 | return "esriGeometryPolygon";
818 | default:
819 | logAction($"Unmapped DuckDB geometry type: {duckDbGeomType}. Defaulting to esriGeometryPoint.");
820 | return "esriGeometryPoint";
821 | }
822 | }
823 |
824 | // Removed the old CreateMfcAsync stub and RefreshMfcAsync method
825 | }
826 | }
--------------------------------------------------------------------------------