├── .DS_Store ├── .env ├── .github └── workflows │ ├── codeql-analysis.yml │ ├── package-scripts.yml │ └── upload-release-assets.yml ├── .gitignore ├── .vscode ├── launch.json └── settings.json ├── AlternativeSQLServerExtractionMethods ├── .DS_Store ├── License.txt ├── PowerShellScripts │ ├── .DS_Store │ ├── README.md │ ├── Revision-History.txt │ └── bin │ │ └── extract-sql-server-ddl.ps1 ├── README.md ├── Table _sizing_report_query.pdf └── mssql-scripter │ └── mssql-scripter.pdf ├── BigQuery ├── License.txt ├── README.md └── bin │ └── create_ddls.sh ├── DB2 ├── License.txt ├── README.md └── bin │ ├── create_ddls.ps1 │ └── create_ddls.sh ├── Databricks ├── Download_jobs_sources.dbc ├── License.txt ├── README.md └── images │ └── notebook_export_source_codes.png ├── Hive ├── License.txt ├── README.md └── exp_ddl.sh ├── LEGAL.md ├── Netezza ├── License.txt └── README.md ├── Oracle ├── License.txt ├── README.md ├── bin │ ├── create_ddls.bat │ ├── create_ddls.sh │ └── create_ddls_plus.sh ├── scripts │ ├── create_ddls.sql │ └── create_ddls_plus.sql └── setup.cfg ├── README.md ├── Redshift ├── License.txt ├── README.md ├── bin │ ├── create_ddls.ps1 │ └── create_ddls.sh └── scripts │ ├── DDL_Function.sql │ ├── DDL_Procedure.sql │ ├── DDL_Table.sql │ └── DDL_View.sql ├── SQLServer ├── .DS_Store ├── License.txt ├── README.pdf ├── SQL_Server_Code_Extraction.pdf └── Table _sizing_report.pdf ├── Teradata ├── .DS_Store ├── License.txt ├── README.md ├── bin │ └── create_ddls.sh ├── scripts_template │ ├── create_databases.btq │ ├── create_functions.btq │ ├── create_join_indexes.btq │ ├── create_macros.btq │ ├── create_procedures.btq │ ├── create_schemas.btq │ ├── create_tables.btq │ ├── create_triggers.btq │ └── create_views.btq └── sf_objects │ ├── Helper_Functions.sql │ └── SF_Sys_Calendar.sql ├── Tests ├── DB2 │ ├── .gitignore │ ├── License.txt │ ├── README.md │ └── startDocker.sh └── Teradata │ ├── License.txt │ ├── README.md │ ├── database_summary │ ├── __init__.py │ ├── database_source_code_summarizer.py │ ├── database_source_code_summary.py │ └── top_level_object_type.py │ ├── scripts │ ├── config.sh │ ├── execute_deploy_database_script.sh │ ├── execute_drop_database_script.sh │ ├── execute_extract_database_script.sh │ ├── execute_scripts.sh │ └── ssh_automatic_login_configuration.sh │ ├── source_code │ └── demo_database │ │ ├── database_code │ │ ├── DDL_CreateMacro.sql │ │ ├── DDL_Databases.sql │ │ ├── DDL_JoinIndex.sql │ │ ├── DDL_Tables.sql │ │ ├── DDL_Trigger.sql │ │ ├── DDL_Views.sql │ │ ├── INSERT_VEMPLOYEE.sql │ │ ├── UPDATE_VEMPLOYEE.sql │ │ ├── my_yyyymmdd_to_date2.c │ │ └── my_yyyymmdd_to_date2.sql │ │ ├── deploy_database.sh │ │ └── drop_database.sh │ ├── teradata_extraction_test_base.py │ └── test_demo_database.py ├── Vertica ├── DocumentationImages │ ├── BinNewTerminal.png │ ├── ConnectToServer.png │ ├── ContainerRunning.PNG │ ├── CreateTables.png │ ├── CreateViews.png │ ├── DockerExtensions.PNG │ ├── DockerRunning.PNG │ ├── ExampleScripts.PNG │ ├── FoldeStructure.PNG │ ├── Folder.PNG │ ├── Launchjson.png │ ├── PipInstall_sqlparse.png │ ├── PipInstallsqlparse.PNG │ ├── PythonDDLRunSucessfully.png │ ├── PythonScripts.png │ ├── PythonVersion.png │ ├── RunDockerVertica.png │ ├── RunPythonCode.png │ ├── RunPythonCode02.png │ ├── TempFileCreated.png │ ├── TempFolder.png │ ├── VerticaClientDriversLinux.png │ └── VerticaTarFile.png ├── License.txt ├── README.md ├── Scripts │ ├── SFConfig.py │ ├── SFConvert.py │ ├── SQL_Convert │ │ └── sqls │ │ │ └── vmart_query_01.sql │ ├── VerticaConfig.py │ 
├── VerticaDBCalls.py │ └── vertMain.py ├── TEMP │ └── VerticaDDL │ │ ├── STORE_MYTABLE_1.sql │ │ ├── STORE_MYTABLE_2.sql │ │ ├── STORE_MYVIEW1.sql │ │ └── STORE_MYVIEW2.sql ├── VerticaReadme.md ├── install-vertica.sh ├── requirements.txt ├── sfConf.txt └── verticaConf.txt ├── [ARCHIVED] TeradataScripts ├── .DS_Store └── Teradata │ ├── .DS_Store │ ├── License.txt │ ├── README.md │ ├── bin │ └── create_ddls.sh │ ├── scripts │ ├── create_ddls.btq │ ├── create_reports.btq │ ├── create_sample_inserts.btq │ ├── create_usage_reports.btq │ ├── data_profiling.btq │ └── invalid_objects.btq │ └── sf_objects │ ├── Helper_Functions.sql │ └── SF_Sys_Calendar.sql └── additional_notes └── DB2 └── useful_db2_scripts.md /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/.DS_Store -------------------------------------------------------------------------------- /.env: -------------------------------------------------------------------------------- 1 | SNOW_USER=Change for your user 2 | SNOW_PASSWORD=XXXXXXXX 3 | SNOW_ACCOUNT=SnowAccount 4 | SNOW_WAREHOUSE=datawarehouseName 5 | SNOW_DATABASE=DatabaseName 6 | SNOW_ROLE=Rolename 7 | OUT_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/sqls 8 | SUCCESS_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/success 9 | FAILED_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/failed 10 | FORMATTED_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/formatted 11 | LOG_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/log 12 | ACTION=SQL 13 | MAX_THREADS=15 14 | STOP_AFTER=1000000 15 | DB_SESSIONS=1 16 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ main ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ main ] 20 | schedule: 21 | - cron: '30 11 * * 4' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'python' ] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] 37 | # Learn more: 38 | # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed 39 | 40 | steps: 41 | - name: Checkout repository 42 | uses: actions/checkout@v2 43 | 44 | # Initializes the CodeQL tools for scanning. 
45 | - name: Initialize CodeQL 46 | uses: github/codeql-action/init@v1 47 | with: 48 | languages: ${{ matrix.language }} 49 | # If you wish to specify custom queries, you can do so here or in a config file. 50 | # By default, queries listed here will override any specified in a config file. 51 | # Prefix the list here with "+" to use these queries and those in the config file. 52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, then you should remove it and run the build manually (see below) 56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v1 58 | 59 | # ℹ️ Command-line programs to run using the OS shell. 60 | # 📚 https://git.io/JvXDl 61 | 62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 63 | # and modify them (or add more) to build your code if your project 64 | # uses a compiled language 65 | 66 | #- run: | 67 | # make bootstrap 68 | # make release 69 | 70 | - name: Perform CodeQL Analysis 71 | uses: github/codeql-action/analyze@v1 72 | -------------------------------------------------------------------------------- /.github/workflows/package-scripts.yml: -------------------------------------------------------------------------------- 1 | name: "Package Scripts" 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | zip-files: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - uses: papeloto/action-zip@v1 13 | with: 14 | files: Oracle 15 | dest: export-scripts-oracle.zip 16 | - uses: papeloto/action-zip@v1 17 | with: 18 | files: Redshift 19 | dest: export-scripts-redshift.zip 20 | - uses: papeloto/action-zip@v1 21 | with: 22 | files: SQLServer 23 | dest: export-scripts-sqlserver.zip 24 | - uses: papeloto/action-zip@v1 25 | with: 26 | files: Teradata 27 | dest: export-scripts-teradata.zip 28 | - uses: papeloto/action-zip@v1 29 | with: 30 | files: Vertica 31 | dest: export-scripts-vertica.zip 32 | - uses: papeloto/action-zip@v1 33 | with: 34 | files: Databricks 35 | dest: export-scripts-databricks.zip 36 | - uses: papeloto/action-zip@v1 37 | with: 38 | files: AlternativeSQLServerExtractionMethods 39 | dest: export-scripts-AlternativeSQLServerExtractionMethods.zip 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | ## 4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 5 | 6 | .venv/ 7 | 8 | # User-specific files 9 | snowsql_rt.log.* 10 | nodejs.csv 11 | *.sql.js 12 | *.html 13 | cconvp.ini 14 | *.suo 15 | *.user 16 | *.userosscache 17 | *.sln.docstates 18 | nodejs/ 19 | nodejs*/ 20 | .vscode/ 21 | .snowqm/ 22 | TargetMigratedCode/ 23 | Deploy*/ 24 | 25 | # User-specific files (MonoDevelop/Xamarin Studio) 26 | *.userprefs 27 | 28 | # Build results 29 | [Dd]ebug/ 30 | [Dd]ebugPublic/ 31 | [Rr]elease/ 32 | [Rr]eleases/ 33 | x64/ 34 | x86/ 35 | bld/ 36 | [Oo]bj/ 37 | [Ll]og/ 38 | 39 | # Visual Studio 2015/2017 cache/options directory 40 | .vs/ 41 | # Uncomment if you have tasks that create the project's static files in wwwroot 42 | #wwwroot/ 43 | 44 | # Visual Studio 2017 auto generated files 45 | Generated\ Files/ 46 | 47 | # MSTest test Results 48 | [Tt]est[Rr]esult*/ 49 | [Bb]uild[Ll]og.* 50 | 51 | # NUNIT 52 | *.VisualState.xml 53 | TestResult.xml 54 | 55 | # Build Results of an ATL Project 56 | [Dd]ebugPS/ 57 | [Rr]eleasePS/ 58 | dlldata.c 59 | 60 | # Benchmark Results 61 | BenchmarkDotNet.Artifacts/ 62 | 63 | # .NET Core 64 | project.lock.json 65 | project.fragment.lock.json 66 | artifacts/ 67 | **/Properties/launchSettings.json 68 | 69 | # StyleCop 70 | StyleCopReport.xml 71 | 72 | # Files built by Visual Studio 73 | *_i.c 74 | *_p.c 75 | *_i.h 76 | *.ilk 77 | *.meta 78 | *.obj 79 | *.iobj 80 | *.pch 81 | *.pdb 82 | *.ipdb 83 | *.pgc 84 | *.pgd 85 | *.rsp 86 | *.sbr 87 | *.tlb 88 | *.tli 89 | *.tlh 90 | *.tmp 91 | *.tmp_proj 92 | *.log 93 | *.vspscc 94 | *.vssscc 95 | .builds 96 | *.pidb 97 | *.svclog 98 | *.scc 99 | 100 | # Chutzpah Test files 101 | _Chutzpah* 102 | 103 | # Visual C++ cache files 104 | ipch/ 105 | *.aps 106 | *.ncb 107 | *.opendb 108 | *.opensdf 109 | *.sdf 110 | *.cachefile 111 | *.VC.db 112 | *.VC.VC.opendb 113 | 114 | # Visual Studio profiler 115 | *.psess 116 | *.vsp 117 | *.vspx 118 | *.sap 119 | 120 | # Visual Studio Trace Files 121 | *.e2e 122 | 123 | # TFS 2012 Local Workspace 124 | $tf/ 125 | 126 | # Guidance Automation Toolkit 127 | *.gpState 128 | 129 | # ReSharper is a .NET coding add-in 130 | _ReSharper*/ 131 | *.[Rr]e[Ss]harper 132 | *.DotSettings.user 133 | 134 | # JustCode is a .NET coding add-in 135 | .JustCode 136 | 137 | # TeamCity is a build add-in 138 | _TeamCity* 139 | 140 | # DotCover is a Code Coverage Tool 141 | *.dotCover 142 | 143 | # AxoCover is a Code Coverage Tool 144 | .axoCover/* 145 | !.axoCover/settings.json 146 | 147 | # Visual Studio code coverage results 148 | *.coverage 149 | *.coveragexml 150 | 151 | # NCrunch 152 | _NCrunch_* 153 | .*crunch*.local.xml 154 | nCrunchTemp_* 155 | 156 | # MightyMoose 157 | *.mm.* 158 | AutoTest.Net/ 159 | 160 | # Web workbench (sass) 161 | .sass-cache/ 162 | 163 | # Installshield output folder 164 | [Ee]xpress/ 165 | 166 | # DocProject is a documentation generator add-in 167 | DocProject/buildhelp/ 168 | DocProject/Help/*.HxT 169 | DocProject/Help/*.HxC 170 | DocProject/Help/*.hhc 171 | DocProject/Help/*.hhk 172 | DocProject/Help/*.hhp 173 | DocProject/Help/Html2 174 | DocProject/Help/html 175 | 176 | # Click-Once directory 177 | publish/ 178 | 179 | # Publish Web Output 180 | *.[Pp]ublish.xml 181 | *.azurePubxml 182 | # Note: Comment the next line if you want to checkin your web deploy settings, 183 | # but database connection strings (with potential passwords) will be unencrypted 184 | *.pubxml 185 | *.publishproj 186 | 187 | # Microsoft Azure Web App publish 
settings. Comment the next line if you want to 188 | # checkin your Azure Web App publish settings, but sensitive information contained 189 | # in these scripts will be unencrypted 190 | PublishScripts/ 191 | 192 | # NuGet Packages 193 | *.nupkg 194 | # The packages folder can be ignored because of Package Restore 195 | **/[Pp]ackages/* 196 | # except build/, which is used as an MSBuild target. 197 | !**/[Pp]ackages/build/ 198 | # Uncomment if necessary however generally it will be regenerated when needed 199 | #!**/[Pp]ackages/repositories.config 200 | # NuGet v3's project.json files produces more ignorable files 201 | *.nuget.props 202 | *.nuget.targets 203 | 204 | # Microsoft Azure Build Output 205 | csx/ 206 | *.build.csdef 207 | 208 | # Microsoft Azure Emulator 209 | ecf/ 210 | rcf/ 211 | 212 | # Windows Store app package directories and files 213 | AppPackages/ 214 | BundleArtifacts/ 215 | Package.StoreAssociation.xml 216 | _pkginfo.txt 217 | *.appx 218 | 219 | # Visual Studio cache files 220 | # files ending in .cache can be ignored 221 | *.[Cc]ache 222 | # but keep track of directories ending in .cache 223 | !*.[Cc]ache/ 224 | 225 | # Others 226 | ClientBin/ 227 | ~$* 228 | *~ 229 | *.dbmdl 230 | *.dbproj.schemaview 231 | *.jfm 232 | *.pfx 233 | *.publishsettings 234 | orleans.codegen.cs 235 | 236 | # Including strong name files can present a security risk 237 | # (https://github.com/github/gitignore/pull/2483#issue-259490424) 238 | #*.snk 239 | 240 | # Since there are multiple workflows, uncomment next line to ignore bower_components 241 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 242 | #bower_components/ 243 | 244 | # RIA/Silverlight projects 245 | Generated_Code/ 246 | 247 | # Backup & report files from converting an old project file 248 | # to a newer Visual Studio version. Backup files are not needed, 249 | # because we have git ;-) 250 | _UpgradeReport_Files/ 251 | Backup*/ 252 | UpgradeLog*.XML 253 | UpgradeLog*.htm 254 | ServiceFabricBackup/ 255 | *.rptproj.bak 256 | 257 | # SQL Server files 258 | *.mdf 259 | *.ldf 260 | *.ndf 261 | 262 | # Business Intelligence projects 263 | *.rdl.data 264 | *.bim.layout 265 | *.bim_*.settings 266 | *.rptproj.rsuser 267 | 268 | # Microsoft Fakes 269 | FakesAssemblies/ 270 | 271 | # GhostDoc plugin setting file 272 | *.GhostDoc.xml 273 | 274 | # Node.js Tools for Visual Studio 275 | .ntvs_analysis.dat 276 | node_modules/ 277 | 278 | # Visual Studio 6 build log 279 | *.plg 280 | 281 | # Visual Studio 6 workspace options file 282 | *.opt 283 | 284 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
285 | *.vbw 286 | 287 | # Visual Studio LightSwitch build output 288 | **/*.HTMLClient/GeneratedArtifacts 289 | **/*.DesktopClient/GeneratedArtifacts 290 | **/*.DesktopClient/ModelManifest.xml 291 | **/*.Server/GeneratedArtifacts 292 | **/*.Server/ModelManifest.xml 293 | _Pvt_Extensions 294 | 295 | # Paket dependency manager 296 | .paket/paket.exe 297 | paket-files/ 298 | 299 | # FAKE - F# Make 300 | .fake/ 301 | 302 | # JetBrains Rider 303 | .idea/ 304 | *.sln.iml 305 | 306 | # CodeRush 307 | .cr/ 308 | 309 | # Python Tools for Visual Studio (PTVS) 310 | __pycache__/ 311 | *.pyc 312 | 313 | # Cake - Uncomment if you are using it 314 | # tools/** 315 | # !tools/packages.config 316 | 317 | # Tabs Studio 318 | *.tss 319 | 320 | # Telerik's JustMock configuration file 321 | *.jmconfig 322 | 323 | # BizTalk build output 324 | *.btp.cs 325 | *.btm.cs 326 | *.odx.cs 327 | *.xsd.cs 328 | 329 | # OpenCover UI analysis results 330 | OpenCover/ 331 | 332 | # Azure Stream Analytics local run output 333 | ASALocalRun/ 334 | 335 | # MSBuild Binary and Structured Log 336 | *.binlog 337 | 338 | # NVidia Nsight GPU debugger configuration file 339 | *.nvuser 340 | 341 | # MFractors (Xamarin productivity tool) working folder 342 | .mfractor/ 343 | snowsql_rt.log_bootstrap 344 | 345 | opt/ 346 | bfg.jar 347 | 348 | # DB2 files 349 | DB2/database/ 350 | DB2/object_extracts/ 351 | 352 | # Teradata files 353 | Teradata/output/ 354 | Teradata/log/ 355 | Teradata/bin/execute_on_mac.sh 356 | 357 | # Tests files 358 | Tests/Teradata/extracted_code/ 359 | Tests/Teradata/scripts/Teradata_Extraction 360 | 361 | # Autogenerated file 362 | *.DS_Store -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Debug Script", 6 | "type": "python", 7 | "request": "launch", 8 | "program": "${file}", 9 | "console": "integratedTerminal", 10 | "justMyCode": false, 11 | "envFile": "${workspaceFolder}/.env" 12 | }, 13 | { 14 | "name": "Vertica main", 15 | "type": "python", 16 | "request": "launch", 17 | "program": "${file}", 18 | "console": "integratedTerminal", 19 | "justMyCode": false, 20 | "envFile": "${workspaceFolder}/.env", 21 | "args": ["-s","/workspace/SnowConvertDDLExportScripts/sfConf.txt","-v","/workspace/SnowConvertDDLExportScripts/verticaConf.txt"] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.exclude": { 3 | "**/.git": true, 4 | "**/.svn": true, 5 | "**/.hg": true, 6 | "**/CVS": true, 7 | "**/.DS_Store": true, 8 | "**/__pycache__":true, 9 | "**/.pytest_cache": true 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/AlternativeSQLServerExtractionMethods/.DS_Store -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 
2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/PowerShellScripts/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/AlternativeSQLServerExtractionMethods/PowerShellScripts/.DS_Store -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/PowerShellScripts/README.md: -------------------------------------------------------------------------------- 1 | # SQL Server Export Scripts 2 | 3 | This repository provides some simple scripts to help exporting your SQLServer code so it can be migrated to [Snowflake](https://www.snowflake.com/) using [SnowConvert](https://docs.snowconvert.com/snowconvert/for-transactsql/introduction). 4 | 5 | ## Version 6 | 7 | Version 2.8 8 | Release 2022-09-01 9 | 10 | ## Usage 11 | 12 | The `extract-sql-server-ddl.ps1` script attempts to connect to an instance of SQL Server using either Windows or SQL authentication and, for each database that survives inclusion/exclusion filters, retrieves certain object definitions as individual DDL files to a local directory. 13 | 14 | **SQL Server tested versions**: `SQL Server 2019`, `Azure SQLDatabase` 15 | 16 | The script uses the following parameters. The script will prompt the user for any parameter not specified on the command line. 
17 | 18 | * **ServerName**: Specifies the SQL Server database server to use 19 | * **InstanceName**: Specifies the SQL Server database instance to use (default is the default instance) 20 | * **PortNumber**: Specifies the port to use (default is 1433) 21 | * **UserName**: Specifies the user name to use with SQL Authentication (default is the logged-in user) 22 | * **Password**: Specifies the password to use for **UserName** (if SQL authentication preferred) 23 | * **ScriptDirectory**: Specifies the root directory in which the extracted files are to be stored (default is .\MyScriptsDirectory) 24 | * **IncludeDatabases**: Specifies databases that match the listed pattern(s) be included in the extraction (default is all) 25 | * **ExcludeDatabases**: Specifies databases that match the listed pattern(s) be excluded from the extraction (default is none) 26 | * **IncludeSchemas**: Specifies schemas (in any database) that match the listed pattern(s) be included in the extraction (default is all) 27 | * **ExcludeSchemas**: Specifies schemas (in any database) that match the listed pattern(s) be excluded from the extraction (default is none) 28 | * **IncludeSystemDatabases**: Specifies whether to include databases, schemas, and tables tagged as SQL Server system objects (default is false) 29 | * **ExistingDirectoryAction**: Specifies whether to delete or keep the existing **ScriptDirectory** (default is to prompt interactively) 30 | * **NoSysAdminAction**: Specifies whether to stop or continue should the authenticated **UserName** not have the sysadmin role on **ServerName**\\**InstanceName** (default is to prompt interactively) 31 | 32 | ## Troubleshooting 33 | 34 | ### What do I do if I need to run the scripts on a machine with no internet access? 35 | 36 | The extraction scripts will try to install a PowerShell module for SQL Server. If the machine does not have internet access, this operation might fail. 37 | 38 | One option is to download the module and install it manually. 39 | 40 | You can follow these steps: 41 | 42 | 1. Run PowerShell 43 | 2. Create a folder, for example `C:\temp` 44 | 3. Run `Invoke-WebRequest -Uri https://www.powershellgallery.com/api/v2/package/sqlserver -OutFile C:\temp\sqlserver.zip` 45 | 4. Now we need to extract the module into a path that PowerShell can use to load modules. To list those paths, run 46 | ``` 47 | PS C:\> echo $env:PSModulePath.Split(";") 48 | C:\Users\username\Documents\WindowsPowerShell\Modules 49 | C:\Program Files (x86)\WindowsPowerShell\Modules 50 | C:\Program Files\WindowsPowerShell\Modules 51 | ``` 52 | The output prints the list of folders where PowerShell looks for modules. 53 | You can select one of the folders like this: 54 | ``` 55 | PS C:\> echo $env:PSModulePath.Split(";")[0] 56 | C:\Users\username\Documents\WindowsPowerShell\Modules 57 | ``` 58 | Create a target folder: 59 | ``` 60 | PS C:\> mkdir ($env:PSModulePath.Split(";")[0] + "\SqlServer") 61 | ``` 62 | 63 | And extract the module like this: 64 | ``` 65 | PS C:\> Expand-Archive -Path C:\temp\sqlserver.zip -DestinationPath ($env:PSModulePath.Split(";")[0] + "\SqlServer") 66 | ``` 67 | 68 | 5. Install it: 69 | ``` 70 | PS C:\> Install-Module -Name SqlServer -Scope CurrentUser 71 | 72 | Untrusted repository 73 | You are installing the modules from an untrusted repository. If you trust this repository, change its 74 | InstallationPolicy value by running the Set-PSRepository cmdlet. Are you sure you want to install the modules from 75 | 'PSGallery'?
76 | [Y] Yes [A] Yes to All [N] No [L] No to All [S] Suspend [?] Help (default is "N"): A 77 | ``` 78 | 79 | ## Additional Help 80 | 81 | For more information on using the script, execute the following: 82 | ```ps 83 | PS> Get-Help -full .\extract-sql-server-ddl.ps1 84 | ``` 85 | 86 | ## Reporting issues and feedback 87 | 88 | If you encounter any bugs with the tool please file an issue in the 89 | [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of our GitHub repo. 90 | 91 | ## License 92 | 93 | These scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/SQLServer/License.txt). 94 | -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/PowerShellScripts/Revision-History.txt: -------------------------------------------------------------------------------- 1 | # 2 | # 2021-08-05 Derrick Cole 3 | # - parameterized variables 4 | # - added reset switch 5 | # - reordered/cleaned up logic 6 | # - more robust try/catch error handling 7 | # - corrected databaseObjectType references 8 | # - converted "where name" to Where-Object for compatability 9 | # - added filter to exclude system schemae/objects and in-memory temp tables 10 | # 11 | # 2021-08-06 Derrick Cole 12 | # - added database include and exclude capability 13 | # - added database- and table-level info capture (in addition to the DDL) 14 | # 15 | # 2021-08-09 Derrick Cole 16 | # - ran script through PSScriptAnalyzer and tweaked based on default ruleset (install; Invoke-ScriptAnalyzer -Path ) 17 | # - added check for PS 4.0+ 18 | # - added external* database object types 19 | # - added database and table summary info 20 | # 21 | # 2021-09-02 Derrick Cole 22 | # - incorporated Azure support from separate script 23 | # - cleaned up parameters and logic 24 | # 25 | # 2021-09-03 Derrick Cole 26 | # - version 1.0 27 | # - added SqlServer module presence/install block 28 | # - corrected database inclusion/exclusion filtering 29 | # - consolidated server connection into single block 30 | # - added a server summary dump 31 | # - added version and rundate info 32 | # - minor cleanup 33 | # 34 | # 2021-09-07 Derrick Cole 35 | # - version 1.1 36 | # - adjusted database inclusion/exclusion filtering 37 | # - added support for masked password prompting 38 | # - added SQL Server authentication option (Windows authentication by default) 39 | # - added support for Get-Help 40 | # - more cleanup 41 | # 42 | # 2021-09-18 Derrick Cole 43 | # - version 1.2 44 | # - added user role check 45 | # - added more graceful processing and error handling 46 | # - more cleanup 47 | # 48 | # 2021-10-28 Derrick Cole 49 | # - version 1.3 50 | # - increased PowerShell minimum version to 5.0 51 | # - removed dependency on SqlServer module in favor of two SMO assemblies 52 | # - removed SqlAuthentication parameter in favor of UserId on the command line 53 | # - renamed IncludeSystemObjects parameter to IncludeSystemDatabases 54 | # - added command line directives for existing directories and no sysadmin 55 | # - added {Include/Exclude}Schema filters 56 | # - closer alignment to out-of-the-box functionality 57 | # - database objects collected into "DDL_.sql" files 58 | # - added help comments 59 | # - general cleanup 60 | # 61 | # 2021-11-02 Derrick Cole 62 | # - version 1.4 63 | # - default vs named instance support 64 | # - corrected scripter AppendToFile option 65 | # - cleaned up help comments, added SMO assembly URL 66 | # 
67 | # 2021-11-17 Derrick Cole 68 | # - version 1.5 69 | # - added object inventory dump 70 | # - converted urnCollection/scripter calls from once per object to once per object type 71 | # - incorporated more robust checks for required assemblies 72 | # - included verbiage for addressing missing required assemblies and alternatives 73 | # - removed IncludeIfNotExists scripter option 74 | # - minor cleanup 75 | # 76 | # 2021-12-03 Derrick Cole 77 | # - version 1.6 78 | # - added more robust checking for sysadmin once connected to instance (via is_srvrolemember() query instead of login when logins are group-managed) 79 | # - adjusted psadmin-pre-reqs.ps1 script to check for assemblies prior to installing module 80 | # - minor cleanup 81 | # 82 | # 2021-12-09 Derrick Cole 83 | # - version 1.7 84 | # - refactored server and server\instance connection handling (code and command-line parameters) to account for all combinations of (un)specified server, instance, port, and tcp format 85 | # - minor cleanup 86 | # 87 | # 2021-12-16 Derrick Cole 88 | # - version 1.8 89 | # - added ExtendedProperties to the scripter options list to capture comments (and other extended properties) 90 | # 91 | # 2022-01-07 Derrick Cole 92 | # - version 1.9 93 | # - added more robust handling of encrypted objects and tracking of same in object_inventory.csv 94 | # - minor cleanup 95 | # 96 | # 2022-02-10 Derrick Cole 97 | # - version 2.0 98 | # - added interactive prompts with default values for parameters not specified on the command line 99 | # - separated ServerInstance parameter into separate ServerName and InstanceName parameters 100 | # - added more robust handling of server names and instance names 101 | # - relocated default script directory to directory containing script 102 | # - removed UseTcp parameter 103 | # - minor cleanup 104 | # 105 | # 2022-02-22 Derrick Cole 106 | # - version 2.1 107 | # - added support for instance-level linked server scripting 108 | # 109 | # 2022-04-05 Derrick Cole 110 | # - version 2.2 111 | # - added additional user instructions and clarified prompts 112 | # - more robust handling of empty arrays 113 | # - moved to one scripter call per object (as opposed to per object type) 114 | # - minor cleanup 115 | # 116 | # 2022-05-20 Derrick Cole 117 | # - version 2.3 118 | # - first cut at Synapse support 119 | # - defaulting to identifying type of instance based on value of ServerName 120 | # - minor cleanup 121 | # 122 | # 2022-07-07 Derrick Cole 123 | # - version 2.4 124 | # - corrected database and schema inclusion/exclusion pattern match handling 125 | # - added operating system check 126 | # 127 | # 2022-07-08 Derrick Cole 128 | # - version 2.5 129 | # - reorganized environment checks 130 | # - moved all function calls ahead of main block 131 | # - refreshed README.md to current version 132 | # 133 | # 2022-07-26 Derrick Cole 134 | # - version 2.6 135 | # - added support for PowerShell 7 136 | # - simplified assembly acquisition instructions 137 | # - added support for *nix execution 138 | # - minor cleanup 139 | # 140 | # 2022-08-22 Derrick Cole 141 | # - version 2.7 142 | # - corrected handling of scripter file paths containing spaces 143 | # - verified script works on PowerShell 5 (Windows) and PowerShell 7 (on Windows and *nix) with SQL Server module 144 | # 145 | # 2022-09-01 Derrick Cole 146 | # - version 2.8 147 | # - added '' delimiters between object definitions 148 | # - enforced ASCII encoding output (clash between appending the delimiter and the scripter output) 149 | # 
- added calls to resolve-path to, well, resolve the script directory path prior to setting scripterfile 150 | # -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/README.md: -------------------------------------------------------------------------------- 1 | # Alternative SQL Server Extraction Methods 2 | 3 | 4 | ## Extraction Methods 5 | 6 | This folder contains a set of alternative methods for code extraction for SQL Server, in case SQL Server Management Studio can not be executed in your system. 7 | - mssql-scripter: Is a Python package developed by Microsoft developed to generate data definition language (DDL) and data manipulation language (DML) T-SQL scripts for database objects in SQL Server. We recommend using this option for MacOS and Linux. Also runs in Windows, but always try using SSMS in Windows environments. Needs a previous Python installation in your system. 8 | 9 | - PowerShell Extraction Script: The Script attempts to connect to an instance of SQL Server and retrieves certain object definitions as individual DDL files to a local directory. This script should be executed in a Windows environment but we recommend using it in case SSMS and mssql-scripter definitely can't be executed in your system. 10 | 11 | 12 | ## Table Sizing Report 13 | 14 | - The Table_sizing_report_query.pdf file located in this folder, contains a query used to generate a report that provides detailed data on the utilization of disk space within a database in SQL Server. -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/Table _sizing_report_query.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/AlternativeSQLServerExtractionMethods/Table _sizing_report_query.pdf -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/mssql-scripter/mssql-scripter.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/AlternativeSQLServerExtractionMethods/mssql-scripter/mssql-scripter.pdf -------------------------------------------------------------------------------- /BigQuery/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /BigQuery/README.md: -------------------------------------------------------------------------------- 1 | # BigQuery DDL Export Scripts 2 | 3 | This repository offers a collection of straightforward scripts designed to facilitate the export of your BigQuery code, making it easier to migrate to [Snowflake](https://www.snowflake.com/). These scripts are specifically crafted to simplify the process of extracting your BigQuery code artifacts, such as stored procedures, functions, and views, ensuring a smooth transition to [Snowflake](https://www.snowflake.com/) using [SnowConvert](https://docs.snowconvert.com/snowconvert/for-google-bigquery/introduction). 4 | 5 | ## Version 6 | 7 | Release 2024-11-19 8 | 9 | ## Usage 10 | 11 | The following are the steps to execute the DDL Code Generation. They can be executed in Linux/Unix. 12 | 13 | 14 | 15 | ## How does this work? 16 | 17 | The script `create_ddls.sh` will connect to your database and create a collection of SQL files. 18 | 19 | ## Prerequisites 20 | 21 | 1. Cloud SDK needs to be installed. If you have not installed it, you can follow [these](https://cloud.google.com/sdk/docs/install#linux) instructions. 22 | 2. The user must have Admin or Owner privileges, otherwise no information will be retrieved. 23 | 3. The user must be granted a role with the `bigquery.datasets.get` permission. If no existing role includes it, you can create a custom role just for this. 24 | 25 | 26 | ## Usage 27 | 28 | The following are the steps to execute the DDL Code Generation. They can be executed in Linux/Unix environments. 29 | 30 | 1. Modify `create_ddls.sh`, located in the `bin` folder 31 | - The region setting will be at the top of this file. 32 | - You must log in by going to a link in your browser when you run `./google-cloud-sdk/bin/gcloud init`, and then select the cloud project to use. 33 | 34 | 2. Before executing the script, ensure `create_ddls.sh` is at the same folder level as `./google-cloud-sdk/` 35 | - Finally, run `create_ddls.sh` to extract the DDLs from BigQuery 36 | - After a successful run, remove region information from the top line of `create_ddls.sh`. 37 | 38 | ### Arguments 39 | 40 | ```--version``` 41 | 42 | Check the current version of the extraction scripts. 43 | 44 | ```--help``` 45 | 46 | Display the help screen. 47 | 48 | ```-s "schema1, schema2 [, ...]"``` 49 | 50 | Limits the extraction to an in-list of schemas, using the structure schema1 [, ...]. 51 | 52 | 53 | ### DDL Files 54 | These files will contain the definitions of the objects specified by the file name. 55 | 56 | * `DDL_Schema.sql` 57 | * `DDL_Tables.sql` 58 | * `DDL_External_Tables.sql` 59 | * `DDL_Views.sql` 60 | * `DDL_Functions.sql` 61 | * `DDL_Procedures.sql` 62 | * `DDL_Reservations.sql` 63 | * `DDL_Capacity_commitments.sql` 64 | * `DDL_Assignments.sql` -------------------------------------------------------------------------------- /DB2/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc.
2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /DB2/README.md: -------------------------------------------------------------------------------- 1 | # DB2 Export Scripts 2 | 3 | This repository provides some simple scripts to help exporting your DB2 code so it can be migrated to [Snowflake](https://www.snowflake.com/) using [SnowConvert](https://docs.snowconvert.com/snowconvert/for-db2/introduction). 4 | 5 | ## Version 6 | 7 | Release 2023-02-28 8 | 9 | ## Usage 10 | 11 | The following are the steps to execute the DDL Code Generation. They can be executed in Linux/Unix environments. 12 | 13 | ## **For Linux/Unix:** 14 | 15 | 1 - Modify `create_ddls.sh`, located in the `bin` folder. Using a text editor, modify the following parameters: 16 | 17 | * `DATABASES_TO_EXCLUDE` 18 | 19 | 20 | That variable determines which databases, if any, you want to exclude from the extraction. 21 | 22 | **It is required to use a user with administrative privileges (DBA)** and to run on a production-like environment with up-to-date statistics. 23 | 24 | 25 | 2 - After modifying it, run the `create_ddls.sh` file from the command line to execute the extract. The output files will be created in the directory `/object_extracts/DDL`. 26 | 27 | 3 - Run `create_ddls.sh --version` to check the current version of the extraction scripts. A short end-to-end example is shown after the DDL Files section below. 28 | 29 | ## **For Windows:** 30 | 31 | 1 - Modify `create_ddls.ps1`, located in the `bin` folder. Using a text editor, modify the following parameters: 32 | 33 | * `DATABASES_TO_EXCLUDE` 34 | 35 | That variable determines which databases, if any, you want to exclude from the extraction. 36 | 37 | **It is required to use a user with administrative privileges (DBA)** and to run on a production-like environment with up-to-date statistics. 38 | 39 | 40 | 2 - After modifying it, run the `create_ddls.ps1` file from the command line to execute the extract. The output files will be created in the directory `/object_extracts/DDL`. 41 | 42 | 43 | 44 | ### DDL Files 45 | For each database, a folder named after the database will be generated with a file called `DDL_All.sql` containing the definitions of the objects in that database.
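As a concrete illustration of the Linux/Unix flow above, here is a minimal, hypothetical walk-through; the excluded database names are examples only, and the variable format follows the comments inside `bin/create_ddls.sh`:
```
# Hypothetical example: exclude databases TESTDB and SAMPLE, then run the extraction.
# In bin/create_ddls.sh, set (any valid regular expression is accepted):
#   DATABASES_TO_EXCLUDE="TESTDB|SAMPLE"

cd bin
./create_ddls.sh --version   # confirm the script version first
./create_ddls.sh             # run the extraction

# Expected output layout (one folder per extracted database):
#   ../object_extracts/DDL/<database_name>/DDL_All.sql
#   ../object_extracts/Reports/<database_name>/volumetrics_per_object.txt
```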
46 | 47 | ### Reports 48 | 49 | For each database some volumetrics reports will be created: 50 | 51 | - `volumetrics_per_object.txt` 52 | - `volumetrics_per_database.txt` 53 | - `db_size.txt` 54 | 55 | ## Reporting issues and feedback 56 | 57 | If you encounter any bugs with the tool please file an issue in the 58 | [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of our GitHub repo. 59 | 60 | ## License 61 | 62 | These scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/DB2/License.txt). 63 | -------------------------------------------------------------------------------- /DB2/bin/create_ddls.ps1: -------------------------------------------------------------------------------- 1 | echo "DB2 DDL Export script" 2 | echo "Getting list of databases" 3 | $OUTPUTDIR="../object_extracts" 4 | ### Get List of Database 5 | 6 | ## You can modify this variable to exclude some databases: 7 | ## For example if you want to exclude database TESTDB just set: 8 | ## DATABASES_TO_EXCLUDE=@("TESTDB") 9 | ## If you want to exclude database TESTDB and database SAMPLE just set: 10 | ## DATABASES_TO_EXCLUDE=@("TESTDB","SAMPLE") 11 | ## You can use regular any valid regular expression as a pattern to exclude the databases to exclude 12 | $DATABASES_TO_EXCLUDE=@() 13 | ## DB Reports 14 | $SCHEMA_FILTER="%" 15 | 16 | $DDLS="$OUTPUTDIR/DDL" 17 | $REPORTS="$OUTPUTDIR/Reports" 18 | 19 | IF (-Not (Test-Path "$DDLS")) { mkdir -p "$DDLS" } 20 | IF (-Not (Test-Path "$REPORTS")) { mkdir -p $REPORTS } 21 | ## Get list of databases 22 | $lines=(db2 list db directory) | ForEach { "$_"} 23 | $DBS=$lines|where{$_ -match "Database alias"} | Foreach { $_.Split("=")[1].Trim() } | where { $_ -notin $DATABASES_TO_EXCLUDE } 24 | echo "Output Directory: $OUTPUTDIR" 25 | Foreach ($db in $DBS) 26 | { 27 | IF (-Not (Test-Path "$DDLS/$db")) { mkdir -p "$DDLS/$db" } 28 | IF (-Not (Test-Path "$REPORTS/$db")) { mkdir -p "$REPORTS/$db" } 29 | 30 | echo "Processing Database $db" 31 | db2look -d $db -e -l > "$DDLS/$db/DDL_All.sql" 32 | 33 | ## Get REPORTS 34 | ## Get table volumetrics 35 | db2 "connect to $db" 36 | 37 | db2 "SELECT SUBSTR(TABSCHEMA,1,10) AS SCHEMA, SUBSTR(TABNAME,1,15) AS TABNAME, 38 | INT(DATA_OBJECT_P_SIZE) AS OBJ_SZ_KB, 39 | INT(INDEX_OBJECT_P_SIZE) AS INX_SZ_KB, 40 | INT(XML_OBJECT_P_SIZE) AS XML_SZ_KB 41 | FROM SYSIBMADM.ADMINTABINFO 42 | WHERE TABSCHEMA LIKE '%' 43 | ORDER BY 3 DESC;" > "$REPORTS/$db/volumetrics_per_object.txt" 44 | 45 | db2 "SELECT SUBSTR(TABSCHEMA,1,10) AS SCHEMA, 46 | SUM(DATA_OBJECT_P_SIZE) AS OBJ_SZ_KB, 47 | SUM(INDEX_OBJECT_P_SIZE) AS INX_SZ_KB, 48 | SUM(XML_OBJECT_P_SIZE) AS XML_SZ_KB 49 | FROM SYSIBMADM.ADMINTABINFO 50 | GROUP BY TABSCHEMA 51 | ORDER BY 2 DESC;" > "$REPORTS/$db/volumetrics_per_database.txt" 52 | 53 | ### DATABASE SIZE 54 | 55 | db2 "CALL GET_DBSIZE_INFO(?,?,?,-1)" > "$REPORTS/$db/db_size.txt" 56 | 57 | } 58 | 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /DB2/bin/create_ddls.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #This version should match the README.md version. Please update this version on every change request. 
4 | VERSION="Release 2024-02-28" 5 | 6 | export versionParam=$1 7 | 8 | if [ "$versionParam" = "--version" ]; then 9 | echo "You are using the $VERSION of the extraction scripts" 10 | exit 1 11 | fi 12 | 13 | echo "DB2 DDL Export script" 14 | echo "Getting list of databases" 15 | OUTPUTDIR="../object_extracts" 16 | ### Get List of Database 17 | 18 | ## You can modify this variable to exclude some databases: 19 | ## For example if you want to exclude database TESTDB just set: 20 | ## DATABASES_TO_EXCLUDE="TESTDB" 21 | ## If you want to exclude database TESTDB and database SAMPLE just set: 22 | ## DATABASES_TO_EXCLUDE="TESTDB|SAMPLE" 23 | ## You can use regular any valid regular expression as a pattern to exclude the databases to exclude 24 | DATABASES_TO_EXCLUDE="XXXXXXX" 25 | 26 | ## DB Reports 27 | SCHEMA_FILTER="%" 28 | 29 | DDLS="$OUTPUTDIR/DDL" 30 | REPORTS="$OUTPUTDIR/Reports" 31 | mkdir -p $DDLS 32 | mkdir -p $REPORTS 33 | DBS=$( db2 list db directory | grep Indirect -B 5 |grep "Database alias" |awk {'print $4'} |sort -u | uniq 2>/dev/null | grep -v -E $DATABASES_TO_EXCLUDE) 34 | for db in $DBS 35 | do 36 | mkdir -p "$DDLS/$db" 37 | mkdir -p "$REPORTS/$db" 38 | echo "Processing Database $db" 39 | db2look -d $db -e -l > "$DDLS/$db/DDL_All.sql" 40 | 41 | ## Get REPORTS 42 | ## Get table volumetrics 43 | db2 "connect to $db" 44 | 45 | db2 "SELECT SUBSTR(TABSCHEMA,1,10) AS SCHEMA, SUBSTR(TABNAME,1,15) AS TABNAME, 46 | INT(DATA_OBJECT_P_SIZE) AS OBJ_SZ_KB, 47 | INT(INDEX_OBJECT_P_SIZE) AS INX_SZ_KB, 48 | INT(XML_OBJECT_P_SIZE) AS XML_SZ_KB 49 | FROM SYSIBMADM.ADMINTABINFO 50 | WHERE TABSCHEMA LIKE '%' 51 | ORDER BY 3 DESC;" > "$REPORTS/$db/volumetrics_per_object.txt" 52 | 53 | db2 "SELECT SUBSTR(TABSCHEMA,1,10) AS SCHEMA, 54 | SUM(DATA_OBJECT_P_SIZE) AS OBJ_SZ_KB, 55 | SUM(INDEX_OBJECT_P_SIZE) AS INX_SZ_KB, 56 | SUM(XML_OBJECT_P_SIZE) AS XML_SZ_KB 57 | FROM SYSIBMADM.ADMINTABINFO 58 | GROUP BY TABSCHEMA 59 | ORDER BY 2 DESC;" > "$REPORTS/$db/volumetrics_per_database.txt" 60 | 61 | ### DATABASE SIZE 62 | 63 | db2 "CALL GET_DBSIZE_INFO(?,?,?,-1)" > "$REPORTS/$db/db_size.txt" 64 | 65 | done 66 | 67 | 68 | 69 | 70 | -------------------------------------------------------------------------------- /Databricks/Download_jobs_sources.dbc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Databricks/Download_jobs_sources.dbc -------------------------------------------------------------------------------- /Databricks/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Databricks/README.md: -------------------------------------------------------------------------------- 1 | # Databricks Export Scripts 2 | 3 | This repository provides some simple scripts to help exporting your Databricks code. 4 | 5 | ## Version 6 | 7 | Version 1.0 8 | Release 2023-01-23 9 | 10 | ## Overview 11 | 12 | There are several ways to extract your code from Databricks. We provide several alternatives; choose the one that works best in your scenario. 13 | 14 | 15 | ## Extracting with the DBX Command Line Tool 16 | 17 | The Databricks command line tool can be used to export your application code. 18 | 1. Open a terminal. 19 | 2. Install the databricks command line tool. Follow the instructions in the [documentation](https://docs.databricks.com/dev-tools/cli/index.html). In most cases just running `pip install databricks-cli` should be enough. 20 | 3. From the terminal, set up a pair of environment variables: 21 | It should be something like this: 22 | ``` 23 | $ export DATABRICKS_HOST=https://*************.azuredatabricks.net/ 24 | $ export DATABRICKS_TOKEN=************************************ 25 | ``` 26 | Adjust those variables to point to your Databricks account. 27 | 4. You can then run: 28 | ``` 29 | databricks workspace list 30 | ``` 31 | which will show a listing like: 32 | ``` 33 | Users 34 | Shared 35 | Repos 36 | ``` 37 | 5. Export a workspace. For example, to export the whole `Shared` workspace you can run: 38 | ``` 39 | $ databricks workspace export_dir "/Shared" "~/exportSharedWorkspace" -o 40 | ``` 41 | 6. Zip the folder. 42 | ``` 43 | zip -r exportedWorkspace.zip ~/exportSharedWorkspace 44 | ``` 45 | A consolidated sketch of these CLI steps appears near the end of this README, just before the License section. 46 | ## Extracting with a DBX Notebook 47 | 48 | This is another alternative for extracting your source code. It produces a ZIP file of source code (notebooks or Python sources) in your repo or DBFS, with two choices: the first includes only sources from the top time-consuming jobs, and the second includes all source files from the running jobs. That means interactive notebooks that are not scheduled to run won't be exported. 49 | 50 | 1. Open your Databricks workspace and create a new notebook. 51 | 2. Open the File menu and click Import. 52 | 3. Select URL and paste ***https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/Databricks/Download_jobs_sources.dbc*** 53 | 4. Follow the notebook instructions. 54 | 1. Install the Databricks PyPI dependency. 55 | 2. Update the configuration according to your cluster (host_url and token). ***We advise against using the token directly in the notebook. Please store it in a secret scope, using the Databricks CLI***. For more details see [Databricks Authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) 56 | 57 | ![Exporting Jobs sources](./images/notebook_export_source_codes.png) 58 | 59 | ## Reporting issues and feedback 60 | 61 | If you encounter any bugs with the tool please file an issue in the 62 | [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of our GitHub repo.
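Putting the CLI steps above together, the following is a minimal end-to-end sketch rather than an official script: it assumes the `databricks` CLI is installed and authenticated through the `DATABRICKS_HOST`/`DATABRICKS_TOKEN` variables shown earlier, and the workspace paths and output names are illustrative only.
```
#!/bin/bash
# Hypothetical consolidation of the CLI steps above; adjust paths and names as needed.
set -euo pipefail

export DATABRICKS_HOST="https://<your-instance>.azuredatabricks.net/"   # replace with your workspace URL
export DATABRICKS_TOKEN="<your-personal-access-token>"                  # replace with your token

OUT_DIR=~/databricks_export
mkdir -p "$OUT_DIR"

# Export each top-level workspace folder reported by `databricks workspace list`.
for ws in /Users /Shared /Repos; do
    databricks workspace export_dir "$ws" "$OUT_DIR$ws" -o
done

# Package everything into a single archive for hand-off.
zip -r databricks_export.zip "$OUT_DIR"
```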
63 | 64 | ## License 65 | 66 | These scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/Databricks/License.txt). 67 | -------------------------------------------------------------------------------- /Databricks/images/notebook_export_source_codes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Databricks/images/notebook_export_source_codes.png -------------------------------------------------------------------------------- /Hive/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Hive/README.md: -------------------------------------------------------------------------------- 1 | # Hive DDL Export Script 2 | 3 | This repository provides scripts to help exporting Hive DDL so it can be migrated to [Snowflake](https://www.snowflake.com/). Hive versions 4.0 and above, as well as versions below 4.0, are supported. 4 | ## Version 5 | 6 | Release 2025-04-15 7 | 8 | ## Usage 9 | 10 | Extracts all table and view DDL in the specified database, wildcard match, or all databases on the system (default). Beeline is used by default to create a JDBC connection to Hive. No data is extracted. There are no third-party binary packages installed or used. 11 | 12 | This script can be executed in Linux or Unix from the command line. 13 | 14 | >**Important:** Extraction can take time. Databases in scope of migration should have DDL extracted only. If databases contain many objects or there are many databases, the process should be broken up into sets of databases using a wildcard or individual database extraction. 15 | 16 | ### 1. Environment Configuration 17 | 18 | Open `exp_ddl.sh` in a text editor and navigate to the "ENVIRONMENT CONFIGURATION" section starting on or around line 17. 19 | 20 | 1. Update `HOST` to match the host name of the server where Hive is running and will be used make a JDBC connection. 21 | 22 | Default: `localhost` 23 | 24 | 2. Update `PORT` to match the port number of the server where Hive is running and will be used to make a JDBC connection. 25 | 26 | Default: `10000` 27 | 28 | 3. 
Update `databasefilter` to explicitly name a single database or use a wildcard to match database names for a list of databases to extract DDL from. **The wildcard for Hive < 4.0 is `*` whereas the wildcard for >= 4.0 is `%`.** 29 | 30 | Default: `*` (all databases, supporting Hive < 4.0) 31 | 32 | 4. (Optional) Update `root` to a customer folder name where the output is stored. 33 | 34 | Default: `ddl_extract` in the folder where this script is executed 35 | 36 | ### 2. Hive Extraction Command Options 37 | 38 | By default, beeline CLI is used to create a JDBC connection. Alternatively the Hive CLI can be used. Open `exp_ddl.sh` in a text editor and navigate to the "HIVE EXTRACTION COMMAND OPTIONS" section starting on or around line 49. 39 | 1. Select use of `beeline` or `hive` by commenting with a `#` the undesired command and uncommenting the desired command. 40 | 41 | Default: `beeline` 42 | 43 | ### 3. Confirm extract script version 44 | 45 | Run `./exp_ddls.sh --version` from the command line and verify the version matches the release version at the top of this readme. 46 | 47 | ### 4. Start DDL extraction 48 | 49 | Run `./exp_ddl.sh` from the command line to execute the extract. The DDL files will be created in the current directory under `ddl_extract` subdirectory unless a different location was specified in the "Environment Configuration" section. 50 | 51 | ### 5. Share output 52 | 53 | After extracting DDL for all in-scope databases, send the extracted DDL SQL files and objects CSV files to your Snowflake representative for assessment and next steps. If you are not working with a Snowflake representative, skip this step. 54 | 55 | ## Reporting issues and feedback 56 | 57 | If you encounter any bugs with the script, first reach out to the Snowflake representative you are working with. If you are not working with a Snowflake representative, file an issue in the [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of the GitHub repository. 58 | 59 | ## License 60 | 61 | Export scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/Hive/License.txt). -------------------------------------------------------------------------------- /Hive/exp_ddl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # -------------------------------------------------------------------------------------------------------------------- 4 | # SCRIPT VERSION 5 | # This version should match the README.md version. Customization not required for this section. Do NOT make 6 | # changes unless there is a extraction error due to unique system configuration. 7 | # -------------------------------------------------------------------------------------------------------------------- 8 | VERSION="Release 2025-04-15" 9 | 10 | export versionParam=$1 11 | 12 | if [ "$versionParam" = "--version" ]; then 13 | echo "You are using the $VERSION of the extraction scripts" 14 | exit 1 15 | fi 16 | 17 | # -------------------------------------------------------------------------------------------------------------------- 18 | # ENVIRONMENT CUSTOMIZATION 19 | # HOST 20 | # The host name of the server where Hive is running used to make a JDBC connection. 21 | # 22 | # Default: localhost 23 | # 24 | # PORT 25 | # Port of the server where Hive is running used to make a JDBC connection. 26 | # 27 | # Default: 10000 28 | # 29 | # databasefilter 30 | # Hive database name to filter for DDL extraction. 
Hive <4.0 use * (asterisk) and Hive >=4.0 use % (percent) 31 | # for wildcard. May also be explicit database name or wildcard for all databases in system. 32 | # 33 | # For example: 34 | # Hive <4: "db*" or "*db*" or "my_db" (no wildcard) or * (all databases) 35 | # Hive >=4: "db%" or "%db%" or "my_db" (no wildcard) or % (all databases) 36 | # See https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-ShowDatabases 37 | # 38 | # Default: * (Hive <4.0 support) 39 | # 40 | # root 41 | # Name of folder to be created in the same path as the extraction script where output files will be written. 42 | # -------------------------------------------------------------------------------------------------------------------- 43 | 44 | HOST=localhost # Update as required 45 | PORT=10000 # Update as reuqired 46 | databasefilter="%" # Hive database name to filter for DDL extraction. Hive <4.0 use * and Hive >=4.0 use % wildcard 47 | root="ddl_extract" # Folder name created below where script executes to store output 48 | 49 | # -------------------------------------------------------------------------------------------------------------------- 50 | # HIVE EXTRACTION COMMAND OPTIONS 51 | # Beeline connection through JDBC is preferred. If beeline is not available, hive may be used directly from 52 | # the server. 53 | # -------------------------------------------------------------------------------------------------------------------- 54 | 55 | hivecmd="beeline -u jdbc:hive2://${HOST}:${PORT} --showHeader=false --outputformat=tsv2 -e " # Use beeline CLI (preferred) 56 | #hivecmd="hive -e" # Use hive CLI (fallback) 57 | 58 | # -------------------------------------------------------------------------------------------------------------------- 59 | # EXTRACTION ROUTINE 60 | # Customization not rueqired for this section. Do NOT make changes unless there is a extraction error due to 61 | # unique system configuration. 62 | # -------------------------------------------------------------------------------------------------------------------- 63 | 64 | current_time=$(date "+%Y%m%d%-H%-M%-S") 65 | csv="${root}/all_objects.${current_time}.csv" #master list of all tables/views found 66 | 67 | mkdir -p ${root} 68 | echo "database,object_name,object_type,size_in_bytes,hdfs_location,serde,inputformat,outputformat" >$csv 69 | 70 | set -f #turn off expansion for wildcard 71 | databases=$(${hivecmd} "show databases like '${databasefilter}';") 72 | set +f #turn on expansion for wildcard 73 | 74 | all_db_names=${databases} 75 | 76 | for db in $all_db_names 77 | do 78 | expfile=$root/${db}.sql 79 | 80 | tables=$(${hivecmd} "show tables in ${db};") 81 | all_tab_names=`echo "${tables}"` 82 | 83 | if [ ! -z "${all_tab_names}" ] 84 | then 85 | echo " " > $expfile 86 | echo " /**** Start DDLs for Tables in ${db} ****/ " >> $expfile 87 | fi 88 | 89 | for table in $all_tab_names 90 | do 91 | sql="show create table ${db}.${table};" 92 | echo " ====== Running SHOW CREATE TABLE Statement for $db.${table} ======= : " 93 | results=`${hivecmd} "use ${db}; $sql"` 94 | loc=$(echo "$results" | awk -F 'LOCATION' '{print $2}' | awk '{print $1;}' | awk -F '/' '{for (i=4; i> $expfile 112 | echo "" >> $expfile 113 | echo "${db},${table},${objtype},${size},${loc},${serde},${inputformat},${outputformat}" >>$csv 114 | done 115 | 116 | if [ ! 
-z "${all_tab_names}" ] 117 | then 118 | echo " /**** End DDLs for Tables in ${db} ****/ " >> $expfile 119 | fi 120 | done -------------------------------------------------------------------------------- /LEGAL.md: -------------------------------------------------------------------------------- 1 | This application is not part of the Snowflake Service and is governed by the terms in LICENSE, unless expressly agreed to in writing. You use this application at your own risk, and Snowflake has no obligation to support your use of this application. 2 | -------------------------------------------------------------------------------- /Netezza/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Oracle/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Oracle/bin/create_ddls.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | REM GENERAL INSTRUCTIONS: This script is used to extract object DDL from your Oracle Database. Please adjust the variables below 3 | REM to match your environment. 
Once completed, your extracted DDL code will be stored in the object_extracts folder. 4 | 5 | 6 | SET ORACLE_SID=ORCL 7 | 8 | SET CONNECT_STRING="system/System123" 9 | 10 | SET SCRIPT_PATH="\\Mac\Home\Documents\Workspace\SC.DDLExportScripts\Oracle" 11 | 12 | SET OUTPUT_PATH=%SCRIPT_PATH% 13 | 14 | 15 | if not exist %SCRIPT_PATH% ( 16 | echo "The script_path path does not exist." 17 | EXIT /b 18 | ) 19 | 20 | REM Path to where object extracts are written 21 | 22 | mkdir %OUTPUT_PATH%\object_extracts 23 | mkdir %OUTPUT_PATH%\object_extracts\DDL 24 | mkdir %OUTPUT_PATH%\object_extracts\STORAGE 25 | cd . > %OUTPUT_PATH%\object_extracts\DDL\.sc_extracted 26 | 27 | if not exist %OUTPUT_PATH% ( 28 | echo "The output path does not exist." 29 | EXIT /b 30 | ) 31 | 32 | REM Modify the operator and condition for the Oracle schemas to explicity include. 33 | REM By default all schemas, other than system schemas, will be included. 34 | REM Use uppercase names. Do not remove the parentheses or double quotes. 35 | SET INCLUDE_OPERATOR="LIKE" 36 | SET INCLUDE_CONDITION="('%%')" 37 | 38 | REM Modify the operator and condition for the Oracle schemas to explicity exclude. 39 | REM Not necessary to modify this if you are using the above section to explicity include only certain schemas. 40 | REM Use uppercase names. Do not remove the parentheses or double quotes. 41 | SET EXCLUDE_OPERATOR="IN" 42 | SET EXCLUDE_CONDITION="('XXX')" 43 | 44 | set FILE_NAME=create_ddls_plus.sql 45 | set FULL_PATH=%SCRIPT_PATH%\%file_name% 46 | 47 | @echo on 48 | sqlplus %CONNECT_STRING% @%FULL_PATH% %INCLUDE_OPERATOR% %INCLUDE_CONDITION% %EXCLUDE_OPERATOR% %EXCLUDE_CONDITION% %OUTPUT_PATH% -------------------------------------------------------------------------------- /Oracle/bin/create_ddls.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | #GENERAL INSTRUCTIONS: This script is used to extract object DDL from your Oracle Database. Please adjust the variables below 3 | # to match your environment. Once completed, your extracted DDL code will be stored in the object_extracts folder. 4 | 5 | 6 | #Version 2024-02-28: Added flag to display version. Update output text with more detailed information about the execution. 7 | #Version 2024-03-14 (v0.0.88): Updated version flag to display correct version. 8 | 9 | #This version should match the README.md version. Please update this version on every change request. 10 | VERSION="0.0.89" 11 | 12 | export versionParam=$1 13 | 14 | if [ "$versionParam" = "--version" ]; then 15 | echo "You are using the version $VERSION of the extraction scripts" 16 | exit 1 17 | fi 18 | 19 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Execute Oracle extraction scripts: Started" 20 | 21 | export ORACLE_SID= 22 | export CONNECT_STRING=system/oracle 23 | export SCRIPT_PATH= 24 | export SQLCL_PATH= 25 | # Default value is the #SCRIPT_PATH folder, You can change the output directory here! 26 | export OUTPUT_PATH=$SCRIPT_PATH 27 | 28 | 29 | if [ ! -e "$SCRIPT_PATH" ]; then 30 | echo "The script path does not exist." 31 | exit 1 32 | fi 33 | 34 | if [ ! -e "$SQLCL_PATH" ]; then 35 | echo "The sqlcl path does not exist." 
36 | exit 1 37 | fi 38 | 39 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 1/4 - Creating Directories: Started" 40 | 41 | #Path to where object extracts are written 42 | mkdir -p $OUTPUT_PATH/object_extracts 43 | mkdir -p $OUTPUT_PATH/object_extracts/DDL 44 | mkdir -p $OUTPUT_PATH/object_extracts/STORAGE 45 | touch -- "${OUTPUT_PATH}/object_extracts/DDL/.sc_extracted" 46 | 47 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 1/4 - Creating Directories: Completed" 48 | 49 | 50 | if [ ! -e "$OUTPUT_PATH" ]; then 51 | echo "The output path does not exist." 52 | exit 1 53 | fi 54 | 55 | # Modify the operator and condition for the Oracle schemas to explicity INCLUDE. 56 | # By default all schemas, other than system schemas, will be included. 57 | # Use uppercase names. Do not remove the parentheses or double quotes. 58 | export INCLUDE_OPERATOR=LIKE 59 | export INCLUDE_CONDITION="('%')" 60 | 61 | # Modify the operator and condition for the Oracle schemas to explicity EXCLUDE. 62 | # By default all schemas, other than system schemas, will be included. 63 | # Use uppercase names. Do not remove the parentheses or double quotes. 64 | export EXCLUDE_OPERATOR=IN 65 | export EXCLUDE_CONDITION="('SYSMAN')" 66 | 67 | # Modify this JAVA variable to asign less or more memory to the JVM 68 | # export JAVA_TOOL_OPTIONS=-Xmx4G 69 | 70 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 2/4 - Extracting DDLs: Started" 71 | 72 | $SQLCL_PATH/sql $CONNECT_STRING @$SCRIPT_PATH/create_ddls.sql $INCLUDE_OPERATOR $INCLUDE_CONDITION $EXCLUDE_OPERATOR $EXCLUDE_CONDITION $OUTPUT_PATH 73 | -------------------------------------------------------------------------------- /Oracle/bin/create_ddls_plus.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #GENERAL INSTRUCTIONS: This script is used to extract object DDL from your Oracle Database. Please adjust the variables below 4 | # to match your environment. Once completed, your extracted DDL code will be stored in the object_extracts folder. 5 | 6 | export ORACLE_SID= 7 | export CONNECT_STRING=system/oracle 8 | export SCRIPT_PATH= 9 | # Default value is the #SCRIPT_PATH folder, You can change the output directory here! 10 | export OUTPUT_PATH=$SCRIPT_PATH 11 | 12 | 13 | if [ ! -e "$SCRIPT_PATH" ]; then 14 | echo "The script path does not exist." 15 | exit 1 16 | fi 17 | 18 | #Path to where object extracts are written 19 | mkdir -p $OUTPUT_PATH/object_extracts 20 | mkdir -p $OUTPUT_PATH/object_extracts/DDL 21 | mkdir -p $OUTPUT_PATH/object_extracts/STORAGE 22 | 23 | if [ ! -e "$OUTPUT_PATH" ]; then 24 | echo "The output path does not exist." 25 | exit 1 26 | fi 27 | 28 | # Modify the operator and condition for the Oracle schemas to explicity INCLUDE. 29 | # By default all schemas, other than system schemas, will be included. 30 | # Use uppercase names. Do not remove the parentheses or double quotes. 31 | export INCLUDE_OPERATOR=LIKE 32 | export INCLUDE_CONDITION="('%')" 33 | 34 | # Modify the operator and condition for the Oracle schemas to explicity EXCLUDE. 35 | # By default all schemas, other than system schemas, will be included. 36 | # Use uppercase names. Do not remove the parentheses or double quotes. 
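# Illustrative example only (the schema names below are assumptions, not values shipped with this script):
#   export EXCLUDE_OPERATOR=IN
#   export EXCLUDE_CONDITION="('SYSMAN','APEX_040200','ORDSYS')"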
37 | export EXCLUDE_OPERATOR=IN 38 | export EXCLUDE_CONDITION="('SYSMAN')" 39 | 40 | # Modify this JAVA variable to asign less or more memory to the JVM 41 | # export JAVA_TOOL_OPTIONS=-Xmx4G 42 | 43 | sqlplus $CONNECT_STRING @$SCRIPT_PATH/create_ddls_plus.sql $INCLUDE_OPERATOR $INCLUDE_CONDITION $EXCLUDE_OPERATOR $EXCLUDE_CONDITION $OUTPUT_PATH -------------------------------------------------------------------------------- /Oracle/setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal =1 3 | 4 | [metadata] 5 | name = snowconvert-export-oracle 6 | version = 0.0.10 7 | author = Mauricio Rojas 8 | author_email = mauricio.rojas@mobilize.net 9 | description = Mobilize.Net Oracle Export Tool for SnowConvert 10 | long_description = file: README.rst 11 | long_description_content_type = text/x-rst 12 | license_file = LICENSE.txt 13 | 14 | python_requires='>=3' 15 | 16 | url = https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/Oracle 17 | project_urls = 18 | Bug Tracker = https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues 19 | classifiers = 20 | Development Status :: 3 - Alpha 21 | Intended Audience :: Developers 22 | Intended Audience :: System Administrators 23 | Programming Language :: Python 24 | Programming Language :: Python :: 3 25 | Programming Language :: Python :: 3.4 26 | Programming Language :: Python :: 3.5 27 | Programming Language :: Python :: 3.6 28 | License :: OSI Approved :: MIT License 29 | 30 | [options] 31 | scripts= 32 | sc-oracle-export 33 | 34 | [flake8] 35 | ignore = 36 | # E501: line too long. 37 | E501, 38 | # F401, imported but unused, ignore where we import setup. 39 | F401, 40 | # E402 module level import not at top of file. 41 | # To maintain py2 - 3 compat certain orders of import is necessary. 
42 | E402 -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Database Export Scripts Collection for SnowConvert 2 | 3 | 4 | ## Overview 5 | 6 | This repository contains some utility scripts that can be used for exporting code to be migrated with 7 | the [SnowConvert tool](https://docs.snowconvert.com/sc/) 8 | 9 | - [Export Scripts for Teradata](https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/Teradata) 10 | - [Export Scripts for SQLServer](https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/SQLServer) 11 | - [Export Scripts for Oracle](https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/Oracle) 12 | - [Export Scripts for Redshift](https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/Redshift) 13 | - [Export Scripts for Netezza](https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/Netezza) 14 | - [Export Scripts for Vertica](https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/Vertica) 15 | - [Export Scripts for DB2](https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/DB2) 16 | - [Export Scripts for Hive](https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/Hive) 17 | - [Export Scripts for BigQuery](https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/BigQuery) 18 | - [Export Scripts for Databricks](https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/Databricks) 19 | -------------------------------------------------------------------------------- /Redshift/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Redshift/bin/create_ddls.ps1: -------------------------------------------------------------------------------- 1 | 2 | # GENERAL INSTRUCTIONS: This script is used to extract object DDL from your RedShift Cluster. Please adjust the variables with enclosed by <> 3 | # below to match your environment. Once completed, your extracted DDL code will be stored in the object_extracts folder. 
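# Illustrative example of filled-in values (the output path, cluster, database, and secret ARN below are
# assumptions, not defaults of this script):
#   $OUTPUT_PATH="C:\redshift_extract"
#   $RS_CLUSTER="my-redshift-cluster"
#   $RS_DATABASE="dev"
#   $RS_SECRET_ARN="arn:aws:secretsmanager:us-east-1:123456789012:secret:my-redshift-secret"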
4 | 5 | # ---- Variables to change ---- 6 | 7 | # General Variables 8 | $OUTPUT_PATH="" 9 | 10 | if ($OUTPUT_PATH -match '(?:\\|\/)*$') 11 | { 12 | # Remove trailing slashes 13 | $OUTPUT_PATH = $OUTPUT_PATH -replace '(?:\\|\/)*$', '' 14 | } 15 | 16 | # AWS RedShift Variables 17 | $RS_CLUSTER="" 18 | $RS_DATABASE="" 19 | $RS_SECRET_ARN="" 20 | 21 | # Script Variables 22 | $SCHEMA_FILTER="lower(schemaname) LIKE '%'" 23 | $MAX_ITERATIONS=60 #Every iteration waits 5 seconds. Must be > 0. 24 | # ---- END: Variables to change ---- 25 | 26 | 27 | if($MAX_ITERATIONS -lt 0) 28 | { 29 | $MAX_ITERATIONS = 60 30 | Write-Output "Detected iterations less than 0. Setting to 60." 31 | } 32 | 33 | function Check-Command($cmdname) 34 | { 35 | return [bool](Get-Command -Name $cmdname -ErrorAction SilentlyContinue) 36 | } 37 | 38 | if (-not (Check-Command -cmdname "aws")) 39 | { 40 | Write-Output "AWS Cli not found. Please check this link on how to install: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" 41 | exit 42 | } 43 | 44 | Write-Output "Creating output folders..." 45 | 46 | $ddl_output = "$OUTPUT_PATH\object_extracts\DDL" 47 | $log_output = "$OUTPUT_PATH\log" 48 | 49 | ## Create directories 50 | New-Item -ItemType Directory -Force -Path $OUTPUT_PATH | Out-Null 51 | New-Item -ItemType Directory -Force -Path $log_output | Out-Null 52 | New-Item -ItemType Directory -Force -Path $OUTPUT_PATH\object_extracts | Out-Null 53 | New-Item -ItemType Directory -Force -Path $ddl_output | Out-Null 54 | New-Item -ItemType File -Force -Path $ddl_output\.sc_extracted | Out-Null 55 | 56 | ## Created log files and tracking variables 57 | Out-File -FilePath $log_output\log.txt -InputObject "--------------" -Append 58 | Out-File -FilePath $log_output\log.txt -InputObject "Starting new extraction" -Append 59 | Out-File -FilePath $log_output\log.txt -InputObject "Variables:" -Append 60 | Out-File -FilePath $log_output\log.txt -InputObject $OUTPUT_PATH -Append 61 | Out-File -FilePath $log_output\log.txt -InputObject $SCHEMA_FILTER -Append 62 | 63 | # Defined main variables 64 | Write-Output "Getting queries from files..." 65 | $queries = @{} # Hash to control queries execution 66 | $files = (Get-ChildItem -Path ../scripts/* -Include *.sql).Name # Get list of queries 67 | 68 | Write-Output "Sending queries to execute..." 69 | foreach ( $file in $files) 70 | { 71 | $query = Get-Content ..\scripts/$file -Raw 72 | $query = $query.replace('{schema_filter}', $SCHEMA_FILTER) 73 | # Execute queries on Resdshift 74 | $response = aws redshift-data execute-statement --cluster-identifier $RS_CLUSTER --database $RS_DATABASE --secret-arn $RS_SECRET_ARN --sql "$query" | ConvertFrom-Json 75 | $queries[$file] = $response.Id 76 | } 77 | 78 | Write-Output "Waiting 20 seconds for queries to finish..." 79 | Start-Sleep -Seconds 20 80 | 81 | Write-Output "Starting query validation and extraction iterations..." 82 | $i = 0 83 | while($i -ne $MAX_ITERATIONS) 84 | { 85 | $i++ 86 | if($queries.keys.count -ne 0) 87 | { 88 | # List to remove queries from Hash for next iteration when finished 89 | $to_remove = [System.Collections.Generic.List[string]]::new() 90 | foreach( $query in $queries.keys ) 91 | { 92 | $id = $queries[$query] 93 | Write-Output "Validating completion for query $query..." 94 | # Get statement state 95 | $response = aws redshift-data describe-statement --id $id | ConvertFrom-Json 96 | if ($response.Status -eq "FINISHED") 97 | { 98 | Write-Output "Query finished, starting extraction..." 
99 | # Get statement results when finished 100 | $results_response = aws redshift-data get-statement-result --id $id | ConvertFrom-Json 101 | $data = $results_response.Records 102 | Out-File -FilePath $ddl_output\$query -InputObject "" -Encoding utf8 103 | $strings_data = [System.Collections.Generic.List[string]]::new() 104 | $data | ForEach-Object { $strings_data.Add($PSItem.stringValue) } 105 | Out-File -FilePath $ddl_output\$query -InputObject $strings_data -Append -Encoding utf8 106 | $to_remove.Add($query) 107 | } elseif ($response.Status -eq "FAILED") { 108 | Write-Output "Query failed... Error message:" 109 | Write-Output $response.Error 110 | # Save error to log 111 | Out-File -FilePath $log_output\log.txt -InputObject "Failed query:" -Append 112 | Out-File -FilePath $log_output\log.txt -InputObject $query -Append 113 | Out-File -FilePath $log_output\log.txt -InputObject $id -Append 114 | Out-File -FilePath $log_output\log.txt -InputObject $response.Error -Append 115 | $to_remove.Add($query) 116 | } else { 117 | Write-Output "Query still pending. Validating again in some seconds." 118 | } 119 | } 120 | foreach($query in $to_remove) 121 | { 122 | $queries.Remove($query) 123 | } 124 | } else { 125 | break 126 | } 127 | # Wait before continuing with next iteration 128 | Start-Sleep -Seconds 5 129 | } 130 | 131 | if($queries.keys.count -gt 0) 132 | { 133 | Write-Output "Not all queries have finished. Consider increasing iterations value to increase timeout." 134 | } else 135 | { 136 | Write-Output "Finished extracting RedShift DDL. Please check for output in the specified folder." 137 | } 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | -------------------------------------------------------------------------------- /Redshift/bin/create_ddls.sh: -------------------------------------------------------------------------------- 1 | # GENERAL INSTRUCTIONS: This script is used to extract object DDL from your RedShift Cluster. Please adjust the variables with enclosed by <> 2 | # below to match your environment. Once completed, your extracted DDL code will be stored in the object_extracts folder. 3 | 4 | # ---- Variables to change ---- 5 | 6 | # General Variables 7 | OUTPUT_PATH="/example/path" 8 | 9 | # AWS RedShift Variables 10 | RS_CLUSTER="" 11 | RS_DATABASE="" 12 | RS_SECRET_ARN="" 13 | 14 | #Script Variables 15 | SCHEMA_FILTER="lower(schemaname) LIKE '%'" 16 | MAX_ITERATIONS=60 #Every iteration waits 5 seconds. Must be > 0. 17 | # ---- END: Variables to change ---- 18 | 19 | OUTPUT_PATH="${OUTPUT_PATH/%\//}" 20 | 21 | # Validate if max iterations value is valid 22 | if [ $MAX_ITERATIONS -lt 0 ] 23 | then 24 | MAX_ITERATIONS=60 25 | echo "Detected iterations less than 0. Setting to 60." 26 | fi 27 | 28 | # Check if AWS Cli exists 29 | hash aws &> /dev/null 30 | if [ $? -eq 1 ]; then 31 | echo >&2 "AWS Cli not found. Please check this link on how to install: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" 32 | exit 1 33 | fi 34 | 35 | echo "Creating output folders..." 
36 | 37 | ddl_output=$OUTPUT_PATH/object_extracts/DDL 38 | log_output=$OUTPUT_PATH/log 39 | temp_output=$OUTPUT_PATH/temp 40 | 41 | mkdir -p "$ddl_output" 42 | mkdir -p "$log_output" 43 | mkdir -p "$temp_output" 44 | mkdir -p "$OUTPUT_PATH/object_extracts" 45 | mkdir -p "$OUTPUT_PATH/object_extracts/DDL" 46 | touch -- "${OUTPUT_PATH}/object_extracts/DDL/.sc_extracted" 47 | 48 | # Create log files and tracking variables 49 | echo "--------------" >> "$log_output/log.txt" 50 | echo "Starting new extraction" >> "$log_output/log.txt" 51 | echo "Variables:" >> "$log_output/log.txt" 52 | echo "$OUTPUT_PATH" >> "$log_output/log.txt" 53 | echo "$SCHEMA_FILTER" >> "$log_output/log.txt" 54 | 55 | # Define main variables 56 | cd ../scripts/ 57 | echo "Getting queries from files..." 58 | files=$(ls *.sql) 59 | declare -a queries 60 | i=0 61 | 62 | echo "Sending queries to execute..." 63 | for f in $files 64 | do 65 | # Read queries from scripts folder 66 | query=$(<$f) 67 | # Replace {schema_filter} in the query template 68 | final_query="${query/\{schema_filter\}/$SCHEMA_FILTER}" 69 | # Execute query 70 | response=$(aws redshift-data execute-statement --cluster-identifier $RS_CLUSTER --database $RS_DATABASE --secret-arn $RS_SECRET_ARN --sql "$final_query" --output yaml 2>&1) 71 | if [ $? -ne 0 ] 72 | then 73 | # Log and print if there is an error 74 | echo $response | tee -a "$log_output/log.txt" 75 | else 76 | # Extract Id from response 77 | re="Id: ([[:xdigit:]]{8}(-[[:xdigit:]]{4}){3}-[[:xdigit:]]{12})" 78 | [[ $response =~ $re ]] && queries[$i]="$f=${BASH_REMATCH[1]}" 79 | i=$((i+1)) 80 | fi 81 | done 82 | 83 | if [ ${#queries[@]} -eq 0 ] 84 | then 85 | echo "Unable to send queries to execute. Please make sure that the connection to AWS is properly configured and that the connection parameters are correct." 86 | exit 1 87 | fi 88 | 89 | echo "Waiting 20 seconds for queries to finish..." 90 | sleep 20 91 | 92 | echo "Starting query validation and extraction iterations..." 93 | i=0 94 | while [ $i -ne $MAX_ITERATIONS ] 95 | do 96 | i=$((i+1)) 97 | if [ ${#queries[@]} -ne 0 ] 98 | then 99 | # List to remove queries from queries list for next iteration when finished 100 | to_remove=() 101 | for query in "${queries[@]}" 102 | do 103 | # Split value from array 104 | IFS='=' 105 | read -ra parts <<< "$query" 106 | echo "Validating completion for query ${parts[0]}..." 107 | statement_response=$(aws redshift-data describe-statement --id ${parts[1]} --output yaml) 108 | # Get statement status 109 | re="Status: ([a-zA-Z]*)" 110 | [[ $statement_response =~ $re ]] && status="${BASH_REMATCH[1]}" 111 | if [ "$status" = "FINISHED" ] 112 | then 113 | echo "Query finished, starting extraction..." 114 | # Extract query result into file 115 | aws redshift-data get-statement-result --id ${parts[1]} --output text > "$temp_output/${parts[0]}" 116 | # Clean output (remove first 2 lines and prefix for RECORDS keyword 117 | sed -e 1,2d "$temp_output/${parts[0]}" > "$ddl_output/${parts[0]}" 118 | perl -i -pe 's/^RECORDS\s//g' "$ddl_output/${parts[0]}" 119 | # Add query to the remove list 120 | to_remove+=("$query") 121 | elif [ "$status" = "FAILED" ] 122 | then 123 | echo "Query failed... 
Error message:" 124 | # Extract error messge from response 125 | error_re="Error: '(.*)'\\s+\\w+:" 126 | [[ $statement_response =~ $error_re ]] && error_msg="${BASH_REMATCH[1]}" 127 | # Save error to log 128 | echo "Failed query:" >> "$log_output/log.txt" 129 | echo "${parts[0]}" >> "$log_output/log.txt" 130 | echo "${parts[1]}" >> "$log_output/log.txt" 131 | echo "$error_msg" | tee -a "$log_output/log.txt" 132 | # Add query to the remove list 133 | to_remove+=("$query") 134 | else 135 | echo "Query still pending. Validating again in some seconds." 136 | fi 137 | done 138 | 139 | # Iteration to remove queries from queue when finished 140 | for ele in "${to_remove[@]}"; do 141 | for i in "${!queries[@]}"; do 142 | if [[ "${queries[i]}" = "$ele" ]]; then 143 | unset queries[i] 144 | fi 145 | done 146 | done 147 | 148 | # Wait 5 seconds to give some more time to queries to finish 149 | sleep 5 150 | 151 | else 152 | break 153 | fi 154 | done 155 | 156 | # Validate if there are queries pending 157 | if [ ${#queries[@]} -gt 0 ] 158 | then 159 | echo "Finished process, but not all queries finished due to timeout." >> "$log_output/log.txt" 160 | echo "Not all queries have finished. Consider increasing iterations value to increase timeout." 161 | else 162 | echo "Finished extracting Redshift DDL. Please check for output in the specified folder." 163 | fi -------------------------------------------------------------------------------- /Redshift/scripts/DDL_Function.sql: -------------------------------------------------------------------------------- 1 | WITH arguments 2 | AS ( 3 | SELECT oid 4 | , i 5 | , arg_name [i] AS argument_name 6 | , arg_types [i-1] argument_type 7 | FROM ( 8 | SELECT generate_series(1, arg_count) AS i 9 | , arg_name 10 | , arg_types 11 | , oid 12 | FROM ( 13 | SELECT oid 14 | , proargnames arg_name 15 | , proargtypes arg_types 16 | , pronargs arg_count 17 | FROM pg_proc 18 | WHERE proowner != 1 19 | ) t 20 | ) t 21 | ) 22 | SELECT 23 | ddl 24 | FROM 25 | ( 26 | SELECT 27 | schemaname 28 | , udfname 29 | , seq 30 | , trim(ddl) ddl 31 | FROM 32 | ( 33 | SELECT 34 | n.nspname AS schemaname 35 | , p.proname AS udfname 36 | , p.oid AS udfoid 37 | , 1 AS seq 38 | , ('\n/* ' || n.nspname || '.' || p.proname || ' */\n')::VARCHAR(max) AS ddl 39 | FROM pg_proc p 40 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 41 | JOIN pg_language l ON p.prolang = l.oid 42 | WHERE p.proowner != 1 43 | AND l.lanname <> 'plpgsql' 44 | 45 | UNION ALL 46 | 47 | SELECT n.nspname AS schemaname 48 | , p.proname AS udfname 49 | , p.oid AS udfoid 50 | , 1000 AS seq 51 | , ('CREATE OR REPLACE FUNCTION ' || QUOTE_IDENT(n.nspname) || '.' 
|| QUOTE_IDENT(p.proname) || ' \(')::VARCHAR(max) AS ddl 52 | FROM pg_proc p 53 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 54 | JOIN pg_language l ON p.prolang = l.oid 55 | WHERE p.proowner != 1 56 | AND l.lanname <> 'plpgsql' 57 | 58 | UNION ALL 59 | 60 | SELECT n.nspname AS schemaname 61 | , p.proname AS udfname 62 | , p.oid AS udfoid 63 | , 2000 + nvl(i, 0) AS seq 64 | , CASE 65 | WHEN i = 1 66 | THEN NVL(argument_name, '') || ' ' || format_type(argument_type, NULL) 67 | ELSE ',' || NVL(argument_name, '') || ' ' || format_type(argument_type, NULL) 68 | END AS ddl 69 | FROM pg_proc p 70 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 71 | LEFT JOIN arguments a ON a.oid = p.oid 72 | JOIN pg_language l ON p.prolang = l.oid 73 | WHERE p.proowner != 1 74 | AND l.lanname <> 'plpgsql' 75 | 76 | UNION ALL 77 | 78 | SELECT n.nspname AS schemaname 79 | , p.proname AS udfname 80 | , p.oid AS udfoid 81 | , 3000 AS seq 82 | , '\)' AS ddl 83 | FROM pg_proc p 84 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 85 | JOIN pg_language l ON p.prolang = l.oid 86 | WHERE p.proowner != 1 87 | AND l.lanname <> 'plpgsql' 88 | 89 | UNION ALL 90 | 91 | SELECT n.nspname AS schemaname 92 | , p.proname AS udfname 93 | , p.oid AS udfoid 94 | , 4000 AS seq 95 | , ' RETURNS ' || pg_catalog.format_type(p.prorettype, NULL) AS ddl 96 | FROM pg_proc p 97 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 98 | JOIN pg_language l ON p.prolang = l.oid 99 | WHERE p.proowner != 1 100 | AND l.lanname <> 'plpgsql' 101 | 102 | UNION ALL 103 | 104 | SELECT n.nspname AS schemaname 105 | , p.proname AS udfname 106 | , p.oid AS udfoid 107 | , 5000 AS seq 108 | , CASE 109 | WHEN p.provolatile = 'v' 110 | THEN 'VOLATILE' 111 | WHEN p.provolatile = 's' 112 | THEN 'STABLE' 113 | WHEN p.provolatile = 'i' 114 | THEN 'IMMUTABLE' 115 | ELSE '' 116 | END AS ddl 117 | FROM pg_proc p 118 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 119 | JOIN pg_language l ON p.prolang = l.oid 120 | WHERE p.proowner != 1 121 | AND l.lanname <> 'plpgsql' 122 | 123 | UNION ALL 124 | 125 | SELECT n.nspname AS schemaname 126 | , p.proname AS udfname 127 | , p.oid AS udfoid 128 | , 6000 AS seq 129 | , 'AS $$' AS ddl 130 | FROM pg_proc p 131 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 132 | JOIN pg_language l ON p.prolang = l.oid 133 | WHERE p.proowner != 1 134 | AND l.lanname <> 'plpgsql' 135 | 136 | UNION ALL 137 | 138 | SELECT n.nspname AS schemaname 139 | , p.proname AS udfname 140 | , p.oid AS udfoid 141 | , 7000 AS seq 142 | , p.prosrc AS DDL 143 | FROM pg_proc p 144 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 145 | JOIN pg_language l ON p.prolang = l.oid 146 | WHERE p.proowner != 1 147 | AND l.lanname <> 'plpgsql' 148 | 149 | UNION ALL 150 | 151 | SELECT n.nspname AS schemaname 152 | , p.proname AS udfname 153 | , p.oid AS udfoid 154 | , 8000 AS seq 155 | , '$$ LANGUAGE ' + lang.lanname + ';' AS ddl 156 | FROM pg_proc p 157 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 158 | LEFT JOIN ( 159 | SELECT oid 160 | , lanname 161 | FROM pg_language 162 | ) lang ON p.prolang = lang.oid 163 | WHERE p.proowner != 1 164 | AND lang.lanname <> 'plpgsql' 165 | ) 166 | ORDER BY 167 | udfoid 168 | , seq 169 | ) 170 | WHERE 171 | {schema_filter} 172 | -- For manual runs, remove the above line and replace with something like this: 173 | -- Example: 174 | -- lower(schemaname) LIKE '%' 175 | ; 176 | -------------------------------------------------------------------------------- /Redshift/scripts/DDL_Table.sql: 
-------------------------------------------------------------------------------- 1 | SELECT 2 | ddl 3 | FROM ( 4 | SELECT table_id 5 | , REGEXP_REPLACE(schemaname, '^zzzzzzzz', '') AS schemaname 6 | , REGEXP_REPLACE(tablename, '^zzzzzzzz', '') AS tablename 7 | , seq 8 | , ddl 9 | FROM 10 | ( 11 | SELECT table_id 12 | , schemaname 13 | , tablename 14 | , seq 15 | , ddl 16 | FROM ( 17 | -- SNOWCONVERT OBJECT HEADER 18 | SELECT c.oid::BIGINT AS table_id 19 | , n.nspname AS schemaname 20 | , c.relname AS tablename 21 | , 0 AS seq 22 | , '\n/* ' + n.nspname + '.' + c.relname + ' */\n' AS ddl 23 | FROM pg_namespace AS n 24 | INNER JOIN pg_class AS c ON n.oid = c.relnamespace 25 | WHERE c.relkind = 'r' 26 | 27 | --CREATE TABLE 28 | 29 | UNION 30 | 31 | SELECT c.oid::BIGINT AS table_id 32 | , n.nspname AS schemaname 33 | , c.relname AS tablename 34 | , 2 AS seq 35 | , 'CREATE TABLE IF NOT EXISTS ' + QUOTE_IDENT(n.nspname) + '.' + QUOTE_IDENT(c.relname) + '' AS ddl 36 | FROM pg_namespace AS n 37 | INNER JOIN pg_class AS c ON n.oid = c.relnamespace 38 | WHERE c.relkind = 'r' 39 | --OPEN PAREN COLUMN LIST 40 | 41 | UNION 42 | 43 | SELECT c.oid::BIGINT AS table_id 44 | , n.nspname AS schemaname 45 | , c.relname AS tablename 46 | , 5 AS seq 47 | , '(' AS ddl 48 | FROM pg_namespace AS n 49 | INNER JOIN pg_class AS c ON n.oid = c.relnamespace 50 | WHERE c.relkind = 'r' 51 | --COLUMN LIST 52 | 53 | UNION 54 | 55 | SELECT table_id 56 | , schemaname 57 | , tablename 58 | , seq 59 | , '\t' + col_delim + col_name + ' ' + col_datatype + ' ' + col_nullable + ' ' + col_default AS ddl 60 | FROM ( 61 | SELECT c.oid::BIGINT AS table_id 62 | , n.nspname AS schemaname 63 | , c.relname AS tablename 64 | , 100000000 + a.attnum AS seq 65 | , CASE 66 | WHEN a.attnum > 1 67 | THEN ',' 68 | ELSE '' 69 | END AS col_delim 70 | , QUOTE_IDENT(a.attname) AS col_name 71 | , CASE 72 | WHEN STRPOS(UPPER(format_type(a.atttypid, a.atttypmod)), 'CHARACTER VARYING') > 0 73 | THEN REPLACE(UPPER(format_type(a.atttypid, a.atttypmod)), 'CHARACTER VARYING', 'VARCHAR') 74 | WHEN STRPOS(UPPER(format_type(a.atttypid, a.atttypmod)), 'CHARACTER') > 0 75 | THEN REPLACE(UPPER(format_type(a.atttypid, a.atttypmod)), 'CHARACTER', 'CHAR') 76 | ELSE UPPER(format_type(a.atttypid, a.atttypmod)) 77 | END AS col_datatype 78 | , CASE 79 | WHEN a.atthasdef IS TRUE 80 | THEN 'DEFAULT ' + adef.adsrc 81 | ELSE '' 82 | END AS col_default 83 | , CASE 84 | WHEN a.attnotnull IS TRUE 85 | THEN 'NOT NULL' 86 | ELSE '' 87 | END AS col_nullable 88 | FROM pg_namespace AS n 89 | INNER JOIN pg_class AS c ON n.oid = c.relnamespace 90 | INNER JOIN pg_attribute AS a ON c.oid = a.attrelid 91 | LEFT OUTER JOIN pg_attrdef AS adef ON a.attrelid = adef.adrelid 92 | AND a.attnum = adef.adnum 93 | WHERE c.relkind = 'r' 94 | AND a.attnum > 0 95 | ORDER BY a.attnum 96 | ) 97 | --CONSTRAINT LIST 98 | 99 | UNION 100 | 101 | ( 102 | SELECT c.oid::BIGINT AS table_id 103 | , n.nspname AS schemaname 104 | , c.relname AS tablename 105 | , 200000000 + CAST(con.oid AS INT) AS seq 106 | , '\t,' + pg_get_constraintdef(con.oid) AS ddl 107 | FROM pg_constraint AS con 108 | INNER JOIN pg_class AS c ON c.relnamespace = con.connamespace 109 | AND c.oid = con.conrelid 110 | INNER JOIN pg_namespace AS n ON n.oid = c.relnamespace 111 | WHERE c.relkind = 'r' 112 | AND pg_get_constraintdef(con.oid) NOT LIKE 'FOREIGN KEY%' 113 | ORDER BY seq 114 | ) 115 | --CLOSE PAREN COLUMN LIST 116 | 117 | UNION 118 | 119 | SELECT c.oid::BIGINT AS table_id 120 | , n.nspname AS schemaname 121 | , c.relname AS tablename 
122 | , 299999999 AS seq 123 | , ')' AS ddl 124 | FROM pg_namespace AS n 125 | INNER JOIN pg_class AS c ON n.oid = c.relnamespace 126 | WHERE c.relkind = 'r' 127 | --SORTKEY COLUMNS 128 | 129 | UNION 130 | 131 | SELECT table_id 132 | , schemaname 133 | , tablename 134 | , seq 135 | , CASE 136 | WHEN min_sort < 0 137 | THEN 'INTERLEAVED SORTKEY (' 138 | ELSE ' SORTKEY (' 139 | END AS ddl 140 | FROM ( 141 | SELECT c.oid::BIGINT AS table_id 142 | , n.nspname AS schemaname 143 | , c.relname AS tablename 144 | , 499999999 AS seq 145 | , min(attsortkeyord) min_sort 146 | FROM pg_namespace AS n 147 | INNER JOIN pg_class AS c ON n.oid = c.relnamespace 148 | INNER JOIN pg_attribute AS a ON c.oid = a.attrelid 149 | WHERE c.relkind = 'r' 150 | AND abs(a.attsortkeyord) > 0 151 | AND a.attnum > 0 152 | GROUP BY 1 153 | , 2 154 | , 3 155 | , 4 156 | ) 157 | 158 | UNION 159 | 160 | ( 161 | SELECT c.oid::BIGINT AS table_id 162 | , n.nspname AS schemaname 163 | , c.relname AS tablename 164 | , 500000000 + abs(a.attsortkeyord) AS seq 165 | , CASE 166 | WHEN abs(a.attsortkeyord) = 1 167 | THEN '\t' + QUOTE_IDENT(a.attname) 168 | ELSE '\t, ' + QUOTE_IDENT(a.attname) 169 | END AS ddl 170 | FROM pg_namespace AS n 171 | INNER JOIN pg_class AS c ON n.oid = c.relnamespace 172 | INNER JOIN pg_attribute AS a ON c.oid = a.attrelid 173 | WHERE c.relkind = 'r' 174 | AND abs(a.attsortkeyord) > 0 175 | AND a.attnum > 0 176 | ORDER BY abs(a.attsortkeyord) 177 | ) 178 | 179 | UNION 180 | 181 | SELECT c.oid::BIGINT AS table_id 182 | , n.nspname AS schemaname 183 | , c.relname AS tablename 184 | , 599999999 AS seq 185 | , '\t)' AS ddl 186 | FROM pg_namespace AS n 187 | INNER JOIN pg_class AS c ON n.oid = c.relnamespace 188 | INNER JOIN pg_attribute AS a ON c.oid = a.attrelid 189 | WHERE c.relkind = 'r' 190 | AND abs(a.attsortkeyord) > 0 191 | AND a.attnum > 0 192 | --END SEMICOLON 193 | 194 | UNION 195 | 196 | SELECT c.oid::BIGINT AS table_id 197 | , n.nspname AS schemaname 198 | , c.relname AS tablename 199 | , 600000000 AS seq 200 | , ';' AS ddl 201 | FROM pg_namespace AS n 202 | INNER JOIN pg_class AS c ON n.oid = c.relnamespace 203 | WHERE c.relkind = 'r' 204 | ) 205 | --COMMENT 206 | UNION 207 | 208 | SELECT c.oid::BIGINT AS table_id 209 | , n.nspname AS schemaname 210 | , c.relname AS tablename 211 | , 600250000 AS seq 212 | , ('COMMENT ON '::text + nvl2(cl.column_name, 'column '::text, 'table '::text) 213 | + quote_ident(n.nspname::text) 214 | + '.'::text 215 | + quote_ident(c.relname::text) 216 | + nvl2(cl.column_name, '.'::text 217 | + cl.column_name::text, ''::text) 218 | + ' IS '::text 219 | + quote_literal(des.description) 220 | + ';'::text)::character VARYING 221 | AS ddl 222 | FROM pg_description des 223 | JOIN pg_class c ON c.oid = des.objoid 224 | JOIN pg_namespace n ON n.oid = c.relnamespace 225 | LEFT JOIN information_schema.columns cl ON cl.ordinal_position::INTEGER = des.objsubid 226 | AND cl.table_name::NAME = c.relname 227 | WHERE c.relkind = 'r' 228 | 229 | 230 | UNION 231 | 232 | ( 233 | SELECT c.oid::BIGINT AS table_id 234 | , 'zzzzzzzz' || n.nspname AS schemaname 235 | , 'zzzzzzzz' || c.relname AS tablename 236 | , 700000000 + CAST(con.oid AS INT) AS seq 237 | , 'ALTER TABLE ' + QUOTE_IDENT(n.nspname) + '.' 
+ QUOTE_IDENT(c.relname) + ' ADD ' + pg_get_constraintdef(con.oid)::VARCHAR(1024) + ';' AS ddl 238 | FROM pg_constraint AS con 239 | INNER JOIN pg_class AS c ON c.relnamespace = con.connamespace 240 | AND c.oid = con.conrelid 241 | INNER JOIN pg_namespace AS n ON n.oid = c.relnamespace 242 | WHERE c.relkind = 'r' 243 | AND con.contype = 'f' 244 | ORDER BY seq 245 | ) 246 | 247 | ORDER BY table_id 248 | , schemaname 249 | , tablename 250 | , seq 251 | ) 252 | WHERE 253 | schemaname not in ('information_schema', 'pg_catalog', 'pg_internal') 254 | ) 255 | WHERE 256 | {schema_filter} 257 | -- For manual runs, remove the above line and replace with something like this: 258 | -- Example: 259 | -- lower(schemaname) LIKE '%' 260 | ; 261 | -------------------------------------------------------------------------------- /Redshift/scripts/DDL_View.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | ddl 3 | FROM 4 | ( 5 | SELECT 6 | pg_get_viewdef(c.oid, TRUE) as view_definition, 7 | '/* ' + n.nspname + '.' + c.relname + ' */\n\n' 8 | + CASE 9 | WHEN c.relnatts > 0 and view_definition not ILIKE 'CREATE MATERIALIZED View%' THEN 10 | 'CREATE OR REPLACE VIEW ' + QUOTE_IDENT(n.nspname) + '.' + QUOTE_IDENT(c.relname) + ' AS\n' + COALESCE(view_definition, '') 11 | ELSE 12 | COALESCE(view_definition, '') 13 | END 14 | + '\n' AS ddl 15 | , n.nspname as schemaname 16 | FROM 17 | pg_catalog.pg_class AS c 18 | INNER JOIN pg_catalog.pg_namespace AS n ON c.relnamespace = n.oid 19 | WHERE 20 | relkind = 'v' 21 | AND n.nspname not in ('information_schema', 'pg_catalog', 'pg_internal') 22 | ) 23 | WHERE 24 | {schema_filter} 25 | -- For manual runs, remove the above line and replace with something like this: 26 | -- Example: 27 | -- lower(schemaname) LIKE '%' 28 | ; 29 | -------------------------------------------------------------------------------- /SQLServer/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/SQLServer/.DS_Store -------------------------------------------------------------------------------- /SQLServer/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
8 | 9 | 10 | -------------------------------------------------------------------------------- /SQLServer/README.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/SQLServer/README.pdf -------------------------------------------------------------------------------- /SQLServer/SQL_Server_Code_Extraction.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/SQLServer/SQL_Server_Code_Extraction.pdf -------------------------------------------------------------------------------- /SQLServer/Table _sizing_report.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/SQLServer/Table _sizing_report.pdf -------------------------------------------------------------------------------- /Teradata/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Teradata/.DS_Store -------------------------------------------------------------------------------- /Teradata/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Teradata/README.md: -------------------------------------------------------------------------------- 1 | # Teradata Export Scripts 2 | 3 | This repository provides some simple scripts to help exporting your Teradata code so it can be migrated to [Snowflake](https://www.snowflake.com/) using [SnowConvert](https://docs.snowconvert.com/snowconvert/for-teradata/introduction) 4 | 5 | ## Version 6 | 7 | 0.0.88 8 | 9 | ## Usage 10 | 11 | The following are the steps to execute the DDL Code Generation. They should be executed in bash shell on a linux environment with access to bteq/tpt utilities. 
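As a point of reference for the steps below, the sketch that follows shows what the edited parameters in `create_ddls.sh` can look like, assuming they appear as plain shell variable assignments (as the placeholders in the BTEQ templates suggest). Every value shown is an illustrative assumption rather than a default from this repository; the connection string follows the usual BTEQ `.LOGON` form of `tdpid/username,password`.

```bash
# Illustrative values only; replace with settings for your environment
connection_string="my_td_host/dbc,my_password"
include_databases="(UPPER(T1.DATABASENAME) IN ('SALES_DB','FINANCE_DB'))"
exclude_databases="(UPPER(T1.DATABASENAME) NOT IN ('ALL','TESTDB'))"
include_objects="(UPPER(T1.TABLENAME) NOT LIKE ('TMP_%'))"
```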
12 | 13 | 1 - Modify `create_ddls.sh` in the bin folder – Using a text editor modify the following parameters: 14 | 15 | * `connection_string` 16 | * `include_databases` 17 | * `exclude_databases` 18 | * `include_objects` 19 | 20 | It is recommended to use the user 'DBC' in the connection string but a user with sysadmin privileges should also work. Please run on a production-like environment with up to date statistics. 21 | 22 | By default the script is setup to exclude system related databases and include all others. You can modify these to get the desired scope, including the operator that is used. Statements need to exclude spaces in the parameter values and values should be all **UPPERCASE**. 23 | By default, all the comments in source code are preserved. If comments needed to be removed, contact Snowflake team. 24 | Executing the create_ddl.sh permanently changes create_ddl.btq file. A copy of "create_ddl.btq" can be used if needed. 25 | 26 | > Do not remove the parentheses around the entire statement which are needed for compound logic. 27 | > Do not use **LIKE ANY** clause for both as it can cause unexpected issues. 28 | 29 | Example values: 30 | 31 | ```sql 32 | (UPPER(T1.DATABASENAME) NOT IN ('ALL', 'TESTDB')); 33 | 34 | (UPPER(T1.DATABASENAME) NOT IN ('ALL', 'TESTDB')) AND UPPER(T1.DATABASENAME) NOT LIKE ('TD_%')) 35 | ``` 36 | 37 | 2 - After modifying, the `create_ddls.sh` file can be run from the command line to execute the extract from within the bin directory. The following files will be created in the output folder: 38 | 39 | ## DDL Files 40 | 41 | These files will contain the definitions of the objects specified by the file name. 42 | 43 | * `DDL_Databases.sql` 44 | * `DDL_Tables.sql` 45 | * `DDL_Join_Indexes.sql` 46 | * `DDL_Functions.sql` 47 | * `DDL_Views.sql` 48 | * `DDL_Macros.sql` 49 | * `DDL_Procedures.sql` 50 | 51 | 3 - Run `create_ddls.sh --version` to check the current version of the extraction scripts. 52 | 53 | ## Reporting issues and feedback 54 | 55 | If you encounter any bugs with the tool please file an issue in the 56 | [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of our GitHub repo. 57 | 58 | ## License 59 | 60 | These scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/Teradata/License.txt). 61 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_databases.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .OS rm ../output/object_extracts/DDL/DDL_Databases.sql 9 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Databases.sql 10 | .SET WIDTH 65531 11 | SELECT 'CREATE DATABASE ' || TRIM(T1.DATABASENAME) || ' FROM DBC AS PERM = 100000000;' "--" FROM DBC.DATABASESV T1 12 | WHERE include_databases AND exclude_databases GROUP BY 1 ORDER BY 1; 13 | .EXPORT RESET 14 | 15 | .quit 0; 16 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_functions.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Functions.sql 9 | .SET WIDTH 65531 10 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' 
|| TRIM(T1.SpecificNAME) || ' */'' as "--"; ' || 'SHOW FUNCTION ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.FUNCTIONNAME) || ';' "---" 11 | FROM DBC.FUNCTIONSV T1 WHERE include_databases AND exclude_databases GROUP BY 1; 12 | .EXPORT RESET 13 | .OS rm ../output/object_extracts/DDL/DDL_Functions.sql 14 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Functions.sql 15 | .SET WIDTH 65531 16 | .RUN FILE = ../temp/SHOW_Functions.sql 17 | .EXPORT RESET 18 | 19 | .quit 0; 20 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_join_indexes.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Join_Indexes.sql 9 | .SET WIDTH 65531 10 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW JOIN INDEX ' || TRIM(T1.DATABASENAME) || '.' ||TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND IN ('I') AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 11 | .EXPORT RESET 12 | .OS rm ../output/object_extracts/DDL/DDL_Join_Indexes.sql 13 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Join_Indexes.sql 14 | .SET WIDTH 65531 15 | .RUN FILE = ../temp/SHOW_Join_Indexes.sql 16 | .EXPORT RESET 17 | 18 | .quit 0; 19 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_macros.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Macros.sql 9 | .SET WIDTH 65531 10 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW MACRO ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'M' AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 11 | .EXPORT RESET 12 | .OS rm ../output/object_extracts/DDL/DDL_Macros.sql 13 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Macros.sql 14 | .SET WIDTH 65531 15 | .RUN FILE = ../temp/SHOW_Macros.sql 16 | .EXPORT RESET 17 | 18 | .quit 0; 19 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_procedures.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Procedures.sql 9 | .SET WIDTH 65531 10 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW PROCEDURE ' || TRIM(T1.DATABASENAME) || '.' 
|| TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'P' AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 11 | .EXPORT RESET 12 | .OS rm ../output/object_extracts/DDL/DDL_Procedures.sql 13 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Procedures.sql 14 | .SET WIDTH 65531 15 | .RUN FILE = ../temp/SHOW_Procedures.sql 16 | .EXPORT RESET 17 | 18 | .quit 0; 19 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_schemas.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .OS rm ../output/object_extracts/DDL/DDL_SF_Schemas.sql 9 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_SF_Schemas.sql 10 | .SET WIDTH 65531 11 | SELECT '/* ' || TRIM(T1.DATABASENAME) || ' */ ' || 'CREATE SCHEMA ' || TRIM(T1.DATABASENAME) || ';' "--" FROM DBC.DATABASESV T1 WHERE include_databases AND exclude_databases GROUP BY 1 ORDER BY 1; 12 | .EXPORT RESET 13 | 14 | .quit 0; 15 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_tables.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Tables.sql 9 | .SET WIDTH 65531 10 | .set titledashes off 11 | 12 | LOCKING ROW FOR ACCESS 13 | SELECT 'SELECT ''/* '' || ''' || TRIM(DATABASENAME) || '.' || TRIM(TABLENAME) || ' */'' as "--"; ' || 'SHOW TABLE ' || TRIM(DATABASENAME) || '.' ||TRIM(TABLENAME) || ';' "--" 14 | FROM DBC.TABLESV T1 15 | WHERE T1.TABLEKIND IN ( 'T' ,'O','Q') -- PI AND NOPI 16 | AND include_databases AND exclude_databases AND include_objects; 17 | 18 | .EXPORT RESET 19 | .OS rm ../output/object_extracts/DDL/DDL_Tables.sql 20 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Tables.sql 21 | 22 | .SET WIDTH 65531 23 | .set titledashes off 24 | .RUN FILE = ../temp/SHOW_Tables.sql 25 | .EXPORT RESET 26 | 27 | .quit 0; 28 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_triggers.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Trigger.sql 9 | .SET WIDTH 65531 10 | .set titledashes off 11 | 12 | LOCKING ROW FOR ACCESS 13 | SELECT 'SELECT ''/* '' || ''' || TRIM(DATABASENAME) || '.' || TRIM(TABLENAME) || ' */'' as "--"; ' || 'SHOW TRIGGER ' || TRIM(DATABASENAME) || '.' 
||TRIM(TABLENAME) || ';' "--" 14 | FROM DBC.TABLESV T1 15 | WHERE T1.TABLEKIND = 'G' -- TRIGGERS 16 | AND include_databases AND exclude_databases AND include_objects; 17 | 18 | .EXPORT RESET 19 | .OS rm ../output/object_extracts/DDL/DDL_Trigger.sql 20 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Trigger.sql 21 | 22 | .SET WIDTH 65531 23 | .set titledashes off 24 | .RUN FILE = ../temp/SHOW_Trigger.sql 25 | .EXPORT RESET 26 | 27 | .quit 0; 28 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_views.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Views.sql 9 | .SET WIDTH 65531 10 | .set titledashes off 11 | 12 | LOCKING ROW FOR ACCESS 13 | SELECT 'SELECT ''/* '' || ''' || TRIM(DATABASENAME) || '.' || TRIM(TABLENAME) || ' */'' as "--"; ' || 'SHOW VIEW ' || TRIM(DATABASENAME) || '.' ||TRIM(TABLENAME) || ';' "--" 14 | FROM DBC.TABLESV T1 15 | WHERE T1.TABLEKIND = 'V' -- VIEWS 16 | AND include_databases AND exclude_databases AND include_objects; 17 | 18 | .EXPORT RESET 19 | .OS rm ../output/object_extracts/DDL/DDL_Views.sql 20 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Views.sql 21 | .SET WIDTH 65531 22 | .set titledashes off 23 | .RUN FILE = ../temp/SHOW_Views.sql 24 | .EXPORT RESET 25 | 26 | .quit 0; -------------------------------------------------------------------------------- /Teradata/sf_objects/SF_Sys_Calendar.sql: -------------------------------------------------------------------------------- 1 | USE DATABASE ......; 2 | 3 | CREATE SCHEMA SYS_CALENDAR; 4 | 5 | CREATE TABLE Sys_Calendar.CALDATES ( 6 | cdate DATE /**** WARNING: FORMAT 'YY/MM/DD' NOT SUPPORTED ****/, 7 | UNIQUE( cdate) 8 | ) ; 9 | 10 | CREATE TABLE SYS_CALENDAR.TDBUSINESSCALENDARV( 11 | CALENDAR_TYPE VARCHAR(20), 12 | CALENDAR_DATE DATE, 13 | DAY_OF_WEEK INTEGER, 14 | DAY_OF_MONTH INTEGER, 15 | DAY_OF_YEAR INTEGER, 16 | DAY_OF_CALENDAR INTEGER, 17 | WEEKDAY_OF_MONTH INTEGER, 18 | WEEK_OF_MONTH INTEGER, 19 | WEEK_OF_QUARTER INTEGER, 20 | WEEK_OF_YEAR INTEGER, 21 | WEEK_OF_CALENDAR INTEGER, 22 | MONTH_OF_QUARTER INTEGER, 23 | MONTH_OF_YEAR INTEGER, 24 | MONTH_OF_CALENDAR INTEGER, 25 | QUARTER_OF_YEAR INTEGER, 26 | QUARTER_OF_CALENDAR INTEGER, 27 | YEAR_OF_CALENDAR INTEGER, 28 | 29 | WEEKBEGIN DATE, 30 | WEEKEND DATE, 31 | MONTHBEGIN DATE, 32 | MONTHEND DATE, 33 | QUARTERBEGIN DATE, 34 | QUARTEREND DATE, 35 | YEARBEGIN DATE, 36 | YEAREND DATE, 37 | ISBUSINESSDAY SMALLINT, 38 | 39 | BUSINESSWEEKBEGIN DATE, 40 | BUSINESSWEEKEND DATE, 41 | BUSINESSMONTHBEGIN DATE, 42 | BUSINESSMONTHEND DATE, 43 | BUSINESSQUARTERBEGIN DATE, 44 | BUSINESSQUARTEREND DATE, 45 | BUSINESSYEARBEGIN DATE, 46 | BUSINESSYEAREND DATE); 47 | 48 | CREATE VIEW SYS_CALENDAR.BUSINESSCALENDAR 49 | COMMENT = 'Table supports migrated default Teradata business calendar' 50 | AS 51 | SELECT * FROM 52 | SYS_CALENDAR.TDBUSINESSCALENDARV 53 | WHERE CALENDAR_TYPE = 'Teradata'; 54 | 55 | CREATE OR REPLACE VIEW SYS_CALENDAR.CALBASICS ( 56 | calendar_date, 57 | day_of_calendar, 58 | day_of_month, 59 | day_of_year, 60 | month_of_year, 61 | year_of_calendar) 62 | AS 63 | SELECT 64 | cdate, 65 | case 66 | when ((TRUNC((mod((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 10000)) / 100)) > 2) then TRUNC( 67 | (146097 * (TRUNC((TRUNC((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate)/10000) + 1900) / 
100))) / 4) 68 | + TRUNC((1461 * ((TRUNC((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate)/10000) + 1900) - (TRUNC((TRUNC((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate)/10000) + 1900) / 100))*100) ) / 4) 69 | + TRUNC((153 * ((TRUNC((mod((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 10000))/100)) - 3) + 2) / 5) 70 | + mod((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) - 693901 71 | else TRUNC( 72 | (146097 * (TRUNC(((TRUNC((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate)/10000) + 1900) - 1) / 100))) / 4) 73 | + TRUNC((1461 * (((TRUNC((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate)/10000) + 1900) - 1) - (TRUNC(((TRUNC((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate)/10000) + 1900) - 1) / 100))*100) ) / 4) 74 | + TRUNC((153 * ((TRUNC((mod((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 10000))/100)) + 9) + 2) / 5) 75 | + mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) - 693901 76 | end, 77 | mod((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100), 78 | (case TRUNC( (mod((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 10000))/100) 79 | when 1 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) 80 | when 2 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 31 81 | when 3 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 59 82 | when 4 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 90 83 | when 5 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 120 84 | when 6 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 151 85 | when 7 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 181 86 | when 8 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 212 87 | when 9 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 243 88 | when 10 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 273 89 | when 11 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 304 90 | when 12 then mod( (YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 100) + 334 91 | end) 92 | + 93 | (case 94 | when (((mod((TRUNC((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate) / 10000) + 1900), 4) = 0) AND (mod((TRUNC((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate) / 10000) + 1900), 100) <> 0)) OR 95 | (mod((TRUNC((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate) / 10000) + 1900), 400) = 0)) AND (TRUNC((mod((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 10000))/100) > 2) then 96 | 1 97 | else 98 | 0 99 | end),TRUNC( 100 | (mod((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate), 10000))/100),TRUNC((YEAR(cdate) - 1900) * 10000 + MONTH(cdate) * 100 + DAY(cdate)/10000) 101 | FROM SYS_CALENDAR.CALDATES ; 102 | 103 | CREATE OR REPLACE VIEW SYS_CALENDAR.CALENDARTMP ( 104 | calendar_date, 105 | day_of_week, 106 | day_of_month, 107 | day_of_year, 108 | day_of_calendar, 109 | weekday_of_month, 110 | week_of_month, 111 | week_of_year, 112 | week_of_calendar, 113 | month_of_quarter, 114 | month_of_year, 115 | month_of_calendar, 116 | quarter_of_year, 117 | quarter_of_calendar, 118 | year_of_calendar) 119 | AS 120 | SELECT 121 | calendar_date,mod( 122 | 
(day_of_calendar + 0), 7) + 1, 123 | day_of_month, 124 | day_of_year, 125 | day_of_calendar,TRUNC( 126 | (day_of_month - 1) / 7) + 1,TRUNC( 127 | (day_of_month - mod( (day_of_calendar + 0), 7) + 6) / 7),TRUNC( 128 | (day_of_year - mod( (day_of_calendar + 0), 7) + 6) / 7),TRUNC( 129 | (day_of_calendar - mod( (day_of_calendar + 0), 7) + 6) / 7),mod( 130 | (month_of_year - 1), 3) + 1, 131 | month_of_year, 132 | month_of_year + 12 * year_of_calendar,TRUNC( 133 | (month_of_year + 2) / 3),TRUNC( 134 | (month_of_year + 2) / 3) + 4 * year_of_calendar, 135 | year_of_calendar + 1900 136 | FROM SYS_CALENDAR.CALBASICS; 137 | 138 | CREATE OR REPLACE VIEW SYS_CALENDAR.CALENDAR_TD_ISO_COMPATIBLE ( 139 | calendar_date, 140 | day_of_week, 141 | day_of_month, 142 | day_of_year, 143 | day_of_calendar, 144 | weekday_of_month, 145 | --week_of_month, 146 | week_of_year, 147 | --week_of_calendar, 148 | month_of_quarter, 149 | month_of_year, 150 | --month_of_calendar, 151 | quarter_of_year, 152 | --quarter_of_calendar, 153 | year_of_calendar) 154 | AS 155 | SELECT 156 | calendar_date, 157 | DAYOFWEEKISO(calendar_date), 158 | DAYOFMONTH(calendar_date), 159 | DAYOFYEAR(calendar_date), 160 | SNOWCONVERT.PUBLIC.DayNumber_Of_Calendar_UDF(calendar_date), 161 | TRUNC((DAYOFMONTH(calendar_date)/7))+1, 162 | --WeekNumber_Of_Month(calendar_date), 163 | extract(weekiso, calendar_date), 164 | --WeekNumber_Of_Calendar(calendar_date), 165 | MOD(MONTH(calendar_date),3), 166 | MONTH(calendar_date), 167 | --month_of_calendar, 168 | QUARTER(calendar_date), 169 | --quarter_of_calendar, 170 | YEAR(calendar_date) 171 | FROM SYS_CALENDAR.CALENDARTMP; 172 | 173 | 174 | CREATE OR REPLACE VIEW Sys_Calendar.CALENDAR( 175 | calendar_date, 176 | day_of_week, 177 | day_of_month, 178 | day_of_year, 179 | --day_of_calendar, 180 | weekday_of_month, 181 | --week_of_month, 182 | week_of_year, 183 | --week_of_calendar, 184 | month_of_quarter, 185 | month_of_year, 186 | --month_of_calendar, 187 | quarter_of_year, 188 | --quarter_of_calendar, 189 | year_of_calendar) 190 | AS 191 | SELECT 192 | calendar_date, 193 | DAYOFWEEKISO(calendar_date), 194 | DAYOFMONTH(calendar_date), 195 | DAYOFYEAR(calendar_date), 196 | --DayNumber_Of_Calendar(calendar_date), 197 | TRUNC((DAYOFMONTH(calendar_date)/7))+1, 198 | --WeekNumber_Of_Month(calendar_date), 199 | extract(weekiso, calendar_date), 200 | --WeekNumber_Of_Calendar(calendar_date), 201 | MOD(MONTH(calendar_date),3), 202 | Month(calendar_date), 203 | --month_of_calendar, 204 | Quarter(calendar_date), 205 | --quarter_of_calendar, 206 | Year(calendar_date) 207 | FROM Sys_Calendar.CALENDARTMP; -------------------------------------------------------------------------------- /Tests/DB2/.gitignore: -------------------------------------------------------------------------------- 1 | database/ -------------------------------------------------------------------------------- /Tests/DB2/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 
2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Tests/DB2/README.md: -------------------------------------------------------------------------------- 1 | ## Test files for DB2 Scripts 2 | 3 | A `startDocker.sh` scripts is provided to create a container where the Export DDL script can be tested. -------------------------------------------------------------------------------- /Tests/DB2/startDocker.sh: -------------------------------------------------------------------------------- 1 | DBPASS=$1 2 | if [ -z "$DBPASS" ] 3 | then 4 | echo "Please provide password for the DB Instanace" 5 | read DBPASS 6 | fi 7 | 8 | docker run -itd --name mydb2 --privileged=true -p 50000:50000 -e LICENSE=accept -e DB2INST1_PASSWORD=$DBPASS -e DBNAME=testdb -v $CODESPACE_VSCODE_FOLDER/Tests/DB2/database:/database -v $CODESPACE_VSCODE_FOLDER/DB2:/DDLExportScripts/export ibmcom/db2 9 | docker exec -ti mydb2 bash -c "su - db2inst1" 10 | 11 | #if you get errors saying that the database did not start, you can try this out. 12 | #a) db2trc on -f db2trace.out 13 | #b) db2start 14 | #c) db2trc off -------------------------------------------------------------------------------- /Tests/Teradata/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
8 | 9 | 10 | -------------------------------------------------------------------------------- /Tests/Teradata/README.md: -------------------------------------------------------------------------------- 1 | # Teradata Export Scripts Tests 2 | 3 | > [!WARNING] 4 | > This test folder should be run only on the Teradata demo database [Teradata-Express](https://downloads.teradata.com/download/database/teradata-express/vmware), because it creates and removes new databases to test the extraction process. 5 | 6 | ## How to run the tests. 7 | 1 - Modify `scripts/config.sh` with your connection values; if you are using the demo Teradata-Express these values should be the same. 8 | 9 | 2 - Ensure your demo database is running in your local system. 10 | 11 | 3 - Go to `./Tests/Teradata/scripts` and run the script `ssh_automatic_login_configuration.sh`; this is necessary to automate the login process to the demo database. 12 | 13 | 4 - Go back to `./Tests/Teradata/` and run `python -m unittest` 14 | 15 | 16 | ## How to add a new test with a new database. 17 | 1 - Create a new folder in `./Tests/Teradata/source_code`; this folder must contain the following files: 18 | * `deploy_database.sh`, this script executes the necessary commands to deploy the example source code. 19 | * `drop_database.sh`, this script executes the necessary commands to drop the example source code; the variables defined in `./Teradata/bin/create_ddls.sh` are replaced in this file. 20 | * The SQL source code, the scripts that create tables, procedures, etc. 21 | 22 | 2 - Create a Python test class. As an example, check the file `test_demo_database.py`; a minimal sketch is also shown below. In addition, the folder name defined in the setUpClass method must be the same name as the one created in the previous step, since the script `execute_extract_database_script.sh` looks for that folder in the directory `./Tests/Teradata/source_code`. 23 | 24 | 3 - The SQL files must be in UTF-8.
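For illustration only, here is a hedged sketch of what such a new test class could look like; it is not part of the repository. The folder name `my_new_database`, the output folder name `test_my_new_database`, the database filters, and the expected object counts are hypothetical placeholders that must be adapted to the SQL files you add under `./Tests/Teradata/source_code`; the base-class methods used are the ones defined in `teradata_extraction_test_base.py`.

```python
import unittest

from teradata_extraction_test_base import TeradataExtractionTestBase
from database_summary.top_level_object_type import TopLevelObjectType


class TestMyNewDatabase(TeradataExtractionTestBase):

    @classmethod
    def setUpClass(cls):
        # Hypothetical filters: they must match the databases created by
        # source_code/my_new_database/deploy_database.sh.
        extraction_parameters = [
            "include_databases=(UPPER(T1.DATABASENAME) IN ('MY_NEW_DEMO'))",
            "exclude_databases=(UPPER(T1.DATABASENAME) NOT IN ('DBC','SYS_CALENDAR'))",
        ]
        # First argument: folder name under ./Tests/Teradata/source_code;
        # second argument: output folder name under ./Tests/Teradata/extracted_code.
        cls.set_up_class(cls, "my_new_database", "test_my_new_database", extraction_parameters)

    def test_database_files(self):
        # Placeholder counts: they must reflect the objects declared in your SQL files.
        self.validate_top_level_objects_quantity(TopLevelObjectType.TABLE, 1)
        self.validate_top_level_objects_quantity(TopLevelObjectType.VIEW, 1)
        self.assert_no_errors_messages()


if __name__ == '__main__':
    unittest.main()
```

As with `test_demo_database.py`, the test is run from `./Tests/Teradata/` with `python -m unittest`.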
-------------------------------------------------------------------------------- /Tests/Teradata/database_summary/__init__.py: -------------------------------------------------------------------------------- 1 |  -------------------------------------------------------------------------------- /Tests/Teradata/database_summary/database_source_code_summarizer.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | from .database_source_code_summary import DatabaseSourceCodeSummary 4 | from .top_level_object_type import TopLevelObjectType 5 | 6 | def sumarize_database_source_code(path: str) -> DatabaseSourceCodeSummary: 7 | database_summary = DatabaseSourceCodeSummary() 8 | 9 | for dirpath, dirnames, filenames in os.walk(path): 10 | for filename in filenames: 11 | if not re.match(r'.*\.sql$', filename, flags=re.IGNORECASE): 12 | continue 13 | file_path = os.path.join(dirpath, filename) 14 | database_summary.add_sql_file(file_path) 15 | sql_statements = read_sql_statements_from_file(file_path) 16 | analyze_sql_statements(sql_statements, database_summary) 17 | return database_summary 18 | 19 | def analyze_sql_statements(sql_statements: "list[str]", database_summary: DatabaseSourceCodeSummary): 20 | for sql_statement in sql_statements: 21 | type = get_sql_statement_type(sql_statement) 22 | database_summary.get_top_level_object_to_int_map()[type] += 1 23 | 24 | def get_sql_statement_type(sql_statement: str) -> TopLevelObjectType: 25 | for type in TopLevelObjectType: 26 | if is_statement_of_type(sql_statement, type.name): 27 | return type 28 | 29 | return TopLevelObjectType.UNDEFINED_TYPE 30 | 31 | def is_statement_of_type(statement: str, type_name: str) -> bool: 32 | type_name = type_name.replace("_", r"\s*") 33 | regex = r'^\s*(?:CREATE|REPLACE)(?:\s*\w*\s*){0,2}' + type_name+ r'\s' 34 | result = re.search(regex, statement, flags=re.IGNORECASE|re.MULTILINE) 35 | return result 36 | 37 | def read_sql_statements_from_file(file_path: str) ->"list[str]": 38 | with open(file_path) as my_file: 39 | comment_pattern = r'\/\*[\s\S]*?\*\/' 40 | code_without_comments = re.sub(comment_pattern, '', my_file.read(), flags=re.MULTILINE) 41 | sql_statements = code_without_comments.split(';') 42 | return sql_statements -------------------------------------------------------------------------------- /Tests/Teradata/database_summary/database_source_code_summary.py: -------------------------------------------------------------------------------- 1 | from .top_level_object_type import TopLevelObjectType 2 | class DatabaseSourceCodeSummary(): 3 | def __init__(self): 4 | self._file_paths = [] 5 | self._top_level_object_to_int_map = {} 6 | for top_level_object_type in TopLevelObjectType: 7 | self._top_level_object_to_int_map[top_level_object_type] = 0 8 | 9 | def get_count_of_files(self) -> int: 10 | return len(self._file_paths) 11 | 12 | def add_sql_file(self, file_path: str) -> None: 13 | self._file_paths+=[file_path] 14 | 15 | def get_top_level_object_to_int_map(self) -> "dict[TopLevelObjectType, int]": 16 | return self._top_level_object_to_int_map -------------------------------------------------------------------------------- /Tests/Teradata/database_summary/top_level_object_type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from types import DynamicClassAttribute 3 | 4 | class TopLevelObjectType(Enum): 5 | TABLE = 1 6 | PROCEDURE = 2 7 | VIEW = 3 8 | DATABASE = 4 9 | TRIGGER = 5 10 | 
MACRO = 6 11 | FUNCTION = 7 12 | JOIN_INDEX = 8 13 | UNDEFINED_TYPE = 20 -------------------------------------------------------------------------------- /Tests/Teradata/scripts/config.sh: -------------------------------------------------------------------------------- 1 | # 2 | vm_connection="root@127.0.0.1" 3 | vm_ssh_port="2222" 4 | logon_command="dbc,dbc" 5 | 6 | -------------------------------------------------------------------------------- /Tests/Teradata/scripts/execute_deploy_database_script.sh: -------------------------------------------------------------------------------- 1 | # 2 | #####Version 20230810: Script created 3 | 4 | #####Constants 5 | MESSAGE='\033[0;32m' # Green 6 | ERROR='\033[0;31m' # Red 7 | NC='\033[0m' # No Color 8 | 9 | #####Parameters 10 | source_code_folder_name="$1" 11 | if [ ! "$source_code_folder_name" ] || [ ! -d "../source_code/$source_code_folder_name/" ] ; then 12 | echo "${ERROR}Invalid parameter '$source_code_folder_name', options are [$(ls ../source_code)]${NC}" 13 | exit 1 14 | fi 15 | 16 | #####Import config variables 17 | source config.sh 18 | 19 | #####Commands 20 | echo "${MESSAGE}Sending the database source code to the Virtual Machine...${NC}" 21 | ssh $vm_connection -p $vm_ssh_port "mkdir -p /root/sc_testing_folder" 22 | rsync -r ../source_code/$source_code_folder_name $vm_connection:/root/sc_testing_folder -e "ssh -p $vm_ssh_port" 23 | 24 | echo "${MESSAGE}Executing scripts in the Virtual Machine...${NC}" 25 | ssh $vm_connection -p $vm_ssh_port "cd /root/sc_testing_folder/$source_code_folder_name && bash deploy_database.sh $logon_command" 26 | ssh -q $vm_connection -p $vm_ssh_port "rm -r /root/sc_testing_folder/$source_code_folder_name/" 27 | -------------------------------------------------------------------------------- /Tests/Teradata/scripts/execute_drop_database_script.sh: -------------------------------------------------------------------------------- 1 | # 2 | #####Version 20230810: Script created 3 | 4 | #####Constants 5 | MESSAGE='\033[0;32m' # Green 6 | ERROR='\033[0;31m' # Red 7 | NC='\033[0m' # No Color 8 | 9 | #####Parameters 10 | source_code_folder_name="$1" 11 | if [ ! "$source_code_folder_name" ] || [ ! -d "../source_code/$source_code_folder_name/" ] ; then 12 | echo "${ERROR}Invalid parameter '$source_code_folder_name', options are [$(ls ../source_code)]${NC}" 13 | exit 1 14 | fi 15 | 16 | #####Import config variables 17 | source config.sh 18 | 19 | #####Commands 20 | echo "${MESSAGE}Sending the database source code to the Virtual Machine...${NC}" 21 | rsync -r ../source_code/$source_code_folder_name $vm_connection:/root/sc_testing_folder/ -e "ssh -p $vm_ssh_port" 22 | 23 | echo "${MESSAGE}Executing scripts in the Virtual Machine...${NC}" 24 | ssh $vm_connection -p $vm_ssh_port "cd /root/sc_testing_folder/$source_code_folder_name && bash drop_database.sh $logon_command" 25 | ssh -q $vm_connection -p $vm_ssh_port "rm -r /root/sc_testing_folder/$source_code_folder_name/" 26 | -------------------------------------------------------------------------------- /Tests/Teradata/scripts/execute_extract_database_script.sh: -------------------------------------------------------------------------------- 1 | # 2 | #Version 20230810: Script created 3 | 4 | #####Constants 5 | MESSAGE='\033[0;32m' # Green 6 | ERROR='\033[0;31m' # Red 7 | NC='\033[0m' # No Color 8 | folder_name="Teradata_Extraction" 9 | 10 | #####Parameters 11 | # First: The database folder to be used.
12 | # Second to n: The extraction parameters to be used, for example: 13 | # include_databases="(UPPER(T1.DATABASENAME) = 'SC_EXAMPLE_DEMO')" exclude_databases="(UPPER(T1.DATABASENAME) NOT IN ('SYS_CALENDAR','ALL','CONSOLE','CRASHDUMPS','DBC','DBCMANAGER','DBCMNGR','DEFAULT','EXTERNAL_AP','EXTUSER','LOCKLOGSHREDDER','PDCRADM','PDCRDATA','PDCRINFO','PUBLIC','SQLJ','SYSADMIN','SYSBAR','SYSJDBC','SYSLIB','SYSSPATIAL','SYSTEMFE','SYSUDTLIB','SYSUIF','TD_SERVER_DB','TD_SYSFNLIB','TD_SYSFNLIB','TD_SYSGPL','TD_SYSXML','TDMAPS', 'TDPUSER','TDQCD','TDSTATS','TDWM','VIEWPOINT','PDCRSTG'))" 14 | 15 | source_code_folder_name="$1" 16 | extracted_source_code_folder_name="$2" 17 | if [ ! "$source_code_folder_name" ] || [ ! -d "../source_code/$source_code_folder_name/" ] ; then 18 | echo "${ERROR}Invalid parameter '$source_code_folder_name', options are [$(ls ../source_code)]${NC}" 19 | exit 1 20 | fi 21 | if [ ! "$extracted_source_code_folder_name" ] ; then 22 | echo "${ERROR}Invalid parameter '$extracted_source_code_folder_name', this value is the output folder name.${NC}" 23 | exit 1 24 | fi 25 | 26 | 27 | for ARGUMENT in "${@:3}" 28 | do 29 | KEY=$(echo $ARGUMENT | cut -f1 -d=) 30 | 31 | KEY_LENGTH=${#KEY} 32 | VALUE="${ARGUMENT:$KEY_LENGTH+1}" 33 | 34 | export "$KEY"="$VALUE" 35 | done 36 | 37 | echo "${MESSAGE}Using the following extraction parameters:${NC}" 38 | echo "include_databases = $include_databases" 39 | echo "exclude_databases = $exclude_databases" 40 | echo "include_objects = $include_objects" 41 | echo "ddl_leng_max_limit_dic = $ddl_leng_max_limit_dic" 42 | echo "ddl_leng_max_limit_dic = $ddl_leng_max_limit_dic" 43 | 44 | #####Import config variables 45 | echo "${MESSAGE}Importing connection variables...${NC}" 46 | . config.sh 47 | 48 | ##### Commands 49 | echo "${MESSAGE}Copying Teradata Script...${NC}" 50 | cp -fr ../../../Teradata $folder_name 51 | mkdir -p ../extracted_code/ 52 | rm -fr ../extracted_code/$extracted_source_code_folder_name 53 | 54 | 55 | echo "${MESSAGE}Replacing Teradata Script parameters...${NC}" 56 | sed -i '' "s/connection_string=/connection_string=${logon_command} #/g" $folder_name/bin/create_ddls.sh 57 | 58 | #### Replace the variable include_databases, if it was defined in the imported script 59 | if [ ! -z ${include_databases+x} ]; then 60 | sed -i '' "s/include_databases=/include_databases=\"${include_databases}\" #/g" $folder_name/bin/create_ddls.sh 61 | fi 62 | 63 | if [ ! -z ${exclude_databases+x} ]; then 64 | sed -i '' "s/exclude_databases=/exclude_databases=\"${exclude_databases}\" #/g" $folder_name/bin/create_ddls.sh 65 | fi 66 | 67 | if [ ! -z ${include_objects+x} ]; then 68 | sed -i '' "s/include_objects=/include_objects=\"${include_objects}\" #/g" $folder_name/bin/create_ddls.sh 69 | fi 70 | 71 | if [ ! 
-z ${ddl_leng_max_limit_dic+x} ]; then 72 | sed -i '' "s/ddl_leng_max_limit_dic=/ddl_leng_max_limit_dic=${ddl_leng_max_limit_dic} #/g" $folder_name/bin/create_ddls.sh 73 | fi 74 | 75 | echo "${MESSAGE}Removing previous execution output...${NC}" 76 | rm -fr $folder_name/output 77 | rm -fr $folder_name/log 78 | 79 | 80 | echo "${MESSAGE}Sending Teradata scripts to the Virual Machine...${NC}" 81 | scp -P $vm_ssh_port -r $folder_name $vm_connection:/root/sc_testing_folder/$folder_name 82 | rm -fr $folder_name 83 | 84 | 85 | echo "${MESSAGE}Executing scripts in the Virtual Machine...${NC}" 86 | ssh $vm_connection -p $vm_ssh_port "cd /root/sc_testing_folder/$folder_name/bin && bash create_ddls.sh" 87 | 88 | 89 | echo "${MESSAGE}Retrieving the output folder and removing the sent files...${NC}" 90 | scp -r -OT -P $vm_ssh_port $vm_connection:"/root/sc_testing_folder/$folder_name/output /root/sc_testing_folder/$folder_name/log" ../extracted_code/$extracted_source_code_folder_name 91 | ssh -q $vm_connection -p $vm_ssh_port rm -r /root/sc_testing_folder/$folder_name 92 | -------------------------------------------------------------------------------- /Tests/Teradata/scripts/execute_scripts.sh: -------------------------------------------------------------------------------- 1 | # 2 | #####Constants 3 | STEP_MESSAGE='\033[0;34m' # Green 4 | ERROR='\033[0;31m' # Red 5 | NC='\033[0m' # No Color 6 | 7 | #####Parameters 8 | ##### 1 - Source code folder name 9 | ##### 2 - Extracted code folder name 10 | ##### 3 to n - Extraction parameters in the following format key="value" 11 | 12 | source_code_folder_name="$1" 13 | extracted_source_code_folder_name="$2" 14 | if [ ! "$source_code_folder_name" ] || [ ! -d "../source_code/$source_code_folder_name/" ] ; then 15 | echo "${ERROR}Invalid parameter '$source_code_folder_name', options are [$(ls ../source_code)]${NC}" 16 | exit 1 17 | fi 18 | if [ ! 
"$extracted_source_code_folder_name" ] ; then 19 | echo "${ERROR}Invalid parameter '$extracted_source_code_folder_name', this value is the output folder name.${NC}" 20 | exit 1 21 | fi 22 | 23 | for ARGUMENT in "${@:3}" 24 | do 25 | extraction_parameters="$extraction_parameters \"$ARGUMENT\"" 26 | done 27 | 28 | echo "${STEP_MESSAGE}Step 1/3 Deplying database...${NC}" 29 | source execute_deploy_database_script.sh $source_code_folder_name 30 | echo "${STEP_MESSAGE}Step 2/3 Extracting database...${NC}" 31 | eval "source execute_extract_database_script.sh $source_code_folder_name $extracted_source_code_folder_name $extraction_parameters" 32 | echo "${STEP_MESSAGE}Step 3/3 Removing database...${NC}" 33 | source execute_drop_database_script.sh $source_code_folder_name 34 | -------------------------------------------------------------------------------- /Tests/Teradata/scripts/ssh_automatic_login_configuration.sh: -------------------------------------------------------------------------------- 1 | # 2 | source config.sh 3 | ssh-keygen -t rsa -b 2048 4 | ssh-copy-id -p $vm_ssh_port $vm_connection -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_CreateMacro.sql: -------------------------------------------------------------------------------- 1 | CREATE MACRO Get_Emp AS ( 2 | SELECT EmployeeID,FirstName,LastName 3 | FROM Employee; 4 | ); 5 | 6 | 7 | CREATE MACRO Get_Emp_version2 AS ( 8 | SELECT EmployeeID,FirstName,LastName 9 | FROM Employee; 10 | ); -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_Databases.sql: -------------------------------------------------------------------------------- 1 | -- 2 | CREATE DATABASE SC_EXAMPLE_DEMO FROM DBC AS PERM = 100000000; 3 | CREATE DATABASE SC_EXAMPLE_DEMO_2 FROM DBC AS PERM = 100000000; -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_JoinIndex.sql: -------------------------------------------------------------------------------- 1 | CREATE JOIN INDEX Employee_JI 2 | AS 3 | SELECT EmployeeID,FirstName,LastName 4 | FROM Employee 5 | PRIMARY INDEX(FirstName); 6 | 7 | 8 | 9 | CREATE JOIN INDEX Employee_JI2 10 | AS 11 | SELECT EmployeeID,FirstName,LastName 12 | FROM Employee 13 | PRIMARY INDEX(LastName); -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_Tables.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE Employee ( 2 | EmployeeID INT NOT NULL, 3 | FirstName VARCHAR(50), 4 | LastName VARCHAR(50), 5 | Department VARCHAR(50), 6 | Email VARCHAR(100), 7 | Salary number, 8 | PRIMARY KEY (EmployeeID) 9 | ); 10 | 11 | CREATE TABLE salary_log( 12 | type_user VARCHAR(50), 13 | id INT, 14 | old_salary number, 15 | new_salary number 16 | ); 17 | 18 | CREATE TABLE expandOnTable 19 | ( 20 | id INTEGER, 21 | pd PERIOD ( TIMESTAMP) 22 | ); 23 | 24 | CREATE TABLE SC_EXAMPLE_DEMO_2.project 25 | ( 26 | emp_id INTEGER, 27 | project_name VARCHAR(20), 28 | dept_id INTEGER, 29 | duration PERIOD( DATE) 30 | ); 31 | 32 | CREATE TABLE MessageStorage 33 | ( 34 | MessageID TIMESTAMP(0), 35 | Message1 VARCHAR(100), 36 | Message2 VARCHAR(100) 37 | ); 38 | 39 | CREATE TABLE account_balance 40 | ( 41 | account_id INTEGER NOT NULL, 42 | month_id INTEGER, 43 | balance INTEGER 
44 | ) UNIQUE PRIMARY INDEX (account_id, month_id); 45 | 46 | 47 | CREATE TABLE ResTable 48 | ( 49 | Column1 VARCHAR(255) 50 | ); 51 | 52 | CREATE TABLE EMPLOYEE_JOB_PERIODS ( 53 | FIRST_NAME VARCHAR(100), 54 | LAST_NAME VARCHAR(100), 55 | JOB_DURATION PERIOD(DATE) 56 | ); 57 | 58 | CREATE TABLE vEmployee 59 | ( 60 | PersonID INT, 61 | LastName VARCHAR(255), 62 | FirstName VARCHAR(255) 63 | ); -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_Trigger.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE TRIGGER RaiseTrig 3 | AFTER UPDATE OF salary ON employee 4 | REFERENCING OLD AS OldRow NEW AS NewRow 5 | FOR EACH ROW 6 | WHEN ((NewRow.salary - OldRow.salary) / OldRow.salary >.10) 7 | INSERT INTO salary_log 8 | VALUES ('USER', NewRow.EmployeeID, OldRow.salary, NewRow.salary); -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_Views.sql: -------------------------------------------------------------------------------- 1 | CREATE VIEW EMPLOYEE_JOB_DURATION_COMPARISONS 2 | AS 3 | LOCKING ROW FOR ACCESS 4 | SELECT 'OVERLAP' FUNC, FIRST_NAME, LAST_NAME 5 | FROM EMPLOYEE_JOB_PERIODS 6 | WHERE JOB_DURATION OVERLAPS PERIOD(DATE '2009-01-01', DATE '2010-09-24') 7 | UNION ALL 8 | SELECT 'LDIFF' FUNC, FIRST_NAME, LAST_NAME 9 | FROM EMPLOYEE_JOB_PERIODS 10 | WHERE INTERVAL(JOB_DURATION LDIFF PERIOD(DATE '2009-01-01', DATE '2010-09-24')) MONTH > 3 11 | UNION ALL 12 | SELECT 'RDIFF' FUNC, FIRST_NAME, LAST_NAME 13 | FROM EMPLOYEE_JOB_PERIODS 14 | WHERE JOB_DURATION RDIFF PERIOD(DATE '2009-01-01', DATE '2010-09-24') IS NOT NULL; -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/INSERT_VEMPLOYEE.sql: -------------------------------------------------------------------------------- 1 | CREATE PROCEDURE SC_EXAMPLE_DEMO.InsertVEmployee() 2 | BEGIN 3 | INSERT INTO SC_EXAMPLE_DEMO.vEmployee VALUES(0,'AL','Montgomery'); 4 | INSERT INTO SC_EXAMPLE_DEMO.vEmployee VALUES(0,'AL','Montgomery'); 5 | END; -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/UPDATE_VEMPLOYEE.sql: -------------------------------------------------------------------------------- 1 | CREATE PROCEDURE SC_EXAMPLE_DEMO.UpdateVEmployee() 2 | BEGIN 3 | INSERT INTO SC_EXAMPLE_DEMO.vEmployee VALUES(0,'AL','Montgomery'); 4 | INSERT INTO SC_EXAMPLE_DEMO.vEmployee VALUES(0,'AL','Montgomery'); 5 | END; -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/my_yyyymmdd_to_date2.c: -------------------------------------------------------------------------------- 1 | /* 2 | my_yyyymmdd_to_date2.c 3 | Teradata User Defined Function (UDF) 4 | Calling 5 | ------- 6 | my_yyyymmdd_to_date2(date_str); 7 | SELECT my_yyyymmdd_to_date2('20130423') AS ValidDate; 8 | Parameters 9 | ---------- 10 | date_str 11 | Character string containing date to be validated 12 | UDF Compilation 13 | --------------- 14 | REPLACE FUNCTION my_yyyymmdd_to_date2 15 | ( 16 | InputDate VARCHAR(8) 17 | ) 18 | RETURNS DATE 19 | LANGUAGE C 20 | NO SQL 21 | DETERMINISTIC 22 | PARAMETER STYLE SQL 23 | EXTERNAL NAME 'CS!my_yyyymmdd_to_date2!./my_yyyymmdd_to_date2.c' 24 | ; 25 | */ 26 | /* Must define SQL_TEXT before 
including "sqltypes_td "*/ 27 | /* Must define SQL_TEXT before including "sqltypes_td "*/ 28 | #define SQL_TEXT Latin_Text 29 | #include "sqltypes_td.h" 30 | #include "stdio.h" 31 | #include "string.h" 32 | #define IsNull -1 33 | #define IsNotNull 0 34 | #define NoSqlError "00000" 35 | #define YYYYMMDD_LENGTH 8 36 | #define ERR_RC 99 37 | void my_yyyymmdd_to_date2 38 | ( 39 | VARCHAR_LATIN *InputDateString 40 | ,DATE *result 41 | ,int *inputDateStringIsNull 42 | ,int *resultIsNull 43 | ,char sqlstate[6] 44 | ,SQL_TEXT extname[129] 45 | ,SQL_TEXT specificname[129] 46 | ,SQL_TEXT error_message[257] 47 | ) 48 | { 49 | char input_integer[30]; 50 | int year_yyyy; 51 | int month_mm; 52 | int day_dd; 53 | char day_char[3]; 54 | char month_char[3]; 55 | char year_char[5]; 56 | int in_len,i; 57 | /* Return Nulls on Null Input */ 58 | if ((*inputDateStringIsNull == IsNull)) 59 | { 60 | strcpy(sqlstate, "22018") ; 61 | strcpy((char *) error_message, "Null value not allowed.") ; 62 | *resultIsNull = IsNull; 63 | return; 64 | } 65 | in_len = strlen(InputDateString); 66 | if ( in_len != YYYYMMDD_LENGTH ) 67 | { 68 | *result = ( 1 * 10000 ) + ( 12 * 100) + 1; 69 | *resultIsNull = IsNull; 70 | strcpy((char *) sqlstate, "01H01"); 71 | strcpy((char *) error_message, 72 | "InputDateString is of wrong length, must be in YYYYMMDD format"); 73 | return; 74 | } 75 | if ( in_len != YYYYMMDD_LENGTH ) 76 | { 77 | *result = ( 1 * 10000 ) + ( 12 * 100) + 2; 78 | return; 79 | } 80 | strcpy(input_integer , (char *) InputDateString); 81 | for (i = 0; i '9') 84 | { 85 | *result = ( 1 * 10000 ) + ( 1 * 100) + 3; 86 | return; 87 | } 88 | else 89 | { 90 | input_integer[i] = tolower(input_integer[i]); 91 | } 92 | } 93 | sprintf(year_char,"%c%c%c%c",input_integer[0],input_integer[1],input_integer[2], 94 | input_integer[3]); 95 | sprintf(month_char,"%c%c",input_integer[4],input_integer[5]); 96 | sprintf(day_char,"%c%c",input_integer[6],input_integer[7]); 97 | year_yyyy = atoi(year_char); 98 | month_mm = atoi(month_char); 99 | day_dd = atoi(day_char); 100 | /* Format output_date in internal Teradata format ((YEAR - 1900) * 10000 ) + 101 | (MONTH * 100) + DAY */ 102 | *result = (( year_yyyy - 1900 ) * 10000 ) + ( month_mm * 100) + day_dd; 103 | } -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/my_yyyymmdd_to_date2.sql: -------------------------------------------------------------------------------- 1 | CREATE FUNCTION SC_EXAMPLE_DEMO.my_yyyymmdd_to_date2 2 | ( 3 | InputDate VARCHAR(8) CHARACTER SET LATIN 4 | ) 5 | RETURNS DATE 6 | LANGUAGE C 7 | SPECIFIC my_yyyymmdd_to_date2 8 | NO SQL 9 | DETERMINISTIC 10 | PARAMETER STYLE SQL 11 | CALLED ON NULL INPUT 12 | EXTERNAL NAME 'CS!my_yyyymmdd_to_date2!./database_code/my_yyyymmdd_to_date2.c' 13 | ; -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/deploy_database.sh: -------------------------------------------------------------------------------- 1 | # 2 | #####Constants 3 | ERROR='\033[0;31m' # Red 4 | NC='\033[0m' # No Color 5 | 6 | #####Parameters 7 | logon_command="$1" 8 | if [ ! 
"$logon_command" ];then 9 | echo "${ERROR}Logon command not provided${NC}" 10 | exit 1 11 | fi 12 | 13 | bteq << EOF 14 | .logon $logon_command; 15 | .RUN FILE ./database_code/DDL_Databases.sql 16 | DATABASE SC_EXAMPLE_DEMO; 17 | GRANT ALL PRIVILEGES ON SC_EXAMPLE_DEMO 18 | TO DBC 19 | WITH GRANT OPTION; 20 | DATABASE SC_EXAMPLE_DEMO_2; 21 | GRANT ALL PRIVILEGES ON SC_EXAMPLE_DEMO_2 22 | TO DBC 23 | WITH GRANT OPTION; 24 | DATABASE SC_EXAMPLE_DEMO; 25 | 26 | .RUN FILE ./database_code/DDL_SF_Schemas.sql 27 | .RUN FILE ./database_code/DDL_Tables.sql 28 | .RUN FILE ./database_code/DDL_AlterTables.sql 29 | .RUN FILE ./database_code/DDL_Trigger.sql 30 | .RUN FILE ./database_code/DDL_Views.sql 31 | .RUN FILE ./database_code/DDL_CreateMacro.sql 32 | .RUN FILE ./database_code/DDL_JoinIndex.sql 33 | .COMPILE FILE = ./database_code/UPDATE_VEMPLOYEE.sql 34 | .COMPILE FILE = ./database_code/INSERT_VEMPLOYEE.sql 35 | .RUN FILE ./database_code/my_yyyymmdd_to_date2.sql 36 | EOF 37 | -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/drop_database.sh: -------------------------------------------------------------------------------- 1 | # 2 | #####Constants 3 | ERROR='\033[0;31m' # Red 4 | NC='\033[0m' # No Color 5 | 6 | 7 | #####Parameters 8 | logon_command="$1" 9 | if [ ! "$logon_command" ];then 10 | echo "${ERROR}Logon command not provided${NC}" 11 | exit 1 12 | fi 13 | 14 | bteq << EOF 15 | .logon $logon_command; 16 | -- 500 MB, final size should be < 300 MB 17 | 18 | DROP JOIN INDEX SC_EXAMPLE_DEMO_2.Employee_JI; 19 | DROP JOIN INDEX SC_EXAMPLE_DEMO_2.Employee_JI2; 20 | 21 | DELETE DATABASE SC_EXAMPLE_DEMO_2 ALL; 22 | MODIFY DATABASE SC_EXAMPLE_DEMO_2 AS DROP DEFAULT JOURNAL TABLE; 23 | 24 | DROP JOIN INDEX SC_EXAMPLE_DEMO.Employee_JI; 25 | DROP JOIN INDEX SC_EXAMPLE_DEMO.Employee_JI2; 26 | 27 | DELETE DATABASE SC_EXAMPLE_DEMO ALL; 28 | MODIFY DATABASE SC_EXAMPLE_DEMO AS DROP DEFAULT JOURNAL TABLE; 29 | 30 | 31 | DROP DATABASE SC_EXAMPLE_DEMO; 32 | 33 | DROP DATABASE SC_EXAMPLE_DEMO_2; 34 | EOF 35 | -------------------------------------------------------------------------------- /Tests/Teradata/teradata_extraction_test_base.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import subprocess 3 | from database_summary.database_source_code_summarizer import sumarize_database_source_code 4 | from database_summary.database_source_code_summary import DatabaseSourceCodeSummary 5 | from database_summary.database_source_code_summary import TopLevelObjectType 6 | 7 | class TeradataExtractionTestBase(unittest.TestCase): 8 | source_database_path = "./source_code/" 9 | extracted_database_path = "./extracted_code/" 10 | 11 | def __init__(self, *args, **kwargs): 12 | super(TeradataExtractionTestBase, self).__init__(*args, **kwargs) 13 | source_database_summary : DatabaseSourceCodeSummary = None 14 | extracted_database_summary : DatabaseSourceCodeSummary= None 15 | error_messages : "list[str]" = [] 16 | 17 | def set_up_class(self, database_source_folder_name: str, database_output_folder_name: str, extraction_parameters: "list[str]"): 18 | self.run_extraction_scripts("demo_database", "test_demo_database", extraction_parameters) 19 | self.source_database_summary = self.sumarize_source_code(database_source_folder_name) 20 | self.extracted_database_summary = self.sumarize_extracted_code(database_output_folder_name) 21 | self.error_messages = [] 22 | 23 | def 
sumarize_source_code(database_folder_name: str) -> DatabaseSourceCodeSummary: 24 | result = sumarize_database_source_code("./source_code/"+database_folder_name) 25 | return result 26 | 27 | def validate_top_level_objects_quantity(self, type: TopLevelObjectType, expected_amount: int) -> DatabaseSourceCodeSummary: 28 | actual_amount = self.source_database_summary.get_top_level_object_to_int_map()[type] 29 | try: 30 | self.assertEqual(actual_amount, expected_amount) 31 | except AssertionError: 32 | self.error_messages += [f"Expected {expected_amount} {type.name.lower() + ('s' if expected_amount > 1 else '')} in source code, but {actual_amount} found"] 33 | 34 | actual_amount = self.extracted_database_summary.get_top_level_object_to_int_map()[type] 35 | try: 36 | self.assertEqual(actual_amount, expected_amount) 37 | except AssertionError: 38 | self.error_messages += [f"Expected {expected_amount} {type.name.lower() + ('s' if expected_amount > 1 else '')} in extracted code, but {actual_amount} found"] 39 | 40 | def validate_extracted_files_quantity(self, actual_amount, expected_amount: int) -> DatabaseSourceCodeSummary: 41 | try: 42 | self.assertEqual(actual_amount, expected_amount) 43 | except AssertionError as e: 44 | error_messages += [f"Expected {expected_amount} file{'s' if expected_amount > 1 else ''} in extracted files, but {actual_amount} found"] 45 | 46 | def assert_no_errors_messages(self): 47 | if len(self.error_messages) > 0: 48 | error_message = '\n'.join(self.error_messages) 49 | raise AssertionError(error_message) 50 | 51 | 52 | def sumarize_extracted_code(database_folder_name: str) -> DatabaseSourceCodeSummary: 53 | return sumarize_database_source_code("./extracted_code/"+database_folder_name) 54 | 55 | def run_extraction_scripts(database_folder_name: str, extraction_output_folder_name: str, extraction_parameters: "list[str]") -> None: 56 | subprocess.call(['sh', './execute_scripts.sh', database_folder_name, extraction_output_folder_name] + extraction_parameters, cwd='./scripts') 57 | 58 | def remove_extraction_results(database_folder_name: str) -> None: 59 | subprocess.call(['rm', '-r', f'extracted_code/{database_folder_name}' ]) 60 | 61 | 62 | 63 | -------------------------------------------------------------------------------- /Tests/Teradata/test_demo_database.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from teradata_extraction_test_base import TeradataExtractionTestBase 3 | from database_summary.top_level_object_type import TopLevelObjectType 4 | 5 | class TestDemoDatabase(TeradataExtractionTestBase): 6 | 7 | @classmethod 8 | def setUpClass(cls): 9 | extraction_parameters = ["include_databases=(UPPER(T1.DATABASENAME) IN ('SC_EXAMPLE_DEMO', 'SC_EXAMPLE_DEMO_2') )", 10 | "exclude_databases=(UPPER(T1.DATABASENAME) NOT IN ('SYS_CALENDAR','ALL','CONSOLE','CRASHDUMPS','DBC','DBCMANAGER','DBCMNGR','DEFAULT','EXTERNAL_AP','EXTUSER','LOCKLOGSHREDDER','PDCRADM','PDCRDATA','PDCRINFO','PUBLIC','SQLJ','SYSADMIN','SYSBAR','SYSJDBC','SYSLIB','SYSSPATIAL','SYSTEMFE','SYSUDTLIB','SYSUIF','TD_SERVER_DB','TD_SYSFNLIB','TD_SYSFNLIB','TD_SYSGPL','TD_SYSXML','TDMAPS', 'TDPUSER','TDQCD','TDSTATS','TDWM','VIEWPOINT','PDCRSTG'))"] 11 | cls.set_up_class(cls, "demo_database", "test_demo_database", extraction_parameters) 12 | 13 | def test_database_files(self): 14 | self.validate_extracted_files_quantity(self.extracted_database_summary.get_count_of_files(), 9) 15 | self.validate_top_level_objects_quantity(TopLevelObjectType.TABLE, 9) 16 | 
self.validate_top_level_objects_quantity(TopLevelObjectType.DATABASE, 2) 17 | self.validate_top_level_objects_quantity(TopLevelObjectType.PROCEDURE, 2) 18 | self.validate_top_level_objects_quantity(TopLevelObjectType.JOIN_INDEX, 2) 19 | self.validate_top_level_objects_quantity(TopLevelObjectType.MACRO, 2) 20 | self.validate_top_level_objects_quantity(TopLevelObjectType.FUNCTION, 1) 21 | self.validate_top_level_objects_quantity(TopLevelObjectType.TRIGGER, 1) 22 | self.validate_top_level_objects_quantity(TopLevelObjectType.VIEW, 1) 23 | 24 | self.assert_no_errors_messages() 25 | 26 | 27 | if __name__ == '__main__': 28 | unittest.main() -------------------------------------------------------------------------------- /Vertica/DocumentationImages/BinNewTerminal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/BinNewTerminal.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/ConnectToServer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/ConnectToServer.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/ContainerRunning.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/ContainerRunning.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/CreateTables.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/CreateTables.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/CreateViews.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/CreateViews.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/DockerExtensions.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/DockerExtensions.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/DockerRunning.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/DockerRunning.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/ExampleScripts.PNG: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/ExampleScripts.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/FoldeStructure.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/FoldeStructure.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/Folder.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/Folder.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/Launchjson.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/Launchjson.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/PipInstall_sqlparse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/PipInstall_sqlparse.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/PipInstallsqlparse.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/PipInstallsqlparse.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/PythonDDLRunSucessfully.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/PythonDDLRunSucessfully.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/PythonScripts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/PythonScripts.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/PythonVersion.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/PythonVersion.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/RunDockerVertica.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/RunDockerVertica.png 
-------------------------------------------------------------------------------- /Vertica/DocumentationImages/RunPythonCode.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/RunPythonCode.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/RunPythonCode02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/RunPythonCode02.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/TempFileCreated.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/TempFileCreated.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/TempFolder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/TempFolder.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/VerticaClientDriversLinux.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/VerticaClientDriversLinux.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/VerticaTarFile.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/Vertica/DocumentationImages/VerticaTarFile.png -------------------------------------------------------------------------------- /Vertica/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
8 | 9 | 10 | -------------------------------------------------------------------------------- /Vertica/README.md: -------------------------------------------------------------------------------- 1 | # Vertica DDL Migration Utility 2 | 3 | ## Overview 4 | 5 | In the absence of a third-party tool to perform migration of Vertica DDL to Snowflake, this utility can be used. 6 | 7 | ## Repository Structure 8 | 9 | This repository has the following structure: 10 | 11 | + Project Folder 12 | + .vscode 13 | + opt/vertica 14 | + bin 15 | + vsql 16 | + vsql32 17 | + DocumentationImages 18 | + Scripts 19 | + TEMP 20 | 21 | 22 | ## 1 Vertica To Snowflake DDL Migration 23 | 24 | In the absence of a third-party tool to perform migration of Vertica SQL to Snowflake, this utility can be used. 25 | 26 | ### Configuration 27 | 28 | #### Python dependencies 29 | 30 | * Vertica Python Connector 31 | * Pandas 32 | * sqlparse 33 | 34 | To install them, run: 35 | ``` 36 | pip install -r requirements.txt 37 | ``` 38 | 39 | #### Python scripts 40 | 41 | The scripts are located in the `./Scripts` folder. 42 | Included with the scripts are sample configuration files, which are described in the next sections. 43 | 44 | ![Python Scripts](DocumentationImages/PythonScripts.png) 45 | 46 | 47 | The python scripts are: 48 | 49 | * vertMain.py This is the script that is called from the command line 50 | * SFConfig.py Class file for SF configuration 51 | * VerticaConfig.py Class file for Vertica configuration 52 | * VerticaDBCalls.py Class file containing Vertica DB methods 53 | * SFConvert.py Class file containing the DDL migration and Snowflake DB methods 54 | 55 | Sample configuration files: 56 | 57 | * sfConf.txt Details on snowflake config 58 | * verticaConf.txt Details on vertica config. 59 | 60 | #### Vertica Config 61 | 62 | The following is an example of the Vertica config file (Example: `./verticaConf.txt`) 63 | 64 | ``` 65 | [vertica] 66 | host=localhost 67 | port=5433 68 | user=dbadmin 69 | database=docker 70 | ssl=False 71 | schema=store 72 | ``` 73 | 74 | #### Snowflake Config 75 | 76 | The following is an example of the Snowflake config file (Example: `./sfConf.txt`) 77 | 78 | 79 | ``` 80 | [snowflake] 81 | account=xxxxxx 82 | user=xxxxxxx 83 | password=XXXXXXXXXXXXX 84 | role=verticadb_role 85 | # schema_mappings map the vertica schema to the sf schema 86 | # Format 87 | # vertica_schema=snowflake_db.snowflake_schema 88 | [schema_mappings] 89 | store=store 90 | [inview_mappings] 91 | store=store 92 | # This section determines what to do with the DDL 93 | # Valid options are 94 | # ddlDisplay=[True|False] If True, the DDL is written to the log 95 | # ddlSave= If present, write the ddl into the specified folder 96 | # ddlExecute=[True|False] If True, executes the ddl in snowflake. If not present ddlExecute will be false 97 | # dropExisting=[True|False] If True, existing table will be dropped. 98 | # If False, the table will not be dropped but a warning will be given saying the table exists 99 | # processViews=[True|False] If True, migrate the views from the vertica instance to snowflake. 100 | # be sure to utilise [inview_mappings] to ensure the view will compile ok 101 | # If false, the views will not be migrated. 102 | [execution] 103 | ddlDisplay=True 104 | ddlSave=TEMP/VerticaDDL 105 | ddlExecute=True 106 | dropExisting=False 107 | processViews=True 108 | ``` 109 | 110 | #### Create the TEMP/VerticaDDL folder 111 | 112 | Create the folder `./TEMP/VerticaDDL` in the project.
113 | 114 | ![TEMP\VerticaDDL](DocumentationImages/TempFolder.png) 115 | 116 | #### Update the file launch.json 117 | 118 | If using VS Code, update the file `./.vscode/launch.json` by adding the following configuration: 119 | ``` 120 | { 121 | "name": "Vertica main", 122 | "type": "python", 123 | "request": "launch", 124 | "program": "${file}", 125 | "console": "integratedTerminal", 126 | "justMyCode": false, 127 | "envFile": "${workspaceFolder}/.env", 128 | "args": ["-s","./sfConf.txt","-v","./verticaConf.txt"] 129 | } 130 | ``` 131 | 132 | ![Launchjson](DocumentationImages/Launchjson.png) 133 | 134 | #### Run the script to set all the environment variables 135 | 136 | Run the following command from the command line: 137 | 138 | ``` 139 | export $(xargs < .env) 140 | ``` 141 | 142 | #### Add the Docker extension to the project 143 | 144 | ![Docker-extension](DocumentationImages/DockerExtensions.PNG) 145 | 146 | #### Install the Vertica client drivers 147 | 148 | Follow the steps below, or just run the following command in the terminal. 149 | 150 | ``` 151 | ./install-vertica.sh 152 | ``` 153 | 154 | 155 | Download the driver for Linux 9.1.x: 156 | 157 | https://www.vertica.com/client-drivers/ 158 | 159 | 160 | ![Drivers](DocumentationImages/VerticaClientDriversLinux.png) 161 | 162 | Once the tar file is downloaded, copy it into the root directory of the project. 163 | 164 | ![VerticaTarFile](DocumentationImages/VerticaTarFile.png) 165 | 166 | #### Unzip the file 167 | 168 | ``` 169 | gzip -d vertica-client-9.1.1-0.x86_64.tar.gz 170 | ``` 171 | 172 | #### Untar the file 173 | ``` 174 | tar -xvf vertica-client-9.1.1-0.x86_64.tar 175 | ``` 176 | 177 | Once the archive has been extracted, right-click the newly created `./opt/vertica/bin` directory and open a new terminal there. 178 | 179 | ![BinNewTerminal](DocumentationImages/BinNewTerminal.png) 180 | 181 | #### Start the server 182 | 183 | Run the following command to start the Vertica server in Docker. 184 | 185 | ``` 186 | docker run -p 5433:5433 dataplatform/docker-vertica 187 | ``` 188 | ![RunDockerVertica](DocumentationImages/RunDockerVertica.png) 189 | 190 | #### Confirm the server is running 191 | 192 | Confirm the Docker container is running. 193 | 194 | ![ContainerRunning](DocumentationImages/ContainerRunning.PNG) 195 | 196 | #### Connect to the database 197 | 198 | Run the following command in the new terminal to connect to the database. 199 | 200 | ``` 201 | ./vsql -hlocalhost -Udbadmin 202 | ``` 203 | 204 | ![ConnectToServer](DocumentationImages/ConnectToServer.png) 205 | 206 | #### Create the schema 207 | 208 | Run the following command to create the schema (example). 209 | 210 | ``` 211 | CREATE SCHEMA IF NOT EXISTS STORE; 212 | ``` 213 | 214 | #### Create tables 215 | 216 | Run the following command to create the tables (example). 217 | 218 | 219 | ``` 220 | CREATE OR REPLACE TABLE STORE.MYTABLE_1 ( 221 | MYINT INTEGER NOT NULL PRIMARY KEY, 222 | MYINTEGER INTEGER, 223 | MYBIGINT BIGINT 224 | ); 225 | 226 | CREATE OR REPLACE TABLE STORE.MYTABLE_2 ( 227 | MYINT INTEGER NOT NULL PRIMARY KEY, 228 | MYINTEGER INTEGER, 229 | MYBIGINT BIGINT 230 | ); 231 | ``` 232 | 233 | ![CreateTables](DocumentationImages/CreateTables.png) 234 | 235 | #### Create views 236 | 237 | Run the following command to create the views (example).
238 | 239 | ``` 240 | CREATE OR REPLACE VIEW STORE.MYVIEW1 AS SELECT MYINT, 241 | MYINTEGER, 242 | MYBIGINT 243 | FROM STORE.MYTABLE_1; 244 | 245 | CREATE OR REPLACE VIEW STORE.MYVIEW2 AS 246 | SELECT MYINT, 247 | MYINTEGER, 248 | MYBIGINT 249 | FROM STORE.MYTABLE_2; 250 | ``` 251 | 252 | ![CreateViews](DocumentationImages/CreateViews.png) 253 | 254 | ### Execution 255 | 256 | Run the Python script `./Scripts/vertMain.py`: 257 | 258 | ![RunPythonCode](DocumentationImages/RunPythonCode.png) 259 | 260 | Or debug the code: 261 | 262 | ![DebugCode](DocumentationImages/RunPythonCode02.png) 263 | 264 | ### Successful run 265 | 266 | Once the script runs successfully, it creates the SQL scripts in the previously created folder `./TEMP/VerticaDDL`. 267 | 268 | Example: 269 | ``` 270 | Create or Replace View store.MYVIEW2 AS 271 | SELECT MYTABLE_2.MYINT, 272 | MYTABLE_2.MYINTEGER, 273 | MYTABLE_2.MYBIGINT 274 | FROM store.MYTABLE_2 275 | ``` 276 | 277 | ![TempFileCreated](DocumentationImages/TempFileCreated.png)
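If `ddlExecute` is set to `False` and the DDL is only saved, the generated files can be applied later with the Snowflake Python connector. The following is an illustrative sketch (it is not part of the utility, and the connection values are placeholders):

```
# Apply the generated DDL files in TEMP/VerticaDDL to Snowflake.
# Replace the placeholder connection values with your own account details.
import glob
import snowflake.connector

conn = snowflake.connector.connect(
    account="xxxxxx",          # placeholder
    user="xxxxxxx",            # placeholder
    password="XXXXXXXXXXXXX",  # placeholder
    role="verticadb_role",
)
try:
    for path in sorted(glob.glob("TEMP/VerticaDDL/*.sql")):
        with open(path) as ddl_file:
            ddl = ddl_file.read()
        print("Executing " + path)
        conn.cursor().execute(ddl)  # each generated file holds a single statement
finally:
    conn.close()
```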
-------------------------------------------------------------------------------- /Vertica/Scripts/SFConfig.py: -------------------------------------------------------------------------------- 1 | import os 2 | import os.path 3 | 4 | # Class file for Snowflake configuration 5 | #This class has all the necessary variables to be able to connect to the Snowflake database. 6 | #Snowflake Account 7 | #User 8 | #Password 9 | #Snowflake warehouse 10 | #Schema 11 | #Role 12 | #SSL 13 | 14 | 15 | 16 | class SFConfig: 17 | 18 | def __init__(self, sfAccount = "", sfUser = "", sfPassword = "", sfWarehouse = "", sfRole= ""): 19 | self.sfAccount = sfAccount 20 | self.sfUser = sfUser 21 | self.sfPassword = sfPassword 22 | self.sfWarehouse = sfWarehouse 23 | self.sfRole = sfRole 24 | self.schemaMapping = ["",""] 25 | self.execution = ["",""] 26 | self.inviewMappings = ["",""] 27 | 28 | def readConfig(self, configFile): 29 | isSnowflakeFile = False 30 | mappingSpecified = False 31 | executionSpecified = False 32 | inviewMappingSpecified = False 33 | 34 | with open(configFile) as f: 35 | for line in f: 36 | line = line.strip() 37 | # Ignore blank lines and comments 38 | if line == "" or line[0] == "#": 39 | continue 40 | 41 | if line.upper() == "[SNOWFLAKE]": 42 | isSnowflakeFile = True 43 | elif line.upper() == "[SCHEMA_MAPPINGS]": 44 | mappingSpecified = True 45 | inviewMappingSpecified = False 46 | executionSpecified = False 47 | self.schemaMapping.clear() 48 | elif line.upper() == "[INVIEW_MAPPINGS]": 49 | inviewMappingSpecified = True 50 | mappingSpecified = False 51 | executionSpecified = False 52 | self.inviewMappings.clear() 53 | elif line.upper() == "[EXECUTION]": 54 | executionSpecified = True 55 | mappingSpecified = False 56 | inviewMappingSpecified = False 57 | self.execution.clear() 58 | 59 | else: 60 | lineItems = line.split("=") 61 | if len(lineItems) != 2: 62 | print("Invalid Config Line: " + line) 63 | else: 64 | if mappingSpecified is True: 65 | self.schemaMapping.append([lineItems[0], lineItems[1]]) 66 | continue 67 | 68 | if executionSpecified is True: 69 | self.execution.append([lineItems[0], lineItems[1]]) 70 | continue 71 | 72 | if inviewMappingSpecified is True: 73 | self.inviewMappings.append([lineItems[0], lineItems[1]]) 74 | continue 75 | 76 | if lineItems[0].upper() == "ACCOUNT": 77 | self.sfAccount = lineItems[1] 78 | elif lineItems[0].upper() == "USER": 79 | self.sfUser = lineItems[1] 80 | elif lineItems[0].upper() == "PASSWORD": 81 | self.sfPassword = lineItems[1] 82 | elif lineItems[0].upper() == "ROLE": 83 | self.sfRole = lineItems[1] 84 | elif lineItems[0].upper() == "WAREHOUSE": 85 | self.sfWarehouse = lineItems[1] 86 | elif lineItems[0].upper() == "PROCESSVIEWS": 87 | if lineItems[1].upper() == "TRUE": 88 | self.processViews = True 89 | else: 90 | self.processViews = False 91 | 92 | def validate(self): 93 | # TODO - Add validation 94 | 95 | return True -------------------------------------------------------------------------------- /Vertica/Scripts/SFConvert.py: -------------------------------------------------------------------------------- 1 | import os, sys 2 | import os.path 3 | import vertica_python 4 | import snowflake.connector 5 | import logging 6 | 7 | class SFConvert: 8 | 9 | def __init__(self, logger): 10 | self.logger = logger 11 | self.binary = ["VARBINARY", "LONG VARBINARY", "BYTEA", "RAW", "BINARY"] 12 | self.char = ["CHAR", "LONG VARCHAR", "VARCHAR"] 13 | self.timestamp = ["DATETIME", "SMALLDATETIME"] 14 | self.interval = ["INTERVAL", "INTERVAL DAY TO SECOND", "INTERVAL YEAR TO MONTH"] 15 | self.numberic = ["INT8", "TINYINT"] 16 | 17 | def vertTableToSFTable(self, sfConfig, tableRow, tableColumns): 18 | 19 | # Defaults 20 | ddlDrop = "FALSE" 21 | ddlExecute = "FALSE" 22 | 23 | # Find the vertica table schema in the schema mapping to determine the location (db, schema) to create the table 24 | mappingFound = False 25 | for mapping in sfConfig.schemaMapping: 26 | if mapping[0].upper() == tableRow['table_schema'].upper(): 27 | mappingFound = True 28 | snowflakeDbSchema = mapping[1] 29 | 30 | # Abort if mapping is not found 31 | if mappingFound is False: 32 | self.logger.error("Unable to find mapping for Vertica schema: " + tableRow['table_schema']) 33 | sys.exit(99) 34 | 35 | # Obtain execution model and drop existing 36 | for exec in sfConfig.execution: 37 | if exec[0].upper() == "DDLEXECUTE": 38 | ddlExecute = exec[1] 39 | elif exec[0].upper() == "DDLSAVE": 40 | ddlSave = exec[1] 41 | # Check the folder exists 42 | if not os.path.isdir(ddlSave): 43 | self.logger.error("Save DDL path (" + ddlSave + ") does not exist on your workstation") 44 | sys.exit(99) 45 | elif exec[0].upper() == "DROPEXISTING": 46 | ddlDrop = exec[1] 47 | 48 | # Construct the table DDL 49 | if ddlDrop.upper() == "TRUE": 50 | sfTable = "Create or Replace Table " 51 | else: 52 | sfTable = "Create Table " 53 | sfTable += snowflakeDbSchema + "." + tableRow['table_name'] + "\n" 54 | sfTable += "(\n" 55 | 56 | boolFirstCol = True 57 | for colIdx, col in tableColumns.iterrows(): 58 | if boolFirstCol is True: 59 | sfTable += " " + col['column_name'] 60 | boolFirstCol = False 61 | else: 62 | sfTable += "," + col['column_name'] 63 | 64 | if col['data_type'].find("(") >-1: 65 | rawDataType = col['data_type'][0:col['data_type'].find("(")] 66 | typeLen = col['data_type'][col['data_type'].find("(") + 1:col['data_type'].find(")")] 67 | else: 68 | rawDataType = col['data_type'] 69 | typeLen = "1" 70 | 71 | 72 | # Check datatype 73 | if rawDataType.upper() in self.binary: 74 | sfTable += " BINARY " + "(" + typeLen + ")" 75 | elif rawDataType.upper() in self.char: 76 | sfTable += " VARCHAR " + "(" + typeLen + ")" 77 | elif rawDataType.upper() in self.timestamp: 78 | sfTable += " TIMESTAMP " 79 | elif rawDataType.upper() == "TIME WITH TIMEZONE": 80 | sfTable += " TIME " 81 | self.logger.warn("Table: " + tableRow['table_schema'] + "."
+ tableRow['table_name'] + " TIME WITH TIMEZONE migrated to TIME") 82 | elif rawDataType.upper() == "TIMESTAMP": 83 | sfTable += " TIMESTAMP_NTZ " 84 | elif rawDataType.upper() == "TIMESTAMP WITH TIMEZONE": 85 | sfTable += " TIMESTAMP_TZ " 86 | elif rawDataType.upper() in self.interval: 87 | sfTable += " INT " 88 | self.logger.warn("Table: " + tableRow['table_schema'] + "." + tableRow['table_name'] + " INTERVAL migrated to INT") 89 | elif rawDataType.upper() in self.numberic: 90 | sfTable += " NUMBER " 91 | elif rawDataType.upper() == "MONEY": 92 | sfTable += " NUMBER (18,4) " 93 | elif rawDataType.upper() == "GEOMETRY": 94 | sfTable += " BINARY " 95 | self.logger.warn("Table: " + tableRow['table_schema'] + "." + tableRow['table_name'] + " GEOMETRY migrated to BINARY ") 96 | elif rawDataType.upper() == "GEOGRAPHY": 97 | sfTable += " BINARY " 98 | self.logger.warn("Table: " + tableRow['table_schema'] + "." + tableRow['table_name'] + " GEOGRAPHY migrated to BINARY ") 99 | elif rawDataType.upper() == "UUID": 100 | sfTable += " INTEGER " 101 | self.logger.warn("Table: " + tableRow['table_schema'] + "." + tableRow['table_name'] + " Requires Identity Column ") 102 | else: 103 | sfTable += " " + col['data_type'] 104 | 105 | # Add not null if needed 106 | if col['is_nullable'].upper() == "FALSE": 107 | sfTable += " NOT NULL" 108 | 109 | sfTable += "\n" 110 | 111 | sfTable += ")\n" 112 | 113 | 114 | 115 | return sfTable 116 | 117 | 118 | def buildView(self, sfConfig, tableSchema, viewName, viewDefinition): 119 | 120 | # Find the vertica table schema in the schema mapping to determine the location (db, schema) to create the table 121 | mappingFound = False 122 | for mapping in sfConfig.schemaMapping: 123 | if mapping[0].upper() == tableSchema.upper(): 124 | mappingFound = True 125 | snowflakeDbSchema = mapping[1] 126 | 127 | sfSQL = "Create or Replace View " + snowflakeDbSchema + "." + viewName + " AS " + viewDefinition 128 | 129 | # This view will reference tables that exist in Vertica. 130 | # The mappings can be used to modify these references 131 | for mapping in sfConfig.inviewMappings: 132 | sfSQL = sfSQL.replace(mapping[0] + ".", mapping[1] + ".") 133 | 134 | return sfSQL 135 | 136 | def executeSQL(self, dbConn, ddlString): 137 | 138 | try: 139 | self.logger.info("execute SQL Start") 140 | sfCursor = dbConn.cursor().execute(ddlString) 141 | sfCursor.close() 142 | self.logger.info("Success! ") 143 | 144 | except snowflake.connector.errors.ProgrammingError as sfExp: 145 | errorString = format("Error No: " + str(sfExp.errno) + "\n" + str(sfExp.sqlstate) + "\n" + str(sfExp.msg)) 146 | self.logger.error(errorString) 147 | -------------------------------------------------------------------------------- /Vertica/Scripts/SQL_Convert/sqls/vmart_query_01.sql: -------------------------------------------------------------------------------- 1 | SELECT fat_content 2 | FROM ( 3 | SELECT DISTINCT fat_content 4 | FROM product_dimension 5 | WHERE department_description 6 | IN ('Dairy') ) AS food 7 | ORDER BY fat_content 8 | LIMIT 5; -------------------------------------------------------------------------------- /Vertica/Scripts/VerticaConfig.py: -------------------------------------------------------------------------------- 1 | import os 2 | import os.path 3 | 4 | #Class file for Vertica configuration 5 | #This class has all the necessary variables to be able to connect to the vertica database. 
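# Illustrative sketch of how these settings are consumed: vertMain.py packs them into a
# conn_info dictionary and calls vertica_python.connect(**conn_info), e.g.
#   conn_info = {'host': cfg.host, 'port': cfg.port, 'user': cfg.user,
#                'password': cfg.password, 'database': cfg.database, 'ssl': cfg.ssl}
#   conn = vertica_python.connect(**conn_info)
# (cfg stands for a VerticaConfig instance; the name is illustrative.)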
6 | #HOST 7 | #PORT 8 | #USER 9 | #PASSWORD 10 | #DATABSE 11 | #SHEMA 12 | #SSL 13 | 14 | class VerticaConfig: 15 | 16 | def __init__(self, host = "", port = "", user = "", password = "", database = "", ssl = False, schema = ""): 17 | self.host = host 18 | self.port = port 19 | self.user = user 20 | self.password = password 21 | self.database = database 22 | self.ssl = ssl 23 | self.schema = schema 24 | 25 | def readConfig(self, configFile): 26 | isVerticaFile = False 27 | 28 | with open(configFile) as f: 29 | for line in f: 30 | line = line.strip() 31 | if line.upper() == "[VERTICA]": 32 | isVerticaFile = True 33 | else: 34 | lineItems = line.split("=") 35 | if len(lineItems) != 2: 36 | print("Invalid Config Line: " + line) 37 | break 38 | 39 | if lineItems[0].upper() == "HOST": 40 | self.host = lineItems[1] 41 | elif lineItems[0].upper() == "PORT": 42 | self.port = lineItems[1] 43 | elif lineItems[0].upper() == "USER": 44 | self.user = lineItems[1] 45 | elif lineItems[0].upper() == "PASSWORD": 46 | self.password = lineItems[1] 47 | elif lineItems[0].upper() == "DATABASE": 48 | self.database = lineItems[1] 49 | elif lineItems[0].upper() == "SCHEMA": 50 | self.schema = lineItems[1] 51 | elif lineItems[0].upper() == "SSL": 52 | if lineItems[1].upper() == "False": 53 | self.ssl = False 54 | else: 55 | self.ssl = True 56 | 57 | 58 | 59 | def validate(self): 60 | #TODO - Add validation 61 | 62 | return True 63 | 64 | 65 | 66 | -------------------------------------------------------------------------------- /Vertica/Scripts/VerticaDBCalls.py: -------------------------------------------------------------------------------- 1 | import os 2 | import os.path 3 | import vertica_python 4 | import pandas as pd 5 | # Class file to get the tables and views from Vertica 6 | #This class execute the queries to get the views and tables from vertica 7 | #According to the number of results of views and tables the program will convert the Vertica objects to Snowflake 8 | 9 | 10 | class VerticaDBCalls: 11 | 12 | def __init__(self, dbConnection): 13 | self.dbConnection = dbConnection 14 | 15 | def getTablesInSchema(self, schema): 16 | sqlString = "select table_schema" \ 17 | ",table_name " + \ 18 | ",case " + \ 19 | " when is_temp_table is TRUE then 'TRUE' " + \ 20 | " else 'FALSE' " + \ 21 | "end as is_temp_table " + \ 22 | ",owner_name " + \ 23 | "from v_catalog.tables " + \ 24 | "where upper(table_schema) = '" + schema.upper() + "'" #+ \ 25 | #" and upper(TABLE_NAME) = 'MYTABLE1'" 26 | 27 | 28 | sqlQuery = pd.read_sql_query(sqlString, self.dbConnection) 29 | df = pd.DataFrame(sqlQuery, columns=['table_schema', 'table_name','is_temp_table','owner_name']) 30 | 31 | return df 32 | 33 | def getColumnsInTable(self, schema, table): 34 | sqlString = "select table_name " + \ 35 | ",column_name " + \ 36 | ",data_type " + \ 37 | ",data_type_length " + \ 38 | ",character_maximum_length " + \ 39 | ",numeric_precision " + \ 40 | ",numeric_scale " + \ 41 | ",datetime_precision " + \ 42 | ",interval_precision " + \ 43 | ",ordinal_position " + \ 44 | ",case " + \ 45 | " When is_nullable is TRUE Then 'TRUE' " + \ 46 | " Else 'FALSE' " + \ 47 | "end as is_nullable " + \ 48 | ",column_default " + \ 49 | ",column_set_using " + \ 50 | ",case " + \ 51 | " When is_identity IS TRUE THEN'TRUE' " + \ 52 | " Else 'FALSE' " + \ 53 | "end as is_identity " + \ 54 | "from v_catalog.columns " + \ 55 | "where upper(table_schema) = '" + schema.upper() + "' " + \ 56 | "and upper(table_name) = '" + table.upper() + "' " + \ 57 | "order by 
ordinal_position " 58 | sqlQuery = pd.read_sql_query(sqlString, self.dbConnection) 59 | df = pd.DataFrame(sqlQuery, columns=['table_name', 'column_name', 'data_type', 'data_type_length', 60 | 'character_maximum_length','numeric_precision','numeric_scale', 61 | 'datetime_precision','interval_precision','ordinal_position', 62 | 'is_nullable','column_default','is_identity']) 63 | 64 | 65 | 66 | return df 67 | 68 | def getViewsInSchema(self, schema): 69 | sqlString = "select table_schema" \ 70 | ",table_name " + \ 71 | ", view_definition " + \ 72 | ",owner_name " + \ 73 | "from v_catalog.views " + \ 74 | "where upper(table_schema) = '" + schema.upper() + "'" #+ \ 75 | 76 | 77 | sqlQuery = pd.read_sql_query(sqlString, self.dbConnection) 78 | df = pd.DataFrame(sqlQuery, columns=['table_schema', 'table_name','view_definition','owner_name']) 79 | 80 | return df 81 | -------------------------------------------------------------------------------- /Vertica/Scripts/vertMain.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import getopt 3 | import os 4 | import os.path 5 | import vertica_python 6 | import snowflake.connector 7 | import time 8 | import logging 9 | import sqlparse 10 | from SFConfig import * 11 | from VerticaConfig import * 12 | from VerticaDBCalls import * 13 | from SFConvert import * 14 | 15 | def main(argv): 16 | 17 | sfSQL="" 18 | # Configure logging 19 | logger = logging.getLogger() 20 | logger.setLevel(logging.INFO) 21 | fmt = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s') 22 | ch = logging.StreamHandler() 23 | ch.setFormatter(fmt) 24 | logger.addHandler(ch) 25 | 26 | logger.info("**************************************") 27 | logger.info("* Vertica to Snowflake DDL Converter *") 28 | logger.info("**************************************") 29 | 30 | logger.info("Gathering connection details - Start") 31 | snowflakeConnFile = "" 32 | verticaConnFile = "" 33 | sfConfig = SFConfig() 34 | verticaConfig = VerticaConfig() 35 | try: 36 | opts, args = getopt.getopt(argv, "s:v:", ["snowflake=", "vertica="]) 37 | except getopt.GetOptError as e: 38 | print("Invalid Parameter(s): " + str(e)) 39 | sys.exit(2) 40 | 41 | for opt, arg in opts: 42 | if opt == "-s": 43 | snowflakeConnFile = arg 44 | elif opt == "-v": 45 | verticaConnFile = arg 46 | 47 | 48 | # Both arguments provided. 
Check the files exist 49 | if not os.path.isfile(snowflakeConnFile): 50 | print ("Snowflake config file: " + snowflakeConnFile + " does not exist") 51 | sys.exit(2) 52 | else: 53 | sfConfig.readConfig(snowflakeConnFile) 54 | 55 | 56 | if not os.path.isfile(verticaConnFile): 57 | print("Vertica config file: " + verticaConnFile + " does not exist") 58 | sys.exit(2) 59 | else: 60 | verticaConfig.readConfig(verticaConnFile) 61 | 62 | if not sfConfig.validate(): 63 | print("Error in Snowflake config file") 64 | sys.exit(2) 65 | 66 | if not verticaConfig.validate(): 67 | print("Error in Vertica config file") 68 | sys.exit(2) 69 | 70 | logger.info("Gathering connection details - Complete") 71 | 72 | # All is well, start the process 73 | # Process is 74 | # - Connect to SF and Vertica 75 | # - For Each vertica schema and table 76 | # Extract Vertica table definition 77 | # Build SF DDL 78 | # submit snowflake DDL to SF DB 79 | 80 | # Connect to SF 81 | try: 82 | logger.info("Connecting to Snowflake") 83 | sfConn = snowflake.connector.connect( 84 | user=sfConfig.sfUser, 85 | password=sfConfig.sfPassword, 86 | account=sfConfig.sfAccount, 87 | warehouse=sfConfig.sfWarehouse, 88 | role=sfConfig.sfRole 89 | ) 90 | logger.info("Connecting to Snowflake - OK") 91 | except Exception as e: 92 | print(e) 93 | sys.exit(3) 94 | 95 | # Connect to Vertica 96 | 97 | try: 98 | logger.info("Connecting to Vertica") 99 | conn_info = {'host': verticaConfig.host, 100 | 'port': verticaConfig.port, 101 | 'user': verticaConfig.user, 102 | 'password': verticaConfig.password, 103 | 'database': verticaConfig.database, 104 | 'ssl': False} 105 | 106 | connVert = vertica_python.connect(**conn_info) 107 | logger.info("Connecting to Vertica - OK") 108 | 109 | # Get list of vertica Tables 110 | vertDBCalls = VerticaDBCalls(connVert) 111 | sfConvert = SFConvert(logger) 112 | 113 | # Get the options 114 | ddlExecute = "FALSE" 115 | ddlSave = "FALSE" 116 | processViews = False 117 | for sfOpts in sfConfig.execution: 118 | if sfOpts[0].upper() == "DDLEXECUTE": 119 | ddlExecute = sfOpts[1] 120 | elif sfOpts[0].upper() == "PROCESSVIEWS": 121 | if sfOpts[1].upper() == "TRUE": 122 | processViews = True 123 | else: 124 | processViews = False 125 | elif sfOpts[0].upper() == "DDLSAVE": 126 | ddlSave = sfOpts[1] 127 | # Check the folder exists 128 | if not os.path.isdir(ddlSave): 129 | logger.error("Save DDL path (" + ddlSave + ") does not exist on your workstation") 130 | sys.exit(99) 131 | 132 | 133 | dfTableRows = vertDBCalls.getTablesInSchema(verticaConfig.schema) 134 | 135 | for index, row in dfTableRows.iterrows(): 136 | logger.info("Processing table: " + row['table_schema'] + "." + row['table_name']) 137 | 138 | tableCols = vertDBCalls.getColumnsInTable(verticaConfig.schema, row['table_name'] ) 139 | sfSQL = sfConvert.vertTableToSFTable(sfConfig, row, tableCols) 140 | 141 | if ddlExecute.upper() == "TRUE": 142 | sfConvert.executeSQL(sfConn, sfSQL) 143 | 144 | if not ddlSave.upper() == "FALSE": 145 | ddlFn = ddlSave + "/" + row['table_schema'] + "_" + row['table_name'] + ".sql" 146 | ddlFile = open(ddlFn, "w") 147 | ddlFile.write(sfSQL) 148 | ddlFile.close() # to change file access modes 149 | 150 | if processViews is True: 151 | dfViewRows = vertDBCalls.getViewsInSchema(verticaConfig.schema) 152 | for index, row in dfViewRows.iterrows(): 153 | logger.info("Processing view: " + row['table_schema'] + "."
+ row['table_name']) 154 | 155 | sfSQL = sfConvert.buildView(sfConfig, row['table_schema'], row['table_name'], row['view_definition']) 156 | 157 | if ddlExecute.upper() == "TRUE": 158 | sfConvert.executeSQL(sfConn, sfSQL) 159 | 160 | if not ddlSave == "False": 161 | sfSQL = sqlparse.format(sfSQL, reindent=True) 162 | ddlFn = ddlSave + "/" + row['table_schema'] + "_" + row['table_name'] + ".sql" 163 | ddlFile = open(ddlFn, "w") 164 | ddlFile.write(sfSQL) 165 | ddlFile.close() # to change file access modes 166 | 167 | logger.info("Closing DB connections") 168 | connVert.close() 169 | sfConn.close() 170 | 171 | except Exception as e: 172 | print(e) 173 | sys.exit(3) 174 | 175 | 176 | if __name__ == '__main__': 177 | main(sys.argv[1:]) -------------------------------------------------------------------------------- /Vertica/TEMP/VerticaDDL/STORE_MYTABLE_1.sql: -------------------------------------------------------------------------------- 1 | Create or Replace Table store.MYTABLE_1 2 | ( 3 | MYINT int NOT NULL 4 | ,MYINTEGER int 5 | ,MYBIGINT int 6 | ) 7 | -------------------------------------------------------------------------------- /Vertica/TEMP/VerticaDDL/STORE_MYTABLE_2.sql: -------------------------------------------------------------------------------- 1 | Create or Replace Table store.MYTABLE_2 2 | ( 3 | MYINT int NOT NULL 4 | ,MYINTEGER int 5 | ,MYBIGINT int 6 | ) 7 | -------------------------------------------------------------------------------- /Vertica/TEMP/VerticaDDL/STORE_MYVIEW1.sql: -------------------------------------------------------------------------------- 1 | Create or Replace View store.MYVIEW1 AS 2 | SELECT MYTABLE_1.MYINT, 3 | MYTABLE_1.MYINTEGER, 4 | MYTABLE_1.MYBIGINT 5 | FROM STORE.MYTABLE_1 -------------------------------------------------------------------------------- /Vertica/TEMP/VerticaDDL/STORE_MYVIEW2.sql: -------------------------------------------------------------------------------- 1 | Create or Replace View store.MYVIEW2 AS 2 | SELECT MYTABLE_2.MYINT, 3 | MYTABLE_2.MYINTEGER, 4 | MYTABLE_2.MYBIGINT 5 | FROM STORE.MYTABLE_2 -------------------------------------------------------------------------------- /Vertica/VerticaReadme.md: -------------------------------------------------------------------------------- 1 | # Vertica DDL Migration Utility 2 | 3 | ## Overview 4 | 5 | In the absence of a third party tool to perform migration of Vertica DDL to snowflake. 6 | 7 | ## Repository Structure 8 | 9 | This repository has the following structure: 10 | 11 | + Project Folder 12 | + .vscode 13 | + opt/vertica 14 | + bin 15 | + vsql 16 | + vsql32 17 | + DocumentationImages 18 | + Scripts 19 | + TEMP 20 | 21 | 22 | ## 1 Vertica To Snowflake DDL Migration 23 | 24 | In the absence of a third party tool to perform migration of Vertica SQL to snowflake, this utility can be used by snowflake employees who are involved in such a project. 25 | 26 | ● Configuration 27 | ● Execution 28 | 29 | ### Configuration 30 | 31 | #### Python dependencies are: 32 | 33 | ● Vertica Python Connector 34 | ● Pandas 35 | ● sqlparse 36 | 37 | ``` 38 | To install do pip install -r requirements.txt 39 | ``` 40 | 41 | #### Python scripts 42 | 43 | The scripts can be located at ./Scripts Included with the scripts is a sample configuration file. The next section describes the config file. 
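The entry point is `vertMain.py`, which takes the two configuration files as `-s` (Snowflake) and `-v` (Vertica) options. The following is a small sketch of that option handling, similar to the `getopt` call in `vertMain.py`; the file paths are only examples:

```
# Sketch of the option handling: -s <snowflake config>, -v <vertica config>.
import getopt

def parse_args(argv):
    snowflake_conf, vertica_conf = "", ""
    opts, _ = getopt.getopt(argv, "s:v:", ["snowflake=", "vertica="])
    for opt, arg in opts:
        if opt in ("-s", "--snowflake"):
            snowflake_conf = arg
        elif opt in ("-v", "--vertica"):
            vertica_conf = arg
    return snowflake_conf, vertica_conf

print(parse_args(["-s", "./sfConf.txt", "-v", "./verticaConf.txt"]))
```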
44 | 45 | ![Python Scripts](DocumentationImages/PythonScripts.png) 46 | 47 | 48 | The python scripts are :- 49 | 50 | ● vertMain.py This is the script that is called from the command line 51 | ● SFConfig.py Class file for SF configuration 52 | ● VerticaConfig.py Class file for Vertica configuration 53 | ● VerticaDBCalls.py Class file containing Vertica DB methods 54 | ● SFConvert.py Class file containing the DDL migration and Snowflake DB methods 55 | 56 | Sample configuration files:- 57 | 58 | ● sfConf.txt Details on snowflake config 59 | ● verticaConf.txt Details on vertica config. 60 | 61 | #### Vertica Config 62 | 63 | The following file is an example of the Vertica file.(Example: ./verticaConf.txt) 64 | 65 | ``` 66 | [vertica] 67 | host=localhost 68 | port=5433 69 | user=dbadmin 70 | database=docker 71 | ssl=False 72 | schema=store 73 | ``` 74 | 75 | #### Snowflake Config 76 | 77 | The following is an example of the Snowflake file.(Example: ./sfConf.txt) 78 | 79 | 80 | ``` 81 | [snowflake] 82 | account=xxxxxx 83 | user=xxxxxxx 84 | password=XXXXXXXXXXXXX 85 | role=verticadb_role 86 | # schema_mappings map the vertica schema to the sf schema 87 | # Format 88 | # vertica_schema=snowflake_db.snowflake_schema 89 | [schema_mappings] 90 | store=store 91 | [inview_mappings] 92 | store=store 93 | # This section determines what to do with the DDL 94 | # Valid options are 95 | # ddlDisplay=[True|False] If True, the DDL is written to the log 96 | # ddlSave= If present, write the ddl into 97 | # ddlExecute=[True|False] If True, executes the ddl in snowflake. If not present ddlExeute will be false 98 | # dropExisting=[True|False] If True, existing table will be dropped. 99 | # If False, the table will not be dropped but a warning will be given saying the table exists 100 | # processViews=[True|False] If True, migrate the views from the vertica instance to snowflake. 101 | # be sure to utilise [inview_mappings] to ensure the view will compile ok 102 | # If false, the views will not be migrated. 103 | [execution] 104 | ddlDisplay=True 105 | ddlSave=TEMP/VerticaDDL 106 | ddlExecute=True 107 | dropExisting=False 108 | processViews=True 109 | ``` 110 | 111 | #### Create the TEMP\VerticaDDL folder. 112 | 113 | Create the folder ./TEMP/VerticaDDL in the project. 114 | 115 | ![TEMP\VerticaDDL](DocumentationImages/TempFolder.png) 116 | 117 | #### Update the file launch.json 118 | 119 | Update the file ./vscode/launch.json add the following code 120 | 121 | 122 | ``` 123 | { 124 | "name": "Vertica main", 125 | "type": "python", 126 | "request": "launch", 127 | "program": "${file}", 128 | "console": "integratedTerminal", 129 | "justMyCode": false, 130 | "envFile": "${workspaceFolder}/.env", 131 | "args": ["-s","/workspace/SnowConvertDDLExportScripts/sfConf.txt","-v","/workspace/SnowConvertDDLExportScripts/verticaConf.txt"] 132 | } 133 | ``` 134 | 135 | ![Launchjson](DocumentationImages/Launchjson.png) 136 | 137 | #### Run the script to set all the environment variables. 138 | 139 | Run the following script in the command line 140 | 141 | ``` 142 | export $(xargs < .env) 143 | ``` 144 | 145 | #### Add Docker extension into the project 146 | 147 | ![Docker-extension](DocumentationImages/DockerExtensions.PNG) 148 | 149 | #### Install the vertica client divers 150 | 151 | Follow the steps below or just run the following command in the terminal. 
152 | 153 | ``` 154 | ./install-vertica.sh 155 | ``` 156 | 157 | 158 | Download the driver for linux 9.1.x 159 | 160 | https://www.vertica.com/client-drivers/ 161 | 162 | 163 | ![Drivers](DocumentationImages/VerticaClientDriversLinux.png) 164 | 165 | Once the tar file is downloaded copy the file and paste it in the root directory of the project. 166 | 167 | ![VerticaTarFile](DocumentationImages/VerticaTarFile.png) 168 | 169 | #### Unzip the file 170 | 171 | ``` 172 | gzip -d vertica-client-9.1.1-0.x86_64.tar.gz 173 | ``` 174 | 175 | #### Untar the file 176 | ``` 177 | tar -xvf vertica-client-9.1.1-0.x86_64.tar 178 | ``` 179 | 180 | Once the gz file was unzip right click in the directory ./opt/vertica/bin created and run a new terminal. 181 | 182 | ![BinNewTerminal](DocumentationImages/BinNewTerminal.png) 183 | 184 | #### Connect to the server 185 | 186 | Run the following command to connect to the server. 187 | 188 | ``` 189 | docker run -p 5433:5433 dataplatform/docker-vertica 190 | ``` 191 | ![VerticaTarFile](DocumentationImages/RunDockerVertica.png) 192 | 193 | #### Confirm the server is running 194 | 195 | Confirm the docket container is running 196 | 197 | ![VerticaTarFile](DocumentationImages/ContainerRunning.PNG) 198 | 199 | #### Connect to the database 200 | 201 | Run the following command in the new terminal to connect to the database. 202 | 203 | ``` 204 | ./vsql -hlocalhost -Udbadmin 205 | ``` 206 | 207 | ![ConnectToServer](DocumentationImages/ConnectToServer.png) 208 | 209 | #### Create the schema 210 | 211 | Run the following command to create the schema(Example). 212 | 213 | ``` 214 | CREATE SCHEMA IF NOT EXISTS STORE; 215 | ``` 216 | 217 | #### Create tables 218 | 219 | Run the following command to create the tables(Example). 220 | 221 | 222 | ``` 223 | CREATE OR REPLACE TABLE STORE.MYTABLE_1 ( 224 | MYINT INTEGER NOT NULL PRIMARY KEY, 225 | MYINTEGER INTEGER, 226 | MYBIGINT BIGINT 227 | ); 228 | 229 | CREATE OR REPLACE TABLE STORE.MYTABLE_2 ( 230 | MYINT INTEGER NOT NULL PRIMARY KEY, 231 | MYINTEGER INTEGER, 232 | MYBIGINT BIGINT 233 | ); 234 | ``` 235 | 236 | ![ConnectToServer](DocumentationImages/CreateTables.png) 237 | 238 | #### Create views 239 | 240 | Run the following command to create the views(Example). 
241 | 242 | ``` 243 | SELECT MYINT, 244 | MYINTEGER, 245 | MYBIGINT 246 | FROM STORE.MYTABLE_1; 247 | 248 | CREATE OR REPLACE VIEW STORE.MYVIEW2 AS 249 | SELECT MYINT, 250 | MYINTEGER, 251 | MYBIGINT 252 | FROM STORE.MYTABLE_2; 253 | ``` 254 | 255 | ![ConnectToServer](DocumentationImages/CreateViews.png) 256 | 257 | ### Execution 258 | 259 | Run the python script ./Scripts/vertMain.py 260 | 261 | ![RunPythonCode](DocumentationImages/RunPythonCode.png) 262 | 263 | Debug the code 264 | 265 | ![DebugCode](DocumentationImages/RunPythonCode02.png) 266 | 267 | ### Run sucessfully 268 | 269 | Once the project run sucessfully the project created the sql scripts in the folder created previously ./TEMP/VerticaDLL 270 | 271 | Example 272 | ``` 273 | Create or Replace View store.MYVIEW2 AS 274 | SELECT MYTABLE_2.MYINT, 275 | MYTABLE_2.MYINTEGER, 276 | MYTABLE_2.MYBIGINT 277 | FROM store.MYTABLE_2 278 | ``` 279 | 280 | ![TempFileCreated](DocumentationImages/TempFileCreated.png) 281 | 282 | 283 | 284 | 285 | 286 | 287 | 288 | 289 | -------------------------------------------------------------------------------- /Vertica/install-vertica.sh: -------------------------------------------------------------------------------- 1 | curl https://www.vertica.com/client_drivers/9.1.x/9.1.1-0/vertica-client-9.1.1-0.x86_64.tar.gz --output vertica-client.tar.gz 2 | gzip -d vertica-client.tar.gz 3 | tar -xvf vertica-client.tar -------------------------------------------------------------------------------- /Vertica/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy==1.22.0 2 | pandas==1.2.4 3 | parse==1.19.0 4 | vertica-python==1.0.1 5 | -------------------------------------------------------------------------------- /Vertica/sfConf.txt: -------------------------------------------------------------------------------- 1 | [snowflake] 2 | account=xxxxx 3 | user=xxxxx 4 | password=xxxxx 5 | role=verticadb_role 6 | # schema_mappings map the vertica schema to the sf schema 7 | # Format 8 | # vertica_schema=snowflake_db.snowflake_schema 9 | [schema_mappings] 10 | store=store 11 | [inview_mappings] 12 | store=store 13 | # This section determines what to do with the DDL 14 | # Valid options are 15 | # ddlDisplay=[True|False] If True, the DDL is written to the log 16 | # ddlSave= If present, write the ddl into 17 | # ddlExecute=[True|False] If True, executes the ddl in snowflake. If not present ddlExeute will be false 18 | # dropExisting=[True|False] If True, existing table will be dropped. 19 | # If False, the table will not be dropped but a warning will be given saying the table exists 20 | # processViews=[True|False] If True, migrate the views from the vertica instance to snowflake. 21 | # be sure to utilise [inview_mappings] to ensure the view will compile ok 22 | # If false, the views will not be migrated. 
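# Note: the folder named in ddlSave (TEMP/VerticaDDL in this example) must already exist;
# the scripts stop with an error if the path cannot be found.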
23 | [execution] 24 | ddlDisplay=True 25 | ddlSave=TEMP/VerticaDDL 26 | ddlExecute=False 27 | dropExisting=True 28 | processViews=True 29 | -------------------------------------------------------------------------------- /Vertica/verticaConf.txt: -------------------------------------------------------------------------------- 1 | [vertica] 2 | host=localhost 3 | port=5433 4 | user=dbadmin 5 | database=docker 6 | ssl=False 7 | schema=store -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/[ARCHIVED] TeradataScripts/.DS_Store -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/37b7e4c0976224f57bae83cf2487ac883affc888/[ARCHIVED] TeradataScripts/Teradata/.DS_Store -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/README.md: -------------------------------------------------------------------------------- 1 | # Teradata Export Scripts 2 | 3 | This repository provides some simple scripts to help exporting your Teradata code so it can be migrated to [Snowflake](https://www.snowflake.com/) using [SnowConvert](https://docs.snowconvert.com/snowconvert/for-teradata/introduction) 4 | 5 | ## Version 6 | 7 | Release 2021-12-10 8 | 9 | ## Usage 10 | 11 | The following are the steps to execute the DDL Code Generation. They should be executed in bash shell on a linux environment with access to bteq/tpt utilities. 12 | 13 | 1 - Modify `create_ddls.sh` in the bin folder – Using a text editor modify the following parameters: 14 | 15 | * `connection_string` 16 | * `include_databases` 17 | * `exclude_databases` 18 | * `include_objects` 19 | 20 | It is recommended to use the user 'DBC' in the connection string but a user with sysadmin privileges should also work. 
Please run on a production-like environment with up to date statistics. 21 | 22 | By default the script is setup to exclude system related databases and include all others. You can modify these to get the desired scope, including the operator that is used. Statements need to exclude spaces in the parameter values and values should be all **UPPERCASE**. 23 | 24 | > Do not remove the parentheses around the entire statement which are needed for compound logic. 25 | > Do not use **LIKE ANY** clause for both as it can cause unexpected issues. 26 | 27 | Example values: 28 | 29 | ```sql 30 | (UPPER(T1.DATABASENAME) NOT IN ('ALL', 'TESTDB')); 31 | 32 | (UPPER(T1.DATABASENAME) NOT IN ('ALL', 'TESTDB')) AND UPPER(T1.DATABASENAME) NOT LIKE ('TD_%')) 33 | ``` 34 | 35 | 2 - After modifying, the `create_ddls.sh` file can be run from the command line to execute the extract from within the bin directory. The following files will be created in the output folder: 36 | 37 | ## DDL Files 38 | 39 | These files will contain the definitions of the objects specified by the file name. 40 | 41 | * `DDL_Databases.sql 42 | * `DDL_Tables.sql` 43 | * `DDL_Join_Indexes.sql` 44 | * `DDL_Functions.sql` 45 | * `DDL_Views.sql` 46 | * `DDL_Macros.sql` 47 | * `DDL_Procedures.sql` 48 | * `Insert_statements.sql` (these are 2 dummy records created for each Teradata Table - NOT CUSTOMER DATA) 49 | 50 | ## Report Files 51 | 52 | ### System Statistics 53 | These files provide information around key system statistics and objects that can have a specific impact on conversion and migration activities. 54 | 55 | * `Object_Type_List.txt` 56 | * `Object_Type_Summary.txt` 57 | * `Table_List.txt` 58 | * `Special_Columns_List.txt` 59 | * `All_Stats.txt` 60 | * `Table_Stats.txt` 61 | * `View_Dependency_Detail.txt` 62 | * `View_Dependency_Report.txt` 63 | * `Object_Join_Indexes.txt` 64 | 65 | ### Usage Report Files 66 | 67 | These files provide information relevant to the sizing and usage of the Teradata system. These will not be created unless you uncomment the section for Creating Usage Reports 68 | 69 | * `90_Day_CPU_Stats.txt` 70 | * `90_Day_Node_Stats.txt` 71 | * `90_Day_Workload_Stats.txt` 72 | 73 | ### Data Profiling Files 74 | 75 | These collect information about certain column types in which information about the data is required to understand certain aspects of the migration. 76 | 77 | * `Data_Profile_Numbers.txt` 78 | 79 | ### Invalid Objects Log 80 | 81 | This file returns results showing any test failures for views that are not valid. 82 | 83 | * `invalid_objects.log` 84 | 85 | 3 - After a successful run, remove logon information from the top line of each of the files in the scripts folder as well as the `create_ddls.sh` file. Compress the entire Teradata Source Extract and return to Snowflake. Please do not modify or remove any files so that we can review logs as needed. 86 | 87 | ## Reporting issues and feedback 88 | 89 | If you encounter any bugs with the tool please file an issue in the 90 | [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of our GitHub repo. 91 | 92 | ## License 93 | 94 | These scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/Teradata/License.txt). 
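As a quick illustration of the parameters described in step 1, the sketch below shows how the `connection_string`, `include_databases`, `exclude_databases` and `include_objects` tokens in the `.btq` templates expand once values are filled in. It is only an illustration for previewing a template; the provided `create_ddls.sh` drives the real run, and the predicate values, logon string and file path here are examples:

```
# Preview how the placeholder tokens in a .btq template expand.
# The values below are examples only.
replacements = {
    "connection_string": "myhost/dbc,mypassword",  # placeholder logon string
    "include_databases": "(UPPER(T1.DATABASENAME) NOT IN ('ALL', 'TESTDB'))",
    "exclude_databases": "(UPPER(T1.DATABASENAME) NOT LIKE 'TD_%')",
    "include_objects": "(UPPER(T1.TABLENAME) NOT LIKE 'TMP_%')",
}

with open("scripts/create_ddls.btq") as template_file:
    template = template_file.read()

for token, value in replacements.items():
    template = template.replace(token, value)

print(template)
```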
95 | -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/scripts/create_ddls.btq: -------------------------------------------------------------------------------- 1 | **** Modified by: 2 | **** Modified Date: 3 | **** Description: 4 | 5 | .LOGON connection_string; 6 | 7 | **** CREATE TABLES FILE **** 8 | .EXPORT FILE = ../temp/SHOW_Tables.sql 9 | .SET WIDTH 65531 10 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW TABLE ' || TRIM(T1.DATABASENAME) || '.' ||TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND IN ('T','O','Q') AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 11 | .EXPORT RESET 12 | .OS rm ../output/object_extracts/DDL/DDL_Tables.sql 13 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Tables.sql 14 | .SET WIDTH 65531 15 | .RUN FILE = ../temp/SHOW_Tables.sql 16 | .EXPORT RESET 17 | 18 | 19 | **** CREATE JOIN INDEXES FILE **** 20 | .EXPORT FILE = ../temp/SHOW_Join_Indexes.sql 21 | .SET WIDTH 65531 22 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW JOIN INDEX ' || TRIM(T1.DATABASENAME) || '.' ||TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND IN ('I') AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 23 | .EXPORT RESET 24 | .OS rm ../output/object_extracts/DDL/DDL_Join_Indexes.sql 25 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Join_Indexes.sql 26 | .SET WIDTH 65531 27 | .RUN FILE = ../temp/SHOW_Join_Indexes.sql 28 | .EXPORT RESET 29 | 30 | 31 | **** CREATE VIEWS FILE **** 32 | .EXPORT FILE = ../temp/SHOW_Views.sql 33 | .SET WIDTH 65531 34 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW VIEW ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'V' AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 35 | .EXPORT RESET 36 | .OS rm ../output/object_extracts/DDL/DDL_Views.sql 37 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Views.sql 38 | .SET WIDTH 65531 39 | .RUN FILE = ../temp/SHOW_Views.sql 40 | .EXPORT RESET 41 | 42 | **** CREATE FUNCTIONS FILE **** 43 | .EXPORT FILE = ../temp/SHOW_Functions.sql 44 | .SET WIDTH 65531 45 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.SpecificNAME) || ' */'' as "--"; ' || 'SHOW FUNCTION ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.FUNCTIONNAME) || ';' "--" 46 | FROM DBC.FUNCTIONSV T1 WHERE include_databases AND exclude_databases GROUP BY 1; 47 | .EXPORT RESET 48 | .OS rm ../output/object_extracts/DDL/DDL_Functions.sql 49 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Functions.sql 50 | .SET WIDTH 65531 51 | .RUN FILE = ../temp/SHOW_Functions.sql 52 | .EXPORT RESET 53 | 54 | **** CREATE MACROS FILE **** 55 | .EXPORT FILE = ../temp/SHOW_Macros.sql 56 | .SET WIDTH 65531 57 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW MACRO ' || TRIM(T1.DATABASENAME) || '.' 
|| TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'M' AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 58 | .EXPORT RESET 59 | .OS rm ../output/object_extracts/DDL/DDL_Macros.sql 60 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Macros.sql 61 | .SET WIDTH 65531 62 | .RUN FILE = ../temp/SHOW_Macros.sql 63 | .EXPORT RESET 64 | 65 | 66 | **** CREATE PROCEDURES FILE **** 67 | .EXPORT FILE = ../temp/SHOW_Procedures.sql 68 | .SET WIDTH 65531 69 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW PROCEDURE ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'P' AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 70 | .EXPORT RESET 71 | .OS rm ../output/object_extracts/DDL/DDL_Procedures.sql 72 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Procedures.sql 73 | .SET WIDTH 65531 74 | .RUN FILE = ../temp/SHOW_Procedures.sql 75 | .EXPORT RESET 76 | 77 | 78 | **** CREATE DATABASES FILE **** 79 | .OS rm ../output/object_extracts/DDL/DDL_Databases.sql 80 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Databases.sql 81 | .SET WIDTH 65531 82 | SELECT 'CREATE DATABASE ' || TRIM(T1.DATABASENAME) || ' FROM DBC AS PERM = 100000000;' "--" FROM DBC.DATABASESV T1 WHERE include_databases AND exclude_databases GROUP BY 1 ORDER BY 1; 83 | .EXPORT RESET 84 | 85 | 86 | **** CREATE SNOWFLAKE SCHEMA FILE **** 87 | .OS rm ../output/object_extracts/DDL/DDL_SF_Schemas.sql 88 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_SF_Schemas.sql 89 | .SET WIDTH 65531 90 | SELECT '/* ' || TRIM(T1.DATABASENAME) || ' */ ' || 'CREATE SCHEMA ' || TRIM(T1.DATABASENAME) || ';' "--" FROM DBC.DATABASESV T1 WHERE include_databases AND exclude_databases GROUP BY 1 ORDER BY 1; 91 | .EXPORT RESET 92 | 93 | 94 | .quit 0; 95 | -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/scripts/create_usage_reports.btq: -------------------------------------------------------------------------------- 1 | **** Modified by: 2 | **** Modified Date: 3 | **** Description: 4 | 5 | .LOGON connection_string; 6 | 7 | **** CREATE REPORTS **** 8 | 9 | .OS rm ../output/object_extracts/Usage/90_Day_CPU_Stats.txt 10 | .EXPORT FILE = ../output/object_extracts/Usage/90_Day_CPU_Stats.txt 11 | .SET format off 12 | .SET titledashes off 13 | .SET SEPARATOR = '|' 14 | .SET WIDTH 65531 15 | SELECT TheDate ||'|'|| TheTime ||'|'|| NodeID ||'|'|| CPUIdle ||'|'|| CPUIoWait ||'|'|| CPUUServ ||'|'|| CPUUExec FROM pdcrinfo.ResUsageSpma WHERE thedate between current_date - 91 AND current_date; 16 | .EXPORT RESET 17 | 18 | 19 | .OS rm ../output/object_extracts/Usage/90_Day_Node_Stats.txt 20 | .EXPORT FILE = ../output/object_extracts/Usage/90_Day_Node_Stats.txt 21 | .SET format off 22 | .SET titledashes off 23 | .SET SEPARATOR = '|' 24 | .SET WIDTH 65531 25 | SELECT distinct TheDate||'|'|| NodeID||'|'|| NodeType ||'|'|| PM_CPU_COD ||'|'|| WM_CPU_COD ||'|'|| PM_IO_COD ||'|'|| WM_IO_COD ||'|'|| NCPUs ||'|'|| Vproc1 ||'|'|| Vproc2 ||'|'|| VprocType1 ||'|'|| VprocType2 ||'|'|| MemSize ||'|'|| NodeNormFactor FROM pdcrinfo.ResUsageSpma WHERE thedate between current_date - 91 AND current_date; 26 | .EXPORT RESET 27 | 28 | 29 | .OS rm ../output/object_extracts/Usage/90_Day_Workload_Stats.txt 30 | .EXPORT FILE = ../output/object_extracts/Usage/90_Day_Workload_Stats.txt 31 | .SET format off 32 | .SET titledashes off 33 | .SET 
SEPARATOR = '|' 34 | .SET WIDTH 65531 35 | SELECT trim(a.LogDate)||'|'|| 36 | trim(a.UserName)||'|'|| 37 | trim(a.StatementType)||'|'|| 38 | trim(a.ErrorCode)||'|'|| 39 | trim(a.Single_AMP)||'|'|| 40 | trim(a.StartHour)||'|'|| 41 | trim(a.WDID)||'|'|| 42 | trim(a.WDName)||'|'|| 43 | trim(a.AMPCPU)||'|'|| 44 | trim(a.ParserCPU)||'|'|| 45 | trim(a.RequestCount) 46 | FROM 47 | (SELECT LogDate 48 | , UserName 49 | , StatementType 50 | , ErrorCode 51 | , case when NumOfActiveAMPs <=2 then 'Yes' else 'No ' end as Single_AMP 52 | , EXTRACT( HOUR FROM starttime) AS StartHour 53 | , WDID 54 | , WDName 55 | , cast(SUM(AMPCPUTime) as varchar(18)) AS AMPCPU 56 | , cast(SUM(ParserCPUTime) as varchar(18)) AS ParserCPU 57 | , CAST(COUNT(*) as varchar(18)) AS RequestCount 58 | FROM pdcrinfo.dbqlogtbl_hst 59 | WHERE LogDate between Current_Date - 91 and Current_Date 60 | AND NumOfActiveAMPs <> 0 61 | GROUP BY 1,2,3,4,5,6,7,8) a 62 | ORDER BY 1; 63 | .EXPORT RESET 64 | 65 | .quit 0; 66 | -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/scripts/data_profiling.btq: -------------------------------------------------------------------------------- 1 | **** Modified by: 2 | **** Modified Date: 3 | **** Description: 4 | 5 | .LOGON connection_string; 6 | 7 | **** NUMBERS WITH FLEXIBLE PRECISION **** 8 | .EXPORT FILE = ../temp/NUMBER_COLUMNS.sql 9 | .SET WIDTH 65531 10 | select 11 | 'select c1 || ''|'' || cast(c2 as varchar(3)) || ''|'' || cast(c3 as varchar(3)) as "--" from ( 12 | select ''' || T1.databasename || '|' || T1.tablename || '|' || columnname || '|'' AS c1, 13 | max(length(cast(' || columnname || ' as varchar(40))) - case when position(''.'' IN cast(' || columnname || ' as varchar(40))) = 0 then 0 else length(cast(' || columnname || ' as varchar(40))) - position(''.'' IN cast(' || columnname || ' as varchar(40))) + 1 end) as "c2", 14 | max(case when position(''.'' IN cast(' || columnname || ' as varchar(40))) = 0 then 0 else length(cast(' || columnname || ' as varchar(40))) - position(''.'' IN cast(' || columnname || ' as varchar(40))) end) as "c3" from ' || T1.databasename || '.' || T1.tablename || ') T1;' as "--" 15 | from 16 | dbc.columnsv T1, 17 | dbc.tablesv T2 18 | where 19 | columntype = 'N' 20 | and UPPER(T1.DATABASENAME) = UPPER(T2.DATABASENAME) 21 | and UPPER(T1.TABLENAME) = UPPER(T2.TABLENAME) 22 | and T2.TABLEKIND IN ('T','O', 'Q') 23 | and decimaltotaldigits = -128 24 | and decimalfractionaldigits = -128 25 | AND include_databases AND exclude_databases AND include_objects 26 | ; 27 | .EXPORT RESET 28 | .OS rm ../output/object_extracts/Reports/Data_Profile_Numbers.txt 29 | .EXPORT FILE = ../output/object_extracts/Reports/Data_Profile_Numbers.txt 30 | .SET WIDTH 65531 31 | .RUN FILE = ../temp/NUMBER_COLUMNS.sql 32 | .EXPORT RESET 33 | 34 | .quit 0; 35 | -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/scripts/invalid_objects.btq: -------------------------------------------------------------------------------- 1 | **** Modified by: 2 | **** Modified Date: 3 | **** Description: 4 | 5 | .LOGON connection_string; 6 | 7 | **** CREATE INVALID VIEWS LIST FILE **** 8 | .SET ERROROUT STDOUT 9 | .EXPORT FILE = ../temp/Invalid_Object_Test.sql 10 | .SET WIDTH 65531 11 | SELECT 'SELECT * FROM ' || TRIM(T1.DATABASENAME) || '.' 
|| TRIM(T1.TABLENAME) || ' WHERE 1 = 2;' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND IN ('V') AND include_databases AND exclude_databases AND include_objects; 12 | .EXPORT RESET 13 | 14 | .RUN FILE = ../temp/Invalid_Object_Test.sql 15 | 16 | .quit 0; 17 | --------------------------------------------------------------------------------