├── .env ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug-report.md │ └── feature-request.md ├── hooks │ ├── README.md │ └── check-version-update.sh ├── pull_request_template.md ├── scripts │ ├── common-normalize-version.sh │ └── install-hooks.sh └── workflows │ ├── check-hooks.yml │ ├── common-pr-draft.yml │ ├── common-reusable-build.yml │ ├── package-scripts.yml │ ├── release-all-ci.yml │ ├── release-push-scripts.yml │ └── release-set-version.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .vscode ├── launch.json └── settings.json ├── AlternativeSQLServerExtractionMethods ├── .DS_Store ├── License.txt ├── PowerShellScripts │ ├── .DS_Store │ ├── README.md │ ├── Revision-History.txt │ └── bin │ │ └── extract-sql-server-ddl.ps1 ├── README.md ├── Table _sizing_report_query.pdf └── mssql-scripter │ └── mssql-scripter.pdf ├── BigQuery ├── License.txt ├── README.md └── bin │ └── create_ddls.sh ├── DB2 ├── License.txt ├── README.md └── bin │ ├── create_ddls.ps1 │ └── create_ddls.sh ├── Databricks ├── Download_jobs_sources.dbc ├── License.txt ├── README.md └── images │ └── notebook_export_source_codes.png ├── Hive ├── License.txt ├── README.md └── exp_ddl.sh ├── LEGAL.md ├── LICENSE ├── Netezza ├── License.txt └── README.md ├── Oracle ├── License.txt ├── README.md ├── bin │ ├── create_ddls.bat │ ├── create_ddls.sh │ └── create_ddls_plus.sh ├── scripts │ ├── create_ddls.sql │ └── create_ddls_plus.sql └── setup.cfg ├── README.md ├── Redshift ├── License.txt ├── README.md ├── bin │ ├── create_ddls.ps1 │ └── create_ddls.sh └── scripts │ ├── DDL_Function.sql │ ├── DDL_Procedure.sql │ ├── DDL_Table.sql │ └── DDL_View.sql ├── SQLServer ├── .DS_Store ├── License.txt ├── README.md ├── README.pdf ├── SQL_Server_Code_Extraction.pdf └── Table _sizing_report.pdf ├── Synapse ├── Create_ddls.ps1 ├── Create_ddls.sh ├── License.txt ├── README.md └── Scripts │ ├── Get_external_data_sources.sql │ ├── Get_external_file_formats.sql │ ├── Get_external_tables.sql │ ├── Get_external_tables_serveless.sql │ ├── Get_external_views.sql │ ├── Get_functions.sql │ ├── Get_indexes.sql │ ├── Get_procedures.sql │ ├── Get_schemas.sql │ ├── Get_tables.sql │ └── Get_views.sql ├── Teradata ├── .DS_Store ├── License.txt ├── README.md ├── bin │ └── create_ddls.sh ├── scripts_template │ ├── create_databases.btq │ ├── create_functions.btq │ ├── create_join_indexes.btq │ ├── create_macros.btq │ ├── create_procedures.btq │ ├── create_schemas.btq │ ├── create_tables.btq │ ├── create_triggers.btq │ └── create_views.btq └── sf_objects │ ├── Helper_Functions.sql │ └── SF_Sys_Calendar.sql ├── Tests ├── DB2 │ ├── .gitignore │ ├── License.txt │ ├── README.md │ └── startDocker.sh └── Teradata │ ├── License.txt │ ├── README.md │ ├── database_summary │ ├── __init__.py │ ├── database_source_code_summarizer.py │ ├── database_source_code_summary.py │ └── top_level_object_type.py │ ├── scripts │ ├── config.sh │ ├── execute_deploy_database_script.sh │ ├── execute_drop_database_script.sh │ ├── execute_extract_database_script.sh │ ├── execute_scripts.sh │ └── ssh_automatic_login_configuration.sh │ ├── source_code │ └── demo_database │ │ ├── database_code │ │ ├── DDL_CreateMacro.sql │ │ ├── DDL_Databases.sql │ │ ├── DDL_JoinIndex.sql │ │ ├── DDL_Tables.sql │ │ ├── DDL_Trigger.sql │ │ ├── DDL_Views.sql │ │ ├── INSERT_VEMPLOYEE.sql │ │ ├── UPDATE_VEMPLOYEE.sql │ │ ├── my_yyyymmdd_to_date2.c │ │ └── my_yyyymmdd_to_date2.sql │ │ ├── deploy_database.sh │ │ └── drop_database.sh │ ├── teradata_extraction_test_base.py │ └── test_demo_database.py ├── 
VERSION ├── VERSION-UPDATE.sh ├── Vertica ├── DocumentationImages │ ├── BinNewTerminal.png │ ├── ConnectToServer.png │ ├── ContainerRunning.PNG │ ├── CreateTables.png │ ├── CreateViews.png │ ├── DockerExtensions.PNG │ ├── DockerRunning.PNG │ ├── ExampleScripts.PNG │ ├── FoldeStructure.PNG │ ├── Folder.PNG │ ├── Launchjson.png │ ├── PipInstall_sqlparse.png │ ├── PipInstallsqlparse.PNG │ ├── PythonDDLRunSucessfully.png │ ├── PythonScripts.png │ ├── PythonVersion.png │ ├── RunDockerVertica.png │ ├── RunPythonCode.png │ ├── RunPythonCode02.png │ ├── TempFileCreated.png │ ├── TempFolder.png │ ├── VerticaClientDriversLinux.png │ └── VerticaTarFile.png ├── License.txt ├── README.md ├── Scripts │ ├── SFConfig.py │ ├── SFConvert.py │ ├── SQL_Convert │ │ └── sqls │ │ │ └── vmart_query_01.sql │ ├── VerticaConfig.py │ ├── VerticaDBCalls.py │ └── vertMain.py ├── TEMP │ └── VerticaDDL │ │ ├── STORE_MYTABLE_1.sql │ │ ├── STORE_MYTABLE_2.sql │ │ ├── STORE_MYVIEW1.sql │ │ └── STORE_MYVIEW2.sql ├── VerticaReadme.md ├── install-vertica.sh ├── requirements.txt ├── sfConf.txt └── verticaConf.txt ├── [ARCHIVED] TeradataScripts ├── .DS_Store └── Teradata │ ├── .DS_Store │ ├── License.txt │ ├── README.md │ ├── bin │ └── create_ddls.sh │ ├── scripts │ ├── create_ddls.btq │ ├── create_reports.btq │ ├── create_sample_inserts.btq │ ├── create_usage_reports.btq │ ├── data_profiling.btq │ └── invalid_objects.btq │ └── sf_objects │ ├── Helper_Functions.sql │ └── SF_Sys_Calendar.sql ├── additional_notes └── DB2 │ └── useful_db2_scripts.md └── setup.sh /.env: -------------------------------------------------------------------------------- 1 | SNOW_USER=Change for your user 2 | SNOW_PASSWORD=XXXXXXXX 3 | SNOW_ACCOUNT=SnowAccount 4 | SNOW_WAREHOUSE=datawarehouseName 5 | SNOW_DATABASE=DatabaseName 6 | SNOW_ROLE=Rolename 7 | OUT_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/sqls 8 | SUCCESS_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/success 9 | FAILED_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/failed 10 | FORMATTED_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/formatted 11 | LOG_FOLDER=/workspace/snowconvertdataexportscripts/Vertica/Scripts/SQL_Convert/log 12 | ACTION=SQL 13 | MAX_THREADS=15 14 | STOP_AFTER=1000000 15 | DB_SESSIONS=1 16 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | .github/ @Snowflake-Labs/migrations-devops 2 | .pre-commit-config.yaml @Snowflake-Labs/migrations-devops 3 | VERSION-UPDATE.sh @Snowflake-Labs/migrations-devops 4 | setup.sh @Snowflake-Labs/migrations-devops -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "Bug Report \U0001F41E" 3 | about: Something isn't working as expected? Here is the right place to report. 4 | title: '' 5 | labels: bug, needs triage 6 | assignees: '' 7 | 8 | --- 9 | 10 | Please answer these questions before submitting your issue. Thanks! 11 | 12 | 1. What version of Python are you using? 13 | 14 | Replace with the output of `python --version` 15 | 16 | 2. What version of Snowflake connector are you using? 17 | 18 | Replace with the output of `pip show snowflake-connector-python` 19 | 20 | 3. 
What operating system and processor architecture are you using? 21 | 22 | Replace with information about your OS and processor 23 | 24 | 4. What are the dependencies in your project? 25 | 26 | Replace with the relevant sections from your `requirements.txt` file or `pip freeze` 27 | 28 | 5. What Snowflake version/edition are you connecting to? 29 | 30 | If possible, provide the output of `SELECT CURRENT_VERSION()` from Snowflake 31 | 32 | 6. What did you do? 33 | 34 | If possible, provide a recipe for reproducing the error. 35 | A complete runnable script is good. 36 | 37 | 7. What did you expect to see? 38 | 39 | What should have happened and what happened instead? 40 | 41 | 8. What operating system was the application running on when the bug occurred? 42 | 43 | Please specify the OS name, version, and any relevant details about the environment. 44 | 45 | 9. Were there any error messages or logs? 46 | 47 | Please include any relevant error messages, log outputs, or screenshots. -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "Feature Request \U0001F4A1" 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | ## What is the current behavior? 11 | 12 | ## What is the desired behavior? 13 | 14 | ## How would this improve the project or user experience? 15 | 16 | ## References, Other Background 17 | -------------------------------------------------------------------------------- /.github/hooks/README.md: -------------------------------------------------------------------------------- 1 | # Version Control Hooks 2 | 3 | This directory contains Git hooks to ensure proper version management in the repository. 4 | 5 | ## What These Hooks Do 6 | 7 | The pre-commit hook checks if you've modified any `.sh` or `.sql` files outside of the `.github/` directory. If you have, it verifies that the `VERSION` file has also been modified. If not, it warns you to update the version and run the `VERSION-UPDATE.sh` script. 8 | 9 | **Important**: This step is mandatory for all developers to ensure consistent versioning across the codebase. 10 | 11 | ## Installation 12 | 13 | To install the hooks, run: 14 | 15 | ```bash 16 | ./.github/scripts/install-hooks.sh 17 | ``` 18 | 19 | This requires `pre-commit` to be installed on your system. If you don't have it, you can install it with: 20 | 21 | ```bash 22 | pip install pre-commit 23 | # or 24 | brew install pre-commit # On macOS with Homebrew 25 | ``` 26 | 27 | ## Usage 28 | 29 | After installation, the hooks will run automatically when you commit changes. If you've modified `.sh` or `.sql` files without updating the VERSION file, you'll see a warning. 30 | 31 | To update the version properly: 32 | 33 | 1. Modify the `VERSION` file with the new version number 34 | 2. Run `./VERSION-UPDATE.sh` to propagate the version to all README.md files 35 | 3. 
Commit your changes 36 | 37 | If you need to bypass the check (not recommended), you can use: 38 | 39 | ```bash 40 | git commit --no-verify 41 | ``` 42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /.github/hooks/check-version-update.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Get all changed files (staged for commit) that are .sh or .sql 4 | CHANGED_FILES=$(git diff --cached --name-only | grep -E "\.sh$|\.sql$" | grep -v "^\.github/" || true) 5 | 6 | # Check if VERSION file is modified 7 | VERSION_MODIFIED=$(git diff --cached --name-only | grep -c "^VERSION$" || true) 8 | 9 | # If there are .sh or .sql files modified outside .github/ but VERSION is not modified 10 | if [ -n "$CHANGED_FILES" ] && [ "$VERSION_MODIFIED" -eq 0 ]; then 11 | echo "⚠️ WARNING: You have modified .sh or .sql files, but the VERSION file has not been updated." 12 | echo "⚠️ Please update the VERSION file and run the VERSION-UPDATE.sh script to propagate the version." 13 | echo "⚠️ Changed files:" 14 | echo "$CHANGED_FILES" 15 | echo "" 16 | echo "⚠️ You can still commit with --no-verify if this is intentional." 17 | exit 1 18 | fi 19 | 20 | exit 0 21 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 2 | ### Motivation & Context 3 | 4 | 5 | 6 | ### Description 7 | 8 | 9 | ### How Has This Been Tested? 10 | 11 | 12 | 13 | ### Checklist 14 | 15 | - [ ] Bug fix (non-breaking change which fixes an issue) 16 | - [ ] New feature (non-breaking change which adds functionality) 17 | - [ ] Data correction (data quality issue originating from upstream source or dataset) 18 | - [ ] Cleanup and optimization (improvement that does not alter the data returned by a model) 19 | - [ ] Other (please specify) 20 | 21 | ### Review & Approval Requests 22 | 23 | 24 | 25 | **Note**: For work in progress, use GitHub's [draft PR feature](https://github.blog/2019-02-14-introducing-draft-pull-requests/) which automatically applies a 'DO NOT MERGE' status until the PR is marked as ready for review. 26 | -------------------------------------------------------------------------------- /.github/scripts/install-hooks.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)" 4 | 5 | echo "Installing pre-commit hooks for the repository at $REPO_ROOT" 6 | 7 | create_post_merge_hook() { 8 | POST_MERGE_HOOK="$REPO_ROOT/.git/hooks/post-merge" 9 | 10 | if [ -f "$POST_MERGE_HOOK" ]; then 11 | if grep -q "install-hooks.sh" "$POST_MERGE_HOOK"; then 12 | return 0 13 | fi 14 | 15 | cp "$POST_MERGE_HOOK" "$POST_MERGE_HOOK.bak" 16 | fi 17 | 18 | cat > "$POST_MERGE_HOOK" << 'EOF' 19 | #!/bin/bash 20 | if git diff-tree -r --name-only --no-commit-id ORIG_HEAD HEAD | grep -q "install-hooks.sh\|.pre-commit-config.yaml"; then 21 | echo "Detected changes in hook configuration. Running install-hooks.sh..." 22 | "$(dirname "$(git rev-parse --git-dir)")/.github/scripts/install-hooks.sh" 23 | fi 24 | EOF 25 | 26 | chmod +x "$POST_MERGE_HOOK" 27 | echo "Created post-merge hook to auto-update hooks after pull/merge" 28 | } 29 | 30 | if ! command -v pre-commit &> /dev/null; then 31 | echo "Error: pre-commit is not installed." 
32 | echo "Please install it using: pip install pre-commit" 33 | echo "or: brew install pre-commit" 34 | exit 1 35 | fi 36 | 37 | echo "Installing pre-commit hooks..." 38 | pre-commit install 39 | 40 | create_post_merge_hook 41 | 42 | echo "===============================================" 43 | echo "Installation completed successfully!" 44 | echo "The VERSION-UPDATE check will now run every time you commit changes to .sh, .ps1, or .sql files." 45 | echo "" 46 | echo "You can now start working with the DDL export scripts." 47 | echo "===============================================" 48 | -------------------------------------------------------------------------------- /.github/workflows/check-hooks.yml: -------------------------------------------------------------------------------- 1 | name: Check Pre-commit Hooks Installation 2 | 3 | # Summary: 4 | # This workflow serves as a "safety net" to ensure code quality standards are maintained 5 | # even if developers haven't installed pre-commit hooks locally or have bypassed them. 6 | # It provides: 7 | # 1. Verification that all files meet project standards (VERSION update check) 8 | # 2. Documentation of expected code checks 9 | # 3. Project protection by enforcing consistent validation 10 | # 4. Early warning system for developers who haven't installed hooks 11 | # 5. Consistency across all contributions regardless of developer environment 12 | # 13 | # The pre-commit hooks check that VERSION file is updated when .sh or .sql files are modified. 14 | # If issues are found, the workflow advises developers to run install-hooks.sh locally. 15 | 16 | on: 17 | push: 18 | branches: [main, "support/*", "feature/*", "bugfix/*", "sfc-gh-*/*"] 19 | pull_request: 20 | branches: ["**"] 21 | 22 | jobs: 23 | check-hooks: 24 | runs-on: ubuntu-latest 25 | steps: 26 | - uses: actions/checkout@v4 27 | - name: Set up Python 28 | uses: actions/setup-python@v4 29 | with: 30 | python-version: "3.x" 31 | 32 | - name: Install pre-commit 33 | run: pip install pre-commit 34 | 35 | - name: Check pre-commit status 36 | run: | 37 | pre-commit run --all-files || ( 38 | echo "::warning::Pre-commit hooks found issues. Please run './.github/scripts/install-hooks.sh' locally and fix the issues." 
39 | exit 1 40 | ) 41 | -------------------------------------------------------------------------------- /.github/workflows/common-pr-draft.yml: -------------------------------------------------------------------------------- 1 | name: PR Draft 2 | 3 | on: 4 | pull_request: 5 | types: 6 | [ 7 | edited, 8 | opened, 9 | reopened, 10 | synchronize, 11 | converted_to_draft, 12 | ready_for_review, 13 | ] 14 | 15 | permissions: 16 | contents: read 17 | pull-requests: write 18 | 19 | jobs: 20 | add-label-draft: 21 | runs-on: ubuntu-latest 22 | if: github.event.pull_request.draft == true 23 | steps: 24 | - name: Checkout repository 25 | uses: actions/checkout@v4 26 | 27 | - name: Install GitHub CLI 28 | run: sudo apt-get update && sudo apt-get install -y gh 29 | 30 | - name: Authenticate GitHub CLI 31 | run: echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token 32 | 33 | - name: Add label to draft PR 34 | run: gh pr edit "$PR_URL" --add-label "DO NOT MERGE" 35 | env: 36 | PR_URL: ${{ github.event.pull_request.html_url }} 37 | 38 | remove-label-draft: 39 | runs-on: ubuntu-latest 40 | if: github.event.pull_request.draft == false 41 | steps: 42 | - name: Checkout repository 43 | uses: actions/checkout@v4 44 | 45 | - name: Install GitHub CLI 46 | run: sudo apt-get update && sudo apt-get install -y gh 47 | 48 | - name: Authenticate GitHub CLI 49 | run: echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token 50 | 51 | - name: Remove label from PR 52 | run: gh pr edit "$PR_URL" --remove-label "DO NOT MERGE" 53 | env: 54 | PR_URL: ${{ github.event.pull_request.html_url }} 55 | -------------------------------------------------------------------------------- /.github/workflows/package-scripts.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_call: 3 | push: 4 | branches: 5 | - support/* 6 | - feature/* 7 | - bugfix/* 8 | - sfc-gh-*/* 9 | pull_request: 10 | branches: 11 | - '**' 12 | 13 | name: "Package Scripts" 14 | 15 | permissions: 16 | contents: write 17 | pull-requests: write 18 | 19 | jobs: 20 | build-assets: 21 | name: Package Database Scripts 22 | uses: ./.github/workflows/common-reusable-build.yml 23 | with: 24 | artifact_name: ddl-export-scripts 25 | artifact_retention_days: 30 26 | -------------------------------------------------------------------------------- /.github/workflows/release-all-ci.yml: -------------------------------------------------------------------------------- 1 | name: Run All Release Process 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - "**" 9 | - "![ARCHIVED] TeradataScripts/**" 10 | - "!**/additional_notes/**" 11 | - "!README.md" 12 | - "!VERSION" 13 | pull_request: 14 | branches: 15 | - "**" 16 | paths: 17 | - "**" 18 | - "![ARCHIVED] TeradataScripts/**" 19 | - "!**/additional_notes/**" 20 | - "!README.md" 21 | - "!VERSION" 22 | workflow_call: 23 | 24 | permissions: 25 | contents: write 26 | pull-requests: write 27 | 28 | jobs: 29 | 30 | set-version: 31 | name: Set Version and Check for Tags 32 | uses: ./.github/workflows/release-set-version.yml 33 | secrets: inherit 34 | 35 | check-context: 36 | name: Determine Build Context 37 | runs-on: ubuntu-latest 38 | outputs: 39 | is_main: ${{ steps.context.outputs.is_main }} 40 | steps: 41 | - name: Determine context 42 | id: context 43 | run: | 44 | if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then 45 | echo "Running on main branch" 46 | echo "is_main=true" >> $GITHUB_OUTPUT 47 | else 48 | echo "Running on branch: ${{ github.ref }}" 
49 | echo "is_main=false" >> $GITHUB_OUTPUT 50 | fi 51 | 52 | - name: Output context 53 | run: | 54 | echo "Is main branch: ${{ steps.context.outputs.is_main }}" 55 | echo "GitHub ref: ${{ github.ref }}" 56 | echo "Event name: ${{ github.event_name }}" 57 | 58 | 59 | # This job checks the context if you're on the main branch or a PR/branch 60 | release-workflow: 61 | name: Create Release and Upload Assets (Main Branch) 62 | needs: [set-version, check-context] 63 | if: needs.check-context.outputs.is_main == 'true' 64 | uses: ./.github/workflows/release-push-scripts.yml 65 | secrets: inherit 66 | 67 | package-workflow: 68 | name: Package Assets Only (PR/Branch) 69 | needs: [set-version, check-context] 70 | if: needs.check-context.outputs.is_main != 'true' 71 | uses: ./.github/workflows/package-scripts.yml 72 | secrets: inherit 73 | -------------------------------------------------------------------------------- /.github/workflows/release-set-version.yml: -------------------------------------------------------------------------------- 1 | on: workflow_call 2 | 3 | name: Set Version and Tag Release 4 | 5 | permissions: 6 | contents: write 7 | pull-requests: write 8 | 9 | jobs: 10 | set-version: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Check out the repository 14 | uses: actions/checkout@v4 15 | with: 16 | fetch-depth: 0 17 | 18 | - name: Extract version from VERSION file 19 | id: get_version 20 | working-directory: ${{ github.workspace }} 21 | run: | 22 | # Get version with underscores for use in some places 23 | VERSION=$(grep "__version__" VERSION | cut -d'"' -f2 | tr '.' '_') 24 | echo "VERSION=${VERSION}" >> "${GITHUB_OUTPUT}" 25 | 26 | # Get original version with dots for use in file names 27 | VERSION_DOTS=$(grep "__version__" VERSION | cut -d'"' -f2) 28 | echo "VERSION_DOTS=${VERSION_DOTS}" >> "${GITHUB_OUTPUT}" 29 | 30 | # Get clean version without any 'v' prefix to ensure we don't get double v's 31 | VERSION_CLEAN=$(grep "__version__" VERSION | cut -d'"' -f2 | sed 's/^v//g') 32 | echo "VERSION_CLEAN=${VERSION_CLEAN}" >> "${GITHUB_OUTPUT}" 33 | 34 | echo "Version extracted: ${VERSION} (with underscores), ${VERSION_DOTS} (with dots), and ${VERSION_CLEAN} (clean without v prefix)" 35 | 36 | - name: Check GitHub context 37 | id: context 38 | run: | 39 | # Determine if we're on main branch or in a PR 40 | if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then 41 | echo "🚀 Running on main branch - will create tag and release" 42 | echo "is_main=true" >> $GITHUB_OUTPUT 43 | else 44 | echo "⚠️ Not running on main branch - will not create tag and release" 45 | echo "is_main=false" >> $GITHUB_OUTPUT 46 | fi 47 | 48 | - name: Check for existing tags 49 | id: check_tag 50 | run: | 51 | # Fetch all tags 52 | git fetch --tags 53 | 54 | # Only check for the standard v prefix format 55 | if git tag -l "v${{ steps.get_version.outputs.VERSION_CLEAN }}" | grep -q "v${{ steps.get_version.outputs.VERSION_CLEAN }}"; then 56 | echo "Tag v${{ steps.get_version.outputs.VERSION_CLEAN }} already exists" 57 | echo "tag_exists=true" >> $GITHUB_OUTPUT 58 | echo "existing_tag=v${{ steps.get_version.outputs.VERSION_CLEAN }}" >> $GITHUB_OUTPUT 59 | else 60 | echo "Tag v${{ steps.get_version.outputs.VERSION_CLEAN }} does not exist" 61 | echo "tag_exists=false" >> $GITHUB_OUTPUT 62 | 63 | # Get latest tag if no matching tag exists 64 | LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "v0.0.1") 65 | echo "latest_tag=${LATEST_TAG}" >> $GITHUB_OUTPUT 66 | fi 67 | 68 | - name: Bump version and 
push tag 69 | id: tag_version 70 | if: steps.context.outputs.is_main == 'true' && steps.check_tag.outputs.tag_exists != 'true' 71 | uses: mathieudutour/github-tag-action@v6.1 72 | with: 73 | github_token: ${{ secrets.GITHUB_TOKEN }} 74 | custom_tag: v${{ steps.get_version.outputs.VERSION_CLEAN }} 75 | tag_prefix: "" 76 | create_annotated_tag: true 77 | dry_run: false 78 | 79 | - name: Set tag info for non-main branches 80 | id: set_tag_info 81 | if: steps.context.outputs.is_main != 'true' || steps.check_tag.outputs.tag_exists == 'true' 82 | run: | 83 | if [[ "${{ steps.check_tag.outputs.tag_exists }}" == "true" ]]; then 84 | # Use existing tag if available 85 | echo "Using existing tag: ${{ steps.check_tag.outputs.existing_tag }}" 86 | echo "new_tag=${{ steps.check_tag.outputs.existing_tag }}" >> $GITHUB_OUTPUT 87 | echo "tag=${{ steps.check_tag.outputs.existing_tag }}" >> $GITHUB_OUTPUT 88 | else 89 | # Use version from VERSION file with 'v' prefix 90 | echo "Using version from VERSION file: v${{ steps.get_version.outputs.VERSION_CLEAN }}" 91 | echo "new_tag=v${{ steps.get_version.outputs.VERSION_CLEAN }}" >> $GITHUB_OUTPUT 92 | echo "tag=v${{ steps.get_version.outputs.VERSION_CLEAN }}" >> $GITHUB_OUTPUT 93 | fi 94 | 95 | # Set changelog to empty for non-main branches 96 | echo "changelog=" >> $GITHUB_OUTPUT 97 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: local 3 | hooks: 4 | - id: check-version-update 5 | name: Check Version Update 6 | language: system 7 | entry: .github/hooks/check-version-update.sh 8 | files: \.(sh|sql|ps1)$ 9 | exclude: ^\.github/|setup\.sh$ 10 | stages: [commit] 11 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Debug Script", 6 | "type": "python", 7 | "request": "launch", 8 | "program": "${file}", 9 | "console": "integratedTerminal", 10 | "justMyCode": false, 11 | "envFile": "${workspaceFolder}/.env" 12 | }, 13 | { 14 | "name": "Vertica main", 15 | "type": "python", 16 | "request": "launch", 17 | "program": "${file}", 18 | "console": "integratedTerminal", 19 | "justMyCode": false, 20 | "envFile": "${workspaceFolder}/.env", 21 | "args": ["-s","/workspace/SnowConvertDDLExportScripts/sfConf.txt","-v","/workspace/SnowConvertDDLExportScripts/verticaConf.txt"] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.exclude": { 3 | "**/.git": true, 4 | "**/.svn": true, 5 | "**/.hg": true, 6 | "**/CVS": true, 7 | "**/.DS_Store": true, 8 | "**/__pycache__":true, 9 | "**/.pytest_cache": true 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/AlternativeSQLServerExtractionMethods/.DS_Store -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/License.txt: 
-------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/PowerShellScripts/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/AlternativeSQLServerExtractionMethods/PowerShellScripts/.DS_Store -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/PowerShellScripts/README.md: -------------------------------------------------------------------------------- 1 | # SQL Server Export Scripts 2 | 3 | This repository provides some simple scripts to help exporting your SQLServer code so it can be migrated to [Snowflake](https://www.snowflake.com/) using [SnowConvert](https://docs.snowconvert.com/snowconvert/for-transactsql/introduction). 4 | 5 | ## Version 6 | 0.0.96 7 | Version 2.8 8 | Release 2022-09-01 9 | 10 | ## Usage 11 | 12 | The `extract-sql-server-ddl.ps1` script attempts to connect to an instance of SQL Server using either Windows or SQL authentication and, for each database that survives inclusion/exclusion filters, retrieves certain object definitions as individual DDL files to a local directory. 13 | 14 | **SQL Server tested versions**: `SQL Server 2019`, `Azure SQLDatabase` 15 | 16 | The script uses the following parameters. The script will prompt the user for any parameter not specified on the command line. 
17 | 18 | * **ServerName**: Specifies the SQL Server database server to use 19 | * **InstanceName**: Specifies the SQL Server database instance to use (default is the default instance) 20 | * **PortNumber**: Specifies the port to use (default is 1433) 21 | * **UserName**: Specifies the user name to use with SQL Authentication (default is the logged-in user) 22 | * **Password**: Specifies the password to use for **UserName** (if SQL authentication preferred) 23 | * **ScriptDirectory**: Specifies the root directory in which the extracted files are to be stored (default is .\MyScriptsDirectory) 24 | * **IncludeDatabases**: Specifies databases that match the listed pattern(s) be included in the extraction (default is all) 25 | * **ExcludeDatabases**: Specifies databases that match the listed pattern(s) be excluded from the extraction (default is none) 26 | * **IncludeSchemas**: Specifies schemas (in any database) that match the listed pattern(s) be included in the extraction (default is all) 27 | * **ExcludeSchemas**: Specifies schemas (in any database) that match the listed pattern(s) be excluded from the extraction (default is none) 28 | * **IncludeSystemDatabases**: Specifies whether to include databases, schemas, and tables tagged as SQL Server system objects (default is false) 29 | * **ExistingDirectoryAction**: Specifies whether to delete or keep the existing **ScriptDirectory** (default is to prompt interactively) 30 | * **NoSysAdminAction**: Specifies whether to stop or continue should the authenticated **UserName** not have the sysadmin role on **ServerName**\\**InstanceName** (default is to prompt interactively) 31 | 32 | ## Troubleshooting 33 | 34 | ### What to if I need to run the scripts on a machine with no Internet Access ? 35 | 36 | The extraction scripts will try to install a PowerShell module for SQLServer. If the machine does not have access to internet this operation might fail. 37 | 38 | One option can be to download this module and install it manually. 39 | 40 | You can follow these steps: 41 | 42 | 1. Run powershell 43 | 2. Create a folder for example c:\temp 44 | 3. Run `Invoke-WebRequest -Uri powershellgallery.com/api/v2/package/sqlserver -Out D:\temp\sqlserver.zip` 45 | 4. Now we need to extract the module into a path that Powershell can use to load the modules. For that purpose we can run 46 | ``` 47 | PS C:\> echo $env:PSModulePath.Split(";") 48 | C:\Users\username\Documents\WindowsPowerShell\Modules 49 | C:\Program Files (x86)\WindowsPowerShell\Modules 50 | C:\Program Files\WindowsPowerShell\Modules 51 | ``` 52 | As you can see the output will print a list of folder where powershell lists the modules. 53 | You can select one of the folder like this: 54 | ``` 55 | PS C:\> echo $env:PSModulePath.Split(";")[0] 56 | C:\Users\username\Documents\WindowsPowerShell\Modules 57 | ``` 58 | Create a target folder: 59 | ``` 60 | PS C:\> mkdir ($env:PSModulePath.Split(";")[0] + "\SqlServer") 61 | ``` 62 | 63 | And extract the module like: 64 | ``` 65 | PS C:\> Expand-Archive -Path C:\temp\sqlserver.zip -DestinationPath ($env:PSModulePath.Split(";")[0] + "\SqlServer") 66 | ``` 67 | 68 | 5. Install it like: 69 | ``` 70 | PS C:\> Install-Module -Name SqlServer -Scope CurrentUser 71 | 72 | Untrusted repository 73 | You are installing the modules from an untrusted repository. If you trust this repository, change its 74 | InstallationPolicy value by running the Set-PSRepository cmdlet. Are you sure you want to install the modules from 75 | 'PSGallery'? 
76 | [Y] Yes [A] Yes to All [N] No [L] No to All [S] Suspend [?] Help (default is "N"): A 77 | ``` 78 | 79 | ## Additional Help 80 | 81 | For more information on using the script, execute the following: 82 | ```ps 83 | PS> Get-Help -full .\extract-sql-server-ddl.ps1 84 | ``` 85 | 86 | ## Reporting issues and feedback 87 | 88 | If you encounter any bugs with the tool please file an issue in the 89 | [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of our GitHub repo. 90 | 91 | ## License 92 | 93 | These scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/SQLServer/License.txt). 94 | -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/PowerShellScripts/Revision-History.txt: -------------------------------------------------------------------------------- 1 | # 2 | # 2021-08-05 Derrick Cole 3 | # - parameterized variables 4 | # - added reset switch 5 | # - reordered/cleaned up logic 6 | # - more robust try/catch error handling 7 | # - corrected databaseObjectType references 8 | # - converted "where name" to Where-Object for compatability 9 | # - added filter to exclude system schemae/objects and in-memory temp tables 10 | # 11 | # 2021-08-06 Derrick Cole 12 | # - added database include and exclude capability 13 | # - added database- and table-level info capture (in addition to the DDL) 14 | # 15 | # 2021-08-09 Derrick Cole 16 | # - ran script through PSScriptAnalyzer and tweaked based on default ruleset (install; Invoke-ScriptAnalyzer -Path ) 17 | # - added check for PS 4.0+ 18 | # - added external* database object types 19 | # - added database and table summary info 20 | # 21 | # 2021-09-02 Derrick Cole 22 | # - incorporated Azure support from separate script 23 | # - cleaned up parameters and logic 24 | # 25 | # 2021-09-03 Derrick Cole 26 | # - version 1.0 27 | # - added SqlServer module presence/install block 28 | # - corrected database inclusion/exclusion filtering 29 | # - consolidated server connection into single block 30 | # - added a server summary dump 31 | # - added version and rundate info 32 | # - minor cleanup 33 | # 34 | # 2021-09-07 Derrick Cole 35 | # - version 1.1 36 | # - adjusted database inclusion/exclusion filtering 37 | # - added support for masked password prompting 38 | # - added SQL Server authentication option (Windows authentication by default) 39 | # - added support for Get-Help 40 | # - more cleanup 41 | # 42 | # 2021-09-18 Derrick Cole 43 | # - version 1.2 44 | # - added user role check 45 | # - added more graceful processing and error handling 46 | # - more cleanup 47 | # 48 | # 2021-10-28 Derrick Cole 49 | # - version 1.3 50 | # - increased PowerShell minimum version to 5.0 51 | # - removed dependency on SqlServer module in favor of two SMO assemblies 52 | # - removed SqlAuthentication parameter in favor of UserId on the command line 53 | # - renamed IncludeSystemObjects parameter to IncludeSystemDatabases 54 | # - added command line directives for existing directories and no sysadmin 55 | # - added {Include/Exclude}Schema filters 56 | # - closer alignment to out-of-the-box functionality 57 | # - database objects collected into "DDL_.sql" files 58 | # - added help comments 59 | # - general cleanup 60 | # 61 | # 2021-11-02 Derrick Cole 62 | # - version 1.4 63 | # - default vs named instance support 64 | # - corrected scripter AppendToFile option 65 | # - cleaned up help comments, added SMO assembly URL 66 | # 
67 | # 2021-11-17 Derrick Cole 68 | # - version 1.5 69 | # - added object inventory dump 70 | # - converted urnCollection/scripter calls from once per object to once per object type 71 | # - incorporated more robust checks for required assemblies 72 | # - included verbiage for addressing missing required assemblies and alternatives 73 | # - removed IncludeIfNotExists scripter option 74 | # - minor cleanup 75 | # 76 | # 2021-12-03 Derrick Cole 77 | # - version 1.6 78 | # - added more robust checking for sysadmin once connected to instance (via is_srvrolemember() query instead of login when logins are group-managed) 79 | # - adjusted psadmin-pre-reqs.ps1 script to check for assemblies prior to installing module 80 | # - minor cleanup 81 | # 82 | # 2021-12-09 Derrick Cole 83 | # - version 1.7 84 | # - refactored server and server\instance connection handling (code and command-line parameters) to account for all combinations of (un)specified server, instance, port, and tcp format 85 | # - minor cleanup 86 | # 87 | # 2021-12-16 Derrick Cole 88 | # - version 1.8 89 | # - added ExtendedProperties to the scripter options list to capture comments (and other extended properties) 90 | # 91 | # 2022-01-07 Derrick Cole 92 | # - version 1.9 93 | # - added more robust handling of encrypted objects and tracking of same in object_inventory.csv 94 | # - minor cleanup 95 | # 96 | # 2022-02-10 Derrick Cole 97 | # - version 2.0 98 | # - added interactive prompts with default values for parameters not specified on the command line 99 | # - separated ServerInstance parameter into separate ServerName and InstanceName parameters 100 | # - added more robust handling of server names and instance names 101 | # - relocated default script directory to directory containing script 102 | # - removed UseTcp parameter 103 | # - minor cleanup 104 | # 105 | # 2022-02-22 Derrick Cole 106 | # - version 2.1 107 | # - added support for instance-level linked server scripting 108 | # 109 | # 2022-04-05 Derrick Cole 110 | # - version 2.2 111 | # - added additional user instructions and clarified prompts 112 | # - more robust handling of empty arrays 113 | # - moved to one scripter call per object (as opposed to per object type) 114 | # - minor cleanup 115 | # 116 | # 2022-05-20 Derrick Cole 117 | # - version 2.3 118 | # - first cut at Synapse support 119 | # - defaulting to identifying type of instance based on value of ServerName 120 | # - minor cleanup 121 | # 122 | # 2022-07-07 Derrick Cole 123 | # - version 2.4 124 | # - corrected database and schema inclusion/exclusion pattern match handling 125 | # - added operating system check 126 | # 127 | # 2022-07-08 Derrick Cole 128 | # - version 2.5 129 | # - reorganized environment checks 130 | # - moved all function calls ahead of main block 131 | # - refreshed README.md to current version 132 | # 133 | # 2022-07-26 Derrick Cole 134 | # - version 2.6 135 | # - added support for PowerShell 7 136 | # - simplified assembly acquisition instructions 137 | # - added support for *nix execution 138 | # - minor cleanup 139 | # 140 | # 2022-08-22 Derrick Cole 141 | # - version 2.7 142 | # - corrected handling of scripter file paths containing spaces 143 | # - verified script works on PowerShell 5 (Windows) and PowerShell 7 (on Windows and *nix) with SQL Server module 144 | # 145 | # 2022-09-01 Derrick Cole 146 | # - version 2.8 147 | # - added '' delimiters between object definitions 148 | # - enforced ASCII encoding output (clash between appending the delimiter and the scripter output) 149 | # 
- added calls to resolve-path to, well, resolve the script directory path prior to setting scripterfile 150 | -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/README.md: -------------------------------------------------------------------------------- 1 | # Alternative SQL Server Extraction Methods 2 | 3 | ## Version 4 | 0.0.96 5 | 6 | ## Extraction Methods 7 | 8 | This folder contains a set of alternative methods for code extraction for SQL Server, in case SQL Server Management Studio cannot be executed on your system. 9 | - mssql-scripter: a Python package developed by Microsoft to generate data definition language (DDL) and data manipulation language (DML) T-SQL scripts for database objects in SQL Server. We recommend this option for macOS and Linux. It also runs on Windows, but always try SSMS first in Windows environments. It requires an existing Python installation on your system. 10 | 11 | - PowerShell Extraction Script: the script attempts to connect to an instance of SQL Server and retrieves certain object definitions as individual DDL files to a local directory. This script must be executed in a Windows environment; we recommend it only when neither SSMS nor mssql-scripter can be executed on your system. 12 | 13 | 14 | ## Table Sizing Report 15 | 16 | -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/Table _sizing_report_query.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/AlternativeSQLServerExtractionMethods/Table _sizing_report_query.pdf -------------------------------------------------------------------------------- /AlternativeSQLServerExtractionMethods/mssql-scripter/mssql-scripter.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/AlternativeSQLServerExtractionMethods/mssql-scripter/mssql-scripter.pdf -------------------------------------------------------------------------------- /BigQuery/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2024 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 | 9 | 10 | -------------------------------------------------------------------------------- /BigQuery/README.md: -------------------------------------------------------------------------------- 1 | # BigQuery DDL Export Scripts 2 | 3 | This repository offers a collection of straightforward scripts designed to facilitate the export of your BigQuery code, making it easier to migrate to [Snowflake](https://www.snowflake.com/). These scripts are specifically crafted to simplify the process of extracting your BigQuery code artifacts, such as stored procedures, functions, and views, ensuring a smooth transition to [Snowflake](https://www.snowflake.com/) using [SnowConvert](https://docs.snowconvert.com/snowconvert/for-google-bigquery/introduction). 4 | 5 | ## Version 6 | 0.0.96 7 | 8 | ## Usage 9 | 10 | The following are the steps to execute the DDL Code Generation. They can be executed in Linux/Unix. 11 | 12 | Remove Windows. We might add a side note on how to execute them on Windows 13 | 14 | ## How does this work? 15 | 16 | The script `create_ddls.sh` will connect to your database and create a collection of SQL files. 17 | 18 | ## Prerequisits 19 | 20 | 1. Cloud SDK needs to be installed. If you have not installed it, you can follow [these](https://cloud.google.com/sdk/docs/install#linux) instructions. 21 | 2. The user must have Admin or Owner privileges, otherwise no information will be retrieved. 22 | 3. The user must be granted with a role with the `bigquery.datasets.get` permission. If there is no roles with it, you could create a custom role just for this. 23 | 24 | 25 | ## Usage 26 | 27 | The following are the steps to execute the DDL Code Generation. They can be executed in Linux/Unix environments. 28 | 29 | 1. Modify the `create_ddls.sh` that is located in the `bin` folder 30 | - The region setting will be at the top of this file. 31 | - You must log in by going to a link in your browser when you run `./google-cloud-sdk/bin/gcloud init`, and then select the cloud project to use. 32 | 33 | 2. Before executing the script ensure `create_ddls.sh` is at the same folder level with `./google-cloud-sdk/` 34 | - Finally, run `create_ddls.sh` to extract the DDLs from BigQuery 35 | - After a successful run, remove region information from the top line of `create_ddls.sh`. 36 | 37 | ### Arguments 38 | 39 | ```--version``` 40 | 41 | Check the current version of the extraction scripts. 42 | 43 | ```--help``` 44 | 45 | Display the help screen. 46 | 47 | ```-s "schema1, schema2 [, ...]``` 48 | 49 | The parameter to limit to an in-list of schemas using the following structure schema1 [, ...]. 50 | 51 | 52 | ### DDL Files 53 | These files will contain the definitions of the objects specified by the file name. 54 | 55 | * `DDL_Schema.sql` 56 | * `DDL_Tables.sql` 57 | * `DDL_External_Tables.sql` 58 | * `DDL_Views.sql` 59 | * `DDL_Functions.sql` 60 | * `DDL_Procedures.sql` 61 | * `DDL_Reservations.sql` 62 | * `DDL_Capacity_commitments.sql` 63 | -------------------------------------------------------------------------------- /DB2/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 
2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /DB2/README.md: -------------------------------------------------------------------------------- 1 | # DB2 Export Scripts 2 | 3 | This repository provides some simple scripts to help export your DB2 code so it can be migrated to [Snowflake](https://www.snowflake.com/) using [SnowConvert](https://docs.snowconvert.com/snowconvert/for-db2/introduction) 4 | 5 | ## Version 6 | 0.0.96 7 | 8 | ## Usage 9 | 10 | The following are the steps to execute the DDL Code Generation. They can be executed in Linux/Unix environments. 11 | 12 | ## **For Linux/Unix:** 13 | 14 | 1 - Modify `create_ddls.sh` located in the `bin` folder. 15 | Using a text editor, modify the following parameters: 16 | 17 | * `DATABASES_TO_EXCLUDE` 18 | 19 | That variable determines which databases, if any, you want to exclude from the extraction. 20 | 21 | **It is required to use a user with administrative privileges (DBA)** and to run on a production-like environment with recently updated statistics. 22 | 23 | 24 | 2 - After modifying it, the `create_ddls.sh` file can be run from the command line to execute the extract. The following files will be created in the directory `/object_extracts/DDL`: 25 | 26 | 3 - Run `create_ddls.sh --version` to check the current version of the extraction scripts. 27 | 28 | ## **For Windows:** 29 | 30 | 1 - Modify `create_ddls.ps1` located in the `bin` folder. 31 | Using a text editor, modify the following parameters: 32 | 33 | * `DATABASES_TO_EXCLUDE` 34 | 35 | That variable determines which databases, if any, you want to exclude from the extraction. 36 | 37 | **It is required to use a user with administrative privileges (DBA)** and to run on a production-like environment with recently updated statistics. 38 | 39 | 40 | 2 - After modifying it, the `create_ddls.ps1` file can be run from the command line to execute the extract. The following files will be created in the directory `/object_extracts/DDL`: 41 | 42 | 43 | ### DDL Files 44 | For each database a folder with the database name and a file called `DDL_All.sql` will be generated. It will contain the definitions of the objects in the database.
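For illustration, here is a minimal sketch of a run and of the layout it should produce. It assumes the DB2 command line tools (`db2` and `db2look`) are available on the machine; the database name `SAMPLE` is only a hypothetical example, and one folder is created per catalogued database:

```bash
cd DB2/bin                     # run from the bin folder of this repository
./create_ddls.sh --version     # optional: confirm the script version first
./create_ddls.sh               # extract DDL plus the volumetric reports described below

# Expected layout, relative to the bin folder:
#   ../object_extracts/DDL/SAMPLE/DDL_All.sql
#   ../object_extracts/Reports/SAMPLE/volumetrics_per_object.txt
#   ../object_extracts/Reports/SAMPLE/volumetrics_per_database.txt
#   ../object_extracts/Reports/SAMPLE/db_size.txt
```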
45 | 46 | ### Reports 47 | 48 | For each database some volumetrics reports will be created: 49 | 50 | - `volumetrics_per_object.txt` 51 | - `volumetrics_per_database.txt` 52 | - `db_size.txt` 53 | 54 | ## Reporting issues and feedback 55 | 56 | If you encounter any bugs with the tool please file an issue in the 57 | [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of our GitHub repo. 58 | 59 | ## License 60 | 61 | These scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/DB2/License.txt). 62 | -------------------------------------------------------------------------------- /DB2/bin/create_ddls.ps1: -------------------------------------------------------------------------------- 1 | Write-Output "DB2 DDL Export script" 2 | Write-Output "Getting list of databases" 3 | $OUTPUTDIR = "../object_extracts" 4 | ### Get List of Database 5 | 6 | ## You can modify this variable to exclude some databases: 7 | ## For example if you want to exclude database TESTDB just set: 8 | ## DATABASES_TO_EXCLUDE=@("TESTDB") 9 | ## If you want to exclude database TESTDB and database SAMPLE just set: 10 | ## DATABASES_TO_EXCLUDE=@("TESTDB","SAMPLE") 11 | ## You can use regular any valid regular expression as a pattern to exclude the databases to exclude 12 | $DATABASES_TO_EXCLUDE = @() 13 | ## DB Reports 14 | $SCHEMA_FILTER = "%" 15 | 16 | $DDLS = "$OUTPUTDIR/DDL" 17 | $REPORTS = "$OUTPUTDIR/Reports" 18 | 19 | IF (-Not (Test-Path "$DDLS")) { mkdir -p "$DDLS" } 20 | IF (-Not (Test-Path "$REPORTS")) { mkdir -p $REPORTS } 21 | ## Get list of databases 22 | $lines = (db2 list db directory) | ForEach-Object { "$_" } 23 | $DBS = $lines | Where-Object { $_ -match "Database alias" } | ForEach-Object { $_.Split("=")[1].Trim() } | Where-Object { $_ -notin $DATABASES_TO_EXCLUDE } 24 | Write-Output "Output Directory: $OUTPUTDIR" 25 | Foreach ($db in $DBS) { 26 | IF (-Not (Test-Path "$DDLS/$db")) { mkdir -p "$DDLS/$db" } 27 | IF (-Not (Test-Path "$REPORTS/$db")) { mkdir -p "$REPORTS/$db" } 28 | 29 | Write-Output "Processing Database $db" 30 | db2look -d $db -e -l > "$DDLS/$db/DDL_All.sql" 31 | 32 | ## Get REPORTS 33 | ## Get table volumetrics 34 | db2 "connect to $db" 35 | 36 | db2 "SELECT SUBSTR(TABSCHEMA,1,10) AS SCHEMA, SUBSTR(TABNAME,1,15) AS TABNAME, 37 | INT(DATA_OBJECT_P_SIZE) AS OBJ_SZ_KB, 38 | INT(INDEX_OBJECT_P_SIZE) AS INX_SZ_KB, 39 | INT(XML_OBJECT_P_SIZE) AS XML_SZ_KB 40 | FROM SYSIBMADM.ADMINTABINFO 41 | WHERE TABSCHEMA LIKE '%' 42 | ORDER BY 3 DESC;" > "$REPORTS/$db/volumetrics_per_object.txt" 43 | 44 | db2 "SELECT SUBSTR(TABSCHEMA,1,10) AS SCHEMA, 45 | SUM(DATA_OBJECT_P_SIZE) AS OBJ_SZ_KB, 46 | SUM(INDEX_OBJECT_P_SIZE) AS INX_SZ_KB, 47 | SUM(XML_OBJECT_P_SIZE) AS XML_SZ_KB 48 | FROM SYSIBMADM.ADMINTABINFO 49 | GROUP BY TABSCHEMA 50 | ORDER BY 2 DESC;" > "$REPORTS/$db/volumetrics_per_database.txt" 51 | 52 | ### DATABASE SIZE 53 | 54 | db2 "CALL GET_DBSIZE_INFO(?,?,?,-1)" > "$REPORTS/$db/db_size.txt" 55 | 56 | } 57 | 58 | 59 | 60 | 61 | -------------------------------------------------------------------------------- /DB2/bin/create_ddls.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | VERSION="0.0.96" 3 | 4 | # This script extracts DDLs from DB2 databases using the db2look utility. 5 | # It generates DDL scripts for all databases listed in the DB2 directory, 6 | # excluding those specified in the DATABASES_TO_EXCLUDE variable. 
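# Outline of the steps below (descriptive comments only; behavior is unchanged):
#   1. Handle the optional --version argument and exit after printing the version.
#   2. Build the list of local databases from `db2 list db directory`, dropping any
#      name that matches the DATABASES_TO_EXCLUDE pattern.
#   3. For each remaining database, run db2look to capture the full DDL, then query
#      SYSIBMADM.ADMINTABINFO and call GET_DBSIZE_INFO to produce the volumetric reports.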
7 | export versionParam=$1 8 | 9 | if [ "$versionParam" = "--version" ]; then 10 | echo "You are using the $VERSION of the extraction scripts" 11 | exit 1 12 | fi 13 | 14 | echo "DB2 DDL Export script" 15 | echo "Getting list of databases" 16 | OUTPUTDIR="../object_extracts" 17 | ### Get List of Database 18 | 19 | ## You can modify this variable to exclude some databases: 20 | ## For example if you want to exclude database TESTDB just set: 21 | ## DATABASES_TO_EXCLUDE="TESTDB" 22 | ## If you want to exclude database TESTDB and database SAMPLE just set: 23 | ## DATABASES_TO_EXCLUDE="TESTDB|SAMPLE" 24 | ## You can use regular any valid regular expression as a pattern to exclude the databases to exclude 25 | DATABASES_TO_EXCLUDE="XXXXXXX" 26 | 27 | ## DB Reports 28 | SCHEMA_FILTER="%" 29 | 30 | DDLS="$OUTPUTDIR/DDL" 31 | REPORTS="$OUTPUTDIR/Reports" 32 | mkdir -p $DDLS 33 | mkdir -p $REPORTS 34 | DBS=$( db2 list db directory | grep Indirect -B 5 |grep "Database alias" |awk {'print $4'} |sort -u | uniq 2>/dev/null | grep -v -E $DATABASES_TO_EXCLUDE) 35 | for db in $DBS 36 | do 37 | mkdir -p "$DDLS/$db" 38 | mkdir -p "$REPORTS/$db" 39 | echo "Processing Database $db" 40 | db2look -d $db -e -l > "$DDLS/$db/DDL_All.sql" 41 | 42 | ## Get REPORTS 43 | ## Get table volumetrics 44 | db2 "connect to $db" 45 | 46 | db2 "SELECT SUBSTR(TABSCHEMA,1,10) AS SCHEMA, SUBSTR(TABNAME,1,15) AS TABNAME, 47 | INT(DATA_OBJECT_P_SIZE) AS OBJ_SZ_KB, 48 | INT(INDEX_OBJECT_P_SIZE) AS INX_SZ_KB, 49 | INT(XML_OBJECT_P_SIZE) AS XML_SZ_KB 50 | FROM SYSIBMADM.ADMINTABINFO 51 | WHERE TABSCHEMA LIKE '%' 52 | ORDER BY 3 DESC;" > "$REPORTS/$db/volumetrics_per_object.txt" 53 | 54 | db2 "SELECT SUBSTR(TABSCHEMA,1,10) AS SCHEMA, 55 | SUM(DATA_OBJECT_P_SIZE) AS OBJ_SZ_KB, 56 | SUM(INDEX_OBJECT_P_SIZE) AS INX_SZ_KB, 57 | SUM(XML_OBJECT_P_SIZE) AS XML_SZ_KB 58 | FROM SYSIBMADM.ADMINTABINFO 59 | GROUP BY TABSCHEMA 60 | ORDER BY 2 DESC;" > "$REPORTS/$db/volumetrics_per_database.txt" 61 | 62 | ### DATABASE SIZE 63 | 64 | db2 "CALL GET_DBSIZE_INFO(?,?,?,-1)" > "$REPORTS/$db/db_size.txt" 65 | 66 | done 67 | 68 | 69 | 70 | 71 | -------------------------------------------------------------------------------- /Databricks/Download_jobs_sources.dbc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Databricks/Download_jobs_sources.dbc -------------------------------------------------------------------------------- /Databricks/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Databricks/README.md: -------------------------------------------------------------------------------- 1 | # DataBricks Export Scripts 2 | 3 | This repository provides some simple scripts to help export your Databricks code. 4 | 5 | There are several ways to extract your code from Databricks. We provide several alternatives; choose the one that works best for your scenario. 6 | 7 | ## Version 8 | 0.0.96 9 | 10 | ## Extracting with the DBX Command Line Tool 11 | 12 | The Databricks command line tool can be used to export your application code. 13 | 1. Open a terminal. 14 | 2. Install the databricks command line tool. Follow the instructions in the [documentation](https://docs.databricks.com/dev-tools/cli/index.html). In most cases just running `pip install databricks-cli` should be enough. 15 | 3. From the terminal, set up a pair of environment variables: 16 | It should look something like this: 17 | ``` 18 | $ export DATABRICKS_HOST=https://*************.azuredatabricks.net/ 19 | $ export DATABRICKS_TOKEN=************************************ 20 | ``` 21 | Adjust those variables to point to your Databricks account. 22 | 4. You can then run: 23 | ``` 24 | databricks workspace list 25 | ``` 26 | which will show a listing like: 27 | ``` 28 | Users 29 | Shared 30 | Repos 31 | ``` 32 | 5. Export a workspace. For example, to export the entire `Shared` workspace you can run: 33 | ``` 34 | $ databricks workspace export_dir "/Shared" "~/exportSharedWorkspace" -o 35 | ``` 36 | 6. Zip the folder. 37 | ``` 38 | zip -r exportedWorkspace.zip ~/exportSharedWorkspace 39 | ``` A consolidated sketch of these commands is provided in the appendix at the end of this document. 40 | 41 | ## Extracting with a DBX Notebook 42 | 43 | This is another alternative for extracting your source code. It produces a ZIP file of source code (notebooks or Python sources in your repo or DBFS) in one of two modes: the first includes only sources from the top time-consuming jobs, and the second includes all source files from the running jobs. This means interactive notebooks that are not scheduled to run will not be exported. 44 | 45 | 1. Open your Databricks workspace and create a new notebook. 46 | 2. Open the File menu and click Import. 47 | 3. Select URL and paste ***https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/Databricks/Download_jobs_sources.dbc*** 48 | 4. Follow the notebook instructions. 49 | 1. Install the Databricks PyPI dependency. 50 | 2. Update the configuration according to your cluster (host_url and token). ***We advise against using the token directly in the notebook. Please store it in a secret scope, using the Databricks CLI***. For more details, see [Databricks Authentication](https://docs.databricks.com/dev-tools/api/latest/authentication.html) 51 | 52 | ![Exporting Jobs sources](./images/notebook_export_source_codes.png) 53 | 54 | ## Reporting issues and feedback 55 | 56 | If you encounter any bugs with the tool, please file an issue in the 57 | [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of our GitHub repo. 58 | 59 | ## License 60 | 61 | These scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/Databricks/License.txt).
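## Appendix: Consolidated CLI Example

The sketch below simply strings together the CLI commands from the steps above into one sequence. The host, token, and output paths are placeholders to replace with your own values, and it assumes the `databricks` CLI can be installed with pip on your machine:

```bash
pip install databricks-cli

# Point the CLI at your workspace (placeholder values)
export DATABRICKS_HOST=https://<your-workspace>.azuredatabricks.net/
export DATABRICKS_TOKEN=<your-personal-access-token>

# Confirm connectivity, then export and zip the Shared workspace
databricks workspace list
databricks workspace export_dir "/Shared" ~/exportSharedWorkspace -o
zip -r exportedWorkspace.zip ~/exportSharedWorkspace
```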
62 | -------------------------------------------------------------------------------- /Databricks/images/notebook_export_source_codes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Databricks/images/notebook_export_source_codes.png -------------------------------------------------------------------------------- /Hive/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Hive/README.md: -------------------------------------------------------------------------------- 1 | # Hive DDL Export Script 2 | 3 | This repository provides scripts to help exporting Hive DDL so it can be migrated to [Snowflake](https://www.snowflake.com/). Hive versions 4.0 and above, as well as versions below 4.0, are supported. 4 | 5 | ## Version 6 | 0.0.96 7 | 8 | ## Usage 9 | 10 | Extracts all table and view DDL in the specified database, wildcard match, or all databases on the system (default). Beeline is used by default to create a JDBC connection to Hive. No data is extracted. There are no third-party binary packages installed or used. 11 | 12 | This script can be executed in Linux or Unix from the command line. 13 | 14 | >**Important:** Extraction can take time. Databases in scope of migration should have DDL extracted only. If databases contain many objects or there are many databases, the process should be broken up into sets of databases using a wildcard or individual database extraction. 15 | 16 | ### 1. Environment Configuration 17 | 18 | Open `exp_ddl.sh` in a text editor and navigate to the "ENVIRONMENT CONFIGURATION" section starting on or around line 17. 19 | 20 | 1. Update `HOST` to match the host name of the server where Hive is running and will be used make a JDBC connection. 21 | 22 | Default: `localhost` 23 | 24 | 2. Update `PORT` to match the port number of the server where Hive is running and will be used to make a JDBC connection. 25 | 26 | Default: `10000` 27 | 28 | 3. Update `databasefilter` to explicitly name a single database or use a wildcard to match database names for a list of databases to extract DDL from. 
**The wildcard for Hive < 4.0 is `*` whereas the wildcard for Hive >= 4.0 is `%`.** 29 | 30 | Default: `*` (all databases, supporting Hive < 4.0) 31 | 32 | 4. (Optional) Update `root` to a custom folder name where the output is stored. 33 | 34 | Default: `ddl_extract` in the folder where this script is executed 35 | 36 | ### 2. Hive Extraction Command Options 37 | 38 | By default, the beeline CLI is used to create a JDBC connection. Alternatively, the Hive CLI can be used. Open `exp_ddl.sh` in a text editor and navigate to the "HIVE EXTRACTION COMMAND OPTIONS" section starting on or around line 49. 39 | 1. Select `beeline` or `hive` by commenting out the undesired command with a `#` and uncommenting the desired command. 40 | 41 | Default: `beeline` 42 | 43 | ### 3. Confirm extract script version 44 | 45 | Run `./exp_ddl.sh --version` from the command line and verify the version matches the release version at the top of this README. 46 | 47 | ### 4. Start DDL extraction 48 | 49 | Run `./exp_ddl.sh` from the command line to execute the extract. The DDL files will be created in the current directory under the `ddl_extract` subdirectory unless a different location was specified in the "Environment Configuration" section. 50 | 51 | ### 5. Share output 52 | 53 | After extracting DDL for all in-scope databases, send the extracted DDL SQL files and objects CSV files to your Snowflake representative for assessment and next steps. If you are not working with a Snowflake representative, skip this step. 54 | 55 | ## Reporting issues and feedback 56 | 57 | If you encounter any bugs with the script, first reach out to the Snowflake representative you are working with. If you are not working with a Snowflake representative, file an issue in the [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of the GitHub repository. 58 | 59 | ## License 60 | 61 | These scripts are licensed under the [MIT license](./License.txt). -------------------------------------------------------------------------------- /Hive/exp_ddl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | VERSION="0.0.96" 3 | 4 | # This script extracts DDLs from Hive databases using Beeline or Hive CLI. 5 | # It connects to a Hive server and retrieves the DDL statements for all tables and views in specified databases. 6 | # The output is written to a CSV file and individual SQL files for each database. 7 | 8 | # Function to generate sc_extraction_script header comment 9 | generate_header_comment() { 10 | local current_date=$(date '+%Y-%m-%d %H:%M:%S') 11 | local language_name="Hive DDL" 12 | echo "-- ${language_name} code extracted using script version ${VERSION} on ${current_date} " 13 | } 14 | export versionParam=$1 15 | 16 | if [ "$versionParam" = "--version" ]; then 17 | echo "You are using version $VERSION of the extraction scripts" 18 | exit 1 19 | fi 20 | 21 | # -------------------------------------------------------------------------------------------------------------------- 22 | # ENVIRONMENT CUSTOMIZATION 23 | # HOST 24 | # The host name of the server where Hive is running, used to make a JDBC connection. 25 | # 26 | # Default: localhost 27 | # 28 | # PORT 29 | # Port of the server where Hive is running, used to make a JDBC connection. 30 | # 31 | # Default: 10000 32 | # 33 | # databasefilter 34 | # Hive database name to filter for DDL extraction. Hive <4.0 use * (asterisk) and Hive >=4.0 use % (percent) 35 | # for wildcard. May also be an explicit database name or a wildcard for all databases in the system.
36 | # 37 | # For example: 38 | # Hive <4: "db*" or "*db*" or "my_db" (no wildcard) or * (all databases) 39 | # Hive >=4: "db%" or "%db%" or "my_db" (no wildcard) or % (all databases) 40 | # See https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-ShowDatabases 41 | # 42 | # Default: * (Hive <4.0 support) 43 | # 44 | # root 45 | # Name of folder to be created in the same path as the extraction script where output files will be written. 46 | # -------------------------------------------------------------------------------------------------------------------- 47 | 48 | HOST=localhost # Update as required 49 | PORT=10000 # Update as reuqired 50 | databasefilter="%" # Hive database name to filter for DDL extraction. Hive <4.0 use * and Hive >=4.0 use % wildcard 51 | root="ddl_extract" # Folder name created below where script executes to store output 52 | 53 | # -------------------------------------------------------------------------------------------------------------------- 54 | # HIVE EXTRACTION COMMAND OPTIONS 55 | # Beeline connection through JDBC is preferred. If beeline is not available, hive may be used directly from 56 | # the server. 57 | # -------------------------------------------------------------------------------------------------------------------- 58 | 59 | hivecmd="beeline -u jdbc:hive2://${HOST}:${PORT} --showHeader=false --outputformat=tsv2 -e " # Use beeline CLI (preferred) 60 | #hivecmd="hive -e" # Use hive CLI (fallback) 61 | 62 | # -------------------------------------------------------------------------------------------------------------------- 63 | # EXTRACTION ROUTINE 64 | # Customization not rueqired for this section. Do NOT make changes unless there is a extraction error due to 65 | # unique system configuration. 66 | # -------------------------------------------------------------------------------------------------------------------- 67 | 68 | current_time=$(date "+%Y%m%d%-H%-M%-S") 69 | csv="${root}/all_objects.${current_time}.csv" #master list of all tables/views found 70 | 71 | mkdir -p ${root} 72 | echo "database,object_name,object_type,size_in_bytes,hdfs_location,serde,inputformat,outputformat" >$csv 73 | 74 | set -f #turn off expansion for wildcard 75 | databases=$(${hivecmd} "show databases like '${databasefilter}';") 76 | set +f #turn on expansion for wildcard 77 | 78 | all_db_names=${databases} 79 | 80 | for db in $all_db_names 81 | do 82 | expfile=$root/${db}.sql 83 | 84 | tables=$(${hivecmd} "show tables in ${db};") 85 | all_tab_names=`echo "${tables}"` 86 | 87 | if [ ! -z "${all_tab_names}" ] 88 | then 89 | # Initialize file with sc_extraction_script header comment 90 | generate_header_comment > $expfile 91 | echo "" >> $expfile 92 | echo " /**** Start DDLs for Tables in ${db} ****/ " >> $expfile 93 | fi 94 | 95 | for table in $all_tab_names 96 | do 97 | sql="show create table ${db}.${table};" 98 | echo " ====== Running SHOW CREATE TABLE Statement for $db.${table} ======= : " 99 | results=`${hivecmd} "use ${db}; $sql"` 100 | loc=$(echo "$results" | awk -F 'LOCATION' '{print $2}' | awk '{print $1;}' | awk -F '/' '{for (i=4; i> $expfile 118 | echo "" >> $expfile 119 | echo "${db},${table},${objtype},${size},${loc},${serde},${inputformat},${outputformat}" >>$csv 120 | done 121 | 122 | if [ ! 
-z "${all_tab_names}" ] 123 | then 124 | echo " /**** End DDLs for Tables in ${db} ****/ " >> $expfile 125 | fi 126 | done -------------------------------------------------------------------------------- /LEGAL.md: -------------------------------------------------------------------------------- 1 | # Legal Notice 2 | 3 | ## Disclaimer 4 | 5 | This application is not part of the Snowflake Service and is governed by the terms in the [LICENSE](./LICENSE) file, unless expressly agreed to in writing. 6 | 7 | ## Risk Acknowledgement 8 | 9 | You use this application at your own risk, and Snowflake has no obligation to support your use of this application. 10 | 11 | ## Additional Information 12 | 13 | - This is provided as-is without warranties of any kind, either express or implied. 14 | - No support or maintenance is guaranteed. 15 | - Snowflake bears no responsibility for any issues arising from the use of this application. 16 | 17 | --- 18 | 19 | © Snowflake Inc. All rights reserved. 20 | -------------------------------------------------------------------------------- /Netezza/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Oracle/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
8 | 9 | 10 | -------------------------------------------------------------------------------- /Oracle/bin/create_ddls.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | REM GENERAL INSTRUCTIONS: This script is used to extract object DDL from your Oracle Database. Please adjust the variables below 3 | REM to match your environment. Once completed, your extracted DDL code will be stored in the object_extracts folder. 4 | 5 | SET VERSION=0.0.96 6 | 7 | SET ORACLE_SID=ORCL 8 | 9 | SET CONNECT_STRING="system/System123" 10 | 11 | SET SCRIPT_PATH="\\Mac\Home\Documents\Workspace\SC.DDLExportScripts\Oracle" 12 | 13 | SET OUTPUT_PATH=%SCRIPT_PATH% 14 | 15 | 16 | if not exist %SCRIPT_PATH% ( 17 | echo "The script_path path does not exist." 18 | EXIT /b 19 | ) 20 | 21 | echo [%date% %time%] Info: Execute Oracle extraction scripts: Started 22 | echo. 23 | echo [%date% %time%] Info: Step 1/4 - Creating Directories: Started 24 | 25 | REM Path to where object extracts are written 26 | 27 | mkdir %OUTPUT_PATH%\object_extracts 28 | mkdir %OUTPUT_PATH%\object_extracts\DDL 29 | mkdir %OUTPUT_PATH%\object_extracts\STORAGE 30 | cd . > %OUTPUT_PATH%\object_extracts\DDL\.sc_extracted 31 | 32 | if not exist %OUTPUT_PATH% ( 33 | echo "The output path does not exist." 34 | EXIT /b 35 | ) 36 | 37 | echo [%date% %time%] Info: Step 1/4 - Creating Directories: Completed 38 | echo. 39 | echo [%date% %time%] Info: Step 2/4 - Extracting DDLs: Started 40 | 41 | REM Modify the operator and condition for the Oracle schemas to explicity include. 42 | REM By default all schemas, other than system schemas, will be included. 43 | REM Use uppercase names. Do not remove the parentheses or double quotes. 44 | SET INCLUDE_OPERATOR="LIKE" 45 | SET INCLUDE_CONDITION="('%%')" 46 | 47 | REM Modify the operator and condition for the Oracle schemas to explicity exclude. 48 | REM Not necessary to modify this if you are using the above section to explicity include only certain schemas. 49 | REM Use uppercase names. Do not remove the parentheses or double quotes. 50 | SET EXCLUDE_OPERATOR="IN" 51 | SET EXCLUDE_CONDITION="('XXX')" 52 | 53 | set FILE_NAME=create_ddls_plus.sql 54 | set FULL_PATH=%SCRIPT_PATH%\%file_name% 55 | 56 | @echo on 57 | sqlplus %CONNECT_STRING% @%FULL_PATH% %INCLUDE_OPERATOR% %INCLUDE_CONDITION% %EXCLUDE_OPERATOR% %EXCLUDE_CONDITION% %OUTPUT_PATH% %VERSION% 58 | 59 | @echo off 60 | echo. 61 | echo [%date% %time%] Info: Step 2/4 - Extracting DDLs: Completed 62 | echo. 63 | echo [%date% %time%] Info: Step 3/4 - Adding extraction headers: Started 64 | 65 | REM Add extraction script header to each DDL file 66 | SET DDL_FILES=DDL_Tables.sql DDL_Views.sql DDL_Functions.sql DDL_Procedures.sql DDL_Packages.sql DDL_Synonyms.sql DDL_Types.sql DDL_Indexes.sql DDL_Triggers.sql DDL_Sequences.sql DDL_DBlink.sql DDL_QUEUE_TABLES.sql DDL_OLAP_CUBES.sql DDL_MATERIALIZED_VIEWS.sql DDL_QUEUES.sql DDL_ANALYTIC_VIEWS.sql DDL_OPERATORS.sql 67 | 68 | for %%f in (%DDL_FILES%) do ( 69 | if exist "%OUTPUT_PATH%\object_extracts\DDL\%%f" ( 70 | echo Processing %%f... 
71 | REM Create temporary file with header 72 | echo -- ^ Oracle code extracted using script version %VERSION% on %date% ^ > "%OUTPUT_PATH%\object_extracts\DDL\%%f.tmp" 73 | type "%OUTPUT_PATH%\object_extracts\DDL\%%f" >> "%OUTPUT_PATH%\object_extracts\DDL\%%f.tmp" 74 | move "%OUTPUT_PATH%\object_extracts\DDL\%%f.tmp" "%OUTPUT_PATH%\object_extracts\DDL\%%f" 75 | ) 76 | ) 77 | 78 | echo [%date% %time%] Info: Step 3/4 - Adding extraction headers: Completed 79 | echo. 80 | echo [%date% %time%] Info: Step 4/4 - Oracle extraction scripts: Completed -------------------------------------------------------------------------------- /Oracle/bin/create_ddls.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | VERSION="0.0.96" 3 | 4 | # This script extracts DDLs from Oracle databases using SQL*Plus. 5 | # It connects to an Oracle instance and retrieves the DDL statements for schemas, tables, views, procedures, 6 | # functions, packages, and other database objects. 7 | export versionParam=$1 8 | 9 | if [ "$versionParam" = "--version" ]; then 10 | echo "You are using the version $VERSION of the extraction scripts" 11 | exit 1 12 | fi 13 | 14 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Execute Oracle extraction scripts: Started" 15 | 16 | export ORACLE_SID= 17 | export CONNECT_STRING=system/oracle 18 | export SCRIPT_PATH= 19 | export SQLCL_PATH= 20 | # Default value is the #SCRIPT_PATH folder, You can change the output directory here! 21 | export OUTPUT_PATH=$SCRIPT_PATH 22 | 23 | 24 | if [ ! -e "$SCRIPT_PATH" ]; then 25 | echo "The script path does not exist." 26 | exit 1 27 | fi 28 | 29 | if [ ! -e "$SQLCL_PATH" ]; then 30 | echo "The sqlcl path does not exist." 31 | exit 1 32 | fi 33 | 34 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 1/4 - Creating Directories: Started" 35 | 36 | #Path to where object extracts are written 37 | mkdir -p $OUTPUT_PATH/object_extracts 38 | mkdir -p $OUTPUT_PATH/object_extracts/DDL 39 | mkdir -p $OUTPUT_PATH/object_extracts/STORAGE 40 | touch -- "${OUTPUT_PATH}/object_extracts/DDL/.sc_extracted" 41 | 42 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 1/4 - Creating Directories: Completed" 43 | 44 | 45 | if [ ! -e "$OUTPUT_PATH" ]; then 46 | echo "The output path does not exist." 47 | exit 1 48 | fi 49 | 50 | # Modify the operator and condition for the Oracle schemas to explicity INCLUDE. 51 | # By default all schemas, other than system schemas, will be included. 52 | # Use uppercase names. Do not remove the parentheses or double quotes. 53 | export INCLUDE_OPERATOR=LIKE 54 | export INCLUDE_CONDITION="('%')" 55 | 56 | # Modify the operator and condition for the Oracle schemas to explicity EXCLUDE. 57 | # By default all schemas, other than system schemas, will be included. 58 | # Use uppercase names. Do not remove the parentheses or double quotes. 
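# Illustrative example (comment added for clarity; not part of the original script):
# to exclude the schemas SCOTT and OUTLN (placeholder names; substitute your own) you could set
#   export EXCLUDE_OPERATOR=IN
#   export EXCLUDE_CONDITION="('SCOTT','OUTLN')"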
59 | export EXCLUDE_OPERATOR=IN 60 | export EXCLUDE_CONDITION="('SYSMAN')" 61 | 62 | # Modify this JAVA variable to asign less or more memory to the JVM 63 | # export JAVA_TOOL_OPTIONS=-Xmx4G 64 | 65 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 2/4 - Extracting DDLs: Started" 66 | 67 | "$SQLCL_PATH"/sql $CONNECT_STRING @"$SCRIPT_PATH"/create_ddls.sql $INCLUDE_OPERATOR $INCLUDE_CONDITION $EXCLUDE_OPERATOR $EXCLUDE_CONDITION "$OUTPUT_PATH" $VERSION 68 | 69 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 2/4 - Extracting DDLs: Completed" 70 | 71 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 3/4 - Adding extraction headers: Started" 72 | 73 | # Add extraction script header to each DDL file 74 | DDL_FILES=("DDL_Tables.sql" "DDL_Views.sql" "DDL_Functions.sql" "DDL_Procedures.sql" "DDL_Packages.sql" "DDL_Synonyms.sql" "DDL_Types.sql" "DDL_Indexes.sql" "DDL_Triggers.sql" "DDL_Sequences.sql" "DDL_DBlink.sql" "DDL_QUEUE_TABLES.sql" "DDL_OLAP_CUBES.sql" "DDL_MATERIALIZED_VIEWS.sql" "DDL_QUEUES.sql" "DDL_ANALYTIC_VIEWS.sql" "DDL_OPERATORS.sql") 75 | 76 | for file in "${DDL_FILES[@]}"; do 77 | if [ -f "$OUTPUT_PATH/object_extracts/DDL/$file" ]; then 78 | # Create temporary file with header 79 | temp_file=$(mktemp) 80 | echo "-- Oracle code extracted using script version $VERSION on $(date +%m/%d/%Y) " > "$temp_file" 81 | cat "$OUTPUT_PATH/object_extracts/DDL/$file" >> "$temp_file" 82 | mv "$temp_file" "$OUTPUT_PATH/object_extracts/DDL/$file" 83 | fi 84 | done 85 | 86 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 3/4 - Adding extraction headers: Completed" 87 | 88 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 4/4 - Oracle extraction scripts: Completed" 89 | -------------------------------------------------------------------------------- /Oracle/bin/create_ddls_plus.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | VERSION="0.0.96" 3 | 4 | # This script extracts DDLs from Oracle databases using SQL*Plus. 5 | # It connects to an Oracle instance and retrieves the DDL statements for schemas, tables, views, procedures, 6 | # functions, packages, and other database objects. 7 | export versionParam=$1 8 | 9 | if [ "$versionParam" = "--version" ]; then 10 | echo "You are using the version $VERSION of the extraction scripts" 11 | exit 1 12 | fi 13 | 14 | export ORACLE_SID= 15 | export CONNECT_STRING=system/oracle 16 | export SCRIPT_PATH= 17 | # Default value is the #SCRIPT_PATH folder, You can change the output directory here! 18 | export OUTPUT_PATH=$SCRIPT_PATH 19 | 20 | 21 | if [ ! -e "$SCRIPT_PATH" ]; then 22 | echo "The script path does not exist." 23 | exit 1 24 | fi 25 | 26 | #Path to where object extracts are written 27 | mkdir -p $OUTPUT_PATH/object_extracts 28 | mkdir -p $OUTPUT_PATH/object_extracts/DDL 29 | mkdir -p $OUTPUT_PATH/object_extracts/STORAGE 30 | 31 | if [ ! -e "$OUTPUT_PATH" ]; then 32 | echo "The output path does not exist." 33 | exit 1 34 | fi 35 | 36 | # Modify the operator and condition for the Oracle schemas to explicity INCLUDE. 37 | # By default all schemas, other than system schemas, will be included. 38 | # Use uppercase names. Do not remove the parentheses or double quotes. 39 | export INCLUDE_OPERATOR=LIKE 40 | export INCLUDE_CONDITION="('%')" 41 | 42 | # Modify the operator and condition for the Oracle schemas to explicity EXCLUDE. 43 | # By default all schemas, other than system schemas, will be included. 44 | # Use uppercase names. Do not remove the parentheses or double quotes. 
45 | export EXCLUDE_OPERATOR=IN 46 | export EXCLUDE_CONDITION="('SYSMAN')" 47 | 48 | # Modify this JAVA variable to asign less or more memory to the JVM 49 | # export JAVA_TOOL_OPTIONS=-Xmx4G 50 | 51 | sqlplus $CONNECT_STRING @"$SCRIPT_PATH"/create_ddls_plus.sql $INCLUDE_OPERATOR $INCLUDE_CONDITION $EXCLUDE_OPERATOR $EXCLUDE_CONDITION "$OUTPUT_PATH" $VERSION 52 | 53 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 3/4 - Adding extraction headers: Started" 54 | 55 | # Add extraction script header to each DDL file 56 | DDL_FILES=("DDL_Tables.sql" "DDL_Views.sql" "DDL_Functions.sql" "DDL_Procedures.sql" "DDL_Packages.sql" "DDL_Synonyms.sql" "DDL_Types.sql" "DDL_Indexes.sql" "DDL_Triggers.sql" "DDL_Sequences.sql" "DDL_DBlink.sql" "DDL_QUEUE_TABLES.sql" "DDL_OLAP_CUBES.sql" "DDL_MATERIALIZED_VIEWS.sql" "DDL_QUEUES.sql" "DDL_ANALYTIC_VIEWS.sql" "DDL_OPERATORS.sql") 57 | 58 | for file in "${DDL_FILES[@]}"; do 59 | if [ -f "$OUTPUT_PATH/object_extracts/DDL/$file" ]; then 60 | # Create temporary file with header 61 | temp_file=$(mktemp) 62 | echo "-- Oracle code extracted using script version $VERSION on $(date +%m/%d/%Y) " > "$temp_file" 63 | cat "$OUTPUT_PATH/object_extracts/DDL/$file" >> "$temp_file" 64 | mv "$temp_file" "$OUTPUT_PATH/object_extracts/DDL/$file" 65 | fi 66 | done 67 | 68 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 3/4 - Adding extraction headers: Completed" 69 | 70 | echo "[$(date '+%Y/%m/%d %l:%M:%S%p')] Info: Step 4/4 - Oracle extraction scripts: Completed" -------------------------------------------------------------------------------- /Oracle/setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal =1 3 | 4 | [metadata] 5 | name = snowconvert-export-oracle 6 | version = 0.0.10 7 | author = Mauricio Rojas 8 | author_email = mauricio.rojas@mobilize.net 9 | description = Mobilize.Net Oracle Export Tool for SnowConvert 10 | long_description = file: README.rst 11 | long_description_content_type = text/x-rst 12 | license_file = LICENSE.txt 13 | 14 | python_requires='>=3' 15 | 16 | url = https://github.com/Snowflake-Labs/SC.DDLExportScripts/tree/main/Oracle 17 | project_urls = 18 | Bug Tracker = https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues 19 | classifiers = 20 | Development Status :: 3 - Alpha 21 | Intended Audience :: Developers 22 | Intended Audience :: System Administrators 23 | Programming Language :: Python 24 | Programming Language :: Python :: 3 25 | Programming Language :: Python :: 3.4 26 | Programming Language :: Python :: 3.5 27 | Programming Language :: Python :: 3.6 28 | License :: OSI Approved :: MIT License 29 | 30 | [options] 31 | scripts= 32 | sc-oracle-export 33 | 34 | [flake8] 35 | ignore = 36 | # E501: line too long. 37 | E501, 38 | # F401, imported but unused, ignore where we import setup. 39 | F401, 40 | # E402 module level import not at top of file. 41 | # To maintain py2 - 3 compat certain orders of import is necessary. 
42 | E402 -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Database Export Scripts Collection for SnowConvert 2 | 3 | [![Run All Release Process](https://github.com/Snowflake-Labs/SC.DDLExportScripts/actions/workflows/release-all-ci.yml/badge.svg)](https://github.com/Snowflake-Labs/SC.DDLExportScripts/actions/workflows/release-all-ci.yml) 4 | 5 | ## Overview 6 | 7 | This repository contains utility scripts for exporting database objects from various database platforms to be migrated with the [SnowConvert tool](https://docs.snowconvert.com/sc/). These scripts help extract DDL (Data Definition Language) statements that can be used as input for SnowConvert, facilitating the migration process to Snowflake. 8 | 9 | ## Supported Databases 10 | 11 | - [Teradata](./Teradata) 12 | - [SQL Server](./SQLServer) 13 | - [Synapse](./Synapse) 14 | - [Oracle](./Oracle) 15 | - [Redshift](./Redshift) 16 | - [Netezza](./Netezza) 17 | - [Vertica](./Vertica) 18 | - [DB2](./DB2) 19 | - [Hive](./Hive) 20 | - [BigQuery](./BigQuery) 21 | - [Databricks](./Databricks) 22 | 23 | ## 🚀 Quick Start 24 | 25 | **📋 Required Setup for Contributors** 26 | 27 | After cloning this repository, please run the setup script to configure your development environment: 28 | 29 | ```bash 30 | ./setup.sh 31 | ``` 32 | 33 | This setup is required for all contributors and provides: 34 | - ✅ Standardized project configuration across all environments 35 | - ✅ Proper version control and code quality checks 36 | - ✅ Consistent development workflow for the entire team 37 | 38 | **Alternative manual setup:** 39 | 40 | ```bash 41 | ./.github/scripts/install-hooks.sh 42 | ``` 43 | 44 | **Note:** Working without proper setup may lead to versioning inconsistencies and CI/CD pipeline issues. 45 | 46 | ## Getting Started 47 | 48 | 1. Select the directory for your source database platform 49 | 2. Follow the instructions in the platform-specific README file 50 | 3. Use the exported DDL files as input for SnowConvert 51 | 52 | ## License 53 | 54 | This project is licensed under the Apache License 2.0 - see the [LICENSE](./LICENSE) file for details. 55 | 56 | -------------------------------------------------------------------------------- /Redshift/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
8 | 9 | 10 | -------------------------------------------------------------------------------- /Redshift/bin/create_ddls.ps1: -------------------------------------------------------------------------------- 1 | 2 | # GENERAL INSTRUCTIONS: This script is used to extract object DDL from your Redshift cluster. Please adjust the variables enclosed by <> 3 | # below to match your environment. Once completed, your extracted DDL code will be stored in the object_extracts folder. 4 | 5 | # Script version 6 | $VERSION="0.0.96" 7 | 8 | # ---- Variables to change ---- 9 | 10 | # General Variables 11 | $OUTPUT_PATH="" 12 | 13 | if ($OUTPUT_PATH -match '(?:\\|\/)*$') 14 | { 15 | # Remove trailing slashes 16 | $OUTPUT_PATH = $OUTPUT_PATH -replace '(?:\\|\/)*$', '' 17 | } 18 | 19 | # AWS Redshift Variables 20 | $RS_CLUSTER="" 21 | $RS_DATABASE="" 22 | $RS_SECRET_ARN="" 23 | 24 | # Script Variables 25 | $SCHEMA_FILTER="lower(schemaname) LIKE '%'" 26 | $MAX_ITERATIONS=60 #Every iteration waits 5 seconds. Must be > 0. 27 | # ---- END: Variables to change ---- 28 | 29 | 30 | if($MAX_ITERATIONS -lt 1) 31 | { 32 | $MAX_ITERATIONS = 60 33 | Write-Output "Detected iterations less than 1. Setting to 60." 34 | } 35 | 36 | function Check-Command($cmdname) 37 | { 38 | return [bool](Get-Command -Name $cmdname -ErrorAction SilentlyContinue) 39 | } 40 | 41 | if (-not (Check-Command -cmdname "aws")) 42 | { 43 | Write-Output "AWS CLI not found. Please check this link on how to install: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" 44 | exit 45 | } 46 | 47 | Write-Output "Creating output folders..." 48 | 49 | $ddl_output = "$OUTPUT_PATH\object_extracts\DDL" 50 | $log_output = "$OUTPUT_PATH\log" 51 | 52 | ## Create directories 53 | New-Item -ItemType Directory -Force -Path $OUTPUT_PATH | Out-Null 54 | New-Item -ItemType Directory -Force -Path $log_output | Out-Null 55 | New-Item -ItemType Directory -Force -Path $OUTPUT_PATH\object_extracts | Out-Null 56 | New-Item -ItemType Directory -Force -Path $ddl_output | Out-Null 57 | New-Item -ItemType File -Force -Path $ddl_output\.sc_extracted | Out-Null 58 | 59 | ## Create log files and tracking variables 60 | Out-File -FilePath $log_output\log.txt -InputObject "--------------" -Append 61 | Out-File -FilePath $log_output\log.txt -InputObject "Starting new extraction" -Append 62 | Out-File -FilePath $log_output\log.txt -InputObject "Variables:" -Append 63 | Out-File -FilePath $log_output\log.txt -InputObject $OUTPUT_PATH -Append 64 | Out-File -FilePath $log_output\log.txt -InputObject $SCHEMA_FILTER -Append 65 | 66 | # Define main variables 67 | Write-Output "Getting queries from files..." 68 | $queries = @{} # Hash to control queries execution 69 | $files = (Get-ChildItem -Path ../scripts/* -Include *.sql).Name # Get list of queries 70 | 71 | Write-Output "Sending queries to execute..." 72 | foreach ( $file in $files) 73 | { 74 | $query = Get-Content ..\scripts/$file -Raw 75 | $query = $query.replace('{schema_filter}', $SCHEMA_FILTER) 76 | # Execute queries on Redshift 77 | $response = aws redshift-data execute-statement --cluster-identifier $RS_CLUSTER --database $RS_DATABASE --secret-arn $RS_SECRET_ARN --sql "$query" | ConvertFrom-Json 78 | $queries[$file] = $response.Id 79 | } 80 | 81 | Write-Output "Waiting 20 seconds for queries to finish..." 82 | Start-Sleep -Seconds 20 83 | 84 | Write-Output "Starting query validation and extraction iterations..."
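# NOTE (comment added for clarity; not part of the original script): the loop below polls
# "aws redshift-data describe-statement" for each submitted query every 5 seconds, up to
# $MAX_ITERATIONS times. Results of FINISHED queries are written to the DDL output folder
# with a version header, FAILED queries are logged to log.txt, and pending queries are retried.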
85 | $i = 0 86 | while($i -ne $MAX_ITERATIONS) 87 | { 88 | $i++ 89 | if($queries.keys.count -ne 0) 90 | { 91 | # List to remove queries from Hash for next iteration when finished 92 | $to_remove = [System.Collections.Generic.List[string]]::new() 93 | foreach( $query in $queries.keys ) 94 | { 95 | $id = $queries[$query] 96 | Write-Output "Validating completion for query $query..." 97 | # Get statement state 98 | $response = aws redshift-data describe-statement --id $id | ConvertFrom-Json 99 | if ($response.Status -eq "FINISHED") 100 | { 101 | Write-Output "Query finished, starting extraction..." 102 | # Get statement results when finished 103 | $results_response = aws redshift-data get-statement-result --id $id | ConvertFrom-Json 104 | $data = $results_response.Records 105 | # Add comment header to the file 106 | $currentDate = Get-Date -Format "MM/dd/yyyy" 107 | $headerComment = "-- Redshift code extracted using script version $VERSION on $currentDate " 108 | Out-File -FilePath $ddl_output\$query -InputObject $headerComment -Encoding utf8 109 | $strings_data = [System.Collections.Generic.List[string]]::new() 110 | $data | ForEach-Object { $strings_data.Add($PSItem.stringValue) } 111 | Out-File -FilePath $ddl_output\$query -InputObject $strings_data -Append -Encoding utf8 112 | $to_remove.Add($query) 113 | } elseif ($response.Status -eq "FAILED") { 114 | Write-Output "Query failed... Error message:" 115 | Write-Output $response.Error 116 | # Save error to log 117 | Out-File -FilePath $log_output\log.txt -InputObject "Failed query:" -Append 118 | Out-File -FilePath $log_output\log.txt -InputObject $query -Append 119 | Out-File -FilePath $log_output\log.txt -InputObject $id -Append 120 | Out-File -FilePath $log_output\log.txt -InputObject $response.Error -Append 121 | $to_remove.Add($query) 122 | } else { 123 | Write-Output "Query still pending. Validating again in some seconds." 124 | } 125 | } 126 | foreach($query in $to_remove) 127 | { 128 | $queries.Remove($query) 129 | } 130 | } else { 131 | break 132 | } 133 | # Wait before continuing with next iteration 134 | Start-Sleep -Seconds 5 135 | } 136 | 137 | if($queries.keys.count -gt 0) 138 | { 139 | Write-Output "Not all queries have finished. Consider increasing iterations value to increase timeout." 140 | } else 141 | { 142 | Write-Output "Finished extracting RedShift DDL. Please check for output in the specified folder." 143 | } 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | -------------------------------------------------------------------------------- /Redshift/bin/create_ddls.sh: -------------------------------------------------------------------------------- 1 | # GENERAL INSTRUCTIONS: This script is used to extract object DDL from your RedShift Cluster. Please adjust the variables with enclosed by <> 2 | # below to match your environment. Once completed, your extracted DDL code will be stored in the object_extracts folder. 3 | 4 | # Script version 5 | VERSION="0.0.96" 6 | 7 | # ---- Variables to change ---- 8 | 9 | # General Variables 10 | OUTPUT_PATH="/example/path" 11 | 12 | # AWS RedShift Variables 13 | RS_CLUSTER="" 14 | RS_DATABASE="" 15 | RS_SECRET_ARN="" 16 | 17 | #Script Variables 18 | SCHEMA_FILTER="lower(schemaname) LIKE '%'" 19 | MAX_ITERATIONS=60 #Every iteration waits 5 seconds. Must be > 0. 
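# Illustrative example (comment added for clarity; not part of the original script):
# to restrict extraction to a single schema named "sales" you could set
#   SCHEMA_FILTER="lower(schemaname) = 'sales'"
# The value of SCHEMA_FILTER is substituted for {schema_filter} in each query under ../scripts.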
20 | # ---- END: Variables to change ---- 21 | 22 | OUTPUT_PATH="${OUTPUT_PATH/%\//}" 23 | 24 | # Validate if max iterations value is valid 25 | if [ $MAX_ITERATIONS -lt 0 ] 26 | then 27 | MAX_ITERATIONS=60 28 | echo "Detected iterations less than 0. Setting to 60." 29 | fi 30 | 31 | # Check if AWS Cli exists 32 | hash aws &> /dev/null 33 | if [ $? -eq 1 ]; then 34 | echo >&2 "AWS Cli not found. Please check this link on how to install: https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" 35 | exit 1 36 | fi 37 | 38 | echo "Creating output folders..." 39 | 40 | ddl_output=$OUTPUT_PATH/object_extracts/DDL 41 | log_output=$OUTPUT_PATH/log 42 | temp_output=$OUTPUT_PATH/temp 43 | 44 | mkdir -p "$ddl_output" 45 | mkdir -p "$log_output" 46 | mkdir -p "$temp_output" 47 | mkdir -p "$OUTPUT_PATH/object_extracts" 48 | mkdir -p "$OUTPUT_PATH/object_extracts/DDL" 49 | touch -- "${OUTPUT_PATH}/object_extracts/DDL/.sc_extracted" 50 | 51 | # Create log files and tracking variables 52 | echo "--------------" >> "$log_output/log.txt" 53 | echo "Starting new extraction" >> "$log_output/log.txt" 54 | echo "Variables:" >> "$log_output/log.txt" 55 | echo "$OUTPUT_PATH" >> "$log_output/log.txt" 56 | echo "$SCHEMA_FILTER" >> "$log_output/log.txt" 57 | 58 | # Define main variables 59 | cd ../scripts/ 60 | echo "Getting queries from files..." 61 | files=$(ls *.sql) 62 | declare -a queries 63 | i=0 64 | 65 | echo "Sending queries to execute..." 66 | for f in $files 67 | do 68 | # Read queries from scripts folder 69 | query=$(<$f) 70 | # Replace {schema_filter} in the query template 71 | final_query="${query/\{schema_filter\}/$SCHEMA_FILTER}" 72 | # Execute query 73 | response=$(aws redshift-data execute-statement --cluster-identifier $RS_CLUSTER --database $RS_DATABASE --secret-arn $RS_SECRET_ARN --sql "$final_query" --output yaml 2>&1) 74 | if [ $? -ne 0 ] 75 | then 76 | # Log and print if there is an error 77 | echo $response | tee -a "$log_output/log.txt" 78 | else 79 | # Extract Id from response 80 | re="Id: ([[:xdigit:]]{8}(-[[:xdigit:]]{4}){3}-[[:xdigit:]]{12})" 81 | [[ $response =~ $re ]] && queries[$i]="$f=${BASH_REMATCH[1]}" 82 | i=$((i+1)) 83 | fi 84 | done 85 | 86 | if [ ${#queries[@]} -eq 0 ] 87 | then 88 | echo "Unable to send queries to execute. Please make sure that the connection to AWS is properly configured and that the connection parameters are correct." 89 | exit 1 90 | fi 91 | 92 | echo "Waiting 20 seconds for queries to finish..." 93 | sleep 20 94 | 95 | echo "Starting query validation and extraction iterations..." 96 | i=0 97 | while [ $i -ne $MAX_ITERATIONS ] 98 | do 99 | i=$((i+1)) 100 | if [ ${#queries[@]} -ne 0 ] 101 | then 102 | # List to remove queries from queries list for next iteration when finished 103 | to_remove=() 104 | for query in "${queries[@]}" 105 | do 106 | # Split value from array 107 | IFS='=' 108 | read -ra parts <<< "$query" 109 | echo "Validating completion for query ${parts[0]}..." 110 | statement_response=$(aws redshift-data describe-statement --id ${parts[1]} --output yaml) 111 | # Get statement status 112 | re="Status: ([a-zA-Z]*)" 113 | [[ $statement_response =~ $re ]] && status="${BASH_REMATCH[1]}" 114 | if [ "$status" = "FINISHED" ] 115 | then 116 | echo "Query finished, starting extraction..." 
117 | # Extract query result into file 118 | aws redshift-data get-statement-result --id ${parts[1]} --output text > "$temp_output/${parts[0]}" 119 | # Clean output (remove first 2 lines and prefix for RECORDS keyword 120 | sed -e 1,2d "$temp_output/${parts[0]}" > "$temp_output/${parts[0]}.clean" 121 | perl -i -pe 's/^RECORDS\s//g' "$temp_output/${parts[0]}.clean" 122 | # Add comment header to the final file 123 | echo "-- Redshift code extracted using script version $VERSION on $(date +%m/%d/%Y) " > "$ddl_output/${parts[0]}" 124 | cat "$temp_output/${parts[0]}.clean" >> "$ddl_output/${parts[0]}" 125 | # Add query to the remove list 126 | to_remove+=("$query") 127 | elif [ "$status" = "FAILED" ] 128 | then 129 | echo "Query failed... Error message:" 130 | # Extract error messge from response 131 | error_re="Error: '(.*)'\\s+\\w+:" 132 | [[ $statement_response =~ $error_re ]] && error_msg="${BASH_REMATCH[1]}" 133 | # Save error to log 134 | echo "Failed query:" >> "$log_output/log.txt" 135 | echo "${parts[0]}" >> "$log_output/log.txt" 136 | echo "${parts[1]}" >> "$log_output/log.txt" 137 | echo "$error_msg" | tee -a "$log_output/log.txt" 138 | # Add query to the remove list 139 | to_remove+=("$query") 140 | else 141 | echo "Query still pending. Validating again in some seconds." 142 | fi 143 | done 144 | 145 | # Iteration to remove queries from queue when finished 146 | for ele in "${to_remove[@]}"; do 147 | for i in "${!queries[@]}"; do 148 | if [[ "${queries[i]}" = "$ele" ]]; then 149 | unset queries[i] 150 | fi 151 | done 152 | done 153 | 154 | # Wait 5 seconds to give some more time to queries to finish 155 | sleep 5 156 | 157 | else 158 | break 159 | fi 160 | done 161 | 162 | # Validate if there are queries pending 163 | if [ ${#queries[@]} -gt 0 ] 164 | then 165 | echo "Finished process, but not all queries finished due to timeout." >> "$log_output/log.txt" 166 | echo "Not all queries have finished. Consider increasing iterations value to increase timeout." 167 | else 168 | echo "Finished extracting Redshift DDL. Please check for output in the specified folder." 169 | fi -------------------------------------------------------------------------------- /Redshift/scripts/DDL_Function.sql: -------------------------------------------------------------------------------- 1 | WITH arguments 2 | AS ( 3 | SELECT oid 4 | , i 5 | , arg_name [i] AS argument_name 6 | , arg_types [i-1] argument_type 7 | FROM ( 8 | SELECT generate_series(1, arg_count) AS i 9 | , arg_name 10 | , arg_types 11 | , oid 12 | FROM ( 13 | SELECT oid 14 | , proargnames arg_name 15 | , proargtypes arg_types 16 | , pronargs arg_count 17 | FROM pg_proc 18 | WHERE proowner != 1 19 | ) t 20 | ) t 21 | ) 22 | SELECT 23 | ddl 24 | FROM 25 | ( 26 | SELECT 27 | schemaname 28 | , udfname 29 | , seq 30 | , trim(ddl) ddl 31 | FROM 32 | ( 33 | SELECT 34 | n.nspname AS schemaname 35 | , p.proname AS udfname 36 | , p.oid AS udfoid 37 | , 1 AS seq 38 | , ('\n/* ' || n.nspname || '.' || p.proname || ' */\n')::VARCHAR(max) AS ddl 39 | FROM pg_proc p 40 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 41 | JOIN pg_language l ON p.prolang = l.oid 42 | WHERE p.proowner != 1 43 | AND l.lanname <> 'plpgsql' 44 | 45 | UNION ALL 46 | 47 | SELECT n.nspname AS schemaname 48 | , p.proname AS udfname 49 | , p.oid AS udfoid 50 | , 1000 AS seq 51 | , ('CREATE OR REPLACE FUNCTION ' || QUOTE_IDENT(n.nspname) || '.' 
|| QUOTE_IDENT(p.proname) || ' \(')::VARCHAR(max) AS ddl 52 | FROM pg_proc p 53 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 54 | JOIN pg_language l ON p.prolang = l.oid 55 | WHERE p.proowner != 1 56 | AND l.lanname <> 'plpgsql' 57 | 58 | UNION ALL 59 | 60 | SELECT n.nspname AS schemaname 61 | , p.proname AS udfname 62 | , p.oid AS udfoid 63 | , 2000 + nvl(i, 0) AS seq 64 | , CASE 65 | WHEN i = 1 66 | THEN NVL(argument_name, '') || ' ' || format_type(argument_type, NULL) 67 | ELSE ',' || NVL(argument_name, '') || ' ' || format_type(argument_type, NULL) 68 | END AS ddl 69 | FROM pg_proc p 70 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 71 | LEFT JOIN arguments a ON a.oid = p.oid 72 | JOIN pg_language l ON p.prolang = l.oid 73 | WHERE p.proowner != 1 74 | AND l.lanname <> 'plpgsql' 75 | 76 | UNION ALL 77 | 78 | SELECT n.nspname AS schemaname 79 | , p.proname AS udfname 80 | , p.oid AS udfoid 81 | , 3000 AS seq 82 | , '\)' AS ddl 83 | FROM pg_proc p 84 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 85 | JOIN pg_language l ON p.prolang = l.oid 86 | WHERE p.proowner != 1 87 | AND l.lanname <> 'plpgsql' 88 | 89 | UNION ALL 90 | 91 | SELECT n.nspname AS schemaname 92 | , p.proname AS udfname 93 | , p.oid AS udfoid 94 | , 4000 AS seq 95 | , ' RETURNS ' || pg_catalog.format_type(p.prorettype, NULL) AS ddl 96 | FROM pg_proc p 97 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 98 | JOIN pg_language l ON p.prolang = l.oid 99 | WHERE p.proowner != 1 100 | AND l.lanname <> 'plpgsql' 101 | 102 | UNION ALL 103 | 104 | SELECT n.nspname AS schemaname 105 | , p.proname AS udfname 106 | , p.oid AS udfoid 107 | , 5000 AS seq 108 | , CASE 109 | WHEN p.provolatile = 'v' 110 | THEN 'VOLATILE' 111 | WHEN p.provolatile = 's' 112 | THEN 'STABLE' 113 | WHEN p.provolatile = 'i' 114 | THEN 'IMMUTABLE' 115 | ELSE '' 116 | END AS ddl 117 | FROM pg_proc p 118 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 119 | JOIN pg_language l ON p.prolang = l.oid 120 | WHERE p.proowner != 1 121 | AND l.lanname <> 'plpgsql' 122 | 123 | UNION ALL 124 | 125 | SELECT n.nspname AS schemaname 126 | , p.proname AS udfname 127 | , p.oid AS udfoid 128 | , 6000 AS seq 129 | , 'AS $$' AS ddl 130 | FROM pg_proc p 131 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 132 | JOIN pg_language l ON p.prolang = l.oid 133 | WHERE p.proowner != 1 134 | AND l.lanname <> 'plpgsql' 135 | 136 | UNION ALL 137 | 138 | SELECT n.nspname AS schemaname 139 | , p.proname AS udfname 140 | , p.oid AS udfoid 141 | , 7000 AS seq 142 | , p.prosrc AS DDL 143 | FROM pg_proc p 144 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 145 | JOIN pg_language l ON p.prolang = l.oid 146 | WHERE p.proowner != 1 147 | AND l.lanname <> 'plpgsql' 148 | 149 | UNION ALL 150 | 151 | SELECT n.nspname AS schemaname 152 | , p.proname AS udfname 153 | , p.oid AS udfoid 154 | , 8000 AS seq 155 | , '$$ LANGUAGE ' + lang.lanname + ';' AS ddl 156 | FROM pg_proc p 157 | LEFT JOIN pg_namespace n ON n.oid = p.pronamespace 158 | LEFT JOIN ( 159 | SELECT oid 160 | , lanname 161 | FROM pg_language 162 | ) lang ON p.prolang = lang.oid 163 | WHERE p.proowner != 1 164 | AND lang.lanname <> 'plpgsql' 165 | ) 166 | ORDER BY 167 | udfoid 168 | , seq 169 | ) 170 | WHERE 171 | {schema_filter} 172 | -- For manual runs, remove the above line and replace with something like this: 173 | -- Example: 174 | -- lower(schemaname) LIKE '%' 175 | ; 176 | -------------------------------------------------------------------------------- /Redshift/scripts/DDL_View.sql: 
-------------------------------------------------------------------------------- 1 | SELECT 2 | ddl 3 | FROM 4 | ( 5 | SELECT 6 | pg_get_viewdef(c.oid, TRUE) as view_definition, 7 | '/* ' + n.nspname + '.' + c.relname + ' */\n\n' 8 | + CASE 9 | WHEN c.relnatts > 0 and view_definition not ILIKE 'CREATE MATERIALIZED View%' THEN 10 | 'CREATE OR REPLACE VIEW ' + QUOTE_IDENT(n.nspname) + '.' + QUOTE_IDENT(c.relname) + ' AS\n' + COALESCE(view_definition, '') 11 | ELSE 12 | COALESCE(view_definition, '') 13 | END 14 | + '\n' AS ddl 15 | , n.nspname as schemaname 16 | FROM 17 | pg_catalog.pg_class AS c 18 | INNER JOIN pg_catalog.pg_namespace AS n ON c.relnamespace = n.oid 19 | WHERE 20 | relkind = 'v' 21 | AND n.nspname not in ('information_schema', 'pg_catalog', 'pg_internal') 22 | ) 23 | WHERE 24 | {schema_filter} 25 | -- For manual runs, remove the above line and replace with something like this: 26 | -- Example: 27 | -- lower(schemaname) LIKE '%' 28 | ; 29 | -------------------------------------------------------------------------------- /SQLServer/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/SQLServer/.DS_Store -------------------------------------------------------------------------------- /SQLServer/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
8 | 9 | 10 | -------------------------------------------------------------------------------- /SQLServer/README.md: -------------------------------------------------------------------------------- 1 | # SQLServer 2 | 3 | ## Version 4 | 0.0.96 5 | -------------------------------------------------------------------------------- /SQLServer/README.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/SQLServer/README.pdf -------------------------------------------------------------------------------- /SQLServer/SQL_Server_Code_Extraction.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/SQLServer/SQL_Server_Code_Extraction.pdf -------------------------------------------------------------------------------- /SQLServer/Table _sizing_report.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/SQLServer/Table _sizing_report.pdf -------------------------------------------------------------------------------- /Synapse/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
8 | 9 | 10 | -------------------------------------------------------------------------------- /Synapse/Scripts/Get_external_data_sources.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | DB_NAME() COLLATE DATABASE_DEFAULT + -- Data Sources are usually at the database level in serverless 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | name COLLATE DATABASE_DEFAULT + 7 | N'@@END_NAME@@' + 8 | N'@@START_OBJECT_DEFINITION@@' + 9 | CHAR(13) + CHAR(10) + 10 | 'CREATE EXTERNAL DATA SOURCE [' + name COLLATE DATABASE_DEFAULT + ']' + CHAR(13) + CHAR(10) + 11 | 'WITH (' + CHAR(13) + CHAR(10) + 12 | ' LOCATION = ''' + location COLLATE DATABASE_DEFAULT + '''' + CHAR(13) + CHAR(10) + 13 | ' ' + CASE WHEN credential_id IS NOT NULL THEN ', CREDENTIAL = [' + (SELECT name COLLATE DATABASE_DEFAULT FROM sys.database_scoped_credentials WHERE credential_id = s.credential_id) + ']' ELSE '' END COLLATE DATABASE_DEFAULT + CHAR(13) + CHAR(10) + 14 | ');' + CHAR(13) + CHAR(10) + 15 | N'@@END_OBJECT_DEFINITION@@' 16 | FROM 17 | sys.external_data_sources s 18 | WHERE 19 | name <> 'data_lake_store' COLLATE DATABASE_DEFAULT; -- Exclude default data source if not desired -------------------------------------------------------------------------------- /Synapse/Scripts/Get_external_file_formats.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | DB_NAME() COLLATE DATABASE_DEFAULT + -- File Formats are usually at the database level in serverless 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | name COLLATE DATABASE_DEFAULT + 7 | N'@@END_NAME@@' + 8 | N'@@START_OBJECT_DEFINITION@@' + 9 | CHAR(13) + CHAR(10) + 10 | 'CREATE EXTERNAL FILE FORMAT [' + name COLLATE DATABASE_DEFAULT + ']' + CHAR(13) + CHAR(10) + 11 | 'WITH (' + CHAR(13) + CHAR(10) + 12 | -- FIX: Use 'format_type' column (integer) and map it to a string description 13 | ' FORMAT_TYPE = ' + 14 | CASE format_type 15 | WHEN 1 THEN 'DELIMITEDTEXT' COLLATE DATABASE_DEFAULT 16 | WHEN 2 THEN 'PARQUET' COLLATE DATABASE_DEFAULT 17 | WHEN 3 THEN 'ORC' COLLATE DATABASE_DEFAULT 18 | WHEN 4 THEN 'DELTA' COLLATE DATABASE_DEFAULT -- Delta is type 4 19 | ELSE 'UNKNOWN' COLLATE DATABASE_DEFAULT -- Fallback for any unmapped types 20 | END + ',' + CHAR(13) + CHAR(10) + 21 | -- END FIX 22 | CASE 23 | WHEN format_type = 1 -- Only DELIMITEDTEXT has these options (using format_type now) 24 | THEN 25 | ' FIELD_TERMINATOR = ''' + ISNULL(field_terminator, '') COLLATE DATABASE_DEFAULT + ''',' + CHAR(13) + CHAR(10) + 26 | ' STRING_DELIMITER = ''' + ISNULL(string_delimiter, '') COLLATE DATABASE_DEFAULT + ''',' + CHAR(13) + CHAR(10) + 27 | ' USE_TYPE_DEFAULT = ' + CASE WHEN use_type_default = 1 THEN 'ON' ELSE 'OFF' END COLLATE DATABASE_DEFAULT + ',' + CHAR(13) + CHAR(10) + 28 | ' DATE_FORMAT = ''' + ISNULL(date_format, '') COLLATE DATABASE_DEFAULT + ''',' + CHAR(13) + CHAR(10) + 29 | ' FIRST_ROW = ' + CAST(first_row AS VARCHAR(10)) COLLATE DATABASE_DEFAULT + ',' + CHAR(13) + CHAR(10) + 30 | ' ROW_TERMINATOR = ''' + ISNULL(row_terminator, '') COLLATE DATABASE_DEFAULT + '''' + CHAR(13) + CHAR(10) + 31 | CASE WHEN parser_version IS NOT NULL THEN ', PARSER_VERSION = ' + CAST(parser_version AS VARCHAR(10)) COLLATE DATABASE_DEFAULT ELSE '' END 32 | ELSE '' 33 | END COLLATE DATABASE_DEFAULT + 34 | ');' + CHAR(13) + CHAR(10) + 35 | N'@@END_OBJECT_DEFINITION@@' 36 | FROM 37 | sys.external_file_formats; 
-------------------------------------------------------------------------------- /Synapse/Scripts/Get_external_tables.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | s.name COLLATE DATABASE_DEFAULT + 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | t.name COLLATE DATABASE_DEFAULT + 7 | N'@@END_NAME@@' + 8 | N'@@START_OBJECT_DEFINITION@@' + 9 | CHAR(13) + CHAR(10) + 10 | 'CREATE EXTERNAL TABLE [' + s.name COLLATE DATABASE_DEFAULT + '].[' + t.name COLLATE DATABASE_DEFAULT + '] (' + CHAR(13) + CHAR(10) + 11 | -- The fix is to cast the input expression for STRING_AGG to NVARCHAR(MAX). 12 | -- This promotes the result to a large object type, avoiding the 8000-byte limit. 13 | STRING_AGG( 14 | CAST( 15 | ' [' + c.name COLLATE DATABASE_DEFAULT + '] ' + 16 | TYPE_NAME(c.system_type_id) COLLATE DATABASE_DEFAULT + 17 | CASE 18 | WHEN c.max_length != -1 AND TYPE_NAME(c.system_type_id) IN ('nvarchar', 'varchar', 'varbinary') THEN '(' + CAST(c.max_length AS VARCHAR(10)) + ')' 19 | WHEN c.max_length = -1 AND TYPE_NAME(c.system_type_id) IN ('nvarchar', 'varchar', 'varbinary') THEN '(MAX)' 20 | WHEN TYPE_NAME(c.system_type_id) IN ('decimal', 'numeric') THEN '(' + CAST(c.precision AS VARCHAR(10)) + ',' + CAST(c.scale AS VARCHAR(10)) + ')' 21 | ELSE '' 22 | END + 23 | CASE WHEN c.is_nullable = 1 THEN ' NULL' ELSE ' NOT NULL' END 24 | AS NVARCHAR(MAX)), -- <<< THIS IS THE FIX 25 | ',' + CHAR(13) + CHAR(10) 26 | ) WITHIN GROUP (ORDER BY c.column_id) + CHAR(13) + CHAR(10) + 27 | ')' + CHAR(13) + CHAR(10) + 28 | 'WITH (' + CHAR(13) + CHAR(10) + 29 | ' LOCATION = ''' + ext_t.location COLLATE DATABASE_DEFAULT + ''',' + CHAR(13) + CHAR(10) + 30 | ' DATA_SOURCE = [' + eds.name COLLATE DATABASE_DEFAULT + '],' + CHAR(13) + CHAR(10) + 31 | ' FILE_FORMAT = [' + eff.name COLLATE DATABASE_DEFAULT + ']' + CHAR(13) + CHAR(10) + 32 | ');' + CHAR(13) + CHAR(10) + 33 | N'@@END_OBJECT_DEFINITION@@' 34 | FROM 35 | sys.external_tables AS ext_t 36 | INNER JOIN 37 | sys.objects AS t ON ext_t.object_id = t.object_id 38 | INNER JOIN 39 | sys.schemas AS s ON t.schema_id = s.schema_id 40 | INNER JOIN 41 | sys.columns AS c ON t.object_id = c.object_id 42 | INNER JOIN 43 | sys.external_data_sources AS eds ON ext_t.data_source_id = eds.data_source_id 44 | INNER JOIN 45 | sys.external_file_formats AS eff ON ext_t.file_format_id = eff.file_format_id 46 | WHERE 47 | t.is_ms_shipped = 0 48 | GROUP BY 49 | s.name, t.name, ext_t.location, eds.name, eff.name 50 | ORDER BY 51 | s.name, t.name; -------------------------------------------------------------------------------- /Synapse/Scripts/Get_external_tables_serveless.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | s.name COLLATE DATABASE_DEFAULT + 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | t.name COLLATE DATABASE_DEFAULT + 7 | N'@@END_NAME@@' + 8 | N'@@START_OBJECT_DEFINITION@@' + 9 | CHAR(13) + CHAR(10) + 10 | 'CREATE EXTERNAL TABLE [' + s.name COLLATE DATABASE_DEFAULT + '].[' + t.name COLLATE DATABASE_DEFAULT + '] (' + CHAR(13) + CHAR(10) + 11 | STRING_AGG( 12 | ' [' + c.name COLLATE DATABASE_DEFAULT + '] ' + 13 | TYPE_NAME(c.system_type_id) COLLATE DATABASE_DEFAULT + 14 | CASE WHEN c.max_length != -1 AND TYPE_NAME(c.system_type_id) IN ('nvarchar', 'varchar', 'varbinary') THEN '(' + CAST(c.max_length AS VARCHAR(10)) + ')' 15 | WHEN c.max_length = -1 AND TYPE_NAME(c.system_type_id) IN ('nvarchar', 'varchar', 'varbinary') THEN 
'(MAX)' 16 | WHEN TYPE_NAME(c.system_type_id) IN ('decimal', 'numeric') THEN '(' + CAST(c.precision AS VARCHAR(10)) + ',' + CAST(c.scale AS VARCHAR(10)) + ')' 17 | ELSE '' 18 | END COLLATE DATABASE_DEFAULT + 19 | CASE WHEN c.is_nullable = 1 THEN ' NULL' ELSE ' NOT NULL' END COLLATE DATABASE_DEFAULT, 20 | ',' + CHAR(13) + CHAR(10) 21 | ) WITHIN GROUP (ORDER BY c.column_id) COLLATE DATABASE_DEFAULT + CHAR(13) + CHAR(10) + 22 | ')' + CHAR(13) + CHAR(10) + 23 | 'WITH (' + CHAR(13) + CHAR(10) + 24 | ' LOCATION = ''' + ext_t.location COLLATE DATABASE_DEFAULT + ''',' + CHAR(13) + CHAR(10) + 25 | ' DATA_SOURCE = [' + eds.name COLLATE DATABASE_DEFAULT + '],' + CHAR(13) + CHAR(10) + 26 | ' FILE_FORMAT = [' + eff.name COLLATE DATABASE_DEFAULT + ']' + CHAR(13) + CHAR(10) + 27 | ');' + CHAR(13) + CHAR(10) + 28 | N'@@END_OBJECT_DEFINITION@@' 29 | FROM 30 | sys.external_tables AS ext_t 31 | INNER JOIN 32 | sys.objects AS t ON ext_t.object_id = t.object_id 33 | INNER JOIN 34 | sys.schemas AS s ON t.schema_id = s.schema_id 35 | INNER JOIN 36 | sys.columns AS c ON t.object_id = c.object_id 37 | INNER JOIN 38 | sys.external_data_sources AS eds ON ext_t.data_source_id = eds.data_source_id 39 | INNER JOIN 40 | sys.external_file_formats AS eff ON ext_t.file_format_id = eff.file_format_id 41 | WHERE 42 | t.is_ms_shipped = 0 -- Exclude system objects 43 | GROUP BY 44 | s.name, t.name, ext_t.location, eds.name, eff.name 45 | ORDER BY 46 | s.name, t.name; -------------------------------------------------------------------------------- /Synapse/Scripts/Get_external_views.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | s.name COLLATE DATABASE_DEFAULT + 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | o.name COLLATE DATABASE_DEFAULT + 7 | N'@@END_NAME@@' + 8 | N'@@START_OBJECT_DEFINITION@@' + 9 | CHAR(13) + CHAR(10) + 10 | sm.definition COLLATE DATABASE_DEFAULT + 11 | CHAR(13) + CHAR(10) + 12 | N'@@END_OBJECT_DEFINITION@@' 13 | FROM 14 | sys.sql_modules sm 15 | INNER JOIN 16 | sys.objects o ON sm.object_id = o.object_id 17 | INNER JOIN 18 | sys.schemas s ON o.schema_id = s.schema_id 19 | WHERE 20 | o.type = 'V' -- Filter for Views 21 | AND s.name <> 'sys' COLLATE DATABASE_DEFAULT; -------------------------------------------------------------------------------- /Synapse/Scripts/Get_functions.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | s.name + 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | o.name + 7 | N'@@END_NAME@@' + 8 | N'@@START_OBJECT_DEFINITION@@' + 9 | CHAR(13) + CHAR(10) + 10 | sm.definition + 11 | CHAR(13) + CHAR(10) + 12 | N'@@END_OBJECT_DEFINITION@@' 13 | FROM 14 | sys.sql_modules sm 15 | INNER JOIN 16 | sys.objects o ON sm.object_id = o.object_id 17 | INNER JOIN 18 | sys.schemas s ON o.schema_id = s.schema_id 19 | WHERE 20 | o.type = 'FN' 21 | AND s.name <> 'sys'; -------------------------------------------------------------------------------- /Synapse/Scripts/Get_indexes.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | s.name COLLATE DATABASE_DEFAULT + 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | t.name COLLATE DATABASE_DEFAULT + 7 | '_Idx_' + i.name COLLATE DATABASE_DEFAULT + 8 | N'@@END_NAME@@' + 9 | N'@@START_OBJECT_DEFINITION@@' + 10 | CHAR(13) + CHAR(10) + 11 | 'CREATE ' + CASE WHEN i.is_unique = 1 THEN 'UNIQUE ' ELSE '' END + 12 | i.type_desc 
COLLATE DATABASE_DEFAULT + ' INDEX [' + i.name COLLATE DATABASE_DEFAULT + ']' + CHAR(13) + CHAR(10) + 13 | 'ON [' + s.name COLLATE DATABASE_DEFAULT + '].[' + t.name COLLATE DATABASE_DEFAULT + '] (' + CHAR(13) + CHAR(10) + 14 | STRING_AGG( 15 | ' , [' + c.name COLLATE DATABASE_DEFAULT + ']' + CASE WHEN ic.is_descending_key = 1 THEN ' DESC' ELSE ' ASC' END, 16 | ',' + CHAR(13) + CHAR(10) 17 | ) WITHIN GROUP (ORDER BY ic.key_ordinal) COLLATE DATABASE_DEFAULT + CHAR(13) + CHAR(10) + -- Apply collation to STRING_AGG result 18 | ')' + CHAR(13) + CHAR(10) + 19 | CASE WHEN i.has_filter = 1 THEN 'WHERE ' + i.filter_definition COLLATE DATABASE_DEFAULT + CHAR(13) + CHAR(10) ELSE '' END + 20 | 'WITH (' + CHAR(13) + CHAR(10) + 21 | ' DATA_COMPRESSION = ' + CASE WHEN i.type = 5 THEN 'COLUMNSTORE' ELSE 'NONE' END COLLATE DATABASE_DEFAULT + 22 | ');' + CHAR(13) + CHAR(10) + 23 | N'@@END_OBJECT_DEFINITION@@' 24 | FROM 25 | sys.indexes i 26 | INNER JOIN 27 | sys.tables t ON i.object_id = t.object_id 28 | INNER JOIN 29 | sys.schemas s ON t.schema_id = s.schema_id 30 | INNER JOIN 31 | sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id AND ic.is_included_column = 0 32 | INNER JOIN 33 | sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id 34 | WHERE 35 | i.is_disabled = 0 36 | AND i.type IN (1, 2) 37 | AND i.is_primary_key = 0 38 | AND i.is_unique_constraint = 0 39 | AND i.name NOT LIKE 'PK_%' 40 | AND i.name NOT LIKE 'UQ_%' 41 | GROUP BY 42 | s.name, t.name, i.name, i.is_unique, i.type_desc, i.type, i.has_filter, i.filter_definition, 43 | i.object_id, i.index_id 44 | ORDER BY 45 | s.name, t.name, i.name; -------------------------------------------------------------------------------- /Synapse/Scripts/Get_procedures.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | s.name + 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | o.name + 7 | N'@@END_NAME@@' + 8 | N'@@START_OBJECT_DEFINITION@@' + 9 | CHAR(13) + CHAR(10) + 10 | sm.definition + 11 | CHAR(13) + CHAR(10) + 12 | N'@@END_OBJECT_DEFINITION@@' 13 | FROM 14 | sys.sql_modules sm 15 | INNER JOIN 16 | sys.objects o ON sm.object_id = o.object_id 17 | INNER JOIN 18 | sys.schemas s ON o.schema_id = s.schema_id 19 | WHERE 20 | o.type = 'P' 21 | AND s.name <> 'sys'; -------------------------------------------------------------------------------- /Synapse/Scripts/Get_schemas.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | s.name + 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | s.name + 7 | N'@@END_NAME@@' + 8 | N'@@START_OBJECT_DEFINITION@@' + 9 | CHAR(13) + CHAR(10) + 10 | 'CREATE SCHEMA [' + s.name + '];' + 11 | CHAR(13) + CHAR(10) + 12 | N'@@END_OBJECT_DEFINITION@@' 13 | FROM 14 | sys.schemas s 15 | WHERE 16 | s.schema_id > 4 17 | AND s.name NOT IN ('db_owner', 'db_accessadmin', 'db_securityadmin', 'db_ddladmin', 'db_backupoperator', 'db_datareader', 'db_datawriter', 'db_denieddatawriter', 'db_denieddatareader') -- Excluir roles de base de datos 18 | AND s.name <> 'sysdiag' 19 | ORDER BY 20 | s.name; -------------------------------------------------------------------------------- /Synapse/Scripts/Get_tables.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | s.name + 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | t.name + 7 | N'@@END_NAME@@' + 8 | N'@@START_OBJECT_DEFINITION@@' 
+ 9 | CHAR(13) + CHAR(10) + 10 | 'CREATE TABLE [' + s.name + '].[' + t.name + '] (' + CHAR(13) + CHAR(10) + 11 | -- The result of the STRING_AGG is cast to NVARCHAR(MAX) to avoid the 8000 byte limit. 12 | -- This is done by casting the *input* expression to NVARCHAR(MAX). 13 | STRING_AGG( 14 | CAST( 15 | ' [' + c.name + '] ' + 16 | TYPE_NAME(c.system_type_id) + 17 | CASE 18 | WHEN c.max_length != -1 AND TYPE_NAME(c.system_type_id) IN ('nvarchar', 'varchar', 'varbinary') THEN '(' + CAST(c.max_length AS VARCHAR(10)) + ')' 19 | WHEN c.max_length = -1 AND TYPE_NAME(c.system_type_id) IN ('nvarchar', 'varchar', 'varbinary') THEN '(MAX)' 20 | WHEN TYPE_NAME(c.system_type_id) IN ('decimal', 'numeric') THEN '(' + CAST(c.precision AS VARCHAR(10)) + ',' + CAST(c.scale AS VARCHAR(10)) + ')' 21 | ELSE '' 22 | END + 23 | CASE WHEN c.is_nullable = 1 THEN ' NULL' ELSE ' NOT NULL' END + 24 | CASE WHEN ic.is_identity = 1 THEN ' IDENTITY(' + CAST(ISNULL(ic.seed_value, 1) AS VARCHAR(10)) + ',' + CAST(ISNULL(ic.increment_value, 1) AS VARCHAR(10)) + ')' ELSE '' END 25 | AS NVARCHAR(MAX)), -- <<< THIS IS THE FIX 26 | ',' + CHAR(13) + CHAR(10) 27 | ) WITHIN GROUP (ORDER BY c.column_id) + CHAR(13) + CHAR(10) + 28 | ')' + CASE WHEN t.temporal_type = 2 THEN ' FOR SYSTEM_TIME AS HISTORY_TABLE = [' + OBJECT_SCHEMA_NAME(t.history_table_id) + '].[' + OBJECT_NAME(t.history_table_id) + ']' ELSE '' END + ';' + CHAR(13) + CHAR(10) + N'@@END_OBJECT_DEFINITION@@' 29 | FROM 30 | sys.tables t 31 | INNER JOIN 32 | sys.schemas s ON t.schema_id = s.schema_id 33 | INNER JOIN 34 | sys.columns c ON t.object_id = c.object_id 35 | LEFT JOIN 36 | sys.identity_columns ic ON c.object_id = ic.object_id AND c.column_id = ic.column_id 37 | WHERE 38 | t.is_ms_shipped = 0 39 | GROUP BY 40 | s.name, t.name, t.temporal_type, t.history_table_id 41 | ORDER BY 42 | s.name, t.name; -------------------------------------------------------------------------------- /Synapse/Scripts/Get_views.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | N'@@START_SCHEMA@@' + 3 | s.name + 4 | N'@@END_SCHEMA@@' + 5 | N'@@START_NAME@@' + 6 | o.name + 7 | N'@@END_NAME@@' + 8 | N'@@START_OBJECT_DEFINITION@@' + 9 | CHAR(13) + CHAR(10) + 10 | sm.definition + 11 | CHAR(13) + CHAR(10) + 12 | N'@@END_OBJECT_DEFINITION@@' 13 | FROM 14 | sys.sql_modules sm 15 | INNER JOIN 16 | sys.objects o ON sm.object_id = o.object_id 17 | INNER JOIN 18 | sys.schemas s ON o.schema_id = s.schema_id 19 | WHERE 20 | o.type = 'V' 21 | AND s.name <> 'sys'; -------------------------------------------------------------------------------- /Teradata/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Teradata/.DS_Store -------------------------------------------------------------------------------- /Teradata/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 
2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Teradata/README.md: -------------------------------------------------------------------------------- 1 | # Teradata Export Scripts 2 | 3 | This repository provides simple scripts to help you export your Teradata code so it can be migrated to [Snowflake](https://www.snowflake.com/) using [SnowConvert](https://docs.snowflake.com/en/migrations/snowconvert-docs/general/getting-started/code-extraction/teradata). 4 | 5 | ## Version 6 | 0.0.96 7 | 8 | ## Usage 9 | 10 | The following are the steps to execute the DDL Code Generation. They should be executed in a bash shell on a Linux environment with access to the bteq/tpt utilities. 11 | 12 | 1 - Modify `create_ddls.sh` in the bin folder. Using a text editor, modify the following parameters: 13 | 14 | * `connection_string` 15 | * `include_databases` 16 | * `exclude_databases` 17 | * `include_objects` 18 | 19 | It is recommended to use the user 'DBC' in the connection string, but a user with sysadmin privileges should also work. Please run on a production-like environment with up-to-date statistics. 20 | 21 | By default the script is set up to exclude system-related databases and include all others. You can modify these parameters to get the desired scope, including the operator that is used. Parameter values must not contain spaces and should be all **UPPERCASE**. 22 | By default, all comments in the source code are preserved. If comments need to be removed, contact the Snowflake team. 23 | Executing `create_ddls.sh` permanently changes the `.btq` files; keep a copy of the original `.btq` files if needed. 24 | 25 | > Do not remove the parentheses around the entire statement; they are needed for compound logic. 26 | > Do not use the **LIKE ANY** clause in either parameter, as it can cause unexpected issues. 27 | 28 | Example values: 29 | 30 | ```sql 31 | (UPPER(T1.DATABASENAME) NOT IN ('ALL', 'TESTDB')); 32 | 33 | ((UPPER(T1.DATABASENAME) NOT IN ('ALL', 'TESTDB')) AND UPPER(T1.DATABASENAME) NOT LIKE ('TD_%')) 34 | ``` 35 | 36 | 2 - After modifying, the `create_ddls.sh` file can be run from the command line, from within the bin directory, to execute the extract. The following files will be created in the output folder: 37 | 38 | ## DDL Files 39 | 40 | These files will contain the definitions of the objects specified by the file name.
41 | 42 | * `DDL_Databases.sql` 43 | * `DDL_Tables.sql` 44 | * `DDL_Join_Indexes.sql` 45 | * `DDL_Functions.sql` 46 | * `DDL_Views.sql` 47 | * `DDL_Macros.sql` 48 | * `DDL_Procedures.sql` 49 | 50 | 3 - Run `create_ddls.sh --version` to check the current version of the extraction scripts. 51 | 52 | ## Reporting issues and feedback 53 | 54 | If you encounter any bugs with the tool please file an issue in the 55 | [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of our GitHub repo. 56 | 57 | ## License 58 | 59 | These scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/Teradata/License.txt). 60 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_databases.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .OS rm ../output/object_extracts/DDL/DDL_Databases.sql 9 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Databases.sql 10 | .SET WIDTH 65531 11 | SELECT 'CREATE DATABASE ' || TRIM(T1.DATABASENAME) || ' FROM DBC AS PERM = 100000000;' "--" FROM DBC.DATABASESV T1 12 | WHERE include_databases AND exclude_databases GROUP BY 1 ORDER BY 1; 13 | .EXPORT RESET 14 | 15 | .quit 0; 16 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_functions.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Functions.sql 9 | .SET WIDTH 65531 10 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.SpecificNAME) || ' */'' as "--"; ' || 'SHOW FUNCTION ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.FUNCTIONNAME) || ';' "---" 11 | FROM DBC.FUNCTIONSV T1 WHERE include_databases AND exclude_databases GROUP BY 1; 12 | .EXPORT RESET 13 | .OS rm ../output/object_extracts/DDL/DDL_Functions.sql 14 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Functions.sql 15 | .SET WIDTH 65531 16 | .RUN FILE = ../temp/SHOW_Functions.sql 17 | .EXPORT RESET 18 | 19 | .quit 0; 20 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_join_indexes.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Join_Indexes.sql 9 | .SET WIDTH 65531 10 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW JOIN INDEX ' || TRIM(T1.DATABASENAME) || '.' 
||TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND IN ('I') AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 11 | .EXPORT RESET 12 | .OS rm ../output/object_extracts/DDL/DDL_Join_Indexes.sql 13 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Join_Indexes.sql 14 | .SET WIDTH 65531 15 | .RUN FILE = ../temp/SHOW_Join_Indexes.sql 16 | .EXPORT RESET 17 | 18 | .quit 0; 19 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_macros.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Macros.sql 9 | .SET WIDTH 65531 10 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW MACRO ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'M' AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 11 | .EXPORT RESET 12 | .OS rm ../output/object_extracts/DDL/DDL_Macros.sql 13 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Macros.sql 14 | .SET WIDTH 65531 15 | .RUN FILE = ../temp/SHOW_Macros.sql 16 | .EXPORT RESET 17 | 18 | .quit 0; 19 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_procedures.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Procedures.sql 9 | .SET WIDTH 65531 10 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW PROCEDURE ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'P' AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 11 | .EXPORT RESET 12 | .OS rm ../output/object_extracts/DDL/DDL_Procedures.sql 13 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Procedures.sql 14 | .SET WIDTH 65531 15 | .RUN FILE = ../temp/SHOW_Procedures.sql 16 | .EXPORT RESET 17 | 18 | .quit 0; 19 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_schemas.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .OS rm ../output/object_extracts/DDL/DDL_SF_Schemas.sql 9 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_SF_Schemas.sql 10 | .SET WIDTH 65531 11 | SELECT '/* ' || TRIM(T1.DATABASENAME) || ' */ ' || 'CREATE SCHEMA ' || TRIM(T1.DATABASENAME) || ';' "--" FROM DBC.DATABASESV T1 WHERE include_databases AND exclude_databases GROUP BY 1 ORDER BY 1; 12 | .EXPORT RESET 13 | 14 | .quit 0; 15 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_tables.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Tables.sql 9 | .SET WIDTH 65531 10 | .set titledashes off 11 | 12 | LOCKING ROW FOR ACCESS 13 | SELECT 'SELECT ''/* '' || ''' || TRIM(DATABASENAME) || '.' 
|| TRIM(TABLENAME) || ' */'' as "--"; ' || 'SHOW TABLE ' || TRIM(DATABASENAME) || '.' ||TRIM(TABLENAME) || ';' "--" 14 | FROM DBC.TABLESV T1 15 | WHERE T1.TABLEKIND IN ( 'T' ,'O','Q') -- PI AND NOPI 16 | AND include_databases AND exclude_databases AND include_objects; 17 | 18 | .EXPORT RESET 19 | .OS rm ../output/object_extracts/DDL/DDL_Tables.sql 20 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Tables.sql 21 | 22 | .SET WIDTH 65531 23 | .set titledashes off 24 | .RUN FILE = ../temp/SHOW_Tables.sql 25 | .EXPORT RESET 26 | 27 | .quit 0; 28 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_triggers.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Trigger.sql 9 | .SET WIDTH 65531 10 | .set titledashes off 11 | 12 | LOCKING ROW FOR ACCESS 13 | SELECT 'SELECT ''/* '' || ''' || TRIM(DATABASENAME) || '.' || TRIM(TABLENAME) || ' */'' as "--"; ' || 'SHOW TRIGGER ' || TRIM(DATABASENAME) || '.' ||TRIM(TABLENAME) || ';' "--" 14 | FROM DBC.TABLESV T1 15 | WHERE T1.TABLEKIND = 'G' -- TRIGGERS 16 | AND include_databases AND exclude_databases AND include_objects; 17 | 18 | .EXPORT RESET 19 | .OS rm ../output/object_extracts/DDL/DDL_Trigger.sql 20 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Trigger.sql 21 | 22 | .SET WIDTH 65531 23 | .set titledashes off 24 | .RUN FILE = ../temp/SHOW_Trigger.sql 25 | .EXPORT RESET 26 | 27 | .quit 0; 28 | -------------------------------------------------------------------------------- /Teradata/scripts_template/create_views.btq: -------------------------------------------------------------------------------- 1 | .LOGON connection_string; 2 | 3 | .EXPORT RESET 4 | 5 | .SET WIDTH 65531 6 | .set titledashes off 7 | 8 | .EXPORT FILE = ../temp/SHOW_Views.sql 9 | .SET WIDTH 65531 10 | .set titledashes off 11 | 12 | LOCKING ROW FOR ACCESS 13 | SELECT 'SELECT ''/* '' || ''' || TRIM(DATABASENAME) || '.' || TRIM(TABLENAME) || ' */'' as "--"; ' || 'SHOW VIEW ' || TRIM(DATABASENAME) || '.' ||TRIM(TABLENAME) || ';' "--" 14 | FROM DBC.TABLESV T1 15 | WHERE T1.TABLEKIND = 'V' -- VIEWS 16 | AND include_databases AND exclude_databases AND include_objects; 17 | 18 | .EXPORT RESET 19 | .OS rm ../output/object_extracts/DDL/DDL_Views.sql 20 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Views.sql 21 | .SET WIDTH 65531 22 | .set titledashes off 23 | .RUN FILE = ../temp/SHOW_Views.sql 24 | .EXPORT RESET 25 | 26 | .quit 0; -------------------------------------------------------------------------------- /Tests/DB2/.gitignore: -------------------------------------------------------------------------------- 1 | database/ -------------------------------------------------------------------------------- /Tests/DB2/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 
2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Tests/DB2/README.md: -------------------------------------------------------------------------------- 1 | ## Test files for DB2 Scripts 2 | 3 | A `startDocker.sh` script is provided to create a container where the Export DDL script can be tested. -------------------------------------------------------------------------------- /Tests/DB2/startDocker.sh: -------------------------------------------------------------------------------- 1 | DBPASS=$1 2 | if [ -z "$DBPASS" ] 3 | then 4 | echo "Please provide a password for the DB instance" 5 | read DBPASS 6 | fi 7 | 8 | docker run -itd --name mydb2 --privileged=true -p 50000:50000 -e LICENSE=accept -e DB2INST1_PASSWORD=$DBPASS -e DBNAME=testdb -v $CODESPACE_VSCODE_FOLDER/Tests/DB2/database:/database -v $CODESPACE_VSCODE_FOLDER/DB2:/DDLExportScripts/export ibmcom/db2 9 | docker exec -ti mydb2 bash -c "su - db2inst1" 10 | 11 | # If you get errors saying that the database did not start, you can try the following: 12 | #a) db2trc on -f db2trace.out 13 | #b) db2start 14 | #c) db2trc off-------------------------------------------------------------------------------- /Tests/Teradata/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 | 9 | 10 | -------------------------------------------------------------------------------- /Tests/Teradata/README.md: -------------------------------------------------------------------------------- 1 | # Teradata Export Scripts Tests 2 | 3 | 4 | 5 | > [!WARNING] 6 | > This test folder should be run only on the Teradata demo database [Teradata-Express](https://downloads.teradata.com/download/database/teradata-express/vmware), because it creates and removes databases to test the extraction process. 7 | 8 | ## How to run the tests 9 | 1 - Modify `scripts/config.sh` with your connection values; if you are using the Teradata-Express demo, these values should already match. 10 | 11 | 2 - Ensure your demo database is running on your local system. 12 | 13 | 3 - Go to `./Tests/Teradata/scripts` and run the script `ssh_automatic_login_configuration.sh`; this is necessary to automate the login process to the demo database. 14 | 15 | 4 - Go back to `./Tests/Teradata/` and run `python -m unittest` (see the example session at the end of this README). 16 | 17 | 18 | ## How to add a new test with a new database 19 | 1 - Create a new folder in `./Tests/Teradata/source_code`; this folder must contain the following files: 20 | * `deploy_database.sh`, a script that executes the commands necessary to deploy the example source code. 21 | * `drop_database.sh`, a script that executes the commands necessary to drop the example source code. 22 | * The SQL source code: the scripts that create tables, procedures, etc. 23 | 24 | 2 - Create a Python test class; as an example, check the file `test_demo_database.py`. The folder name defined in the `setUpClass` method must be the same as the one created in the previous step, since the script `execute_extract_database_script.sh` looks for that folder in the directory `./Tests/Teradata/source_code`. The extraction parameters defined in the test replace the variables defined in `./Teradata/bin/create_ddls.sh`. 25 | 3 - The SQL files must be encoded in UTF-8.
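The following is a minimal example session for running the suite, a sketch assuming the default Teradata-Express values shipped in `scripts/config.sh` (VM reachable as `root@127.0.0.1`, SSH on port 2222, `dbc,dbc` logon):

```sh
# One-time setup: generate an SSH key and copy it to the VM so the scripts can log in without a password
cd Tests/Teradata/scripts
bash ssh_automatic_login_configuration.sh

# Run the whole suite; each test deploys its demo database, extracts the DDLs, and drops the database again
cd ..
python -m unittest
```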
-------------------------------------------------------------------------------- /Tests/Teradata/database_summary/__init__.py: -------------------------------------------------------------------------------- 1 |  -------------------------------------------------------------------------------- /Tests/Teradata/database_summary/database_source_code_summarizer.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | from .database_source_code_summary import DatabaseSourceCodeSummary 4 | from .top_level_object_type import TopLevelObjectType 5 | 6 | def sumarize_database_source_code(path: str) -> DatabaseSourceCodeSummary: 7 | database_summary = DatabaseSourceCodeSummary() 8 | 9 | for dirpath, dirnames, filenames in os.walk(path): 10 | for filename in filenames: 11 | if not re.match(r'.*\.sql$', filename, flags=re.IGNORECASE): 12 | continue 13 | file_path = os.path.join(dirpath, filename) 14 | database_summary.add_sql_file(file_path) 15 | sql_statements = read_sql_statements_from_file(file_path) 16 | analyze_sql_statements(sql_statements, database_summary) 17 | return database_summary 18 | 19 | def analyze_sql_statements(sql_statements: "list[str]", database_summary: DatabaseSourceCodeSummary): 20 | for sql_statement in sql_statements: 21 | type = get_sql_statement_type(sql_statement) 22 | database_summary.get_top_level_object_to_int_map()[type] += 1 23 | 24 | def get_sql_statement_type(sql_statement: str) -> TopLevelObjectType: 25 | for type in TopLevelObjectType: 26 | if is_statement_of_type(sql_statement, type.name): 27 | return type 28 | 29 | return TopLevelObjectType.UNDEFINED_TYPE 30 | 31 | def is_statement_of_type(statement: str, type_name: str) -> bool: 32 | type_name = type_name.replace("_", r"\s*") 33 | regex = r'^\s*(?:CREATE|REPLACE)(?:\s*\w*\s*){0,2}' + type_name+ r'\s' 34 | result = re.search(regex, statement, flags=re.IGNORECASE|re.MULTILINE) 35 | return result 36 | 37 | def read_sql_statements_from_file(file_path: str) ->"list[str]": 38 | with open(file_path) as my_file: 39 | comment_pattern = r'\/\*[\s\S]*?\*\/' 40 | code_without_comments = re.sub(comment_pattern, '', my_file.read(), flags=re.MULTILINE) 41 | sql_statements = code_without_comments.split(';') 42 | return sql_statements -------------------------------------------------------------------------------- /Tests/Teradata/database_summary/database_source_code_summary.py: -------------------------------------------------------------------------------- 1 | from .top_level_object_type import TopLevelObjectType 2 | class DatabaseSourceCodeSummary(): 3 | def __init__(self): 4 | self._file_paths = [] 5 | self._top_level_object_to_int_map = {} 6 | for top_level_object_type in TopLevelObjectType: 7 | self._top_level_object_to_int_map[top_level_object_type] = 0 8 | 9 | def get_count_of_files(self) -> int: 10 | return len(self._file_paths) 11 | 12 | def add_sql_file(self, file_path: str) -> None: 13 | self._file_paths+=[file_path] 14 | 15 | def get_top_level_object_to_int_map(self) -> "dict[TopLevelObjectType, int]": 16 | return self._top_level_object_to_int_map -------------------------------------------------------------------------------- /Tests/Teradata/database_summary/top_level_object_type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from types import DynamicClassAttribute 3 | 4 | class TopLevelObjectType(Enum): 5 | TABLE = 1 6 | PROCEDURE = 2 7 | VIEW = 3 8 | DATABASE = 4 9 | TRIGGER = 5 10 | 
MACRO = 6 11 | FUNCTION = 7 12 | JOIN_INDEX = 8 13 | UNDEFINED_TYPE = 20 -------------------------------------------------------------------------------- /Tests/Teradata/scripts/config.sh: -------------------------------------------------------------------------------- 1 | # 2 | vm_connection="root@127.0.0.1" 3 | vm_ssh_port="2222" 4 | logon_command="dbc,dbc" 5 | 6 | -------------------------------------------------------------------------------- /Tests/Teradata/scripts/execute_deploy_database_script.sh: -------------------------------------------------------------------------------- 1 | # 2 | #####Version 20230810: Script created 3 | 4 | #####Constants 5 | MESSAGE='\033[0;32m' # Green 6 | ERROR='\033[0;31m' # Red 7 | NC='\033[0m' # No Color 8 | 9 | #####Parameters 10 | source_code_folder_name="$1" 11 | if [ ! "$source_code_folder_name" ] || [ ! -d "../source_code/$source_code_folder_name/" ] ; then 12 | echo "${ERROR}Invalid parameter '$source_code_folder_name', options are [$(ls ../source_code)]${NC}" 13 | exit 1 14 | fi 15 | 16 | #####Import config variables 17 | source config.sh 18 | 19 | #####Commands 20 | echo "${MESSAGE}Sending the database source code to the Virtual Machine...${NC}" 21 | ssh $vm_connection -p $vm_ssh_port "mkdir -p /root/sc_testing_folder" 22 | rsync -r ../source_code/$source_code_folder_name $vm_connection:/root/sc_testing_folder -e "ssh -p $vm_ssh_port" 23 | 24 | echo "${MESSAGE}Executing scripts in the Virtual Machine...${NC}" 25 | ssh $vm_connection -p $vm_ssh_port "cd /root/sc_testing_folder/$source_code_folder_name && bash deploy_database.sh $logon_command" 26 | ssh -q $vm_connection -p $vm_ssh_port "rm -r /root/sc_testing_folder/$source_code_folder_name/" 27 | -------------------------------------------------------------------------------- /Tests/Teradata/scripts/execute_drop_database_script.sh: -------------------------------------------------------------------------------- 1 | # 2 | #####Version 20230810: Script created 3 | 4 | #####Constants 5 | MESSAGE='\033[0;32m' # Green 6 | ERROR='\033[0;31m' # Red 7 | NC='\033[0m' # No Color 8 | 9 | #####Parameters 10 | source_code_folder_name="$1" 11 | if [ ! "$source_code_folder_name" ] || [ ! -d "../source_code/$source_code_folder_name/" ] ; then 12 | echo "${ERROR}Invalid parameter '$source_code_folder_name', options are [$(ls ../source_code)]${NC}" 13 | exit 1 14 | fi 15 | 16 | #####Import config variables 17 | source config.sh 18 | 19 | #####Commands 20 | 21 | 22 | echo "${MESSAGE}Sending the database source code to the Virtual Machine...${NC}" 23 | rsync -r ../source_code/$source_code_folder_name $vm_connection:/root/sc_testing_folder/ -e "ssh -p $vm_ssh_port" 24 | 25 | echo "${MESSAGE}Executing scripts in the Virtual Machine...${NC}" 26 | ssh $vm_connection -p $vm_ssh_port "cd /root/sc_testing_folder/$source_code_folder_name && bash drop_database.sh $logon_command" 27 | ssh -q $vm_connection -p $vm_ssh_port rm -r /root/sc_testing_folder/$source_code_folder_name 28 | -------------------------------------------------------------------------------- /Tests/Teradata/scripts/execute_extract_database_script.sh: -------------------------------------------------------------------------------- 1 | # 2 | #Version 20230810: Script created 3 | 4 | #####Constants 5 | MESSAGE='\033[0;32m' # Green 6 | ERROR='\033[0;31m' # Red 7 | NC='\033[0m' # No Color 8 | folder_name="Teradata_Extraction" 9 | 10 | #####Parameters 11 | # First: The database folder to be used.
12 | # Second to n: The extraction parameters to be used, for example: 13 | # include_databases="(UPPER(T1.DATABASENAME) = 'SC_EXAMPLE_DEMO')" exclude_databases="(UPPER(T1.DATABASENAME) NOT IN ('SYS_CALENDAR','ALL','CONSOLE','CRASHDUMPS','DBC','DBCMANAGER','DBCMNGR','DEFAULT','EXTERNAL_AP','EXTUSER','LOCKLOGSHREDDER','PDCRADM','PDCRDATA','PDCRINFO','PUBLIC','SQLJ','SYSADMIN','SYSBAR','SYSJDBC','SYSLIB','SYSSPATIAL','SYSTEMFE','SYSUDTLIB','SYSUIF','TD_SERVER_DB','TD_SYSFNLIB','TD_SYSFNLIB','TD_SYSGPL','TD_SYSXML','TDMAPS', 'TDPUSER','TDQCD','TDSTATS','TDWM','VIEWPOINT','PDCRSTG'))" 14 | 15 | source_code_folder_name="$1" 16 | extracted_source_code_folder_name="$2" 17 | if [ ! "$source_code_folder_name" ] || [ ! -d "../source_code/$source_code_folder_name/" ] ; then 18 | echo "${ERROR}Invalid parameter '$source_code_folder_name', options are [$(ls ../source_code)]${NC}" 19 | exit 1 20 | fi 21 | if [ ! "$extracted_source_code_folder_name" ] ; then 22 | echo "${ERROR}Invalid parameter '$extracted_source_code_folder_name', this value is the output folder name.${NC}" 23 | exit 1 24 | fi 25 | 26 | 27 | for ARGUMENT in "${@:3}" 28 | do 29 | KEY=$(echo $ARGUMENT | cut -f1 -d=) 30 | 31 | KEY_LENGTH=${#KEY} 32 | VALUE="${ARGUMENT:$KEY_LENGTH+1}" 33 | 34 | export "$KEY"="$VALUE" 35 | done 36 | 37 | echo "${MESSAGE}Using the following extraction parameters:${NC}" 38 | echo "include_databases = $include_databases" 39 | echo "exclude_databases = $exclude_databases" 40 | echo "include_objects = $include_objects" 41 | echo "ddl_leng_max_limit_dic = $ddl_leng_max_limit_dic" 42 | echo "ddl_leng_max_limit_dic = $ddl_leng_max_limit_dic" 43 | 44 | #####Import config variables 45 | echo "${MESSAGE}Importing connection variables...${NC}" 46 | . config.sh 47 | 48 | ##### Commands 49 | echo "${MESSAGE}Copying Teradata Script...${NC}" 50 | cp -fr ../../../Teradata $folder_name 51 | mkdir -p ../extracted_code/ 52 | rm -fr ../extracted_code/$extracted_source_code_folder_name 53 | 54 | 55 | echo "${MESSAGE}Replacing Teradata Script parameters...${NC}" 56 | sed -i '' "s/connection_string=/connection_string=${logon_command} #/g" $folder_name/bin/create_ddls.sh 57 | 58 | #### Replace the variable include_databases, if it was defined in the imported script 59 | if [ ! -z ${include_databases+x} ]; then 60 | sed -i '' "s/include_databases=/include_databases=\"${include_databases}\" #/g" $folder_name/bin/create_ddls.sh 61 | fi 62 | 63 | if [ ! -z ${exclude_databases+x} ]; then 64 | sed -i '' "s/exclude_databases=/exclude_databases=\"${exclude_databases}\" #/g" $folder_name/bin/create_ddls.sh 65 | fi 66 | 67 | if [ ! -z ${include_objects+x} ]; then 68 | sed -i '' "s/include_objects=/include_objects=\"${include_objects}\" #/g" $folder_name/bin/create_ddls.sh 69 | fi 70 | 71 | if [ ! 
-z ${ddl_leng_max_limit_dic+x} ]; then 72 | sed -i '' "s/ddl_leng_max_limit_dic=/ddl_leng_max_limit_dic=${ddl_leng_max_limit_dic} #/g" $folder_name/bin/create_ddls.sh 73 | fi 74 | 75 | echo "${MESSAGE}Removing previous execution output...${NC}" 76 | rm -fr $folder_name/output 77 | rm -fr $folder_name/log 78 | 79 | 80 | echo "${MESSAGE}Sending Teradata scripts to the Virual Machine...${NC}" 81 | scp -P $vm_ssh_port -r $folder_name $vm_connection:/root/sc_testing_folder/$folder_name 82 | rm -fr $folder_name 83 | 84 | 85 | echo "${MESSAGE}Executing scripts in the Virtual Machine...${NC}" 86 | ssh $vm_connection -p $vm_ssh_port "cd /root/sc_testing_folder/$folder_name/bin && bash create_ddls.sh" 87 | 88 | 89 | echo "${MESSAGE}Retrieving the output folder and removing the sent files...${NC}" 90 | scp -r -OT -P $vm_ssh_port $vm_connection:"/root/sc_testing_folder/$folder_name/output /root/sc_testing_folder/$folder_name/log" ../extracted_code/$extracted_source_code_folder_name 91 | ssh -q $vm_connection -p $vm_ssh_port rm -r /root/sc_testing_folder/$folder_name 92 | -------------------------------------------------------------------------------- /Tests/Teradata/scripts/execute_scripts.sh: -------------------------------------------------------------------------------- 1 | # 2 | #####Constants 3 | STEP_MESSAGE='\033[0;34m' # Green 4 | ERROR='\033[0;31m' # Red 5 | NC='\033[0m' # No Color 6 | 7 | #####Parameters 8 | ##### 1 - Source code folder name 9 | ##### 2 - Extracted code folder name 10 | ##### 3 to n - Extraction parameters in the following format key="value" 11 | 12 | source_code_folder_name="$1" 13 | extracted_source_code_folder_name="$2" 14 | if [ ! "$source_code_folder_name" ] || [ ! -d "../source_code/$source_code_folder_name/" ] ; then 15 | echo "${ERROR}Invalid parameter '$source_code_folder_name', options are [$(ls ../source_code)]${NC}" 16 | exit 1 17 | fi 18 | if [ ! 
"$extracted_source_code_folder_name" ] ; then 19 | echo "${ERROR}Invalid parameter '$extracted_source_code_folder_name', this value is the output folder name.${NC}" 20 | exit 1 21 | fi 22 | 23 | for ARGUMENT in "${@:3}" 24 | do 25 | extraction_parameters="$extraction_parameters \"$ARGUMENT\"" 26 | done 27 | 28 | echo "${STEP_MESSAGE}Step 1/3 Deplying database...${NC}" 29 | source execute_deploy_database_script.sh $source_code_folder_name 30 | echo "${STEP_MESSAGE}Step 2/3 Extracting database...${NC}" 31 | eval "source execute_extract_database_script.sh $source_code_folder_name $extracted_source_code_folder_name $extraction_parameters" 32 | echo "${STEP_MESSAGE}Step 3/3 Removing database...${NC}" 33 | source execute_drop_database_script.sh $source_code_folder_name 34 | -------------------------------------------------------------------------------- /Tests/Teradata/scripts/ssh_automatic_login_configuration.sh: -------------------------------------------------------------------------------- 1 | # 2 | source config.sh 3 | ssh-keygen -t rsa -b 2048 4 | ssh-copy-id -p $vm_ssh_port $vm_connection -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_CreateMacro.sql: -------------------------------------------------------------------------------- 1 | CREATE MACRO Get_Emp AS ( 2 | SELECT EmployeeID,FirstName,LastName 3 | FROM Employee; 4 | ); 5 | 6 | 7 | CREATE MACRO Get_Emp_version2 AS ( 8 | SELECT EmployeeID,FirstName,LastName 9 | FROM Employee; 10 | ); -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_Databases.sql: -------------------------------------------------------------------------------- 1 | -- 2 | CREATE DATABASE SC_EXAMPLE_DEMO FROM DBC AS PERM = 100000000; 3 | CREATE DATABASE SC_EXAMPLE_DEMO_2 FROM DBC AS PERM = 100000000; -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_JoinIndex.sql: -------------------------------------------------------------------------------- 1 | CREATE JOIN INDEX Employee_JI 2 | AS 3 | SELECT EmployeeID,FirstName,LastName 4 | FROM Employee 5 | PRIMARY INDEX(FirstName); 6 | 7 | 8 | 9 | CREATE JOIN INDEX Employee_JI2 10 | AS 11 | SELECT EmployeeID,FirstName,LastName 12 | FROM Employee 13 | PRIMARY INDEX(LastName); -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_Tables.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE Employee ( 2 | EmployeeID INT NOT NULL, 3 | FirstName VARCHAR(50), 4 | LastName VARCHAR(50), 5 | Department VARCHAR(50), 6 | Email VARCHAR(100), 7 | Salary number, 8 | PRIMARY KEY (EmployeeID) 9 | ); 10 | 11 | CREATE TABLE salary_log( 12 | type_user VARCHAR(50), 13 | id INT, 14 | old_salary number, 15 | new_salary number 16 | ); 17 | 18 | CREATE TABLE expandOnTable 19 | ( 20 | id INTEGER, 21 | pd PERIOD ( TIMESTAMP) 22 | ); 23 | 24 | CREATE TABLE SC_EXAMPLE_DEMO_2.project 25 | ( 26 | emp_id INTEGER, 27 | project_name VARCHAR(20), 28 | dept_id INTEGER, 29 | duration PERIOD( DATE) 30 | ); 31 | 32 | CREATE TABLE MessageStorage 33 | ( 34 | MessageID TIMESTAMP(0), 35 | Message1 VARCHAR(100), 36 | Message2 VARCHAR(100) 37 | ); 38 | 39 | CREATE TABLE account_balance 40 | ( 41 | account_id INTEGER NOT NULL, 42 | month_id INTEGER, 43 | balance INTEGER 
44 | ) UNIQUE PRIMARY INDEX (account_id, month_id); 45 | 46 | 47 | CREATE TABLE ResTable 48 | ( 49 | Column1 VARCHAR(255) 50 | ); 51 | 52 | CREATE TABLE EMPLOYEE_JOB_PERIODS ( 53 | FIRST_NAME VARCHAR(100), 54 | LAST_NAME VARCHAR(100), 55 | JOB_DURATION PERIOD(DATE) 56 | ); 57 | 58 | CREATE TABLE vEmployee 59 | ( 60 | PersonID INT, 61 | LastName VARCHAR(255), 62 | FirstName VARCHAR(255) 63 | ); -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_Trigger.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE TRIGGER RaiseTrig 3 | AFTER UPDATE OF salary ON employee 4 | REFERENCING OLD AS OldRow NEW AS NewRow 5 | FOR EACH ROW 6 | WHEN ((NewRow.salary - OldRow.salary) / OldRow.salary >.10) 7 | INSERT INTO salary_log 8 | VALUES ('USER', NewRow.EmployeeID, OldRow.salary, NewRow.salary); -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/DDL_Views.sql: -------------------------------------------------------------------------------- 1 | CREATE VIEW EMPLOYEE_JOB_DURATION_COMPARISONS 2 | AS 3 | LOCKING ROW FOR ACCESS 4 | SELECT 'OVERLAP' FUNC, FIRST_NAME, LAST_NAME 5 | FROM EMPLOYEE_JOB_PERIODS 6 | WHERE JOB_DURATION OVERLAPS PERIOD(DATE '2009-01-01', DATE '2010-09-24') 7 | UNION ALL 8 | SELECT 'LDIFF' FUNC, FIRST_NAME, LAST_NAME 9 | FROM EMPLOYEE_JOB_PERIODS 10 | WHERE INTERVAL(JOB_DURATION LDIFF PERIOD(DATE '2009-01-01', DATE '2010-09-24')) MONTH > 3 11 | UNION ALL 12 | SELECT 'RDIFF' FUNC, FIRST_NAME, LAST_NAME 13 | FROM EMPLOYEE_JOB_PERIODS 14 | WHERE JOB_DURATION RDIFF PERIOD(DATE '2009-01-01', DATE '2010-09-24') IS NOT NULL; -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/INSERT_VEMPLOYEE.sql: -------------------------------------------------------------------------------- 1 | CREATE PROCEDURE SC_EXAMPLE_DEMO.InsertVEmployee() 2 | BEGIN 3 | INSERT INTO SC_EXAMPLE_DEMO.vEmployee VALUES(0,'AL','Montgomery'); 4 | INSERT INTO SC_EXAMPLE_DEMO.vEmployee VALUES(0,'AL','Montgomery'); 5 | END; -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/UPDATE_VEMPLOYEE.sql: -------------------------------------------------------------------------------- 1 | CREATE PROCEDURE SC_EXAMPLE_DEMO.UpdateVEmployee() 2 | BEGIN 3 | INSERT INTO SC_EXAMPLE_DEMO.vEmployee VALUES(0,'AL','Montgomery'); 4 | INSERT INTO SC_EXAMPLE_DEMO.vEmployee VALUES(0,'AL','Montgomery'); 5 | END; -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/my_yyyymmdd_to_date2.c: -------------------------------------------------------------------------------- 1 | /* 2 | my_yyyymmdd_to_date2.c 3 | Teradata User Defined Function (UDF) 4 | Calling 5 | ------- 6 | my_yyyymmdd_to_date2(date_str); 7 | SELECT my_yyyymmdd_to_date2('20130423') AS ValidDate; 8 | Parameters 9 | ---------- 10 | date_str 11 | Character string containing date to be validated 12 | UDF Compilation 13 | --------------- 14 | REPLACE FUNCTION my_yyyymmdd_to_date2 15 | ( 16 | InputDate VARCHAR(8) 17 | ) 18 | RETURNS DATE 19 | LANGUAGE C 20 | NO SQL 21 | DETERMINISTIC 22 | PARAMETER STYLE SQL 23 | EXTERNAL NAME 'CS!my_yyyymmdd_to_date2!./my_yyyymmdd_to_date2.c' 24 | ; 25 | */ 26 | /* Must define SQL_TEXT before 
including "sqltypes_td "*/ 27 | /* Must define SQL_TEXT before including "sqltypes_td "*/ 28 | #define SQL_TEXT Latin_Text 29 | #include "sqltypes_td.h" 30 | #include "stdio.h" 31 | #include "string.h" 32 | #define IsNull -1 33 | #define IsNotNull 0 34 | #define NoSqlError "00000" 35 | #define YYYYMMDD_LENGTH 8 36 | #define ERR_RC 99 37 | void my_yyyymmdd_to_date2 38 | ( 39 | VARCHAR_LATIN *InputDateString 40 | ,DATE *result 41 | ,int *inputDateStringIsNull 42 | ,int *resultIsNull 43 | ,char sqlstate[6] 44 | ,SQL_TEXT extname[129] 45 | ,SQL_TEXT specificname[129] 46 | ,SQL_TEXT error_message[257] 47 | ) 48 | { 49 | char input_integer[30]; 50 | int year_yyyy; 51 | int month_mm; 52 | int day_dd; 53 | char day_char[3]; 54 | char month_char[3]; 55 | char year_char[5]; 56 | int in_len,i; 57 | /* Return Nulls on Null Input */ 58 | if ((*inputDateStringIsNull == IsNull)) 59 | { 60 | strcpy(sqlstate, "22018") ; 61 | strcpy((char *) error_message, "Null value not allowed.") ; 62 | *resultIsNull = IsNull; 63 | return; 64 | } 65 | in_len = strlen(InputDateString); 66 | if ( in_len != YYYYMMDD_LENGTH ) 67 | { 68 | *result = ( 1 * 10000 ) + ( 12 * 100) + 1; 69 | *resultIsNull = IsNull; 70 | strcpy((char *) sqlstate, "01H01"); 71 | strcpy((char *) error_message, 72 | "InputDateString is of wrong length, must be in YYYYMMDD format"); 73 | return; 74 | } 75 | if ( in_len != YYYYMMDD_LENGTH ) 76 | { 77 | *result = ( 1 * 10000 ) + ( 12 * 100) + 2; 78 | return; 79 | } 80 | strcpy(input_integer , (char *) InputDateString); 81 | for (i = 0; i '9') 84 | { 85 | *result = ( 1 * 10000 ) + ( 1 * 100) + 3; 86 | return; 87 | } 88 | else 89 | { 90 | input_integer[i] = tolower(input_integer[i]); 91 | } 92 | } 93 | sprintf(year_char,"%c%c%c%c",input_integer[0],input_integer[1],input_integer[2], 94 | input_integer[3]); 95 | sprintf(month_char,"%c%c",input_integer[4],input_integer[5]); 96 | sprintf(day_char,"%c%c",input_integer[6],input_integer[7]); 97 | year_yyyy = atoi(year_char); 98 | month_mm = atoi(month_char); 99 | day_dd = atoi(day_char); 100 | /* Format output_date in internal Teradata format ((YEAR - 1900) * 10000 ) + 101 | (MONTH * 100) + DAY */ 102 | *result = (( year_yyyy - 1900 ) * 10000 ) + ( month_mm * 100) + day_dd; 103 | } -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/database_code/my_yyyymmdd_to_date2.sql: -------------------------------------------------------------------------------- 1 | CREATE FUNCTION SC_EXAMPLE_DEMO.my_yyyymmdd_to_date2 2 | ( 3 | InputDate VARCHAR(8) CHARACTER SET LATIN 4 | ) 5 | RETURNS DATE 6 | LANGUAGE C 7 | SPECIFIC my_yyyymmdd_to_date2 8 | NO SQL 9 | DETERMINISTIC 10 | PARAMETER STYLE SQL 11 | CALLED ON NULL INPUT 12 | EXTERNAL NAME 'CS!my_yyyymmdd_to_date2!./database_code/my_yyyymmdd_to_date2.c' 13 | ; -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/deploy_database.sh: -------------------------------------------------------------------------------- 1 | # 2 | #####Constants 3 | ERROR='\033[0;31m' # Red 4 | NC='\033[0m' # No Color 5 | 6 | #####Parameters 7 | logon_command="$1" 8 | if [ ! 
"$logon_command" ];then 9 | echo "${ERROR}Logon command not provided${NC}" 10 | exit 1 11 | fi 12 | 13 | bteq << EOF 14 | .logon $logon_command; 15 | .RUN FILE ./database_code/DDL_Databases.sql 16 | DATABASE SC_EXAMPLE_DEMO; 17 | GRANT ALL PRIVILEGES ON SC_EXAMPLE_DEMO 18 | TO DBC 19 | WITH GRANT OPTION; 20 | DATABASE SC_EXAMPLE_DEMO_2; 21 | GRANT ALL PRIVILEGES ON SC_EXAMPLE_DEMO_2 22 | TO DBC 23 | WITH GRANT OPTION; 24 | DATABASE SC_EXAMPLE_DEMO; 25 | 26 | .RUN FILE ./database_code/DDL_SF_Schemas.sql 27 | .RUN FILE ./database_code/DDL_Tables.sql 28 | .RUN FILE ./database_code/DDL_AlterTables.sql 29 | .RUN FILE ./database_code/DDL_Trigger.sql 30 | .RUN FILE ./database_code/DDL_Views.sql 31 | .RUN FILE ./database_code/DDL_CreateMacro.sql 32 | .RUN FILE ./database_code/DDL_JoinIndex.sql 33 | .COMPILE FILE = ./database_code/UPDATE_VEMPLOYEE.sql 34 | .COMPILE FILE = ./database_code/INSERT_VEMPLOYEE.sql 35 | .RUN FILE ./database_code/my_yyyymmdd_to_date2.sql 36 | EOF 37 | -------------------------------------------------------------------------------- /Tests/Teradata/source_code/demo_database/drop_database.sh: -------------------------------------------------------------------------------- 1 | # 2 | #####Constants 3 | ERROR='\033[0;31m' # Red 4 | NC='\033[0m' # No Color 5 | 6 | 7 | #####Parameters 8 | logon_command="$1" 9 | if [ ! "$logon_command" ];then 10 | echo "${ERROR}Logon command not provided${NC}" 11 | exit 1 12 | fi 13 | 14 | bteq << EOF 15 | .logon $logon_command; 16 | -- 500 MB, final size should be < 300 MB 17 | 18 | DROP JOIN INDEX SC_EXAMPLE_DEMO_2.Employee_JI; 19 | DROP JOIN INDEX SC_EXAMPLE_DEMO_2.Employee_JI2; 20 | 21 | DELETE DATABASE SC_EXAMPLE_DEMO_2 ALL; 22 | MODIFY DATABASE SC_EXAMPLE_DEMO_2 AS DROP DEFAULT JOURNAL TABLE; 23 | 24 | DROP JOIN INDEX SC_EXAMPLE_DEMO.Employee_JI; 25 | DROP JOIN INDEX SC_EXAMPLE_DEMO.Employee_JI2; 26 | 27 | DELETE DATABASE SC_EXAMPLE_DEMO ALL; 28 | MODIFY DATABASE SC_EXAMPLE_DEMO AS DROP DEFAULT JOURNAL TABLE; 29 | 30 | 31 | DROP DATABASE SC_EXAMPLE_DEMO; 32 | 33 | DROP DATABASE SC_EXAMPLE_DEMO_2; 34 | EOF 35 | -------------------------------------------------------------------------------- /Tests/Teradata/teradata_extraction_test_base.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import subprocess 3 | from database_summary.database_source_code_summarizer import sumarize_database_source_code 4 | from database_summary.database_source_code_summary import DatabaseSourceCodeSummary 5 | from database_summary.database_source_code_summary import TopLevelObjectType 6 | 7 | class TeradataExtractionTestBase(unittest.TestCase): 8 | source_database_path = "./source_code/" 9 | extracted_database_path = "./extracted_code/" 10 | 11 | def __init__(self, *args, **kwargs): 12 | super(TeradataExtractionTestBase, self).__init__(*args, **kwargs) 13 | source_database_summary : DatabaseSourceCodeSummary = None 14 | extracted_database_summary : DatabaseSourceCodeSummary= None 15 | error_messages : "list[str]" = [] 16 | 17 | def set_up_class(self, database_source_folder_name: str, database_output_folder_name: str, extraction_parameters: "list[str]"): 18 | self.run_extraction_scripts("demo_database", "test_demo_database", extraction_parameters) 19 | self.source_database_summary = self.sumarize_source_code(database_source_folder_name) 20 | self.extracted_database_summary = self.sumarize_extracted_code(database_output_folder_name) 21 | self.error_messages = [] 22 | 23 | def 
sumarize_source_code(database_folder_name: str) -> DatabaseSourceCodeSummary: 24 | result = sumarize_database_source_code("./source_code/"+database_folder_name) 25 | return result 26 | 27 | def validate_top_level_objects_quantity(self, type: TopLevelObjectType, expected_amount: int) -> DatabaseSourceCodeSummary: 28 | actual_amount = self.source_database_summary.get_top_level_object_to_int_map()[type] 29 | try: 30 | self.assertEqual(actual_amount, expected_amount) 31 | except AssertionError: 32 | self.error_messages += [f"Expected {expected_amount} {type.name.lower() + ('s' if expected_amount > 1 else '')} in source code, but {actual_amount} found"] 33 | 34 | actual_amount = self.extracted_database_summary.get_top_level_object_to_int_map()[type] 35 | try: 36 | self.assertEqual(actual_amount, expected_amount) 37 | except AssertionError: 38 | self.error_messages += [f"Expected {expected_amount} {type.name.lower() + ('s' if expected_amount > 1 else '')} in extracted code, but {actual_amount} found"] 39 | 40 | def validate_extracted_files_quantity(self, actual_amount, expected_amount: int) -> DatabaseSourceCodeSummary: 41 | try: 42 | self.assertEqual(actual_amount, expected_amount) 43 | except AssertionError as e: 44 | error_messages += [f"Expected {expected_amount} file{'s' if expected_amount > 1 else ''} in extracted files, but {actual_amount} found"] 45 | 46 | def assert_no_errors_messages(self): 47 | if len(self.error_messages) > 0: 48 | error_message = '\n'.join(self.error_messages) 49 | raise AssertionError(error_message) 50 | 51 | 52 | def sumarize_extracted_code(database_folder_name: str) -> DatabaseSourceCodeSummary: 53 | return sumarize_database_source_code("./extracted_code/"+database_folder_name) 54 | 55 | def run_extraction_scripts(database_folder_name: str, extraction_output_folder_name: str, extraction_parameters: "list[str]") -> None: 56 | subprocess.call(['sh', './execute_scripts.sh', database_folder_name, extraction_output_folder_name] + extraction_parameters, cwd='./scripts') 57 | 58 | def remove_extraction_results(database_folder_name: str) -> None: 59 | subprocess.call(['rm', '-r', f'extracted_code/{database_folder_name}' ]) 60 | 61 | 62 | 63 | -------------------------------------------------------------------------------- /Tests/Teradata/test_demo_database.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from teradata_extraction_test_base import TeradataExtractionTestBase 3 | from database_summary.top_level_object_type import TopLevelObjectType 4 | 5 | class TestDemoDatabase(TeradataExtractionTestBase): 6 | 7 | @classmethod 8 | def setUpClass(cls): 9 | extraction_parameters = ["include_databases=(UPPER(T1.DATABASENAME) IN ('SC_EXAMPLE_DEMO', 'SC_EXAMPLE_DEMO_2') )", 10 | "exclude_databases=(UPPER(T1.DATABASENAME) NOT IN ('SYS_CALENDAR','ALL','CONSOLE','CRASHDUMPS','DBC','DBCMANAGER','DBCMNGR','DEFAULT','EXTERNAL_AP','EXTUSER','LOCKLOGSHREDDER','PDCRADM','PDCRDATA','PDCRINFO','PUBLIC','SQLJ','SYSADMIN','SYSBAR','SYSJDBC','SYSLIB','SYSSPATIAL','SYSTEMFE','SYSUDTLIB','SYSUIF','TD_SERVER_DB','TD_SYSFNLIB','TD_SYSFNLIB','TD_SYSGPL','TD_SYSXML','TDMAPS', 'TDPUSER','TDQCD','TDSTATS','TDWM','VIEWPOINT','PDCRSTG'))"] 11 | cls.set_up_class(cls, "demo_database", "test_demo_database", extraction_parameters) 12 | 13 | def test_database_files(self): 14 | self.validate_extracted_files_quantity(self.extracted_database_summary.get_count_of_files(), 9) 15 | self.validate_top_level_objects_quantity(TopLevelObjectType.TABLE, 9) 16 | 
self.validate_top_level_objects_quantity(TopLevelObjectType.DATABASE, 2) 17 | self.validate_top_level_objects_quantity(TopLevelObjectType.PROCEDURE, 2) 18 | self.validate_top_level_objects_quantity(TopLevelObjectType.JOIN_INDEX, 2) 19 | self.validate_top_level_objects_quantity(TopLevelObjectType.MACRO, 2) 20 | self.validate_top_level_objects_quantity(TopLevelObjectType.FUNCTION, 1) 21 | self.validate_top_level_objects_quantity(TopLevelObjectType.TRIGGER, 1) 22 | self.validate_top_level_objects_quantity(TopLevelObjectType.VIEW, 1) 23 | 24 | self.assert_no_errors_messages() 25 | 26 | 27 | if __name__ == '__main__': 28 | unittest.main() -------------------------------------------------------------------------------- /VERSION: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Snowflake Inc. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | __version__ = "0.0.96" -------------------------------------------------------------------------------- /VERSION-UPDATE.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | find_repo_root() { 6 | REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 7 | 8 | if [ ! -f "$REPO_ROOT/VERSION" ]; then 9 | echo "ERROR: VERSION file not found. This script must be executed from the repository root." 10 | exit 1 11 | fi 12 | } 13 | 14 | extract_version() { 15 | VERSION=$(grep '__version__' "$REPO_ROOT/VERSION" | sed 's/.*"\(.*\)".*/\1/') 16 | 17 | if [ -z "$VERSION" ]; then 18 | echo "ERROR: Could not extract version from VERSION file." 19 | exit 1 20 | fi 21 | 22 | echo "Extracted version: $VERSION" 23 | } 24 | 25 | verify_normalize_script() { 26 | NORMALIZE_SCRIPT="$REPO_ROOT/.github/scripts/common-normalize-version.sh" 27 | 28 | if [ ! -f "$NORMALIZE_SCRIPT" ]; then 29 | echo "ERROR: common-normalize-version.sh script not found at $NORMALIZE_SCRIPT." 30 | exit 1 31 | fi 32 | } 33 | 34 | execute_normalize_script() { 35 | echo "Executing common-normalize-version.sh..." 36 | "$NORMALIZE_SCRIPT" 37 | } 38 | 39 | update_powershell_scripts() { 40 | echo "Updating version in PowerShell scripts..." 41 | 42 | POWERSHELL_FILES=$(find "$REPO_ROOT" -name "*.ps1" -type f) 43 | 44 | for PS_FILE in $POWERSHELL_FILES; do 45 | echo "Checking $PS_FILE..." 46 | 47 | if grep -q "\$version = ['\\\"]\(v\)\?[0-9][0-9.]*['\\\"]\|^\$version = ['\\\"].*['\\\"]" "$PS_FILE"; then 48 | echo " Found hardcoded version in $PS_FILE, updating..." 49 | 50 | cp "$PS_FILE" "$PS_FILE.bak" 51 | 52 | sed -i '' "s/\$version = ['\\\"]\(v\)\?[0-9][0-9.]*['\\\"]/\$version = '${VERSION}'/" "$PS_FILE" 53 | 54 | if diff -q "$PS_FILE" "$PS_FILE.bak" >/dev/null; then 55 | echo " Warning: Version not updated in $PS_FILE. Trying alternative method..." 
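          # Portability note: the sed call above uses the BSD/macOS form `sed -i ''` (empty
          # backup suffix). GNU sed does not accept a separate empty suffix argument, so the
          # substitution does not happen there; the perl fallback below covers that case.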
56 | perl -i -pe "s/\\\$version = ['\\\"](?:v)?[0-9][0-9.]*['\\\"]/\\\$version = '${VERSION}'/" "$PS_FILE" 57 | fi 58 | 59 | rm -f "$PS_FILE.bak" 60 | 61 | if grep -q "\$version = ['\\\"]\(v\)\?${VERSION}['\\\"]\|^\$version = ['\\\"]\(v\)\?${VERSION}['\\\"]\$" "$PS_FILE"; then 62 | echo " Successfully updated $PS_FILE to version $VERSION" 63 | else 64 | echo " Failed to update version in $PS_FILE" 65 | fi 66 | fi 67 | done 68 | } 69 | 70 | update_bash_scripts() { 71 | echo "Updating version in Bash scripts..." 72 | 73 | BASH_FILES=$(find "$REPO_ROOT" -name "*.sh" -type f) 74 | 75 | for BASH_FILE in $BASH_FILES; do 76 | echo "Checking $BASH_FILE..." 77 | 78 | if [[ "$BASH_FILE" == *"VERSION-UPDATE.sh" ]]; then 79 | echo " Skipping $BASH_FILE (this script)" 80 | continue 81 | fi 82 | 83 | if grep -q "VERSION=[\"'][^\"']*[\"']" "$BASH_FILE"; then 84 | echo " Found hardcoded version in $BASH_FILE, updating..." 85 | 86 | cp "$BASH_FILE" "$BASH_FILE.bak" 87 | 88 | sed -i '' "s/VERSION=[\"'][^\"']*[\"']/VERSION=\"${VERSION}\"/" "$BASH_FILE" 89 | 90 | if diff -q "$BASH_FILE" "$BASH_FILE.bak" >/dev/null; then 91 | echo " Warning: Version not updated in $BASH_FILE. Trying alternative method..." 92 | perl -i -pe "s/VERSION=[\"'][^\"']*[\"']/VERSION=\"${VERSION}\"/" "$BASH_FILE" 93 | fi 94 | 95 | rm -f "$BASH_FILE.bak" 96 | 97 | if grep -q "VERSION=[\"']${VERSION}[\"']" "$BASH_FILE"; then 98 | echo " Successfully updated $BASH_FILE to version $VERSION" 99 | else 100 | echo " Failed to update version in $BASH_FILE" 101 | fi 102 | fi 103 | done 104 | } 105 | 106 | main() { 107 | find_repo_root 108 | extract_version 109 | verify_normalize_script 110 | execute_normalize_script 111 | update_powershell_scripts 112 | update_bash_scripts 113 | } 114 | 115 | main 116 | exit $? 
117 | -------------------------------------------------------------------------------- /Vertica/DocumentationImages/BinNewTerminal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/BinNewTerminal.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/ConnectToServer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/ConnectToServer.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/ContainerRunning.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/ContainerRunning.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/CreateTables.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/CreateTables.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/CreateViews.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/CreateViews.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/DockerExtensions.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/DockerExtensions.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/DockerRunning.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/DockerRunning.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/ExampleScripts.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/ExampleScripts.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/FoldeStructure.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/FoldeStructure.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/Folder.PNG: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/Folder.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/Launchjson.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/Launchjson.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/PipInstall_sqlparse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/PipInstall_sqlparse.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/PipInstallsqlparse.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/PipInstallsqlparse.PNG -------------------------------------------------------------------------------- /Vertica/DocumentationImages/PythonDDLRunSucessfully.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/PythonDDLRunSucessfully.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/PythonScripts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/PythonScripts.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/PythonVersion.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/PythonVersion.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/RunDockerVertica.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/RunDockerVertica.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/RunPythonCode.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/RunPythonCode.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/RunPythonCode02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/RunPythonCode02.png 
-------------------------------------------------------------------------------- /Vertica/DocumentationImages/TempFileCreated.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/TempFileCreated.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/TempFolder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/TempFolder.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/VerticaClientDriversLinux.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/VerticaClientDriversLinux.png -------------------------------------------------------------------------------- /Vertica/DocumentationImages/VerticaTarFile.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/Vertica/DocumentationImages/VerticaTarFile.png -------------------------------------------------------------------------------- /Vertica/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /Vertica/Scripts/SFConfig.py: -------------------------------------------------------------------------------- 1 | import os 2 | import os.path 3 | 4 | # Class file for SnowFlake configuration 5 | #This class has all the necessary variables to be able to connect to the Snowflake database. 
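# readConfig() below parses an INI-style file such as the sample sfConf.txt in the Vertica
# folder of this repo: a [snowflake] section with key=value pairs, plus [schema_mappings],
# [inview_mappings] and [execution] sections.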
6 | #Snowflake Account 7 | #User 8 | #Password 9 | #Snowflake warehouse 10 | #Schema 11 | #Role 12 | #SSL 13 | 14 | 15 | 16 | class SFConfig: 17 | 18 | def __init__(self, sfAccount = "", sfUser = "", sfPassword = "", sfWarehouse = "", sfRole= ""): 19 | self.sfAccount = sfAccount 20 | self.sfUser = sfUser 21 | self.sfPassword = sfPassword 22 | self.sfWarehouse = sfWarehouse 23 | self.sfRole = sfRole 24 | self.schemaMapping = ["",""] 25 | self.execution = ["",""] 26 | self.inviewMappings = ["",""] 27 | 28 | def readConfig(self, configFile): 29 | isSnowflakeFile = False 30 | mappingSpecified = False 31 | executionSpecified = False 32 | inviewMappingSpecified = False 33 | 34 | with open(configFile) as f: 35 | for line in f: 36 | line = line.strip() 37 | # Ignore blank lines and comments 38 | if line == "" or line[0] == "#": 39 | continue 40 | 41 | if line.upper() == "[SNOWFLAKE]": 42 | isSnowflakeFile = True 43 | elif line.upper() == "[SCHEMA_MAPPINGS]": 44 | mappingSpecified = True 45 | inviewMappingSpecified = False 46 | executionSpecified = False 47 | self.schemaMapping.clear() 48 | elif line.upper() == "[INVIEW_MAPPINGS]": 49 | inviewMappingSpecified = True 50 | mappingSpecified = False 51 | executionSpecified = False 52 | self.inviewMappings.clear() 53 | elif line.upper() == "[EXECUTION]": 54 | executionSpecified = True 55 | mappingSpecified = False 56 | inviewMappingSpecified = False 57 | self.execution.clear() 58 | 59 | else: 60 | lineItems = line.split("=") 61 | if len(lineItems) != 2: 62 | print("Invalid Config Line: " + line) 63 | else: 64 | if mappingSpecified is True: 65 | self.schemaMapping.append([lineItems[0], lineItems[1]]) 66 | continue 67 | 68 | if executionSpecified is True: 69 | self.execution.append([lineItems[0], lineItems[1]]) 70 | continue 71 | 72 | if inviewMappingSpecified is True: 73 | self.inviewMappings.append([lineItems[0], lineItems[1]]) 74 | continue 75 | 76 | if lineItems[0].upper() == "ACCOUNT": 77 | self.sfAccount = lineItems[1] 78 | elif lineItems[0].upper() == "USER": 79 | self.sfUser = lineItems[1] 80 | elif lineItems[0].upper() == "PASSWORD": 81 | self.sfPassword = lineItems[1] 82 | elif lineItems[0].upper() == "ROLE": 83 | self.sfRole = lineItems[1] 84 | elif lineItems[0].upper() == "ROLE": 85 | self.sfRole = lineItems[1] 86 | elif lineItems[0].upper() == "PROCESSVIEWS": 87 | if lineItems[1].upper() == "TRUE": 88 | self.processViews = True; 89 | else: 90 | self.processViews = False; 91 | 92 | def validate(self): 93 | # TODO - Add validation 94 | 95 | return True -------------------------------------------------------------------------------- /Vertica/Scripts/SFConvert.py: -------------------------------------------------------------------------------- 1 | import os, sys  # sys is needed for the sys.exit() calls below 2 | import os.path 3 | import vertica_python 4 | import snowflake.connector 5 | import logging 6 | 7 | class SFConvert: 8 | 9 | def __init__(self, logger): 10 | self.logger = logger 11 | self.binary = ["VARBINARY", "LONG VARBINARY", "BYTEA", "RAW", "BINARY"] 12 | self.char = ["CHAR", "LONG VARCHAR", "VARCHAR"] 13 | self.timestamp = ["DATETIME", "SMALLDATETIME"] 14 | self.interval = ["INTERVAL", "INTERVAL DAY TO SECOND", "INTERVAL YEAR TO MONTH"] 15 | self.numberic = ["INT8", "TINYINT"] 16 | 17 | def vertTableToSFTable(self, sfConfig, tableRow, tableColumns): 18 | 19 | # Defaults 20 | ddlDrop = "FALSE" 21 | ddlExecute = "FALSE" 22 | 23 | # Find the vertica table schema in the schema mapping to determine the location (db, schema) to create the table 24 | mappingFound = False 25 | for mapping in sfConfig.schemaMapping: 26 | if
mapping[0].upper() == tableRow['table_schema'].upper(): 27 | mappingFound = True 28 | snowflakeDbSchema = mapping[1] 29 | 30 | # Abort if mapping is not found 31 | if mappingFound is False: 32 | self.logger.error("Unable to find mapping for Vertica schema: " + tableRow['table_schema']) 33 | sys.exit(99) 34 | 35 | # Obtain execution model and drop existing 36 | for exec in sfConfig.execution: 37 | if exec[0].upper() == "DDLEXECUTE": 38 | ddlExecute = exec[1] 39 | elif exec[0].upper() == "DROPSAVE": 40 | ddlSave = exec[1] 41 | # Check the folder exists 42 | if not os.path.isdir(ddlSave): 43 | self.logger.error("Save DDL path (" + ddlSave + ") does not exist on your workstation") 44 | sys.exit(99) 45 | elif exec[0].upper() == "DROPEXISTING": 46 | ddlDrop = exec[1] 47 | 48 | # Construct the table DDL 49 | if ddlDrop.upper() == "TRUE": 50 | sfTable = "Create or Replace Table " 51 | else: 52 | sfTable = "Create Table " 53 | sfTable += snowflakeDbSchema + "." + tableRow['table_name'] + "\n" 54 | sfTable += "(\n" 55 | 56 | boolFirstCol = True 57 | for colIdx, col in tableColumns.iterrows(): 58 | if boolFirstCol is True: 59 | sfTable += " " + col['column_name'] 60 | boolFirstCol = False 61 | else: 62 | sfTable += "," + col['column_name'] 63 | 64 | if col['data_type'].find("(") >-1: 65 | rawDataType = col['data_type'][0:col['data_type'].find("(")] 66 | typeLen = col['data_type'][col['data_type'].find("(") + 1:col['data_type'].find(")")] 67 | else: 68 | rawDataType = col['data_type'] 69 | typeLen = "1" 70 | 71 | 72 | # Check datatype 73 | if rawDataType.upper() in self.binary: 74 | sfTable += " BINARY " + "(" + typeLen + ")" 75 | elif rawDataType.upper() in self.char: 76 | sfTable += " VARCHAR " + "(" + typeLen + ")" 77 | elif rawDataType.upper() in self.timestamp: 78 | sfTable += " TIMESTAMP " 79 | elif rawDataType.upper() == "TIME WITH TIMEZONE": 80 | sfTable += " TIME " 81 | self.logger.warn("Table: " + tableRow['table_schema'] + "." + tableRow['table_name'] + " TIME WITH TIMEZONE migrated to TIME") 82 | elif rawDataType.upper() == "TIMESTAMP": 83 | sfTable += " TIMESTAMP_NTZ " 84 | elif rawDataType.upper() == "TIMESTAMP WITH TIMEZONE": 85 | sfTable += " TIMESTAMP_TZ " 86 | elif rawDataType.upper() in self.interval: 87 | sfTable += " INT " 88 | self.logger.warn("Table: " + tableRow['table_schema'] + "." + tableRow['table_name'] + " INTERVAL migrated to INT") 89 | elif rawDataType.upper() in self.numberic: 90 | sfTable += " NUMBER " 91 | elif rawDataType.upper() == "MONEY": 92 | sfTable += " NUMBER (18,4) " 93 | elif rawDataType.upper() == "GEOMETRY": 94 | sfTable += " BINARY " 95 | self.logger.warn("Table: " + tableRow['table_schema'] + "." + tableRow['table_name'] + " GEOMETRY migrated to BINARY ") 96 | elif rawDataType.upper() == "GEOGRAPHY": 97 | sfTable += " BINARY " 98 | self.logger.warn("Table: " + tableRow['table_schema'] + "." + tableRow['table_name'] + " GEOGRAPHY migrated to BINARY ") 99 | elif rawDataType.upper() == "UUID": 100 | sfTable += " INTEGER " 101 | self.logger.warn("Table: " + tableRow['table_schema'] + "."
+ tableRow['table_name'] + " Requires Identity Column ") 102 | else: 103 | sfTable += " " + col['data_type'] 104 | 105 | # Add not null if needed 106 | if col['is_nullable'].upper() == "FALSE": 107 | sfTable += " NOT NULL" 108 | 109 | sfTable += "\n" 110 | 111 | sfTable += ")\n" 112 | 113 | 114 | 115 | return sfTable 116 | 117 | 118 | def buildView(self, sfConfig, tableSchema, viewName, viewDefinition): 119 | 120 | # Find the vertica table schema in the schema mapping to determine the location (db, schema) to create the table 121 | mappingFound = False 122 | for mapping in sfConfig.schemaMapping: 123 | if mapping[0].upper() == tableSchema.upper(): 124 | mappingFound = True 125 | snowflakeDbSchema = mapping[1] 126 | 127 | sfSQL = "Create or Replace View " + snowflakeDbSchema + "." + viewName + " AS " + viewDefinition 128 | 129 | # This view will reference tables that exist in Vertica. 130 | # The mappings can be used to modify these references 131 | for mapping in sfConfig.inviewMappings: 132 | sfSQL = sfSQL.replace(mapping[0] + ".", mapping[1] + ".") 133 | 134 | return sfSQL 135 | 136 | def executeSQL(self, dbConn, ddlString): 137 | 138 | try: 139 | self.logger.info("execute SQL Start") 140 | sfCursor = dbConn.cursor().execute(ddlString) 141 | sfCursor.close() 142 | self.logger.info("Success! ") 143 | 144 | except snowflake.connector.errors.ProgrammingError as sfExp: 145 | errorString = format("Error No: " + str(sfExp.errno) + "\n" + str(sfExp.sqlstate) + "\n" + str(sfExp.msg)) 146 | self.logger.error(errorString) 147 | -------------------------------------------------------------------------------- /Vertica/Scripts/SQL_Convert/sqls/vmart_query_01.sql: -------------------------------------------------------------------------------- 1 | SELECT fat_content 2 | FROM ( 3 | SELECT DISTINCT fat_content 4 | FROM product_dimension 5 | WHERE department_description 6 | IN ('Dairy') ) AS food 7 | ORDER BY fat_content 8 | LIMIT 5; -------------------------------------------------------------------------------- /Vertica/Scripts/VerticaConfig.py: -------------------------------------------------------------------------------- 1 | import os 2 | import os.path 3 | 4 | #Class file for Vertica configuration 5 | #This class has all the necessary variables to be able to connect to the vertica database. 
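# readConfig() below parses a file with a [vertica] section of key=value pairs, such as the
# sample verticaConf.txt in the Vertica folder of this repo (recognised keys: host, port,
# user, password, database, schema, ssl).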
6 | #HOST 7 | #PORT 8 | #USER 9 | #PASSWORD 10 | #DATABASE 11 | #SCHEMA 12 | #SSL 13 | 14 | class VerticaConfig: 15 | 16 | def __init__(self, host = "", port = "", user = "", password = "", database = "", ssl = False, schema = ""): 17 | self.host = host 18 | self.port = port 19 | self.user = user 20 | self.password = password 21 | self.database = database 22 | self.ssl = ssl 23 | self.schema = schema 24 | 25 | def readConfig(self, configFile): 26 | isVerticaFile = False 27 | 28 | with open(configFile) as f: 29 | for line in f: 30 | line = line.strip() 31 | if line.upper() == "[VERTICA]": 32 | isVerticaFile = True 33 | else: 34 | lineItems = line.split("=") 35 | if len(lineItems) != 2: 36 | print("Invalid Config Line: " + line) 37 | continue 38 | 39 | if lineItems[0].upper() == "HOST": 40 | self.host = lineItems[1] 41 | elif lineItems[0].upper() == "PORT": 42 | self.port = lineItems[1] 43 | elif lineItems[0].upper() == "USER": 44 | self.user = lineItems[1] 45 | elif lineItems[0].upper() == "PASSWORD": 46 | self.password = lineItems[1] 47 | elif lineItems[0].upper() == "DATABASE": 48 | self.database = lineItems[1] 49 | elif lineItems[0].upper() == "SCHEMA": 50 | self.schema = lineItems[1] 51 | elif lineItems[0].upper() == "SSL": 52 | if lineItems[1].upper() == "FALSE": 53 | self.ssl = False 54 | else: 55 | self.ssl = True 56 | 57 | 58 | 59 | def validate(self): 60 | #TODO - Add validation 61 | 62 | return True 63 | 64 | 65 | 66 | -------------------------------------------------------------------------------- /Vertica/Scripts/VerticaDBCalls.py: -------------------------------------------------------------------------------- 1 | import os 2 | import os.path 3 | import vertica_python 4 | import pandas as pd 5 | # Class file to get the tables and views from Vertica 6 | #This class executes the queries to get the views and tables from Vertica 7 | #According to the number of results of views and tables the program will convert the Vertica objects to Snowflake 8 | 9 | 10 | class VerticaDBCalls: 11 | 12 | def __init__(self, dbConnection): 13 | self.dbConnection = dbConnection 14 | 15 | def getTablesInSchema(self, schema): 16 | sqlString = "select table_schema" \ 17 | ",table_name " + \ 18 | ",case " + \ 19 | " when is_temp_table is TRUE then 'TRUE' " + \ 20 | " else 'FALSE' " + \ 21 | "end as is_temp_table " + \ 22 | ",owner_name " + \ 23 | "from v_catalog.tables " + \ 24 | "where upper(table_schema) = '" + schema.upper() + "'" #+ \ 25 | #" and upper(TABLE_NAME) = 'MYTABLE1'" 26 | 27 | 28 | sqlQuery = pd.read_sql_query(sqlString, self.dbConnection) 29 | df = pd.DataFrame(sqlQuery, columns=['table_schema', 'table_name','is_temp_table','owner_name']) 30 | 31 | return df 32 | 33 | def getColumnsInTable(self, schema, table): 34 | sqlString = "select table_name " + \ 35 | ",column_name " + \ 36 | ",data_type " + \ 37 | ",data_type_length " + \ 38 | ",character_maximum_length " + \ 39 | ",numeric_precision " + \ 40 | ",numeric_scale " + \ 41 | ",datetime_precision " + \ 42 | ",interval_precision " + \ 43 | ",ordinal_position " + \ 44 | ",case " + \ 45 | " When is_nullable is TRUE Then 'TRUE' " + \ 46 | " Else 'FALSE' " + \ 47 | "end as is_nullable " + \ 48 | ",column_default " + \ 49 | ",column_set_using " + \ 50 | ",case " + \ 51 | " When is_identity IS TRUE THEN 'TRUE' " + \ 52 | " Else 'FALSE' " + \ 53 | "end as is_identity " + \ 54 | "from v_catalog.columns " + \ 55 | "where upper(table_schema) = '" + schema.upper() + "' " + \ 56 | "and upper(table_name) = '" + table.upper() + "' " + \ 57 | "order by
ordinal_position " 58 | sqlQuery = pd.read_sql_query(sqlString, self.dbConnection) 59 | df = pd.DataFrame(sqlQuery, columns=['table_name', 'column_name', 'data_type', 'data_type_length', 60 | 'character_maximum_length','numeric_precision','numeric_scale', 61 | 'datetime_precision','interval_precision','ordinal_position', 62 | 'is_nullable','column_default','is_identity']) 63 | 64 | 65 | 66 | return df 67 | 68 | def getViewsInSchema(self, schema): 69 | sqlString = "select table_schema" \ 70 | ",table_name " + \ 71 | ", view_definition " + \ 72 | ",owner_name " + \ 73 | "from v_catalog.views " + \ 74 | "where upper(table_schema) = '" + schema.upper() + "'" #+ \ 75 | 76 | 77 | sqlQuery = pd.read_sql_query(sqlString, self.dbConnection) 78 | df = pd.DataFrame(sqlQuery, columns=['table_schema', 'table_name','view_definition','owner_name']) 79 | 80 | return df 81 | -------------------------------------------------------------------------------- /Vertica/TEMP/VerticaDDL/STORE_MYTABLE_1.sql: -------------------------------------------------------------------------------- 1 | Create or Replace Table store.MYTABLE_1 2 | ( 3 | MYINT int NOT NULL 4 | ,MYINTEGER int 5 | ,MYBIGINT int 6 | ) 7 | -------------------------------------------------------------------------------- /Vertica/TEMP/VerticaDDL/STORE_MYTABLE_2.sql: -------------------------------------------------------------------------------- 1 | Create or Replace Table store.MYTABLE_2 2 | ( 3 | MYINT int NOT NULL 4 | ,MYINTEGER int 5 | ,MYBIGINT int 6 | ) 7 | -------------------------------------------------------------------------------- /Vertica/TEMP/VerticaDDL/STORE_MYVIEW1.sql: -------------------------------------------------------------------------------- 1 | Create or Replace View store.MYVIEW1 AS 2 | SELECT MYTABLE_1.MYINT, 3 | MYTABLE_1.MYINTEGER, 4 | MYTABLE_1.MYBIGINT 5 | FROM STORE.MYTABLE_1 -------------------------------------------------------------------------------- /Vertica/TEMP/VerticaDDL/STORE_MYVIEW2.sql: -------------------------------------------------------------------------------- 1 | Create or Replace View store.MYVIEW2 AS 2 | SELECT MYTABLE_2.MYINT, 3 | MYTABLE_2.MYINTEGER, 4 | MYTABLE_2.MYBIGINT 5 | FROM STORE.MYTABLE_2 -------------------------------------------------------------------------------- /Vertica/install-vertica.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [[ "$OSTYPE" != "linux-gnu"* ]] && [[ "$OSTYPE" != "darwin"* ]]; then 4 | echo "Error: This script only works on Linux or macOS systems." 5 | exit 1 6 | fi 7 | 8 | echo "Checking prerequisites..." 9 | 10 | install_package() { 11 | package_name=$1 12 | echo "Attempting to install $package_name..." 13 | 14 | if [[ "$OSTYPE" == "linux-gnu"* ]]; then 15 | if command -v apt-get &> /dev/null; then 16 | sudo apt-get update && sudo apt-get install -y $package_name 17 | elif command -v yum &> /dev/null; then 18 | sudo yum install -y $package_name 19 | else 20 | echo "Warning: Could not determine package manager. Please install $package_name manually." 21 | return 1 22 | fi 23 | elif [[ "$OSTYPE" == "darwin"* ]]; then 24 | if command -v brew &> /dev/null; then 25 | brew install $package_name 26 | else 27 | echo "Warning: Homebrew not found. Installing Homebrew..." 28 | /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" 29 | if [ $? -eq 0 ]; then 30 | echo "Homebrew installed successfully. Installing $package_name..." 
31 | brew install $package_name 32 | else 33 | echo "Warning: Failed to install Homebrew. Please install $package_name manually." 34 | return 1 35 | fi 36 | fi 37 | else 38 | return 1 39 | fi 40 | 41 | return 0 42 | } 43 | 44 | if ! command -v curl &> /dev/null; then 45 | echo "curl is not installed." 46 | install_package curl 47 | if ! command -v curl &> /dev/null; then 48 | echo "Error: Failed to install curl. Please install it manually and try again." 49 | exit 1 50 | fi 51 | else 52 | echo "curl is already installed." 53 | fi 54 | 55 | if ! command -v tar &> /dev/null; then 56 | echo "tar is not installed." 57 | install_package tar 58 | if ! command -v tar &> /dev/null; then 59 | echo "Error: Failed to install tar. Please install it manually and try again." 60 | exit 1 61 | fi 62 | else 63 | echo "tar is already installed." 64 | fi 65 | 66 | if ! command -v gzip &> /dev/null; then 67 | echo "gzip is not installed." 68 | install_package gzip 69 | if ! command -v gzip &> /dev/null; then 70 | echo "Error: Failed to install gzip. Please install it manually and try again." 71 | exit 1 72 | fi 73 | else 74 | echo "gzip is already installed." 75 | fi 76 | 77 | echo "All prerequisites are installed or have been installed." 78 | echo "Starting Vertica client driver installation..." 79 | 80 | echo "Downloading Vertica client driver..." 81 | curl https://www.vertica.com/client_drivers/9.1.x/9.1.1-0/vertica-client-9.1.1-0.x86_64.tar.gz --output vertica-client.tar.gz 82 | 83 | echo "Extracting Vertica client driver..." 84 | gzip -d vertica-client.tar.gz 85 | tar -xvf vertica-client.tar 86 | 87 | echo "Vertica client driver installation completed successfully." -------------------------------------------------------------------------------- /Vertica/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy==1.22.0 2 | pandas==1.2.4 3 | parse==1.19.0 4 | vertica-python==1.0.1 5 | -------------------------------------------------------------------------------- /Vertica/sfConf.txt: -------------------------------------------------------------------------------- 1 | [snowflake] 2 | account=xxxxx 3 | user=xxxxx 4 | password=xxxxx 5 | role=verticadb_role 6 | # schema_mappings map the vertica schema to the sf schema 7 | # Format 8 | # vertica_schema=snowflake_db.snowflake_schema 9 | [schema_mappings] 10 | store=store 11 | [inview_mappings] 12 | store=store 13 | # This section determines what to do with the DDL 14 | # Valid options are 15 | # ddlDisplay=[True|False] If True, the DDL is written to the log 16 | # ddlSave=<folder> If present, write the ddl into the given folder 17 | # ddlExecute=[True|False] If True, executes the ddl in snowflake. If not present ddlExecute will be false 18 | # dropExisting=[True|False] If True, existing table will be dropped. 19 | # If False, the table will not be dropped but a warning will be given saying the table exists 20 | # processViews=[True|False] If True, migrate the views from the vertica instance to snowflake. 21 | # be sure to utilise [inview_mappings] to ensure the view will compile ok 22 | # If false, the views will not be migrated.
23 | [execution] 24 | ddlDisplay=True 25 | ddlSave=TEMP/VerticaDDL 26 | ddlExecute=False 27 | dropExisting=True 28 | processViews=True 29 | -------------------------------------------------------------------------------- /Vertica/verticaConf.txt: -------------------------------------------------------------------------------- 1 | [vertica] 2 | host=localhost 3 | port=5433 4 | user=dbadmin 5 | database=docker 6 | ssl=False 7 | schema=store -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/[ARCHIVED] TeradataScripts/.DS_Store -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Snowflake-Labs/SC.DDLExportScripts/f1e24a4aa40463ae428beceb0f5e26c2538417a7/[ARCHIVED] TeradataScripts/Teradata/.DS_Store -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/License.txt: -------------------------------------------------------------------------------- 1 | Copyright 2023-2025 Snowflake, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Software), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | 10 | -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/README.md: -------------------------------------------------------------------------------- 1 | # Teradata Export Scripts 2 | 3 | This repository provides some simple scripts to help exporting your Teradata code so it can be migrated to [Snowflake](https://www.snowflake.com/) using [SnowConvert](https://docs.snowconvert.com/snowconvert/for-teradata/introduction) 4 | 5 | 6 | ## Usage 7 | 8 | The following are the steps to execute the DDL Code Generation. They should be executed in bash shell on a linux environment with access to bteq/tpt utilities. 9 | 10 | 1 - Modify `create_ddls.sh` in the bin folder – Using a text editor modify the following parameters: 11 | 12 | * `connection_string` 13 | * `include_databases` 14 | * `exclude_databases` 15 | * `include_objects` 16 | 17 | It is recommended to use the user 'DBC' in the connection string but a user with sysadmin privileges should also work. 
Please run on a production-like environment with up-to-date statistics. 18 | 19 | By default the script is set up to exclude system-related databases and include all others. You can modify these to get the desired scope, including the operator that is used. Parameter values must not contain spaces, and values should be all **UPPERCASE**. 20 | 21 | > Do not remove the parentheses around the entire statement; they are needed for compound logic. 22 | > Do not use the **LIKE ANY** clause for either parameter, as it can cause unexpected issues. 23 | 24 | Example values: 25 | 26 | ```sql 27 | (UPPER(T1.DATABASENAME) NOT IN ('ALL', 'TESTDB')); 28 | 29 | ((UPPER(T1.DATABASENAME) NOT IN ('ALL', 'TESTDB')) AND UPPER(T1.DATABASENAME) NOT LIKE ('TD_%')) 30 | ``` 31 | 32 | 2 - After modifying, the `create_ddls.sh` file can be run from the command line, from within the bin directory, to execute the extract. The following files will be created in the output folder: 33 | 34 | ## DDL Files 35 | 36 | These files will contain the definitions of the objects specified by the file name. 37 | 38 | * `DDL_Databases.sql` 39 | * `DDL_Tables.sql` 40 | * `DDL_Join_Indexes.sql` 41 | * `DDL_Functions.sql` 42 | * `DDL_Views.sql` 43 | * `DDL_Macros.sql` 44 | * `DDL_Procedures.sql` 45 | * `Insert_statements.sql` (these are 2 dummy records created for each Teradata table - NOT CUSTOMER DATA) 46 | 47 | ## Report Files 48 | 49 | ### System Statistics 50 | These files provide information about key system statistics and objects that can have a specific impact on conversion and migration activities. 51 | 52 | * `Object_Type_List.txt` 53 | * `Object_Type_Summary.txt` 54 | * `Table_List.txt` 55 | * `Special_Columns_List.txt` 56 | * `All_Stats.txt` 57 | * `Table_Stats.txt` 58 | * `View_Dependency_Detail.txt` 59 | * `View_Dependency_Report.txt` 60 | * `Object_Join_Indexes.txt` 61 | 62 | ### Usage Report Files 63 | 64 | These files provide information relevant to the sizing and usage of the Teradata system. They will not be created unless you uncomment the section for Creating Usage Reports. 65 | 66 | * `90_Day_CPU_Stats.txt` 67 | * `90_Day_Node_Stats.txt` 68 | * `90_Day_Workload_Stats.txt` 69 | 70 | ### Data Profiling Files 71 | 72 | These collect information about certain column types where knowledge of the underlying data is required to understand specific aspects of the migration. 73 | 74 | * `Data_Profile_Numbers.txt` 75 | 76 | ### Invalid Objects Log 77 | 78 | This file returns results showing any test failures for views that are not valid. 79 | 80 | * `invalid_objects.log` 81 | 82 | 3 - After a successful run, remove logon information from the top line of each of the files in the scripts folder as well as from the `create_ddls.sh` file. Compress the entire Teradata Source Extract and return it to Snowflake. Please do not modify or remove any files so that we can review logs as needed. 83 | 84 | ## Reporting issues and feedback 85 | 86 | If you encounter any bugs with the tool, please file an issue in the 87 | [Issues](https://github.com/Snowflake-Labs/SC.DDLExportScripts/issues) section of our GitHub repo. 88 | 89 | ## License 90 | 91 | These scripts are licensed under the [MIT license](https://github.com/Snowflake-Labs/SC.DDLExportScripts/blob/main/Teradata/License.txt).
92 | -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/scripts/create_ddls.btq: -------------------------------------------------------------------------------- 1 | **** Modified by: 2 | **** Modified Date: 3 | **** Description: 4 | 5 | .LOGON connection_string; 6 | 7 | **** CREATE TABLES FILE **** 8 | .EXPORT FILE = ../temp/SHOW_Tables.sql 9 | .SET WIDTH 65531 10 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW TABLE ' || TRIM(T1.DATABASENAME) || '.' ||TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND IN ('T','O','Q') AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 11 | .EXPORT RESET 12 | .OS rm ../output/object_extracts/DDL/DDL_Tables.sql 13 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Tables.sql 14 | .SET WIDTH 65531 15 | .RUN FILE = ../temp/SHOW_Tables.sql 16 | .EXPORT RESET 17 | 18 | 19 | **** CREATE JOIN INDEXES FILE **** 20 | .EXPORT FILE = ../temp/SHOW_Join_Indexes.sql 21 | .SET WIDTH 65531 22 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW JOIN INDEX ' || TRIM(T1.DATABASENAME) || '.' ||TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND IN ('I') AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 23 | .EXPORT RESET 24 | .OS rm ../output/object_extracts/DDL/DDL_Join_Indexes.sql 25 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Join_Indexes.sql 26 | .SET WIDTH 65531 27 | .RUN FILE = ../temp/SHOW_Join_Indexes.sql 28 | .EXPORT RESET 29 | 30 | 31 | **** CREATE VIEWS FILE **** 32 | .EXPORT FILE = ../temp/SHOW_Views.sql 33 | .SET WIDTH 65531 34 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW VIEW ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'V' AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 35 | .EXPORT RESET 36 | .OS rm ../output/object_extracts/DDL/DDL_Views.sql 37 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Views.sql 38 | .SET WIDTH 65531 39 | .RUN FILE = ../temp/SHOW_Views.sql 40 | .EXPORT RESET 41 | 42 | **** CREATE FUNCTIONS FILE **** 43 | .EXPORT FILE = ../temp/SHOW_Functions.sql 44 | .SET WIDTH 65531 45 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.SpecificNAME) || ' */'' as "--"; ' || 'SHOW FUNCTION ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.FUNCTIONNAME) || ';' "--" 46 | FROM DBC.FUNCTIONSV T1 WHERE include_databases AND exclude_databases GROUP BY 1; 47 | .EXPORT RESET 48 | .OS rm ../output/object_extracts/DDL/DDL_Functions.sql 49 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Functions.sql 50 | .SET WIDTH 65531 51 | .RUN FILE = ../temp/SHOW_Functions.sql 52 | .EXPORT RESET 53 | 54 | **** CREATE MACROS FILE **** 55 | .EXPORT FILE = ../temp/SHOW_Macros.sql 56 | .SET WIDTH 65531 57 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW MACRO ' || TRIM(T1.DATABASENAME) || '.' 
|| TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'M' AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 58 | .EXPORT RESET 59 | .OS rm ../output/object_extracts/DDL/DDL_Macros.sql 60 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Macros.sql 61 | .SET WIDTH 65531 62 | .RUN FILE = ../temp/SHOW_Macros.sql 63 | .EXPORT RESET 64 | 65 | 66 | **** CREATE PROCEDURES FILE **** 67 | .EXPORT FILE = ../temp/SHOW_Procedures.sql 68 | .SET WIDTH 65531 69 | SELECT 'SELECT ''/* '' || ''' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ' */'' as "--"; ' || 'SHOW PROCEDURE ' || TRIM(T1.DATABASENAME) || '.' || TRIM(T1.TABLENAME) || ';' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND = 'P' AND include_databases AND exclude_databases AND include_objects GROUP BY 1; 70 | .EXPORT RESET 71 | .OS rm ../output/object_extracts/DDL/DDL_Procedures.sql 72 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Procedures.sql 73 | .SET WIDTH 65531 74 | .RUN FILE = ../temp/SHOW_Procedures.sql 75 | .EXPORT RESET 76 | 77 | 78 | **** CREATE DATABASES FILE **** 79 | .OS rm ../output/object_extracts/DDL/DDL_Databases.sql 80 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_Databases.sql 81 | .SET WIDTH 65531 82 | SELECT 'CREATE DATABASE ' || TRIM(T1.DATABASENAME) || ' FROM DBC AS PERM = 100000000;' "--" FROM DBC.DATABASESV T1 WHERE include_databases AND exclude_databases GROUP BY 1 ORDER BY 1; 83 | .EXPORT RESET 84 | 85 | 86 | **** CREATE SNOWFLAKE SCHEMA FILE **** 87 | .OS rm ../output/object_extracts/DDL/DDL_SF_Schemas.sql 88 | .EXPORT FILE = ../output/object_extracts/DDL/DDL_SF_Schemas.sql 89 | .SET WIDTH 65531 90 | SELECT '/* ' || TRIM(T1.DATABASENAME) || ' */ ' || 'CREATE SCHEMA ' || TRIM(T1.DATABASENAME) || ';' "--" FROM DBC.DATABASESV T1 WHERE include_databases AND exclude_databases GROUP BY 1 ORDER BY 1; 91 | .EXPORT RESET 92 | 93 | 94 | .quit 0; 95 | -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/scripts/create_usage_reports.btq: -------------------------------------------------------------------------------- 1 | **** Modified by: 2 | **** Modified Date: 3 | **** Description: 4 | 5 | .LOGON connection_string; 6 | 7 | **** CREATE REPORTS **** 8 | 9 | .OS rm ../output/object_extracts/Usage/90_Day_CPU_Stats.txt 10 | .EXPORT FILE = ../output/object_extracts/Usage/90_Day_CPU_Stats.txt 11 | .SET format off 12 | .SET titledashes off 13 | .SET SEPARATOR = '|' 14 | .SET WIDTH 65531 15 | SELECT TheDate ||'|'|| TheTime ||'|'|| NodeID ||'|'|| CPUIdle ||'|'|| CPUIoWait ||'|'|| CPUUServ ||'|'|| CPUUExec FROM pdcrinfo.ResUsageSpma WHERE thedate between current_date - 91 AND current_date; 16 | .EXPORT RESET 17 | 18 | 19 | .OS rm ../output/object_extracts/Usage/90_Day_Node_Stats.txt 20 | .EXPORT FILE = ../output/object_extracts/Usage/90_Day_Node_Stats.txt 21 | .SET format off 22 | .SET titledashes off 23 | .SET SEPARATOR = '|' 24 | .SET WIDTH 65531 25 | SELECT distinct TheDate||'|'|| NodeID||'|'|| NodeType ||'|'|| PM_CPU_COD ||'|'|| WM_CPU_COD ||'|'|| PM_IO_COD ||'|'|| WM_IO_COD ||'|'|| NCPUs ||'|'|| Vproc1 ||'|'|| Vproc2 ||'|'|| VprocType1 ||'|'|| VprocType2 ||'|'|| MemSize ||'|'|| NodeNormFactor FROM pdcrinfo.ResUsageSpma WHERE thedate between current_date - 91 AND current_date; 26 | .EXPORT RESET 27 | 28 | 29 | .OS rm ../output/object_extracts/Usage/90_Day_Workload_Stats.txt 30 | .EXPORT FILE = ../output/object_extracts/Usage/90_Day_Workload_Stats.txt 31 | .SET format off 32 | .SET titledashes off 33 | .SET 
SEPARATOR = '|' 34 | .SET WIDTH 65531 35 | SELECT trim(a.LogDate)||'|'|| 36 | trim(a.UserName)||'|'|| 37 | trim(a.StatementType)||'|'|| 38 | trim(a.ErrorCode)||'|'|| 39 | trim(a.Single_AMP)||'|'|| 40 | trim(a.StartHour)||'|'|| 41 | trim(a.WDID)||'|'|| 42 | trim(a.WDName)||'|'|| 43 | trim(a.AMPCPU)||'|'|| 44 | trim(a.ParserCPU)||'|'|| 45 | trim(a.RequestCount) 46 | FROM 47 | (SELECT LogDate 48 | , UserName 49 | , StatementType 50 | , ErrorCode 51 | , case when NumOfActiveAMPs <=2 then 'Yes' else 'No ' end as Single_AMP 52 | , EXTRACT( HOUR FROM starttime) AS StartHour 53 | , WDID 54 | , WDName 55 | , cast(SUM(AMPCPUTime) as varchar(18)) AS AMPCPU 56 | , cast(SUM(ParserCPUTime) as varchar(18)) AS ParserCPU 57 | , CAST(COUNT(*) as varchar(18)) AS RequestCount 58 | FROM pdcrinfo.dbqlogtbl_hst 59 | WHERE LogDate between Current_Date - 91 and Current_Date 60 | AND NumOfActiveAMPs <> 0 61 | GROUP BY 1,2,3,4,5,6,7,8) a 62 | ORDER BY 1; 63 | .EXPORT RESET 64 | 65 | .quit 0; 66 | -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/scripts/data_profiling.btq: -------------------------------------------------------------------------------- 1 | **** Modified by: 2 | **** Modified Date: 3 | **** Description: 4 | 5 | .LOGON connection_string; 6 | 7 | **** NUMBERS WITH FLEXIBLE PRECISION **** 8 | .EXPORT FILE = ../temp/NUMBER_COLUMNS.sql 9 | .SET WIDTH 65531 10 | select 11 | 'select c1 || ''|'' || cast(c2 as varchar(3)) || ''|'' || cast(c3 as varchar(3)) as "--" from ( 12 | select ''' || T1.databasename || '|' || T1.tablename || '|' || columnname || '|'' AS c1, 13 | max(length(cast(' || columnname || ' as varchar(40))) - case when position(''.'' IN cast(' || columnname || ' as varchar(40))) = 0 then 0 else length(cast(' || columnname || ' as varchar(40))) - position(''.'' IN cast(' || columnname || ' as varchar(40))) + 1 end) as "c2", 14 | max(case when position(''.'' IN cast(' || columnname || ' as varchar(40))) = 0 then 0 else length(cast(' || columnname || ' as varchar(40))) - position(''.'' IN cast(' || columnname || ' as varchar(40))) end) as "c3" from ' || T1.databasename || '.' || T1.tablename || ') T1;' as "--" 15 | from 16 | dbc.columnsv T1, 17 | dbc.tablesv T2 18 | where 19 | columntype = 'N' 20 | and UPPER(T1.DATABASENAME) = UPPER(T2.DATABASENAME) 21 | and UPPER(T1.TABLENAME) = UPPER(T2.TABLENAME) 22 | and T2.TABLEKIND IN ('T','O', 'Q') 23 | and decimaltotaldigits = -128 24 | and decimalfractionaldigits = -128 25 | AND include_databases AND exclude_databases AND include_objects 26 | ; 27 | .EXPORT RESET 28 | .OS rm ../output/object_extracts/Reports/Data_Profile_Numbers.txt 29 | .EXPORT FILE = ../output/object_extracts/Reports/Data_Profile_Numbers.txt 30 | .SET WIDTH 65531 31 | .RUN FILE = ../temp/NUMBER_COLUMNS.sql 32 | .EXPORT RESET 33 | 34 | .quit 0; 35 | -------------------------------------------------------------------------------- /[ARCHIVED] TeradataScripts/Teradata/scripts/invalid_objects.btq: -------------------------------------------------------------------------------- 1 | **** Modified by: 2 | **** Modified Date: 3 | **** Description: 4 | 5 | .LOGON connection_string; 6 | 7 | **** CREATE INVALID VIEWS LIST FILE **** 8 | .SET ERROROUT STDOUT 9 | .EXPORT FILE = ../temp/Invalid_Object_Test.sql 10 | .SET WIDTH 65531 11 | SELECT 'SELECT * FROM ' || TRIM(T1.DATABASENAME) || '.' 
|| TRIM(T1.TABLENAME) || ' WHERE 1 = 2;' "--" FROM DBC.TABLESV T1 WHERE T1.TABLEKIND IN ('V') AND include_databases AND exclude_databases AND include_objects; 12 | .EXPORT RESET 13 | 14 | .RUN FILE = ../temp/Invalid_Object_Test.sql 15 | 16 | .quit 0; 17 | -------------------------------------------------------------------------------- /setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SC.DDLExportScripts Repository Setup Script 4 | # Run this script after cloning the repository 5 | # This script sets up the development environment for contributors 6 | 7 | echo "===============================================" 8 | echo " SC.DDLExportScripts Repository Setup" 9 | echo "===============================================" 10 | echo "" 11 | echo "Welcome to the SC.DDLExportScripts repository!" 12 | echo "" 13 | 14 | if [ -f ".git/hooks/post-checkout" ] && [ -f ".git/hooks/post-merge" ]; then 15 | echo "✅ Git hooks are already installed!" 16 | echo "The repository is properly configured for version management." 17 | else 18 | echo "🔧 Setting up Git hooks..." 19 | echo "" 20 | 21 | if ! command -v pre-commit &> /dev/null; then 22 | echo "⚠️ pre-commit is not installed." 23 | echo "Please install it using one of these commands:" 24 | echo " • pip install pre-commit" 25 | echo " • brew install pre-commit" 26 | echo " • conda install -c conda-forge pre-commit" 27 | echo "" 28 | read -p "Do you want to continue without pre-commit? (y/N): " -n 1 -r 29 | echo 30 | if [[ ! $REPLY =~ ^[Yy]$ ]]; then 31 | echo "Please install pre-commit and run this script again." 32 | exit 1 33 | fi 34 | fi 35 | 36 | echo "Installing Git hooks..." 37 | ./.github/scripts/install-hooks.sh 38 | 39 | if [ $? -eq 0 ]; then 40 | echo "" 41 | echo "✅ Setup completed successfully!" 42 | else 43 | echo "" 44 | echo "❌ Setup failed. Please check the error messages above." 45 | exit 1 46 | fi 47 | fi 48 | 49 | echo "" 50 | echo "📚 Available DDL export tools for:" 51 | echo " • BigQuery" 52 | echo " • Databricks" 53 | echo " • DB2" 54 | echo " • Hive" 55 | echo " • Netezza" 56 | echo " • Oracle" 57 | echo " • Redshift" 58 | echo " • SQL Server" 59 | echo " • Synapse" 60 | echo " • Teradata" 61 | echo " • Vertica" 62 | echo "" 63 | echo "Check the README.md files in each folder for specific instructions." 64 | echo "===============================================" 65 | --------------------------------------------------------------------------------