├── .github ├── FUNDING.yml └── workflows │ ├── release.yml │ └── test.yml ├── .gitignore ├── .readthedocs.yaml ├── LICENSE ├── README.md ├── docs ├── assets │ └── logo.png ├── functions │ ├── api │ │ ├── api.md │ │ ├── auth.md │ │ └── scopes.md │ ├── cd │ │ └── support_files.md │ ├── core │ │ ├── capacities.md │ │ ├── connections.md │ │ ├── deployment_pipelines.md │ │ ├── folders.md │ │ ├── gateways.md │ │ ├── git.md │ │ └── workspaces.md │ ├── dmv │ │ └── dmv.md │ ├── helpers │ │ ├── data_pipelines.md │ │ ├── dataflows_gen1.md │ │ ├── dataflows_gen2.md │ │ ├── folders.md │ │ ├── items.md │ │ ├── lakehouses.md │ │ ├── notebooks.md │ │ ├── reports.md │ │ ├── semantic_models.md │ │ ├── warehouses.md │ │ └── workspaces.md │ ├── items │ │ ├── data_pipelines.md │ │ ├── dataflows_gen1.md │ │ ├── dataflows_gen2.md │ │ ├── items.md │ │ ├── lakehouses.md │ │ ├── notebooks.md │ │ ├── reports.md │ │ ├── semantic_models.md │ │ └── warehouses.md │ └── utils │ │ ├── decorators.md │ │ ├── exceptions.md │ │ ├── logging.md │ │ ├── logging_system.md │ │ ├── schemas.md │ │ └── utils.md ├── index.md ├── requirements.txt └── stylesheets │ └── extra.css ├── logging_system.md ├── mkdocs.yml ├── poetry.lock ├── pyproject.toml ├── requirements.txt ├── scripts ├── generate_init.py └── test_package.py ├── src └── pyfabricops │ ├── __init__.py │ ├── _version.py │ ├── api │ ├── __init__.py │ ├── api.py │ ├── auth.py │ └── scopes.py │ ├── cd │ ├── __init__.py │ └── support_files.py │ ├── core │ ├── __init__.py │ ├── capacities.py │ ├── connections.py │ ├── deployment_pipelines.py │ ├── folders.py │ ├── gateways.py │ ├── gateways_encryp_creds.py │ ├── git.py │ └── workspaces.py │ ├── dmv │ ├── __init__.py │ └── dmv.py │ ├── helpers │ ├── __init__.py │ ├── data_pipelines.py │ ├── dataflows_gen1.py │ ├── dataflows_gen2.py │ ├── folders.py │ ├── items.py │ ├── lakehouses.py │ ├── notebooks.py │ ├── reports.py │ ├── semantic_models.py │ ├── warehouses.py │ └── workspaces.py │ ├── items │ ├── 
__init__.py │ ├── data_pipelines.py │ ├── dataflows_gen1.py │ ├── dataflows_gen2.py │ ├── items.py │ ├── lakehouses.py │ ├── notebooks.py │ ├── reports.py │ ├── semantic_models.py │ ├── shortcuts.py │ └── warehouses.py │ └── utils │ ├── __init__.py │ ├── decorators.py │ ├── exceptions.py │ ├── logging.py │ ├── schemas.py │ └── utils.py └── tests ├── __init__.py └── test_basic.py /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [alisonpezzott] 2 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Build and Release 2 | 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v4 14 | 15 | - name: Set up Python 16 | uses: actions/setup-python@v4 17 | with: 18 | python-version: "3.11" 19 | 20 | - name: Install Poetry 21 | uses: snok/install-poetry@v1 22 | with: 23 | version: latest 24 | virtualenvs-create: true 25 | virtualenvs-in-project: true 26 | 27 | - name: Cache dependencies 28 | uses: actions/cache@v3 29 | with: 30 | path: ~/.cache/pypoetry 31 | key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} 32 | restore-keys: | 33 | ${{ runner.os }}-poetry- 34 | 35 | - name: Install dependencies 36 | run: poetry install --no-interaction 37 | 38 | - name: Run tests 39 | run: poetry run pytest 40 | 41 | - name: Build package 42 | run: poetry build 43 | 44 | - name: Check package 45 | run: poetry run pip install twine && poetry run twine check dist/* 46 | 47 | - name: Publish to PyPI 48 | if: github.event_name == 'release' 49 | env: 50 | POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }} 51 | run: poetry publish 52 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: 
-------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: [ main, dev ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | python-version: ["3.12.10"] 15 | 16 | steps: 17 | - uses: actions/checkout@v4 18 | 19 | - name: Set up Python ${{ matrix.python-version }} 20 | uses: actions/setup-python@v4 21 | with: 22 | python-version: ${{ matrix.python-version }} 23 | 24 | - name: Install Poetry 25 | uses: snok/install-poetry@v1 26 | with: 27 | version: latest 28 | virtualenvs-create: true 29 | virtualenvs-in-project: true 30 | 31 | - name: Load cached venv 32 | id: cached-poetry-dependencies 33 | uses: actions/cache@v3 34 | with: 35 | path: .venv 36 | key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }} 37 | 38 | - name: Install dependencies 39 | if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 40 | run: poetry install --no-interaction --no-root 41 | 42 | - name: Install project 43 | run: poetry install --no-interaction 44 | 45 | - name: Run linting 46 | run: poetry run task lint 47 | 48 | - name: Run tests with coverage 49 | env: 50 | FAB_CLIENT_ID: ${{ secrets.FAB_CLIENT_ID }} 51 | FAB_CLIENT_SECRET: ${{ secrets.FAB_CLIENT_SECRET }} 52 | FAB_TENANT_ID: ${{ secrets.FAB_TENANT_ID }} 53 | FAB_USERNAME: ${{ secrets.FAB_USERNAME }} 54 | FAB_PASSWORD: ${{ secrets.FAB_PASSWORD }} 55 | AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} 56 | AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} 57 | AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} 58 | AZURE_KEY_VAULT_NAME: ${{ secrets.AZURE_KEY_VAULT_NAME }} 59 | run: poetry run pytest --cov=pyfabricops --cov-report=xml --cov-report=term || echo "Tests completed with warnings" 60 | 61 | - name: Upload coverage reports to Codecov 62 | uses: codecov/codecov-action@v5 63 | with: 64 | token: ${{ secrets.CODECOV_TOKEN }} 65 | 
fail_ci_if_error: false 66 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/python 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=python 3 | 4 | ### Python ### 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | cover/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | db.sqlite3 66 | db.sqlite3-journal 67 | 68 | # Flask stuff: 69 | instance/ 70 | .webassets-cache 71 | 72 | # Scrapy stuff: 73 | .scrapy 74 | 75 | # Sphinx documentation 76 | docs/_build/ 77 | 78 | # PyBuilder 79 | .pybuilder/ 80 | target/ 81 | 82 | # Jupyter Notebook 83 | .ipynb_checkpoints 84 | 85 | # IPython 86 | profile_default/ 87 | ipython_config.py 88 | 89 | # pyenv 90 | # For a library or package, you might want to ignore these files since the code is 91 | # intended to run in multiple environments; otherwise, check them in: 92 | # .python-version 93 | 94 | # 
pipenv 95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 98 | # install all needed dependencies. 99 | #Pipfile.lock 100 | 101 | # poetry 102 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 103 | # This is especially recommended for binary packages to ensure reproducibility, and is more 104 | # commonly ignored for libraries. 105 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 106 | #poetry.lock 107 | 108 | # pdm 109 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 110 | #pdm.lock 111 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 112 | # in version control. 113 | # https://pdm.fming.dev/#use-with-ide 114 | .pdm.toml 115 | 116 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 117 | __pypackages__/ 118 | 119 | # Celery stuff 120 | celerybeat-schedule 121 | celerybeat.pid 122 | 123 | # SageMath parsed files 124 | *.sage.py 125 | 126 | # Environments 127 | .env 128 | .envexample 129 | .venv 130 | env/ 131 | venv/ 132 | ENV/ 133 | env.bak/ 134 | venv.bak/ 135 | .vs 136 | .vs/ 137 | .vscode 138 | .vscode/ 139 | 140 | # Spyder project settings 141 | .spyderproject 142 | .spyproject 143 | 144 | # Rope project settings 145 | .ropeproject 146 | 147 | # mkdocs documentation 148 | /site 149 | 150 | # mypy 151 | .mypy_cache/ 152 | .dmypy.json 153 | dmypy.json 154 | 155 | # Pyre type checker 156 | .pyre/ 157 | 158 | # pytype static type analyzer 159 | .pytype/ 160 | 161 | # Cython debug symbols 162 | cython_debug/ 163 | 164 | # PyCharm 165 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 166 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 167 | # and can be added to the global gitignore or merged into this file. For a more nuclear 168 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 169 | #.idea/ 170 | 171 | ### Python Patch ### 172 | # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration 173 | poetry.toml 174 | 175 | # ruff 176 | .ruff_cache/ 177 | 178 | # LSP config files 179 | pyrightconfig.json 180 | 181 | # End of https://www.toptal.com/developers/gitignore/api/python 182 | 183 | # Generated using ignr.py - github.com/Antrikshy/ignr.py 184 | 185 | test.ipynb 186 | # This file is used to test the functionality of the project. 187 | # It is a Jupyter Notebook file that contains code and documentation. 188 | 189 | **/.pbi/localSettings.json 190 | **/.pbi/cache.abf 191 | # These files are used by Power BI to store local settings and cache. 192 | # They are not needed for version control and can be ignored. 
193 | 194 | 195 | *_stg 196 | **/_stg/** 197 | *_defs 198 | **/_defs/** 199 | # These files are used to store staging and definition information. 200 | # They are not needed for version control and can be ignored. 201 | 202 | 01_fabric_simple 203 | 01_fabric_simple_backup 204 | PowerBI_001 205 | # These directories are used to store specific configurations and data for the project. 206 | branches.json 207 | connections.json 208 | gateways.json 209 | workspaces_roles.json 210 | connections_roles.json 211 | # scripts Exposed on v0.1.8 212 | PUBLISHING.md 213 | .pypirc.example 214 | ProjectTest/ 215 | PowerBIDemo/ 216 | template_report_definition.pbir 217 | fabric_items.py 218 | test.py 219 | test.ipynb -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | version: 2 5 | 6 | # Set the OS, Python version and other tools you might need 7 | build: 8 | os: ubuntu-22.04 9 | tools: 10 | python: "3.11" 11 | 12 | # Build documentation in the docs/ directory with MkDocs 13 | mkdocs: 14 | configuration: mkdocs.yml 15 | 16 | # Optionally declare the Python requirements required to build your docs 17 | python: 18 | install: 19 | - requirements: docs/requirements.txt 20 | - method: pip 21 | path: . 
22 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Alison Pezzott 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Welcome to pyfabricops 2 | 3 | [![PyPI version](https://img.shields.io/pypi/v/pyfabricops.svg)](https://pypi.org/project/pyfabricops/) 4 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 5 | [![Python versions](https://img.shields.io/pypi/pyversions/pyfabricops.svg)](https://pypi.org/project/pyfabricops/) 6 | [![Typing status](https://img.shields.io/badge/typing-PEP%20561-blue)](https://peps.python.org/pep-0561/) 7 | [![Tests](https://github.com/alisonpezzott/pyfabricops/actions/workflows/test.yml/badge.svg)](https://github.com/alisonpezzott/pyfabricops/actions/workflows/test.yml) 8 | 9 | > A Python wrapper library for Microsoft Fabric (and Power BI) operations, providing a simple interface to the official Fabric REST APIs. Falls back to Power BI REST APIs where needed. Designed to run in Python notebooks, pure Python scripts or integrated into YAML-based workflows for CI/CD. 10 | Access the repository on [GitHub](https://github.com/alisonpezzott/pyfabricops). 11 | 12 | ## 🚀 Features 13 | 14 | - Authenticate using environment variables (GitHub Secrets, ADO Secrets, AzKeyVault, .env ...)
15 | - Manage workspaces, capacities, semantic models, lakehouses, reports and connections 16 | - Execute Git operations and automate Fabric deployment flows (Power BI inclusive) 17 | - Capture and manage Git branches automatically for CI/CD scenarios 18 | - Many use cases and scenarios including YAML for test and deploy using GitHub Actions 19 | 20 | ## 📃 Documentation 21 | Access: [https://pyfabricops.readthedocs.io/en/latest/](https://pyfabricops.readthedocs.io/en/latest/) 22 | 23 | ## ✅ Requirements 24 | 25 | - Requires Python >= 3.10, <= 3.12.10 26 | 27 | ## ⚒️ Installation 28 | 29 | ```bash 30 | pip install -U pyfabricops 31 | ``` 32 | 33 | ## ⚙️ Usage 34 | 35 | > Create a repository and clone it locally. 36 | > Create a notebook or a script and import the library: 37 | 38 | ```python 39 | # Import the library 40 | import pyfabricops as pf 41 | ``` 42 | 43 | ### Set the authentication provider 44 | 45 | > Set auth environment variables according to your authentication method 46 | #### Environment variables (.env, GitHub Secrets, ADO Secrets...) 47 | ```python 48 | pf.set_auth_provider("env") 49 | ``` 50 | 51 | This is the default behavior. 52 | You can set these in a .env file or directly in your environment (GitHub Secrets, ADO Secrets...).
53 | 54 | Example .env file: 55 | ``` 56 | FAB_CLIENT_ID=your_client_id_here 57 | FAB_CLIENT_SECRET=your_client_secret_here 58 | FAB_TENANT_ID=your_tenant_id_here 59 | FAB_USERNAME=your_username_here # Necessary for some functions with no SPN support 60 | FAB_PASSWORD=your_password_here # Necessary for some functions with no SPN support 61 | ``` 62 | 63 | #### Azure Key Vault 64 | 65 | ```python 66 | pf.set_auth_provider("vault") 67 | ``` 68 | Ensure you have the required Azure Key Vault secrets set: 69 | ``` 70 | AZURE_CLIENT_ID=your_azure_client_id_here 71 | AZURE_CLIENT_SECRET=your_azure_client_secret_here 72 | AZURE_TENANT_ID=your_azure_tenant_id_here 73 | AZURE_KEY_VAULT_NAME=your_key_vault_name_here 74 | ``` 75 | 76 | #### OAuth (Interactive) 77 | 78 | ```python 79 | pf.set_auth_provider("oauth") 80 | ``` 81 | This will open a browser window for user authentication. 82 | 83 | > Create a repository and clone it locally. 84 | > Prepare your environment with the required variables according to your authentication method (GitHub Secrets, ADO Secrets, AzKeyVault, .env ...) 85 | 86 | 87 | ### Branches configuration 88 | 89 | Create a branches.json file in the root of your repository to define your branch mappings: 90 | 91 | ```json 92 | { 93 | "main": "-PRD", 94 | "master": "-PRD", 95 | "dev": "-DEV", 96 | "staging": "-STG" 97 | } 98 | ``` 99 | This file maps your local branches to Fabric branches, allowing the library to automatically manage branch names for CI/CD scenarios. 
100 | 101 | 102 | ## 🪄 Examples 103 | 104 | Visit: [https://github.com/alisonpezzott/pyfabricops-examples](https://github.com/alisonpezzott/pyfabricops-examples) 105 | 106 | 107 | ## 🧬 Project Structure 108 | 109 | ```bash 110 | src/ 111 | └── pyfabricops/ 112 | ├── api/ 113 | │ ├── __init__.py 114 | │ ├── api.py 115 | │ ├── auth.py 116 | │ └── scopes.py 117 | ├── cd/ 118 | │ ├── __init__.py 119 | │ └── support_files.py 120 | ├── core/ 121 | │ ├── __init__.py 122 | │ ├── capacities.py 123 | │ ├── connections.py 124 | │ ├── deployment_pipelines.py 125 | │ ├── folders.py 126 | │ ├── gateways.py 127 | │ ├── gateways_encryp_creds.py 128 | │ ├── git.py 129 | │ └── workspaces.py 130 | ├── dmv/ 131 | │ ├── __init__.py 132 | │ ├── dmv.py 133 | ├── helpers/ 134 | │ ├── __init__.py 135 | │ ├── dataflows_gen1.py 136 | │ ├── dataflows_gen2.py 137 | │ ├── data_pipelines.py 138 | │ ├── folders.py 139 | │ ├── items.py 140 | │ ├── lakehouses.py 141 | │ ├── notebooks.py 142 | │ ├── reports.py 143 | │ ├── semantic_models.py 144 | │ ├── warehouses.py 145 | │ └── workspaces.py 146 | ├── items/ 147 | │ ├── __init__.py 148 | │ ├── dataflows_gen1.py 149 | │ ├── dataflows_gen2.py 150 | │ ├── data_pipelines.py 151 | │ ├── items.py 152 | │ ├── lakehouses.py 153 | │ ├── notebooks.py 154 | │ ├── reports.py 155 | │ ├── semantic_models.py 156 | │ ├── shortcuts.py 157 | │ └── warehouses.py 158 | ├── utils/ 159 | │ ├── __init__.py 160 | │ ├── decorators.py 161 | │ ├── exceptions.py 162 | │ ├── logging.py 163 | │ ├── schemas.py 164 | │ └── utils.py 165 | ├── __init__.py 166 | └── _version.py 167 | ``` 168 | 169 | ### Logging configuration 170 | 171 | The custom logging system implemented in `pyfabricops` provides a complete and flexible solution for monitoring and debugging the library. 
172 | 173 | 174 | #### 🎨 **Custom Formatting** 175 | - **Automatic colors**: Different colors for each log level (DEBUG=Cyan, INFO=Green, WARNING=Yellow, ERROR=Red, CRITICAL=Magenta) 176 | - **Multiple styles**: 177 | - `minimal`: Only timestamp, level and message 178 | - `standard`: Includes module name in compact form 179 | - `detailed`: Complete format with all information 180 | 181 | #### 🎛️ **Easy Configuration** 182 | ```python 183 | import pyfabricops as pf 184 | 185 | # Basic configuration 186 | pf.setup_logging(level='INFO', format_style='standard') 187 | 188 | # Debug mode for development 189 | pf.enable_debug_mode(include_external=False) 190 | 191 | # Disable logging completely 192 | pf.disable_logging() 193 | 194 | # Reset to default configuration 195 | pf.reset_logging() 196 | ``` 197 | 198 | For complete logging configuration options, refer to the [logging_system.md](logging_system.md) 199 | 200 | 201 | ## ❤️Contributing 202 | 1. Fork this repository 203 | 2. Create a new branch (feat/my-feature) 204 | 3. Run `poetry install` to set up the development environment 205 | 4. Run `poetry run task test` to run tests 206 | 5. Submit a pull request 🚀 207 | 208 | ## 🚀 Publishing 209 | 210 | ### For Maintainers 211 | 212 | To publish a new version to PyPI: 213 | 214 | 1. Update the version in `pyproject.toml` and `src/pyfabricops/_version.py` 215 | 2. Commit and push changes 216 | 3. Create a new release on GitHub with a tag (e.g., `v0.1.0`) 217 | 4. 
The GitHub Action will automatically: 218 | - Run tests 219 | - Build the package 220 | - Publish to PyPI 221 | 222 | ### Testing with TestPyPI 223 | 224 | ```bash 225 | # Configure TestPyPI 226 | poetry config repositories.testpypi https://test.pypi.org/legacy/ 227 | poetry config pypi-token.testpypi 228 | 229 | # Build and publish to TestPyPI 230 | poetry build 231 | poetry publish -r testpypi 232 | 233 | # Install from TestPyPI 234 | pip install --index-url https://test.pypi.org/simple/ pyfabricops 235 | ``` 236 | 237 | ### Prerequisites for Publishing 238 | 239 | - Set up a PyPI account at https://pypi.org/ 240 | - Generate an API token at https://pypi.org/manage/account/token/ 241 | - Add the token as `PYPI_TOKEN` secret in GitHub repository settings 242 | 243 | ## 🐞 Issues 244 | If you encounter any issues, please report them at [https://github.com/alisonpezzott/pyfabricops/issues](https://github.com/alisonpezzott/pyfabricops/issues) 245 | 246 | ## ⚖️ License 247 | This project is licensed under the MIT License – see the [LICENSE](LICENSE) file for details. 
248 | 249 | ## 🌟 Acknowledgements 250 | Created and maintained by Alison Pezzott 251 | Feedback, issues and stars are welcome 🌟 252 | 253 | [![YouTube subscribers](https://img.shields.io/youtube/channel/subscribers/UCst_4Wi9DkGAc28uEPlHHHw?style=flat&logo=youtube&logoColor=ff0000&colorA=fff&colorB=000)](https://www.youtube.com/@alisonpezzott?sub_confirmation=1) 254 | [![GitHub followers](https://img.shields.io/github/followers/alisonpezzott?style=flat&logo=github&logoColor=000&colorA=fff&colorB=000)](https://github.com/alisonpezzott) 255 | [![LinkedIn](https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff)](https://linkedin.com/in/alisonpezzott) 256 | [![Discord](https://img.shields.io/badge/Discord-%235865F2.svg?&logo=discord&logoColor=white)](https://discord.gg/sJTDvWz9sM) 257 | [![Telegram](https://img.shields.io/badge/Telegram-2CA5E0?logo=telegram&logoColor=white)](https://t.me/alisonpezzott) 258 | [![Instagram](https://img.shields.io/badge/Instagram-%23E4405F.svg?logo=Instagram&logoColor=white)](https://instagram.com/alisonpezzott) 259 | 260 | 261 | -------------------------------------------------------------------------------- /docs/assets/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alisonpezzott/pyfabricops/d20a5d57b4f84246973b4c0c6bfa35c66dd6a1e0/docs/assets/logo.png -------------------------------------------------------------------------------- /docs/functions/api/api.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.api.api -------------------------------------------------------------------------------- /docs/functions/api/auth.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.api.auth -------------------------------------------------------------------------------- /docs/functions/api/scopes.md: 
-------------------------------------------------------------------------------- 1 | ::: pyfabricops.api.scopes -------------------------------------------------------------------------------- /docs/functions/cd/support_files.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.cd.support_files -------------------------------------------------------------------------------- /docs/functions/core/capacities.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.core.capacities -------------------------------------------------------------------------------- /docs/functions/core/connections.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.core.connections -------------------------------------------------------------------------------- /docs/functions/core/deployment_pipelines.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.core.deployment_pipelines -------------------------------------------------------------------------------- /docs/functions/core/folders.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.core.folders -------------------------------------------------------------------------------- /docs/functions/core/gateways.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.core.gateways -------------------------------------------------------------------------------- /docs/functions/core/git.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.core.git -------------------------------------------------------------------------------- /docs/functions/core/workspaces.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.core.workspaces 
-------------------------------------------------------------------------------- /docs/functions/dmv/dmv.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.dmv.dmv -------------------------------------------------------------------------------- /docs/functions/helpers/data_pipelines.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.data_pipelines -------------------------------------------------------------------------------- /docs/functions/helpers/dataflows_gen1.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.dataflows_gen1 -------------------------------------------------------------------------------- /docs/functions/helpers/dataflows_gen2.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.dataflows_gen2 -------------------------------------------------------------------------------- /docs/functions/helpers/folders.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.folders -------------------------------------------------------------------------------- /docs/functions/helpers/items.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.items -------------------------------------------------------------------------------- /docs/functions/helpers/lakehouses.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.lakehouses -------------------------------------------------------------------------------- /docs/functions/helpers/notebooks.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.notebooks -------------------------------------------------------------------------------- 
/docs/functions/helpers/reports.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.reports -------------------------------------------------------------------------------- /docs/functions/helpers/semantic_models.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.semantic_models -------------------------------------------------------------------------------- /docs/functions/helpers/warehouses.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.warehouses -------------------------------------------------------------------------------- /docs/functions/helpers/workspaces.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.helpers.workspaces -------------------------------------------------------------------------------- /docs/functions/items/data_pipelines.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.items.data_pipelines -------------------------------------------------------------------------------- /docs/functions/items/dataflows_gen1.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.items.dataflows_gen1 -------------------------------------------------------------------------------- /docs/functions/items/dataflows_gen2.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.items.dataflows_gen2 -------------------------------------------------------------------------------- /docs/functions/items/items.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.items.items -------------------------------------------------------------------------------- /docs/functions/items/lakehouses.md: 
-------------------------------------------------------------------------------- 1 | ::: pyfabricops.items.lakehouses -------------------------------------------------------------------------------- /docs/functions/items/notebooks.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.items.notebooks -------------------------------------------------------------------------------- /docs/functions/items/reports.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.items.reports -------------------------------------------------------------------------------- /docs/functions/items/semantic_models.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.items.semantic_models -------------------------------------------------------------------------------- /docs/functions/items/warehouses.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.items.warehouses -------------------------------------------------------------------------------- /docs/functions/utils/decorators.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.utils.decorators -------------------------------------------------------------------------------- /docs/functions/utils/exceptions.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.utils.exceptions -------------------------------------------------------------------------------- /docs/functions/utils/logging.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.utils.logging -------------------------------------------------------------------------------- /docs/functions/utils/logging_system.md: -------------------------------------------------------------------------------- 1 | # Custom Logging System - 
pyfabricops 2 | 3 | ## 📋 Feature Overview 4 | 5 | The custom logging system implemented in `pyfabricops` provides a complete and flexible solution for monitoring and debugging the library. 6 | 7 | ## ✨ Main Features 8 | 9 | ### 🎨 **Custom Formatting** 10 | - **Automatic colors**: Different colors for each log level (DEBUG=Cyan, INFO=Green, WARNING=Yellow, ERROR=Red, CRITICAL=Magenta) 11 | - **Multiple styles**: 12 | - `minimal`: Only timestamp, level and message 13 | - `standard`: Includes module name in compact form 14 | - `detailed`: Complete format with all information 15 | 16 | ### 🎛️ **Easy Configuration** 17 | ```python 18 | import pyfabricops as pf 19 | 20 | # Basic configuration 21 | pf.setup_logging(level='INFO', format_style='standard') 22 | 23 | # Debug mode for development 24 | pf.enable_debug_mode(include_external=False) 25 | 26 | # Disable logging completely 27 | pf.disable_logging() 28 | 29 | # Reset to default configuration 30 | pf.reset_logging() 31 | ``` 32 | 33 | ### 📁 **File Logging** 34 | ```python 35 | # Save logs to file with automatic rotation 36 | pf.setup_logging( 37 | level='DEBUG', 38 | log_file='logs/pyfabricops.log', 39 | max_file_size=10*1024*1024, # 10MB 40 | backup_count=5 41 | ) 42 | ``` 43 | 44 | ### 🔍 **Smart Filtering** 45 | - **By default**: Shows only pyfabricops logs 46 | - **Optional**: Include logs from external libraries (requests, urllib3, etc.) 
47 | - **Granular control**: Configuration by module 48 | 49 | ### 🖥️ **Terminal Detection** 50 | - **Automatic colors**: Detects if terminal supports colors 51 | - **CI/CD friendly**: Automatically disables colors in build environments 52 | - **Environment variables**: Respects `NO_COLOR` and `FORCE_COLOR` 53 | 54 | ## 🚀 **Usage Examples** 55 | 56 | ### Basic Usage 57 | ```python 58 | import pyfabricops as pf 59 | 60 | # Default setup 61 | pf.setup_logging(level='INFO') 62 | 63 | # Use library normally 64 | workspaces = pf.list_workspaces() 65 | ``` 66 | 67 | ### Development and Debug 68 | ```python 69 | # Complete debug mode 70 | pf.enable_debug_mode(include_external=True) 71 | 72 | # Execute functions - you'll see all API details 73 | definition = pf.get_semantic_model_definition('workspace', 'model') 74 | ``` 75 | 76 | ### Production 77 | ```python 78 | # Minimal logging in production 79 | pf.setup_logging( 80 | level='WARNING', 81 | format_style='minimal', 82 | include_colors=False, 83 | log_file='/var/log/app/pyfabricops.log' 84 | ) 85 | ``` 86 | 87 | ## 📊 **Benefits** 88 | 89 | 1. **🎯 Efficient Debugging**: See exactly which APIs are being called 90 | 2. **🎨 Visual Appeal**: Colors and clear formatting make reading easier 91 | 3. **⚡ Performance**: Optimized system with smart filters 92 | 4. **🔧 Flexibility**: Multiple configuration options 93 | 5. **📝 Audit**: File logs with automatic rotation 94 | 6. 
**🔒 Security**: Sensitive headers are automatically masked 95 | 96 | ## 🛠️ **Available Functions** 97 | 98 | | Function | Description | 99 | |----------|-------------| 100 | | `setup_logging()` | Complete logging system configuration | 101 | | `enable_debug_mode()` | Quick debug mode activation | 102 | | `disable_logging()` | Disables all logs | 103 | | `reset_logging()` | Returns to default configuration | 104 | | `get_logger()` | Gets a configured logger | 105 | 106 | ## 🎨 **Visual Example** 107 | 108 | ``` 109 | 13:00:25 | pyfabricops._core | INFO | Making GET request to https://api.fabric.microsoft.com/v1/workspaces 110 | 13:00:25 | pyfabricops._core | DEBUG | Headers: {'Content-Type': 'application/json', 'Authorization': 'Bearer ***'} 111 | 13:00:25 | pyfabricops._core | DEBUG | Response status: 200 112 | ``` 113 | 114 | ## 🎯 **Use Cases** 115 | 116 | 1. **Development**: Detailed debug with colors 117 | 2. **Testing**: Structured logs for analysis 118 | 3. **Production**: Controlled logging with files 119 | 4. **CI/CD**: Logs without colors, consistent format 120 | 5. 
**Troubleshooting**: Complete API call tracing 121 | 122 | --- 123 | 124 | *This system transforms the development and debugging experience with pyfabricops, offering complete visibility over library operations in an elegant and configurable way.* 125 | -------------------------------------------------------------------------------- /docs/functions/utils/schemas.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.utils.schemas -------------------------------------------------------------------------------- /docs/functions/utils/utils.md: -------------------------------------------------------------------------------- 1 | ::: pyfabricops.utils.utils -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | |![project_logo](assets/logo.png){width="96" .left} 2 | 3 | # Welcome to pyfabricops 4 | 5 | [![PyPI version](https://img.shields.io/pypi/v/pyfabricops.svg)](https://pypi.org/project/pyfabricops/) 6 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 7 | [![Python versions](https://img.shields.io/pypi/pyversions/pyfabricops.svg)](https://pypi.org/project/pyfabricops/) 8 | [![Typing status](https://img.shields.io/badge/typing-PEP%20561-blue)](https://peps.python.org/pep-0561/) 9 | [![Tests](https://github.com/alisonpezzott/pyfabricops/actions/workflows/test.yml/badge.svg)](https://github.com/alisonpezzott/pyfabricops/actions/workflows/test.yml) 10 | 11 | > A Python wrapper library for Microsoft Fabric (and Power BI) operations, providing a simple interface to the official Fabric REST APIs. Falls back to Power BI REST APIs where needed. Designed to run in Python notebooks, pure Python scripts or integrated into YAML-based workflows for CI/CD. 12 | Access to the repositoy on [GitHub](https://github.com/alisonpezzott/pyfabricops). 
13 | 14 | ## 🚀 Features 15 | 16 | - Authenticate using environment variables (GitHub Secrets, ADO Secrets, AzKeyVault, .env ...) 17 | - Manage workspaces, capacities, semantic models, lakehouses, reports and connections 18 | - Execute Git operations and automate Fabric deployment flows (Power BI inclusive) 19 | - Capture and Manage Git branches automatically for CI/CD scenarios 20 | - Many use cases and scenarios including yaml for test and deploy using GitHub Actions 21 | 22 | ## 📃 Documentation 23 | Access: [https://pyfabricops.readthedocs.io/en/latest/](https://pyfabricops.readthedocs.io/en/latest/) 24 | 25 | ## ✅ Requirements 26 | 27 | - Requires Python >= 3.10 <=3.12.10 28 | 29 | ## ⚒️ Installation 30 | 31 | ```bash 32 | pip install -U pyfabricops 33 | ``` 34 | 35 | ## ⚙️ Usage 36 | 37 | > Create a repository and clone it locally. 38 | > Create a notebook or a script and import the library: 39 | 40 | ```python 41 | # Import the library 42 | import pyfabricops as pf 43 | ``` 44 | 45 | ### Set the authentication provider 46 | 47 | > Set auth environment variables acording to your authentication method 48 | #### Environment variables (.env, GitHub Secrets, Ado Secrets...) 49 | ```python 50 | pf.set_auth_provider("env") 51 | ``` 52 | 53 | This is the default behavior. 54 | You can set these in a .env file or directly in your environment (GitHub Secrets, ADO Secrets...). 
55 | 56 | Example .env file: 57 | ``` 58 | FAB_CLIENT_ID=your_client_id_here 59 | FAB_CLIENT_SECRET=your_client_secret_here 60 | FAB_TENANT_ID=your_tenant_id_here 61 | FAB_USERNAME=your_username_here # Necessary for some functions with no SPN support 62 | FAB_PASSWORD=your_password_here # Necessary for some functions with no SPN support 63 | ``` 64 | 65 | #### Azure Key Vault 66 | 67 | ```python 68 | pf.set_auth_provider("vault") 69 | ``` 70 | Ensure you have the required Azure Key Vault secrets set: 71 | ``` 72 | AZURE_CLIENT_ID=your_azure_client_id_here 73 | AZURE_CLIENT_SECRET=your_azure_client_secret_here 74 | AZURE_TENANT_ID=your_azure_tenant_id_here 75 | AZURE_KEY_VAULT_NAME=your_key_vault_name_here 76 | ``` 77 | 78 | #### OAuth (Interactive) 79 | 80 | ```python 81 | pf.set_auth_provider("oauth") 82 | ``` 83 | This will open a browser window for user authentication. 84 | 85 | ``` 86 | 87 | > Create a repository and clone it locally. 88 | > Prepare your environment with the required variables according to your authentication method (GitHub Secrets, ADO Secrets, AzKeyVault, .env ...) 89 | 90 | 91 | ### Branches configuration 92 | 93 | Create a branches.json file in the root of your repository to define your branch mappings: 94 | 95 | ```json 96 | { 97 | "main": "-PRD", 98 | "master": "-PRD", 99 | "dev": "-DEV", 100 | "staging": "-STG" 101 | } 102 | ``` 103 | This file maps your local branches to Fabric branches, allowing the library to automatically manage branch names for CI/CD scenarios. 
104 | 105 | 106 | ## 🪄 Examples 107 | 108 | Visit: [https://github.com/alisonpezzott/pyfabricops-examples](https://github.com/alisonpezzott/pyfabricops-examples) 109 | 110 | 111 | ## 🧬 Project Structure 112 | 113 | ```bash 114 | src/ 115 | └── pyfabricops/ 116 | ├── api/ 117 | │ ├── __init__.py 118 | │ ├── api.py 119 | │ ├── auth.py 120 | │ └── scopes.py 121 | ├── cd/ 122 | │ ├── __init__.py 123 | │ └── support_files.py 124 | ├── core/ 125 | │ ├── __init__.py 126 | │ ├── capacities.py 127 | │ ├── connections.py 128 | │ ├── deployment_pipelines.py 129 | │ ├── folders.py 130 | │ ├── gateways.py 131 | │ ├── gateways_encryp_creds.py 132 | │ ├── git.py 133 | │ └── workspaces.py 134 | ├── dmv/ 135 | │ ├── __init__.py 136 | │ ├── dmv.py 137 | ├── helpers/ 138 | │ ├── __init__.py 139 | │ ├── dataflows_gen1.py 140 | │ ├── dataflows_gen2.py 141 | │ ├── data_pipelines.py 142 | │ ├── folders.py 143 | │ ├── items.py 144 | │ ├── lakehouses.py 145 | │ ├── notebooks.py 146 | │ ├── reports.py 147 | │ ├── semantic_models.py 148 | │ ├── warehouses.py 149 | │ └── workspaces.py 150 | ├── items/ 151 | │ ├── __init__.py 152 | │ ├── dataflows_gen1.py 153 | │ ├── dataflows_gen2.py 154 | │ ├── data_pipelines.py 155 | │ ├── items.py 156 | │ ├── lakehouses.py 157 | │ ├── notebooks.py 158 | │ ├── reports.py 159 | │ ├── semantic_models.py 160 | │ ├── shortcuts.py 161 | │ └── warehouses.py 162 | ├── utils/ 163 | │ ├── __init__.py 164 | │ ├── decorators.py 165 | │ ├── exceptions.py 166 | │ ├── logging.py 167 | │ ├── schemas.py 168 | │ └── utils.py 169 | ├── __init__.py 170 | └── _version.py 171 | ``` 172 | 173 | ### Logging configuration 174 | 175 | The custom logging system implemented in `pyfabricops` provides a complete and flexible solution for monitoring and debugging the library. 
176 | 177 | 178 | #### 🎨 **Custom Formatting** 179 | - **Automatic colors**: Different colors for each log level (DEBUG=Cyan, INFO=Green, WARNING=Yellow, ERROR=Red, CRITICAL=Magenta) 180 | - **Multiple styles**: 181 | - `minimal`: Only timestamp, level and message 182 | - `standard`: Includes module name in compact form 183 | - `detailed`: Complete format with all information 184 | 185 | #### 🎛️ **Easy Configuration** 186 | ```python 187 | import pyfabricops as pf 188 | 189 | # Basic configuration 190 | pf.setup_logging(level='INFO', format_style='standard') 191 | 192 | # Debug mode for development 193 | pf.enable_debug_mode(include_external=False) 194 | 195 | # Disable logging completely 196 | pf.disable_logging() 197 | 198 | # Reset to default configuration 199 | pf.reset_logging() 200 | ``` 201 | 202 | For complete logging configuration options, refer to the [logging_system.md](functions/utils/logging_system.md) 203 | 204 | 205 | ## ❤️Contributing 206 | 1. Fork this repository 207 | 2. Create a new branch (feat/my-feature) 208 | 3. Run `poetry install` to set up the development environment 209 | 4. Run `poetry run task test` to run tests 210 | 5. Submit a pull request 🚀 211 | 212 | ## 🚀 Publishing 213 | 214 | ### For Maintainers 215 | 216 | To publish a new version to PyPI: 217 | 218 | 1. Update the version in `pyproject.toml` and `src/pyfabricops/_version.py` 219 | 2. Commit and push changes 220 | 3. Create a new release on GitHub with a tag (e.g., `v0.1.0`) 221 | 4. 
The GitHub Action will automatically: 222 | - Run tests 223 | - Build the package 224 | - Publish to PyPI 225 | 226 | ### Testing with TestPyPI 227 | 228 | ```bash 229 | # Configure TestPyPI 230 | poetry config repositories.testpypi https://test.pypi.org/legacy/ 231 | poetry config pypi-token.testpypi 232 | 233 | # Build and publish to TestPyPI 234 | poetry build 235 | poetry publish -r testpypi 236 | 237 | # Install from TestPyPI 238 | pip install --index-url https://test.pypi.org/simple/ pyfabricops 239 | ``` 240 | 241 | ### Prerequisites for Publishing 242 | 243 | - Set up a PyPI account at https://pypi.org/ 244 | - Generate an API token at https://pypi.org/manage/account/token/ 245 | - Add the token as `PYPI_TOKEN` secret in GitHub repository settings 246 | 247 | ## 🐞 Issues 248 | If you encounter any issues, please report them at [https://github.com/alisonpezzott/pyfabricops/issues](https://github.com/alisonpezzott/pyfabricops/issues) 249 | 250 | ## ⚖️ License 251 | This project is licensed under the MIT License – see the [LICENSE](https://github.com/alisonpezzott/pyfabricops/blob/main/LICENSE) file for details. 
252 | 253 | ## 🌟 Acknowledgements 254 | Created and maintained by Alison Pezzott 255 | Feedback, issues and stars are welcome 🌟 256 | 257 | [![YouTube subscribers](https://img.shields.io/youtube/channel/subscribers/UCst_4Wi9DkGAc28uEPlHHHw?style=flat&logo=youtube&logoColor=ff0000&colorA=fff&colorB=000)](https://www.youtube.com/@alisonpezzott?sub_confirmation=1) 258 | [![GitHub followers](https://img.shields.io/github/followers/alisonpezzott?style=flat&logo=github&logoColor=000&colorA=fff&colorB=000)](https://github.com/alisonpezzott) 259 | [![LinkedIn](https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff)](https://linkedin.com/in/alisonpezzott) 260 | [![Discord](https://img.shields.io/badge/Discord-%235865F2.svg?&logo=discord&logoColor=white)](https://discord.gg/sJTDvWz9sM) 261 | [![Telegram](https://img.shields.io/badge/Telegram-2CA5E0?logo=telegram&logoColor=white)](https://t.me/alisonpezzott) 262 | [![Instagram](https://img.shields.io/badge/Instagram-%23E4405F.svg?logo=Instagram&logoColor=white)](https://instagram.com/alisonpezzott) 263 | 264 | 265 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | mkdocs-material==9.6.15 2 | mkdocstrings==0.29.1 3 | mkdocstrings-python==1.16.12 4 | griffe==1.7.3 5 | pymdown-extensions==10.16 6 | -------------------------------------------------------------------------------- /docs/stylesheets/extra.css: -------------------------------------------------------------------------------- 1 | .left { 2 | display: flexbox; 3 | position: relative; 4 | } -------------------------------------------------------------------------------- /logging_system.md: -------------------------------------------------------------------------------- 1 | # Custom Logging System - pyfabricops 2 | 3 | ## 📋 Feature Overview 4 | 5 | The custom logging system implemented in 
`pyfabricops` provides a complete and flexible solution for monitoring and debugging the library. 6 | 7 | ## ✨ Main Features 8 | 9 | ### 🎨 **Custom Formatting** 10 | - **Automatic colors**: Different colors for each log level (DEBUG=Cyan, INFO=Green, WARNING=Yellow, ERROR=Red, CRITICAL=Magenta) 11 | - **Multiple styles**: 12 | - `minimal`: Only timestamp, level and message 13 | - `standard`: Includes module name in compact form 14 | - `detailed`: Complete format with all information 15 | 16 | ### 🎛️ **Easy Configuration** 17 | ```python 18 | import pyfabricops as pf 19 | 20 | # Basic configuration 21 | pf.setup_logging(level='INFO', format_style='standard') 22 | 23 | # Debug mode for development 24 | pf.enable_debug_mode(include_external=False) 25 | 26 | # Disable logging completely 27 | pf.disable_logging() 28 | 29 | # Reset to default configuration 30 | pf.reset_logging() 31 | ``` 32 | 33 | ### 📁 **File Logging** 34 | ```python 35 | # Save logs to file with automatic rotation 36 | pf.setup_logging( 37 | level='DEBUG', 38 | log_file='logs/pyfabricops.log', 39 | max_file_size=10*1024*1024, # 10MB 40 | backup_count=5 41 | ) 42 | ``` 43 | 44 | ### 🔍 **Smart Filtering** 45 | - **By default**: Shows only pyfabricops logs 46 | - **Optional**: Include logs from external libraries (requests, urllib3, etc.) 
47 | - **Granular control**: Configuration by module 48 | 49 | ### 🖥️ **Terminal Detection** 50 | - **Automatic colors**: Detects if terminal supports colors 51 | - **CI/CD friendly**: Automatically disables colors in build environments 52 | - **Environment variables**: Respects `NO_COLOR` and `FORCE_COLOR` 53 | 54 | ## 🚀 **Usage Examples** 55 | 56 | ### Basic Usage 57 | ```python 58 | import pyfabricops as pf 59 | 60 | # Default setup 61 | pf.setup_logging(level='INFO') 62 | 63 | # Use library normally 64 | workspaces = pf.list_workspaces() 65 | ``` 66 | 67 | ### Development and Debug 68 | ```python 69 | # Complete debug mode 70 | pf.enable_debug_mode(include_external=True) 71 | 72 | # Execute functions - you'll see all API details 73 | definition = pf.get_semantic_model_definition('workspace', 'model') 74 | ``` 75 | 76 | ### Production 77 | ```python 78 | # Minimal logging in production 79 | pf.setup_logging( 80 | level='WARNING', 81 | format_style='minimal', 82 | include_colors=False, 83 | log_file='/var/log/app/pyfabricops.log' 84 | ) 85 | ``` 86 | 87 | ## 📊 **Benefits** 88 | 89 | 1. **🎯 Efficient Debugging**: See exactly which APIs are being called 90 | 2. **🎨 Visual Appeal**: Colors and clear formatting make reading easier 91 | 3. **⚡ Performance**: Optimized system with smart filters 92 | 4. **🔧 Flexibility**: Multiple configuration options 93 | 5. **📝 Audit**: File logs with automatic rotation 94 | 6. 
**🔒 Security**: Sensitive headers are automatically masked 95 | 96 | ## 🛠️ **Available Functions** 97 | 98 | | Function | Description | 99 | |----------|-------------| 100 | | `setup_logging()` | Complete logging system configuration | 101 | | `enable_debug_mode()` | Quick debug mode activation | 102 | | `disable_logging()` | Disables all logs | 103 | | `reset_logging()` | Returns to default configuration | 104 | | `get_logger()` | Gets a configured logger | 105 | 106 | ## 🎨 **Visual Example** 107 | 108 | ``` 109 | 13:00:25 | pyfabricops._core | INFO | Making GET request to https://api.fabric.microsoft.com/v1/workspaces 110 | 13:00:25 | pyfabricops._core | DEBUG | Headers: {'Content-Type': 'application/json', 'Authorization': 'Bearer ***'} 111 | 13:00:25 | pyfabricops._core | DEBUG | Response status: 200 112 | ``` 113 | 114 | ## 🎯 **Use Cases** 115 | 116 | 1. **Development**: Detailed debug with colors 117 | 2. **Testing**: Structured logs for analysis 118 | 3. **Production**: Controlled logging with files 119 | 4. **CI/CD**: Logs without colors, consistent format 120 | 5. **Troubleshooting**: Complete API call tracing 121 | 122 | --- 123 | 124 | *This system transforms the development and debugging experience with pyfabricops, offering complete visibility over library operations in an elegant and configurable way.* 125 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: pyfabricops 2 | repo_url: https://github.com/alisonpezzott/pyfabricops 3 | repo_name: alisonpezzott/pyfabricops 4 | site_description: A modern Python wrapper for Microsoft Fabric and Power BI operations with official REST APIs. 
5 | site_author: Alison Pezzott 6 | edit_uri: tree/main/docs 7 | 8 | nav: 9 | - Home: index.md 10 | - Functions: 11 | - API: 12 | - API: functions/api/api.md 13 | - Auth: functions/api/auth.md 14 | - Scopes: functions/api/scopes.md 15 | - CD (Continuous Deployment): 16 | - Support files: functions/cd/support_files.md 17 | - Core: 18 | - Capacities: functions/core/capacities.md 19 | - Connections: functions/core/connections.md 20 | - Folders: functions/core/folders.md 21 | - Gateways: functions/core/gateways.md 22 | - Git: functions/core/git.md 23 | - Workspaces: functions/core/workspaces.md 24 | - DMV: 25 | - DMV: functions/dmv/dmv.md 26 | - Helpers: 27 | - Data Pipelines: functions/helpers/data_pipelines.md 28 | - Dataflows Gen1: functions/helpers/dataflows_gen1.md 29 | - Dataflows Gen2: functions/helpers/dataflows_gen2.md 30 | - Folders: functions/helpers/folders.md 31 | - Items: functions/helpers/items.md 32 | - Lakehouses: functions/helpers/lakehouses.md 33 | - Notebooks: functions/helpers/notebooks.md 34 | - Reports: functions/helpers/reports.md 35 | - Semantic Models: functions/helpers/semantic_models.md 36 | - Warehouses: functions/helpers/warehouses.md 37 | - Workspaces: functions/helpers/workspaces.md 38 | - Items: 39 | - Data Pipelines: functions/items/data_pipelines.md 40 | - Dataflows Gen1: functions/items/dataflows_gen1.md 41 | - Dataflows Gen2: functions/items/dataflows_gen2.md 42 | - Items: functions/items/items.md 43 | - Lakehouses: functions/items/lakehouses.md 44 | - Notebooks: functions/items/notebooks.md 45 | - Reports: functions/items/reports.md 46 | - Semantic Models: functions/items/semantic_models.md 47 | - Warehouses: functions/items/warehouses.md 48 | - Utils: 49 | - Decorators: functions/utils/decorators.md 50 | - Exceptions: functions/utils/exceptions.md 51 | - Logging: functions/utils/logging.md 52 | - Logging System: functions/utils/logging_system.md 53 | - Schemas: functions/utils/schemas.md 54 | - Utils: functions/utils/utils.md 55 | 
56 | theme: 57 | name: material 58 | palette: 59 | scheme: default 60 | primary: teal 61 | accent: teal 62 | logo: assets/logo.png 63 | favicon: assets/logo.png 64 | 65 | markdown_extensions: 66 | - attr_list 67 | - pymdownx.highlight 68 | - pymdownx.superfences 69 | 70 | extra_css: 71 | - stylesheets/extra.css 72 | 73 | plugins: 74 | - search 75 | - mkdocstrings: 76 | handlers: 77 | python: 78 | paths: 79 | - src 80 | options: 81 | show_source: true 82 | docstring_style: google -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "pyfabricops" 3 | version = "0.2.7" 4 | description = "A Python wrapper library for Microsoft Fabric (and Power BI) operations, providing a simple interface to the official Fabric REST APIs. Falls back to Power BI REST APIs where needed. Designed to run in Python notebooks, pure Python scripts or integrated into YAML-based workflows for CI/CD." 
5 | authors = [ 6 | {name = "alisonpezzott",email = "alisonpezzott@gmail.com"} 7 | ] 8 | license = "MIT" 9 | license-files = ["LICENSE"] 10 | readme = "README.md" 11 | requires-python = ">=3.10, <= 3.12.10" 12 | classifiers = [ 13 | "Development Status :: 3 - Alpha", 14 | "Intended Audience :: Developers", 15 | "License :: OSI Approved :: MIT License", 16 | "Programming Language :: Python :: 3.10", 17 | "Programming Language :: Python :: 3.11", 18 | "Programming Language :: Python :: 3.12", 19 | "Operating System :: OS Independent", 20 | "Environment :: Console", 21 | "Framework :: Jupyter", 22 | "Framework :: IPython", 23 | "Intended Audience :: System Administrators", 24 | "Topic :: Software Development :: Libraries", 25 | "Topic :: Software Development :: Testing", 26 | "Topic :: Software Development :: Build Tools", 27 | "Topic :: Education", 28 | "Topic :: Scientific/Engineering :: Information Analysis", 29 | ] 30 | 31 | dependencies = [ 32 | "azure-storage-blob==12.25.1", 33 | "azure-identity==1.16.0", 34 | "azure-keyvault-secrets==4.9.0", 35 | "cryptography>=3.4.8,<47.0.0", 36 | "json5==0.9.28", 37 | "pandas==2.3.0", 38 | "pyadomd>=0.1.1", 39 | "python-dotenv==1.1.1", 40 | "pythonnet>=3.0.0", 41 | "requests==2.32.4" 42 | ] 43 | 44 | [project.urls] 45 | Repository = "https://github.com/alisonpezzott/pyfabricops.git" 46 | Docs = "https://pyfabricops.readthedocs.io/en/latest/" 47 | Issues = "https://github.com/alisonpezzott/pyfabricops/issues" 48 | Examples = "https://github.com/alisonpezzott/pyfabricops-examples.git" 49 | 50 | [tool.poetry] 51 | packages = [{include = "pyfabricops", from = "src"}] 52 | 53 | [tool.poetry.group.dev.dependencies] 54 | pytest = "^8.4.1" 55 | pytest-cov = "^6.2.1" 56 | blue = "^0.9.1" 57 | isort = "^6.0.1" 58 | taskipy = "^1.14.1" 59 | ipykernel = "^6.29.5" 60 | twine = "^6.1.0" 61 | 62 | [tool.poetry.group.doc.dependencies] 63 | mkdocs-material = "^9.6.15" 64 | mkdocstrings = "^0.29.1" 65 | mkdocstrings-python = "^1.16.12" 66 | 67 
| [build-system] 68 | requires = ["poetry-core>=2.0.0,<3.0.0"] 69 | build-backend = "poetry.core.masonry.api" 70 | 71 | [tool.pytest.ini.options] 72 | python = "." 73 | addopts = "--doctest-modules" 74 | 75 | [tool.blue] 76 | line_length = 79 77 | quote_style = "single" 78 | 79 | [tool.isort] 80 | profile = "black" 81 | line_length = 79 82 | 83 | [tool.taskipy.tasks] 84 | lint = "blue --check --diff . && isort --check --diff ." 85 | docs = "poetry run mkdocs serve" 86 | pre_test = "task lint" 87 | test = "poetry run pytest -s -x --cov=pyfabricops -v" 88 | post_test = "poetry run coverage html && poetry run python -c \"import webbrowser, os; webbrowser.open('file://' + os.path.abspath('htmlcov/index.html'))\"" 89 | generate_init = "poetry run python scripts/generate_init.py" 90 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | azure-storage-blob==12.25.1 2 | azure-identity==1.16.0 3 | azure-keyvault-secrets==4.9.0 4 | cryptography>=3.4.8,<47.0.0 5 | json5==0.9.28 6 | pandas==2.3.0 7 | pyadomd>=0.1.1 8 | python-dotenv==1.1.1 9 | pythonnet>=3.0.0 10 | requests==2.32.4 -------------------------------------------------------------------------------- /scripts/generate_init.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import os 3 | import subprocess 4 | from pathlib import Path 5 | 6 | 7 | def get_public_functions_and_classes(file_path): 8 | """Extracts functions and publics class from a python file.""" 9 | public_items = [] 10 | 11 | try: 12 | with open(file_path, 'r', encoding='utf-8') as f: 13 | content = f.read() 14 | 15 | tree = ast.parse(content) 16 | 17 | # Check if __all__ is defined in the module 18 | has_all = False 19 | all_items_from_module = [] 20 | 21 | for node in ast.walk(tree): 22 | if isinstance(node, ast.Assign): 23 | for target in node.targets: 24 | if 
"""Auto-generate ``src/pyfabricops/__init__.py`` from the package modules."""

import ast
import os
import subprocess
from pathlib import Path


def get_public_functions_and_classes(file_path):
    """Extract the public API names from a Python file.

    If the module defines ``__all__``, its string entries are returned
    verbatim.  Otherwise the names of all top-level functions and classes
    that do not start with an underscore are returned, in definition order.

    Args:
        file_path: Path to the ``.py`` file to inspect.

    Returns:
        list[str]: Public names exported by the module (empty on parse error).
    """
    public_items = []

    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            tree = ast.parse(f.read())

        # Honor an explicit __all__ declaration when present.
        # Only ast.Constant is checked: the project requires Python >= 3.10,
        # where string literals always parse as ast.Constant (ast.Str is
        # deprecated and slated for removal).
        for node in tree.body:
            if not isinstance(node, ast.Assign):
                continue
            for target in node.targets:
                if isinstance(target, ast.Name) and target.id == '__all__':
                    if isinstance(node.value, ast.List):
                        return [
                            elt.value
                            for elt in node.value.elts
                            if isinstance(elt, ast.Constant)
                            and isinstance(elt.value, str)
                        ]
                    return []

        # Fall back to every public top-level function/class.  Iterating
        # tree.body (instead of ast.walk) is deliberate: ast.walk would also
        # pick up methods and nested functions, which are NOT importable
        # from the module and would produce broken import lines.
        for node in tree.body:
            if isinstance(
                node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)
            ) and not node.name.startswith('_'):
                public_items.append(node.name)

    except Exception as e:
        print(f'Error processing {file_path}: {e}')

    return public_items


def generate_init_file(src_dir):
    """Build the content of the package ``__init__.py``.

    Scans ``src_dir`` (non-recursively) and its first-level subpackages for
    public names, producing re-export import lines plus a sorted ``__all__``.

    Args:
        src_dir: Root directory of the package (e.g. ``src/pyfabricops``).

    Returns:
        str: The full text of the generated ``__init__.py``.
    """
    src_path = Path(src_dir)

    imports = []
    all_items = []

    # Top-level modules (skip the init itself and the version module,
    # which gets a dedicated import below).
    for py_file in src_path.glob('*.py'):
        if py_file.name in ('__init__.py', '_version.py'):
            continue
        public_items = get_public_functions_and_classes(py_file)
        if public_items:
            imports.append(
                f"from .{py_file.stem} import {', '.join(public_items)}"
            )
            all_items.extend(public_items)

    # First-level subpackages (private dirs and caches excluded).
    for subdir in src_path.iterdir():
        if (
            not subdir.is_dir()
            or subdir.name.startswith('_')
            or subdir.name == '__pycache__'
        ):
            continue
        for py_file in subdir.glob('*.py'):
            if py_file.name == '__init__.py':
                continue
            public_items = get_public_functions_and_classes(py_file)
            if public_items:
                # Subpackage modules are imported via their dotted path.
                imports.append(
                    f"from .{subdir.name}.{py_file.stem} import "
                    f"{', '.join(public_items)}"
                )
                all_items.extend(public_items)

    # Assemble the file: version import, sorted imports, then __all__.
    content = ['from ._version import __version__', '']
    content.extend(sorted(imports))
    content.append('')

    content.append('__all__ = [')
    # De-duplicate once; sorted() already yields a stable, ordered list.
    content.extend(f"    '{item}'," for item in sorted(set(all_items)))
    content.append(']')

    return '\n'.join(content)


def run_formatter(tool: str, file_path: str) -> bool:
    """Run a code formatter on the specified file.

    Args:
        tool: Executable name of the formatter (e.g. ``blue``, ``isort``).
        file_path: File to format in place.

    Returns:
        bool: True when the formatter exited successfully.
    """
    print(f'🔄 Running {tool} on {file_path}...')
    try:
        result = subprocess.run(
            [tool, file_path],
            check=True,
            capture_output=True,
            text=True,
            encoding='utf-8',
            errors='ignore',  # skip undecodable characters in tool output
        )
        print(f'✅ {tool} completed successfully!')
        if result.stdout:
            print(f'Output: {result.stdout}')
        return True
    except subprocess.CalledProcessError as e:
        print(f'❌ {tool} failed: {e}')
        if e.stderr:
            print(f'Error output: {e.stderr}')
        return False
    except FileNotFoundError:
        print(
            f'❌ {tool} not found. Make sure {tool} is installed and in PATH.'
        )
        return False
    except UnicodeDecodeError as e:
        print(f'❌ {tool} encoding error: {e}')
        # Retry without capturing output, which avoids the decode step.
        try:
            subprocess.run([tool, file_path], check=True)
            print(f'✅ {tool} completed successfully (without output capture)!')
            return True
        except subprocess.CalledProcessError:
            return False


if __name__ == '__main__':
    src_dir = 'src/pyfabricops'
    content = generate_init_file(src_dir)

    # Write the generated __init__.py.
    init_file = os.path.join(src_dir, '__init__.py')
    with open(init_file, 'w', encoding='utf-8') as f:
        f.write(content)

    print(f'✅ {init_file} generated automatically!')
    # Each exported item occupies one trailing-comma line inside __all__,
    # so counting commas after the '__all__' marker counts the items.
    total_items = content.split('__all__')[1].count(',')
    print(f'📦 {total_items} exported items.')

    # Normalize formatting of the generated file.
    run_formatter('blue', init_file)
    run_formatter('isort', init_file)
4 | """ 5 | 6 | 7 | def test_import(): 8 | """Test if the package can be imported successfully.""" 9 | try: 10 | import pyfabricops 11 | 12 | print( 13 | f'✅ pyfabricops imported successfully, version: {pyfabricops.__version__}' 14 | ) 15 | return True 16 | except ImportError as e: 17 | print(f'❌ Failed to import pyfabricops: {e}') 18 | return False 19 | 20 | 21 | def test_basic_functions(): 22 | """Test if basic functions are available.""" 23 | try: 24 | import pyfabricops as pf 25 | 26 | # Test if key functions are available 27 | functions_to_test = [ 28 | 'set_auth_provider', 29 | 'list_workspaces', 30 | 'list_capacities', 31 | '_api_request', 32 | ] 33 | 34 | for func_name in functions_to_test: 35 | if hasattr(pf, func_name): 36 | print(f"✅ Function '{func_name}' is available") 37 | else: 38 | print(f"❌ Function '{func_name}' is missing") 39 | return False 40 | 41 | return True 42 | except Exception as e: 43 | print(f'❌ Error testing functions: {e}') 44 | return False 45 | 46 | 47 | def main(): 48 | """Main test function.""" 49 | print('🧪 Testing pyfabricops package...') 50 | print('=' * 50) 51 | 52 | success = True 53 | 54 | # Test import 55 | success &= test_import() 56 | print() 57 | 58 | # Test basic functions 59 | success &= test_basic_functions() 60 | print() 61 | 62 | if success: 63 | print('🎉 All tests passed! Package is ready for use.') 64 | else: 65 | print('❌ Some tests failed. 
Please check the package.') 66 | 67 | return 0 if success else 1 68 | 69 | 70 | if __name__ == '__main__': 71 | exit(main()) 72 | -------------------------------------------------------------------------------- /src/pyfabricops/_version.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.2.7' 2 | -------------------------------------------------------------------------------- /src/pyfabricops/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alisonpezzott/pyfabricops/d20a5d57b4f84246973b4c0c6bfa35c66dd6a1e0/src/pyfabricops/api/__init__.py -------------------------------------------------------------------------------- /src/pyfabricops/api/scopes.py: -------------------------------------------------------------------------------- 1 | TOKEN_TEMPLATE = ( 2 | f'https://login.microsoftonline.com/{{tenant_id}}/oauth2/v2.0/token' 3 | ) 4 | FABRIC_SCOPE = 'https://api.fabric.microsoft.com/.default' 5 | POWERBI_SCOPE = 'https://analysis.windows.net/powerbi/api/.default' 6 | POWERBI_API = 'https://api.powerbi.com/v1.0/myorg' 7 | FABRIC_API = 'https://api.fabric.microsoft.com/v1' 8 | PLATFORM_SCHEMA = 'https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json' 9 | PLATFORM_VERSION = '2.0' 10 | -------------------------------------------------------------------------------- /src/pyfabricops/cd/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alisonpezzott/pyfabricops/d20a5d57b4f84246973b4c0c6bfa35c66dd6a1e0/src/pyfabricops/cd/__init__.py -------------------------------------------------------------------------------- /src/pyfabricops/cd/support_files.py: -------------------------------------------------------------------------------- 1 | from ..utils.utils import get_logger 2 | 3 | logger = get_logger(__name__) 4 | 5 | 
import os

# ---------------------------------------------------------------------------
# Support-file templates. Each constant is a dict with:
#   'path'    - destination path, relative to the current working directory
#   'content' - literal text written to that file
# ---------------------------------------------------------------------------

ENV = {
    'path': '.env',
    'content': """FAB_CLIENT_ID=your_client_id_here
FAB_CLIENT_SECRET=your_client_secret_here
FAB_TENANT_ID=your_tenant_id_here
FAB_USERNAME=your_username_here
FAB_PASSWORD=your_password_here
AZURE_CLIENT_ID=your_azure_client_id_here
AZURE_CLIENT_SECRET=your_azure_client_secret_here
AZURE_TENANT_ID=your_azure_tenant_id_here
AZURE_KEY_VAULT_NAME=your_key_vault_name_here
DATABASE_USERNAME=your_database_username_here
DATABASE_PASSWORD=your_database_password_here
GH_TOKEN=your_github_token_here""",
}


BRANCHES = {
    'path': 'branches.json',
    'content': """{
    "main": "-PRD",
    "master": "-PRD",
    "dev": "-DEV",
    "staging": "-STG"
}""",
}


# Fix: placeholder UUIDs now use the canonical 8-4-4-4-12 hex layout; the
# original templates had 13 digits in the last group (not a valid UUID).
WORKSPACES_ROLES = {
    'path': 'workspaces_roles.json',
    'content': """[
    {
        "user_uuid": "00000000-0000-0000-0000-000000000000",
        "user_type": "User",
        "role": "Admin"
    },
    {
        "user_uuid": "00000000-0000-0000-0000-000000000000",
        "user_type": "Group",
        "role": "Member"
    },
    {
        "user_uuid": "00000000-0000-0000-0000-000000000000",
        "user_type": "ServicePrincipal",
        "role": "Contributor"
    },
    {
        "user_uuid": "00000000-0000-0000-0000-000000000000",
        "user_type": "ServicePrincipalProfile",
        "role": "Viewer"
    }
]""",
}


CONNECTIONS_ROLES = {
    'path': 'connections_roles.json',
    'content': """[
    {
        "user_uuid": "00000000-0000-0000-0000-000000000000",
        "user_type": "User",
        "role": "Owner"
    },
    {
        "user_uuid": "00000000-0000-0000-0000-000000000000",
        "user_type": "Group",
        "role": "User"
    },
    {
        "user_uuid": "00000000-0000-0000-0000-000000000000",
        "user_type": "ServicePrincipal",
        "role": "UserWithReshare"
    },
    {
        "user_uuid": "00000000-0000-0000-0000-000000000000",
        "user_type": "ServicePrincipalProfile",
        "role": "UserWithReshare"
    }
]""",
}


GITIGNORE = {
    'path': '.gitignore',
    'content': """**/.pbi/localSettings.json
**/.pbi/cache.abf
**/__pycache__/**
**/_stg/**
.vscode/
.venv
.env
**/py_fab.egg-info
**/dist
**/build
metadata/""",
}


GITATTRIBUTES = {
    'path': '.gitattributes',
    'content': """src/**/config.json merge=union
# This file is used to define attributes for paths in the repository.
# The 'merge=union' attribute allows for union merging of JSON files in the 'src' directory.
# This means that when merging changes, if there are conflicts, the resulting file will contain all unique elements from the conflicting files.
""",
}


# Fix: the original template began with a stray '(' before the heading,
# which would have been written verbatim into the generated README.
SRC = {
    'path': 'src/README.md',
    'content': """# Source Directory
This directory contains the source code for the project.
It is structured to facilitate development and deployment of the application.""",
}


def create_support_files():
    """
    Create support files with predefined content for PyFabricOps CI/CD operations.

    Writes every template above to its relative path, creating parent
    directories as needed. Existing files are overwritten without warning.
    """
    files = [
        ENV,
        BRANCHES,
        WORKSPACES_ROLES,
        CONNECTIONS_ROLES,
        GITIGNORE,
        GITATTRIBUTES,
        SRC,
    ]

    # Create directories and files
    for file_dict in files:
        path = file_dict.get('path')
        if path:
            # '.' fallback keeps makedirs happy for root-level files.
            os.makedirs(os.path.dirname(path) or '.', exist_ok=True)
            with open(path, 'w', encoding='utf-8') as f:
                f.write(file_dict['content'])
            # NOTE(review): 'success' is a custom log level provided by the
            # project's get_logger helper — not part of stdlib logging.
            logger.success(f'Created {path}')
27 | """ 28 | return api_request('/capacities', support_pagination=True) 29 | 30 | 31 | def get_capacity_id(capacity_name: str) -> str | None: 32 | """ 33 | Retrieves the ID of a capacity by its name. 34 | 35 | Args: 36 | capacity_name (str): The name of the capacity. 37 | 38 | Returns: 39 | str | None: The ID of the capacity if found, otherwise None. 40 | """ 41 | capacities = list_capacities(df=False) 42 | 43 | for _capacity in capacities: 44 | if _capacity['displayName'] == capacity_name: 45 | return _capacity['id'] 46 | 47 | logger.warning(f"Capacity '{capacity_name}' not found.") 48 | return None 49 | 50 | 51 | def resolve_capacity(capacity: str) -> str | None: 52 | """ 53 | Resolves a capacity name to its ID. 54 | 55 | Args: 56 | capacity (str): The name of the capacity. 57 | 58 | Returns: 59 | str | None: The ID of the capacity if found, otherwise None. 60 | """ 61 | if is_valid_uuid(capacity): 62 | return capacity 63 | else: 64 | return get_capacity_id(capacity) 65 | -------------------------------------------------------------------------------- /src/pyfabricops/core/folders.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List, Optional, Union 2 | 3 | from pandas import DataFrame 4 | 5 | from ..api.api import api_request 6 | from ..core.workspaces import resolve_workspace 7 | from ..utils.decorators import df 8 | from ..utils.logging import get_logger 9 | from ..utils.utils import is_valid_uuid 10 | 11 | logger = get_logger(__name__) 12 | 13 | 14 | @df 15 | def list_folders( 16 | workspace: str, 17 | *, 18 | df: Optional[bool] = True, 19 | ) -> Union[DataFrame, List[Dict[str, Any]], None]: 20 | """ 21 | List folders in a workspace 22 | 23 | Args: 24 | workspace (str): The workspace to list folders from. 25 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 26 | If False, returns a list of dictionaries. 
def get_folder_id(workspace: str, folder_name: str) -> Union[str, None]:
    """
    Retrieves the ID of a folder by its name.

    Args:
        workspace (str): The name or ID of the workspace to search.
        folder_name (str): The name of the folder.

    Returns:
        str | None: The ID of the folder if found, otherwise None.
    """
    # list_folders may return None when the workspace has no folders; fall
    # back to an empty list so the miss degrades to a warning, not TypeError.
    folders = (
        list_folders(
            workspace=resolve_workspace(workspace),
            df=False,
        )
        or []
    )
    for _folder in folders:
        if _folder['displayName'] == folder_name:
            return _folder['id']
    logger.warning(
        f"Folder '{folder_name}' not found in workspace '{workspace}'."
    )
    return None


def resolve_folder(workspace: str, folder: str) -> Union[str, None]:
    """
    Resolves a folder name to its ID.

    Args:
        workspace (str): The name or ID of the workspace.
        folder (str): The name or ID of the folder.

    Returns:
        str | None: The ID of the folder if found, otherwise None.
    """
    # A valid UUID is assumed to already be the folder ID.
    if is_valid_uuid(folder):
        return folder
    return get_folder_id(workspace, folder)
@df
def create_folder(
    workspace: str,
    display_name: str,
    *,
    parent_folder: Optional[str] = None,
    df: Optional[bool] = True,
) -> Union[DataFrame, Dict[str, Any], None]:
    """
    Create a new folder in the specified workspace.

    Args:
        workspace (str): The name or ID of the workspace where the folder will be created.
        display_name (str): The name of the folder to create.
        parent_folder (Optional[str]): The name or ID of the parent folder.
            If omitted, the folder is created at the workspace root.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, Dict[str, Any], None]): The created folder details if successful, otherwise None.

    Examples:
        ```python
        create_folder(
            workspace='123e4567-e89b-12d3-a456-426614174000',
            display_name='NewFolder',
            parent_folder='456e7890-e12b-34d5-a678-90abcdef1234',
        )
        ```
    """
    workspace_id = resolve_workspace(workspace)

    payload = {'displayName': display_name}

    if parent_folder:
        # Resolve the parent by name or UUID within the same workspace.
        payload['parentFolderId'] = resolve_folder(workspace_id, parent_folder)

    return api_request(
        '/workspaces/' + workspace_id + '/folders',
        payload=payload,
        method='post',
    )


def delete_folder(workspace: str, folder: str) -> None:
    """
    Delete a folder in a workspace.

    Args:
        workspace (str): The name or ID of the workspace to delete the folder from.
        folder (str): The name or ID of the folder to delete.

    Returns:
        None: the raw API response (typically empty) is passed through.

    Examples:
        ```python
        delete_folder(
            '123e4567-e89b-12d3-a456-426614174000',
            '98f6b7c8-1234-5678-90ab-cdef12345678',
        )
        ```
    """
    workspace_id = resolve_workspace(workspace)

    return api_request(
        '/workspaces/'
        + workspace_id
        + '/folders/'
        + resolve_folder(workspace_id, folder),
        method='delete',
    )


@df
def update_folder(
    workspace: str,
    folder: str,
    display_name: str,
    *,
    df: Optional[bool] = True,
) -> Union[DataFrame, Dict[str, Any], None]:
    """
    Update an existing folder in the specified workspace.

    Args:
        workspace (str): The name or ID of the workspace where the folder will be updated.
        folder (str): The name or ID of the folder to update.
        display_name (str): The new display name for the folder.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, Dict[str, Any], None]): The updated folder details if successful, otherwise None.

    Examples:
        ```python
        update_folder(
            '123e4567-e89b-12d3-a456-426614174000',
            '98f6b7c8-1234-5678-90ab-cdef12345678',
            display_name='NewFolderName',
        )
        ```
    """
    workspace_id = resolve_workspace(workspace)

    payload = {'displayName': display_name}

    return api_request(
        '/workspaces/'
        + workspace_id
        + '/folders/'
        + resolve_folder(workspace_id, folder),
        payload=payload,
        method='patch',
    )
@df
def list_gateways(
    df: Optional[bool] = True,
) -> Union[DataFrame, List[Dict[str, Any]], None]:
    """
    Lists all available gateways.

    Args:
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, List[Dict[str, Any]], None]): The list of gateways.

    Examples:
        ```python
        list_gateways()
        ```
    """
    return api_request('/gateways', support_pagination=True)


def get_gateway_id(gateway_name: str) -> Union[str, None]:
    """
    Retrieves the ID of a gateway by its name.

    Args:
        gateway_name (str): The name of the gateway.

    Returns:
        str | None: The ID of the gateway if found, otherwise None.
    """
    # Guard against a None result (consistent with the capacity/folder
    # lookups): warn instead of raising TypeError on iteration.
    gateways = list_gateways(df=False) or []
    for _gateway in gateways:
        if _gateway['displayName'] == gateway_name:
            return _gateway['id']
    logger.warning(f"Gateway '{gateway_name}' not found.")
    return None


def resolve_gateway(gateway: str) -> Union[str, None]:
    """
    Resolves a gateway name to its ID.

    Args:
        gateway (str): The name or UUID of the gateway.

    Returns:
        str | None: The ID of the gateway if found, otherwise None.
    """
    # A valid UUID is assumed to already be the gateway ID.
    if is_valid_uuid(gateway):
        return gateway
    return get_gateway_id(gateway)


@df
def get_gateway(
    gateway: str,
    *,
    df: Optional[bool] = True,
) -> Union[DataFrame, Dict[str, Any], None]:
    """
    Retrieves the details of a gateway by its name or ID.

    Args:
        gateway (str): The name or ID of the gateway to retrieve.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, Dict[str, Any], None]): The gateway details if found, otherwise None.

    Examples:
        ```python
        get_gateway('123e4567-e89b-12d3-a456-426614174000')
        get_gateway('my-gateway')
        ```
    """
    gateway_id = resolve_gateway(gateway)
    if not gateway_id:
        return None
    return api_request(
        '/gateways/' + gateway_id,
    )


def get_gateway_public_key(gateway: str) -> dict | None:
    """
    Extracts the public key of a gateway.

    Args:
        gateway (str): The name or ID of the gateway to retrieve the public key from.

    Returns:
        dict | None: The public key details if found, otherwise None.

    Examples:
        ```python
        get_gateway_public_key('123e4567-e89b-12d3-a456-426614174000')
        ```
    """
    response = get_gateway(gateway, df=False)
    if not response:
        return None

    return response.get('publicKey')
class _AuthenticatedEncryption:
    """AES-256-CBC + HMAC-SHA256 encrypt-then-MAC helper.

    Port of the symmetric half of Microsoft's "Encrypt credentials"
    PowerBI-Developer-Samples. The byte layout of the output must stay
    compatible with what the on-premises gateway expects — do not restyle.
    """

    # Algorithm identifiers prefixed onto the MAC'd and final payloads.
    # NOTE(review): both identifiers carry the value 0 here; confirm against
    # the upstream Microsoft sample before changing either.
    Aes256CbcPkcs7 = 0
    HMACSHA256 = 0

    algorithm_choices = [Aes256CbcPkcs7, HMACSHA256]

    def _encrypt(self, key_enc, key_mac, message):
        """Encrypts the message with AES, CBC padding and PKCS7

        Args:
            key_enc (bytes): Encryption Key (>= 32 bytes)
            key_mac (bytes): MAC Key (>= 32 bytes)
            message (bytes): message to get encrypted

        Returns:
            bytearray: algorithm prefix || HMAC tag || IV || ciphertext

        Raises:
            ValueError: if either key is shorter than 32 bytes.
            TypeError: if message is empty/None.
        """

        if len(key_enc) < 32:
            raise ValueError(
                'Encryption Key must be at least 256 bits (32 bytes)'
            )

        if len(key_mac) < 32:
            raise ValueError('Mac Key must be at least 256 bits (32 bytes)')

        if not message:
            raise TypeError('Credentials cannot be null')

        # Random initialization vector, one AES block wide
        iv = os.urandom(16)

        # PKCS7 padding to the AES block size (128 bits)
        padder = padding.PKCS7(128).padder()

        # Apply padding to the message
        padded_data = padder.update(message) + padder.finalize()

        # Cipher object with CBC mode
        cipher = Cipher(
            algorithms.AES(key_enc), modes.CBC(iv), backend=default_backend()
        )
        encryptor = cipher.encryptor()

        # Cipher text
        cipher_text = encryptor.update(padded_data) + encryptor.finalize()

        # Prepare the data on which MAC will be executed:
        # algorithm prefix || IV || ciphertext (encrypt-then-MAC)
        tag_data = bytearray(
            [0] * (len(self.algorithm_choices) + len(iv) + len(cipher_text))
        )
        tag_data_offset = 0

        # Copy algorithm choices array in tag_data
        tag_data[0 : len(self.algorithm_choices)] = self.algorithm_choices[
            0 : len(self.algorithm_choices)
        ]
        tag_data_offset = len(self.algorithm_choices) + tag_data_offset

        # Copy initialization vector in tag_data
        tag_data[tag_data_offset : len(iv) + tag_data_offset] = iv[0 : len(iv)]
        tag_data_offset = len(iv) + tag_data_offset

        # Copy cipher text vector in tag_data
        tag_data[
            tag_data_offset : len(cipher_text) + tag_data_offset
        ] = cipher_text[0 : len(cipher_text)]
        tag_data_offset = len(cipher_text) + tag_data_offset

        # Pass random generated key and hash algorithm to calculate authentication code
        hmac_instance = hmac.HMAC(
            key_mac, hashes.SHA256(), backend=default_backend()
        )

        # Pass the bytes to hash and authenticate
        hmac_instance.update(tag_data)

        # Finalize the current context and return the message digest as bytes
        mac = hmac_instance.finalize()

        # Build the final result as the concatenation of everything except the keys:
        # algorithm prefix || MAC || IV || ciphertext
        output = bytearray(
            [0]
            * (
                len(self.algorithm_choices)
                + len(mac)
                + len(iv)
                + len(cipher_text)
            )
        )
        output_offset = 0

        output[0 : len(self.algorithm_choices)] = self.algorithm_choices[
            0 : len(self.algorithm_choices)
        ]
        output_offset = len(self.algorithm_choices) + output_offset

        output[output_offset : len(mac) + output_offset] = mac[0 : len(mac)]
        output_offset = len(mac) + output_offset

        output[output_offset : len(iv) + output_offset] = iv[0 : len(iv)]
        output_offset = len(iv) + output_offset

        output[output_offset : len(cipher_text) + output_offset] = cipher_text[
            0 : len(cipher_text)
        ]
        output_offset = len(cipher_text) + output_offset

        return output


class _AsymmetricHigherKeyEncryptionHelper:
    """RSA-OAEP wrapper around _AuthenticatedEncryption.

    Generates ephemeral AES/HMAC keys, symmetric-encrypts the payload, then
    RSA-encrypts the ephemeral keys with the gateway's public key.
    """

    KEY_LENGTHS_PREFIX = 2       # two length-marker bytes prefixed to the key blob
    HMAC_KEY_SIZE_BYTES = 64
    AES_KEY_SIZE_BYTES = 32

    # Length-marker codes written into the key blob prefix.
    KEY_LENGTH_32 = 0
    KEY_LENGTH_64 = 1

    def _encrypt(self, plain_text_bytes, modulus_bytes, exponent_bytes):
        """Encrypts the message with RSA, MGF and SHA hashes

        Args:
            plain_text_bytes (bytes): Message to be encrypted
            modulus_bytes (bytes): Modulus bytes returned from GET gateway API
            exponent_bytes (bytes): Exponent bytes returned from GET gateway API

        Returns:
            str: base64(RSA-encrypted keys) + base64(symmetric ciphertext)
        """

        # Generate ephemeral random keys for encryption (32 bytes), hmac (64 bytes)
        key_enc = os.urandom(self.AES_KEY_SIZE_BYTES)
        key_mac = os.urandom(self.HMAC_KEY_SIZE_BYTES)

        authenticated_encryption = _AuthenticatedEncryption()

        # Encrypt message using ephemeral keys and Authenticated Encryption
        # Symmetric algorithm and encryptor
        cipher_text = authenticated_encryption._encrypt(
            key_enc, key_mac, plain_text_bytes
        )

        # Encrypt ephemeral keys using RSA
        keys = bytearray(
            [0] * (len(key_enc) + len(key_mac) + self.KEY_LENGTHS_PREFIX)
        )

        # Prefixing length of Keys. Symmetric Key length followed by HMAC key length
        keys[0] = self.KEY_LENGTH_32
        keys[1] = self.KEY_LENGTH_64

        # Copy key enc and key mac into keys array
        keys[2 : len(key_enc) + 2] = key_enc[0 : len(key_enc)]
        keys[len(key_enc) + 2 : len(key_enc) + len(key_mac) + 2] = key_mac[
            0 : len(key_mac)
        ]

        # Convert exponent and modulus byte arrays to integers
        exponent = int.from_bytes(exponent_bytes, 'big')
        modulus = int.from_bytes(modulus_bytes, 'big')

        # Generate public key based on modulus and exponent returned by the API
        public_key = rsa.RSAPublicNumbers(exponent, modulus).public_key(
            default_backend()
        )

        # Encrypt the data
        # Pass padding algorithm, mask generation function and hashing algorithm
        encrypted_bytes = public_key.encrypt(
            bytes(keys),
            OAEP(
                mgf=MGF1(algorithm=hashes.SHA256()),
                algorithm=hashes.SHA256(),
                label=None,
            ),
        )

        # Return final output: RSA-wrapped keys first, then the payload
        return (
            base64.b64encode(encrypted_bytes).decode()
            + base64.b64encode(cipher_text).decode()
        )


def _get_encrypt_gateway_credentials(
    gateway_id: str, username: str, password: str
) -> str:
    """
    Encrypts the JSON-serialized credentials list using RSA-OAEP and returns
    a base64 string suitable for Power BI on-prem gateway.

    Args:
        gateway_id (str): The ID or name of the gateway.
        username (str): The username for the credentials.
        password (str): The password for the credentials.

    Returns:
        str: Base64-encoded encrypted credentials.

    Raises:
        ValueError: If the gateway is not found or credentials are invalid.
    """
    # Fetch the gateway's RSA public key (exponent/modulus, base64-encoded)
    gateway_resp = get_gateway_public_key(gateway_id)

    if not gateway_resp:
        raise ValueError(
            'Gateway not found. Please check the gateway ID or name.'
        )

    # Decode exponent and modulus from base64
    e = base64.b64decode(gateway_resp['exponent'])
    n = base64.b64decode(gateway_resp['modulus'])

    # Serialize credentials to the compact JSON form the gateway expects
    credentials = {
        'credentialData': [
            {'name': 'username', 'value': username},
            {'name': 'password', 'value': password},
        ]
    }
    plaintext = json.dumps(credentials, separators=(',', ':')).encode('utf-8')

    # Encrypt the plaintext using RSA-OAEP via the helper above
    helper = _AsymmetricHigherKeyEncryptionHelper()
    encrypted_credentials = helper._encrypt(plaintext, n, e)

    return encrypted_credentials
def set_dmv_connection_string_spn(
    client_id: str,
    client_secret: str,
    tenant_id: str,
    workspace_name: str,
    semantic_model_name: str,
) -> str:
    """Build an XMLA/ADOMD connection string authenticated with a service principal.

    Args:
        client_id: Azure AD application (client) ID.
        client_secret: Azure AD application secret.
        tenant_id: Azure AD tenant ID.
        workspace_name: Power BI workspace name (XMLA endpoint segment).
        semantic_model_name: Target semantic model (Initial Catalog).

    Returns:
        str: The assembled connection string, each segment terminated by ';'.
    """
    segments = (
        f'Data Source=powerbi://api.powerbi.com/v1.0/myorg/{workspace_name}',
        f'Initial Catalog={semantic_model_name}',
        f'User ID=app:{client_id}@{tenant_id}',
        f'Password={client_secret}',
    )
    return ';'.join(segments) + ';'
def evaluate_dmv_queries(
    conn_str: str,
    query: str,
) -> pd.DataFrame:
    """Execute DMV query against Power BI XMLA endpoint.

    Args:
        conn_str: ADOMD connection string (see set_dmv_connection_string_*).
        query: DMV query text, e.g. ``SELECT * FROM $SYSTEM....``.

    Returns:
        pd.DataFrame: one row per result row, column names taken from the
        cursor description.

    Raises:
        RuntimeError: if import_pyadomd() has not been called successfully.
    """
    # Pyadomd is a module-level global populated by import_pyadomd();
    # it stays None until that setup succeeds.
    if Pyadomd is None:
        raise RuntimeError(
            'Pyadomd is not available. Call import_pyadomd() first.'
        )

    try:
        with Pyadomd(conn_str) as conn:
            with conn.cursor().execute(query) as cur:
                cols = [c[0] for c in cur.description]
                rows = cur.fetchall()
                df = pd.DataFrame(rows, columns=cols)
                return df
    except Exception as e:
        # Log and re-raise so callers still see the original failure.
        logger.error(f'Error executing DMV query: {e}')
        raise


def dmv_fetch_tables_raw(
    conn_str: str,
) -> pd.DataFrame:
    """
    Fetch all rows from $SYSTEM.TMSCHEMA_TABLES.

    Used downstream to map table ID -> table name.
    """
    query = """
    SELECT * FROM $SYSTEM.TMSCHEMA_TABLES
    """
    return evaluate_dmv_queries(conn_str, query)


def dmv_fetch_partitions_raw(
    conn_str: str,
) -> pd.DataFrame:
    """
    Fetch all rows from $SYSTEM.TMSCHEMA_PARTITIONS.

    Raw partition metadata; enriched with table names elsewhere.
    """
    query = """
    SELECT * FROM $SYSTEM.TMSCHEMA_PARTITIONS
    """
    return evaluate_dmv_queries(conn_str, query)
157 | ) 158 | return None 159 | 160 | tables = dmv_fetch_tables_raw(conn_str) 161 | tables = tables[['ID', 'Name']] 162 | df = parts.merge( 163 | tables.rename(columns={'ID': 'TableID', 'Name': 'TableName'}), 164 | on='TableID', 165 | how='left', 166 | ) 167 | 168 | # Sort for readability 169 | sort_cols = [ 170 | c 171 | for c in ['TableName', 'Name', 'RefreshedTime', 'ModifiedTime'] 172 | if c in df.columns 173 | ] 174 | if sort_cols: 175 | df = df.sort_values(sort_cols, ascending=[True, True, False, False]) 176 | df = df[['TableName', 'Name', 'RefreshedTime', 'ModifiedTime']] 177 | 178 | return df 179 | -------------------------------------------------------------------------------- /src/pyfabricops/helpers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alisonpezzott/pyfabricops/d20a5d57b4f84246973b4c0c6bfa35c66dd6a1e0/src/pyfabricops/helpers/__init__.py -------------------------------------------------------------------------------- /src/pyfabricops/helpers/dataflows_gen1.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import uuid 4 | from pathlib import Path 5 | from typing import Any, Dict, Optional, Union 6 | 7 | from ..api.api import _base_api 8 | from ..core.workspaces import resolve_workspace 9 | from ..items.dataflows_gen1 import ( 10 | get_dataflow_gen1, 11 | get_dataflow_gen1_definition, 12 | list_dataflows_gen1, 13 | ) 14 | from ..utils.logging import get_logger 15 | from ..utils.utils import ( 16 | list_paths_of_type, 17 | load_and_sanitize, 18 | write_single_line_json, 19 | ) 20 | 21 | logger = get_logger(__name__) 22 | 23 | 24 | def get_dataflow_gen1_config( 25 | workspace: str, dataflow_gen1: str 26 | ) -> Union[Dict[str, Any], None]: 27 | """ 28 | Get a specific dataflow_gen1 config from a workspace. 29 | 30 | Args: 31 | workspace (str): The name or ID of the workspace. 
32 | dataflow_gen1 (str): The name or ID of the dataflow_gen1. 33 | 34 | Returns: 35 | (Union[Dict[str, Any], None]): The dict config from the dataflow_gen1. 36 | """ 37 | item = dataflow_gen1 38 | item_data = get_dataflow_gen1(workspace, item, df=False) 39 | 40 | if item_data is None: 41 | return None 42 | 43 | else: 44 | config = {} 45 | config = config[item_data.get('name')] = {} 46 | 47 | config = { 48 | 'id': item_data['objectId'], 49 | 'description': item_data.get('description', None), 50 | 'folder_id': '', 51 | } 52 | 53 | return config 54 | 55 | 56 | def get_all_dataflows_gen1_config( 57 | workspace: str, 58 | ) -> Union[Dict[str, Any], None]: 59 | """ 60 | Get dataflows_gen1 config from a workspace. 61 | 62 | Args: 63 | workspace (str): The name or ID from the workspace. 64 | 65 | Returns: 66 | (Union[Dict[str, Any], None]): The dict config of all dataflows_gen1 in the workspace 67 | """ 68 | items = list_dataflows_gen1(workspace, df=False) 69 | 70 | if items is None: 71 | return None 72 | 73 | config = {} 74 | 75 | for item in items: 76 | config[item['name']] = { 77 | 'id': item['objectId'], 78 | 'description': item.get('description', None), 79 | 'folder_id': '', 80 | } 81 | 82 | return config 83 | 84 | 85 | def export_dataflow_gen1( 86 | workspace: str, 87 | dataflow: str, 88 | path: str, 89 | ) -> None: 90 | """ 91 | Export a dataflow from a workspace to a file. 92 | 93 | Args: 94 | workspace (str): The workspace name or ID. 95 | dataflow (str): The dataflow name or ID. 96 | path (str, optional): The path to the project folder. 
97 | 98 | Examples: 99 | ```python 100 | export_dataflow_gen1('MyProjectWorkspace', 'SalesDataflowGen1', path='path/to/project') 101 | export_dataflow_gen1('123e4567-e89b-12d3-a456-426614174000', 'SalesDataflowGen1', path='path/to/project') 102 | ``` 103 | """ 104 | workspace_id = resolve_workspace(workspace) 105 | if not workspace_id: 106 | return None 107 | 108 | # Get the dataflow details 109 | dataflow_ = get_dataflow_gen1(workspace_id, dataflow) 110 | if not dataflow_: 111 | return None 112 | 113 | dataflow_id = dataflow_['objectId'] 114 | dataflow_name = dataflow_['name'] 115 | 116 | definition_response = get_dataflow_gen1_definition( 117 | workspace=workspace_id, 118 | dataflow=dataflow_id, 119 | ) 120 | 121 | if not definition_response: 122 | return None 123 | 124 | dataflow_name = dataflow_['name'] 125 | dataflow_path = Path(path) / dataflow_name + '.Dataflow' 126 | os.makedirs(dataflow_path, exist_ok=True) 127 | 128 | # Save the model as model.json inside the item folder in single-line format (Power BI portal format) 129 | model_json_path = dataflow_path / 'model.json' 130 | write_single_line_json(definition_response, model_json_path) 131 | 132 | logger.success(f'Exported dataflow {dataflow_name} to {dataflow_path}.') 133 | return None 134 | 135 | 136 | def export_all_dataflows_gen1( 137 | workspace: str, 138 | path: str, 139 | ) -> None: 140 | """ 141 | Export all dataflows gen1 from a workspace to a file. 142 | 143 | Args: 144 | workspace (str): The workspace name or ID. 145 | path (str): The path to the project folder. 
146 | 147 | Examples: 148 | ```python 149 | export_all_dataflows_gen1('MyProjectWorkspace', path='path/to/project') 150 | export_all_dataflows_gen1('123e4567-e89b-12d3-a456-426614174000', path='path/to/project') 151 | ``` 152 | """ 153 | workspace_id = resolve_workspace(workspace) 154 | if not workspace_id: 155 | return None 156 | 157 | dataflows = list_dataflows_gen1(workspace_id, df=False) 158 | 159 | if not dataflows: 160 | return None 161 | else: 162 | for dataflow in dataflows: 163 | export_dataflow_gen1( 164 | workspace=workspace, dataflow=dataflow['objectId'], path=path 165 | ) 166 | return None 167 | 168 | 169 | def _serialize_dataflow_gen1_model(path: str) -> tuple[bytes, str]: 170 | """ 171 | Prepares the body for a dataflow deployment by reading and serializing the model.json file. 172 | 173 | Args: 174 | path (str): The path to the directory containing the model.json file. 175 | 176 | Returns: 177 | tuple[bytes, str]: The serialized multipart body and the boundary string. 178 | 179 | Raises: 180 | UnicodeEncodeError: If there is an encoding issue with the JSON content. 
181 | 182 | Examples: 183 | ```python 184 | _serialize_dataflow_gen1_model('path/to/MyDataflowGen1.Dataflow') 185 | ``` 186 | """ 187 | # Read and clean JSON using load_and_sanitize function 188 | df_json = load_and_sanitize(Path(path) / 'model.json') 189 | 190 | json_str = json.dumps(df_json, ensure_ascii=False, separators=(',', ':')) 191 | 192 | # Boundary setup 193 | boundary = uuid.uuid4().hex 194 | LF = '\r\n' 195 | 196 | # Serialized Json Body 197 | body = ( 198 | f'--{boundary}{LF}' 199 | f'Content-Disposition: form-data; name="model.json"; filename="model.json"{LF}' 200 | f'Content-Type: application/json{LF}{LF}' 201 | f'{json_str}{LF}' 202 | f'--{boundary}--{LF}' 203 | ) 204 | 205 | try: 206 | body.encode('utf-8') 207 | except UnicodeEncodeError as e: 208 | logger.error(f'Encoding error: {e}') 209 | raise 210 | return body.encode('utf-8'), boundary 211 | 212 | 213 | def deploy_dataflow_gen1(workspace: str, path: str) -> Union[bool, None]: 214 | """ 215 | Deploy a dataflow in a workspace from a model.json file 216 | 217 | Args: 218 | workspace (str): The workspace name or ID. 219 | path (str): Path to the model.json file for the dataflow. 220 | 221 | Returns: 222 | None 223 | 224 | Raises: 225 | Exception: If the API request fails or returns an error. 
226 | 227 | Examples: 228 | ```python 229 | deploy_dataflow_gen1('MyProjectWorkspace', 'path/to/MyDataflowGen1.Dataflow') 230 | deploy_dataflow_gen1('123e4567-e89b-12d3-a456-426614174000', 'path/to/MyDataflowGen1.Dataflow') 231 | ``` 232 | """ 233 | # Read and clean JSON 234 | body, boundary = _serialize_dataflow_gen1_model(path) 235 | 236 | content_type = f'multipart/form-data; boundary={boundary}' 237 | 238 | params = { 239 | 'datasetDisplayName': 'model.json', 240 | 'nameConflict': 'Abort', 241 | } 242 | 243 | workspace_id = resolve_workspace(workspace) 244 | if not workspace_id: 245 | return None 246 | 247 | response = _base_api( 248 | audience='powerbi', 249 | endpoint=f'/groups/{workspace_id}/imports', 250 | content_type=content_type, 251 | credential_type='user', 252 | method='post', 253 | data=body, 254 | params=params, 255 | return_raw=True, 256 | ) 257 | # Handle response 258 | if not response.status_code in (200, 202): 259 | logger.error( 260 | f'Error deploying the dataflow: {response.status_code} - {response.json().get("error", {})}' 261 | ) 262 | return None 263 | logger.success(f'Dataflow deployed successfully.') 264 | return True 265 | 266 | 267 | def deploy_all_dataflows_gen1( 268 | workspace: str, 269 | path: str, 270 | start_path: Optional[str] = None, 271 | ) -> None: 272 | """ 273 | Deploy all dataflows_gen1 to workspace. 274 | 275 | Args: 276 | workspace (str): The name or ID of the workspace. 277 | path (str): The path to the dataflows_gen2. 278 | start_path (Optional[str]): The starting path for folder creation. 279 | """ 280 | workspace_id = resolve_workspace(workspace) 281 | if workspace_id is None: 282 | return None 283 | 284 | dataflows_gen2_paths = list_paths_of_type(path, 'Dataflow') 285 | 286 | for path_ in dataflows_gen2_paths: 287 | 288 | deploy_dataflow_gen1(workspace_id, path_) 289 | 290 | logger.success( 291 | f'All dataflows_gen1 were deployed to workspace "{workspace}" successfully.' 
292 | ) 293 | return None 294 | -------------------------------------------------------------------------------- /src/pyfabricops/helpers/folders.py: -------------------------------------------------------------------------------- 1 | import os 2 | from functools import lru_cache 3 | from pathlib import Path 4 | from typing import Any, Dict, List, Optional, Union 5 | 6 | import pandas 7 | from pandas import DataFrame 8 | 9 | from ..core.folders import create_folder, list_folders, resolve_folder 10 | from ..core.workspaces import resolve_workspace 11 | from ..utils.logging import get_logger 12 | 13 | logger = get_logger(__name__) 14 | 15 | 16 | def generate_folders_paths( 17 | folders_df: DataFrame, 18 | ) -> DataFrame: 19 | """ 20 | Returns the full path for the folder `folder_id` recursively concatenating the names of its parents. 21 | 22 | Args: 23 | folders_df (DataFrame): The DataFrame containing folder information. 24 | 25 | Returns: 26 | DataFrame: The full folder paths. 27 | """ 28 | 29 | df = folders_df 30 | 31 | # Create a dict to lookup: id → {displayName, parentFolderId} 32 | folder_map = df.set_index('id')[['displayName', 'parentFolderId']].to_dict( 33 | 'index' 34 | ) 35 | 36 | # Recursive function with cache to build the full path 37 | @lru_cache(maxsize=None) 38 | def _build_full_path(folder_id: str) -> str: 39 | """ 40 | Returns the full path for the folder `folder_id`, 41 | recursively concatenating the names of its parents. 
42 | """ 43 | node = folder_map.get(folder_id) 44 | if node is None: 45 | return '' # id not found 46 | name = node['displayName'] 47 | parent = node['parentFolderId'] 48 | # If without parent, is root 49 | if pandas.isna(parent) or parent == '': 50 | return name 51 | # Otherwise, joins the parent path with self name 52 | return _build_full_path(parent) + '/' + name 53 | 54 | # Apply the function by each dataframe row 55 | df['folder_path'] = df['id'].apply(lambda x: _build_full_path(x)) 56 | 57 | df = df.rename(columns={'id': 'folder_id'}) 58 | return df[['folder_id', 'folder_path']] 59 | 60 | 61 | def get_folders_paths(workspace: str) -> DataFrame: 62 | """ 63 | Get the full folder paths for all folders in the workspace. 64 | 65 | Args: 66 | workspace (str): The workspace name. 67 | 68 | Returns: 69 | DataFrame: A DataFrame with folder IDs and their full paths. 70 | """ 71 | folders_df = list_folders(workspace) 72 | 73 | if 'parentFolderId' not in folders_df.columns: 74 | folders_df['parentFolderId'] = '' 75 | 76 | return generate_folders_paths(folders_df) 77 | 78 | 79 | def get_folders_config(workspace: str) -> Union[Dict[str, Any], None]: 80 | """ 81 | Get the folder configuration for a specific workspace. 82 | 83 | Args: 84 | workspace (str): The workspace name or ID. 85 | 86 | Returns: 87 | (Union[Dict[str, Any], None]): The folder configuration or None if not found. 
88 | """ 89 | folders = get_folders_paths(workspace) 90 | if folders is None: 91 | return None 92 | 93 | return folders.to_dict(orient='records') 94 | 95 | 96 | def export_folders(workspace: str, path: Union[str, Path]) -> None: 97 | """ 98 | Export all folders from a workspace to a specified path 99 | """ 100 | folders = get_folders_paths(workspace) 101 | folders_list = folders.to_dict(orient='records') 102 | for folder in folders_list: 103 | folder_path_ = Path(path) / folder['folder_path'] 104 | os.makedirs(folder_path_, exist_ok=True) 105 | # Create a dummy README.md in each created folder 106 | with open( 107 | Path(folder_path_) / 'README.md', 'w', encoding='utf-8' 108 | ) as f: 109 | f.write( 110 | f'# {folder["folder_path"]}\n\nThis folder corresponds to the Fabric workspace folder: **{folder["folder_path"]}**\n' 111 | ) 112 | logger.success( 113 | f'All folders from workspace {workspace} were exported to {path} successfully.' 114 | ) 115 | 116 | 117 | def resolve_folder_from_id_to_path( 118 | workspace: str, folder_id: str 119 | ) -> Union[str, None]: 120 | """ 121 | Return the folder path to the folder_id given for a specified worspace. 122 | """ 123 | folders = get_folders_paths(workspace) 124 | if folders is None: 125 | return None 126 | 127 | folder_path = folders[folders['folder_id'] == folder_id][ 128 | 'folder_path' 129 | ].iloc[0] 130 | 131 | if folder_path is None: 132 | logger.info(f'{folder_id} not found in the workspace {workspace}') 133 | return None 134 | 135 | return folder_path 136 | 137 | 138 | def deploy_folders( 139 | workspace: str, 140 | path: Union[str, Path], 141 | ): 142 | """ 143 | Creates folders in Fabric workspace based on local folder structure 144 | 145 | Args: 146 | workspace (str): The name or ID of the workspace. 147 | path (str): The path to the project directory. 
148 | """ 149 | if not os.path.exists(path): 150 | logger.error(f'Path {path} does not exist.') 151 | return None 152 | 153 | # Resolve workspace ID 154 | workspace_id = resolve_workspace(workspace) 155 | if not workspace_id: 156 | return None 157 | 158 | # Get all local folders that contain Fabric artifacts 159 | fabric_artifacts = [ 160 | '.SemanticModel', 161 | '.Report', 162 | '.Dataflow', 163 | '.Lakehouse', 164 | '.Warehouse', 165 | '.Notebook', 166 | '.DataPipeline', 167 | ] 168 | 169 | def _has_fabric_artifacts(path): 170 | """Check if folder or any subfolder contains Fabric artifacts""" 171 | for root, dirs, files in os.walk(path): 172 | for dir_name in dirs: 173 | if any( 174 | dir_name.endswith(artifact) 175 | for artifact in fabric_artifacts 176 | ): 177 | return True 178 | return False 179 | 180 | # First pass: identify folders with artifacts and their parent folders 181 | folders_with_artifacts = set() 182 | 183 | for root, dirs, files in os.walk(path): 184 | for dir_name in dirs: 185 | full_path = os.path.join(root, dir_name) 186 | 187 | # Check if this folder has Fabric artifacts 188 | if _has_fabric_artifacts(full_path): 189 | relative_path = os.path.relpath(full_path, path).replace( 190 | '\\', '/' 191 | ) 192 | folders_with_artifacts.add(relative_path) 193 | 194 | # Also mark all parent folders as needed 195 | parent_path = os.path.dirname(relative_path).replace('\\', '/') 196 | while ( 197 | parent_path != path 198 | and parent_path != '.' 
199 | and parent_path != '' 200 | ): 201 | folders_with_artifacts.add(parent_path) 202 | parent_path = os.path.dirname(parent_path).replace( 203 | '\\', '/' 204 | ) 205 | 206 | # Second pass: build folder list only for folders with artifacts 207 | local_folders = [] 208 | for root, dirs, files in os.walk(path): 209 | for dir_name in dirs: 210 | full_path = os.path.join(root, dir_name) 211 | relative_path = os.path.relpath(full_path, path).replace('\\', '/') 212 | 213 | # Only include folders that contain artifacts or are parents of folders with artifacts 214 | if relative_path in folders_with_artifacts: 215 | # Calculate depth for proper ordering (parents before children) 216 | depth = relative_path.count('/') 217 | 218 | # Get parent folder name (not full path) 219 | parent_relative_path = os.path.dirname(relative_path).replace( 220 | '\\', '/' 221 | ) 222 | parent_folder_name = None 223 | if ( 224 | parent_relative_path 225 | and parent_relative_path != '.' 226 | and parent_relative_path != '' 227 | ): 228 | parent_folder_name = os.path.basename(parent_relative_path) 229 | 230 | local_folders.append( 231 | { 232 | 'path': relative_path, 233 | 'name': dir_name, 234 | 'full_path': full_path, 235 | 'depth': depth, 236 | 'parent_path': parent_relative_path, 237 | 'parent_name': parent_folder_name, 238 | } 239 | ) 240 | 241 | # Sort by depth to ensure parent folders are created first 242 | local_folders.sort(key=lambda x: x['depth']) 243 | 244 | logger.info( 245 | f'Found {len(local_folders)} folders containing Fabric artifacts' 246 | ) 247 | 248 | # Keep track of created folders by path -> folder_id 249 | created_folders = {} 250 | 251 | for folder_info in local_folders: 252 | folder_name = folder_info['name'] 253 | parent_path = folder_info['parent_path'] 254 | parent_name = folder_info['parent_name'] 255 | 256 | # Determine parent folder ID from previously created folders 257 | parent_folder_id = None 258 | if parent_path and parent_path in created_folders: 259 | 
parent_folder_id = created_folders[parent_path] 260 | 261 | # Create folder in Fabric 262 | if parent_folder_id: 263 | create_folder( 264 | workspace, folder_name, parent_folder=parent_folder_id 265 | ) 266 | elif parent_name: 267 | create_folder(workspace, folder_name, parente_folder=parent_name) 268 | else: 269 | create_folder(workspace, folder_name) 270 | 271 | logger.success(f'Created all folders in the workspace {workspace}.') 272 | 273 | 274 | def create_folders_from_path_string(workspace: str, path: str) -> str: 275 | """ 276 | Create recursively folders and subfolders from a path string. 277 | 278 | Args: 279 | workspace (str): The name or ID of the workspace. 280 | path (str): The name or ID of the folder. 281 | 282 | Returns: 283 | str: The ID of the final folder. 284 | """ 285 | workspace_id = resolve_workspace(workspace) 286 | 287 | if path is None or '/' not in path: 288 | return None 289 | 290 | folders_tree = path.split('/') 291 | 292 | parent_folder_id = None 293 | 294 | for folder in folders_tree: 295 | 296 | # Get folder_id if folder exists 297 | folder_id = resolve_folder(workspace_id, folder) 298 | if folder_id is not None: 299 | logger.info( 300 | f'Folder `{folder}` already exists with ID `{folder_id}`.' 301 | ) 302 | 303 | # If not, creates it. 304 | else: 305 | folder_id = create_folder( 306 | workspace_id, 307 | folder, 308 | parent_folder=parent_folder_id, 309 | df=False, 310 | ).get('id') 311 | logger.success( 312 | f'Folder `{folder}` created with ID `{folder_id}` successfully.' 
313 | ) 314 | 315 | parent_folder_id = folder_id 316 | 317 | return folder_id 318 | -------------------------------------------------------------------------------- /src/pyfabricops/helpers/items.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from pathlib import Path 4 | from typing import Any, Dict, Optional, Union 5 | 6 | from pandas import DataFrame 7 | 8 | from ..core.workspaces import resolve_workspace 9 | from ..helpers.folders import ( 10 | create_folders_from_path_string, 11 | resolve_folder_from_id_to_path, 12 | ) 13 | from ..items.items import ( 14 | create_item, 15 | get_item, 16 | get_item_definition, 17 | list_items, 18 | resolve_item, 19 | update_item_definition, 20 | ) 21 | from ..utils.decorators import df 22 | from ..utils.logging import get_logger 23 | from ..utils.utils import ( 24 | extract_display_name_from_platform, 25 | extract_middle_path, 26 | list_paths_of_type, 27 | pack_item_definition, 28 | unpack_item_definition, 29 | ) 30 | 31 | logger = get_logger(__name__) 32 | 33 | 34 | def export_item( 35 | workspace: str, 36 | item: str, 37 | path: str, 38 | ): 39 | """ 40 | Exports a item definition to a specified folder structure. 41 | 42 | Args: 43 | workspace (str): The workspace name or ID. 44 | item (str): The name of the item to export. 45 | path (str): The root path of the project. 
46 | 47 | Examples: 48 | ```python 49 | export_item('MyProjectWorkspace', 'SalesDataModel', '/path/to/project') 50 | export_item('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000', '/path/to/project') 51 | ``` 52 | """ 53 | workspace_id = resolve_workspace(workspace) 54 | if not workspace_id: 55 | return None 56 | 57 | item_ = get_item(workspace_id, item, df=False) 58 | if not item_: 59 | return None 60 | 61 | item_id = item_['id'] 62 | item_type = item_['type'] 63 | folder_id = None 64 | if 'folderId' in item_: 65 | folder_id = item_['folderId'] 66 | 67 | definition = get_item_definition(workspace_id, item_id) 68 | if not definition: 69 | return None 70 | 71 | try: 72 | folder_path = resolve_folder_from_id_to_path(workspace_id, folder_id) 73 | except: 74 | logger.info( 75 | f'{item["displayName"]}.{item_type} is not inside a folder.' 76 | ) 77 | folder_path = None 78 | 79 | if folder_path is None: 80 | item_path = Path(path) / (item['displayName'] + f'.{item_type}') 81 | else: 82 | item_path = ( 83 | Path(path) / folder_path / (item['displayName'] + f'.{item_type}') 84 | ) 85 | os.makedirs(item_path, exist_ok=True) 86 | 87 | unpack_item_definition(definition, item_path) 88 | 89 | logger.success( 90 | f'`{item["displayName"]}.{item_type}` was exported to {item_path} successfully.' 91 | ) 92 | return None 93 | 94 | 95 | def export_all_items( 96 | workspace: str, 97 | path: str, 98 | ) -> None: 99 | """ 100 | Exports all items to the specified folder structure. 101 | 102 | Args: 103 | workspace (str): The workspace name or ID. 104 | path (str): The root path of the project. 
105 | """ 106 | workspace_id = resolve_workspace(workspace) 107 | if workspace_id is None: 108 | return None 109 | 110 | items = list_items(workspace_id, df=False) 111 | if items is None: 112 | return None 113 | 114 | for item in items: 115 | item_ = get_item(workspace_id, item, df=False) 116 | if not item_: 117 | return None 118 | 119 | item_id = item_['id'] 120 | item_type = item_['type'] 121 | folder_id = None 122 | if 'folderId' in item_: 123 | folder_id = item_['folderId'] 124 | 125 | definition = get_item_definition(workspace_id, item_id) 126 | if not definition: 127 | return None 128 | 129 | try: 130 | folder_path = resolve_folder_from_id_to_path( 131 | workspace_id, folder_id 132 | ) 133 | except: 134 | logger.info( 135 | f'{item["displayName"]}.{item_type} is not inside a folder.' 136 | ) 137 | folder_path = None 138 | 139 | if folder_path is None: 140 | item_path = Path(path) / (item['displayName'] + f'.{item_type}') 141 | else: 142 | item_path = ( 143 | Path(path) 144 | / folder_path 145 | / (item['displayName'] + f'.{item_type}') 146 | ) 147 | os.makedirs(item_path, exist_ok=True) 148 | 149 | unpack_item_definition(definition, item_path) 150 | 151 | logger.success( 152 | f'`{item["displayName"]}.{item_type}` was exported to {item_path} successfully.' 153 | ) 154 | return None 155 | 156 | 157 | @df 158 | def deploy_item( 159 | workspace: str, 160 | path: str, 161 | start_path: Optional[str] = None, 162 | description: Optional[str] = None, 163 | df: Optional[bool] = True, 164 | ) -> Union[DataFrame, Dict[str, Any], None]: 165 | """ 166 | Creates or updates a item in Fabric based on local folder structure. 167 | Automatically detects the folder_id based on where the item is located locally. 168 | 169 | Args: 170 | workspace (str): The workspace name or ID. 171 | path (str): The root path of the project. 172 | start_path (str, optional): The starting path for the item. 173 | description (str, optional): A description for the item. 
174 | df (bool, optional): Whether to return a DataFrame. Defaults to True. 175 | """ 176 | workspace_id = resolve_workspace(workspace) 177 | if not workspace_id: 178 | return None 179 | 180 | display_name = extract_display_name_from_platform(path) 181 | if display_name is None: 182 | return None 183 | 184 | item_id = resolve_item(workspace_id, display_name) 185 | 186 | folder_path_string = extract_middle_path(path, start_path=start_path) 187 | folder_id = create_folders_from_path_string( 188 | workspace_id, folder_path_string 189 | ) 190 | 191 | item_definition = pack_item_definition(path) 192 | 193 | if item_id is None: 194 | return create_item( 195 | workspace_id, 196 | display_name=display_name, 197 | item_definition=item_definition, 198 | description=description, 199 | folder=folder_id, 200 | df=False, 201 | ) 202 | 203 | else: 204 | return update_item_definition( 205 | workspace_id, 206 | item_id, 207 | item_definition=item_definition, 208 | df=False, 209 | ) 210 | 211 | 212 | def deploy_all_items( 213 | workspace: str, 214 | path: str, 215 | start_path: Optional[str] = None, 216 | ) -> None: 217 | """ 218 | Deploy all items to workspace. 219 | 220 | Args: 221 | workspace (str): The name or ID of the workspace. 222 | path (str): The path to the notebooks. 223 | start_path (Optional[str]): The starting path for folder creation. 
224 | """ 225 | workspace_id = resolve_workspace(workspace) 226 | if workspace_id is None: 227 | return None 228 | 229 | types = ['Notebook', 'DataPipeline', 'Dataflow', 'SemanticModel', 'Report'] 230 | for type in types: 231 | item_paths = list_paths_of_type(path, type) 232 | 233 | for path_ in item_paths: 234 | 235 | display_name = extract_display_name_from_platform(path_) 236 | if display_name is None: 237 | return None 238 | 239 | item_id = resolve_item(workspace_id, display_name) 240 | 241 | folder_path_string = extract_middle_path( 242 | path_, start_path=start_path 243 | ) 244 | folder_id = create_folders_from_path_string( 245 | workspace_id, folder_path_string 246 | ) 247 | 248 | item_definition = pack_item_definition(path_) 249 | 250 | if item_id is None: 251 | create_item( 252 | workspace_id, 253 | display_name=display_name, 254 | item_definition=item_definition, 255 | folder=folder_id, 256 | df=False, 257 | ) 258 | 259 | else: 260 | update_item_definition( 261 | workspace_id, 262 | item_id, 263 | item_definition=item_definition, 264 | df=False, 265 | ) 266 | 267 | logger.success( 268 | f'All items were deployed to workspace "{workspace}" successfully.' 
269 | ) 270 | return None 271 | -------------------------------------------------------------------------------- /src/pyfabricops/helpers/lakehouses.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import sys 4 | from pathlib import Path 5 | from typing import Any, Dict, List, Optional, Union 6 | 7 | import pandas as pd 8 | from pandas import DataFrame 9 | 10 | from ..core.workspaces import resolve_workspace 11 | from ..helpers.folders import resolve_folder_from_id_to_path 12 | from ..items.items import list_items 13 | from ..items.lakehouses import get_lakehouse, list_lakehouses 14 | from ..items.shortcuts import list_shortcuts 15 | from ..utils.decorators import df 16 | from ..utils.logging import get_logger 17 | from ..utils.schemas import PLATFORM_SCHEMA, PLATFORM_VERSION 18 | 19 | logger = get_logger(__name__) 20 | 21 | 22 | def _generate_lakehouse_platform( 23 | display_name: str, 24 | description: Optional[str] = '', 25 | ) -> Dict[str, Any]: 26 | """ 27 | Generate the lakehouse .platform file 28 | 29 | Args: 30 | display_name (str): The lakehouse display name. 31 | description (str): The lakehouse's description. 32 | 33 | Returns: 34 | (Dict[str, Any]): The .platform dict. 35 | """ 36 | return { 37 | '$schema': PLATFORM_SCHEMA, 38 | 'metadata': { 39 | 'type': 'Lakehouse', 40 | 'displayName': display_name, 41 | 'description': description, 42 | }, 43 | 'config': { 44 | 'version': PLATFORM_VERSION, 45 | 'logicalId': '00000000-0000-0000-0000-000000000000', 46 | }, 47 | } 48 | 49 | 50 | def _save_lakehouse_platform( 51 | platform: Dict[str, Any], 52 | path: str, 53 | ) -> None: 54 | """ 55 | Save the lakehouses's .platform in path 56 | 57 | Args: 58 | platform (Dict[str, Any]): The .platform dict. 59 | path (str): The lakehouse directory path to save to. 
60 | """ 61 | with open(Path(path) / '.platform', 'w') as f: 62 | json.dump(platform, f, indent=2) 63 | 64 | 65 | def _save_lakehouse_metadata_json(path: str) -> None: 66 | """ 67 | Save metadata.json to lakehouse's path 68 | 69 | Args: 70 | path (str): The lakehouse's path 71 | """ 72 | with open(Path(path) / 'metadata.json', 'w') as f: 73 | json.dump({}, f, indent=2) 74 | 75 | 76 | def get_lakehouse_config( 77 | workspace: str, lakehouse: str 78 | ) -> Union[Dict[str, Any], None]: 79 | """ 80 | Get a specific lakehouse config from a workspace. 81 | 82 | Args: 83 | workspace (str): The name or ID from the workspace. 84 | lakehouse (str): The name or ID from the lakehouse. 85 | 86 | Returns: 87 | (Union[Dict[str, Any], None]): The dict config from the lakehouse 88 | """ 89 | item = lakehouse 90 | item_data = get_lakehouse(workspace, item, df=False) 91 | 92 | if item_data is None: 93 | return None 94 | 95 | else: 96 | config = {} 97 | config = config[item_data.get('displayName')] = {} 98 | 99 | config = { 100 | 'id': item_data['id'], 101 | 'description': item_data.get('description', None), 102 | 'folder_id': '' 103 | if item_data.get('folderId') is None 104 | or pd.isna(item_data.get('folderId')) 105 | else item_data['folderId'], 106 | 'sql_endpoint_connection_string': item_data.get( 107 | 'properties_sqlEndpointProperties_connectionString' 108 | ), 109 | 'sql_endpoint_id': item_data.get( 110 | 'properties_sqlEndpointProperties_id' 111 | ), 112 | } 113 | 114 | return config 115 | 116 | 117 | def get_all_lakehouses_config(workspace: str) -> Union[Dict[str, Any], None]: 118 | """ 119 | Generate lakehouses config from a workspace. 120 | 121 | Args: 122 | workspace (str): The name or ID from the workspace. 
@df
def list_valid_lakehouses(
    workspace: str,
    df: Optional[bool] = True,
) -> Union[DataFrame, List[Dict[str, Any]], None]:
    """
    Generate a list of valid lakehouses from a workspace.

    Lakehouses whose display name contains 'staging' (case-insensitive)
    are excluded.

    Args:
        workspace (str): The name or ID of the workspace.
        df (Optional[bool]): If True or not provided, returns a DataFrame.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, List[Dict[str, Any]], None]): The valid lakehouses
        of the workspace, or None if none could be listed.
    """
    items = list_lakehouses(workspace)

    if items is None:
        return None

    return items[
        ~items['displayName'].str.contains('staging', case=False, na=False)
    ].to_dict(orient='records')


def generate_lakehouse_shortcuts_metadata(
    workspace: str, lakehouse: str
) -> Union[List[Dict[str, Any]], None]:
    """
    Build the content for a lakehouse `shortcuts.metadata.json` file.

    For each shortcut, the target item's `artifactType` is resolved by
    looking the item up in its source workspace; targets that live in the
    exporting workspace itself get the zero UUID as `workspaceId`.

    Args:
        workspace (str): The name or ID of the workspace.
        lakehouse (str): The name or ID of the lakehouse.

    Returns:
        (Union[List[Dict[str, Any]], None]): The enriched shortcut dicts,
        or None when the lakehouse has no shortcuts.
    """
    shortcuts_list = list_shortcuts(workspace, lakehouse, df=False)

    # Treat both "no shortcuts" and a missing response the same way.
    if not shortcuts_list:
        return None

    # Hoisted out of the loop: the exporting workspace id never changes.
    workspace_id = resolve_workspace(workspace)

    shortcuts_list_new = []

    for shortcut_dict in shortcuts_list:
        shortcut_target = shortcut_dict['target']
        # The API reports the target type PascalCased (e.g. 'OneLake'),
        # while the payload key is camelCased (e.g. 'oneLake').
        shortcut_target_type = (
            shortcut_target['type'][0].lower() + shortcut_target['type'][1:]
        )
        target_body = shortcut_target[shortcut_target_type]
        shortcut_target_workspace_id = target_body['workspaceId']
        shortcut_target_item_id = target_body['itemId']

        # Resolve the target item's type; default to '' when the item
        # cannot be found (the original left the variable unbound).
        shortcut_target_item_type = ''
        workspace_items = list_items(shortcut_target_workspace_id, df=False)
        if workspace_items:
            for item in workspace_items:
                if item['id'] == shortcut_target_item_id:
                    shortcut_target_item_type = item['type']
                    break

        # Targets inside the exporting workspace use the zero UUID.
        if shortcut_target_workspace_id == workspace_id:
            shortcut_target_workspace_id = (
                '00000000-0000-0000-0000-000000000000'
            )

        # Write through the resolved target type instead of a hard-coded
        # 'oneLake' key, so non-OneLake targets do not raise KeyError.
        target_body['artifactType'] = shortcut_target_item_type
        target_body['workspaceId'] = shortcut_target_workspace_id

        shortcuts_list_new.append(shortcut_dict)

    return shortcuts_list_new


def save_lakehouse_shortcuts_metadata(
    shortcuts_metadata: Dict[str, Any], path: str
) -> None:
    """
    Write `shortcuts.metadata.json` into the lakehouse directory.

    Args:
        shortcuts_metadata (Dict[str, Any]): The shortcuts metadata.
        path (str): The lakehouse directory path to save to.
    """
    with open(Path(path) / 'shortcuts.metadata.json', 'w') as f:
        json.dump(shortcuts_metadata, f, indent=2)


def _export_lakehouse_item(
    workspace_id: str,
    item: Dict[str, Any],
    path: Union[str, Path],
) -> None:
    """
    Write the on-disk representation of a single lakehouse item.

    Shared by `export_lakehouse` and `export_all_lakehouses`.
    """
    try:
        folder_path = resolve_folder_from_id_to_path(
            workspace_id, item['folderId']
        )
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        logger.info(f'{item["displayName"]}.Lakehouse is not inside a folder.')
        folder_path = None

    if folder_path is None:
        item_path = Path(path) / (item['displayName'] + '.Lakehouse')
    else:
        item_path = (
            Path(path) / folder_path / (item['displayName'] + '.Lakehouse')
        )
    os.makedirs(item_path, exist_ok=True)

    platform = _generate_lakehouse_platform(
        display_name=item['displayName'],
        description=item['description'],
    )
    _save_lakehouse_platform(platform, item_path)

    _save_lakehouse_metadata_json(item_path)

    shortcuts = generate_lakehouse_shortcuts_metadata(workspace_id, item['id'])
    # Skip the file entirely when there are no shortcuts, instead of
    # serializing a literal `null`.
    if shortcuts is not None:
        save_lakehouse_shortcuts_metadata(shortcuts, item_path)


def export_lakehouse(
    workspace: str,
    lakehouse: str,
    path: Union[str, Path],
) -> None:
    """
    Export a lakehouse to path.

    Args:
        workspace (str): The name or ID of the workspace.
        lakehouse (str): The name or ID of the lakehouse.
        path (Union[str, Path]): The path to export to.
    """
    workspace_id = resolve_workspace(workspace)
    if workspace_id is None:
        return None

    item = get_lakehouse(workspace_id, lakehouse, df=False)
    if item is None:
        return None

    _export_lakehouse_item(workspace_id, item, path)

    logger.success(
        f'Lakehouse `{lakehouse}` from workspace `{workspace}` was exported to `{path}` successfully.'
    )
    return None


def export_all_lakehouses(workspace: str, path: Union[str, Path]) -> None:
    """
    Export all valid lakehouses from the workspace to path.

    Args:
        workspace (str): The name or ID of the workspace.
        path (Union[str, Path]): The path to export to.
    """
    workspace_id = resolve_workspace(workspace)
    if workspace_id is None:
        return None

    items = list_valid_lakehouses(workspace_id, df=False)
    if items is None:
        return None

    for item in items:
        _export_lakehouse_item(workspace_id, item, path)

    logger.success(f'All lakehouses exported to {path} successfully.')
    return None
def _save_warehouse_sqlproj(
    display_name: str,
    path: Union[Path, str],
) -> None:
    """
    Create a dummy warehouse `.sqlproj` file.

    Args:
        display_name (str): The warehouse display name; used both inside
            the project template and as the file name.
        path (Union[Path, str]): Directory the `.sqlproj` is written to.
    """
    # NOTE(review): the SQL project template below appears to have lost its
    # XML element tags in this view -- verify against the repository source.
    WAREHOUSE_SQL_PROJECT = r"""


  {warehouse_display_name}
  Microsoft.Data.Tools.Schema.Sql.SqlDwUnifiedDatabaseSchemaProvider
  Latin1_General_100_BIN2_UTF8




"""

    # Substitute the display name into the template placeholder.
    sql_project = WAREHOUSE_SQL_PROJECT.format(
        warehouse_display_name=display_name
    )

    with open(Path(path) / f'{display_name}.sqlproj', 'w') as f:
        f.write(sql_project)

    logger.success(
        f'{display_name}.sqlproject has been created in {path} successfully.'
    )


def _save_warehouse_defaultsemanticmodel_txt(
    path: Union[Path, str],
) -> None:
    """
    Create a `DefaultSemanticModel.txt` in `Warehouse` path.

    Args:
        path (Union[Path, str]): The warehouse directory path.
    """
    # Marker file: the content is a fixed sentinel string.
    with open(Path(path) / 'DefaultSemanticModel.txt', 'w') as f:
        f.write('Has default semantic model')

    logger.success(
        f'DefaultSemanticModel.txt was created in {path} successfully.'
    )


def _save_warehouse_xmla_json(
    path: Union[Path, str],
) -> None:
    """
    Create a dummy `xmla.json` on `Warehouse` path.

    The `{{Dataset_Name}}` and `{{TDS_Endpoint}}` placeholders are left
    verbatim in the output, to be substituted later by deployment tooling.

    Args:
        path (Union[Path, str]): The warehouse directory path.
    """
    WAREHOUSE_XMLA_JSON = {
        'name': '{{Dataset_Name}}',
        'compatibilityLevel': 1604,
        'model': {
            'name': '{{Dataset_Name}}',
            'culture': 'en-US',
            'collation': 'Latin1_General_100_BIN2_UTF8',
            'dataAccessOptions': {
                'legacyRedirects': True,
                'returnErrorValuesAsNull': True,
            },
            'defaultPowerBIDataSourceVersion': 'powerBI_V3',
            'sourceQueryCulture': 'en-US',
            'expressions': [
                {
                    'name': 'DatabaseQuery',
                    'kind': 'm',
                    'expression': 'let\n    database = {{TDS_Endpoint}}\nin\n    database\n',
                }
            ],
            'annotations': [
                {'name': '__PBI_TimeIntelligenceEnabled', 'value': '0'},
                {
                    'name': 'SourceLineageTagType',
                    'value': 'DatabaseFullyQualifiedName',
                },
            ],
        },
    }
    with open(Path(path) / 'xmla.json', 'w') as f:
        json.dump(WAREHOUSE_XMLA_JSON, f, indent=2)

    logger.success(f'xmla.json was created in {path} successfully.')


def _generate_warehouse_platform(
    display_name: str,
    description: Optional[str] = '',
) -> Dict[str, Any]:
    """
    Generate the warehouse .platform file

    Args:
        display_name (str): The warehouse display name.
        description (str): The warehouse's description.

    Returns:
        (Dict[str, Any]): The .platform dict.
    """
    return {
        '$schema': PLATFORM_SCHEMA,
        'metadata': {
            'type': 'Warehouse',
            'displayName': display_name,
            'description': description,
        },
        'config': {
            'version': PLATFORM_VERSION,
            # Placeholder logical id; presumably replaced by Fabric Git
            # integration on import -- TODO confirm.
            'logicalId': '00000000-0000-0000-0000-000000000000',
        },
    }


def _save_warehouse_platform(
    platform: Dict[str, Any],
    path: str,
) -> None:
    """
    Save the warehouses's .platform in path

    Args:
        platform (Dict[str, Any]): The .platform dict.
        path (str): The warehouse directory path to save to.
    """
    with open(Path(path) / '.platform', 'w') as f:
        json.dump(platform, f, indent=2)
def get_warehouse_config(
    workspace: str, warehouse: str
) -> Union[Dict[str, Any], None]:
    """
    Get a specific warehouse config from a workspace.

    Args:
        workspace (str): The name or ID of the workspace.
        warehouse (str): The name or ID of the warehouse.

    Returns:
        (Union[Dict[str, Any], None]): The flat config dict for the
        warehouse, or None when the warehouse is not found.
    """
    item_data = get_warehouse(workspace, warehouse, df=False)

    if item_data is None:
        return None

    # The original built a dict nested under the display name via a chained
    # assignment and immediately overwrote it; that dead code is removed.
    folder_id = item_data.get('folderId')

    return {
        'id': item_data['id'],
        'description': item_data.get('description', None),
        # Normalize a missing/NaN folder id to an empty string.
        'folder_id': ''
        if folder_id is None or pd.isna(folder_id)
        else folder_id,
        'connection_string': item_data['properties']['connectionString'],
    }


def get_all_warehouses_config(workspace: str) -> Union[Dict[str, Any], None]:
    """
    Generate warehouses config from a workspace.

    Args:
        workspace (str): The name or ID of the workspace.

    Returns:
        (Union[Dict[str, Any], None]): Config dicts keyed by warehouse
        display name, or None when no valid warehouses exist.
    """
    items = list_valid_warehouses(workspace, df=False)

    if items is None:
        return None

    config = {}

    for item in items:

        item_data = get_warehouse(workspace, item['id'], df=False)
        # Skip warehouses whose details could not be retrieved.
        if item_data is None:
            continue

        folder_id = item.get('folderId')
        config[item['displayName']] = {
            'id': item['id'],
            'description': item.get('description', None),
            # Normalize a missing/NaN folder id to an empty string.
            'folder_id': ''
            if folder_id is None or pd.isna(folder_id)
            else folder_id,
            'connection_string': item_data['properties']['connectionString'],
        }

    return config


@df
def list_valid_warehouses(
    workspace: str,
    df: Optional[bool] = True,
) -> Union[DataFrame, List[Dict[str, Any]], None]:
    """
    Generate a list of valid warehouses from a workspace.

    Warehouses whose display name contains 'staging' (case-insensitive)
    are excluded.

    Args:
        workspace (str): The name or ID of the workspace.
        df (Optional[bool]): If True or not provided, returns a DataFrame.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, List[Dict[str, Any]], None]): The valid
        warehouses of the workspace, or None if none could be listed.
    """
    items = list_warehouses(workspace)

    if items is None:
        return None

    return items[
        ~items['displayName'].str.contains('staging', case=False, na=False)
    ].to_dict(orient='records')


def export_warehouse(
    workspace: str,
    warehouse: str,
    path: Union[str, Path],
) -> None:
    """
    Export a warehouse to path

    Args:
        workspace (str): The name or ID of the workspace.
        warehouse (str): The name or ID of the warehouse.
        path (Union[str, Path]): The path to export to.
    """
    workspace_id = resolve_workspace(workspace)
    if workspace_id is None:
        return None

    # df=False: the item is consumed as a dict below. The original passed
    # df=True, which returns a DataFrame and breaks the key lookups.
    item = get_warehouse(workspace_id, warehouse, df=False)
    if item is None:
        return None

    try:
        folder_path = resolve_folder_from_id_to_path(
            workspace_id, item['folderId']
        )
    # Narrowed from a bare `except:`.
    except Exception:
        logger.info(f'{item["displayName"]}.Warehouse is not inside a folder.')
        folder_path = None

    if folder_path is None:
        item_path = Path(path) / (item['displayName'] + '.Warehouse')
    else:
        item_path = (
            Path(path) / folder_path / (item['displayName'] + '.Warehouse')
        )
    os.makedirs(item_path, exist_ok=True)

    platform = _generate_warehouse_platform(
        display_name=item['displayName'],
        description=item['description'],
    )

    _save_warehouse_platform(platform, item_path)

    _save_warehouse_defaultsemanticmodel_txt(item_path)

    _save_warehouse_sqlproj(item['displayName'], item_path)

    _save_warehouse_xmla_json(item_path)

    # Message corrected: this exports a single warehouse, not all of them.
    logger.success(
        f'Warehouse `{warehouse}` from workspace `{workspace}` was exported to `{path}` successfully.'
    )
    return None
def export_all_warehouses(workspace: str, path: Union[str, Path]) -> None:
    """
    Exports all warehouses from the workspace to path.

    Args:
        workspace (str): The ID or name of the workspace.
        path (Union[str, Path]): The path to export to.
    """
    workspace_id = resolve_workspace(workspace)
    if workspace_id is None:
        return None

    items = list_valid_warehouses(workspace_id, df=False)
    if items is None:
        return None

    for item in items:
        try:
            folder_path = resolve_folder_from_id_to_path(
                workspace_id, item['folderId']
            )
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        except Exception:
            logger.info(
                f'{item["displayName"]}.Warehouse is not inside a folder.'
            )
            folder_path = None

        if folder_path is None:
            item_path = Path(path) / (item['displayName'] + '.Warehouse')
        else:
            item_path = (
                Path(path) / folder_path / (item['displayName'] + '.Warehouse')
            )
        os.makedirs(item_path, exist_ok=True)

        platform = _generate_warehouse_platform(
            display_name=item['displayName'],
            description=item['description'],
        )

        _save_warehouse_platform(platform, item_path)

        _save_warehouse_defaultsemanticmodel_txt(item_path)

        _save_warehouse_sqlproj(item['displayName'], item_path)

        _save_warehouse_xmla_json(item_path)

    logger.success(f'All warehouses exported to {path} successfully.')
    return None


def get_workspace_config(
    workspace: str,
) -> Dict[str, Any]:
    """
    Retrieves the workspace details for a given workspace.

    Args:
        workspace (str): The ID or name of the workspace to retrieve configuration for.

    Returns:
        dict: A dictionary containing the workspace details, including workspace ID, name, description, capacity ID, region, and roles.

    Raises:
        ResourceNotFoundError: If the workspace cannot be found.

    Examples:
        ```python
        get_workspace_config('123e4567-e89b-12d3-a456-426614174000')
        get_workspace_config('MyProject')
        ```
    """
    # Retrieving details from the workspace
    workspace_details = get_workspace(workspace, df=False)
    if not workspace_details:
        raise ResourceNotFoundError(f'Workspace {workspace} not found.')

    workspace_name = workspace_details.get('displayName', '')
    workspace_id = workspace_details.get('id', '')
    workspace_description = workspace_details.get('description', '')
    capacity_id = workspace_details.get('capacityId', '')
    capacity_region = workspace_details.get('capacityRegion', '')

    # Retrieve the role assignments; tolerate a missing/empty response
    # instead of raising TypeError on iteration.
    roles_details = list_workspace_role_assignments(workspace_id, df=False)

    roles = []

    for role in roles_details or []:
        principal_type = role['principal']['type']
        # NOTE(review): 'user_uuid' is populated from the role-assignment
        # id, not the principal id -- confirm this is intentional.
        role_entry = {
            'user_uuid': role['id'],
            'user_type': principal_type,
            'role': role['role'],
            'display_name': role['principal'].get('displayName', ''),
        }

        # Enrich the entry with principal-type-specific details.
        if principal_type == 'Group':
            group_details = role['principal'].get('groupDetails', {})
            role_entry['group_type'] = group_details.get('groupType', '')
            role_entry['email'] = group_details.get('email', '')
        elif principal_type == 'User':
            user_details = role['principal'].get('userDetails', {})
            role_entry['user_principal_name'] = user_details.get(
                'userPrincipalName', ''
            )
        elif principal_type == 'ServicePrincipal':
            spn_details = role['principal'].get('servicePrincipalDetails', {})
            role_entry['app_id'] = spn_details.get('aadAppId', '')

        roles.append(role_entry)

    return {
        'workspace_id': workspace_id,
        'workspace_name': workspace_name,
        'workspace_description': workspace_description,
        'capacity_id': capacity_id,
        'capacity_region': capacity_region,
        'workspace_roles': roles,
    }
workspace_name 74 | workspace_config['workspace_description'] = workspace_description 75 | workspace_config['capacity_id'] = capacity_id 76 | workspace_config['capacity_region'] = capacity_region 77 | workspace_config['workspace_roles'] = roles 78 | 79 | return workspace_config 80 | -------------------------------------------------------------------------------- /src/pyfabricops/items/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alisonpezzott/pyfabricops/d20a5d57b4f84246973b4c0c6bfa35c66dd6a1e0/src/pyfabricops/items/__init__.py -------------------------------------------------------------------------------- /src/pyfabricops/items/dataflows_gen2.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List, Optional, Union 2 | 3 | from pandas import DataFrame 4 | 5 | from ..api.api import api_request 6 | from ..core.folders import resolve_folder 7 | from ..core.workspaces import resolve_workspace 8 | from ..utils.decorators import df 9 | from ..utils.logging import get_logger 10 | from ..utils.utils import is_valid_uuid 11 | 12 | logger = get_logger(__name__) 13 | 14 | 15 | @df 16 | def list_dataflows_gen2( 17 | workspace: str, 18 | *, 19 | df: Optional[bool] = True, 20 | ) -> Union[DataFrame, List[Dict[str, Any]], None]: 21 | """ 22 | Lists all dataflows in a workspace. 23 | 24 | Args: 25 | workspace (str): The workspace name or ID. 26 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 27 | If False, returns a list of dictionaries. 28 | 29 | Returns: 30 | list | pandas.DataFrame | None: A list of dataflows if successful, otherwise None. 
31 | """ 32 | workspace_id = resolve_workspace(workspace) 33 | 34 | return api_request( 35 | endpoint='/workspaces/' + workspace_id + '/dataflows', 36 | support_pagination=True, 37 | ) 38 | 39 | 40 | def get_dataflow_gen2_id( 41 | workspace: str, dataflow_gen2_name: str 42 | ) -> Union[str, None]: 43 | """ 44 | Retrieves the ID of a dataflow by its name. 45 | 46 | Args: 47 | dataflow_gen2_name (str): The name of the dataflow. 48 | 49 | Returns: 50 | (Union[str, None]): The ID of the dataflow if found, otherwise None. 51 | """ 52 | dataflows = list_dataflows_gen2( 53 | workspace=resolve_workspace(workspace), 54 | df=False, 55 | ) 56 | 57 | for _dataflow in dataflows: 58 | if _dataflow['displayName'] == dataflow_gen2_name: 59 | return _dataflow['id'] 60 | logger.warning( 61 | f"Dataflow '{dataflow_gen2_name}' not found in workspace '{workspace}'." 62 | ) 63 | return None 64 | 65 | 66 | def resolve_dataflow_gen2( 67 | workspace: str, 68 | dataflow: str, 69 | ) -> Union[str, None]: 70 | """ 71 | Resolves a dataflow name to its ID. 72 | 73 | Args: 74 | workspace (str): The ID of the workspace. 75 | dataflow (str): The name of the dataflow. 76 | 77 | Returns: 78 | (Union[str, None]): The ID of the dataflow, or None if not found. 79 | 80 | Examples: 81 | ```python 82 | resolve_dataflow('MyProjectWorkspace', 'SalesDataflow') 83 | resolve_dataflow('123e4567-e89b-12d3-a456-426614174000', 'SalesDataflow') 84 | ``` 85 | """ 86 | if is_valid_uuid(dataflow): 87 | return dataflow 88 | else: 89 | return get_dataflow_gen2_id(resolve_workspace(workspace), dataflow) 90 | 91 | 92 | @df 93 | def get_dataflow_gen2( 94 | workspace: str, 95 | dataflow: str, 96 | *, 97 | df: Optional[bool] = True, 98 | ) -> Union[DataFrame, Dict[str, Any], None]: 99 | """ 100 | Gets a dataflow by its name or ID. 101 | 102 | Args: 103 | workspace (str): The workspace name or ID. 104 | dataflow (str): The name or ID of the dataflow. 
105 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 106 | If False, returns a list of dictionaries. 107 | 108 | Returns: 109 | (Union[DataFrame, Dict[str, Any], None]): The dataflow details if found, otherwise None. 110 | 111 | Examples: 112 | ```python 113 | get_dataflow('MyProjectWorkspace', 'SalesDataflow') 114 | get_dataflow('123e4567-e89b-12d3-a456-426614174000', 'SalesDataflow') 115 | ``` 116 | """ 117 | workspace_id = resolve_workspace(workspace) 118 | 119 | dataflow_id = resolve_dataflow_gen2(workspace_id, dataflow) 120 | 121 | return api_request( 122 | endpoint='/workspaces/' + workspace_id + '/dataflows/' + dataflow_id, 123 | ) 124 | 125 | 126 | @df 127 | def update_dataflow_gen2( 128 | workspace: str, 129 | dataflow: str, 130 | *, 131 | display_name: str = None, 132 | description: str = None, 133 | df: Optional[bool] = True, 134 | ) -> Union[DataFrame, Dict[str, Any], None]: 135 | """ 136 | Updates the properties of the specified dataflow. 137 | 138 | Args: 139 | workspace (str): The workspace name or ID. 140 | dataflow (str): The name or ID of the dataflow to update. 141 | display_name (str, optional): The new display name for the dataflow. 142 | description (str, optional): The new description for the dataflow. 143 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 144 | If False, returns a list of dictionaries. 145 | 146 | Returns: 147 | (Union[DataFrame, Dict[str, Any], None]): The updated dataflow details if successful, otherwise None. 
def delete_dataflow_gen2(workspace: str, dataflow: str) -> None:
    """
    Delete a dataflow from the specified workspace.

    Args:
        workspace (str): The name or ID of the workspace containing the dataflow.
        dataflow (str): The name or ID of the dataflow to delete.

    Returns:
        None: If the dataflow is successfully deleted.

    Raises:
        ResourceNotFoundError: If the specified workspace is not found.

    Examples:
        ```python
        delete_dataflow_gen2('MyProjectWorkspace', 'SalesDataflow')
        delete_dataflow_gen2('123e4567-e89b-12d3-a456-426614174000', 'SalesDataflow')
        ```
    """
    workspace_id = resolve_workspace(workspace)

    dataflow_id = resolve_dataflow_gen2(workspace_id, dataflow)
    # Avoid concatenating None into the endpoint (raises TypeError).
    if dataflow_id is None:
        return None

    return api_request(
        endpoint='/workspaces/' + workspace_id + '/dataflows/' + dataflow_id,
        method='delete',
    )


def get_dataflow_gen2_definition(
    workspace: str, dataflow: str
) -> Union[Dict[str, Any], None]:
    """
    Retrieves the definition of a dataflow by its name or ID from the specified workspace.

    Args:
        workspace (str): The workspace name or ID.
        dataflow (str): The name or ID of the dataflow.

    Returns:
        (Union[Dict[str, Any], None]): The dataflow definition if found, otherwise None.

    Examples:
        ```python
        get_dataflow_gen2_definition('MyProjectWorkspace', 'Salesdataflow')
        get_dataflow_gen2_definition('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000')
        ```
    """
    # Resolving IDs
    workspace_id = resolve_workspace(workspace)

    dataflow_id = resolve_dataflow_gen2(workspace_id, dataflow)
    if dataflow_id is None:
        return None

    # getDefinition is a long-running POST action.
    return api_request(
        endpoint='/workspaces/'
        + workspace_id
        + '/dataflows/'
        + dataflow_id
        + '/getDefinition',
        method='post',
        support_lro=True,
    )


@df
def update_dataflow_gen2_definition(
    workspace: str,
    dataflow: str,
    item_definition: Dict[str, Any],
    df: Optional[bool] = True,
) -> Union[Dict[str, Any], None]:
    """
    Updates the definition of an existing dataflow in the specified workspace.
    If the dataflow does not exist, it returns None.

    Args:
        workspace (str): The workspace name or ID.
        dataflow (str): The name or ID of the dataflow to update.
        item_definition (Dict[str, Any]): The updated item definition.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[Dict[str, Any], None]): The updated dataflow details if successful, otherwise None.

    Examples:
        ```python
        update_dataflow_gen2_definition('MyProjectWorkspace', 'SalesDataflow', item_definition)
        ```
    """
    workspace_id = resolve_workspace(workspace)

    dataflow_id = resolve_dataflow_gen2(workspace_id, dataflow)
    if dataflow_id is None:
        return None

    payload = {'definition': item_definition}

    params = {'updateMetadata': True}

    return api_request(
        endpoint='/workspaces/'
        + workspace_id
        + '/dataflows/'
        + dataflow_id
        + '/updateDefinition',
        # The Fabric REST API exposes updateDefinition as a POST action;
        # the original issued PATCH.
        method='post',
        payload=payload,
        params=params,
        support_lro=True,
    )


@df
def create_dataflow_gen2(
    workspace: str,
    display_name: str,
    item_definition: Dict[str, Any],
    *,
    description: Optional[str] = None,
    folder: Optional[str] = None,
    df: Optional[bool] = True,
) -> Union[dict, None]:
    """
    Creates a new dataflow in the specified workspace.

    Args:
        workspace (str): The workspace name or ID.
        display_name (str): The display name of the dataflow.
        item_definition (Dict[str, Any]): The definition of the dataflow.
        description (str, optional): A description for the dataflow.
        folder (str, optional): The folder to create the dataflow in.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (dict): The created dataflow details.

    Examples:
        ```python
        create_dataflow_gen2('MyProjectWorkspace', 'SalesDataModel', item_definition)
        create_dataflow_gen2('MyProjectWorkspace', 'SalesDataModel', item_definition, description='Sales data model')
        ```
    """
    workspace_id = resolve_workspace(workspace)

    payload = {'displayName': display_name, 'definition': item_definition}

    if description:
        payload['description'] = description

    # Folder is optional; silently skip it when it cannot be resolved.
    if folder:
        folder_id = resolve_folder(workspace_id, folder)
        if folder_id:
            payload['folderId'] = folder_id

    return api_request(
        endpoint='/workspaces/' + workspace_id + '/dataflows',
        method='post',
        payload=payload,
        support_lro=True,
    )
@df
def list_lakehouses(
    workspace: str,
    *,
    df: Optional[bool] = True,
) -> Union[DataFrame, List[Dict[str, Any]], None]:
    """
    Returns a list of lakehouses from the specified workspace.
    This API supports pagination.

    Args:
        workspace (str): The workspace name or ID.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, List[Dict[str, Any]], None]): All lakehouses in the
        workspace. If `df=True`, returns a DataFrame with flattened keys.

    Examples:
        ```python
        list_lakehouses('MyProjectWorkspace')
        ```
    """
    return api_request(
        endpoint='/workspaces/' + resolve_workspace(workspace) + '/lakehouses',
        support_pagination=True,
    )


def get_lakehouse_id(workspace: str, lakehouse: str) -> Union[str, None]:
    """
    Retrieves the ID of a lakehouse by its name from the specified workspace.

    Args:
        workspace (str): The workspace name or ID.
        lakehouse (str): The name of the lakehouse.

    Returns:
        (Union[str, None]): The ID of the lakehouse, or None if not found.

    Examples:
        ```python
        get_lakehouse_id('MyProjectWorkspace', 'SalesDataLakehouse')
        ```
    """
    lakehouses = list_lakehouses(workspace, df=False)
    if not lakehouses:
        return None

    for lakehouse_ in lakehouses:
        if lakehouse_['displayName'] == lakehouse:
            return lakehouse_['id']

    logger.warning(
        f'Lakehouse {lakehouse} not found in workspace {workspace}.'
    )
    return None


def resolve_lakehouse(
    workspace: str,
    lakehouse: str,
) -> Union[str, None]:
    """
    Resolves a lakehouse name to its ID.

    Args:
        workspace (str): The ID of the workspace.
        lakehouse (str): The name of the lakehouse.

    Returns:
        (Union[str, None]): The ID of the lakehouse, or None if not found.

    Examples:
        ```python
        resolve_lakehouse('MyProjectWorkspace', 'SalesDataLakehouse')
        ```
    """
    # UUIDs are already IDs; only names need a lookup round-trip.
    if is_valid_uuid(lakehouse):
        return lakehouse
    return get_lakehouse_id(workspace, lakehouse)


@df
def get_lakehouse(
    workspace: str,
    lakehouse: str,
    *,
    df: Optional[bool] = True,
) -> Union[DataFrame, Dict[str, Any], None]:
    """
    Retrieves a lakehouse by its name or ID from the specified workspace.

    Args:
        workspace (str): The workspace name or ID.
        lakehouse (str): The name or ID of the lakehouse.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, Dict[str, Any], None]): The lakehouse details if found. If `df=True`, returns a DataFrame with flattened keys.

    Examples:
        ```python
        get_lakehouse('MyProjectWorkspace', 'SalesDataLakehouse')
        get_lakehouse('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000')
        get_lakehouse('123e4567-e89b-12d3-a456-426614174000', 'SalesDataLakehouse', df=True)
        ```
    """
    workspace_id = resolve_workspace(workspace)
    if not workspace_id:
        return None

    lakehouse_id = resolve_lakehouse(workspace_id, lakehouse)
    if not lakehouse_id:
        return None

    return api_request(
        endpoint='/workspaces/' + workspace_id + '/lakehouses/' + lakehouse_id,
    )


@df
def create_lakehouse(
    workspace: str,
    display_name: str,
    *,
    description: Optional[str] = None,
    folder: Optional[str] = None,
    enable_schemas: Optional[bool] = False,
    df: Optional[bool] = True,
) -> Union[DataFrame, Dict[str, Any], None]:
    """
    Create a lakehouse in the specified workspace.

    Args:
        workspace (str): The workspace name or ID.
        display_name (str): The display name for the lakehouse.
        description (Optional[str]): The description for the lakehouse.
        folder (Optional[str]): The folder to create the lakehouse in.
        enable_schemas (Optional[bool]): Whether to enable schemas for the lakehouse.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, Dict[str, Any], None]): The created lakehouse details if successful, otherwise None.

    Examples:
        ```python
        create_lakehouse('MyProjectWorkspace', 'SalesDataLakehouse')
        create_lakehouse('MyProjectWorkspace', 'SalesDataLakehouse', description='Sales data lakehouse')
        ```
    """
    workspace_id = resolve_workspace(workspace)

    payload = {'displayName': display_name}

    if description:
        payload['description'] = description

    # Folder is optional; silently skip it when it cannot be resolved.
    if folder:
        folder_id = resolve_folder(workspace_id, folder)
        if folder_id:
            payload['folderId'] = folder_id

    if enable_schemas:
        payload['creationPayload'] = {'enableSchemas': True}

    return api_request(
        endpoint='/workspaces/' + workspace_id + '/lakehouses',
        method='post',
        payload=payload,
    )


@df
def update_lakehouse(
    workspace: str,
    lakehouse: str,
    *,
    display_name: Optional[str] = None,
    description: Optional[str] = None,
    df: Optional[bool] = True,
) -> Union[DataFrame, Dict[str, Any], None]:
    """
    Updates the properties of the specified lakehouse.

    Args:
        workspace (str): The workspace name or ID.
        lakehouse (str): The name or ID of the lakehouse to update.
        display_name (Optional[str]): The new display name for the lakehouse.
        description (Optional[str]): The new description for the lakehouse.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, Dict[str, Any], None]): The updated lakehouse details if successful, otherwise None.

    Examples:
        ```python
        update_lakehouse('MyProjectWorkspace', 'SalesDataLakehouse', display_name='UpdatedSalesDataLakehouse')
        update_lakehouse('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000', description='Updated description')
        ```
    """
    workspace_id = resolve_workspace(workspace)
    lakehouse_id = resolve_lakehouse(workspace_id, lakehouse)
    # Guard added for consistency with get_lakehouse: avoid concatenating
    # None into the endpoint below.
    if not lakehouse_id:
        return None

    # Only send the fields the caller actually wants to change.
    payload = {}

    if display_name:
        payload['displayName'] = display_name

    if description:
        payload['description'] = description

    return api_request(
        endpoint='/workspaces/' + workspace_id + '/lakehouses/' + lakehouse_id,
        method='patch',
        payload=payload,
    )


def delete_lakehouse(workspace: str, lakehouse: str) -> None:
    """
    Delete a lakehouse in the specified workspace.

    Args:
        workspace (str): The workspace name or ID.
        lakehouse (str): The name or ID of the lakehouse to delete.

    Returns:
        None: The API response of the delete request; None when the
        lakehouse cannot be resolved.

    Examples:
        ```python
        delete_lakehouse('MyProjectWorkspace', 'SalesDataLakehouse')
        delete_lakehouse('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000')
        ```
    """
    workspace_id = resolve_workspace(workspace)
    lakehouse_id = resolve_lakehouse(workspace_id, lakehouse)
    # Guard added for consistency with get_lakehouse.
    if not lakehouse_id:
        return None
    return api_request(
        endpoint='/workspaces/' + workspace_id + '/lakehouses/' + lakehouse_id,
        method='delete',
    )
249 | 250 | Examples: 251 | ```python 252 | delete_lakehouse('MyProjectWorkspace', 'SalesDataLakehouse') 253 | delete_lakehouse('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000') 254 | ``` 255 | """ 256 | workspace_id = resolve_workspace(workspace) 257 | lakehouse_id = resolve_lakehouse(workspace_id, lakehouse) 258 | return api_request( 259 | endpoint='/workspaces/' + workspace_id + '/lakehouses/' + lakehouse_id, 260 | method='delete', 261 | ) 262 | -------------------------------------------------------------------------------- /src/pyfabricops/items/notebooks.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List, Optional, Union 2 | 3 | from pandas import DataFrame 4 | 5 | from ..api.api import api_request 6 | from ..core.folders import resolve_folder 7 | from ..core.workspaces import resolve_workspace 8 | from ..utils.decorators import df 9 | from ..utils.logging import get_logger 10 | from ..utils.utils import is_valid_uuid 11 | 12 | logger = get_logger(__name__) 13 | 14 | 15 | @df 16 | def list_notebooks( 17 | workspace: str, 18 | *, 19 | df: Optional[bool] = True, 20 | ) -> Union[DataFrame, List[Dict[str, Any]], None]: 21 | """ 22 | Lists all notebooks in the specified workspace. 23 | 24 | Args: 25 | workspace (str): The workspace name or ID. 26 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 27 | If False, returns a list of dictionaries. 28 | 29 | Returns: 30 | (Union[DataFrame, List[Dict[str, Any]], None]): A list of notebooks, a DataFrame with flattened keys, or None if not found. 
31 | 32 | Examples: 33 | ```python 34 | list_notebooks('MyProjectWorkspace') 35 | list_notebooks('MyProjectWorkspace', df=True) 36 | ``` 37 | """ 38 | return api_request( 39 | endpoint='/workspaces/' + resolve_workspace(workspace) + '/notebooks', 40 | support_pagination=True, 41 | ) 42 | 43 | 44 | def get_notebook_id(workspace: str, notebook: str) -> Union[str, None]: 45 | """ 46 | Retrieves the ID of a notebook by its name or ID from the specified workspace. 47 | 48 | Args: 49 | workspace (str): The workspace name or ID. 50 | notebook (str): The name or ID of the notebook. 51 | 52 | Returns: 53 | (Union[str, None]): The ID of the notebook if found, otherwise None. 54 | 55 | Examples: 56 | ```python 57 | get_notebook_id('MyProjectWorkspace', 'SalesDataNotebook') 58 | get_notebook_id('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000') 59 | ``` 60 | """ 61 | notebooks = list_notebooks(workspace, df=False) 62 | for nb in notebooks: 63 | if nb['displayName'] == notebook or nb['id'] == notebook: 64 | return nb['id'] 65 | return None 66 | 67 | 68 | def resolve_notebook( 69 | workspace: str, 70 | notebook: str, 71 | ) -> Union[str, None]: 72 | """ 73 | Resolves a notebook name or ID to its ID in the specified workspace. 74 | 75 | Args: 76 | workspace (str): The workspace name or ID. 77 | notebook (str): The name or ID of the notebook. 78 | silent (bool): If True, suppresses warnings. Defaults to False. 79 | 80 | Returns: 81 | Optional[str]: The ID of the notebook if found, otherwise None. 
82 | 83 | Examples: 84 | ```python 85 | resolve_notebook('MyProjectWorkspace', 'SalesDataNotebook') 86 | resolve_notebook('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000') 87 | ``` 88 | """ 89 | if is_valid_uuid(notebook): 90 | return notebook 91 | else: 92 | return get_notebook_id(workspace, notebook) 93 | 94 | 95 | @df 96 | def get_notebook( 97 | workspace: str, 98 | notebook: str, 99 | *, 100 | df: Optional[bool] = True, 101 | ) -> Union[DataFrame, Dict[str, Any], None]: 102 | """ 103 | Retrieves a notebook by its name or ID from the specified workspace. 104 | 105 | Args: 106 | workspace (str): The workspace name or ID. 107 | notebook (str): The name or ID of the notebook. 108 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 109 | If False, returns a list of dictionaries. 110 | 111 | Returns: 112 | (Union[DataFrame, Dict[str, Any], None]): The notebook details if found. If `df=True`, returns a DataFrame with flattened keys. 113 | 114 | Examples: 115 | ```python 116 | get_notebook('MyProjectWorkspace', 'SalesDataNotebook') 117 | get_notebook('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000', df=True) 118 | ``` 119 | """ 120 | workspace_id = resolve_workspace(workspace) 121 | 122 | notebook_id = resolve_notebook(workspace_id, notebook) 123 | 124 | return api_request( 125 | endpoint='/workspaces/' + workspace_id + '/notebooks/' + notebook_id, 126 | ) 127 | 128 | 129 | @df 130 | def update_notebook( 131 | workspace: str, 132 | notebook: str, 133 | *, 134 | display_name: Optional[str] = None, 135 | description: Optional[str] = None, 136 | df: Optional[bool] = True, 137 | ) -> Union[DataFrame, Dict[str, Any], None]: 138 | """ 139 | Updates the properties of the specified notebook. 140 | 141 | Args: 142 | workspace (str): The workspace name or ID. 143 | notebook (str): The name or ID of the notebook to update. 144 | display_name (Optional[str]): The new display name for the notebook. 
145 | description (Optional[str]): The new description for the notebook. 146 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 147 | If False, returns a list of dictionaries. 148 | 149 | Returns: 150 | (Union[DataFrame, Dict[str, Any], None]): The updated notebook details if successful, otherwise None. 151 | 152 | Examples: 153 | ```python 154 | update_notebook('MyProjectWorkspace', 'SalesDataModel', display_name='UpdatedSalesDataModel') 155 | update_notebook('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000', description='Updated description') 156 | ``` 157 | """ 158 | workspace_id = resolve_workspace(workspace) 159 | 160 | notebook_id = resolve_notebook(workspace_id, notebook) 161 | 162 | payload = {} 163 | 164 | if display_name: 165 | payload['displayName'] = display_name 166 | 167 | if description: 168 | payload['description'] = description 169 | 170 | return api_request( 171 | endpoint='/workspaces/' + workspace_id + '/notebooks/' + notebook_id, 172 | method='patch', 173 | payload=payload, 174 | ) 175 | 176 | 177 | def delete_notebook(workspace: str, notebook: str) -> None: 178 | """ 179 | Delete a notebook from the specified workspace. 180 | 181 | Args: 182 | workspace (str): The name or ID of the workspace to delete. 183 | notebook (str): The name or ID of the notebook to delete. 184 | 185 | Returns: 186 | None: If the notebook is successfully deleted. 187 | 188 | Raises: 189 | ResourceNotFoundError: If the specified workspace is not found. 
190 | 191 | Examples: 192 | ```python 193 | delete_notebook('MyProjectWorkspace', 'SalesDataNotebook') 194 | delete_notebook('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000') 195 | ``` 196 | """ 197 | workspace_id = resolve_workspace(workspace) 198 | 199 | notebook_id = resolve_notebook(workspace_id, notebook) 200 | 201 | return api_request( 202 | endpoint='/workspaces/' + workspace_id + '/notebooks/' + notebook_id, 203 | method='delete', 204 | ) 205 | 206 | 207 | def get_notebook_definition( 208 | workspace: str, notebook: str 209 | ) -> Union[Dict[str, Any], None]: 210 | """ 211 | Retrieves the definition of a notebook by its name or ID from the specified workspace. 212 | 213 | Args: 214 | workspace (str): The workspace name or ID. 215 | notebook (str): The name or ID of the notebook. 216 | 217 | Returns: 218 | (Union[Dict[str, Any], None]): The notebook definition if found, otherwise None. 219 | 220 | Examples: 221 | ```python 222 | get_notebook_definition('MyProjectWorkspace', 'Salesnotebook') 223 | get_notebook_definition('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000') 224 | ``` 225 | """ 226 | workspace_id = resolve_workspace(workspace) 227 | 228 | notebook_id = resolve_notebook(workspace_id, notebook) 229 | 230 | return api_request( 231 | endpoint='/workspaces/' 232 | + workspace_id 233 | + '/notebooks/' 234 | + notebook_id 235 | + '/getDefinition', 236 | method='post', 237 | support_lro=True, 238 | ) 239 | 240 | 241 | @df 242 | def update_notebook_definition( 243 | workspace: str, 244 | notebook: str, 245 | item_definition: str, 246 | *, 247 | df: Optional[bool] = True, 248 | ) -> Union[DataFrame, Dict[str, Any], None]: 249 | """ 250 | Updates the definition of an existing notebook in the specified workspace. 251 | If the notebook does not exist, it returns None. 252 | 253 | Args: 254 | workspace (str): The workspace name or ID. 255 | notebook (str): The name or ID of the notebook to update. 
256 | path (str): The path to the notebook definition. 257 | 258 | Returns: 259 | (dict or None): The updated notebook details if successful, otherwise None. 260 | 261 | Examples: 262 | ```python 263 | update_notebook('MyProjectWorkspace', 'SalesDataModel', display_name='UpdatedSalesDataModel') 264 | update_notebook('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000', description='Updated description') 265 | ``` 266 | """ 267 | workspace_id = resolve_workspace(workspace) 268 | 269 | notebook_id = resolve_notebook(workspace_id, notebook) 270 | 271 | payload = {'definition': item_definition} 272 | 273 | params = {'updateMetadata': True} 274 | 275 | return api_request( 276 | endpoint='/workspaces/' 277 | + workspace_id 278 | + '/notebooks/' 279 | + notebook_id 280 | + '/updateDefinition', 281 | method='post', 282 | payload=payload, 283 | params=params, 284 | support_lro=True, 285 | ) 286 | 287 | 288 | @df 289 | def create_notebook( 290 | workspace: str, 291 | display_name: str, 292 | item_definition: str, 293 | *, 294 | description: Optional[str] = None, 295 | folder: Optional[str] = None, 296 | df: Optional[bool] = True, 297 | ) -> Union[DataFrame, Dict[str, Any], None]: 298 | """ 299 | Creates a new notebook in the specified workspace. 300 | 301 | Args: 302 | workspace (str): The workspace name or ID. 303 | display_name (str): The display name of the notebook. 304 | description (str, optional): A description for the notebook. 305 | folder (str, optional): The folder to create the notebook in. 306 | path (str): The path to the notebook definition file. 307 | 308 | Returns: 309 | (dict): The created notebook details. 
310 | 311 | Examples: 312 | ```python 313 | create_notebook('MyProjectWorkspace', 'SalesDataModel', 'path/to/definition.json') 314 | create_notebook('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000', 'path/to/definition.json') 315 | ``` 316 | """ 317 | workspace_id = resolve_workspace(workspace) 318 | 319 | payload = {'displayName': display_name, 'definition': item_definition} 320 | 321 | if description: 322 | payload['description'] = description 323 | 324 | if folder: 325 | folder_id = resolve_folder(workspace_id, folder) 326 | if folder_id: 327 | payload['folderId'] = folder_id 328 | 329 | return api_request( 330 | endpoint='/workspaces/' + workspace_id + '/notebooks', 331 | method='post', 332 | payload=payload, 333 | support_lro=True, 334 | ) 335 | -------------------------------------------------------------------------------- /src/pyfabricops/items/reports.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List, Optional, Union 2 | 3 | from pandas import DataFrame 4 | 5 | from ..api.api import api_request 6 | from ..core.folders import resolve_folder 7 | from ..core.workspaces import resolve_workspace 8 | from ..utils.decorators import df 9 | from ..utils.logging import get_logger 10 | from ..utils.utils import is_valid_uuid 11 | 12 | logger = get_logger(__name__) 13 | 14 | 15 | @df 16 | def list_reports( 17 | workspace: str, 18 | df: Optional[bool] = True, 19 | ) -> Union[DataFrame, List[Dict[str, Any]], None]: 20 | """ 21 | Returns a list of semantic models in a specified workspace. 22 | 23 | Args: 24 | workspace_id (str): The ID of the workspace. 25 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 26 | If False, returns a list of dictionaries. 27 | 28 | Returns: 29 | (Union[DataFrame, List[Dict[str, Any]], None]): A list of semantic models or a DataFrame if df is True. 
30 | """ 31 | return api_request( 32 | endpoint='/workspaces/' + resolve_workspace(workspace) + '/reports', 33 | support_pagination=True, 34 | ) 35 | 36 | 37 | def get_report_id(workspace: str, report_name: str) -> Union[str, None]: 38 | """ 39 | Retrieves the ID of a semantic model by its name from the specified workspace. 40 | 41 | Args: 42 | workspace (str): The workspace name or ID. 43 | report_name (str): The name of the semantic model. 44 | 45 | Returns: 46 | (Optional[str]): The ID of the semantic model if found, otherwise None. 47 | 48 | Examples: 49 | ```python 50 | get_report_id('123e4567-e89b-12d3-a456-426614174000', 'SalesDataModel') 51 | ``` 52 | """ 53 | reports = list_reports(workspace=resolve_workspace(workspace), df=False) 54 | for report in reports: 55 | if report.get('displayName') == report_name: 56 | return report.get('id') 57 | return None 58 | 59 | 60 | def resolve_report( 61 | workspace: str, 62 | report: str, 63 | ) -> Union[str, None]: 64 | if is_valid_uuid(report): 65 | return report 66 | else: 67 | return get_report_id(workspace, report) 68 | 69 | 70 | @df 71 | def get_report( 72 | workspace: str, report: str, *, df: Optional[bool] = True 73 | ) -> Union[DataFrame, Dict[str, Any], None]: 74 | """ 75 | Retrieves a semantic model by its name or ID from the specified workspace. 76 | 77 | Args: 78 | workspace_id (str): The workspace ID. 79 | report_id (str): The ID of the semantic model. 80 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 81 | If False, returns a list of dictionaries. 82 | 83 | Returns: 84 | (Union[DataFrame, Dict[str, Any], None]): The semantic model details if found. If `df=True`, returns a DataFrame with flattened keys. 
85 | 86 | Examples: 87 | ```python 88 | get_report('123e4567-e89b-12d3-a456-426614174000', '123e4567-e89b-12d3-a456-426614174000') 89 | ``` 90 | """ 91 | workspace_id = resolve_workspace(workspace) 92 | report_id = resolve_report(workspace, report) 93 | return api_request( 94 | endpoint='/workspaces/' + workspace_id + '/reports/' + report_id, 95 | ) 96 | 97 | 98 | @df 99 | def create_report( 100 | workspace: str, 101 | display_name: str, 102 | item_definition: Dict[str, Any], 103 | *, 104 | description: Optional[str] = None, 105 | folder: Optional[str] = None, 106 | df: Optional[bool] = True, 107 | ) -> Union[DataFrame, Dict[str, Any], None]: 108 | """ 109 | Creates a new semantic model in the specified workspace. 110 | 111 | Args: 112 | workspace (str): The workspace name or ID. 113 | display_name (str): The display name of the semantic model. 114 | item_definition (Dict[str, Any]): The definition of the semantic model. 115 | description (Optional[str]): A description for the semantic model. 116 | folder (Optional[str]): The ID of the folder to create the semantic model in. 117 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 118 | If False, returns a list of dictionaries. 119 | 120 | Returns: 121 | (Union[DataFrame, Dict[str, Any], None]): The created semantic model details. 
122 | 123 | Examples: 124 | ```python 125 | create_report( 126 | workspace_id='123e4567-e89b-12d3-a456-426614174000', 127 | display_name='SalesDataModel', 128 | item_definition= {}, # Definition dict of the semantic model 129 | description='A semantic model for sales data', 130 | folder_id='456e7890-e12b-34d5-a678-9012345678901', 131 | ) 132 | ``` 133 | """ 134 | workspace_id = resolve_workspace(workspace) 135 | 136 | payload = {'displayName': display_name, 'definition': item_definition} 137 | 138 | if description: 139 | payload['description'] = description 140 | 141 | if folder: 142 | folder_id = resolve_folder(folder, workspace_id=workspace_id) 143 | if folder_id: 144 | payload['folderId'] = folder_id 145 | 146 | return api_request( 147 | endpoint='/workspaces/' + workspace_id + '/reports', 148 | method='post', 149 | payload=payload, 150 | support_lro=True, 151 | ) 152 | 153 | 154 | @df 155 | def update_report( 156 | workspace: str, 157 | report: str, 158 | *, 159 | display_name: Optional[str] = None, 160 | description: Optional[str] = None, 161 | df: Optional[bool] = False, 162 | ) -> Union[DataFrame, Dict[str, Any], None]: 163 | """ 164 | Updates the properties of the specified semantic model. 165 | 166 | Args: 167 | workspace (str): The workspace name or ID. 168 | report (str): The ID of the semantic model to update. 169 | display_name (str, optional): The new display name for the semantic model. 170 | description (str, optional): The new description for the semantic model. 171 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 172 | If False, returns a list of dictionaries. 173 | 174 | Returns: 175 | (Union[DataFrame, Dict[str, Any], None]): The updated semantic model details if successful, otherwise None. 
176 | 177 | Examples: 178 | ```python 179 | update_report( 180 | workspace_id='123e4567-e89b-12d3-a456-426614174000', 181 | report_id='456e7890-e12b-34d5-a678-9012345678901', 182 | display_name='UpdatedDisplayName', 183 | description='Updated description' 184 | ) 185 | ``` 186 | """ 187 | workspace_id = resolve_workspace(workspace) 188 | report_id = resolve_report(workspace, report) 189 | 190 | payload = {} 191 | 192 | if display_name: 193 | payload['displayName'] = display_name 194 | 195 | if description: 196 | payload['description'] = description 197 | 198 | return api_request( 199 | endpoint='/workspaces/' + workspace_id + '/reports/' + report_id, 200 | method='patch', 201 | payload=payload, 202 | ) 203 | 204 | 205 | def delete_report(workspace: str, report: str) -> None: 206 | """ 207 | Delete a semantic model from the specified workspace. 208 | 209 | Args: 210 | workspace (str): The workspace name or ID. 211 | report (str): The name or ID of the semantic model to delete. 212 | 213 | Returns: 214 | None 215 | 216 | Examples: 217 | ```python 218 | delete_report('123e4567-e89b-12d3-a456-426614174000', '456e7890-e12b-34d5-a678-9012345678901') 219 | ``` 220 | """ 221 | workspace_id = resolve_workspace(workspace) 222 | report_id = resolve_report(workspace, report) 223 | 224 | return api_request( 225 | endpoint='/workspaces/' + workspace_id + '/reports/' + report_id, 226 | method='delete', 227 | ) 228 | 229 | 230 | def get_report_definition( 231 | workspace: str, report: str 232 | ) -> Union[Dict[str, Any], None]: 233 | """ 234 | Retrieves the definition of a semantic model by its name or ID from the specified workspace. 235 | 236 | Args: 237 | workspace (str): The workspace name or ID. 238 | report (str): The name or ID of the semantic model. 239 | 240 | Returns: 241 | ( Union[Dict[str, Any], None]): The semantic model definition if found, otherwise None. 
242 | 243 | Examples: 244 | ```python 245 | get_report_definition( 246 | workspace_id='123e4567-e89b-12d3-a456-426614174000', 247 | report_id='456e7890-e12b-34d5-a678-9012345678901', 248 | ) 249 | ``` 250 | """ 251 | workspace_id = resolve_workspace(workspace) 252 | 253 | report_id = resolve_report(workspace, report) 254 | 255 | return api_request( 256 | endpoint='/workspaces/' 257 | + workspace_id 258 | + '/reports/' 259 | + report_id 260 | + '/getDefinition', 261 | method='post', 262 | support_lro=True, 263 | ) 264 | 265 | 266 | @df 267 | def update_report_definition( 268 | workspace: str, 269 | report: str, 270 | item_definition: Dict[str, Any], 271 | *, 272 | df: Optional[bool] = True, 273 | ) -> Union[Dict[str, Any], None]: 274 | """ 275 | Updates the definition of an existing semantic model in the specified workspace. 276 | If the semantic model does not exist, it returns None. 277 | 278 | Args: 279 | workspace (str): The workspace name or ID. 280 | report (str): The name or ID of the semantic model to update. 281 | item_definition (Dict[str, Any]): The new definition for the semantic model. 282 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 283 | If False, returns a list of dictionaries. 284 | 285 | Returns: 286 | (Union[Dict[str, Any], None]): The updated semantic model details if successful, otherwise None. 
287 | 288 | Examples: 289 | ```python 290 | update_report( 291 | workspace_id='123e4567-e89b-12d3-a456-426614174000', 292 | report_id='456e7890-e12b-34d5-a678-9012345678901', 293 | item_definition={...} # New definition dict of the semantic model 294 | ) 295 | ``` 296 | """ 297 | workspace_id = resolve_workspace(workspace) 298 | report_id = resolve_report(workspace, report) 299 | params = {'updateMetadata': True} 300 | payload = {'definition': item_definition} 301 | return api_request( 302 | endpoint='/workspaces/' 303 | + workspace_id 304 | + '/reports/' 305 | + report_id 306 | + '/updateDefinition', 307 | method='post', 308 | payload=payload, 309 | params=params, 310 | support_lro=True, 311 | ) 312 | -------------------------------------------------------------------------------- /src/pyfabricops/items/semantic_models.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List, Optional, Union 2 | 3 | from pandas import DataFrame 4 | 5 | from pyfabricops.core import workspaces 6 | 7 | from ..api.api import api_request 8 | from ..core.folders import resolve_folder 9 | from ..core.workspaces import resolve_workspace 10 | from ..utils.decorators import df 11 | from ..utils.logging import get_logger 12 | from ..utils.utils import is_valid_uuid 13 | 14 | logger = get_logger(__name__) 15 | 16 | 17 | @df 18 | def list_semantic_models( 19 | workspace: str, 20 | df: Optional[bool] = True, 21 | ) -> Union[DataFrame, List[Dict[str, Any]], None]: 22 | """ 23 | Returns a list of semantic models in a specified workspace. 24 | 25 | Args: 26 | workspace_id (str): The ID of the workspace. 27 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 28 | If False, returns a list of dictionaries. 29 | 30 | Returns: 31 | (Union[DataFrame, List[Dict[str, Any]], None]): A list of semantic models or a DataFrame if df is True. 
32 | """ 33 | return api_request( 34 | endpoint='/workspaces/' 35 | + resolve_workspace(workspace) 36 | + '/semanticModels', 37 | support_pagination=True, 38 | ) 39 | 40 | 41 | def get_semantic_model_id( 42 | workspace: str, semantic_model: str 43 | ) -> Union[str, None]: 44 | """ 45 | Retrieves the ID of a semantic model by its name from the specified workspace. 46 | 47 | Args: 48 | workspace (str): The workspace name or ID. 49 | semantic_model (str): The name of the semantic model. 50 | 51 | Returns: 52 | (Optional[str]): The ID of the semantic model if found, otherwise None. 53 | 54 | Examples: 55 | ```python 56 | get_semantic_model_id('123e4567-e89b-12d3-a456-426614174000', 'SalesDataModel') 57 | ``` 58 | """ 59 | workspace_id = resolve_workspace(workspace) 60 | if workspace_id is None: 61 | return None 62 | 63 | semantic_models = list_semantic_models(workspace_id, df=False) 64 | for semantic_model_ in semantic_models: 65 | if semantic_model_['displayName'] == semantic_model: 66 | return semantic_model_['id'] 67 | return None 68 | 69 | 70 | def resolve_semantic_model( 71 | workspace: str, 72 | semantic_model: str, 73 | ) -> Union[str, None]: 74 | if is_valid_uuid(semantic_model): 75 | return semantic_model 76 | else: 77 | return get_semantic_model_id(workspace, semantic_model) 78 | 79 | 80 | @df 81 | def get_semantic_model( 82 | workspace: str, semantic_model: str, *, df: Optional[bool] = True 83 | ) -> Union[DataFrame, Dict[str, Any], None]: 84 | """ 85 | Retrieves a semantic model by its name or ID from the specified workspace. 86 | 87 | Args: 88 | workspace_id (str): The workspace ID. 89 | semantic_model_id (str): The ID of the semantic model. 90 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 91 | If False, returns a list of dictionaries. 92 | 93 | Returns: 94 | (Union[DataFrame, Dict[str, Any], None]): The semantic model details if found. If `df=True`, returns a DataFrame with flattened keys. 
95 | 96 | Examples: 97 | ```python 98 | get_semantic_model('123e4567-e89b-12d3-a456-426614174000', '123e4567-e89b-12d3-a456-426614174000') 99 | ``` 100 | """ 101 | workspace_id = resolve_workspace(workspace) 102 | semantic_model_id = resolve_semantic_model(workspace, semantic_model) 103 | return api_request( 104 | endpoint='/workspaces/' 105 | + workspace_id 106 | + '/semanticModels/' 107 | + semantic_model_id, 108 | ) 109 | 110 | 111 | @df 112 | def create_semantic_model( 113 | workspace: str, 114 | display_name: str, 115 | item_definition: Dict[str, Any], 116 | *, 117 | description: Optional[str] = None, 118 | folder: Optional[str] = None, 119 | df: Optional[bool] = True, 120 | ) -> Union[DataFrame, Dict[str, Any], None]: 121 | """ 122 | Creates a new semantic model in the specified workspace. 123 | 124 | Args: 125 | workspace (str): The workspace name or ID. 126 | display_name (str): The display name of the semantic model. 127 | item_definition (Dict[str, Any]): The definition of the semantic model. 128 | description (Optional[str]): A description for the semantic model. 129 | folder (Optional[str]): The ID of the folder to create the semantic model in. 130 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 131 | If False, returns a list of dictionaries. 132 | 133 | Returns: 134 | (Union[DataFrame, Dict[str, Any], None]): The created semantic model details. 
135 | 136 | Examples: 137 | ```python 138 | create_semantic_model( 139 | workspace_id='123e4567-e89b-12d3-a456-426614174000', 140 | display_name='SalesDataModel', 141 | item_definition= {}, # Definition dict of the semantic model 142 | description='A semantic model for sales data', 143 | folder_id='456e7890-e12b-34d5-a678-9012345678901', 144 | ) 145 | ``` 146 | """ 147 | workspace_id = resolve_workspace(workspace) 148 | 149 | payload = {'displayName': display_name, 'definition': item_definition} 150 | 151 | if description: 152 | payload['description'] = description 153 | 154 | if folder: 155 | folder_id = resolve_folder(workspace_id, folder) 156 | if folder_id: 157 | payload['folderId'] = folder_id 158 | 159 | return api_request( 160 | endpoint='/workspaces/' + workspace_id + '/semanticModels', 161 | method='post', 162 | payload=payload, 163 | support_lro=True, 164 | ) 165 | 166 | 167 | @df 168 | def update_semantic_model( 169 | workspace: str, 170 | semantic_model: str, 171 | *, 172 | display_name: Optional[str] = None, 173 | description: Optional[str] = None, 174 | df: Optional[bool] = False, 175 | ) -> Union[DataFrame, Dict[str, Any], None]: 176 | """ 177 | Updates the properties of the specified semantic model. 178 | 179 | Args: 180 | workspace (str): The workspace name or ID. 181 | semantic_model (str): The ID of the semantic model to update. 182 | display_name (str, optional): The new display name for the semantic model. 183 | description (str, optional): The new description for the semantic model. 184 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 185 | If False, returns a list of dictionaries. 186 | 187 | Returns: 188 | (Union[DataFrame, Dict[str, Any], None]): The updated semantic model details if successful, otherwise None. 
189 | 190 | Examples: 191 | ```python 192 | update_semantic_model( 193 | workspace_id='123e4567-e89b-12d3-a456-426614174000', 194 | semantic_model_id='456e7890-e12b-34d5-a678-9012345678901', 195 | display_name='UpdatedDisplayName', 196 | description='Updated description' 197 | ) 198 | ``` 199 | """ 200 | workspace_id = resolve_workspace(workspace) 201 | semantic_model_id = resolve_semantic_model(workspace, semantic_model) 202 | 203 | payload = {} 204 | 205 | if display_name: 206 | payload['displayName'] = display_name 207 | 208 | if description: 209 | payload['description'] = description 210 | 211 | return api_request( 212 | endpoint='/workspaces/' 213 | + workspace_id 214 | + '/semanticModels/' 215 | + semantic_model_id, 216 | method='patch', 217 | payload=payload, 218 | ) 219 | 220 | 221 | def delete_semantic_model(workspace: str, semantic_model: str) -> None: 222 | """ 223 | Delete a semantic model from the specified workspace. 224 | 225 | Args: 226 | workspace (str): The workspace name or ID. 227 | semantic_model (str): The name or ID of the semantic model to delete. 228 | 229 | Returns: 230 | None 231 | 232 | Examples: 233 | ```python 234 | delete_semantic_model('123e4567-e89b-12d3-a456-426614174000', '456e7890-e12b-34d5-a678-9012345678901') 235 | ``` 236 | """ 237 | workspace_id = resolve_workspace(workspace) 238 | semantic_model_id = resolve_semantic_model(workspace, semantic_model) 239 | 240 | return api_request( 241 | endpoint='/workspaces/' 242 | + workspace_id 243 | + '/semanticModels/' 244 | + semantic_model_id, 245 | method='delete', 246 | ) 247 | 248 | 249 | def get_semantic_model_definition( 250 | workspace: str, semantic_model: str 251 | ) -> Union[Dict[str, Any], None]: 252 | """ 253 | Retrieves the definition of a semantic model by its name or ID from the specified workspace. 254 | 255 | Args: 256 | workspace (str): The workspace name or ID. 257 | semantic_model (str): The name or ID of the semantic model. 
258 | 259 | Returns: 260 | ( Union[Dict[str, Any], None]): The semantic model definition if found, otherwise None. 261 | 262 | Examples: 263 | ```python 264 | get_semantic_model_definition( 265 | workspace_id='123e4567-e89b-12d3-a456-426614174000', 266 | semantic_model_id='456e7890-e12b-34d5-a678-9012345678901', 267 | ) 268 | ``` 269 | """ 270 | workspace_id = resolve_workspace(workspace) 271 | 272 | semantic_model_id = resolve_semantic_model(workspace, semantic_model) 273 | 274 | return api_request( 275 | endpoint='/workspaces/' 276 | + workspace_id 277 | + '/semanticModels/' 278 | + semantic_model_id 279 | + '/getDefinition', 280 | method='post', 281 | support_lro=True, 282 | ) 283 | 284 | 285 | @df 286 | def update_semantic_model_definition( 287 | workspace: str, 288 | semantic_model: str, 289 | item_definition: Dict[str, Any], 290 | *, 291 | df: Optional[bool] = True, 292 | ) -> Union[Dict[str, Any], None]: 293 | """ 294 | Updates the definition of an existing semantic model in the specified workspace. 295 | If the semantic model does not exist, it returns None. 296 | 297 | Args: 298 | workspace (str): The workspace name or ID. 299 | semantic_model (str): The name or ID of the semantic model to update. 300 | item_definition (Dict[str, Any]): The new definition for the semantic model. 301 | df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys. 302 | If False, returns a list of dictionaries. 303 | 304 | Returns: 305 | (Union[Dict[str, Any], None]): The updated semantic model details if successful, otherwise None. 
import time
from typing import Any, Dict, List, Optional, Union

from pandas import DataFrame

from ..api.api import api_request
from ..core.folders import resolve_folder
from ..core.workspaces import resolve_workspace
from ..utils.decorators import df
from ..utils.logging import get_logger
from ..utils.utils import is_valid_uuid

logger = get_logger(__name__)


@df
def list_warehouses(
    workspace: str,
    *,
    df: Optional[bool] = True,
) -> Union[DataFrame, List[Dict[str, Any]], None]:
    """
    Returns a list of warehouses from the specified workspace.
    This API supports pagination.

    Args:
        workspace (str): The workspace name or ID.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, List[Dict[str, Any]], None]): A list of warehouses.
            If `df=True`, returns a DataFrame with flattened keys.

    Examples:
        ```python
        list_warehouses('MyProjectWorkspace')
        ```
    """
    return api_request(
        endpoint=f'/workspaces/{resolve_workspace(workspace)}/warehouses',
        support_pagination=True,
    )


def get_warehouse_id(workspace: str, warehouse: str) -> Union[str, None]:
    """
    Retrieves the ID of a warehouse by its name from the specified workspace.

    Args:
        workspace (str): The workspace name or ID.
        warehouse (str): The name of the warehouse.

    Returns:
        (Union[str, None]): The ID of the warehouse, or None if not found.

    Examples:
        ```python
        get_warehouse_id('MyProjectWorkspace', 'SalesDatawarehouse')
        ```
    """
    warehouses = list_warehouses(workspace, df=False)
    if not warehouses:
        return None

    for warehouse_ in warehouses:
        if warehouse_['displayName'] == warehouse:
            return warehouse_['id']
    return None


def resolve_warehouse(
    workspace: str,
    warehouse: str,
) -> Union[str, None]:
    """
    Resolves a warehouse name to its ID.

    Args:
        workspace (str): The workspace name or ID.
        warehouse (str): The name or ID of the warehouse.

    Returns:
        (Union[str, None]): The ID of the warehouse, or None if not found.

    Examples:
        ```python
        resolve_warehouse('MyProjectWorkspace', 'SalesDatawarehouse')
        ```
    """
    # If the caller already passed a UUID there is nothing to look up.
    if is_valid_uuid(warehouse):
        return warehouse
    return get_warehouse_id(workspace, warehouse)


@df
def get_warehouse(
    workspace: str,
    warehouse: str,
    *,
    df: Optional[bool] = True,
) -> Union[DataFrame, Dict[str, Any], None]:
    """
    Retrieves a warehouse by its name or ID from the specified workspace.

    Args:
        workspace (str): The workspace name or ID.
        warehouse (str): The name or ID of the warehouse.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, Dict[str, Any], None]): The warehouse details if found. If `df=True`, returns a DataFrame with flattened keys.

    Examples:
        ```python
        get_warehouse('MyProjectWorkspace', 'SalesDatawarehouse')
        get_warehouse('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000')
        get_warehouse('123e4567-e89b-12d3-a456-426614174000', 'SalesDatawarehouse', df=True)
        ```
    """
    workspace_id = resolve_workspace(workspace)
    if not workspace_id:
        return None

    warehouse_id = resolve_warehouse(workspace_id, warehouse)
    if not warehouse_id:
        return None

    return api_request(
        endpoint=f'/workspaces/{workspace_id}/warehouses/{warehouse_id}',
    )


@df
def create_warehouse(
    workspace: str,
    display_name: str,
    *,
    description: Optional[str] = None,
    folder: Optional[str] = None,
    enable_schemas: Optional[bool] = False,
    df: Optional[bool] = True,
) -> Union[DataFrame, Dict[str, Any], None]:
    """
    Create a warehouse in the specified workspace.

    Args:
        workspace (str): The workspace name or ID.
        display_name (str): The display name for the warehouse.
        description (Optional[str]): The description for the warehouse.
        folder (Optional[str]): The folder to create the warehouse in.
        enable_schemas (Optional[bool]): Whether to enable schemas for the warehouse.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, Dict[str, Any], None]): The created warehouse details if successful, otherwise None.

    Examples:
        ```python
        create_warehouse('MyProjectWorkspace', 'SalesDatawarehouse')
        create_warehouse('MyProjectWorkspace', 'SalesDatawarehouse', description='Sales data warehouse')
        ```
    """
    workspace_id = resolve_workspace(workspace)
    if not workspace_id:
        # Workspace could not be resolved; mirror get_warehouse's behavior
        # instead of building an endpoint with a None segment.
        return None

    payload = {'displayName': display_name}

    if description:
        payload['description'] = description

    if folder:
        # A missing folder is tolerated: the warehouse is then created at
        # the workspace root.
        folder_id = resolve_folder(workspace_id, folder)
        if folder_id:
            payload['folderId'] = folder_id

    if enable_schemas:
        payload['creationPayload'] = {'enableSchemas': True}

    return api_request(
        endpoint=f'/workspaces/{workspace_id}/warehouses',
        method='post',
        payload=payload,
        support_lro=True,
    )


@df
def update_warehouse(
    workspace: str,
    warehouse: str,
    *,
    display_name: Optional[str] = None,
    description: Optional[str] = None,
    df: Optional[bool] = True,
) -> Union[DataFrame, Dict[str, Any], None]:
    """
    Updates the properties of the specified warehouse.

    Args:
        workspace (str): The workspace name or ID.
        warehouse (str): The name or ID of the warehouse to update.
        display_name (Optional[str]): The new display name for the warehouse.
        description (Optional[str]): The new description for the warehouse.
        df (Optional[bool]): If True or not provided, returns a DataFrame with flattened keys.
            If False, returns a list of dictionaries.

    Returns:
        (Union[DataFrame, Dict[str, Any], None]): The updated warehouse details if successful, otherwise None.

    Examples:
        ```python
        update_warehouse('MyProjectWorkspace', 'SalesDatawarehouse', display_name='UpdatedSalesDatawarehouse')
        update_warehouse('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000', description='Updated description')
        ```
    """
    workspace_id = resolve_workspace(workspace)
    if not workspace_id:
        return None

    warehouse_id = resolve_warehouse(workspace_id, warehouse)
    if not warehouse_id:
        return None

    payload = {}

    if display_name:
        payload['displayName'] = display_name

    if description:
        payload['description'] = description

    return api_request(
        endpoint=f'/workspaces/{workspace_id}/warehouses/{warehouse_id}',
        method='patch',
        payload=payload,
    )


def delete_warehouse(workspace: str, warehouse: str) -> None:
    """
    Delete a warehouse in the specified workspace.

    Args:
        workspace (str): The workspace name or ID.
        warehouse (str): The name or ID of the warehouse to delete.

    Returns:
        None

    Examples:
        ```python
        delete_warehouse('MyProjectWorkspace', 'SalesDatawarehouse')
        delete_warehouse('MyProjectWorkspace', '123e4567-e89b-12d3-a456-426614174000')
        ```
    """
    workspace_id = resolve_workspace(workspace)
    if not workspace_id:
        return None

    warehouse_id = resolve_warehouse(workspace_id, warehouse)
    if not warehouse_id:
        return None

    return api_request(
        endpoint=f'/workspaces/{workspace_id}/warehouses/{warehouse_id}',
        method='delete',
    )
21 | 22 | Examples: 23 | ```python 24 | list_capacities(df=True) 25 | ``` 26 | """ 27 | 28 | @wraps(func) 29 | def _wrapper(*args, **kwargs): 30 | df = kwargs.pop('df', True) 31 | result = func(*args, **kwargs) 32 | 33 | if result is None: 34 | return None 35 | 36 | if df: 37 | return _json_df(result) 38 | else: 39 | return result 40 | 41 | return _wrapper 42 | 43 | 44 | def _flatten_json(data, parent_key='', sep='_'): 45 | """ 46 | Helper function to flatten nested JSON. 47 | 48 | Args: 49 | data (dict): JSON to flatten. 50 | parent_key (str): Parent key (used for recursion). 51 | sep (str): Separator for flattened keys. 52 | 53 | Returns: 54 | list[dict]: List of flattened dictionaries. 55 | """ 56 | items = [] 57 | for k, v in data.items(): 58 | new_key = f'{parent_key}{sep}{k}' if parent_key else k 59 | if isinstance(v, dict): 60 | items.extend(_flatten_json(v, new_key, sep=sep).items()) 61 | else: 62 | items.append((new_key, v)) 63 | return dict(items) 64 | 65 | 66 | def _json_df(data): 67 | """ 68 | Converts various types of JSON to a DataFrame. 69 | 70 | Args: 71 | data (dict | list): The JSON to be converted. Can be a simple dictionary, 72 | a nested dictionary, or a list of dictionaries. 73 | 74 | Returns: 75 | pd.DataFrame: The resulting DataFrame. 76 | """ 77 | if not data: 78 | return None 79 | 80 | if isinstance(data, dict): 81 | 82 | # If it"s a simple dictionary 83 | if all(not isinstance(v, (dict, list)) for v in data.values()): 84 | return pd.DataFrame([data]) 85 | 86 | # If it"s a dictionary with nested levels 87 | else: 88 | flattened_data = _flatten_json(data) 89 | return pd.DataFrame([flattened_data]) 90 | 91 | elif isinstance(data, list): 92 | 93 | # If it"s a list of dictionaries 94 | if all(isinstance(item, dict) for item in data): 95 | flattened_list = [_flatten_json(item) for item in data] 96 | return pd.DataFrame(flattened_list) 97 | else: 98 | raise ValueError( 99 | 'The list contains items that are not dictionaries.' 
100 | ) 101 | 102 | else: 103 | raise TypeError( 104 | 'Input type must be a dictionary or a list of dictionaries.' 105 | ) 106 | -------------------------------------------------------------------------------- /src/pyfabricops/utils/exceptions.py: -------------------------------------------------------------------------------- 1 | class PyFabricOpsError(Exception): 2 | """Base class for all exceptions raised by the pyfabricops package.""" 3 | 4 | pass 5 | 6 | 7 | class AuthenticationError(PyFabricOpsError): 8 | """Exception raised for authentication-related errors.""" 9 | 10 | pass 11 | 12 | 13 | class ResourceNotFoundError(PyFabricOpsError): 14 | """Exception raised when a requested resource is not found.""" 15 | 16 | pass 17 | 18 | 19 | class OptionNotAvailableError(PyFabricOpsError): 20 | """Exception raised when an option is not available.""" 21 | 22 | pass 23 | 24 | 25 | class RequestError(PyFabricOpsError): 26 | """Exception raised for errors in API requests.""" 27 | 28 | pass 29 | 30 | 31 | class InvalidParameterError(PyFabricOpsError): 32 | """Exception raised for invalid parameters.""" 33 | 34 | pass 35 | 36 | 37 | class ConfigurationError(PyFabricOpsError): 38 | """Exception raised for configuration-related errors.""" 39 | 40 | pass 41 | 42 | 43 | class FileNotFoundError(PyFabricOpsError): 44 | """Exception raised when a file is not found.""" 45 | 46 | pass 47 | -------------------------------------------------------------------------------- /src/pyfabricops/utils/schemas.py: -------------------------------------------------------------------------------- 1 | PLATFORM_SCHEMA = 'https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json' 2 | PLATFORM_VERSION = '2.0' 3 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | 
-------------------------------------------------------------------------------- /tests/test_basic.py: -------------------------------------------------------------------------------- 1 | """Basic tests for pyfabricops package.""" 2 | 3 | import logging 4 | import os 5 | import sys 6 | 7 | import pytest 8 | 9 | # Add src to path for testing 10 | sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src')) 11 | 12 | import pyfabricops as pf 13 | 14 | 15 | def test_package_import(): 16 | """Test that the package can be imported.""" 17 | assert pf is not None 18 | 19 | 20 | def test_package_has_version(): 21 | """Test that the package has a version attribute.""" 22 | assert hasattr(pf, '__version__') 23 | assert pf.__version__ is not None 24 | 25 | 26 | def test_set_auth_provider(): 27 | """Test that auth provider can be set.""" 28 | # This should not raise an exception 29 | pf.set_auth_provider('env') 30 | 31 | 32 | @pytest.mark.skipif( 33 | not all( 34 | [ 35 | os.getenv('FAB_CLIENT_ID'), 36 | os.getenv('FAB_CLIENT_SECRET'), 37 | os.getenv('FAB_TENANT_ID'), 38 | ] 39 | ), 40 | reason='Fabric credentials not available', 41 | ) 42 | def test_list_workspaces_with_credentials(): 43 | """Test list_workspaces when credentials are available.""" 44 | pf.set_auth_provider('env') 45 | workspaces = pf.list_workspaces() 46 | assert isinstance(workspaces, list) 47 | 48 | 49 | def __basic_test(): 50 | """Manual test function for development.""" 51 | pf.setup_logging() 52 | pf.set_auth_provider('env') 53 | workspaces = pf.list_workspaces(df=True) 54 | print(workspaces) 55 | 56 | 57 | if __name__ == '__main__': 58 | __basic_test() 59 | --------------------------------------------------------------------------------