├── .flake8 ├── .github └── workflows │ ├── pr.yaml │ └── release-pypi.yaml ├── .gitignore ├── LICENSE ├── README.md ├── pbiapi ├── __init__.py ├── pbiapi.py └── utils.py ├── poetry.lock ├── pyproject.toml ├── requirements.txt └── setup.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 120 -------------------------------------------------------------------------------- /.github/workflows/pr.yaml: -------------------------------------------------------------------------------- 1 | name: "Test and build" 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - "master" 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | python-version: [3.7, 3.8] 15 | 16 | steps: 17 | - uses: actions/checkout@v2 18 | name: Checkout code 19 | 20 | - name: Set up Python ${{ matrix.python-version }} 21 | uses: actions/setup-python@v1 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | 25 | - name: Install dependencies 26 | run: | 27 | python3 -m pip install --upgrade pip poetry 28 | poetry config virtualenvs.create false 29 | poetry install 30 | - name: Check codestyle 31 | run: | 32 | poetry run black --check . 33 | poetry run isort --check-only -rc . 34 | # - name: Run tests 35 | # run: | 36 | # coverage run --source cognite.experimental -m pytest -v tests 37 | # coverage xml 38 | # - uses: codecov/codecov-action@v1 39 | # with: 40 | # token: ${{ secrets.CODECOV_TOKEN }} 41 | # file: ./coverage.xml 42 | 43 | - name: Build package 44 | run: poetry build -------------------------------------------------------------------------------- /.github/workflows/release-pypi.yaml: -------------------------------------------------------------------------------- 1 | name: "Release pypi" 2 | 3 | on: 4 | push: 5 | branches: 6 | - "master" 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v2 13 | name: Checkout code 14 | 15 | - name: Set up Python 16 | uses: actions/setup-python@v1 17 | with: 18 | python-version: 3.8 19 | 20 | - name: Install dependencies 21 | run: | 22 | python3 -m pip install --upgrade pip poetry 23 | poetry config virtualenvs.create false 24 | poetry install 25 | - name: Check codestyle 26 | run: | 27 | poetry run black --check . 28 | poetry run isort --check-only -rc . 
29 | # - name: Run tests 30 | # run: | 31 | # coverage run --source inso_toolbox -m pytest -v tests 32 | # coverage xml 33 | 34 | # - uses: codecov/codecov-action@v1 35 | # with: 36 | # token: ${{ secrets.CODECOV_TOKEN }} 37 | # file: ./coverage.xml 38 | 39 | - name: Build package 40 | run: poetry build 41 | 42 | - name: Release to PyPI 43 | run: poetry publish -u __token__ -p ${{ secrets.PYPI_API_TOKEN }} || echo 'Version exists' -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode 2 | __pycache__/ 3 | build 4 | pbiapi.egg-info/ 5 | 6 | .DS_Store 7 | venv 8 | .idea 9 | dist 10 | 11 | .venv -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018 The Python Packaging Authority 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PowerBI-API-Python 2 | This python package consists of helper functions for working with the Power BI API. To use this first make sure you have a Service Principal set up in Azure that has access to Power BI API. This [guide](https://cognitedata.atlassian.net/wiki/spaces/FORGE/pages/1003814928/Power+BI+API+Set+Up) shows how to set up a SP App. 
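If you want to sanity-check the Service Principal setup before using the client, you can request a Power BI API token directly. The sketch below mirrors the client-credentials request the client itself sends (see `update_token` in `pbiapi/pbiapi.py`); the tenant id, client id and client secret are placeholders you must replace with your own values.

```python
import requests

# Placeholders: substitute your own Service Principal details.
tenant_id = "<tenant_id>"
client_id = "<client_id>"
client_secret = "<client_secret>"

# Same token request the client performs internally in update_token():
response = requests.post(
    f"https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token",
    data={
        "grant_type": "client_credentials",
        "client_id": client_id,
        "client_secret": client_secret,
        "scope": "https://analysis.windows.net/powerbi/api/.default",
    },
    headers={"Content-Type": "application/x-www-form-urlencoded"},
)
response.raise_for_status()
print("Token acquired:", "access_token" in response.json())
```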
3 | 4 | ## Basic Usage 5 | 6 | Install using pip: 7 | ```sh 8 | pip install pbiapi 9 | ``` 10 | 11 | Add the client to your project with: 12 | 13 | ```python 14 | from pbiapi import PowerBIAPIClient 15 | ``` 16 | 17 | Initialize the client by running: 18 | ```python 19 | pbi_client = PowerBIAPIClient( 20 | "<tenant_id>", 21 | "<client_id>", 22 | "<client_secret>", 23 | ) 24 | ``` 25 | 26 | You can then get all the workspaces the Service Principal is an admin of by running: 27 | ```python 28 | pbi_client.get_workspaces() 29 | ``` 30 | -------------------------------------------------------------------------------- /pbiapi/__init__.py: -------------------------------------------------------------------------------- 1 | name = "pbiapi" 2 | 3 | from .pbiapi import PowerBIAPIClient 4 | -------------------------------------------------------------------------------- /pbiapi/pbiapi.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import logging 3 | import os 4 | from typing import Callable, Dict, List, NoReturn, Union 5 | from urllib import parse 6 | 7 | import requests 8 | 9 | from pbiapi.utils import partition 10 | 11 | HTTP_OK_CODE = 200 12 | HTTP_ACCEPTED_CODE = 202 13 | 14 | 15 | def check_token(fn: Callable) -> Callable: 16 | def wrapper(pbi_client, *args, **kwargs): 17 | if pbi_client.token is None or pbi_client.token_expiration < datetime.datetime.utcnow(): 18 | pbi_client.update_token() 19 | return fn(pbi_client, *args, **kwargs) 20 | 21 | return wrapper 22 | 23 | 24 | class PowerBIAPIClient: 25 | def __init__(self, tenant_id: str, client_id: str, client_secret: str): 26 | self.tenant_id = tenant_id 27 | self.client_id = client_id 28 | self.client_secret = client_secret 29 | self.base_url = "https://api.powerbi.com/v1.0/myorg/" 30 | self.url = f"https://login.microsoftonline.com/{self.tenant_id}/oauth2/v2.0/token" 31 | self.token = None 32 | self.token_expiration = None 33 | self._workspaces = None 34 | self.headers = None 35 | 36 | def get_auth_header(self) -> Dict[str, str]: 37 | return {"Authorization": f"Bearer {self.token}"} 38 | 39 | def update_token(self) -> None: 40 | payload = { 41 | "grant_type": "client_credentials", 42 | "client_id": self.client_id, 43 | "scope": "https://analysis.windows.net/powerbi/api/.default", 44 | "client_secret": self.client_secret, 45 | } 46 | headers = {"Content-Type": "application/x-www-form-urlencoded"} 47 | response = requests.post(self.url, data=payload, headers=headers) 48 | 49 | if response.status_code == HTTP_OK_CODE: 50 | self.token = response.json()["access_token"] 51 | self.token_expiration = datetime.datetime.utcnow() + datetime.timedelta(hours=1) 52 | self.headers = {**headers, **self.get_auth_header()} 53 | else: 54 | self.force_raise_http_error(response) 55 | 56 | @property 57 | def workspaces(self) -> List: 58 | return self._workspaces or self.get_workspaces() 59 | 60 | @check_token 61 | def get_workspaces(self) -> List: 62 | url = self.base_url + "groups" 63 | response = requests.get(url, headers=self.headers) 64 | 65 | if response.status_code == HTTP_OK_CODE: 66 | self._workspaces = response.json()["value"] 67 | return self._workspaces 68 | else: 69 | logging.error("Failed to fetch workspaces!") 70 | self.force_raise_http_error(response) 71 | 72 | @staticmethod 73 | def find_entity_id_by_name(entity_list: List, name: str, entity_type: str, raise_if_missing: bool = False) -> str: 74 | for item in entity_list: 75 | if item["name"] == name: 76 | return item["id"] 77 | if raise_if_missing: 78 | raise RuntimeError(f"No 
{entity_type} was found with the name: '{name}'") 79 | 80 | @check_token 81 | def create_workspace(self, name: str) -> None: 82 | # Check if workspace exists already: 83 | url = self.base_url + "groups?$filter=" + parse.quote(f"name eq '{name}'") 84 | response = requests.get(url, headers=self.headers) 85 | 86 | if response.status_code != HTTP_OK_CODE: 87 | logging.error(f"Failed when checking if the workspace, '{name}' already exists!") 88 | self.force_raise_http_error(response) 89 | 90 | if response.json()["@odata.count"] > 0: 91 | logging.info("Workspace already exists, no changes made!") 92 | return 93 | 94 | # Workspace does not exist, lets create it: 95 | logging.info(f"Trying to create a workspace with name: {name}...") 96 | url = self.base_url + "groups?workspaceV2=true" 97 | response = requests.post(url, data={"name": name}, headers=self.headers) 98 | 99 | if response.status_code == HTTP_OK_CODE: 100 | logging.info("Workspace created successfully!") 101 | self.get_workspaces() # Update internal state 102 | else: 103 | logging.error(f"Failed to create the new workspace: '{name}':") 104 | self.force_raise_http_error(response) 105 | 106 | @check_token 107 | def add_user_to_workspace(self, workspace_name: str, user: Dict) -> None: 108 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 109 | 110 | # Workspace exists, lets add user: 111 | url = self.base_url + f"groups/{workspace_id}/users" 112 | response = requests.post(url, data=user, headers=self.headers) 113 | 114 | if response.status_code == HTTP_OK_CODE: 115 | logging.info(f"Added users to workspace '{workspace_name}'") 116 | else: 117 | logging.error(f"Failed to add user to workspace '{workspace_name}': {user}") 118 | self.force_raise_http_error(response) 119 | 120 | @check_token 121 | def get_users_from_workspace(self, name: str) -> List: 122 | workspace_id = self.find_entity_id_by_name(self.workspaces, name, "workspace", raise_if_missing=True) 123 | 124 | url = self.base_url + f"groups/{workspace_id}/users" 125 | 126 | response = requests.get(url, headers=self.headers) 127 | if response.status_code == 200: 128 | return response.json()["value"] 129 | else: 130 | logging.error("Error getting users from workspace") 131 | self.force_raise_http_error(response) 132 | 133 | @check_token 134 | def delete_workspace(self, workspace_name: str) -> None: 135 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace") 136 | 137 | if workspace_id is None: 138 | # If workspace is already deleted / doesn't exist, we simply return: 139 | return 140 | 141 | url = self.base_url + f"groups/{workspace_id}" 142 | response = requests.delete(url, headers=self.headers) 143 | 144 | if response.status_code == HTTP_OK_CODE: 145 | logging.info("Workspace deleted successfully!") 146 | else: 147 | logging.error("Workspace deletion failed:") 148 | self.force_raise_http_error(response) 149 | 150 | @check_token 151 | def get_datasets_in_workspace(self, workspace_name: str) -> List: 152 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 153 | 154 | datasets_url = self.base_url + f"groups/{workspace_id}/datasets" 155 | response = requests.get(datasets_url, headers=self.headers) 156 | response.raise_for_status() 157 | if response.status_code == HTTP_OK_CODE: 158 | return response.json()["value"] 159 | 160 | @check_token 161 | def refresh_dataset_by_id(self, workspace_name: str, dataset_id: str) -> None: 162 | 
workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 163 | url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/refreshes" 164 | response = requests.post(url, data="notifyOption=NoNotification", headers=self.headers) 165 | 166 | if response.status_code == 202: 167 | logging.info(f"Refresh triggered for dataset with id {dataset_id} (in workspace with id {workspace_id})!") 168 | else: 169 | logging.error("Dataset refresh failed!") 170 | self.force_raise_http_error(response, expected_codes=202) 171 | 172 | @check_token 173 | def refresh_dataset_by_name(self, workspace_name: str, dataset_name: str) -> None: 174 | datasets = self.get_datasets_in_workspace(workspace_name) 175 | dataset_id = self.find_entity_id_by_name(datasets, dataset_name, "dataset", True) 176 | self.refresh_dataset_by_id(workspace_name, dataset_id) 177 | 178 | @check_token 179 | def create_push_dataset(self, workspace_name: str, retention_policy: str) -> None: 180 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 181 | url = self.base_url + f"groups/{workspace_id}/datasets?defaultRetentionPolicy={retention_policy}" 182 | response = requests.post(url, data="notifyOption=NoNotification", headers=self.headers) 183 | 184 | if response.status_code == 202: 185 | logging.info( 186 | f"Create push dataset successful using workspace_id: {workspace_id} and " 187 | f"retention_policy: {retention_policy}" 188 | ) 189 | else: 190 | logging.error("Create push dataset failed!") 191 | self.force_raise_http_error(response, expected_codes=202) 192 | 193 | @check_token 194 | def create_dataset(self, workspace_name: str, schema: Dict, retention_policy: str) -> None: 195 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 196 | url = self.base_url + f"groups/{workspace_id}/datasets?defaultRetentionPolicy={retention_policy}" 197 | response = requests.post(url, json=schema, headers=self.get_auth_header()) 198 | 199 | if response.status_code in [201, 202]: 200 | logging.info( 201 | f"Create dataset successful using workspace_id: {workspace_id}, schema: {schema} " 202 | f"and retention_policy: {retention_policy}" 203 | ) 204 | else: 205 | logging.error("Failed to create dataset!") 206 | self.force_raise_http_error(response, expected_codes=[201, 202]) 207 | 208 | @check_token 209 | def delete_dataset(self, workspace_name: str, dataset_name: str) -> None: 210 | workspace_id, dataset_id = self.get_workspace_and_dataset_id(workspace_name, dataset_name) 211 | 212 | url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}" 213 | response = requests.delete(url, headers=self.headers) 214 | if response.status_code == HTTP_OK_CODE: 215 | logging.info(f"Dataset with id: {dataset_id} in workspace with id: {workspace_id} deleted successfully!") 216 | else: 217 | logging.error("Failed to delete dataset!") 218 | self.force_raise_http_error(response) 219 | 220 | @check_token 221 | def post_rows(self, workspace_name: str, dataset_id: str, table_name: str, data, chunk_size: int = 10000) -> None: 222 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 223 | url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/tables/{table_name}/rows" 224 | 225 | chunked_data = partition(data, n=chunk_size) 226 | tot_chunks = len(chunked_data) 227 | 228 | for i, row_chunk in enumerate(chunked_data, 1): 229 | response 
= requests.post(url, json={"rows": row_chunk}, headers=self.get_auth_header()) 230 | if response.status_code == HTTP_OK_CODE: 231 | logging.info(f"Chunk [{i}/{tot_chunks}] inserted successfully! Size: {len(row_chunk)} rows") 232 | else: 233 | logging.error("Row insertion failed!") 234 | self.force_raise_http_error(response) 235 | 236 | @check_token 237 | def update_table_schema(self, workspace_name: str, dataset_id: str, table_name: str, schema: Dict) -> None: 238 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 239 | url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/tables/{table_name}" 240 | response = requests.put(url, json=schema, headers=self.get_auth_header()) 241 | # TODO(scottmelhop): Use/check/raise depending on status code? 242 | logging.info(f"Update table schema returned status code {response.status_code}: {response.text}") 243 | 244 | @check_token 245 | def get_tables(self, workspace_name: str, dataset_id: str) -> List: 246 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 247 | url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/tables" 248 | response = requests.get(url, headers=self.headers) 249 | 250 | if response.status_code == HTTP_OK_CODE: 251 | return response.json() 252 | 253 | @check_token 254 | def truncate_table(self, workspace_name: str, dataset_id: str, table_name: str) -> None: 255 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 256 | url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/tables/{table_name}/rows" 257 | response = requests.delete(url, headers=self.headers) 258 | 259 | if response.status_code == HTTP_OK_CODE: 260 | logging.info("Table truncation successful!") 261 | else: 262 | logging.error("Table truncation failed!") 263 | self.force_raise_http_error(response) 264 | 265 | @check_token 266 | def get_reports_in_workspace(self, workspace_name: str) -> List: 267 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 268 | 269 | url = self.base_url + f"groups/{workspace_id}/reports" 270 | response = requests.get(url, headers=self.headers) 271 | 272 | if response.status_code == HTTP_OK_CODE: 273 | return response.json()["value"] 274 | 275 | @check_token 276 | def rebind_report_in_workspace(self, workspace_name: str, dataset_name: str, report_name: str) -> None: 277 | workspace_id, dataset_id = self.get_workspace_and_dataset_id(workspace_name, dataset_name) 278 | 279 | reports = self.get_reports_in_workspace(workspace_name) 280 | report_id = self.find_entity_id_by_name(reports, report_name, "report", raise_if_missing=True) 281 | 282 | url = self.base_url + f"groups/{workspace_id}/reports/{report_id}/Rebind" 283 | headers = {"Content-Type": "application/json", **self.get_auth_header()} 284 | payload = {"datasetId": dataset_id} 285 | 286 | response = requests.post(url, json=payload, headers=headers) 287 | if response.status_code == HTTP_OK_CODE: 288 | logging.info(f"Report named '{report_name}' rebound to dataset with name '{dataset_name}'") 289 | else: 290 | logging.error(f"Failed to rebind report with name '{report_name}' to dataset with name '{dataset_name}'") 291 | self.force_raise_http_error(response) 292 | 293 | @check_token 294 | def delete_report(self, workspace_name: str, report_name: str) -> None: 295 | workspace_id = 
self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 296 | 297 | reports = self.get_reports_in_workspace(workspace_name) 298 | report_id = self.find_entity_id_by_name(reports, report_name, "report", raise_if_missing=True) 299 | 300 | url = self.base_url + f"groups/{workspace_id}/reports/{report_id}" 301 | response = requests.delete(url, headers=self.headers) 302 | 303 | if response.status_code == HTTP_OK_CODE: 304 | logging.info(f"Report named '{report_name}' in workspace '{workspace_name}' deleted successfully!") 305 | else: 306 | logging.error("Report deletion failed!") 307 | self.force_raise_http_error(response) 308 | 309 | @check_token 310 | def import_file_into_workspace( 311 | self, workspace_name: str, skip_report: bool, file_path: str, display_name: str 312 | ) -> None: 313 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 314 | 315 | if not os.path.isfile(file_path): 316 | raise FileNotFoundError(2, f"No such file or directory: '{file_path}'") 317 | 318 | name_conflict = "CreateOrOverwrite" 319 | url = ( 320 | self.base_url 321 | + f"groups/{workspace_id}/imports?datasetDisplayName={display_name}&nameConflict=" 322 | + f"{name_conflict}" 323 | + ("&skipReport=true" if skip_report else "") 324 | ) 325 | headers = {"Content-Type": "multipart/form-data", **self.get_auth_header()} 326 | 327 | with open(file_path, "rb") as f: 328 | response = requests.post(url, headers=headers, files={"filename": f}) 329 | 330 | if response.status_code == 202: 331 | logging.info(response.json()) 332 | import_id = response.json()["id"] 333 | logging.info(f"File uploading with id: {import_id}") 334 | else: 335 | self.force_raise_http_error(response) 336 | 337 | get_import_url = self.base_url + f"groups/{workspace_id}/imports/{import_id}" 338 | 339 | while True: 340 | response = requests.get(url=get_import_url, headers=self.headers) 341 | if response.status_code != 200: 342 | self.force_raise_http_error(response) 343 | 344 | if response.json()["importState"] == "Succeeded": 345 | logging.info("Import complete") 346 | return 347 | else: 348 | logging.info("Import in progress...") 349 | 350 | @check_token 351 | def update_parameters_in_dataset(self, workspace_name: str, dataset_name: str, parameters: list): 352 | workspace_id, dataset_id = self.get_workspace_and_dataset_id(workspace_name, dataset_name) 353 | 354 | update_details = {"updateDetails": parameters} 355 | url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/UpdateParameters" 356 | headers = {"Content-Type": "application/json", **self.get_auth_header()} 357 | response = requests.post(url, json=update_details, headers=headers) 358 | 359 | if response.status_code == HTTP_OK_CODE: 360 | for parameter in parameters: 361 | logging.info( 362 | f"Parameter \"{parameter['name']}\"", 363 | f" updated to \"{parameter['newValue']}\"", 364 | f" in Dataset named '{dataset_name}' in workspace '{workspace_name}'!", 365 | ) 366 | else: 367 | logging.error(f"Parameter update failed for dataset {dataset_name}!") 368 | self.force_raise_http_error(response) 369 | 370 | @check_token 371 | def get_parameters_in_dataset(self, workspace_name: str, dataset_name: str) -> List: 372 | workspace_id, dataset_id = self.get_workspace_and_dataset_id(workspace_name, dataset_name) 373 | 374 | url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/parameters" 375 | 376 | response = requests.get(url, headers=self.headers) 377 | 378 | if 
response.status_code == HTTP_OK_CODE: 379 | return response.json()["value"] 380 | else: 381 | logging.error(f"Failed to get parameters for dataset {dataset_name}!") 382 | self.force_raise_http_error(response) 383 | 384 | @check_token 385 | def take_over_dataset(self, workspace_name: str, dataset_name: str) -> None: 386 | workspace_id, dataset_id = self.get_workspace_and_dataset_id(workspace_name, dataset_name) 387 | 388 | url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/TakeOver" 389 | 390 | response = requests.post(url, headers=self.headers) 391 | 392 | if response.status_code == HTTP_OK_CODE: 393 | logging.info(f"Takeover of dataset {dataset_name} Complete") 394 | else: 395 | logging.error(f"Takeover of dataset {dataset_name} failed!") 396 | self.force_raise_http_error(response) 397 | 398 | @check_token 399 | def get_dataset_refresh_history(self, workspace_name: str, dataset_name: str, top=10) -> List: 400 | workspace_id, dataset_id = self.get_workspace_and_dataset_id(workspace_name, dataset_name) 401 | 402 | url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/refreshes?$top={top}" 403 | 404 | response = requests.get(url, headers=self.headers) 405 | 406 | if response.status_code in [HTTP_OK_CODE, HTTP_ACCEPTED_CODE]: 407 | return response.json()["value"] 408 | else: 409 | logging.error(f"Failed getting refresh history for {dataset_name}!") 410 | self.force_raise_http_error(response) 411 | 412 | @staticmethod 413 | def force_raise_http_error( 414 | response: requests.Response, expected_codes: Union[List[int], int] = HTTP_OK_CODE 415 | ) -> NoReturn: 416 | logging.error(f"Expected response code(s) {expected_codes}, got {response.status_code}: {response.text}.") 417 | response.raise_for_status() 418 | raise requests.HTTPError(response) 419 | 420 | def get_workspace_and_dataset_id(self, workspace_name: str, dataset_name: str) -> Union: 421 | workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) 422 | 423 | datasets = self.get_datasets_in_workspace(workspace_name) 424 | dataset_id = self.find_entity_id_by_name(datasets, dataset_name, "dataset", raise_if_missing=True) 425 | 426 | return workspace_id, dataset_id 427 | -------------------------------------------------------------------------------- /pbiapi/utils.py: -------------------------------------------------------------------------------- 1 | from typing import Any, List 2 | 3 | 4 | def partition(lst: List[Any], n: int) -> List[List[Any]]: 5 | """ 6 | Splits the list into chunks with size n, 7 | except last chunks that has size <= n 8 | """ 9 | return [lst[i : i + n] for i in range(0, len(lst), n)] 10 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | category = "dev" 3 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
4 | name = "appdirs" 5 | optional = false 6 | python-versions = "*" 7 | version = "1.4.4" 8 | 9 | [[package]] 10 | category = "dev" 11 | description = "Classes Without Boilerplate" 12 | name = "attrs" 13 | optional = false 14 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 15 | version = "19.3.0" 16 | 17 | [package.extras] 18 | azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"] 19 | dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"] 20 | docs = ["sphinx", "zope.interface"] 21 | tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] 22 | 23 | [[package]] 24 | category = "dev" 25 | description = "The uncompromising code formatter." 26 | name = "black" 27 | optional = false 28 | python-versions = ">=3.6" 29 | version = "19.10b0" 30 | 31 | [package.dependencies] 32 | appdirs = "*" 33 | attrs = ">=18.1.0" 34 | click = ">=6.5" 35 | pathspec = ">=0.6,<1" 36 | regex = "*" 37 | toml = ">=0.9.4" 38 | typed-ast = ">=1.4.0" 39 | 40 | [package.extras] 41 | d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] 42 | 43 | [[package]] 44 | category = "main" 45 | description = "Python package for providing Mozilla's CA Bundle." 46 | name = "certifi" 47 | optional = false 48 | python-versions = "*" 49 | version = "2020.4.5.2" 50 | 51 | [[package]] 52 | category = "main" 53 | description = "Universal encoding detector for Python 2 and 3" 54 | name = "chardet" 55 | optional = false 56 | python-versions = "*" 57 | version = "3.0.4" 58 | 59 | [[package]] 60 | category = "dev" 61 | description = "Composable command line interface toolkit" 62 | name = "click" 63 | optional = false 64 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 65 | version = "7.1.2" 66 | 67 | [[package]] 68 | category = "dev" 69 | description = "the modular source code checker: pep8 pyflakes and co" 70 | name = "flake8" 71 | optional = false 72 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" 73 | version = "3.8.3" 74 | 75 | [package.dependencies] 76 | mccabe = ">=0.6.0,<0.7.0" 77 | pycodestyle = ">=2.6.0a1,<2.7.0" 78 | pyflakes = ">=2.2.0,<2.3.0" 79 | 80 | [package.dependencies.importlib-metadata] 81 | python = "<3.8" 82 | version = "*" 83 | 84 | [[package]] 85 | category = "main" 86 | description = "Internationalized Domain Names in Applications (IDNA)" 87 | name = "idna" 88 | optional = false 89 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 90 | version = "2.9" 91 | 92 | [[package]] 93 | category = "dev" 94 | description = "Read metadata from Python packages" 95 | marker = "python_version < \"3.8\"" 96 | name = "importlib-metadata" 97 | optional = false 98 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 99 | version = "1.6.1" 100 | 101 | [package.dependencies] 102 | zipp = ">=0.5" 103 | 104 | [package.extras] 105 | docs = ["sphinx", "rst.linker"] 106 | testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] 107 | 108 | [[package]] 109 | category = "dev" 110 | description = "A Python utility / library to sort Python imports." 
111 | name = "isort" 112 | optional = false 113 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 114 | version = "4.3.21" 115 | 116 | [package.extras] 117 | pipfile = ["pipreqs", "requirementslib"] 118 | pyproject = ["toml"] 119 | requirements = ["pipreqs", "pip-api"] 120 | xdg_home = ["appdirs (>=1.4.0)"] 121 | 122 | [[package]] 123 | category = "dev" 124 | description = "McCabe checker, plugin for flake8" 125 | name = "mccabe" 126 | optional = false 127 | python-versions = "*" 128 | version = "0.6.1" 129 | 130 | [[package]] 131 | category = "dev" 132 | description = "Optional static typing for Python" 133 | name = "mypy" 134 | optional = false 135 | python-versions = ">=3.5" 136 | version = "0.761" 137 | 138 | [package.dependencies] 139 | mypy-extensions = ">=0.4.3,<0.5.0" 140 | typed-ast = ">=1.4.0,<1.5.0" 141 | typing-extensions = ">=3.7.4" 142 | 143 | [package.extras] 144 | dmypy = ["psutil (>=4.0)"] 145 | 146 | [[package]] 147 | category = "dev" 148 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 149 | name = "mypy-extensions" 150 | optional = false 151 | python-versions = "*" 152 | version = "0.4.3" 153 | 154 | [[package]] 155 | category = "dev" 156 | description = "Utility library for gitignore style pattern matching of file paths." 157 | name = "pathspec" 158 | optional = false 159 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 160 | version = "0.8.0" 161 | 162 | [[package]] 163 | category = "dev" 164 | description = "Python style guide checker" 165 | name = "pycodestyle" 166 | optional = false 167 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 168 | version = "2.6.0" 169 | 170 | [[package]] 171 | category = "dev" 172 | description = "passive checker of Python programs" 173 | name = "pyflakes" 174 | optional = false 175 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 176 | version = "2.2.0" 177 | 178 | [[package]] 179 | category = "dev" 180 | description = "Alternative regular expression module, to replace re." 181 | name = "regex" 182 | optional = false 183 | python-versions = "*" 184 | version = "2020.6.8" 185 | 186 | [[package]] 187 | category = "main" 188 | description = "Python HTTP for Humans." 
189 | name = "requests" 190 | optional = false 191 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 192 | version = "2.23.0" 193 | 194 | [package.dependencies] 195 | certifi = ">=2017.4.17" 196 | chardet = ">=3.0.2,<4" 197 | idna = ">=2.5,<3" 198 | urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" 199 | 200 | [package.extras] 201 | security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] 202 | socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] 203 | 204 | [[package]] 205 | category = "dev" 206 | description = "Python Library for Tom's Obvious, Minimal Language" 207 | name = "toml" 208 | optional = false 209 | python-versions = "*" 210 | version = "0.10.1" 211 | 212 | [[package]] 213 | category = "dev" 214 | description = "a fork of Python 2 and 3 ast modules with type comment support" 215 | name = "typed-ast" 216 | optional = false 217 | python-versions = "*" 218 | version = "1.4.1" 219 | 220 | [[package]] 221 | category = "dev" 222 | description = "Backported and Experimental Type Hints for Python 3.5+" 223 | name = "typing-extensions" 224 | optional = false 225 | python-versions = "*" 226 | version = "3.7.4.2" 227 | 228 | [[package]] 229 | category = "main" 230 | description = "HTTP library with thread-safe connection pooling, file post, and more." 231 | name = "urllib3" 232 | optional = false 233 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 234 | version = "1.25.9" 235 | 236 | [package.extras] 237 | brotli = ["brotlipy (>=0.6.0)"] 238 | secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"] 239 | socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] 240 | 241 | [[package]] 242 | category = "dev" 243 | description = "Backport of pathlib-compatible object wrapper for zip files" 244 | marker = "python_version < \"3.8\"" 245 | name = "zipp" 246 | optional = false 247 | python-versions = ">=3.6" 248 | version = "3.1.0" 249 | 250 | [package.extras] 251 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 252 | testing = ["jaraco.itertools", "func-timeout"] 253 | 254 | [metadata] 255 | content-hash = "116ea460c7b0377eb1ac6ff124cae22c452ebc6f8b8a8050d1499f4b7c2e6d0a" 256 | python-versions = "^3.7" 257 | 258 | [metadata.files] 259 | appdirs = [ 260 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, 261 | {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, 262 | ] 263 | attrs = [ 264 | {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, 265 | {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, 266 | ] 267 | black = [ 268 | {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"}, 269 | {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, 270 | ] 271 | certifi = [ 272 | {file = "certifi-2020.4.5.2-py2.py3-none-any.whl", hash = "sha256:9cd41137dc19af6a5e03b630eefe7d1f458d964d406342dd3edf625839b944cc"}, 273 | {file = "certifi-2020.4.5.2.tar.gz", hash = "sha256:5ad7e9a056d25ffa5082862e36f119f7f7cec6457fa07ee2f8c339814b80c9b1"}, 274 | ] 275 | chardet = [ 276 | {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = 
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, 277 | {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, 278 | ] 279 | click = [ 280 | {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, 281 | {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, 282 | ] 283 | flake8 = [ 284 | {file = "flake8-3.8.3-py2.py3-none-any.whl", hash = "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c"}, 285 | {file = "flake8-3.8.3.tar.gz", hash = "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"}, 286 | ] 287 | idna = [ 288 | {file = "idna-2.9-py2.py3-none-any.whl", hash = "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"}, 289 | {file = "idna-2.9.tar.gz", hash = "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"}, 290 | ] 291 | importlib-metadata = [ 292 | {file = "importlib_metadata-1.6.1-py2.py3-none-any.whl", hash = "sha256:15ec6c0fd909e893e3a08b3a7c76ecb149122fb14b7efe1199ddd4c7c57ea958"}, 293 | {file = "importlib_metadata-1.6.1.tar.gz", hash = "sha256:0505dd08068cfec00f53a74a0ad927676d7757da81b7436a6eefe4c7cf75c545"}, 294 | ] 295 | isort = [ 296 | {file = "isort-4.3.21-py2.py3-none-any.whl", hash = "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"}, 297 | {file = "isort-4.3.21.tar.gz", hash = "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1"}, 298 | ] 299 | mccabe = [ 300 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 301 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 302 | ] 303 | mypy = [ 304 | {file = "mypy-0.761-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:7f672d02fffcbace4db2b05369142e0506cdcde20cea0e07c7c2171c4fd11dd6"}, 305 | {file = "mypy-0.761-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:87c556fb85d709dacd4b4cb6167eecc5bbb4f0a9864b69136a0d4640fdc76a36"}, 306 | {file = "mypy-0.761-cp35-cp35m-win_amd64.whl", hash = "sha256:c6d27bd20c3ba60d5b02f20bd28e20091d6286a699174dfad515636cb09b5a72"}, 307 | {file = "mypy-0.761-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:4b9365ade157794cef9685791032521233729cb00ce76b0ddc78749abea463d2"}, 308 | {file = "mypy-0.761-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:634aef60b4ff0f650d3e59d4374626ca6153fcaff96ec075b215b568e6ee3cb0"}, 309 | {file = "mypy-0.761-cp36-cp36m-win_amd64.whl", hash = "sha256:53ea810ae3f83f9c9b452582261ea859828a9ed666f2e1ca840300b69322c474"}, 310 | {file = "mypy-0.761-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:0a9a45157e532da06fe56adcfef8a74629566b607fa2c1ac0122d1ff995c748a"}, 311 | {file = "mypy-0.761-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7eadc91af8270455e0d73565b8964da1642fe226665dd5c9560067cd64d56749"}, 312 | {file = "mypy-0.761-cp37-cp37m-win_amd64.whl", hash = "sha256:e2bb577d10d09a2d8822a042a23b8d62bc3b269667c9eb8e60a6edfa000211b1"}, 313 | {file = "mypy-0.761-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c35cae79ceb20d47facfad51f952df16c2ae9f45db6cb38405a3da1cf8fc0a7"}, 314 | {file = "mypy-0.761-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f97a605d7c8bc2c6d1172c2f0d5a65b24142e11a58de689046e62c2d632ca8c1"}, 315 | {file = "mypy-0.761-cp38-cp38-win_amd64.whl", hash = 
"sha256:a6bd44efee4dc8c3324c13785a9dc3519b3ee3a92cada42d2b57762b7053b49b"}, 316 | {file = "mypy-0.761-py3-none-any.whl", hash = "sha256:7e396ce53cacd5596ff6d191b47ab0ea18f8e0ec04e15d69728d530e86d4c217"}, 317 | {file = "mypy-0.761.tar.gz", hash = "sha256:85baab8d74ec601e86134afe2bcccd87820f79d2f8d5798c889507d1088287bf"}, 318 | ] 319 | mypy-extensions = [ 320 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 321 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 322 | ] 323 | pathspec = [ 324 | {file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"}, 325 | {file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"}, 326 | ] 327 | pycodestyle = [ 328 | {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"}, 329 | {file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"}, 330 | ] 331 | pyflakes = [ 332 | {file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"}, 333 | {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"}, 334 | ] 335 | regex = [ 336 | {file = "regex-2020.6.8-cp27-cp27m-win32.whl", hash = "sha256:fbff901c54c22425a5b809b914a3bfaf4b9570eee0e5ce8186ac71eb2025191c"}, 337 | {file = "regex-2020.6.8-cp27-cp27m-win_amd64.whl", hash = "sha256:112e34adf95e45158c597feea65d06a8124898bdeac975c9087fe71b572bd938"}, 338 | {file = "regex-2020.6.8-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:92d8a043a4241a710c1cf7593f5577fbb832cf6c3a00ff3fc1ff2052aff5dd89"}, 339 | {file = "regex-2020.6.8-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bae83f2a56ab30d5353b47f9b2a33e4aac4de9401fb582b55c42b132a8ac3868"}, 340 | {file = "regex-2020.6.8-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:b2ba0f78b3ef375114856cbdaa30559914d081c416b431f2437f83ce4f8b7f2f"}, 341 | {file = "regex-2020.6.8-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:95fa7726d073c87141f7bbfb04c284901f8328e2d430eeb71b8ffdd5742a5ded"}, 342 | {file = "regex-2020.6.8-cp36-cp36m-win32.whl", hash = "sha256:e3cdc9423808f7e1bb9c2e0bdb1c9dc37b0607b30d646ff6faf0d4e41ee8fee3"}, 343 | {file = "regex-2020.6.8-cp36-cp36m-win_amd64.whl", hash = "sha256:c78e66a922de1c95a208e4ec02e2e5cf0bb83a36ceececc10a72841e53fbf2bd"}, 344 | {file = "regex-2020.6.8-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:08997a37b221a3e27d68ffb601e45abfb0093d39ee770e4257bd2f5115e8cb0a"}, 345 | {file = "regex-2020.6.8-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:2f6f211633ee8d3f7706953e9d3edc7ce63a1d6aad0be5dcee1ece127eea13ae"}, 346 | {file = "regex-2020.6.8-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:55b4c25cbb3b29f8d5e63aeed27b49fa0f8476b0d4e1b3171d85db891938cc3a"}, 347 | {file = "regex-2020.6.8-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:89cda1a5d3e33ec9e231ece7307afc101b5217523d55ef4dc7fb2abd6de71ba3"}, 348 | {file = "regex-2020.6.8-cp37-cp37m-win32.whl", hash = "sha256:690f858d9a94d903cf5cada62ce069b5d93b313d7d05456dbcd99420856562d9"}, 349 | {file = "regex-2020.6.8-cp37-cp37m-win_amd64.whl", hash = "sha256:1700419d8a18c26ff396b3b06ace315b5f2a6e780dad387e4c48717a12a22c29"}, 350 | {file = 
"regex-2020.6.8-cp38-cp38-manylinux1_i686.whl", hash = "sha256:654cb773b2792e50151f0e22be0f2b6e1c3a04c5328ff1d9d59c0398d37ef610"}, 351 | {file = "regex-2020.6.8-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:52e1b4bef02f4040b2fd547357a170fc1146e60ab310cdbdd098db86e929b387"}, 352 | {file = "regex-2020.6.8-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:cf59bbf282b627130f5ba68b7fa3abdb96372b24b66bdf72a4920e8153fc7910"}, 353 | {file = "regex-2020.6.8-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:5aaa5928b039ae440d775acea11d01e42ff26e1561c0ffcd3d805750973c6baf"}, 354 | {file = "regex-2020.6.8-cp38-cp38-win32.whl", hash = "sha256:97712e0d0af05febd8ab63d2ef0ab2d0cd9deddf4476f7aa153f76feef4b2754"}, 355 | {file = "regex-2020.6.8-cp38-cp38-win_amd64.whl", hash = "sha256:6ad8663c17db4c5ef438141f99e291c4d4edfeaacc0ce28b5bba2b0bf273d9b5"}, 356 | {file = "regex-2020.6.8.tar.gz", hash = "sha256:e9b64e609d37438f7d6e68c2546d2cb8062f3adb27e6336bc129b51be20773ac"}, 357 | ] 358 | requests = [ 359 | {file = "requests-2.23.0-py2.py3-none-any.whl", hash = "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee"}, 360 | {file = "requests-2.23.0.tar.gz", hash = "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"}, 361 | ] 362 | toml = [ 363 | {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, 364 | {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, 365 | ] 366 | typed-ast = [ 367 | {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, 368 | {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, 369 | {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, 370 | {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, 371 | {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, 372 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, 373 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, 374 | {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, 375 | {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, 376 | {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, 377 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, 378 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, 379 | {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, 380 | {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, 381 | {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, 382 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, 383 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, 384 | {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, 385 | {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, 386 | {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, 387 | {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, 388 | ] 389 | typing-extensions = [ 390 | {file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"}, 391 | {file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"}, 392 | {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"}, 393 | ] 394 | urllib3 = [ 395 | {file = "urllib3-1.25.9-py2.py3-none-any.whl", hash = "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"}, 396 | {file = "urllib3-1.25.9.tar.gz", hash = "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527"}, 397 | ] 398 | zipp = [ 399 | {file = "zipp-3.1.0-py3-none-any.whl", hash = "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b"}, 400 | {file = "zipp-3.1.0.tar.gz", hash = "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"}, 401 | ] 402 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "pbiapi" 3 | version = "0.2.2" 4 | description = "A Python library for working with the Power BI API" 5 | authors = ["Scott Melhop "] 6 | repository = "https://github.com/scottmelhop/PowerBI-API-Python" 7 | classifiers=[ 8 | "Programming Language :: Python :: 3", 9 | "License :: OSI Approved :: MIT License", 10 | "Operating System :: OS Independent", 11 | ] 12 | 13 | [tool.black] 14 | line-length = 120 15 | target_version = ['py37'] 16 | include = '\.py$' 17 | 18 | [tool.isort] 19 | line_length=120 # corresponds to -w flag 20 | multi_line_output=3 # corresponds to -m flag 21 | include_trailing_comma=true # corresponds to -tc flag 22 | skip_glob = '^((?!py$).)*$' # this makes sort all Python files 23 | known_third_party = ["arrow", "autoimpute", "cognite", "cvxpy", "fancyimpute", "matplotlib", "numpy", "pandas", "pomegranate", "pykalman", "pymc3", "pytest", "ruptures", "scipy", "sklearn", "theano", "torch"] 24 | 25 | [tool.poetry.dependencies] 26 | python = "^3.7" 27 | requests = "^2.23.0" 28 | 29 | [tool.poetry.dev-dependencies] 30 | black = "^19.10b0" 31 | isort = "^4.3.21" 32 | mypy = "^0.761" 33 | flake8 = "^3.7.9" 34 | 35 | [build-system] 36 | requires = ["poetry>=0.12"] 37 | build-backend = "poetry.masonry.api" 38 | 
-------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | requests>=2.20.0 2 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | with open("README.md", "r") as fh: 4 | long_description = fh.read() 5 | 6 | with open("requirements.txt") as fh: 7 | install_requires = fh.read().splitlines() 8 | 9 | setuptools.setup( 10 | name="pbiapi", 11 | version="0.2.2", 12 | author="Scott Melhop", 13 | author_email="scott.melhop@gmail.com", 14 | description="A Python library for working with the Power BI API", 15 | long_description=long_description, 16 | long_description_content_type="text/markdown", 17 | url="https://github.com/scottmelhop/PowerBI-API-Python", 18 | packages=setuptools.find_packages(), 19 | install_requires=install_requires, 20 | classifiers=[ 21 | "Programming Language :: Python :: 3", 22 | "License :: OSI Approved :: MIT License", 23 | "Operating System :: OS Independent", 24 | ], 25 | ) 26 | --------------------------------------------------------------------------------