├── .gitignore ├── LICENSE ├── README.md ├── examples ├── credentials.py ├── get_activity_logs.py ├── get_dataset_gateway_datasources.py ├── get_refresh_history.py └── refresh_dataset.py ├── pypowerbi ├── __init__.py ├── activity_logs.py ├── base.py ├── client.py ├── credentials.py ├── dataset.py ├── datasets.py ├── enums.py ├── features.py ├── gateway.py ├── gateways.py ├── group.py ├── group_user.py ├── groups.py ├── import_class.py ├── imports.py ├── report.py ├── reports.py ├── tests │ ├── __init__.py │ ├── credentials_tests.py │ ├── dataset_tests.py │ ├── imports_tests.py │ ├── powerbiclient_json_tests.py │ ├── powerbiclient_tests.py │ ├── settings.py │ ├── test_data.xlsx │ ├── test_report.pbix │ └── utils_tests.py └── utils.py └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | env2/ 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | db.sqlite3 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # IPython Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # dotenv 81 | .env 82 | 83 | # virtualenv 84 | env/ 85 | venv/ 86 | ENV/ 87 | 88 | # Spyder project settings 89 | .spyderproject 90 | 91 | # Rope project settings 92 | .ropeproject 93 | 94 | # osx dsstore 95 | .DS_Store 96 | 97 | # project specific 98 | static_collected 99 | 100 | 101 | # Created by https://www.gitignore.io/api/pycharm 102 | 103 | ### PyCharm ### 104 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 105 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 106 | 107 | # User-specific stuff: 108 | .idea/ 109 | 110 | # CMake 111 | cmake-build-debug/ 112 | 113 | ## File-based project format: 114 | *.iws 115 | 116 | ## Plugin-specific files: 117 | 118 | # IntelliJ 119 | /out/ 120 | 121 | # mpeltonen/sbt-idea plugin 122 | .idea_modules/ 123 | 124 | # JIRA plugin 125 | atlassian-ide-plugin.xml 126 | 127 | # Cursive Clojure plugin 128 | .idea/replstate.xml 129 | 130 | # Ruby plugin and RubyMine 131 | /.rakeTasks 132 | 133 | # Crashlytics plugin (for Android Studio and IntelliJ) 134 | com_crashlytics_export_strings.xml 135 | crashlytics.properties 136 | crashlytics-build.properties 137 | fabric.properties 138 | 139 | ### 
PyCharm Patch ### 140 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 141 | 142 | # *.iml 143 | # modules.xml 144 | # .idea/misc.xml 145 | # *.ipr 146 | 147 | # Sonarlint plugin 148 | .idea/sonarlint 149 | 150 | 151 | # End of https://www.gitignore.io/api/pycharm 152 | /ExampleScripts/creds.json 153 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Chris Berry 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pypowerbi 2 | 3 | Python library for PowerBI. Loosely modelled after the C# PowerBI library to keep things somehow consistent. 
4 | 5 | ## Installation 6 | 7 | ``` 8 | pip install pypowerbi 9 | ``` 10 | 11 | ## Examples 12 | 13 | ### Posting a dataset 14 | 15 | ``` 16 | import adal 17 | from pypowerbi.dataset import Column, Table, Dataset 18 | from pypowerbi.client import PowerBIClient 19 | 20 | # you might need to change these, but i doubt it 21 | authority_url = 'https://login.windows.net/common' 22 | resource_url = 'https://analysis.windows.net/powerbi/api' 23 | api_url = 'https://api.powerbi.com' 24 | 25 | # change these to your credentials 26 | client_id = '00000000-0000-0000-0000-000000000000' 27 | username = 'someone@somecompany.com' 28 | password = 'averygoodpassword' 29 | 30 | # first you need to authenticate using adal 31 | context = adal.AuthenticationContext(authority=authority_url, 32 | validate_authority=True, 33 | api_version=None) 34 | 35 | # get your authentication token 36 | token = context.acquire_token_with_username_password(resource=resource_url, 37 | client_id=client_id, 38 | username=username, 39 | password=password) 40 | 41 | # create your powerbi api client 42 | client = PowerBIClient(api_url, token) 43 | 44 | # create your columns 45 | columns = [] 46 | columns.append(Column(name='id', data_type='Int64')) 47 | columns.append(Column(name='name', data_type='string')) 48 | columns.append(Column(name='is_interesting', data_type='boolean')) 49 | columns.append(Column(name='cost_usd', data_type='double')) 50 | columns.append(Column(name='purchase_date', data_type='datetime')) 51 | 52 | # create your tables 53 | tables = [] 54 | tables.append(Table(name='AnExampleTableName', columns=columns)) 55 | 56 | # create your dataset 57 | dataset = Dataset(name='AnExampleDatasetName', tables=tables) 58 | 59 | # post your dataset! 60 | client.datasets.post_dataset(dataset) 61 | ``` 62 | 63 | ### Authentication & Authorization 64 | 65 | It uses `adal` library for authentication and authorization. 
If you need step by step way to do auth, please refer to [this example on Bitbucket](https://bitbucket.org/omnistream/powerbi-api-example/). 66 | -------------------------------------------------------------------------------- /examples/credentials.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | 4 | """ 5 | There are two ways to specify credentials used by the example scripts: 6 | 1) Directly enter them into this file. 7 | 2) Enter them into a creds.json file in the same directory as this script. (See below for format) 8 | 9 | If you are just using the library, it should be fine to just enter the credentials in this file. If you're developing code 10 | for this library, you'll probably want to use the .json file and add it to your .gitignore so that you don't 11 | accidentally commit your login info! 12 | 13 | The creds.json file should look like: 14 | ------------------------------------------------------------------------- 15 | { 16 | "client_id": "", 17 | "username": "", 18 | "password": "" 19 | } 20 | ------------------------------------------------------------------------- 21 | """ 22 | 23 | # Enter your credentials here. 
If you're using the creds.json file, leave them as an empty string 24 | client_id = '' 25 | username = '' 26 | password = '' 27 | 28 | 29 | # If credentials aren't specified in this file, see if there's a "creds.json" file in the current directory and use it 30 | if client_id == '': 31 | scriptpath = os.path.dirname(os.path.realpath(__file__)) 32 | creds_file_path = os.path.join(scriptpath, 'creds.json') 33 | 34 | if os.path.exists(creds_file_path): 35 | creds_fh = open(creds_file_path, "r") 36 | creds = json.load(creds_fh) 37 | 38 | client_id = creds["client_id"] 39 | username = creds["username"] 40 | password = creds["password"] 41 | else: 42 | raise RuntimeError("Credentials must be specified in Credentials.py or creds.json!") -------------------------------------------------------------------------------- /examples/get_activity_logs.py: -------------------------------------------------------------------------------- 1 | from pypowerbi.client import PowerBIClient 2 | from pypowerbi.activity_logs import ActivityLogs 3 | from datetime import datetime 4 | 5 | import pandas as pd 6 | 7 | from Credentials import client_id, username, password 8 | 9 | # create your powerbi api client 10 | client = PowerBIClient.get_client_with_username_password(client_id=client_id, username=username, password=password) 11 | 12 | # When testing, only logs from December 15th, 2019 and later were available. This may change in the future though. 
13 | dt = datetime(2019, 12, 16) 14 | logs = client.activity_logs.get_activity_logs(dt) 15 | print(logs) 16 | 17 | pandas_installed = True 18 | try: 19 | import pandas as pd 20 | pd.set_option('display.max_columns', 500) 21 | pd.set_option('display.width', 1000) 22 | except ImportError: 23 | pandas_installed = False 24 | 25 | if pandas_installed: 26 | # If pandas is installed, we can do lots of neat analysis 27 | df = pd.DataFrame(logs) 28 | print(df.columns) 29 | print(df["UserId"].unique()) 30 | #df.to_csv(r"d:\powerbi_activity.csv") 31 | 32 | 33 | -------------------------------------------------------------------------------- /examples/get_dataset_gateway_datasources.py: -------------------------------------------------------------------------------- 1 | import adal 2 | from pypowerbi.dataset import Column, Table, Dataset 3 | from pypowerbi.client import PowerBIClient 4 | 5 | from Credentials import client_id, username, password 6 | 7 | # Dataset to get gateway info for 8 | group_id = "" 9 | dataset_id = "" 10 | 11 | # create your powerbi api client 12 | client = PowerBIClient.get_client_with_username_password(client_id=client_id, username=username, password=password) 13 | 14 | # do_refresh(client, group_id, dataset_id) 15 | notify_option = "MailOnFailure" 16 | data_sources = client.datasets.get_dataset_gateway_datasources(dataset_id, group_id=group_id) 17 | print(data_sources) 18 | -------------------------------------------------------------------------------- /examples/get_refresh_history.py: -------------------------------------------------------------------------------- 1 | import adal 2 | from pypowerbi.dataset import Column, Table, Dataset 3 | from pypowerbi.client import PowerBIClient 4 | 5 | from Credentials import client_id, username, password 6 | 7 | # Dataset to get refresh history for 8 | group_id = "" 9 | dataset_id = "" 10 | 11 | # create your powerbi api client 12 | client = PowerBIClient.get_client_with_username_password(client_id=client_id, 
username=username, password=password) 13 | 14 | # Get the entire refresh history 15 | history = client.datasets.get_dataset_refresh_history(dataset_id, group_id=group_id) 16 | print(history) 17 | 18 | # Get the most recent refresh 19 | history = client.datasets.get_dataset_refresh_history(dataset_id, group_id=group_id, top=1) 20 | print(history) -------------------------------------------------------------------------------- /examples/refresh_dataset.py: -------------------------------------------------------------------------------- 1 | import adal 2 | from pypowerbi.dataset import Column, Table, Dataset 3 | from pypowerbi.client import PowerBIClient 4 | from Credentials import client_id, username, password 5 | 6 | # The Report to Refresh 7 | group_id = "" 8 | dataset_id = "" 9 | 10 | # create your powerbi api client 11 | client = PowerBIClient.get_client_with_username_password(client_id=client_id, username=username, password=password) 12 | 13 | notify_option = "MailOnFailure" 14 | client.datasets.refresh_dataset(dataset_id, notify_option=notify_option, group_id=group_id) 15 | -------------------------------------------------------------------------------- /pypowerbi/__init__.py: -------------------------------------------------------------------------------- 1 | from .client import * 2 | from .dataset import * 3 | from .datasets import * 4 | from .report import * 5 | from .reports import * 6 | from .imports import * 7 | from .groups import * 8 | from .gateways import * 9 | from .gateway import * 10 | -------------------------------------------------------------------------------- /pypowerbi/activity_logs.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | import requests 3 | 4 | import datetime 5 | 6 | from requests.exceptions import HTTPError 7 | 8 | 9 | class ActivityLogs: 10 | 11 | def __init__(self, client): 12 | self.client = client 13 | self.base_url = 
f'{self.client.api_url}/{self.client.api_version_snippet}/{self.client.api_myorg_snippet}' 14 | 15 | self.activities_events_snippet = "activityevents" 16 | self.group_part = "admin" # This is always admin. Not really a group, but follows the 17 | # format of the rest of the library code 18 | 19 | def get_activity_logs(self, st, et=None, filter=None): 20 | """ 21 | Get's the activity log for the specified date or date range. If et is None, it will get all logs (from midnight 22 | to 11:59:59 UTC) for the date specified by st. If et is set, it will retrieve logs from st-et. Note that the 23 | Power BI Activity Service currently supports only retrieving one day of logs at a time. 24 | 25 | "filter" is a string parameter that's sent to the service to filter the types of events returned. For example, 26 | "Activity eq 'viewreport' and UserId eq 'john@contoso.com'" gets report views for john(contoso.com). 27 | Right now the service only supports the operators ['eq', 'and']. 28 | 29 | NOTE: It appears that only data from December 15th, 2019 and on can be retrieved by the API as of the writing 30 | of this code. This isn't an official limitation I've found in the documentation, but seems to be the case. 31 | 32 | NOTE: This API allows at most 200 Requests per hour. 33 | 34 | For a good overview of the service, see https://powerbi.microsoft.com/en-us/blog/the-power-bi-activity-log-makes-it-easy-to-download-activity-data-for-custom-usage-reporting/ 35 | 36 | :param st: The date to retrieve usage for (python datetime). 37 | :param et: The date to retrieve usage for (python datetime). 38 | :param filter: A string that defines a filter for retrieving the information. See the Power BI REST API 39 | Documentation for details. 40 | :return: 41 | """ 42 | # TODO: It would be nice if the available parameters for the "filter" function were defined somewhere in code. 
43 | 44 | if et is None: 45 | dt_str = st.strftime("%Y-%m-%d") 46 | st_dt_str = f"{dt_str}T00:00:00" 47 | et_dt_str = f"{dt_str}T23:59:59" 48 | else: 49 | st_dt_str = st.strftime("%Y-%m-%dT:%H%M%S") 50 | et_dt_str = et.strftime("%Y-%m-%dT:%H%M%S") 51 | 52 | # https://api.powerbi.com/v1.0/myorg/admin/activityevents?startDateTime='{st_dt_str}'&endDateTime='{et_dt_str}' 53 | 54 | # form the url 55 | filter_snippet = f"startDateTime='{st_dt_str}'&endDateTime='{et_dt_str}'" 56 | url = f'{self.base_url}/{self.group_part}/{self.activities_events_snippet}?{filter_snippet}' 57 | 58 | if filter is not None: 59 | url += f"$filter={filter}" 60 | 61 | # form the headers 62 | headers = self.client.auth_header 63 | 64 | # get the response 65 | response = requests.get(url, headers=headers) 66 | 67 | # 200 is the only successful code, raise an exception on any other response code 68 | if response.status_code != 200: 69 | raise HTTPError(response, f'Get Datasets request returned http error: {response.json()}') 70 | 71 | response_obj = response.json() 72 | 73 | event_entities = response_obj["activityEventEntities"] 74 | continuation_uri = response_obj["continuationUri"] 75 | continuation_token = response_obj["continuationToken"] 76 | 77 | activity_events = event_entities 78 | 79 | # Even if nothing is returned, it takes around 24 tries until no continuation token is returned. 80 | # (This is how Microsoft says the API is to be used.) 81 | # It seems to send the first set of actual data around 12-15 calls in. This doesn't seem to change even if you 82 | # slow down the API calls (in total number of calls required or when the first set of actual data is returned). 
83 | 84 | cont_count = 1 85 | while continuation_token is not None: 86 | 87 | response = requests.get(continuation_uri, headers=headers) 88 | response_obj = response.json() 89 | 90 | event_entities = response_obj["activityEventEntities"] 91 | continuation_uri = response_obj["continuationUri"] 92 | continuation_token = response_obj["continuationToken"] 93 | 94 | activity_events.extend(event_entities) 95 | # print(f"{cont_count}: {len(event_entities)}") 96 | cont_count += 1 97 | 98 | # print(f"Took {cont_count} tries to exhaust continuation token for {len(activity_events)} events.") 99 | 100 | # Convert Datetime Strings to Python datetimes 101 | _date_fmt_str = '%Y-%m-%dT%H:%M:%S' 102 | for event in activity_events: 103 | event["CreationTime"] = datetime.datetime.strptime(event["CreationTime"], _date_fmt_str) 104 | # Change the Timezone to UTC 105 | event["CreationTime"] = event["CreationTime"].replace(tzinfo=datetime.timezone.utc) 106 | 107 | return activity_events 108 | -------------------------------------------------------------------------------- /pypowerbi/base.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from typing import Dict, Union 3 | 4 | 5 | class Deserializable(metaclass=abc.ABCMeta): 6 | """Interface to ensure operations modules need fewer methods to turn responses into objects""" 7 | @classmethod 8 | @abc.abstractmethod 9 | def from_dict(cls, dictionary: Dict[str, Union[str, Dict[str, str]]]): 10 | pass 11 | -------------------------------------------------------------------------------- /pypowerbi/client.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | 3 | import json 4 | import datetime 5 | import adal 6 | 7 | from .reports import Reports 8 | from .datasets import Datasets 9 | from .imports import Imports 10 | from .groups import Groups 11 | from .gateways import Gateways 12 | from .activity_logs import ActivityLogs 
13 | 14 | 15 | class PowerBIClient: 16 | default_resource_url = 'https://analysis.windows.net/powerbi/api' 17 | default_api_url = 'https://api.powerbi.com' 18 | default_authority_url = 'https://login.windows.net/common' 19 | 20 | api_version_snippet = 'v1.0' 21 | api_myorg_snippet = 'myorg' 22 | 23 | @staticmethod 24 | def get_client_with_username_password(client_id, username, password, authority_url=None, resource_url=None, api_url=None): 25 | """ 26 | Constructs a client with the option of using common defaults. 27 | 28 | :param client_id: The Power BI Client ID 29 | :param username: Username 30 | :param password: Password 31 | :param authority_url: The authority_url; defaults to 'https://login.windows.net/common' 32 | :param resource_url: The resource_url; defaults to 'https://analysis.windows.net/powerbi/api' 33 | :param api_url: The api_url: defaults to 'https://api.powerbi.com' 34 | :return: 35 | """ 36 | if authority_url is None: 37 | authority_url = PowerBIClient.default_authority_url 38 | 39 | if resource_url is None: 40 | resource_url = PowerBIClient.default_resource_url 41 | 42 | if api_url is None: 43 | api_url = PowerBIClient.default_api_url 44 | 45 | context = adal.AuthenticationContext(authority=authority_url, 46 | validate_authority=True, 47 | api_version=None) 48 | 49 | # get your authentication token 50 | token = context.acquire_token_with_username_password(resource=resource_url, 51 | client_id=client_id, 52 | username=username, 53 | password=password) 54 | 55 | return PowerBIClient(api_url, token) 56 | 57 | def __init__(self, api_url, token): 58 | self.api_url = api_url 59 | self.token = token 60 | self.datasets = Datasets(self) 61 | self.reports = Reports(self) 62 | self.imports = Imports(self) 63 | self.groups = Groups(self) 64 | self.gateways = Gateways(self) 65 | self.activity_logs = ActivityLogs(self) 66 | 67 | @property 68 | def auth_header(self): 69 | if self._auth_header is None: 70 | self._auth_header = { 71 | 'Authorization': f'Bearer 
{self.token["accessToken"]}' 72 | } 73 | 74 | return self._auth_header 75 | 76 | _auth_header = None 77 | 78 | 79 | class EffectiveIdentity: 80 | username_key = 'username' 81 | roles_key = 'roles' 82 | datasets_key = 'datasets' 83 | 84 | def __init__(self, username, roles, datasets): 85 | self.username = username 86 | self.roles = roles 87 | self.datasets = datasets 88 | 89 | 90 | class EffectiveIdentityEncoder(json.JSONEncoder): 91 | def default(self, o): 92 | return { 93 | EffectiveIdentity.username_key: o.username, 94 | EffectiveIdentity.roles_key: o.roles, 95 | EffectiveIdentity.datasets_key: o.datasets, 96 | } 97 | 98 | 99 | class TokenRequest: 100 | access_level_key = 'accessLevel' 101 | dataset_id_key = 'datasetId' 102 | allow_saveas_key = 'allowSaveAs' 103 | identities_key = 'identities' 104 | 105 | def __init__(self, access_level, dataset_id=None, allow_saveas=None, identities=None): 106 | self.access_level = access_level 107 | self.dataset_id = dataset_id 108 | self.allow_saveas = allow_saveas 109 | self.identities = identities 110 | 111 | 112 | class TokenRequestEncoder(json.JSONEncoder): 113 | def default(self, o): 114 | effective_identity_encoder = EffectiveIdentityEncoder() 115 | 116 | json_dict = { 117 | TokenRequest.access_level_key: o.access_level 118 | } 119 | 120 | if o.dataset_id is not None: 121 | json_dict[TokenRequest.dataset_id_key] = o.dataset_id 122 | 123 | if o.allow_saveas is not None: 124 | json_dict[TokenRequest.allow_saveas_key] = o.allow_saveas 125 | 126 | if o.identities is not None: 127 | json_dict[TokenRequest.identities_key] = [effective_identity_encoder.default(x) for x in o.identities] 128 | 129 | return json_dict 130 | 131 | 132 | class EmbedToken: 133 | token_key = 'token' 134 | token_id_key = 'tokenId' 135 | expiration_key = 'expiration' 136 | 137 | def __init__(self, token, token_id, expiration): 138 | self.token = token 139 | self.token_id = token_id 140 | self.expiration = expiration 141 | 142 | @classmethod 143 | def 
from_dict(cls, dictionary): 144 | if cls.token_key not in dictionary: 145 | raise RuntimeError(f'Token dict has no {cls.token_key} key') 146 | 147 | token = dictionary[cls.token_key] 148 | token_id = dictionary[cls.token_id_key] 149 | expiration = dictionary[cls.expiration_key] 150 | 151 | return EmbedToken(token, token_id, expiration) 152 | 153 | @property 154 | def expiration_as_datetime(self): 155 | return datetime.datetime.strptime(self.expiration, '%Y-%m-%dT%H:%M:%SZ') 156 | -------------------------------------------------------------------------------- /pypowerbi/credentials.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Dict, List, Union 2 | import json 3 | 4 | from pypowerbi import CredentialType 5 | 6 | 7 | class CredentialsBase: 8 | CREDENTIAL_TYPE: Optional[CredentialType] = None 9 | credential_data_key = "credentialData" 10 | 11 | def __init__(self): 12 | self.credential_data: Dict[str, Union[str, List[Dict[str, str]]]] = { 13 | self.credential_data_key: [] 14 | } 15 | 16 | def add_credential_data(self, key, value): 17 | self.credential_data[self.credential_data_key].append( 18 | {"name": key, "value": value} 19 | ) 20 | 21 | def to_json(self) -> str: 22 | return json.dumps(self.credential_data, separators=(',', ':'))\ 23 | .replace('\\\\', '\\') 24 | 25 | 26 | class UsernamePasswordCredentials(CredentialsBase): 27 | username_key = "username" 28 | password_key = "password" 29 | 30 | def __init__(self, username: str, password: str): 31 | super().__init__() 32 | 33 | if not username: 34 | raise ValueError("An empty string is not a valid username!") 35 | if not password: 36 | raise ValueError("An empty string is not a valid password!") 37 | 38 | self.username = username 39 | self.password = password 40 | 41 | super().add_credential_data(self.username_key, username) 42 | super().add_credential_data(self.password_key, password) 43 | 44 | 45 | class AnonymousCredentials(CredentialsBase): 
46 | CREDENTIAL_TYPE = CredentialType.ANONYMOUS 47 | 48 | def __init__(self): 49 | super().__init__() 50 | self.credential_data[self.credential_data_key] = "" 51 | 52 | def __repr__(self) -> str: 53 | return f'' 54 | 55 | 56 | class BasicCredentials(UsernamePasswordCredentials): 57 | CREDENTIAL_TYPE = CredentialType.BASIC 58 | 59 | def __init__(self, username: str, password: str): 60 | super().__init__(username, password) 61 | 62 | def __repr__(self) -> str: 63 | return f'' 64 | 65 | 66 | class KeyCredentials(CredentialsBase): 67 | CREDENTIAL_TYPE = CredentialType.KEY 68 | key_key = "key" 69 | 70 | def __init__(self, key: str): 71 | super().__init__() 72 | 73 | if not key: 74 | raise ValueError("An empty string is not a valid key!") 75 | 76 | self.key = key 77 | 78 | super().add_credential_data(self.key_key, key) 79 | 80 | def __repr__(self) -> str: 81 | return f'' 82 | 83 | 84 | class OAuth2Credentials(CredentialsBase): 85 | CREDENTIAL_TYPE = CredentialType.OAUTH2 86 | access_token_key = "accessToken" 87 | 88 | def __init__(self, access_token: str): 89 | super().__init__() 90 | 91 | if not access_token: 92 | raise ValueError("An empty string is not a valid access token!") 93 | 94 | super().add_credential_data(self.access_token_key, access_token) 95 | 96 | def __repr__(self) -> str: 97 | return f'' 98 | 99 | 100 | class WindowsCredentials(UsernamePasswordCredentials): 101 | CREDENTIAL_TYPE = CredentialType.WINDOWS 102 | 103 | def __init__(self, username: str, password: str): 104 | super().__init__(username, password) 105 | 106 | def __repr__(self): 107 | return f'' 108 | -------------------------------------------------------------------------------- /pypowerbi/dataset.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | import json 3 | from enum import Enum 4 | from typing import List, Optional, Dict, Union 5 | 6 | 7 | class Dataset: 8 | # json keys 9 | id_key = 'id' 10 | name_key = 'name' 
11 | add_rows_api_enabled_key = 'addRowsAPIEnabled' 12 | configured_by_key = 'configuredBy' 13 | is_refreshable_key = 'isRefreshable' 14 | is_effective_identity_required_key = 'isEffectiveIdentityRequired' 15 | is_effective_identity_roles_required_key = 'isEffectiveIdentityRolesRequired' 16 | is_on_prem_gateway_required_key = 'isOnPremGatewayRequired' 17 | tables_key = 'tables' 18 | 19 | def __init__(self, name, dataset_id=None, tables=None, add_rows_api_enabled=None, 20 | configured_by=None, is_refreshable=None, is_effective_identity_required=None, 21 | is_effective_identity_roles_required=None, is_on_prem_gateway_required=None): 22 | self.name = name 23 | self.id = dataset_id 24 | self.tables = tables 25 | self.add_rows_api_enabled = add_rows_api_enabled 26 | self.configured_by = configured_by 27 | self.is_refreshable = is_refreshable 28 | self.is_effective_identity_required = is_effective_identity_required 29 | self.is_effective_identity_roles_required = is_effective_identity_roles_required 30 | self.is_on_prem_gateway_required = is_on_prem_gateway_required 31 | 32 | @classmethod 33 | def from_dict(cls, dictionary): 34 | """ 35 | Creates a dataset from a dictionary, key values for 'id' and 'name' required 36 | :param dictionary: The dictionary to create the dataset from 37 | :return: A dataset created from the given dictionary 38 | """ 39 | # id is required 40 | if Dataset.id_key in dictionary: 41 | dataset_id = str(dictionary[Dataset.id_key]) 42 | # id cannot be whitespace 43 | if dataset_id.isspace(): 44 | raise RuntimeError('Dataset dict has empty id key value') 45 | else: 46 | raise RuntimeError('Dataset dict has no id key') 47 | # name is required 48 | if Dataset.name_key in dictionary: 49 | dataset_name = str(dictionary[Dataset.name_key]) 50 | # name cannot be whitespace 51 | if dataset_id.isspace(): 52 | raise RuntimeError('Dataset dict has empty name key value') 53 | else: 54 | raise RuntimeError('Dataset dict has no name key') 55 | 56 | # add api 
enabled is optional 57 | if Dataset.add_rows_api_enabled_key in dictionary: 58 | add_rows_api_enabled = bool(dictionary[Dataset.add_rows_api_enabled_key]) 59 | else: 60 | add_rows_api_enabled = None 61 | 62 | # configured by is optional 63 | if Dataset.configured_by_key in dictionary: 64 | configured_by = str(dictionary[Dataset.configured_by_key]) 65 | else: 66 | configured_by = None 67 | 68 | # is refreshable is optional 69 | if Dataset.is_refreshable_key in dictionary: 70 | is_refreshable = bool(dictionary[Dataset.is_refreshable_key]) 71 | else: 72 | is_refreshable = None 73 | 74 | # is effective identity required is optional 75 | if Dataset.is_effective_identity_required_key in dictionary: 76 | is_effective_identity_required = bool(dictionary[Dataset.is_effective_identity_required_key]) 77 | else: 78 | is_effective_identity_required = None 79 | 80 | # is effective identity roles required is optional 81 | if Dataset.is_effective_identity_roles_required_key in dictionary: 82 | is_effective_identity_roles_required = bool(dictionary[Dataset.is_effective_identity_roles_required_key]) 83 | else: 84 | is_effective_identity_roles_required = None 85 | 86 | # is on prem gateway required is optional 87 | if Dataset.is_on_prem_gateway_required_key in dictionary: 88 | is_on_prem_gateway_required = bool(dictionary[Dataset.is_on_prem_gateway_required_key]) 89 | else: 90 | is_on_prem_gateway_required = None 91 | 92 | return Dataset(dataset_name, dataset_id, add_rows_api_enabled=add_rows_api_enabled, 93 | configured_by=configured_by, is_refreshable=is_refreshable, 94 | is_effective_identity_required=is_effective_identity_required, 95 | is_effective_identity_roles_required=is_effective_identity_roles_required, 96 | is_on_prem_gateway_required=is_on_prem_gateway_required) 97 | 98 | def __repr__(self): 99 | return f'' 100 | 101 | 102 | class DatasetEncoder(json.JSONEncoder): 103 | def default(self, o): 104 | table_encoder = TableEncoder() 105 | 106 | json_dict = { 107 | 
Dataset.name_key: o.name, 108 | Dataset.tables_key: [table_encoder.default(x) for x in o.tables], 109 | } 110 | 111 | return json_dict 112 | 113 | 114 | class Table: 115 | name_key = 'name' 116 | columns_key = 'columns' 117 | measures_key = 'measures' 118 | rows_key = 'rows' 119 | 120 | @classmethod 121 | def from_dict(cls, dictionary): 122 | """ 123 | Creates a table from a dictionary, 'name' key value required 124 | :param dictionary: The dictionary to create the table from 125 | :return: A table created from the dictionary 126 | """ 127 | # name is required 128 | if Table.name_key in dictionary: 129 | table_name = str(dictionary[Table.name_key]) 130 | # name cannot be whitespace 131 | if table_name.isspace(): 132 | raise RuntimeError('Table dict has empty name key value') 133 | else: 134 | raise RuntimeError('Table dict has no name key') 135 | 136 | # columns are optional 137 | if Table.columns_key in dictionary: 138 | table_columns = [Table.from_dict(x) for x in dictionary[Table.columns_key]] 139 | else: 140 | table_columns = None 141 | 142 | # measures are optional 143 | if Table.measures_key in dictionary: 144 | table_measures = [Table.from_dict(x) for x in dictionary[Table.measures_key]] 145 | else: 146 | table_measures = None 147 | 148 | return Table(name=table_name, columns=table_columns, measures=table_measures) 149 | 150 | def __init__(self, name, columns, measures=None, rows=None): 151 | self.name = name 152 | self.columns = columns 153 | self.measures = measures 154 | self.rows = rows 155 | 156 | def __repr__(self): 157 | return f'' 158 | 159 | 160 | class TableEncoder(json.JSONEncoder): 161 | def default(self, o): 162 | json_dict = { 163 | Table.name_key: o.name, 164 | } 165 | 166 | if o.columns is not None: 167 | column_encoder = ColumnEncoder() 168 | json_dict[Table.columns_key] = [column_encoder.default(x) for x in o.columns] 169 | 170 | if o.measures is not None: 171 | measure_encoder = MeasureEncoder() 172 | json_dict[Table.measures_key] = 
[measure_encoder.default(x) for x in o.measures] 173 | 174 | if o.rows is not None: 175 | row_encoder = RowEncoder() 176 | json_dict[Table.row_key] = [row_encoder.default(x) for x in o.rows] 177 | 178 | return json_dict 179 | 180 | 181 | class Measure: 182 | name_key = 'name' 183 | expression_key = 'expression' 184 | formatstring_key = 'formatString' 185 | is_hidden_key = 'isHidden' 186 | 187 | @classmethod 188 | def from_dict(cls, dictionary): 189 | # name is required 190 | if Measure.name_key in dictionary: 191 | measure_name = str(dictionary[Measure.name_key]) 192 | # name cannot be whitespace 193 | if measure_name.isspace(): 194 | raise RuntimeError('Measure dict has empty name key value') 195 | else: 196 | raise RuntimeError('Measure dict has no name key') 197 | 198 | # expression is required 199 | if Measure.expression_key in dictionary: 200 | measure_expression = str(dictionary[Measure.expression_key]) 201 | # expression cannot be whitespace 202 | if measure_expression.isspace(): 203 | raise RuntimeError('Measure dict has empty expression key value') 204 | else: 205 | raise RuntimeError('Measure dict has no expression key') 206 | 207 | if Measure.formatstring_key in dictionary: 208 | measure_formatstring = str(dictionary[Measure.formatstring_key]) 209 | else: 210 | measure_formatstring = None 211 | 212 | if Measure.is_hidden_key in dictionary: 213 | measure_is_hidden = bool(dictionary[Measure.is_hidden_key]) 214 | else: 215 | measure_is_hidden = None 216 | 217 | return Measure(name=measure_name, expression=measure_expression, formatstring=measure_formatstring, 218 | is_hidden=measure_is_hidden) 219 | 220 | def __init__(self, name, expression, formatstring=None, is_hidden=None): 221 | self.name = name 222 | self.expression = expression 223 | self.formatstring = formatstring 224 | self.is_hidden = is_hidden 225 | 226 | def __repr__(self): 227 | return f'' 228 | 229 | 230 | class MeasureEncoder(json.JSONEncoder): 231 | def default(self, o): 232 | json_dict = { 
233 | Measure.name_key: o.name, 234 | Measure.expression_key: o.expression, 235 | } 236 | 237 | if o.formatstring is not None: 238 | json_dict[Measure.formatstring_key] = o.formatstring 239 | 240 | if o.is_hidden is not None: 241 | json_dict[Measure.is_hidden_key] = o.is_hidden 242 | 243 | return json_dict 244 | 245 | 246 | class Column: 247 | name_key = 'name' 248 | datatype_key = 'dataType' 249 | formatstring_key = 'formatString' 250 | 251 | def __init__(self, name, data_type, formatstring=None): 252 | self.name = name 253 | self.data_type = data_type 254 | self.formatstring = formatstring 255 | 256 | def __repr__(self): 257 | return f'' 258 | 259 | 260 | class ColumnEncoder(json.JSONEncoder): 261 | def default(self, o): 262 | json_dict = { 263 | Column.name_key: o.name, 264 | Column.datatype_key: o.data_type 265 | } 266 | 267 | if o.formatstring is not None: 268 | json_dict[Column.formatstring_key] = o.formatstring 269 | 270 | return json_dict 271 | 272 | 273 | class Row: 274 | def __init__(self, **kwargs): 275 | for key in kwargs: 276 | setattr(self, key, kwargs[key]) 277 | 278 | def __repr__(self): 279 | return f'' 280 | 281 | 282 | class RowEncoder(json.JSONEncoder): 283 | def default(self, o): 284 | return o.__dict__ 285 | 286 | 287 | class ScheduleNotifyOption(Enum): 288 | MAIL_ON_FAILURE = "MailOnFailure" 289 | NO_NOTIFICATION = "NoNotification" 290 | 291 | 292 | class RefreshSchedule: 293 | notify_option_key = 'NotifyOption' 294 | days_key = 'days' 295 | enabled_key = 'enabled' 296 | local_time_zone_id_key = 'localTimeZoneId' 297 | times_key = 'times' 298 | 299 | @classmethod 300 | def from_dict(cls, dictionary: Dict[str, Union[str, bool]]) -> 'RefreshSchedule': 301 | notify_option_value = dictionary.get(cls.notify_option_key, None) 302 | notify_option = ScheduleNotifyOption(notify_option_value) if notify_option_value else None 303 | days = dictionary.get(cls.days_key, None) 304 | enabled = dictionary.get(cls.enabled_key, None) 305 | local_time_zone_id = 
dictionary.get(cls.local_time_zone_id_key, "") 306 | times = dictionary.get(cls.times_key, None) 307 | 308 | return cls(notify_option, days, enabled, local_time_zone_id, times) 309 | 310 | def __init__( 311 | self, 312 | notify_option: Optional[ScheduleNotifyOption] = None, 313 | days: Optional[List[str]] = None, 314 | enabled: Optional[bool] = None, 315 | local_time_zone_id: Optional[str] = "", 316 | times: Optional[List[str]] = None 317 | ): 318 | """Constructs a RefreshSchedule object 319 | 320 | :param notify_option: Notify on failure or not 321 | :param days: Days to execute the refresh 322 | :param enabled: Is the scheduled refresh enabled 323 | :param local_time_zone_id: The id of the timezone to use. Follows the conventions described here: 324 | https://docs.microsoft.com/en-us/dotnet/api/system.timezoneinfo.id 325 | :param times: Times to schedule the refresh on each specified day 326 | """ 327 | 328 | self.notify_option = notify_option 329 | self.days = days 330 | self.enabled = enabled 331 | self.local_time_zone_id = local_time_zone_id 332 | self.times = times 333 | 334 | def as_set_values_dict(self) -> Dict[str, Union[str, bool, List[str]]]: 335 | set_values_dict = dict() 336 | 337 | if self.notify_option: 338 | set_values_dict[self.notify_option_key] = self.notify_option.value 339 | 340 | if self.days: 341 | set_values_dict[self.days_key] = self.days 342 | 343 | if self.enabled is not None: 344 | set_values_dict[self.enabled_key] = self.enabled 345 | 346 | if self.local_time_zone_id: 347 | set_values_dict[self.local_time_zone_id_key] = self.local_time_zone_id 348 | 349 | if self.times: 350 | set_values_dict[self.times_key] = self.times 351 | 352 | return set_values_dict 353 | 354 | def __repr__(self): 355 | return f'' 356 | 357 | 358 | class RefreshScheduleRequest: 359 | value_key = "value" 360 | 361 | def __init__(self, refresh_schedule: RefreshSchedule): 362 | """Constructs an update Refresh Schedule request 363 | 364 | :param refresh_schedule: The 
desired refresh schedule 365 | """ 366 | self.refresh_schedule = refresh_schedule 367 | 368 | def as_dict(self) -> Dict[str, Dict[str, Union[str, bool, List[str]]]]: 369 | return { 370 | self.value_key: self.refresh_schedule.as_set_values_dict() 371 | } 372 | 373 | def __repr__(self): 374 | return f'' 375 | -------------------------------------------------------------------------------- /pypowerbi/datasets.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | import requests 3 | import json 4 | from pypowerbi.utils import convert_datetime_fields 5 | 6 | from requests.exceptions import HTTPError 7 | from .dataset import * 8 | 9 | 10 | class Datasets: 11 | # url snippets 12 | groups_snippet = 'groups' 13 | datasets_snippet = 'datasets' 14 | tables_snippet = 'tables' 15 | rows_snippet = 'rows' 16 | parameters_snippet = 'parameters' 17 | set_parameters_snippet = 'Default.UpdateParameters' 18 | bind_gateway_snippet = 'Default.BindToGateway' 19 | refreshes_snippet = 'refreshes' 20 | refresh_schedule_snippet = 'refreshSchedule' 21 | 22 | # json keys 23 | get_datasets_value_key = 'value' 24 | 25 | def __init__(self, client): 26 | self.client = client 27 | self.base_url = f'{self.client.api_url}/{self.client.api_version_snippet}/{self.client.api_myorg_snippet}' 28 | 29 | def count(self, group_id=None): 30 | """ 31 | Evaluates the number of datasets 32 | :param group_id: The optional group id 33 | :return: The number of datasets as returned by the API 34 | """ 35 | return len(self.get_datasets(group_id)) 36 | 37 | def has_dataset(self, dataset_id, group_id=None): 38 | """ 39 | Evaluates if the dataset exists 40 | :param dataset_id: The id of the dataset to evaluate 41 | :param group_id: The optional group id 42 | :return: True if the dataset exists, False otherwise 43 | """ 44 | datasets = self.get_datasets(group_id) 45 | 46 | for dataset in datasets: 47 | if dataset.id == str(dataset_id): 48 | return 
True 49 | 50 | return False 51 | 52 | def get_datasets(self, group_id=None): 53 | """ 54 | Fetches all datasets 55 | https://msdn.microsoft.com/en-us/library/mt203567.aspx 56 | :param group_id: The optional group id to get datasets from 57 | :return: The list of the datasets found 58 | """ 59 | # group_id can be none, account for it 60 | if group_id is None: 61 | groups_part = '/' 62 | else: 63 | groups_part = f'/{self.groups_snippet}/{group_id}/' 64 | 65 | # form the url 66 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}' 67 | # form the headers 68 | headers = self.client.auth_header 69 | 70 | # get the response 71 | response = requests.get(url, headers=headers) 72 | 73 | # 200 is the only successful code, raise an exception on any other response code 74 | if response.status_code != 200: 75 | raise HTTPError(response, f'Get Datasets request returned http error: {response.json()}') 76 | 77 | return self.datasets_from_get_datasets_response(response) 78 | 79 | def get_dataset(self, dataset_id, group_id=None): 80 | """ 81 | Gets a single dataset 82 | https://msdn.microsoft.com/en-us/library/mt784653.aspx 83 | :param dataset_id: The id of the dataset to get 84 | :param group_id: The optional id of the group to get the dataset from 85 | :return: The dataset returned by the API 86 | """ 87 | # group_id can be none, account for it 88 | if group_id is None: 89 | groups_part = '/' 90 | else: 91 | groups_part = f'/{self.groups_snippet}/{group_id}/' 92 | 93 | # form the url 94 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}' 95 | # form the headers 96 | headers = self.client.auth_header 97 | # get the response 98 | response = requests.get(url, headers=headers) 99 | 100 | # 200 is the only successful code, raise an exception on any other response code 101 | if response.status_code != 200: 102 | raise HTTPError(response, f'Get Datasets request returned http error: {response.json()}') 103 | 104 | return 
Dataset.from_dict(json.loads(response.text)) 105 | 106 | def post_dataset(self, dataset, group_id=None): 107 | """ 108 | Posts a single dataset 109 | https://msdn.microsoft.com/en-us/library/mt203562.aspx 110 | :param dataset: The dataset to push 111 | :param group_id: The optional group id to push the dataset to 112 | :return: The pushed dataset as returned by the API 113 | """ 114 | # group_id can be none, account for it 115 | if group_id is None: 116 | groups_part = '/' 117 | else: 118 | groups_part = f'/{self.groups_snippet}/{group_id}/' 119 | 120 | # form the url 121 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}' 122 | # form the headers 123 | headers = self.client.auth_header 124 | # form the json dict 125 | json_dict = DatasetEncoder().default(dataset) 126 | 127 | # get the response 128 | response = requests.post(url, headers=headers, json=json_dict) 129 | 130 | # 201 - Created. The request was fulfilled and a new Dataset was created. 131 | if response.status_code != 201: 132 | raise HTTPError(response, f'Post Datasets request returned http code: {response.json()}') 133 | 134 | return Dataset.from_dict(json.loads(response.text)) 135 | 136 | def delete_dataset(self, dataset_id, group_id=None): 137 | """ 138 | Deletes a dataset 139 | :param dataset_id: The id of the dataset to delete 140 | :param group_id: The optional group id to delete the dataset from 141 | """ 142 | # group_id can be none, account for it 143 | if group_id is None: 144 | groups_part = '/' 145 | else: 146 | groups_part = f'/{self.groups_snippet}/{group_id}/' 147 | 148 | # form the url 149 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}' 150 | # form the headers 151 | headers = self.client.auth_header 152 | 153 | # get the response 154 | response = requests.delete(url, headers=headers) 155 | 156 | # 200 is the only successful code 157 | if response.status_code != 200: 158 | raise HTTPError(response, f'Delete Dataset request returned http error: 
{response.json()}') 159 | 160 | def delete_all_datasets(self, group_id=None): 161 | """ 162 | Deletes all datasets 163 | :param group_id: The optional group id of the group to delete all datasets from 164 | """ 165 | # get all the datasets and delete each one 166 | datasets = self.get_datasets(group_id) 167 | for dataset in datasets: 168 | self.delete_dataset(group_id, dataset.id) 169 | 170 | def get_tables(self, dataset_id, group_id=None): 171 | """ 172 | Gets tables from a dataset 173 | https://msdn.microsoft.com/en-us/library/mt203556.aspx 174 | :param dataset_id: The id of the dataset which to get tables from 175 | :param group_id: The optional id of the group which to get tables from 176 | :return: A list of tables from the given group and dataset 177 | """ 178 | # group_id can be none, account for it 179 | if group_id is None: 180 | groups_part = '/' 181 | else: 182 | groups_part = f'/{self.groups_snippet}/{group_id}/' 183 | 184 | # form the url 185 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.tables_snippet}' 186 | # form the headers 187 | headers = self.client.auth_header 188 | 189 | # get the response 190 | response = requests.get(url, headers=headers) 191 | 192 | # 200 is the only successful code, raise an exception on any other response code 193 | if response.status_code != 200: 194 | raise HTTPError(response, f'Get Datasets request returned http error: {response.json()}') 195 | 196 | return self.tables_from_get_tables_response(response) 197 | 198 | def put_table(self, dataset_id, table_name, table, group_id=None): 199 | """ 200 | Updates the metadata and schema for the specified table within the specified dataset from "My Workspace". 
201 | https://docs.microsoft.com/en-us/rest/api/power-bi/pushdatasets/datasets_puttable 202 | :param dataset_id: The id of the dataset to put the table in 203 | :param table_name: The name of the table to put 204 | :param table: The table object to update 205 | :param group_id: The optional id of the group to put the table in 206 | """ 207 | # group_id can be none, account for it 208 | if group_id is None: 209 | groups_part = '/' 210 | else: 211 | groups_part = f'/{self.groups_snippet}/{group_id}/' 212 | 213 | # form the url 214 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/' \ 215 | f'{self.tables_snippet}/{table_name}' 216 | # form the headers 217 | headers = self.client.auth_header 218 | # form the json dict 219 | json_dict = TableEncoder().default(table) 220 | 221 | # get the response 222 | response = requests.post(url, headers=headers, json=json_dict) 223 | 224 | # 200 is the only successful code 225 | if response.status_code != 200: 226 | raise HTTPError(response, f'Post row request returned http error: {response.json()}') 227 | 228 | def post_rows(self, dataset_id, table_name, rows, group_id=None): 229 | """ 230 | Posts rows to a table in a given dataset 231 | https://msdn.microsoft.com/en-us/library/mt203561.aspx 232 | :param dataset_id: The id of the dataset to post rows to 233 | :param table_name: The name of the table to post rows to 234 | :param rows: The rows to post to the table 235 | :param group_id: The optional id of the group to post rows to 236 | """ 237 | # group_id can be none, account for it 238 | if group_id is None: 239 | groups_part = '/' 240 | else: 241 | groups_part = f'/{self.groups_snippet}/{group_id}/' 242 | 243 | # form the url 244 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/' \ 245 | f'{self.tables_snippet}/{table_name}/{self.rows_snippet}' 246 | # form the headers 247 | headers = self.client.auth_header 248 | # form the json dict 249 | row_encoder = RowEncoder() 250 | 
json_dict = { 251 | 'rows': [row_encoder.default(x) for x in rows] 252 | } 253 | 254 | # get the response 255 | response = requests.post(url, headers=headers, json=json_dict) 256 | 257 | # 200 is the only successful code 258 | if response.status_code != 200: 259 | raise HTTPError(response, f'Post row request returned http error: {response.json()}') 260 | 261 | def delete_rows(self, dataset_id, table_name, group_id=None): 262 | """ 263 | Deletes all rows from a table in a given dataset 264 | https://msdn.microsoft.com/en-us/library/mt238041.aspx 265 | :param dataset_id: The id of the dataset to delete the rows from 266 | :param table_name: The name of the table to delete the rows from 267 | :param group_id: The optional id of the group to delete the rows from 268 | """ 269 | # group_id can be none, account for it 270 | if group_id is None: 271 | groups_part = '/' 272 | else: 273 | groups_part = f'/{self.groups_snippet}/{group_id}/' 274 | 275 | # form the url 276 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/' \ 277 | f'{self.tables_snippet}/{table_name}/{self.rows_snippet}' 278 | # form the headers 279 | headers = self.client.auth_header 280 | 281 | # get the response 282 | response = requests.delete(url, headers=headers) 283 | 284 | # 200 is the only successful code 285 | if response.status_code != 200: 286 | raise HTTPError(response, f'Post row request returned http error: {response.json()}') 287 | 288 | def get_dataset_parameters(self, dataset_id, group_id=None): 289 | """ 290 | Gets all parameters for a single dataset 291 | https://msdn.microsoft.com/en-us/library/mt784653.aspx 292 | :param dataset_id: The id of the dataset from which you want the parameters 293 | :param group_id: The optional id of the group to get the dataset's parameters 294 | :return: The dataset parameters returned by the API 295 | """ 296 | # group_id can be none, account for it 297 | if group_id is None: 298 | groups_part = '/' 299 | else: 300 | groups_part = 
f'/{self.groups_snippet}/{group_id}/' 301 | 302 | # form the url 303 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.parameters_snippet}' 304 | # form the headers 305 | headers = self.client.auth_header 306 | # get the response 307 | response = requests.get(url, headers=headers) 308 | 309 | # 200 is the only successful code, raise an exception on any other response code 310 | if response.status_code != 200: 311 | raise HTTPError(response, f'Get Dataset parameters request returned http error: {response.json()}') 312 | 313 | return json.loads(response.text) 314 | 315 | def set_dataset_parameters(self, dataset_id, params, group_id=None): 316 | """ 317 | Sets parameters for a single dataset 318 | https://docs.microsoft.com/en-gb/rest/api/power-bi/datasets/updateparametersingroup 319 | :param dataset_id: The id of the dataset which you want to update 320 | :param params: Dict of parameters to set on the dataset 321 | :param group_id: The optional id of the group to get the dataset's parameters 322 | :return: The dataset parameters returned by the API 323 | """ 324 | if group_id is None: 325 | groups_part = '/' 326 | else: 327 | groups_part = f'/{self.groups_snippet}/{group_id}/' 328 | 329 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.set_parameters_snippet}' 330 | 331 | update_details = [{"name": k, "newValue": str(v)} for k, v in params.items()] 332 | body = {"updateDetails": update_details} 333 | 334 | headers = self.client.auth_header 335 | 336 | response = requests.post(url, headers=headers, json=body) 337 | 338 | if response.status_code != 200: 339 | raise HTTPError(response, f'Setting dataset parameters failed with http error: {response.json()}') 340 | 341 | def refresh_dataset(self, dataset_id, notify_option=None, group_id=None): 342 | """ 343 | Refreshes a single dataset 344 | :param dataset_id: The id of the dataset to refresh 345 | :param notify_option: The optional notify_option to add in 
the request body 346 | :param group_id: The optional id of the group 347 | """ 348 | # group_id can be none, account for it 349 | if group_id is None: 350 | groups_part = '/' 351 | else: 352 | groups_part = f'/{self.groups_snippet}/{group_id}/' 353 | 354 | # form the url 355 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.refreshes_snippet}' 356 | 357 | # form the headers 358 | headers = self.client.auth_header 359 | 360 | if notify_option is not None: 361 | json_dict = { 362 | 'notifyOption': notify_option 363 | } 364 | else: 365 | json_dict = None 366 | 367 | # get the response 368 | response = requests.post(url, headers=headers, json=json_dict) 369 | 370 | # 200 is the only successful code, raise an exception on any other response code 371 | if response.status_code != 202: 372 | raise HTTPError(response, f'Refresh dataset request returned http error: {response.json()}') 373 | 374 | def get_dataset_gateway_datasources(self, dataset_id, group_id=None): 375 | """ 376 | Gets the gateway datasources for a dataset 377 | :param dataset_id: The id of the dataset 378 | :param group_id: The optional id of the group 379 | """ 380 | # group_id can be none, account for it 381 | if group_id is None: 382 | groups_part = '/' 383 | else: 384 | groups_part = f'/{self.groups_snippet}/{group_id}/' 385 | 386 | # form the url 387 | url = f'{self.base_url}{groups_part}{self.datasets_snippet}/{dataset_id}/datasources' 388 | 389 | # form the headers 390 | headers = self.client.auth_header 391 | 392 | # get the response 393 | response = requests.get(url, headers=headers) 394 | 395 | # 200 is the only successful code, raise an exception on any other response code 396 | if response.status_code != 200: 397 | print(url) 398 | raise HTTPError(response, f'Dataset gateway datasources request returned http error: {response.json()}') 399 | 400 | data_sources = json.loads(response.text)["value"] 401 | 402 | return data_sources 403 | 404 | def 
bind_dataset_gateway(self, dataset_id, gateway_id, group_id=None): 405 | """ 406 | Binds a dataset to a gateway 407 | https://docs.microsoft.com/en-gb/rest/api/power-bi/datasets/bindtogatewayingroup 408 | :param dataset_id: The id of the dataset 409 | :param gateway_id: The id of the gateway 410 | :param group_id: The optional id of the group 411 | """ 412 | if group_id is None: 413 | groups_part = '/' 414 | else: 415 | groups_part = f'/{self.groups_snippet}/{group_id}/' 416 | 417 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.bind_gateway_snippet}' 418 | 419 | body = {"gatewayObjectId": gateway_id} 420 | headers = self.client.auth_header 421 | 422 | response = requests.post(url, headers=headers, json=body) 423 | 424 | if response.status_code != 200: 425 | raise HTTPError(response, f'Binding gateway to dataset failed with http error: {response.json()}') 426 | 427 | def get_dataset_refresh_history(self, dataset_id, group_id=None, top=None): 428 | """ 429 | Gets the refresh history of a dataset 430 | :param dataset_id: The id of the dataset to refresh 431 | :param group_id: The optional id of the group 432 | :param top: The number of refreshes to retrieve. 5 will get the last 5 refreshes. 
433 | """ 434 | # group_id can be none, account for it 435 | if group_id is None: 436 | groups_part = '/' 437 | else: 438 | groups_part = f'/{self.groups_snippet}/{group_id}/' 439 | 440 | # form the url 441 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.refreshes_snippet}' 442 | 443 | if top is not None: 444 | url = f'{url}?$top={top}' 445 | 446 | # form the headers 447 | headers = self.client.auth_header 448 | 449 | # get the response 450 | response = requests.get(url, headers=headers) 451 | 452 | # 200 is the only successful code, raise an exception on any other response code 453 | if response.status_code != 200: 454 | raise HTTPError(response, f'Dataset refresh history request returned http error: {response.json()}') 455 | 456 | refresh_data = json.loads(response.text)["value"] 457 | 458 | # Convert the date strings into datetime objects 459 | time_fields = ['startTime', 'endTime'] 460 | refresh_data = convert_datetime_fields(refresh_data, time_fields) 461 | 462 | return refresh_data 463 | 464 | def update_refresh_schedule( 465 | self, 466 | dataset_id: str, 467 | refresh_schedule: RefreshSchedule, 468 | group_id: Optional[str] = None 469 | ): 470 | """Updates the refresh schedule for a given dataset in a given workspace. 471 | 472 | :param dataset_id: The dataset id 473 | :param refresh_schedule: The updates for the refresh schedule. If a field remains None, no changes are made. 474 | :param group_id: The workspace id of the workspace in which the dataset resides. If None, 'My Workspace' is 475 | assumed. 
476 | """ 477 | if group_id is None: 478 | groups_part = '/' 479 | else: 480 | groups_part = f'/{self.groups_snippet}/{group_id}/' 481 | 482 | # form the url 483 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.refresh_schedule_snippet}' 484 | 485 | # form the headers 486 | headers = self.client.auth_header 487 | 488 | # form the body 489 | body = RefreshScheduleRequest(refresh_schedule).as_dict() 490 | 491 | # get the response 492 | response = requests.patch(url, headers=headers, json=body) 493 | 494 | # 200 is the only successful code, raise an exception on any other response code 495 | if response.status_code != 200: 496 | raise HTTPError(f'Update refresh schedule request returned the following http error:{response.json()}') 497 | 498 | def get_refresh_schedule(self, dataset_id: str, group_id: Optional[str] = None): 499 | """Retrieves the refresh schedule for a given dataset and group id 500 | 501 | :param dataset_id: The dataset id for which the refresh schedule should be retrieved 502 | :param group_id: The group in which the dataset resides. If None 'My Workspace' is used. 
503 | """ 504 | if group_id is None: 505 | groups_part = '/' 506 | else: 507 | groups_part = f'/{self.groups_snippet}/{group_id}/' 508 | 509 | # form the url 510 | url = f'{self.base_url}{groups_part}/{self.datasets_snippet}/{dataset_id}/{self.refresh_schedule_snippet}' 511 | 512 | # form the headers 513 | headers = self.client.auth_header 514 | 515 | # get the response 516 | response = requests.get(url, headers=headers) 517 | 518 | # 200 is the only successful code, raise an exception on any other response code 519 | if response.status_code != 200: 520 | raise HTTPError(f'Get refresh schedule request returned the following http error:{response.json()}') 521 | 522 | return self.refresh_schedule_from_get_refresh_schedule_response(response) 523 | 524 | 525 | @classmethod 526 | def datasets_from_get_datasets_response(cls, response): 527 | """ 528 | Creates a list of datasets from a http response object 529 | :param response: The http response object 530 | :return: A list of datasets created from the given http response object 531 | """ 532 | # load the response into a dict 533 | response_dict = json.loads(response.text) 534 | datasets = [] 535 | # go through entries returned from API 536 | for entry in response_dict[cls.get_datasets_value_key]: 537 | datasets.append(Dataset.from_dict(entry)) 538 | 539 | return datasets 540 | 541 | @classmethod 542 | def tables_from_get_tables_response(cls, response): 543 | """ 544 | Creates a list of tables from a http response object 545 | :param response: The http response object 546 | :return: A list of tables created from the given http response object 547 | """ 548 | # load the response into a dict 549 | response_dict = json.loads(response.text) 550 | tables = [] 551 | # go through entries returned from API 552 | for entry in response_dict[cls.get_datasets_value_key]: 553 | tables.append(Table.from_dict(entry)) 554 | 555 | return tables 556 | 557 | @classmethod 558 | def refresh_schedule_from_get_refresh_schedule_response(cls, 
response: requests.Response): 559 | response_dict = json.loads(response.text) 560 | return RefreshSchedule.from_dict(response_dict) 561 | -------------------------------------------------------------------------------- /pypowerbi/enums.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | 3 | from enum import Enum 4 | 5 | 6 | class GroupUserAccessRight(Enum): 7 | NONE = 'None' 8 | MEMBER = 'Member' 9 | ADMIN = 'Admin' 10 | CONTRIBUTOR = 'Contributor' 11 | VIEWER = 'Viewer' 12 | 13 | 14 | class PrincipalType(Enum): 15 | USER = "User" 16 | GROUP = "Group" 17 | APP = "App" 18 | 19 | 20 | class DatasourceUserAccessRight(Enum): 21 | # Removes permission to access the datasource 22 | NONE = 'None' 23 | # Datasets owned by the user have read access to this datasource 24 | READ = 'Read' 25 | # The user can override the effective identity for PowerBI Embedded 26 | READ_OVERRIDE_EFFECTIVE_IDENTITY = 'ReadOverrideEffectiveIdentity' 27 | 28 | 29 | class CredentialType(Enum): 30 | ANONYMOUS = 'Anonymous' 31 | BASIC = 'Basic' 32 | KEY = 'Key' 33 | OAUTH2 = 'OAuth2' 34 | WINDOWS = 'Windows' 35 | 36 | 37 | class EncryptedConnection(Enum): 38 | ENCRYPTED = 'Encrypted' 39 | NOT_ENCRYPTED = 'NotEncrypted' 40 | 41 | 42 | class EncryptionAlgorithm(Enum): 43 | NONE = 'None' 44 | RSA_OAEP = 'RSA-OAEP' 45 | 46 | 47 | class PrivacyLevel(Enum): 48 | NONE = 'None' 49 | PUBLIC = 'Public' 50 | ORGANIZATIONAL = 'Organizational' 51 | PRIVATE = 'Private' 52 | -------------------------------------------------------------------------------- /pypowerbi/features.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | import requests 3 | import json 4 | 5 | from requests.exceptions import HTTPError 6 | 7 | 8 | class Features: 9 | # url snippets 10 | features_snippet = 'availableFeatures' 11 | 12 | def __init__(self, client): 13 | self.client = client 14 | 
self.base_url = f'{self.client.api_url}/{self.client.api_version_snippet}/{self.client.api_myorg_snippet}' 15 | 16 | @property 17 | def embed_trial(self): 18 | feat = self.get_available_features(feature_name='embedTrial') 19 | feat.usage = feat.additional_info['usage'] 20 | return feat 21 | 22 | @property 23 | def automatically_push_app_to_end_user(self): 24 | return self.get_available_features(feature_name='automaticallyPushAppToEndUsers') 25 | 26 | @property 27 | def publish_app_to_entire_organization(self): 28 | return self.get_available_features(feature_name='publishAppToEntireOrganization') 29 | 30 | def get_available_features(self, feature_name=None): 31 | # If feature_name is none, returns all the available features 32 | if feature_name is not None: 33 | feature_part = f"(featureName='{feature_name}')" 34 | else: 35 | feature_part = '' 36 | 37 | # form the url 38 | url = f"{self.base_url}/{self.features_snippet}{feature_part}" 39 | # form the headers 40 | headers = self.client.auth_header 41 | 42 | # get the response 43 | response = requests.get(url, headers=headers) 44 | 45 | # 200 is the only successful code, raise an exception on any other response code 46 | if response.status_code != 200: 47 | raise HTTPError(response, f'Get Datasets request returned http error: {response.json()}') 48 | 49 | return Features.features_from_get_available_features_response(response) 50 | 51 | @staticmethod 52 | def features_from_get_available_features_response(response): 53 | response_dict = json.loads(response.text) 54 | 55 | # check wether we are returning a single feature or a list of features 56 | if 'features' in response_dict: 57 | features_list = [] 58 | for feature in response_dict['features']: 59 | additional_info = feature[Feature.additional_info_key] if Feature.additional_info_key in feature else None 60 | features_list.append(Feature( 61 | feature[Feature.name_key], 62 | feature[Feature.state_key], 63 | feature[Feature.extended_state_key], 64 | additional_info 65 
| )) 66 | return features_list 67 | else: 68 | additional_info = response_dict[Feature.additional_info_key] if Feature.additional_info_key in response_dict else None 69 | return Feature( 70 | response_dict[Feature.name_key], 71 | response_dict[Feature.state_key], 72 | response_dict[Feature.extended_state_key], 73 | additional_info 74 | ) 75 | 76 | 77 | class Feature: 78 | name_key = 'name' 79 | state_key = 'state' 80 | extended_state_key = 'extendedState' 81 | additional_info_key = 'additionalInfo' 82 | 83 | def __init__(self, name, state, extended_state, additional_info=None): 84 | self.name = name 85 | self.state = state 86 | self.extended_state = extended_state 87 | 88 | if additional_info is not None: 89 | self.additional_info = additional_info 90 | -------------------------------------------------------------------------------- /pypowerbi/gateway.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | import json 3 | from typing import Dict, Union, Optional 4 | 5 | from .base import Deserializable 6 | from .enums import CredentialType, DatasourceUserAccessRight, PrincipalType, EncryptedConnection, EncryptionAlgorithm, \ 7 | PrivacyLevel 8 | 9 | 10 | class GatewayPublicKey(Deserializable): 11 | exponent_key = 'exponent' 12 | modulus_key = 'modulus' 13 | 14 | def __init__( 15 | self, 16 | exponent: str, 17 | modulus: str 18 | ): 19 | """Constructs a GatewayPublicKey object 20 | 21 | :param exponent: The exponent of the public key 22 | :param modulus: The modulus of the public key 23 | """ 24 | self.exponent = exponent 25 | self.modulus = modulus 26 | 27 | def as_dict(self) -> Dict[str, str]: 28 | return { 29 | self.exponent_key: self.exponent, 30 | self.modulus_key: self.modulus 31 | } 32 | 33 | @classmethod 34 | def from_dict(cls, dictionary: Dict[str, str]) -> 'GatewayPublicKey': 35 | """Constructs a GatewayPublicKey from a dictionary 36 | 37 | :param dictionary: the dictionary describing 
class Gateway(Deserializable):
    """A Power BI on-premises data gateway."""

    # JSON keys used by the REST API payload
    id_key = 'id'
    name_key = 'name'
    type_key = 'type'
    gateway_annotation_key = 'gatewayAnnotation'
    public_key_key = 'publicKey'
    status_key = 'gatewayStatus'

    def __init__(
        self,
        gateway_id: str,
        name: str,
        gateway_type: str,
        gateway_annotation: str,
        public_key: GatewayPublicKey,
        status: str
    ):
        """Constructs a Gateway object

        :param gateway_id: The gateway id
        :param name: The gateway name
        :param gateway_type: The gateway type
        :param gateway_annotation: Gateway metadata in json format
        :param public_key: The gateway public key
        :param status: The gateway connectivity status
        """
        self.id = gateway_id
        self.name = name
        self.type = gateway_type
        self.gateway_annotation = gateway_annotation
        self.public_key = public_key
        self.status = status

    @classmethod
    def from_dict(cls, dictionary: Dict[str, Union[str, Dict[str, str]]]) -> 'Gateway':
        """Constructs a Gateway object from a dict

        :param dictionary: Dictionary describing the gateway
        :return: Gateway based on the dictionary
        :raises RuntimeError: If the dictionary has no id entry
        """
        gateway_id = dictionary.get(cls.id_key)
        if gateway_id is None:
            raise RuntimeError("Gateway dictionary has no id key")

        name = dictionary.get(cls.name_key)
        gateway_type = dictionary.get(cls.type_key)
        gateway_annotation = dictionary.get(cls.gateway_annotation_key)
        # NOTE(review): assumes the publicKey entry is always present; a
        # missing key would pass None into GatewayPublicKey.from_dict — confirm.
        public_key = GatewayPublicKey.from_dict(dictionary.get(cls.public_key_key))
        status = dictionary.get(cls.status_key)

        return cls(gateway_id, name, gateway_type, gateway_annotation, public_key, status)

    def __repr__(self) -> str:
        # Restored: the previous repr rendered an empty string (the text
        # between angle brackets appears to have been lost).
        return f'<Gateway id={self.id}, name={self.name}, status={self.status}>'
class DatasourceUser(Deserializable):
    """A principal (user, group or app) with access to a gateway datasource."""

    # JSON keys used by the REST API payload
    datasource_access_right_key = 'datasourceAccessRight'
    email_address_key = 'emailAddress'
    display_name_key = 'displayName'
    identifier_key = 'identifier'
    principal_type_key = 'principalType'

    def __init__(
        self,
        datasource_access_right: DatasourceUserAccessRight,
        email_address: str = "",
        display_name: str = "",
        identifier: str = "",
        principal_type: Optional[PrincipalType] = None
    ):
        """Constructs a DataSourceUser object

        :param datasource_access_right: The user access rights for the datasource
        :param email_address: Email address of the user
        :param display_name: Display name of the principal
        :param identifier: Identifier of the principal
        :param principal_type: The principal type
        """
        self.datasource_access_right = datasource_access_right
        self.email_address = email_address
        self.display_name = display_name
        self.identifier = identifier
        self.principal_type = principal_type

    @classmethod
    def from_dict(cls, dictionary: Dict[str, str]) -> 'DatasourceUser':
        """Constructs a DatasourceUser from an API response dict.

        :param dictionary: Dictionary describing the datasource user
        :raises RuntimeError: If the dictionary has no identifier entry
        """
        datasource_user_id = dictionary.get(cls.identifier_key)
        if datasource_user_id is None:
            raise RuntimeError("DatasourceUser dictionary has no identifier key")

        # use round brackets below to access enum by value
        datasource_user_access_right = DatasourceUserAccessRight(dictionary.get(cls.datasource_access_right_key))
        email_address = dictionary.get(cls.email_address_key, "")
        display_name = dictionary.get(cls.display_name_key, "")
        principal_type_value = dictionary.get(cls.principal_type_key, None)
        # Only convert to the enum when the key was actually returned.
        principal_type = PrincipalType(principal_type_value) if principal_type_value is not None else None

        return cls(datasource_user_access_right, email_address, display_name, datasource_user_id, principal_type)

    def as_set_values_dict(self) -> Dict[str, str]:
        """Return only the attributes that are set, keyed for the REST API."""
        set_values_dict = dict()

        # The access right is mandatory; everything else is optional.
        set_values_dict[self.datasource_access_right_key] = self.datasource_access_right.value

        if self.email_address:
            set_values_dict[self.email_address_key] = self.email_address

        if self.display_name:
            set_values_dict[self.display_name_key] = self.display_name

        if self.identifier:
            set_values_dict[self.identifier_key] = self.identifier

        if self.principal_type:
            set_values_dict[self.principal_type_key] = self.principal_type.value

        return set_values_dict

    def __repr__(self) -> str:
        # Restored: the previous repr rendered an empty string.
        return f'<DatasourceUser id={self.identifier}, access_right={self.datasource_access_right}>'
"""Returns a dictionary with only those values of attributes that are set 269 | 270 | :return: Dictionary with set values 271 | """ 272 | set_values = dict() 273 | 274 | if self.server: 275 | set_values[self.server_key] = self.server 276 | 277 | if self.database: 278 | set_values[self.database_key] = self.database 279 | 280 | if self.url: 281 | set_values[self.url_key] = self.url 282 | 283 | return set_values 284 | 285 | def to_json(self) -> str: 286 | """Provides a json string that can be used in a PublishDatasourceToGatewayRequest 287 | 288 | :return: json string of set values 289 | """ 290 | json_dict = self.as_set_values_dict() 291 | 292 | # dump to json string 293 | # remove spaces between object keys and values 294 | # replace double backslashes with single slashes 295 | # remove double quotes at start and end of str 296 | return json.dumps(json_dict) \ 297 | .replace(r'": ', r'":') \ 298 | .replace('\\\\', '\\') 299 | 300 | 301 | class CredentialDetails: 302 | credentials_key = "credentials" 303 | credential_type_key = "credentialType" 304 | encrypted_connection_key = "encryptedConnection" 305 | encryption_algorithm_key = "encryptionAlgorithm" 306 | privacy_level_key = "privacyLevel" 307 | use_caller_aad_identity_key = "useCallerAADIdentity" 308 | use_end_user_o_auth_2_credentials_key = "useEndUserOAuth2Credentials" 309 | 310 | def __init__( 311 | self, 312 | credentials: str, 313 | credential_type: CredentialType, 314 | encrypted_connection: EncryptedConnection, 315 | encryption_algorithm: EncryptionAlgorithm, 316 | privacy_level: PrivacyLevel, 317 | use_caller_aad_identity: Optional[bool] = None, 318 | use_end_user_o_auth_2_credentials: Optional[bool] = None 319 | ): 320 | """Constructs a CredentialsDetails object 321 | 322 | :param credentials: The credentials to access a datasource 323 | :param credential_type: The type of credentials to access a datasource 324 | :param encrypted_connection: Encryption behaviour applied to the datasource connection 325 | 
:param encryption_algorithm: The encryption algorithm. For cloud datasource, use 'None'. For an on-premises 326 | datasource, use the gateway public key with the 'RSA-OAEP' algorithm. 327 | :param privacy_level: The privacy level. This becomes relevant when combining data from multiple datasources. 328 | :param use_caller_aad_identity: Should the caller's AAD identity be used for OAuth2 credentials configuration 329 | :param use_end_user_o_auth_2_credentials: Should the end-user's OAuth2 credentials be used for connecting to 330 | the datasource in DirectQuery mode. Only supported for Direct Query to SQL Azure. 331 | """ 332 | self.credentials = credentials 333 | self.credential_type = credential_type 334 | self.encrypted_connection = encrypted_connection 335 | self.encryption_algorithm = encryption_algorithm 336 | self.privacy_level = privacy_level 337 | self.use_caller_aad_identity = use_caller_aad_identity 338 | self.use_end_user_o_auth_2_credentials = use_end_user_o_auth_2_credentials 339 | 340 | def to_dict(self): 341 | return { 342 | self.credentials_key: self.credentials, 343 | self.credential_type_key: self.credential_type.value, 344 | self.encrypted_connection_key: self.encrypted_connection.value, 345 | self.encryption_algorithm_key: self.encryption_algorithm.value, 346 | self.privacy_level_key: self.privacy_level.value, 347 | self.use_caller_aad_identity_key: self.use_caller_aad_identity, 348 | self.use_end_user_o_auth_2_credentials_key: self.use_end_user_o_auth_2_credentials 349 | } 350 | 351 | def __repr__(self) -> str: 352 | return f"" 353 | 354 | 355 | class PublishDatasourceToGatewayRequest: 356 | datasource_type_key = "dataSourceType" 357 | connection_details_key = "connectionDetails" 358 | credential_details_key = "credentialDetails" 359 | datasource_name_key = "datasourceName" 360 | 361 | def __init__( 362 | self, 363 | datasource_type: str, 364 | connection_details: str, 365 | credential_details: CredentialDetails, 366 | datasource_name: str 367 
| ): 368 | """Constructs a PublishDatasourceToGatewayRequest 369 | 370 | :param datasource_type: The datasource type 371 | :param connection_details: The connection details 372 | :param credential_details: The credentials to access the datasource 373 | :param datasource_name: The datasource name 374 | """ 375 | self.datasource_type = datasource_type 376 | self.connection_details = connection_details 377 | self.credential_details = credential_details 378 | self.datasource_name = datasource_name 379 | 380 | def to_dict(self) -> Dict[str, Union[str, Dict[str, str]]]: 381 | return { 382 | self.datasource_type_key: self.datasource_type, 383 | self.connection_details_key: self.connection_details, 384 | self.credential_details_key: self.credential_details.to_dict(), 385 | self.datasource_name_key: self.datasource_name 386 | } 387 | 388 | def __repr__(self): 389 | return '' 392 | -------------------------------------------------------------------------------- /pypowerbi/gateways.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | from typing import List, Type, Any 3 | 4 | import requests 5 | import json 6 | 7 | from requests.exceptions import HTTPError 8 | 9 | from .gateway import Gateway, GatewayDatasource, DatasourceUser, PublishDatasourceToGatewayRequest 10 | from .base import Deserializable 11 | 12 | 13 | class Gateways: 14 | # url snippets 15 | gateways_snippet = 'gateways' 16 | datasources_snippet = 'datasources' 17 | users_snippet = 'users' 18 | 19 | # json keys 20 | odata_response_wrapper_key = 'value' 21 | 22 | def __init__(self, client): 23 | self.client = client 24 | self.base_url = f'{self.client.api_url}/{self.client.api_version_snippet}/{self.client.api_myorg_snippet}' 25 | 26 | def get_gateways(self) -> List[Gateway]: 27 | """Fetches all gateways the user is an admin for""" 28 | 29 | # form the url 30 | url = f'{self.base_url}/{self.gateways_snippet}' 31 | 32 | # form the headers 33 | 
headers = self.client.auth_header 34 | 35 | # get the response 36 | response = requests.get(url, headers=headers) 37 | 38 | # 200 is the only successful code, raise an exception on any other response code 39 | if response.status_code != 200: 40 | raise HTTPError(response, f'Get Gateways request returned http error: {response.json()}') 41 | 42 | return self._models_from_get_multiple_response(response, Gateway) 43 | 44 | def get_gateway(self, gateway_id: str) -> Gateway: 45 | """Return the specified gateway 46 | 47 | :param gateway_id: The gateway id 48 | :return: The gateway 49 | """ 50 | # form the url 51 | url = f'{self.base_url}/{self.gateways_snippet}/{gateway_id}' 52 | 53 | # form the headers 54 | headers = self.client.auth_header 55 | 56 | # get the response 57 | response = requests.get(url, headers=headers) 58 | 59 | # 200 is the only successful code, raise an exception on any other response code 60 | if response.status_code != 200: 61 | raise HTTPError(response, f'Get Gateway request returned http error: {response.json()}') 62 | 63 | return self._model_from_get_one_response(response, Gateway) 64 | 65 | def get_datasources(self, gateway_id: str) -> List[GatewayDatasource]: 66 | """Returns a list of datasources from the specified gateway 67 | 68 | :param gateway_id: The gateway id to return responses for 69 | :return: list 70 | The list of datasources 71 | """ 72 | 73 | # form the url 74 | url = f'{self.base_url}/{self.gateways_snippet}/{gateway_id}/{self.datasources_snippet}' 75 | 76 | # form the headers 77 | headers = self.client.auth_header 78 | 79 | # get the response 80 | response = requests.get(url, headers=headers) 81 | 82 | # 200 is the only successful code, raise an exception on any other response code 83 | if response.status_code != 200: 84 | raise HTTPError(response, f'Get Gateway Datasources request returned http error: {response.json()}') 85 | 86 | return self._models_from_get_multiple_response(response, GatewayDatasource) 87 | 88 | def 
get_datasource_users(self, gateway_id: str, datasource_id: str) -> List[DatasourceUser]: 89 | """Returns a list of users who have access to the specified datasource 90 | 91 | :param gateway_id: The gateway id 92 | :param datasource_id: The datasource id 93 | """ 94 | # form the url 95 | url = f'{self.base_url}/{self.gateways_snippet}/{gateway_id}' \ 96 | f'/{self.datasources_snippet}/{datasource_id}/{self.users_snippet}' 97 | 98 | # form the headers 99 | headers = self.client.auth_header 100 | 101 | # get the response 102 | response = requests.get(url, headers=headers) 103 | 104 | # 200 is the only successful code, raise an exception on any other response code 105 | if response.status_code != 200: 106 | raise HTTPError(response, f'Get Datasource Users request returned http error: {response.json()}') 107 | 108 | return self._models_from_get_multiple_response(response, DatasourceUser) 109 | 110 | def create_datasource( 111 | self, 112 | gateway_id: str, 113 | datasource_to_gateway_request: PublishDatasourceToGatewayRequest 114 | ) -> GatewayDatasource: 115 | """Creates a new datasource on the specified gateway 116 | 117 | :param gateway_id: The gateway id 118 | :param datasource_to_gateway_request: Request describing the datasource to be created 119 | """ 120 | # form the url 121 | url = f'{self.base_url}/{self.gateways_snippet}/{gateway_id}/{self.datasources_snippet}' 122 | 123 | # define request body 124 | body = datasource_to_gateway_request.to_dict() 125 | 126 | # form the headers 127 | headers = self.client.auth_header 128 | 129 | # get the response 130 | response = requests.post(url, headers=headers, json=body) 131 | 132 | # 201 is the only successful code, raise an exception on any other response code 133 | if response.status_code != 201: 134 | raise HTTPError(f'Create Datasource request returned the following http error: {response.json()}') 135 | 136 | return self._model_from_get_one_response(response, GatewayDatasource) 137 | 138 | def delete_datasource( 139 
| self, 140 | gateway_id: str, 141 | datasource_id: str 142 | ) -> None: 143 | """Deletes the specified datasource from the specified gateway 144 | 145 | :param gateway_id: The gateway id 146 | :param datasource_id: The datasource id 147 | :return: None 148 | """ 149 | # form the url 150 | url = f'{self.base_url}/{self.gateways_snippet}/{gateway_id}/{self.datasources_snippet}/{datasource_id}' 151 | 152 | # form the headers 153 | headers = self.client.auth_header 154 | 155 | # get the response 156 | response = requests.delete(url, headers=headers) 157 | 158 | # 200 is the only successful code, raise an exception on any other response code 159 | if response.status_code != 200: 160 | raise HTTPError(f'Delete Datasource request returned the following http error: {response.json()}') 161 | 162 | return None 163 | 164 | def add_datasource_user( 165 | self, 166 | gateway_id: str, 167 | datasource_id: str, 168 | datasource_user: DatasourceUser 169 | ) -> None: 170 | """Grants or updates the permissions required to use the specified datasource for the specified user. Note: 171 | This method does not work with a service principal, only with a username password flow, for which the user 172 | has given consent. 
173 | 174 | :param gateway_id: The gateway id 175 | :param datasource_id: The datasource id 176 | :param datasource_user: The datasource user to add 177 | """ 178 | # form the url 179 | url = f'{self.base_url}/{self.gateways_snippet}/{gateway_id}/{self.datasources_snippet}/{datasource_id}/' \ 180 | f'{self.users_snippet}' 181 | 182 | # define request body 183 | body = datasource_user.as_set_values_dict() 184 | 185 | # form the headers 186 | headers = self.client.auth_header 187 | 188 | # get the response 189 | response = requests.post(url, headers=headers, json=body) 190 | 191 | if response.status_code != 200: 192 | # add datasource user requests return an empty body; get the error from headers instead 193 | raise HTTPError(f'Add datasource user request returned the following http error: {response.json()} ' 194 | f'with status code: {response.status_code}') 195 | 196 | return None 197 | 198 | @classmethod 199 | def _models_from_get_multiple_response( 200 | cls, 201 | response: requests.Response, 202 | model_class: Type[Deserializable] 203 | ) -> List[Any]: 204 | """Creates a list of models from a http response object 205 | 206 | :param response: 207 | The http response object 208 | :param model_class: 209 | The model to transform the response items into 210 | :return: list 211 | The list of model_class instances 212 | """ 213 | 214 | # parse json response into a dict 215 | response_dict = json.loads(response.text) 216 | 217 | # Add parsed Gateway objects to list 218 | items = [] 219 | for entry in response_dict[cls.odata_response_wrapper_key]: 220 | items.append(model_class.from_dict(entry)) 221 | 222 | return items 223 | 224 | @classmethod 225 | def _model_from_get_one_response( 226 | cls, 227 | response: requests.Response, 228 | model_class: Type[Deserializable] 229 | ) -> Any: 230 | # parse 231 | response_dict = json.loads(response.text) 232 | 233 | return model_class.from_dict(response_dict) 234 | 
class Group:
    """A Power BI workspace (group)."""

    # JSON keys used by the REST API payload
    id_key = 'id'
    name_key = 'name'
    is_readonly_key = 'isReadOnly'
    is_on_dedicated_capacity_key = 'isOnDedicatedCapacity'

    def __init__(self, name, group_id, is_readonly=False, is_on_dedicated_capacity=False):
        """Constructs a Group.

        :param name: The group name
        :param group_id: The group id
        :param is_readonly: Whether the group is read-only
        :param is_on_dedicated_capacity: Whether the group runs on dedicated capacity
        """
        self.name = name
        self.id = group_id
        self.is_readonly = is_readonly
        self.is_on_dedicated_capacity = is_on_dedicated_capacity

    @classmethod
    def from_dict(cls, dictionary):
        """Constructs a Group from an API response dict.

        :param dictionary: Dictionary describing the group
        :raises RuntimeError: If the dictionary has no id entry
        """
        group_id = dictionary.get(cls.id_key)
        if group_id is None:
            raise RuntimeError("Group dictionary has no id key")

        name = dictionary.get(cls.name_key)
        # Booleans default to False when the API omits them.
        is_readonly = dictionary.get(cls.is_readonly_key, False)
        is_on_dedicated_capacity = dictionary.get(cls.is_on_dedicated_capacity_key, False)

        return cls(name, group_id, is_readonly, is_on_dedicated_capacity)

    def __repr__(self):
        # Restored: the previous repr rendered an empty string.
        return f'<Group id={self.id}, name={self.name}>'
user 21 | :param display_name: str - Display name of the principal 22 | :param identifier: str - Identifier of the principal 23 | :param principal_type: Enum PrincipalType - The principal type 24 | """ 25 | self.group_user_access_right = group_user_access_right 26 | self.email_address = email_address 27 | self.display_name = display_name 28 | self.identifier = identifier 29 | self.principal_type = principal_type 30 | 31 | def as_set_values_dict(self): 32 | """Convert GroupUser object to dict with only values that are actually set. This dict can be used for 33 | groups.add_group_user requests. 34 | 35 | :return: Dict with object attributes in camelCase as keys, and attribute values as values. 36 | """ 37 | group_user_dict = dict() 38 | 39 | if self.group_user_access_right: 40 | group_user_dict[self.group_user_access_right_key] = self.group_user_access_right.value 41 | 42 | if self.email_address: 43 | group_user_dict[self.email_address_key] = self.email_address 44 | 45 | if self.display_name: 46 | group_user_dict[self.display_name_key] = self.display_name 47 | 48 | if self.identifier: 49 | group_user_dict[self.identifier_key] = self.identifier 50 | 51 | if self.principal_type: 52 | group_user_dict[self.principal_type_key] = self.principal_type.value 53 | 54 | return group_user_dict 55 | -------------------------------------------------------------------------------- /pypowerbi/groups.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | import requests 3 | import json 4 | import urllib.parse 5 | 6 | from requests.exceptions import HTTPError 7 | from .group import Group 8 | from .group_user import GroupUser 9 | 10 | 11 | class Groups: 12 | # url snippets 13 | groups_snippet = 'groups' 14 | users_snippet = 'users' 15 | 16 | # json keys 17 | get_reports_value_key = 'value' 18 | 19 | def __init__(self, client): 20 | self.client = client 21 | self.base_url = 
f'{self.client.api_url}/{self.client.api_version_snippet}/{self.client.api_myorg_snippet}' 22 | 23 | def create_group(self, name, workspace_v2=False): 24 | """Creates a new workspace 25 | 26 | :param name: The name of the new group to create 27 | :param workspace_v2: Create a workspace V2 28 | :return: Group 29 | The newly created group 30 | """ 31 | # validate request body 32 | if name is None or name == "": 33 | raise ValueError("Group name cannot be empty or None") 34 | 35 | # define request body 36 | body = {'name': name} 37 | 38 | # create url 39 | url = f'{self.base_url}/{self.groups_snippet}' 40 | 41 | uri_parameters = [] 42 | 43 | if workspace_v2: 44 | stripped_workspace_v2 = json.dumps(workspace_v2).strip('"') 45 | uri_parameters.append(f'workspaceV2={urllib.parse.quote(stripped_workspace_v2)}') 46 | 47 | # add query parameters to url if any 48 | if len(uri_parameters) > 0: 49 | url += f'?{str.join("&", uri_parameters)}' 50 | 51 | # form the headers 52 | headers = self.client.auth_header 53 | 54 | # get the response 55 | response = requests.post(url, headers=headers, json=body) 56 | 57 | # 200 is the only successful code, raise an exception on any other response code 58 | if response.status_code != 200: 59 | raise HTTPError(f'Add group request returned the following http error: {response.json()}') 60 | 61 | return self.create_group_from_create_group_response(response) 62 | 63 | @staticmethod 64 | def create_group_from_create_group_response(response): 65 | """Creates a Group object from the response to a create_group call 66 | 67 | :param response: 68 | The http response object 69 | :return: 70 | Group object describing the newly created group 71 | """ 72 | group_dict = json.loads(response.text) 73 | return Group.from_dict(group_dict) 74 | 75 | def add_group_user(self, group_id, group_user): 76 | """Adds a user to a group 77 | 78 | :param group_id: 79 | str - id of the group to add the user to 80 | :param group_user: 81 | GroupUser - Description of the user 
that should be added to the group 82 | :return: 83 | None 84 | """ 85 | # validate request body 86 | if not isinstance(group_user, GroupUser): 87 | raise TypeError("group_user should be of type group_user.GroupUser !") 88 | 89 | # define request body 90 | body = group_user.as_set_values_dict() 91 | 92 | # create url 93 | stripped_group_id = json.dumps(group_id).strip('"') 94 | url = f'{self.base_url}/{self.groups_snippet}/{urllib.parse.quote(stripped_group_id)}/{self.users_snippet}' 95 | 96 | # form the headers 97 | headers = self.client.auth_header 98 | 99 | # get the response 100 | response = requests.post(url, headers=headers, json=body) 101 | 102 | # 200 is the only successful code, raise an exception on any other response code 103 | if response.status_code != 200: 104 | # add group user requests return an empty body; get the error from headers instead 105 | error_info = response.headers['x-powerbi-error-info'] 106 | raise HTTPError(f'Add group request returned the following http error: {error_info}') 107 | 108 | def count(self): 109 | """ 110 | Evaluates the number of groups that the client has access to 111 | :return: int 112 | The number of groups 113 | """ 114 | return len(self.get_groups()) 115 | 116 | def has_group(self, group_id): 117 | """ 118 | Evaluates if client has access to the group 119 | :param group_id: 120 | :return: bool 121 | True if the client has access to the group, False otherwise 122 | """ 123 | groups = self.get_groups() 124 | 125 | for group in groups: 126 | if group.id == str(group_id): 127 | return True 128 | 129 | return False 130 | 131 | def get_groups(self, filter_str=None, top=None, skip=None): 132 | """ 133 | Fetches all groups that the client has access to 134 | :param filter_str: OData filter string to filter results 135 | :param top: int > 0, OData top parameter to limit to the top n results 136 | :param skip: int > 0, OData skip parameter to skip the first n results 137 | :return: list 138 | The list of groups 139 | """ 140 
class Import:
    """A Power BI import job, describing an uploaded file and what it produced."""

    # json keys
    id_key = 'id'
    name_key = 'name'
    created_timedate_key = 'createdDateTime'
    datasets_key = 'datasets'
    import_state_key = 'importState'
    reports_key = 'reports'
    updated_datetime_key = 'updatedDateTime'
    source_key = 'source'
    connection_type_key = 'connectionType'
    value_key = 'value'

    # import state values
    import_state_succeeded = 'Succeeded'
    import_state_publishing = 'Publishing'

    def __init__(self, import_id, name=None, created_datetime=None, datasets=None, import_state=None,
                 reports=None, updated_datetime=None, source=None, connection_type=None):
        """Constructs an Import.

        :param import_id: The import id
        :param name: The import name
        :param created_datetime: Creation timestamp string
        :param datasets: List of Dataset objects produced by the import, or None
        :param import_state: One of the import_state_* values
        :param reports: List of Report objects produced by the import, or None
        :param updated_datetime: Last-update timestamp string
        :param source: The import source
        :param connection_type: The import connection type
        """
        self.id = import_id
        self.name = name
        self.created_datetime = created_datetime
        self.datasets = datasets
        self.import_state = import_state
        self.reports = reports
        self.updated_datetime = updated_datetime
        self.source = source
        self.connection_type = connection_type

    @classmethod
    def from_dict(cls, dictionary):
        """Constructs an Import from an API response dict.

        :param dictionary: Dictionary describing the import
        :raises RuntimeError: If the dictionary has no id entry
        """
        import_id = dictionary.get(cls.id_key)
        if import_id is None:
            raise RuntimeError("Import dictionary has no id key")

        name = dictionary.get(cls.name_key)
        created_datetime = dictionary.get(cls.created_timedate_key)

        # Nested datasets/reports are only parsed when present;
        # None marks "not returned by the API".
        if cls.datasets_key in dictionary:
            datasets = [Dataset.from_dict(x) for x in dictionary.get(cls.datasets_key)]
        else:
            datasets = None

        import_state = dictionary.get(cls.import_state_key)

        if cls.reports_key in dictionary:
            reports = [Report.from_dict(x) for x in dictionary.get(cls.reports_key)]
        else:
            reports = None

        updated_datetime = dictionary.get(cls.updated_datetime_key)
        source = dictionary.get(cls.source_key)
        connection_type = dictionary.get(cls.connection_type_key)

        return cls(import_id, name, created_datetime, datasets, import_state,
                   reports, updated_datetime, source, connection_type)

    def __repr__(self):
        # Restored: the previous repr rendered an empty string.
        return f'<Import id={self.id}, name={self.name}, state={self.import_state}>'
class Imports:
    """Wraps the Power BI Imports REST APIs: upload pbix files and query
    existing imports, either in 'My workspace' or in a given group.
    """
    # url snippets
    groups_snippet = 'groups'
    imports_snippet = 'imports'
    dataset_displayname_snippet = 'datasetDisplayName'
    nameconflict_snippet = 'nameConflict'

    def __init__(self, client):
        self.client = client
        self.base_url = f'{self.client.api_url}/{self.client.api_version_snippet}/{self.client.api_myorg_snippet}'
        # Matches any character that is not an ASCII letter or digit.
        # BUG FIX: the previous pattern '(?![A-z]|[0-9]).' used the [A-z]
        # range, which also spares the punctuation characters [ \ ] ^ _ `
        # that sit between 'Z' and 'a' in the ASCII table.
        self.upload_file_replace_regex = re.compile('[^0-9A-Za-z]')

    @classmethod
    def import_from_response(cls, response):
        """Build a single Import from an http response body."""
        response_dict = json.loads(response.text)
        return Import.from_dict(response_dict)

    @classmethod
    def imports_from_response(cls, response):
        """Build a list of Imports from an http response body."""
        response_list = json.loads(response.text).get(Import.value_key)
        return [Import.from_dict(x) for x in response_list]

    def _groups_part(self, group_id):
        """Return the url fragment addressing 'My workspace' (None) or a group."""
        if group_id is None:
            return '/'
        return f'/{self.groups_snippet}/{group_id}/'

    def _prepare_display_name(self, dataset_displayname):
        """Sanitize a dataset display name for upload.

        Replaces every non-alphanumeric character with '-' and appends the
        pbix extension (strange yes, but names correctly in the Power BI
        service if so).
        """
        prepared = re.sub(self.upload_file_replace_regex, '-', dataset_displayname)
        return f'{prepared}.pbix'

    def upload_file(self, filename, dataset_displayname, nameconflict=None, group_id=None):
        """Upload a pbix file as a new import.

        :param filename: Path of the pbix file, or an already-open file-like object
        :param dataset_displayname: Display name for the resulting dataset
        :param nameconflict: Optional nameConflict policy to pass to the API
        :param group_id: Optional group id; None targets 'My workspace'
        :return: The created Import
        :raises NotImplementedError: On a 409 name conflict (not handled yet)
        :raises HTTPError: On any other unexpected http response code
        """
        prepared_displayname = self._prepare_display_name(dataset_displayname)

        url = f'{self.base_url}{self._groups_part(group_id)}{self.imports_snippet}' \
              f'?{urllib.parse.urlencode({self.dataset_displayname_snippet: prepared_displayname})}'

        if nameconflict is not None:
            url = url + f'&{self.nameconflict_snippet}={nameconflict}'

        headers = self.client.auth_header
        try:
            with open(filename, 'rb') as file_obj:
                response = requests.post(url, headers=headers,
                                         files={
                                             'file': file_obj,
                                         })
        except TypeError:
            # assume filename is a file-like object already
            response = requests.post(url, headers=headers,
                                     files={
                                         'file': filename,
                                     })

        # 200 OK and 202 Accepted both carry an import payload
        if response.status_code in (200, 202):
            return self.import_from_response(response)
        # 409 Conflict due to name (the old comment mistakenly said "490")
        if response.status_code == 409:
            raise NotImplementedError("Name conflict resolution not implemented yet")
        raise HTTPError(response, f"Upload file failed with status code: {response.json()}")

    def get_import(self, import_id, group_id=None):
        """Fetch a single import by its id.

        :param import_id: The id of the import to fetch
        :param group_id: Optional group id; None targets 'My workspace'
        :return: The Import
        :raises HTTPError: If the response code is not 200
        """
        url = f'{self.base_url}{self._groups_part(group_id)}{self.imports_snippet}/{import_id}'

        response = requests.get(url, headers=self.client.auth_header)

        # 200 OK
        if response.status_code != 200:
            raise HTTPError(response, f"Get import failed with status code: {response.json()}")

        return self.import_from_response(response)

    def get_imports(self, group_id=None):
        """Fetch all imports in the workspace or group.

        :param group_id: Optional group id; None targets 'My workspace'
        :return: The list of Imports
        :raises HTTPError: If the response code is not 200
        """
        url = f'{self.base_url}{self._groups_part(group_id)}{self.imports_snippet}'

        response = requests.get(url, headers=self.client.auth_header)

        # 200 OK
        if response.status_code != 200:
            raise HTTPError(response, f"Get imports failed with status code: {response.json()}")

        return self.imports_from_response(response)
class Report:
    """A single Power BI report, plus its JSON (de)serialization keys."""

    # json keys
    id_key = 'id'
    name_key = 'name'
    web_url_key = 'webUrl'
    embed_url_key = 'embedUrl'
    dataset_id_key = 'datasetId'
    target_workspace_id_key = 'targetWorkspaceId'
    target_model_id_key = 'targetModelId'

    def __init__(self, report_id, name, web_url, embed_url, dataset_id):
        """
        :param report_id: The unique id of the report
        :param name: The report name
        :param web_url: Url to view the report on the web, may be None
        :param embed_url: Url used to embed the report, may be None
        :param dataset_id: Id of the backing dataset, may be None
        """
        self.id = report_id
        self.name = name
        self.web_url = web_url
        self.embed_url = embed_url
        self.dataset_id = dataset_id

    @classmethod
    def from_dict(cls, dictionary):
        """
        Creates a report from a dictionary
        :param dictionary: The dictionary to create a report from
        :return: The created report
        :raises RuntimeError: If the id or name key is missing, empty or whitespace
        """
        # id is required
        if cls.id_key not in dictionary:
            raise RuntimeError(f'Report dict has no {cls.id_key} key')
        report_id = str(dictionary[cls.id_key])
        # BUG FIX: ''.isspace() is False, so an empty id previously slipped
        # through the whitespace check; strip() rejects both cases.
        if not report_id.strip():
            raise RuntimeError(f'Report dict has empty {cls.id_key} key value')

        # name is required
        if cls.name_key not in dictionary:
            raise RuntimeError(f'Report dict has no {cls.name_key} key')
        report_name = str(dictionary[cls.name_key])
        if not report_name.strip():
            raise RuntimeError(f'Report dict has empty {cls.name_key} key value')

        # optional values; coerced to str when present, None otherwise
        web_url = str(dictionary[cls.web_url_key]) if cls.web_url_key in dictionary else None
        embed_url = str(dictionary[cls.embed_url_key]) if cls.embed_url_key in dictionary else None
        dataset_id = dictionary.get(cls.dataset_id_key)

        # use cls (not Report) so subclasses get instances of themselves
        return cls(report_id, report_name, web_url, embed_url, dataset_id)

    def __repr__(self):
        # BUG FIX: previously returned an empty f-string
        return f'<Report id={self.id} name={self.name}>'


class ReportEncoder(json.JSONEncoder):
    """JSON encoder that serializes a Report using the REST API's key names."""

    def default(self, o):
        return {
            Report.id_key: o.id,
            Report.name_key: o.name,
            Report.web_url_key: o.web_url,
            Report.embed_url_key: o.embed_url,
            Report.dataset_id_key: o.dataset_id
        }
class Reports:
    """Wraps the Power BI Reports REST APIs: get, clone, delete, rebind and
    export reports, and generate embed tokens.

    All methods take an optional group_id; when it is None the request
    targets 'My workspace'.
    """
    # url snippets
    groups_snippet = 'groups'
    reports_snippet = 'reports'
    rebind_snippet = 'rebind'
    clone_snippet = 'clone'
    export_snippet = 'Export'
    generate_token_snippet = 'generatetoken'

    # json keys
    get_reports_value_key = 'value'

    def __init__(self, client):
        self.client = client
        self.base_url = f'{self.client.api_url}/{self.client.api_version_snippet}/{self.client.api_myorg_snippet}'

    def _groups_part(self, group_id):
        """Return the url fragment addressing 'My workspace' (None) or a group."""
        if group_id is None:
            return '/'
        return f'/{self.groups_snippet}/{group_id}/'

    def count(self, group_id=None):
        """
        Evaluates the number of reports
        :param group_id: The optional group id
        :return: The number of reports as returned by the API
        """
        return len(self.get_reports(group_id))

    def has_report(self, report_id, group_id=None):
        """
        Evaluates if the report exists
        :param report_id: The id of the report to evaluate
        :param group_id: The optional group id
        :return: True if the report exists, False otherwise
        """
        return any(report.id == str(report_id) for report in self.get_reports(group_id))

    def get_reports(self, group_id=None):
        """
        Gets all reports
        https://msdn.microsoft.com/en-us/library/mt634543.aspx
        :param group_id: The optional group id to get reports from
        :return: The list of reports for the given group
        :raises HTTPError: If the response code is not 200
        """
        url = f'{self.base_url}{self._groups_part(group_id)}{self.reports_snippet}/'
        response = requests.get(url, headers=self.client.auth_header)

        # 200 - OK. Indicates success. List of reports.
        if response.status_code != 200:
            raise HTTPError(response, f'Get reports request returned http error: {response.json()}')

        return self.reports_from_get_reports_response(response)

    def get_report(self, report_id, group_id=None):
        """
        Gets a report
        https://msdn.microsoft.com/en-us/library/mt784668.aspx
        :param report_id: The id of the report to get
        :param group_id: The optional group id
        :return: The report as returned by the API
        :raises RuntimeError: If no report with the given id exists
        """
        # str() for consistency with has_report, so non-string ids also match
        for report in self.get_reports(group_id):
            if report.id == str(report_id):
                return report

        raise RuntimeError('Could not find report')

    def clone_report(self, report_id, name, target_group_id, dataset_id, group_id=None):
        """
        Clones a report
        https://msdn.microsoft.com/en-us/library/mt784674.aspx
        :param report_id: The report id to clone
        :param name: The name to give the cloned report
        :param target_group_id: The target group for the cloned report
        :param dataset_id: The dataset id for the cloned report
        :param group_id: The optional group id
        :return: The cloned report
        :raises HTTPError: If the response code is not 200
        """
        url = f'{self.base_url}{self._groups_part(group_id)}{self.reports_snippet}/{report_id}/{self.clone_snippet}'
        json_dict = {
            Report.name_key: name,
            Report.target_model_id_key: str(dataset_id),
        }

        # target group id can be none, account for it
        if target_group_id is not None:
            json_dict[Report.target_workspace_id_key] = str(target_group_id)

        response = requests.post(url, headers=self.client.auth_header, json=json_dict)

        # 200 - OK. Indicates success.
        if response.status_code != 200:
            raise HTTPError(response, f'Clone report request returned http error: {response.json()}')

        return Report.from_dict(json.loads(response.text))

    def delete_report(self, report_id, group_id=None):
        """
        Deletes a report
        https://msdn.microsoft.com/en-us/library/mt784671.aspx
        :param report_id: The id of the report to delete
        :param group_id: The id of the group from which to delete the report
        :raises HTTPError: If the response code is not 200
        """
        url = f'{self.base_url}{self._groups_part(group_id)}{self.reports_snippet}/{report_id}/'
        response = requests.delete(url, headers=self.client.auth_header)

        # 200 - OK. Indicates success.
        if response.status_code != 200:
            raise HTTPError(response, f'Delete report request returned http error: {response.json()}')

    def rebind_report(self, report_id, dataset_id, group_id=None):
        """
        Rebinds a report to another dataset
        https://msdn.microsoft.com/en-us/library/mt784672.aspx
        :param report_id: The id of the report to rebind
        :param dataset_id: The id of the dataset to rebind the report to
        :param group_id: The optional id of the group from which the report belongs to
        :raises HTTPError: If the response code is not 200
        """
        url = f'{self.base_url}{self._groups_part(group_id)}{self.reports_snippet}/{report_id}/{self.rebind_snippet}'
        json_dict = {
            Report.dataset_id_key: dataset_id
        }

        response = requests.post(url, headers=self.client.auth_header, json=json_dict)

        # 200 - OK. Indicates success.
        if response.status_code != 200:
            raise HTTPError(response, f'Rebind report request returned http error: {response.json()}')

    def generate_token(self, report_id, token_request, group_id):
        """
        Generates an embed token for a report
        https://msdn.microsoft.com/en-us/library/mt784614.aspx
        :param report_id: The report to generate the token for
        :param token_request: The token request object
        :param group_id: The group id
        :return: Returns the embed token
        :raises HTTPError: If the response code is not 200
        """
        url = f'{self.base_url}/{self.groups_snippet}/{group_id}/' \
              f'{self.reports_snippet}/{report_id}/{self.generate_token_snippet}'
        # serialize the token request with the project's encoder
        json_dict = pypowerbi.client.TokenRequestEncoder().default(token_request)

        response = requests.post(url, headers=self.client.auth_header, json=json_dict)

        # 200 - OK. Indicates success.
        if response.status_code != 200:
            raise HTTPError(response, f'Generate token for report request returned http error: {response.json()}')

        return pypowerbi.client.EmbedToken.from_dict(json.loads(response.text))

    def export_report(
            self,
            report_id: str,
            save_path: str,
            filename: Optional[str] = None,
            group_id: Optional[str] = None
    ) -> None:
        """Exports the specified report to a pbix file.

        :param report_id: The report id
        :param save_path: The path where the pbix file should be saved
        :param filename: The name to assign to the downloaded file (without the pbix extension).
            If None, the report name will be used.
        :param group_id: The id of the workspace that contains the report. If None, then 'My workspace' is assumed.
        :raises HTTPError: If the response code is not 200
        """
        url = f'{self.base_url}{self._groups_part(group_id)}{self.reports_snippet}/{report_id}/{self.export_snippet}'

        response = requests.get(url, headers=self.client.auth_header)

        # 200 is the only valid response. Show an error in other cases.
        if response.status_code != 200:
            in_group_part = "" if group_id is None else "in Group"
            raise HTTPError(response, f'Export Report {in_group_part} request returned an http error: '
                                      f'{response.json()}')

        # fall back to the report's own name when no filename was given
        if filename is None:
            report = self.get_report(report_id, group_id)
            filename = report.name

        # BUG FIX: the file was previously saved under a hard-coded name,
        # ignoring the filename computed above.
        with open(f'{save_path}/{filename}.pbix', 'wb') as report_file:
            # response.content is already bytes; no BytesIO round-trip needed
            report_file.write(response.content)

    @classmethod
    def reports_from_get_reports_response(cls, response):
        """
        Creates a list of reports from a http response
        :param response: The response to create the reports from
        :return: A list of reports created from the http response
        """
        response_dict = json.loads(response.text)
        return [Report.from_dict(entry) for entry in response_dict[cls.get_reports_value_key]]
from unittest import TestCase

from pypowerbi import CredentialType
from pypowerbi.credentials import AnonymousCredentials, BasicCredentials, KeyCredentials, OAuth2Credentials, \
    WindowsCredentials


# The following tests are based on the examples found here:
# https://docs.microsoft.com/en-us/rest/api/power-bi/gateways/updatedatasource#examples
class CredentialsTestCase(TestCase):
    """Unit tests for the credentials serialization helpers.

    Each test checks the exact JSON emitted by to_json() and the
    CREDENTIAL_TYPE constant of the corresponding credentials class, and that
    empty required fields are rejected with ValueError.
    """

    def test_anonymous_credentials(self):
        # anonymous credentials serialize to an empty credentialData payload
        anonymous_credentials = AnonymousCredentials()

        expected = r'{"credentialData":""}'
        actual = anonymous_credentials.to_json()

        self.assertEqual(expected, actual)
        self.assertEqual(
            CredentialType.ANONYMOUS,
            anonymous_credentials.CREDENTIAL_TYPE
        )

    def test_basic_credentials(self):
        basic_credentials = BasicCredentials("john", "*****")

        expected = r'{"credentialData":[{"name":"username","value":"john"},{"name":"password","value":"*****"}]}'
        actual = basic_credentials.to_json()

        # both username and password are mandatory
        with self.assertRaises(ValueError):
            # empty username
            BasicCredentials("", "myPassword")

        with self.assertRaises(ValueError):
            # empty password
            BasicCredentials("myUsername", "")

        self.assertEqual(expected, actual)
        self.assertEqual(
            CredentialType.BASIC,
            basic_credentials.CREDENTIAL_TYPE
        )

    def test_key_credentials(self):
        key_credentials = KeyCredentials("ec....LA=")

        expected = r'{"credentialData":[{"name":"key","value":"ec....LA="}]}'
        actual = key_credentials.to_json()

        with self.assertRaises(ValueError):
            # empty key
            KeyCredentials("")

        self.assertEqual(expected, actual)

        self.assertEqual(
            CredentialType.KEY,
            key_credentials.CREDENTIAL_TYPE
        )

    def test_o_auth_2_credentials(self):
        o_auth_2_credentials = OAuth2Credentials("eyJ0....fwtQ")

        expected = r'{"credentialData":[{"name":"accessToken","value":"eyJ0....fwtQ"}]}'
        actual = o_auth_2_credentials.to_json()

        with self.assertRaises(ValueError):
            # empty access token
            OAuth2Credentials("")

        self.assertEqual(expected, actual)

        self.assertEqual(
            CredentialType.OAUTH2,
            o_auth_2_credentials.CREDENTIAL_TYPE
        )

    def test_windows_credentials(self):
        # NOTE(review): r'contoso\\john' is a raw string and therefore holds
        # TWO literal backslashes, while the expected JSON below holds one
        # escaped backslash (i.e. one literal backslash after decoding). If
        # to_json() escapes backslashes during serialization these would not
        # match -- confirm the intended input against credentials.py.
        windows_credentials = WindowsCredentials(r'contoso\\john', "*****")

        expected = r'{"credentialData":[{"name":"username","value":"contoso\\john"},' \
                   r'{"name":"password","value":"*****"}]}'
        actual = windows_credentials.to_json()

        with self.assertRaises(ValueError):
            # empty username
            WindowsCredentials("", "myPassword")

        with self.assertRaises(ValueError):
            # empty password
            WindowsCredentials("myUsername", "")

        self.assertEqual(expected, actual)
        self.assertEqual(
            CredentialType.WINDOWS,
            windows_credentials.CREDENTIAL_TYPE
        )
self.assertIsNotNone(column_json) 29 | 30 | expected_json = '{' \ 31 | '"name": "theNameOfTheString", ' \ 32 | '"dataType": "string"' \ 33 | '}' 34 | self.assertEqual(column_json, expected_json) 35 | 36 | def test_table_json(self): 37 | column0 = Column(name='id', data_type='Int64') 38 | self.assertIsNotNone(column0) 39 | 40 | column1 = Column(name='name', data_type='string') 41 | self.assertIsNotNone(column1) 42 | 43 | table = Table(name='testTable', columns=[column0, column1]) 44 | self.assertIsNotNone(table) 45 | 46 | table_json = json.dumps(table, cls=TableEncoder) 47 | self.assertIsNotNone(table_json) 48 | 49 | expected_json = '{' \ 50 | '"name": "testTable", ' \ 51 | '"columns": ' \ 52 | '[' \ 53 | '{' \ 54 | '"name": "id", ' \ 55 | '"dataType": "Int64"' \ 56 | '}, ' \ 57 | '{"name": "name", ' \ 58 | '"dataType": "string"' \ 59 | '}' \ 60 | ']' \ 61 | '}' 62 | self.assertEqual(table_json, expected_json) 63 | 64 | def test_dataset_json(self): 65 | column0 = Column(name='id', data_type='Int64') 66 | self.assertIsNotNone(column0) 67 | 68 | column1 = Column(name='name', data_type='string') 69 | self.assertIsNotNone(column1) 70 | 71 | table = Table(name='testTable', columns=[column0, column1]) 72 | self.assertIsNotNone(table) 73 | 74 | dataset = Dataset(name=f'testDataset', tables=[table]) 75 | self.assertIsNotNone(dataset) 76 | 77 | dataset_json = json.dumps(dataset, cls=DatasetEncoder) 78 | self.assertIsNotNone(dataset_json) 79 | 80 | expected_json = '{' \ 81 | '"name": "testDataset", ' \ 82 | '"tables": [' \ 83 | '{' \ 84 | '"name": "testTable", ' \ 85 | '"columns": [' \ 86 | '{' \ 87 | '"name": "id", ' \ 88 | '"dataType": "Int64"' \ 89 | '}, ' \ 90 | '{' \ 91 | '"name": "name", ' \ 92 | '"dataType": "string"' \ 93 | '}' \ 94 | ']' \ 95 | '}' \ 96 | ']' \ 97 | '}' 98 | 99 | self.assertEqual(dataset_json, expected_json) 100 | -------------------------------------------------------------------------------- /pypowerbi/tests/imports_tests.py: 
# -*- coding: future_fstrings -*-

import json
from unittest import TestCase

from pypowerbi.client import *


class PowerBIClientJSONTests(TestCase):
    """Tests for the JSON encoders used to build embed-token requests."""

    def test_effective_identity_json(self):
        # an effective identity couples a username with its roles and datasets
        roles = ['role0', 'role1']
        datasets = ['jsdbfj783uisdkjfjkdsf', '8923u4ihjknkjnfdsk']
        effective_identity = EffectiveIdentity('yabbadabba', roles, datasets)

        effective_identity_json = json.dumps(effective_identity, cls=EffectiveIdentityEncoder)
        self.assertIsNotNone(effective_identity_json)

        expected_json = '{"username": "yabbadabba", "roles": ["role0", "role1"], "datasets": ["jsdbfj783uisdkjfjkdsf", "8923u4ihjknkjnfdsk"]}'

        self.assertEqual(expected_json, effective_identity_json)

    def test_token_request_json(self):
        roles = ['role0', 'role1']
        datasets = ['jsdbfj783uisdkjfjkdsf', '8923u4ihjknkjnfdsk']
        effective_identity = EffectiveIdentity('yabbadabba', roles, datasets)

        # a 'view' access-level request for one dataset with save-as allowed
        token_request = TokenRequest('view', '8324yihuknjsdf09io2k', True, [effective_identity])

        token_request_json = json.dumps(token_request, cls=TokenRequestEncoder)
        self.assertIsNotNone(token_request_json)

        expected_json = '{"accessLevel": "view", "datasetId": "8324yihuknjsdf09io2k", "allowSaveAs": true, "identities": [{"username": "yabbadabba", "roles": ["role0", "role1"], "datasets": ["jsdbfj783uisdkjfjkdsf", "8923u4ihjknkjnfdsk"]}]}'

        self.assertEqual(expected_json, token_request_json)
-------------------------------------------------------------------------------- /pypowerbi/tests/powerbiclient_tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | 3 | import adal 4 | import datetime 5 | import time 6 | from unittest import TestCase 7 | 8 | from pypowerbi.client import * 9 | from pypowerbi.reports import * 10 | from pypowerbi.report import * 11 | from pypowerbi.datasets import * 12 | from pypowerbi.dataset import * 13 | from pypowerbi.imports import * 14 | from pypowerbi.import_class import * 15 | from pypowerbi.tests.settings import PowerBITestSettings 16 | 17 | 18 | class PowerBIAPITests(TestCase): 19 | # default testing urls 20 | authority_url = PowerBITestSettings.authority_url 21 | resource_url = PowerBITestSettings.resource_url 22 | api_url = PowerBITestSettings.api_url 23 | 24 | # default testing credentials 25 | client_id = PowerBITestSettings.client_id 26 | username = PowerBITestSettings.username 27 | password = PowerBITestSettings.password 28 | group_ids = PowerBITestSettings.group_ids 29 | 30 | # default test prefixes 31 | test_dataset_prefix = 'testDataset_' 32 | test_report_prefix = 'testReport_' 33 | test_table_prefix = 'testTable_' 34 | 35 | dataset_counts = {} 36 | report_counts = {} 37 | client = None 38 | 39 | @classmethod 40 | def tearDownClass(cls): 41 | client = PowerBIClient(cls.api_url, PowerBIAPITests().get_token()) 42 | 43 | for group_id in cls.group_ids: 44 | cls.delete_test_datasets(client, group_id) 45 | cls.delete_test_reports(client, group_id) 46 | 47 | def setUp(self): 48 | self.client = PowerBIClient(self.api_url, self.get_token()) 49 | 50 | # delete and count assets for each group 51 | for group_id in self.group_ids: 52 | self.delete_test_assets(self.client, group_id) 53 | self.count_assets(self.client, group_id) 54 | 55 | def delete_test_assets(self, client, group_id): 56 | self.delete_test_datasets(client, group_id) 57 | 
self.delete_test_reports(client, group_id) 58 | 59 | def count_assets(self, client, group_id): 60 | self.dataset_counts[group_id] = client.datasets.count(group_id) 61 | self.report_counts[group_id] = client.reports.count(group_id) 62 | 63 | @classmethod 64 | def delete_test_datasets(cls, client, group_id=None): 65 | datasets = client.datasets.get_datasets(group_id) 66 | for dataset in datasets: 67 | if cls.test_dataset_prefix in dataset.name: 68 | client.datasets.delete_dataset(dataset.id, group_id) 69 | 70 | @classmethod 71 | def delete_test_reports(cls, client, group_id=None): 72 | reports = client.reports.get_reports(group_id) 73 | for report in reports: 74 | if cls.test_report_prefix in report.name: 75 | client.reports.delete_report(report.id, group_id) 76 | 77 | @classmethod 78 | def add_mock_dataset(cls, client, table_count=1, group_id=None): 79 | tables = cls.create_mock_tables(table_count) 80 | 81 | # create the dataset 82 | dataset = Dataset(name=f'{cls.test_dataset_prefix}{datetime.datetime.utcnow()}', tables=tables) 83 | 84 | # post and return the result 85 | return client.datasets.post_dataset(dataset, group_id) 86 | 87 | @classmethod 88 | def add_mock_dataset_with_tables(cls, client, tables, group_id=None): 89 | # create the dataset 90 | dataset = Dataset(name=f'{cls.test_dataset_prefix}{datetime.datetime.utcnow()}', tables=tables) 91 | 92 | # post and return the result 93 | return client.datasets.post_dataset(dataset, group_id) 94 | 95 | @classmethod 96 | def create_mock_tables(cls, table_count): 97 | tables = [] 98 | for x in range(0, table_count): 99 | # we add a column of each type for each table 100 | columns = [ 101 | Column(name='id', data_type='Int64'), 102 | Column(name='name', data_type='string'), 103 | Column(name='is_interesting', data_type='boolean'), 104 | Column(name='cost_usd', data_type='double'), 105 | Column(name='purchase_date', data_type='datetime'), 106 | ] 107 | 108 | table_name = f'{cls.test_table_prefix}{x}' 109 | 110 | 
measures = [ 111 | Measure(name=f'entry_count_{x}', expression=f'COUNTROWS( \'{table_name}\' )') 112 | ] 113 | 114 | # add the table 115 | tables.append(Table(name=table_name, columns=columns, measures=measures)) 116 | 117 | return tables 118 | 119 | @classmethod 120 | def add_mock_report(cls, client, group_id): 121 | # to add a mock report, we clone an existing one 122 | reports = client.reports.get_reports(group_id) 123 | return client.reports.clone_report(reports[0].id, 124 | f'{cls.test_report_prefix}' 125 | f'{datetime.datetime.utcnow()}', 126 | group_id, 127 | reports[0].dataset_id, group_id) 128 | 129 | def get_token(self): 130 | context = adal.AuthenticationContext(authority=self.authority_url, 131 | validate_authority=True, 132 | api_version=None) 133 | 134 | return context.acquire_token_with_username_password(resource=self.resource_url, 135 | client_id=self.client_id, 136 | username=self.username, 137 | password=self.password) 138 | 139 | def assert_datasets_valid(self, datasets): 140 | self.assertIsNotNone(datasets) 141 | 142 | for dataset in datasets: 143 | self.assert_dataset_valid(dataset) 144 | 145 | def assert_dataset_valid(self, dataset): 146 | self.assertIsNotNone(dataset) 147 | self.assertIsNotNone(dataset.id) 148 | self.assertIsNotNone(dataset.name) 149 | 150 | def assert_reports_valid(self, reports): 151 | self.assertIsNotNone(reports) 152 | 153 | for report in reports: 154 | self.assert_report_valid(report) 155 | 156 | def assert_report_valid(self, report): 157 | self.assertIsNotNone(report) 158 | self.assertIsNotNone(report.id) 159 | self.assertIsNotNone(report.name) 160 | self.assertIsNotNone(report.web_url) 161 | self.assertIsNotNone(report.embed_url) 162 | self.assertIsNotNone(report.dataset_id) 163 | 164 | def test_aad_auth(self): 165 | self.assertIsNotNone(self.get_token()) 166 | 167 | def test_client_get_datasets(self): 168 | for group_id in self.group_ids: 169 | self._test_client_get_datasets_impl(self.client, group_id) 170 | 171 | def 
_test_client_get_datasets_impl(self, client, group_id=None): 172 | # validate our initial number of datasets 173 | datasets = client.datasets.get_datasets(group_id) 174 | # validate our datasets and the count 175 | self.assert_datasets_valid(datasets) 176 | self.assertEqual(len(datasets), 0 + self.dataset_counts[group_id]) 177 | 178 | # add a mock dataset 179 | self.add_mock_dataset(client, group_id=group_id) 180 | 181 | # validate that we now have one more than before 182 | datasets = client.datasets.get_datasets(group_id) 183 | # validate our datasets and the count 184 | self.assert_datasets_valid(datasets) 185 | self.assertEqual(len(datasets), 1 + self.dataset_counts[group_id]) 186 | 187 | # add another 188 | self.add_mock_dataset(client, group_id=group_id) 189 | 190 | # validate that we now have one more than before 191 | datasets = client.datasets.get_datasets(group_id) 192 | # validate our datasets and the count 193 | self.assert_datasets_valid(datasets) 194 | self.assertEqual(len(datasets), 2 + self.dataset_counts[group_id]) 195 | 196 | def test_client_get_dataset(self): 197 | for group_id in self.group_ids: 198 | self._test_client_get_dataset_impl(self.client, group_id) 199 | 200 | def _test_client_get_dataset_impl(self, client, group_id=None): 201 | # add a mock dataset 202 | self.add_mock_dataset(client, group_id=group_id) 203 | # validate that we have some valid datasets 204 | datasets = client.datasets.get_datasets(group_id) 205 | self.assertGreater(len(datasets), 0) 206 | self.assert_datasets_valid(datasets) 207 | 208 | # validate that the single dataset get is what we expect 209 | dataset = client.datasets.get_dataset(datasets[0].id, group_id) 210 | self.assert_dataset_valid(dataset) 211 | self.assertDictEqual(datasets[0].__dict__, dataset.__dict__) 212 | 213 | def test_client_post_dataset(self): 214 | for group_id in self.group_ids: 215 | self._test_client_post_dataset_impl(self.client, group_id) 216 | 217 | def _test_client_post_dataset_impl(self, 
client, group_id=None): 218 | # validate that we have 0 additional datasets 219 | datasets = client.datasets.get_datasets(group_id) 220 | self.assert_datasets_valid(datasets) 221 | self.assertEqual(len(datasets), 0 + self.dataset_counts[group_id]) 222 | 223 | # we add a column of each type 224 | columns = [ 225 | Column(name='id', data_type='Int64'), 226 | Column(name='name', data_type='string'), 227 | Column(name='is_interesting', data_type='boolean'), 228 | Column(name='cost_usd', data_type='double'), 229 | Column(name='purchase_date', data_type='datetime') 230 | ] 231 | 232 | table_name = f'{self.test_table_prefix}0' 233 | 234 | # add a measure 235 | measures = [ 236 | Measure(name='entry_count_0', expression=f'COUNTROWS( \'{table_name}\' )') 237 | ] 238 | 239 | table = Table(name=table_name, columns=columns, measures=measures) 240 | dataset_name = f'{self.test_dataset_prefix}{datetime.datetime.utcnow()}' 241 | dataset = Dataset(name=dataset_name, tables=[table]) 242 | 243 | # validate that the returned dataset is what we expected to be posted 244 | returned_dataset = client.datasets.post_dataset(dataset, group_id) 245 | 246 | self.assert_dataset_valid(returned_dataset) 247 | self.assertEqual(dataset_name, returned_dataset.name) 248 | 249 | # validate that we now have one more dataset 250 | datasets = client.datasets.get_datasets(group_id) 251 | self.assert_datasets_valid(datasets) 252 | self.assertEqual(len(datasets), 1 + self.dataset_counts[group_id]) 253 | 254 | def test_client_delete_dataset(self): 255 | for group_id in self.group_ids: 256 | self._test_client_delete_dataset_impl(self.client, group_id) 257 | 258 | def _test_client_delete_dataset_impl(self, client, group_id=None): 259 | # validate our initial number of datasets 260 | datasets = client.datasets.get_datasets(group_id) 261 | self.assert_datasets_valid(datasets) 262 | self.assertEqual(len(datasets), 0 + self.dataset_counts[group_id]) 263 | 264 | # add another dataset 265 | dataset = 
self.add_mock_dataset(client, group_id=group_id) 266 | 267 | # validate that we have an additional dataset 268 | datasets = client.datasets.get_datasets(group_id) 269 | self.assertEqual(len(datasets), 1 + self.dataset_counts[group_id]) 270 | 271 | # delete a dataset 272 | client.datasets.delete_dataset(dataset.id, group_id) 273 | 274 | # validate that we have deleted the dataset 275 | datasets = client.datasets.get_datasets(group_id) 276 | self.assertEqual(len(datasets), 0 + self.dataset_counts[group_id]) 277 | 278 | # ensure no returned dataset has the deleted dataset id 279 | for returned_dataset in datasets: 280 | self.assertNotEqual(dataset.id, returned_dataset.id) 281 | 282 | def test_client_get_tables(self): 283 | for group_id in self.group_ids: 284 | self._test_client_get_tables_impl(self.client, group_id) 285 | 286 | def _test_client_get_tables_impl(self, client, group_id=None): 287 | dataset = self.add_mock_dataset(client, 1, group_id) 288 | tables = client.datasets.get_tables(dataset.id, group_id) 289 | 290 | # make sure that we get one table back 291 | self.assertIsNotNone(tables) 292 | self.assertEqual(len(tables), 1) 293 | 294 | # make sure that the tables are named as expected 295 | for table in tables: 296 | self.assertIn(self.test_table_prefix, table.name) 297 | 298 | # add another dataset, but with two tables 299 | dataset = self.add_mock_dataset(client, 2, group_id) 300 | tables = client.datasets.get_tables(dataset.id, group_id) 301 | 302 | # make sure that we get two tables back 303 | self.assertIsNotNone(tables) 304 | self.assertEqual(len(tables), 2) 305 | 306 | # make sure that the tables are named as expected 307 | for table in tables: 308 | self.assertIn(self.test_table_prefix, table.name) 309 | 310 | def test_client_get_dataset_parameters(self): 311 | for group_id in self.group_ids: 312 | self._test_client_get_dataset_parameters_impl(self.client, group_id) 313 | 314 | def _test_client_get_dataset_parameters_impl(self, client, group_id=None): 
315 | dataset = self.add_mock_dataset(client, 1, group_id) 316 | parameters = client.datasets.get_dataset_parameters(dataset.id, group_id) 317 | 318 | # make sure that we get some parameters back 319 | self.assertIsNotNone(parameters) 320 | 321 | def test_client_post_rows(self): 322 | for group_id in self.group_ids: 323 | self._test_client_post_rows_impl(self.client, group_id) 324 | 325 | def _test_client_post_rows_impl(self, client, group_id=None): 326 | dataset = self.add_mock_dataset(client, 1, group_id) 327 | tables = client.datasets.get_tables(dataset.id, group_id) 328 | 329 | row0 = Row(id=1, name='yabbadabba') 330 | row1 = Row(id=2, name='oogabooga') 331 | 332 | client.datasets.post_rows(dataset.id, tables[0].name, [row0, row1], group_id) 333 | tables = client.datasets.get_tables(dataset.id, group_id) 334 | 335 | self.assertIsNotNone(tables) 336 | self.assertEqual(len(tables), 1) 337 | 338 | # we have no way to validate rows at this point, powerbi api does not allow row queries 339 | 340 | def test_client_delete_rows(self): 341 | for group_id in self.group_ids: 342 | self._test_client_delete_rows_impl(self.client, group_id) 343 | 344 | def _test_client_delete_rows_impl(self, client, group_id=None): 345 | dataset = self.add_mock_dataset(client, 1, group_id) 346 | tables = client.datasets.get_tables(dataset.id, group_id) 347 | 348 | row0 = Row(id=1, name='yabbadabba') 349 | row1 = Row(id=2, name='oogabooga') 350 | 351 | client.datasets.post_rows(dataset.id, tables[0].name, [row0, row1], group_id) 352 | client.datasets.delete_rows(dataset.id, tables[0].name, group_id) 353 | 354 | # we have no way to validate rows at this point, powerbi api does not allow row queries 355 | 356 | def test_client_get_reports(self): 357 | for group_id in self.group_ids: 358 | self._test_client_get_reports_impl(self.client, group_id) 359 | 360 | def _test_client_get_reports_impl(self, client, group_id=None): 361 | reports = client.reports.get_reports(group_id) 362 | 363 | # validate 
that we got valid reports 364 | self.assert_reports_valid(reports) 365 | self.assertGreater(len(reports), 0) 366 | self.assertEqual(len(reports), self.report_counts[group_id]) 367 | 368 | def test_client_get_report(self): 369 | for group_id in self.group_ids: 370 | self._test_client_get_report_impl(self.client, group_id) 371 | 372 | def _test_client_get_report_impl(self, client, group_id=None): 373 | reports = client.reports.get_reports(group_id) 374 | 375 | # validate that we got reports 376 | self.assert_reports_valid(reports) 377 | self.assertGreater(len(reports), 0) 378 | self.assertEqual(len(reports), self.report_counts[group_id]) 379 | 380 | # validate that they are valid 381 | self.assert_reports_valid(reports) 382 | 383 | # get a specific report using the first report id 384 | report = client.reports.get_report(reports[0].id, group_id) 385 | 386 | # validate that the returned report is valid 387 | self.assert_report_valid(report) 388 | 389 | # validate that the first report and the fetched report are the same 390 | self.assertDictEqual(reports[0].__dict__, report.__dict__) 391 | 392 | def test_client_clone_report(self): 393 | for group_id in self.group_ids: 394 | self._test_client_clone_report_impl(self.client, group_id) 395 | 396 | def _test_client_clone_report_impl(self, client, group_id=None): 397 | reports = client.reports.get_reports(group_id) 398 | 399 | # validate that we got reports 400 | self.assert_reports_valid(reports) 401 | self.assertGreater(len(reports), 0) 402 | self.assertEqual(len(reports), self.report_counts[group_id]) 403 | 404 | # validate that they are valid 405 | self.assert_reports_valid(reports) 406 | 407 | # clone the first report 408 | report = client.reports.clone_report(reports[0].id, 409 | f'{self.test_report_prefix}' 410 | f'{datetime.datetime.utcnow()}', 411 | None, 412 | reports[0].dataset_id, group_id) 413 | 414 | # validate that the cloned report is valid 415 | self.assert_report_valid(report) 416 | 417 | # validate that
the cloned report differs from the original where relevant 418 | self.assertNotEqual(report.id, reports[0].id) 419 | self.assertIsNotNone(report.name) 420 | self.assertNotEqual(report.name, reports[0].name) 421 | self.assertIsNotNone(report.web_url) 422 | self.assertNotEqual(report.web_url, reports[0].web_url) 423 | self.assertIsNotNone(report.embed_url) 424 | self.assertNotEqual(report.embed_url, reports[0].embed_url) 425 | self.assertIsNotNone(report.dataset_id) 426 | 427 | # validate that the cloned report does not differ from the original where valid 428 | self.assertEqual(report.dataset_id, reports[0].dataset_id) 429 | 430 | # validate that the report was actually cloned 431 | reports = client.reports.get_reports(group_id) 432 | 433 | # validate that we got reports 434 | self.assert_reports_valid(reports) 435 | self.assertGreater(len(reports), 0) 436 | self.assertEqual(len(reports), 1 + self.report_counts[group_id]) 437 | 438 | # validate that the reports are valid 439 | self.assert_reports_valid(reports) 440 | 441 | def test_client_delete_report(self): 442 | for group_id in self.group_ids: 443 | self._test_client_delete_report_impl(self.client, group_id) 444 | 445 | def _test_client_delete_report_impl(self, client, group_id=None): 446 | reports = client.reports.get_reports(group_id) 447 | 448 | # validate that we got reports 449 | self.assert_reports_valid(reports) 450 | self.assertGreater(len(reports), 0) 451 | self.assertEqual(len(reports), self.report_counts[group_id]) 452 | 453 | # validate that the reports are valid 454 | self.assert_reports_valid(reports) 455 | 456 | # add a mock report 457 | report = self.add_mock_report(client, group_id) 458 | 459 | # validate that the number of reports has increased 460 | reports = client.reports.get_reports(group_id) 461 | 462 | # validate that we got reports 463 | self.assert_reports_valid(reports) 464 | self.assertGreater(len(reports), 0) 465 | self.assertEqual(len(reports), 1 + self.report_counts[group_id]) 466 | 
467 | # validate that the reports are valid 468 | self.assert_reports_valid(reports) 469 | 470 | # delete the previously added report 471 | client.reports.delete_report(report.id, group_id) 472 | 473 | # validate that the number of reports is one less 474 | reports = client.reports.get_reports(group_id) 475 | 476 | # validate that we got reports 477 | self.assert_reports_valid(reports) 478 | self.assertGreater(len(reports), 0) 479 | self.assertEqual(len(reports), self.report_counts[group_id]) 480 | 481 | # validate that the reports are valid 482 | self.assert_reports_valid(reports) 483 | 484 | def test_client_rebind_report(self): 485 | for group_id in self.group_ids: 486 | self._test_client_rebind_report_impl(self.client, group_id) 487 | 488 | def _test_client_rebind_report_impl(self, client, group_id=None): 489 | # get the current reports 490 | reports = client.reports.get_reports(group_id) 491 | 492 | # validate that we got reports 493 | self.assert_reports_valid(reports) 494 | self.assertGreater(len(reports), 0) 495 | self.assertEqual(len(reports), self.report_counts[group_id]) 496 | 497 | # validate that the reports are valid 498 | self.assert_reports_valid(reports) 499 | 500 | # rebind the first report to the second report's dataset 501 | client.reports.rebind_report(reports[0].id, reports[1].dataset_id, group_id) 502 | 503 | # get the reports again 504 | reports = client.reports.get_reports(group_id) 505 | 506 | # validate that we got reports 507 | self.assertIsNotNone(reports) 508 | self.assertGreater(len(reports), 0) 509 | self.assertEqual(len(reports), self.report_counts[group_id]) 510 | 511 | # validate that the reports are valid 512 | self.assert_reports_valid(reports) 513 | 514 | # validate that the report has been rebound 515 | self.assertEqual(reports[0].dataset_id, reports[1].dataset_id) 516 | 517 | def test_generate_report_token(self): 518 | for group_id in self.group_ids: 519 | # embedding is not supported for non group workspaces 520 | if group_id 
is not None: 521 | self._test_generate_report_token_impl(self.client, group_id) 522 | 523 | def _test_generate_report_token_impl(self, client, group_id): 524 | # get the current reports 525 | reports = client.reports.get_reports(group_id) 526 | 527 | # validate that we got reports 528 | self.assert_reports_valid(reports) 529 | self.assertGreater(len(reports), 0) 530 | self.assertEqual(len(reports), self.report_counts[group_id]) 531 | 532 | # create the token request with just a view access level 533 | token_request = TokenRequest('view') 534 | 535 | # get the token 536 | token = client.reports.generate_token(reports[0].id, token_request, group_id) 537 | 538 | # validate that we got a token back 539 | self.assertIsNotNone(token) 540 | self.assertIsNotNone(token.token) 541 | self.assertIsNotNone(token.token_id) 542 | self.assertIsNotNone(token.expiration) 543 | 544 | def test_upload_file(self): 545 | for group_id in self.group_ids: 546 | self._test_upload_file_impl(self.client, group_id) 547 | 548 | def _test_upload_file_impl(self, client, group_id): 549 | # upload the file, get back an import object 550 | import_object = client.imports.upload_file('test_report.pbix', 'test_report', None, group_id) 551 | self.assertIsNotNone(import_object) 552 | 553 | # ensure that there is at least one import 554 | imports = client.imports.get_imports(group_id) 555 | self.assertLess(0, len(imports)) 556 | 557 | # search for our most recent import 558 | found = False 559 | for fetched_import in imports: 560 | self.assertIsNotNone(fetched_import) 561 | if fetched_import.id == import_object.id: 562 | found = True 563 | 564 | self.assertTrue(found) 565 | 566 | checks = 0 567 | while import_object.import_state != Import.import_state_succeeded and checks < 3: 568 | time.sleep(1) 569 | # get the import object again to check the import state 570 | import_object = client.imports.get_import(import_object.id, group_id) 571 | self.assertIsNotNone(import_object) 572 | checks = checks + 1 573 
| 574 | self.assertEqual(1, len(import_object.datasets)) 575 | self.assertEqual(1, len(import_object.reports)) 576 | 577 | dataset = client.datasets.get_dataset(import_object.datasets[0].id, group_id) 578 | report = client.reports.get_report(import_object.reports[0].id, group_id) 579 | 580 | self.assertIsNotNone(dataset) 581 | self.assertIsNotNone(report) 582 | 583 | client.reports.delete_report(report.id, group_id) 584 | client.datasets.delete_dataset(dataset.id, group_id) 585 | 586 | def test_groups_count_method(self): 587 | group_count = self.client.groups.count() 588 | self.assertGreater(group_count, 0) 589 | 590 | def test_groups_has_group_method(self): 591 | for group_id in self.group_ids: 592 | self._test_groups_has_group_method(self.client, group_id) 593 | 594 | def _test_groups_has_group_method(self, client, group_id): 595 | self.assertTrue(client.groups.has_group(group_id)) 596 | 597 | def test_groups_get_groups_method(self): 598 | groups = self.client.groups.get_groups() 599 | for group in groups: 600 | self.assertTrue(self.client.groups.has_group(group.id)) 601 | 602 | def test_refresh_dataset_method(self): 603 | for group_id in self.group_ids: 604 | self._test_refresh_dataset_method(self.client, group_id) 605 | 606 | def _test_refresh_dataset_method(self, client, group_id): 607 | dataset = self.add_mock_dataset(client, group_id=group_id) 608 | 609 | client.datasets.refresh_dataset(dataset_id=dataset.id, group_id=group_id) 610 | client.datasets.delete_dataset(dataset_id=dataset.id, group_id=group_id) -------------------------------------------------------------------------------- /pypowerbi/tests/settings.py: -------------------------------------------------------------------------------- 1 | class PowerBITestSettings: 2 | # default testing urls 3 | authority_url = 'https://login.windows.net/common' 4 | resource_url = 'https://analysis.windows.net/powerbi/api' 5 | api_url = 'https://api.powerbi.com' 6 | 7 | # testing credentials 8 | client_id = 
'00000000-0000-0000-0000-000000000000' 9 | username = 'someone@somecompany.com' 10 | password = 'averygoodpassword' 11 | group_ids = [None, '00000000-0000-0000-0000-000000000000'] 12 | -------------------------------------------------------------------------------- /pypowerbi/tests/test_data.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmberryau/pypowerbi/dbee379775fb2889cf3f1dff4267cc91af623e1a/pypowerbi/tests/test_data.xlsx -------------------------------------------------------------------------------- /pypowerbi/tests/test_report.pbix: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cmberryau/pypowerbi/dbee379775fb2889cf3f1dff4267cc91af623e1a/pypowerbi/tests/test_report.pbix -------------------------------------------------------------------------------- /pypowerbi/tests/utils_tests.py: -------------------------------------------------------------------------------- 1 | import json 2 | from unittest import TestCase 3 | 4 | import datetime 5 | 6 | from pypowerbi import utils 7 | from pypowerbi.utils import CredentialsBuilder 8 | 9 | 10 | class UtilsTests(TestCase): 11 | def test_date_from_powerbi_str(self): 12 | now = datetime.datetime.now() 13 | power_bi_date_str = now.strftime(utils._date_fmt_str) 14 | now_converted = utils.date_from_powerbi_str(power_bi_date_str) 15 | self.assertEqual(now, now_converted) 16 | 17 | def test_convert_datetime_fields(self): 18 | 19 | # These are essentially random datetimes 20 | dt1 = datetime.datetime.now() 21 | dt2 = datetime.datetime(2019, 1, 4, 2, 4, 6, 23) 22 | dt3 = datetime.datetime(2017, 3, 7, 1, 12, 12, 23) 23 | 24 | # Convert them into Power BI Formatted Date Strings 25 | dt1_str = dt1.strftime(utils._date_fmt_str) 26 | dt2_str = dt2.strftime(utils._date_fmt_str) 27 | dt3_str = dt3.strftime(utils._date_fmt_str) 28 | 29 | # Build up several "Records" to convert. 
The dict_target dictionaries contain what the data should 30 | # look like after being converted. 31 | 32 | dict1 = { 33 | "col1": "Hello", 34 | "col2": dt1_str, 35 | "col3": dt2_str 36 | } 37 | 38 | dict1_target = dict1.copy() 39 | dict1_target["col2"] = dt1 40 | dict1_target["col3"] = dt2 41 | 42 | dict2 = { 43 | "col1": "World", 44 | "col2": dt3_str, 45 | "col3": None # Make sure it doesn't try to convert None 46 | } 47 | dict2_target = dict2.copy() 48 | dict2_target["col2"] = dt3 49 | 50 | dict3 = { 51 | "col1": "World", 52 | "col2": '', # Make sure it doesn't try to convert empty strings 53 | "col3": dt1_str 54 | } 55 | 56 | dict3_target = dict3.copy() 57 | dict3_target["col3"] = dt1 58 | 59 | sample_list = [dict1, dict2, dict3] 60 | 61 | target_list = [dict1_target, dict2_target, dict3_target] 62 | 63 | converted_list = utils.convert_datetime_fields(sample_list, ["col2", "col3"]) 64 | 65 | for converted, target in zip(converted_list, target_list): 66 | self.assertEqual(converted, target) 67 | -------------------------------------------------------------------------------- /pypowerbi/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: future_fstrings -*- 2 | import datetime 3 | 4 | 5 | """ 6 | This file contains helper and utility functions used elsewhere in the library. 7 | """ 8 | 9 | 10 | # Datetime's come in from PowerBI in the format 2019-03-05T03:09:31.493Z 11 | _date_fmt_str = '%Y-%m-%dT%H:%M:%S.%fZ' 12 | _date_fmt_str2 = '%Y-%m-%dT%H:%M:%SZ' 13 | 14 | 15 | def date_from_powerbi_str(dstr): 16 | """ 17 | Utility function to convert datetime strings from the Power BI service into Python Datetime objects 18 | 19 | :param dstr: A String retrieved from the Power BI Service that's a datetime 20 | :return: A Python datetime object generated from the parameter 21 | """ 22 | if "." 
in dstr: 23 | return datetime.datetime.strptime(dstr, _date_fmt_str) 24 | else: 25 | # Fractional seconds are not zero padded in the API and will not be included at all if 0, thus the second format 26 | return datetime.datetime.strptime(dstr, _date_fmt_str2) 27 | 28 | 29 | def convert_datetime_fields(list_of_dicts, fields_to_convert): 30 | """ 31 | Takes in a list of dictionaries and for each dictionary it converts all fields in fields_to_convert to 32 | datetime objects from Power BI Datetime Strings. This is typically used when retrieving a list of records 33 | from the Power BI Service and you want all date fields to be converted from the date string to python datetimes. 34 | 35 | If the value of the field is None or an empty string, it does nothing for that field. 36 | 37 | :param list_of_dicts: A list of dictionaries 38 | :param fields_to_convert: A list of fields to be converted to datetimes from Power BI Datetime Strings 39 | :return: list of dictionaries with all fields specified in 'fields_to_convert' into python datetime objects 40 | """ 41 | new_list = [] 42 | for rec in list_of_dicts: 43 | # Create a copy so we don't overwrite the original dictionary 44 | new_rec = rec.copy() 45 | new_list.append(new_rec) 46 | for field in fields_to_convert: 47 | if field in new_rec.keys() and new_rec[field] not in [None, '']: 48 | new_rec[field] = date_from_powerbi_str(new_rec[field]) 49 | 50 | return new_list 51 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup(name='pypowerbi', 4 | version='0.26', 5 | description='A python library for Microsoft\'s PowerBI', 6 | url='http://github.com/cmberryau/pypowerbi', 7 | author='Chris Berry', 8 | author_email='chris@chrisberry.com.au', 9 | license='MIT', 10 | packages=['pypowerbi'], 11 | install_requires=[ 12 | 'requests', 13 | 'adal', 14 | 'future-fstrings', 
15 | ], 16 | zip_safe=False) 17 | --------------------------------------------------------------------------------