├── .gitattributes ├── .gitignore ├── LICENSE ├── README.md └── ssas_api.py /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 3 | 4 | # User-specific files 5 | *.suo 6 | *.user 7 | *.userosscache 8 | *.sln.docstates 9 | 10 | # User-specific files (MonoDevelop/Xamarin Studio) 11 | *.userprefs 12 | 13 | # Build results 14 | [Dd]ebug/ 15 | [Dd]ebugPublic/ 16 | [Rr]elease/ 17 | [Rr]eleases/ 18 | x64/ 19 | x86/ 20 | bld/ 21 | [Bb]in/ 22 | [Oo]bj/ 23 | [Ll]og/ 24 | 25 | # Visual Studio 2015 cache/options directory 26 | .vs/ 27 | # Uncomment if you have tasks that create the project's static files in wwwroot 28 | #wwwroot/ 29 | 30 | # MSTest test Results 31 | [Tt]est[Rr]esult*/ 32 | [Bb]uild[Ll]og.* 33 | 34 | # NUNIT 35 | *.VisualState.xml 36 | TestResult.xml 37 | 38 | # Build Results of an ATL Project 39 | [Dd]ebugPS/ 40 | [Rr]eleasePS/ 41 | dlldata.c 42 | 43 | # DNX 44 | project.lock.json 45 | project.fragment.lock.json 46 | artifacts/ 47 | 48 | *_i.c 49 | *_p.c 50 | *_i.h 51 | *.ilk 52 | *.meta 53 | *.obj 54 | *.pch 55 | *.pdb 56 | *.pgc 57 | *.pgd 58 | *.rsp 59 | *.sbr 60 | *.tlb 61 | *.tli 62 | *.tlh 63 | *.tmp 64 | *.tmp_proj 65 | *.log 66 | *.vspscc 67 | *.vssscc 68 | .builds 69 | *.pidb 70 | *.svclog 71 | *.scc 72 | 73 | # Chutzpah Test files 74 | _Chutzpah* 75 | 76 | # Visual C++ cache files 77 | ipch/ 78 | *.aps 79 | *.ncb 80 | *.opendb 81 | *.opensdf 82 | *.sdf 83 | *.cachefile 84 | *.VC.db 85 | *.VC.VC.opendb 86 | 87 | # Visual Studio profiler 88 | *.psess 89 | *.vsp 90 | *.vspx 91 | *.sap 92 | 93 | # TFS 2012 Local Workspace 94 | $tf/ 95 | 96 | 
# Guidance Automation Toolkit 97 | *.gpState 98 | 99 | # ReSharper is a .NET coding add-in 100 | _ReSharper*/ 101 | *.[Rr]e[Ss]harper 102 | *.DotSettings.user 103 | 104 | # JustCode is a .NET coding add-in 105 | .JustCode 106 | 107 | # TeamCity is a build add-in 108 | _TeamCity* 109 | 110 | # DotCover is a Code Coverage Tool 111 | *.dotCover 112 | 113 | # NCrunch 114 | _NCrunch_* 115 | .*crunch*.local.xml 116 | nCrunchTemp_* 117 | 118 | # MightyMoose 119 | *.mm.* 120 | AutoTest.Net/ 121 | 122 | # Web workbench (sass) 123 | .sass-cache/ 124 | 125 | # Installshield output folder 126 | [Ee]xpress/ 127 | 128 | # DocProject is a documentation generator add-in 129 | DocProject/buildhelp/ 130 | DocProject/Help/*.HxT 131 | DocProject/Help/*.HxC 132 | DocProject/Help/*.hhc 133 | DocProject/Help/*.hhk 134 | DocProject/Help/*.hhp 135 | DocProject/Help/Html2 136 | DocProject/Help/html 137 | 138 | # Click-Once directory 139 | publish/ 140 | 141 | # Publish Web Output 142 | *.[Pp]ublish.xml 143 | *.azurePubxml 144 | # TODO: Comment the next line if you want to checkin your web deploy settings 145 | # but database connection strings (with potential passwords) will be unencrypted 146 | #*.pubxml 147 | *.publishproj 148 | 149 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 150 | # checkin your Azure Web App publish settings, but sensitive information contained 151 | # in these scripts will be unencrypted 152 | PublishScripts/ 153 | 154 | # NuGet Packages 155 | *.nupkg 156 | # The packages folder can be ignored because of Package Restore 157 | **/packages/* 158 | # except build/, which is used as an MSBuild target. 
159 | !**/packages/build/ 160 | # Uncomment if necessary however generally it will be regenerated when needed 161 | #!**/packages/repositories.config 162 | # NuGet v3's project.json files produces more ignoreable files 163 | *.nuget.props 164 | *.nuget.targets 165 | 166 | # Microsoft Azure Build Output 167 | csx/ 168 | *.build.csdef 169 | 170 | # Microsoft Azure Emulator 171 | ecf/ 172 | rcf/ 173 | 174 | # Windows Store app package directories and files 175 | AppPackages/ 176 | BundleArtifacts/ 177 | Package.StoreAssociation.xml 178 | _pkginfo.txt 179 | 180 | # Visual Studio cache files 181 | # files ending in .cache can be ignored 182 | *.[Cc]ache 183 | # but keep track of directories ending in .cache 184 | !*.[Cc]ache/ 185 | 186 | # Others 187 | ClientBin/ 188 | ~$* 189 | *~ 190 | *.dbmdl 191 | *.dbproj.schemaview 192 | *.jfm 193 | *.pfx 194 | *.publishsettings 195 | node_modules/ 196 | orleans.codegen.cs 197 | 198 | # Since there are multiple workflows, uncomment next line to ignore bower_components 199 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 200 | #bower_components/ 201 | 202 | # RIA/Silverlight projects 203 | Generated_Code/ 204 | 205 | # Backup & report files from converting an old project file 206 | # to a newer Visual Studio version. 
Backup files are not needed, 207 | # because we have git ;-) 208 | _UpgradeReport_Files/ 209 | Backup*/ 210 | UpgradeLog*.XML 211 | UpgradeLog*.htm 212 | 213 | # SQL Server files 214 | *.mdf 215 | *.ldf 216 | 217 | # Business Intelligence projects 218 | *.rdl.data 219 | *.bim.layout 220 | *.bim_*.settings 221 | 222 | # Microsoft Fakes 223 | FakesAssemblies/ 224 | 225 | # GhostDoc plugin setting file 226 | *.GhostDoc.xml 227 | 228 | # Node.js Tools for Visual Studio 229 | .ntvs_analysis.dat 230 | 231 | # Visual Studio 6 build log 232 | *.plg 233 | 234 | # Visual Studio 6 workspace options file 235 | *.opt 236 | 237 | # Visual Studio LightSwitch build output 238 | **/*.HTMLClient/GeneratedArtifacts 239 | **/*.DesktopClient/GeneratedArtifacts 240 | **/*.DesktopClient/ModelManifest.xml 241 | **/*.Server/GeneratedArtifacts 242 | **/*.Server/ModelManifest.xml 243 | _Pvt_Extensions 244 | 245 | # Paket dependency manager 246 | .paket/paket.exe 247 | paket-files/ 248 | 249 | # FAKE - F# Make 250 | .fake/ 251 | 252 | # JetBrains Rider 253 | .idea/ 254 | *.sln.iml 255 | 256 | # CodeRush 257 | .cr/ 258 | 259 | # Python Tools for Visual Studio (PTVS) 260 | __pycache__/ 261 | *.pyc 262 | 263 | # Byte-compiled / optimized / DLL files 264 | __pycache__/ 265 | *.py[cod] 266 | *$py.class 267 | 268 | # C extensions 269 | *.so 270 | 271 | # Distribution / packaging 272 | .Python 273 | build/ 274 | develop-eggs/ 275 | dist/ 276 | downloads/ 277 | eggs/ 278 | .eggs/ 279 | lib/ 280 | lib64/ 281 | parts/ 282 | sdist/ 283 | var/ 284 | wheels/ 285 | *.egg-info/ 286 | .installed.cfg 287 | *.egg 288 | MANIFEST 289 | 290 | # PyInstaller 291 | # Usually these files are written by a python script from a template 292 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
293 | *.manifest 294 | *.spec 295 | 296 | # Installer logs 297 | pip-log.txt 298 | pip-delete-this-directory.txt 299 | 300 | # Unit test / coverage reports 301 | htmlcov/ 302 | .tox/ 303 | .coverage 304 | .coverage.* 305 | .cache 306 | nosetests.xml 307 | coverage.xml 308 | *.cover 309 | .hypothesis/ 310 | 311 | # Translations 312 | *.mo 313 | *.pot 314 | 315 | # Django stuff: 316 | *.log 317 | .static_storage/ 318 | .media/ 319 | local_settings.py 320 | 321 | # Flask stuff: 322 | instance/ 323 | .webassets-cache 324 | 325 | # Scrapy stuff: 326 | .scrapy 327 | 328 | # Sphinx documentation 329 | docs/_build/ 330 | 331 | # PyBuilder 332 | target/ 333 | 334 | # Jupyter Notebook 335 | .ipynb_checkpoints 336 | 337 | # pyenv 338 | .python-version 339 | 340 | # celery beat schedule file 341 | celerybeat-schedule 342 | 343 | # SageMath parsed files 344 | *.sage.py 345 | 346 | # Environments 347 | .env 348 | .venv 349 | env/ 350 | venv/ 351 | ENV/ 352 | env.bak/ 353 | venv.bak/ 354 | 355 | # Spyder project settings 356 | .spyderproject 357 | .spyproject 358 | 359 | # Rope project settings 360 | .ropeproject 361 | 362 | # mkdocs documentation 363 | /site 364 | 365 | # mypy 366 | .mypy_cache/ 367 | 368 | # VSCode settings 369 | .vscode/ 370 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Yehoshua Dimarsky 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this 
permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # python-ssas 2 | 3 | ## Prerequisites: 4 | - Good working knowledge of: 5 | - Microsoft SQL Server Analysis Services (Tabular models) 6 | - Python (specifically pandas) 7 | - Some knowledge required: 8 | - General Microsoft .Net familiarity 9 | 10 | 11 | ## Motivation: 12 | I’ve been working for some time analyzing data using two unrelated tools – Python (primarily pandas), and DAX in Microsoft’s Tabular models. They are very different – Python is open source, Microsoft’s products are (obviously) not. It was frustrating to not be able to merge them. If I wanted to get data from a DAX data model into a Pandas dataframe, I would typically need to first export it to a file (like CSV) and then read it from there. 13 | 14 | Also, I wanted a way to programmatically "refresh" the data model (called "processing" it) from Python. 
15 | 16 | ## Solution: 17 | Inspired by [@akavalar's great post](https://github.com/akavalar/SSAS-on-a-shoestring), I discovered a nice workaround: 18 | - DAX models (or any Analysis Services model) have several .Net APIs, see [here](https://docs.microsoft.com/en-us/sql/analysis-services/analysis-services-developer-documentation) for the Microsoft documentation 19 | - Also, there is a fantastic Python library called [Pythonnet](https://github.com/pythonnet/pythonnet) that enables near seamless integration between Python and .Net. This is for the mainstream Python, called CPython, and not to be confused with the .Net implementation of Python which is called IronPython. 20 | 21 | Using these ingredients, I created my `ssas_api.py` module with some utilities that I use frequently. Note that this just uses the parts of the APIs that I needed; there is a wealth more available, just dig through the documentation. 22 | 23 | **Note:** I've only been using Azure Analysis Services, so the code is designed for that regarding the URL of the server and authentication string. 24 | 25 | I haven't found anything like this online, so feel free to use it. 26 | 27 | ## Getting The Required .Net Libraries 28 | `ssas_api.py` requires 2 specific DLLs to work: 29 | - Microsoft.AnalysisServices.Tabular.dll 30 | - Microsoft.AnalysisServices.AdomdClient.dll 31 | 32 | These are usually already installed on most users' computers if they are using any of the Microsoft tools that interact with DAX, such as Excel, Power BI Desktop, or SSMS. By default, they are installed in `C:\Windows\Microsoft.NET\assembly\GAC_MSIL`. 
33 | 34 | In cases when they aren't installed, or if the user wants to install them manually, here is a quick and convenient way to do so using PowerShell (requires Admin access) 35 | 36 | ```powershell 37 | # Register NuGet provider if not yet registered 38 | Install-PackageProvider -Name "Nuget" -Force 39 | Register-PackageSource -Name MyNuGet -Location https://www.nuget.org/api/v2 -ProviderName NuGet -Trusted -Force 40 | 41 | # Install the packages 42 | Install-Package Microsoft.AnalysisServices.retail.amd64 43 | Install-Package Microsoft.AnalysisServices.AdomdClient.retail.amd64 44 | ``` 45 | 46 | If installing via NuGet, here is a Python snippet that will help with managing the path where it installs it to (`C:/Program Files/PackageManagement/NuGet/Packages/Microsoft.AnalysisServices`): 47 | 48 | ```python 49 | # dll paths setup, NuGet puts them here 50 | base = "C:/Program Files/PackageManagement/NuGet/Packages/Microsoft.AnalysisServices" 51 | _version = "19.4.0.2" # at time of this writing 52 | AMO_PATH = f"{base}.retail.amd64.{_version}/lib/net45/Microsoft.AnalysisServices.Tabular.dll" 53 | ADOMD_PATH = f"{base}.AdomdClient.retail.amd64.{_version}/lib/net45/Microsoft.AnalysisServices.AdomdClient.dll" 54 | ``` 55 | 56 | ## Quickstart 57 | ```python 58 | In [1]: import ssas_api 59 | ...: 60 | ...: conn = ssas_api.set_conn_string( 61 | ...: server='', 62 | ...: db_name='', 63 | ...: username='', 64 | ...: password='' 65 | ...: ) 66 | 67 | In [2]: dax_string = ''' 68 | ...: //any valid DAX query 69 | ...: EVALUATE 70 | ...: CALCULATETABLE(MyTable) 71 | ...: ''' 72 | 73 | In [3]: df = ssas_api.get_DAX(connection_string=conn, dax_string=dax_string) 74 | ``` 75 | -------------------------------------------------------------------------------- /ssas_api.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Wed Sep 20 16:59:43 2017 4 | 5 | @author: Yehoshua 6 | """ 7 | 8 | import pandas
# -*- coding: utf-8 -*-
"""
Utilities for querying and processing SQL Server / Azure Analysis Services
(Tabular) models from Python, via the .Net AMO and ADOMD client libraries
bridged with pythonnet.

Created on Wed Sep 20 16:59:43 2017

@author: Yehoshua
"""

import pandas as pd
import numpy as np

from functools import wraps
from pathlib import Path
import logging
import warnings

logger = logging.getLogger(__name__)

try:
    import clr  # name for pythonnet
except ImportError:
    msg = """
    Could not import 'clr', install the 'pythonnet' library.
    For conda, `conda install -c pythonnet pythonnet`
    """
    raise ImportError(msg)


def _load_assemblies(amo_path=None, adomd_path=None):
    """
    Loads required assemblies, called after function definition.
    Might need to install SSAS client libraries:
    https://docs.microsoft.com/en-us/azure/analysis-services/analysis-services-data-providers

    Parameters
    ----------
    amo_path : str, default None
        The full path to the DLL file of the assembly for AMO.
        Should end with '**Microsoft.AnalysisServices.Tabular.dll**'
        Example: C:/my/path/to/Microsoft.AnalysisServices.Tabular.dll
        If None, will use the default location on Windows.
    adomd_path : str, default None
        The full path to the DLL file of the assembly for ADOMD.
        Should end with '**Microsoft.AnalysisServices.AdomdClient.dll**'
        Example: C:/my/path/to/Microsoft.AnalysisServices.AdomdClient.dll
        If None, will use the default location on Windows.
    """
    # Default location of the client DLLs: the Windows Global Assembly Cache
    root = Path(r"C:\Windows\Microsoft.NET\assembly\GAC_MSIL")
    # get latest version of libraries if multiple libraries are installed
    # (the GAC stores one versioned subdirectory per installed version,
    # and their names sort so that max() picks the newest)
    if amo_path is None:
        amo_path = str(
            max((root / "Microsoft.AnalysisServices.Tabular").iterdir())
            / "Microsoft.AnalysisServices.Tabular.dll"
        )
    if adomd_path is None:
        adomd_path = str(
            max((root / "Microsoft.AnalysisServices.AdomdClient").iterdir())
            / "Microsoft.AnalysisServices.AdomdClient.dll"
        )

    # load .Net assemblies
    logger.info("Loading .Net assemblies...")
    clr.AddReference("System")
    clr.AddReference("System.Data")
    clr.AddReference(amo_path)
    clr.AddReference(adomd_path)

    # .Net namespaces can only be imported after their assemblies are loaded;
    # publish them as module globals for the rest of this module to use
    global System, DataTable, AMO, ADOMD

    import System
    from System.Data import DataTable
    import Microsoft.AnalysisServices.Tabular as AMO
    import Microsoft.AnalysisServices.AdomdClient as ADOMD

    logger.info("Successfully loaded these .Net assemblies: ")
    for a in clr.ListAssemblies(True):
        logger.info(a.split(",")[0])


def _assert_dotnet_loaded(func):
    """
    Wrapper to make sure that required .NET assemblies have been loaded and imported.
    Can pass the keyword arguments 'amo_path' and 'adomd_path' to any annotated function,
    it will use them in the `_load_assemblies` function.

    Example:
    .. code-block:: python

        import ssas_api
        conn = ssas_api.set_conn_string(
            's', 'd', 'u', 'p',
            amo_path='C:/path/number/one',
            adomd_path='C:/path/number/two'
        )
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        # the DLL-path overrides are consumed here, never passed on to func
        amo_path = kwargs.pop("amo_path", None)
        adomd_path = kwargs.pop("adomd_path", None)
        try:
            # DataTable is only defined (as a global) once _load_assemblies ran
            type(DataTable)
        except NameError:
            # .NET assemblies not loaded/imported
            logger.warning(".Net assemblies not loaded and imported, doing so now...")
            _load_assemblies(amo_path=amo_path, adomd_path=adomd_path)
        return func(*args, **kwargs)
    return wrapper


@_assert_dotnet_loaded
def set_conn_string(server, db_name, username, password):
    """
    Sets connection string to SSAS database,
    in this case designed for Azure Analysis Services
    """
    conn_string = (
        f"Provider=MSOLAP;Data Source={server};Initial Catalog={db_name};"
        f"User ID={username};Password={password};"
        "Persist Security Info=True;Impersonation Level=Impersonate"
    )
    return conn_string


@_assert_dotnet_loaded
def get_DAX(connection_string, dax_string):
    """
    Executes DAX query and returns the results as a pandas DataFrame

    Parameters
    ---------------
    connection_string : string
        Valid SSAS connection string, use the set_conn_string() method to set
    dax_string : string
        Valid DAX query, beginning with EVALUATE or VAR or DEFINE

    Returns
    ----------------
    pandas DataFrame with the results
    """
    table = _get_DAX(connection_string, dax_string)
    df = _parse_DAX_result(table)
    return df


def _get_DAX(connection_string, dax_string) -> "DataTable":
    """Runs the DAX query through ADOMD and returns the raw .Net DataTable."""
    dataadapter = ADOMD.AdomdDataAdapter(dax_string, connection_string)
    table = DataTable()
    logger.info("Getting DAX query...")
    dataadapter.Fill(table)
    logger.info("DAX query successfully retrieved")
    return table


def _parse_DAX_result(table: "DataTable") -> pd.DataFrame:
    """
    Converts a .Net DataTable to a pandas DataFrame: nulls become NaN,
    System.DateTime columns become pandas datetimes, and numeric/string
    columns are cast to the matching pandas dtypes.
    """
    cols = list(table.Columns.List)
    # much better performance to just access data by position instead of name
    # and then add column names afterwards
    rows = [[table.Rows[r][c] for c in cols] for r in range(table.Rows.Count)]

    df = pd.DataFrame.from_records(rows, columns=[c.ColumnName for c in cols])

    # replace System.DBNull with NaN
    # df.replace({System.DBNull: np.nan}) doesn't work for some reason.
    # np.nan is used (the np.NaN alias was removed in NumPy 2.0);
    # DataFrame.map superseded the deprecated applymap in pandas 2.1,
    # fall back to applymap on older pandas versions
    elementwise = getattr(df, "map", df.applymap)
    df = elementwise(lambda x: np.nan if isinstance(x, System.DBNull) else x)

    # convert datetimes
    dt_types = [c.ColumnName for c in cols if c.DataType.FullName == "System.DateTime"]
    for dtt in dt_types:
        # if all nulls, then pd.to_datetime will fail
        if not df.loc[:, dtt].isna().all():
            # 's' is the sortable (ISO 8601) format:
            # https://docs.microsoft.com/en-us/dotnet/standard/base-types/standard-date-and-time-format-strings#Sortable
            # guard nulls: the NaN placeholders have no .ToString method,
            # and pd.to_datetime maps NaN to NaT
            ser = df.loc[:, dtt].map(
                lambda x: np.nan if pd.isna(x) else x.ToString('s')
            )
            df.loc[:, dtt] = pd.to_datetime(ser)

    # convert other types; anything unknown stays a generic object column
    types_map = {"System.Int64": int, "System.Double": float, "System.String": str}
    col_types = {c.ColumnName: types_map.get(c.DataType.FullName, "object") for c in cols}

    # handle NaNs (which are floats, as of pandas v.0.25.3) in int columns
    col_types_ints = {k for k, v in col_types.items() if v is int}
    has_nan = df.isna().any(axis=0)
    col_types.update({k: float for k in set(has_nan[has_nan].index) & col_types_ints})

    # convert
    df = df.astype(col_types)

    return df


@_assert_dotnet_loaded
def process_database(connection_string, refresh_type, db_name):
    """Processes (refreshes) a whole SSAS database model. See `process_model`."""
    process_model(
        connection_string=connection_string,
        item_type="model",
        refresh_type=refresh_type,
        db_name=db_name,
    )


@_assert_dotnet_loaded
def process_table(connection_string, table_name, refresh_type, db_name):
    """Processes (refreshes) a single table of an SSAS model. See `process_model`."""
    process_model(
        connection_string=connection_string,
        item_type="table",
        item=table_name,
        refresh_type=refresh_type,
        db_name=db_name,
    )


@_assert_dotnet_loaded
def process_model(connection_string, db_name, refresh_type="full", item_type="model", item=None):
    """
    Processes SSAS data model to get new data from underlying source.

    Parameters
    -------------
    connection_string : string
        Valid SSAS connection string, use the set_conn_string() method to set
    db_name : string
        The data model on the SSAS server to process
    refresh_type : string, default `full`
        Type of refresh to process. Currently only supports `full`.
    item_type : string, choice of {'model','table'}, default 'model'
    item : string, optional.
        The name of the item.
        Only needed when item_type is 'table', to specify the table name

    Raises
    -------------
    ValueError
        If `item_type` or `refresh_type` is unsupported, or if `item_type`
        is 'table' but no `item` (table name) was supplied.
    """
    # explicit raises instead of `assert`, which is stripped under `python -O`
    if item_type.lower() not in ("table", "model"):
        raise ValueError(f"Invalid item type: {item_type}")
    if item_type.lower() == "table" and not item:
        raise ValueError("If item_type is table, must supply an item (a table name) to process")

    # Dict of refresh types; validate before opening a server connection
    refresh_dict = {"full": AMO.RefreshType.Full}
    try:
        refresh = refresh_dict[refresh_type]
    except KeyError:
        raise ValueError(f"Unsupported refresh type: {refresh_type}") from None

    # connect to the AS instance from Python
    AMOServer = AMO.Server()
    logger.info("Connecting to database...")
    AMOServer.Connect(connection_string)

    try:
        # process
        db = AMOServer.Databases[db_name]

        if item_type.lower() == "table":
            table = db.Model.Tables.Find(item)
            table.RequestRefresh(refresh)
        else:
            db.Model.RequestRefresh(refresh)

        op_result = db.Model.SaveChanges()
        if op_result.Impact.IsEmpty:
            logger.info("No objects affected by the refresh")
    finally:
        # always disconnect, even if the refresh raised,
        # so the server connection is not leaked
        logger.info("Disconnecting from Database...")
        AMOServer.Disconnect()