├── VERSION ├── commands ├── __init__.py ├── flusher.py ├── exporter.py └── migrator.py ├── requirements.txt ├── pyproject.toml ├── config ├── flusher.config.json └── migration.config.json ├── metabasepy ├── __init__.py ├── table_parser.py └── client.py ├── scripts └── release.sh ├── .bumpversion.cfg ├── README.md ├── LICENSE ├── .github └── workflows │ └── python-publish.yml ├── setup.py ├── .gitignore └── docs ├── commands.md └── guide.md /VERSION: -------------------------------------------------------------------------------- 1 | 1.12.0-dev0 2 | -------------------------------------------------------------------------------- /commands/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | slugify 2 | requests -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=42"] 3 | build-backend = "setuptools.build_meta" -------------------------------------------------------------------------------- /config/flusher.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Metabase A", 3 | "username": "john.doe@email.com", 4 | "password": "******", 5 | "base_url": "http://localhost:3000" 6 | } -------------------------------------------------------------------------------- /metabasepy/__init__.py: -------------------------------------------------------------------------------- 1 | from metabasepy.client import ( 2 | Client, 3 | AuthorizationFailedException, 4 | RequestException 5 | ) 6 | 7 | from metabasepy.table_parser import ( 8 | MetabaseTableParser, 9 | MetabaseTable, 10 | MetabaseResultInvalidException 11 | ) 12 | -------------------------------------------------------------------------------- /scripts/release.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | : ${1?"Usage: $0 release_version (ex: 1.9 or 2.4 etc) "} 4 | 5 | cd "$(dirname "$0")" 6 | 7 | scripts_dir=`pwd` 8 | project_dir="$(dirname "$scripts_dir")" 9 | 10 | echo $1 > "$project_dir"/VERSION 11 | git tag -a "$1" 12 | git push --tag 13 | 14 | cd "$project_dir" 15 | 16 | rm -rf dist/* 17 | python setup.py bdist_wheel --universal 18 | twine check dist/* 19 | python -m twine upload dist/* 20 | 21 | git commit -am "new release $1" 22 | git push origin master 23 | -------------------------------------------------------------------------------- /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 1.12.0-dev0 3 | commit = True 4 | tag = False 5 | parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-(?P[a-z]+)(?P\d+))? 6 | serialize = 7 | {major}.{minor}.{patch}-{release}{build} 8 | {major}.{minor}.{patch} 9 | 10 | [bumpversion:part:release] 11 | optional_value = prod 12 | first_value = dev 13 | values = 14 | dev 15 | prod 16 | 17 | [bumpversion:part:build] 18 | 19 | [bumpversion:file:VERSION] 20 | 21 | [bumpversion:file:setup.py] 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # metabasepy: Python wrapper for metabase REST Api 3 | [![](https://img.shields.io/pypi/v/metabasepy.svg?maxAge=3600)](https://pypi.org/project/metabasepy/) 4 | [![Coverage Status](https://coveralls.io/repos/github/mertsalik/metabasepy/badge.svg?branch=master)](https://coveralls.io/github/mertsalik/metabasepy?branch=master) 5 | 6 | 7 | ## Installing 8 | 9 | Install metabasepy by running: 10 | 11 | ```shell 12 | pip install metabasepy 13 | ``` 14 | 15 | ## Documentation 16 | 17 | - [User Guide][] 18 | - [Commands Tutorial][] 19 | 20 | [User Guide]: ./docs/guide.md 21 | [Commands Tutorial]: ./docs/commands.md 22 | 23 | -------------------------------------------------------------------------------- /config/migration.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "source": { 3 | "name": "Client A", 4 | "username": "john.doe@email.com", 5 | "password": "*******", 6 | "base_url": "https://localhost:3001" 7 | }, 8 | "destination": { 9 | "name": "Client B", 10 | "username": "john.doe@email.com", 11 | "password": "************", 12 | "base_url": "http://localhost:3000" 13 | }, 14 | "mappings": { 15 | "databases": [ 16 | { 17 | "source": "books_db_a", 18 | "destination": "books_db_b" 19 | }, 20 | { 21 | "source": "sales_db_a", 22 | "destination": "sales_db_b" 23 | } 24 | ] 25 | } 26 | } -------------------------------------------------------------------------------- /commands/flusher.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | import sys 4 | import os 5 | 6 | sys.path.insert(1, os.path.join(sys.path[0], '..')) 7 | 8 | from metabasepy.client import Client 9 | 10 | if __name__ == '__main__': 11 | parser = argparse.ArgumentParser( 12 | prog="flush_queries", 13 | usage="flusher -c /your/config/file/path.json", 14 | description="Delete cards (queries) from metabase server." 15 | ) 16 | parser.add_argument('--configuration_file', '-c', 17 | dest='conf_file_path', 18 | type=str, 19 | required=True, 20 | help='configuration file path for credentials', 21 | ) 22 | args = parser.parse_args() 23 | 24 | credentials = {} 25 | with open(args.conf_file_path, 'r') as config_file: 26 | credentials = json.load(config_file) 27 | 28 | client = Client(**credentials) 29 | client.authenticate() 30 | 31 | # delete cards 32 | for card in client.cards.get(): 33 | client.cards.delete(card_id=card['id']) 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2019 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | 24 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | deploy: 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | - name: Set up Python 26 | uses: actions/setup-python@v3 27 | with: 28 | python-version: '3.x' 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install build 33 | - name: Build package 34 | run: python -m build 35 | - name: Publish package 36 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 37 | with: 38 | user: __token__ 39 | password: ${{ secrets.PYPI_API_TOKEN }} 40 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from setuptools import setup, find_packages 4 | 5 | with open('README.md') as f: 6 | README = f.read() 7 | 8 | with open('LICENSE') as f: 9 | LICENSE = f.read() 10 | 11 | # allow setup.py to be run from any path 12 | os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) 13 | 14 | install_requires = ['requests >= 1.5.5', 'slugify'] 15 | 16 | setup( 17 | name='metabasepy', 18 | version='1.12.0-dev0', 19 | use_scm_version=True, 20 | setup_requires=['setuptools_scm', 'future'], 21 | description='Python wrapper for Metabase REST API', 22 | long_description=README, 23 | long_description_content_type='text/markdown', 24 | author='mertsalik', 25 | author_email='salik@itu.edu.tr', 26 | url='https://github.com/mertsalik/metabasepy', 27 | license=LICENSE, 28 | packages=find_packages(exclude=['tests', 'docs']), 29 | classifiers=[ 30 | "Intended Audience :: Developers", 31 | "License :: OSI Approved :: MIT License", 32 | "Operating System :: OS Independent", 33 | "Programming Language :: Python", 34 | "Programming Language :: Python :: Implementation :: PyPy", 35 | "Topic :: Software Development :: Libraries :: Python Modules", 36 | ], 37 | ) 38 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask instance folder 57 | instance/ 58 | 59 | # Scrapy stuff: 60 | .scrapy 61 | 62 | # Sphinx documentation 63 | docs/_build/ 64 | 65 | # PyBuilder 66 | target/ 67 | 68 | # IPython Notebook 69 | .ipynb_checkpoints 70 | 71 | # pyenv 72 | .python-version 73 | 74 | # celery beat schedule file 75 | celerybeat-schedule 76 | 77 | # dotenv 78 | .env 79 | 80 | # Spyder project settings 81 | .spyderproject 82 | 83 | # ide 84 | # IntelliJ project files 85 | .idea 86 | *.iml 87 | out 88 | gen 89 | 90 | # metabase instance 91 | .metabase/ 92 | *.db.mv.db 93 | *.db.trace.db 94 | 95 | # virtual environment 96 | venv/ 97 | .venv/ 98 | -------------------------------------------------------------------------------- /metabasepy/table_parser.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | __author__ = "mertsalik" 4 | __copyright__ = "Copyright 2018" 5 | __credits__ = ["mertsalik", ""] 6 | __license__ = "Private" 7 | __email__ = "" 8 | 9 | 10 | class MetabaseResultInvalidException(Exception): 11 | pass 12 | 13 | 14 | class MetabaseTable(object): 15 | def __init__(self): 16 | self.status = None 17 | self.native_query = None 18 | self.columns = [] 19 | self.rows = [] 20 | self.database = None 21 | 22 | @property 23 | def column_count(self): 24 | return len(self.columns) 25 | 26 | @property 27 | def row_count(self): 28 | return len(self.rows) 29 | 30 | 31 | class MetabaseTableParser(object): 32 | @staticmethod 33 | def validate_metabase_response(metabase_response): 34 | response_requirements = { 35 | 'json_query', 36 | 'data' 37 | } 38 | if not response_requirements <= set(metabase_response): 39 | raise MetabaseResultInvalidException() 40 | 41 | json_query_requirements = { 42 | 'database' 43 | } 44 | if not json_query_requirements <= set(metabase_response['json_query']): 45 | raise MetabaseResultInvalidException() 46 | 47 | data_requirements = { 48 | 'cols', 49 | 'rows', 50 | 'native_form' 51 | } 52 | if not data_requirements <= set(metabase_response['data']): 53 | raise MetabaseResultInvalidException() 54 | 55 | native_form_requirements = { 56 | 'query' 57 | } 58 | if not native_form_requirements <= set( 59 | metabase_response['data']['native_form']): 60 | raise MetabaseResultInvalidException() 61 | 62 | @staticmethod 63 | def get_table(metabase_response): 64 | MetabaseTableParser.validate_metabase_response(metabase_response) 65 | 66 | table = MetabaseTable() 67 | table.rows = metabase_response['data']['rows'] 68 | 69 | table.cols = metabase_response['data']['cols'] 70 | table.native_query = metabase_response['data']['native_form']['query'] 71 | table.status = metabase_response['status'] 72 | table.database = metabase_response['json_query']['database'] 73 | 74 | return table 75 | -------------------------------------------------------------------------------- /docs/commands.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Commands 4 | 5 | ## exporter: Download Cards (sql queries) into local machine 6 | 7 | Simply create a configuration file for example: `query_export_config.json` 8 | 9 | Add your credentials of metabase account into that file: 10 | 11 | [ 12 | { 13 | "name": "Metabase Account", 14 | "username": "john.doe@domain.com", 15 | "password": "******", 16 | "base_url": "http://localhost:3000" 17 | }, 18 | { 19 | "name": "Another Metabase Account", 20 | "username": "john.doe@domain.com", 21 | "password": "-------", 22 | "base_url": "http://your-remote-metabase-url.com" 23 | } 24 | ] 25 | 26 | ```bash 27 | exporter -c /your/config/file/path.json -d /export_directory 28 | ``` 29 | 30 | or 31 | 32 | ```bash 33 | cd metabasepy 34 | 35 | python commands/exporter.py -c /your/config/file/path.json -d /export_directory 36 | ``` 37 | 38 | Your sql queries will be saved into `/export_directory` 39 | 40 | ## flusher: Delete all cards (sql queries) defined on metabase server 41 | 42 | Create a configuration file for example: `flusher_config.json` 43 | 44 | Add your credentials of metabase account into that file: 45 | 46 | { 47 | "name": "Metabase A", 48 | "username": "john.doe@email.com", 49 | "password": "******", 50 | "base_url": "http://localhost:3000" 51 | } 52 | 53 | Than run: 54 | 55 | ```bash 56 | flusher -c /your/config/file/path.json 57 | ``` 58 | 59 | or 60 | 61 | ``` 62 | cd metabasepy 63 | 64 | python metabasepy/flusher.py -c /your/config/file/path.json 65 | ``` 66 | 67 | 68 | ## migrator: Copy cards (sql queries) from one server to another 69 | 70 | Create a configuration file for example: `migrator_config.json` 71 | 72 | Add source & destination server credentials with specified database mapping into this configuration: 73 | 74 | { 75 | "source": { 76 | "name": "Metabase Server A", 77 | "username": "john.doe@email.com", 78 | "password": "*******", 79 | "base_url": "https://localhost:3001" 80 | }, 81 | "destination": { 82 | "name": "Metabase Server B", 83 | "username": "john.doe@email.com", 84 | "password": "************", 85 | "base_url": "http://localhost:4000" 86 | }, 87 | "mappings": { 88 | "databases": [ 89 | { 90 | "source": "books_db_a", 91 | "destination": "books_db_b" 92 | }, 93 | { 94 | "source": "sales_db_a", 95 | "destination": "sales_db_b" 96 | } 97 | ] 98 | } 99 | } 100 | 101 | than simpy run: 102 | 103 | ```bash 104 | migrator -c /your/config/file/path.json 105 | ``` 106 | 107 | or 108 | 109 | ```bash 110 | cd metabasepy 111 | 112 | python commands/migrator.py -c /your/config/file/path.json 113 | ``` 114 | 115 | program will be trying to create every card from source to destination metabase server. 116 | -------------------------------------------------------------------------------- /docs/guide.md: -------------------------------------------------------------------------------- 1 | # Guide 2 | 3 | ## Client 4 | 5 | first instantiate a client object 6 | 7 | ```python 8 | from metabasepy import Client 9 | 10 | cli = Client(username="XXX", password="****", base_url="https://your-remote-metabase-url.com") 11 | 12 | ``` 13 | 14 | than you can simply authenticate with 15 | 16 | ```python 17 | cli.authenticate() 18 | ``` 19 | ### Add Card to server 20 | 21 | Save new card with custom sql query: 22 | 23 | ```python 24 | cli.cards.post(database_id=1, 25 | name="Card Name (eg: Available Stocks)", 26 | query="select * from your_db.table_name;") 27 | 28 | ``` 29 | 30 | ### List available Databases 31 | 32 | ```python 33 | all_dbs = cli.databases.get() 34 | print(all_dbs.__dict__) 35 | ``` 36 | 37 | ### Create new Collection 38 | 39 | ```python 40 | cli.collections.post(name="Finance Reports", color="#ff0000") 41 | ``` 42 | 43 | ### List all collections 44 | 45 | ```python 46 | cli.collections.get() 47 | ``` 48 | 49 | You can also query collection by its id: 50 | 51 | ```python 52 | cli.collections.get(collection_id=1) 53 | ``` 54 | 55 | ### Query Dataset ( Live Query ) 56 | ```python 57 | from metabasepy import Client, MetabaseTableParser 58 | 59 | cli = Client(username="XXX", password="****", base_url="https://your-remote-metabase-url.com") 60 | query_response = cli.dataset.post(database_id=1, query="select * from customers;") 61 | 62 | data_table = MetabaseTableParser.get_table(metabase_response=query_response) 63 | print(data_table.__dict__) 64 | ``` 65 | 66 | ### Query Data from Card ( Pre-Saved Query ) 67 | 68 | ```python 69 | from metabasepy import Client, MetabaseTableParser 70 | 71 | cli = Client(username="XXX", password="****", base_url="https://your-remote-metabase-url.com") 72 | query_response = cli.cards.query(card_id="1") 73 | 74 | data_table = MetabaseTableParser.get_table(metabase_response=query_response) 75 | print(data_table.__dict__) 76 | ``` 77 | 78 | Now you have table of query results (note that it will only list the first 2000 rows): 79 | 80 | { 81 | 'status': 'completed', 82 | 'native_query': 'select \n u.first_name as "First Name", 83 | \n u.last_name as "Last Name", 84 | \n t.amount as "Amount", 85 | \n t.description as "Description", 86 | \n t.created_date as "Transaction Date" 87 | from users_user u 88 | inner join transactions_transaction t 89 | on 90 | t.user_id = u.id 91 | where t.channel_id = 4; 92 | ', 93 | 'columns': [ 94 | 'First Name', 95 | 'Last Name', 96 | 'Amount', 97 | 'Description', 98 | 'Transaction Date' 99 | ], 100 | 'rows': [...], 101 | 'database': 4 102 | } 103 | 104 | Than you can loop through rows & columns 105 | 106 | ```python 107 | for heading in data_table.columns: 108 | print(heading) 109 | ``` 110 | 111 | ### Export DataSet Result ( Download The Results of Live Query ) 112 | 113 | 114 | ```python 115 | from metabasepy import Client, MetabaseTableParser 116 | 117 | cli = Client(username="XXX", password="****", base_url="https://your-remote-metabase-url.com") 118 | cli.dataset.export(database_id=1, query="select * from customers;", export_format="xlsx") 119 | ``` 120 | Sample call: 121 | 122 | > In[8]: cli.dataset.export(database_id=2, query="select * from customers limit 10;", export\_format="csv") 123 | 124 | > Out[8]: '/Users/john\_doe/development/metabasepy/query_result_2020-10-30T10:55:30.663Z.csv' 125 | 126 | 127 | ### Export Card ( Pre-Saved Query ) to Pandas 128 | 129 | ```python 130 | from metabasepy import Client, MetabaseTableParser 131 | import pandas as pd 132 | 133 | cli = Client(username="XXX", password="****", base_url="https://your-remote-metabase-url.com") 134 | json_result = cli.cards.download(card_id='123', format='json') 135 | 136 | df = pd.DataFrame(json_result) 137 | df.head() 138 | -------------------------------------------------------------------------------- /commands/exporter.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | import json 4 | from urllib.parse import urlparse 5 | from slugify import slugify 6 | import logging 7 | 8 | from metabasepy.client import Client, AuthorizationFailedException 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | 13 | def create_dir(dirname): 14 | try: 15 | os.mkdir(dirname) 16 | except FileExistsError: 17 | pass 18 | 19 | 20 | def download_cards(username, password, base_url, destination_directory, 21 | **kwargs): 22 | cli = Client(username=username, password=password, base_url=base_url) 23 | cli.authenticate() 24 | 25 | create_dir(destination_directory) 26 | 27 | all_collections = cli.collections.get() 28 | if not all_collections: 29 | # save all cards for one default collection 30 | default_collection_path = os.path.join(destination_directory, "default") 31 | create_dir(default_collection_path) 32 | for card_info in cli.cards.get(): 33 | card_name = slugify(card_info.get('name', "Question")) 34 | try: 35 | sql_query = card_info['dataset_query']['native']['query'] 36 | except KeyError as ke: 37 | # Probably this is not a native query, skip this 38 | logger.error(ke) 39 | continue 40 | 41 | sql_save_path = os.path.join(default_collection_path, 42 | "{}.sql".format(card_name)) 43 | with open(sql_save_path, 'w') as f: 44 | f.write(sql_query) 45 | else: 46 | for collection_data in all_collections: 47 | collection_directory = os.path.join(destination_directory, 48 | collection_data.get('name')) 49 | create_dir(collection_directory) 50 | 51 | for card_info in cli.cards.get_by_collection( 52 | collection_data.get('slug')): 53 | card_name = slugify(card_info.get('name', "Question")) 54 | try: 55 | sql_query = card_info['dataset_query']['native']['query'] 56 | except KeyError as ke: 57 | # Probably this is not a native query, skip this 58 | logger.error(ke) 59 | continue 60 | 61 | sql_save_path = os.path.join(collection_directory, 62 | "{}.sql".format(card_name)) 63 | with open(sql_save_path, 'w') as f: 64 | f.write(sql_query) 65 | 66 | 67 | if __name__ == '__main__': 68 | current_directory_path = os.getcwd() 69 | default_export_path = os.path.join(current_directory_path, 70 | "metabase_export") 71 | parser = argparse.ArgumentParser( 72 | prog="metabase_query_exporter", 73 | usage="exporter -c /your/config/file/path.json", 74 | description="Download and save metabase Cards into folders." 75 | ) 76 | parser.add_argument('--download_path', '-d', 77 | dest='download_path', 78 | default=default_export_path, 79 | type=str, 80 | help='sql path to save sql-query files', 81 | ) 82 | parser.add_argument('--configuration_file', '-c', 83 | dest='conf_file_path', 84 | type=str, 85 | required=True, 86 | help='configuration file path for credentials', 87 | ) 88 | 89 | args = parser.parse_args() 90 | 91 | credentials = [] 92 | with open(args.conf_file_path, 'r') as config_file: 93 | credentials = json.load(config_file) 94 | if type(credentials) is not list: 95 | raise ValueError("Credentials must be list of dictionary!") 96 | 97 | for credential_info in credentials: 98 | if "username" not in credential_info or "password" not in credential_info or "base_url" not in credential_info: 99 | raise ValueError( 100 | "Invalid configuration. Credential object must include " 101 | "'username', 'password' and 'base_url' values ") 102 | 103 | for credential_info in credentials: 104 | metabase_uri = urlparse(credential_info.get('base_url')) 105 | destination_directory = os.path.join(args.download_path, 106 | metabase_uri.netloc) 107 | create_dir(destination_directory) 108 | try: 109 | download_cards(destination_directory=destination_directory, 110 | **credential_info) 111 | except AuthorizationFailedException as afex: 112 | logger.error("Authentication failed for {} -> {}".format( 113 | credential_info.get('username'), 114 | credential_info.get('base_url'))) 115 | logger.error("Skipping {}".format(credential_info.get('username'))) 116 | -------------------------------------------------------------------------------- /commands/migrator.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | import sys 4 | import os 5 | import logging 6 | 7 | sys.path.insert(1, os.path.join(sys.path[0], '..')) 8 | 9 | from metabasepy import Client, RequestException 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | class ConfigurationException(Exception): 15 | def __init__(self, msg=None, *args, **kwargs): 16 | Exception.__init__(self, msg, *args, **kwargs) 17 | 18 | 19 | class InvalidCardException(Exception): 20 | def __init__(self, msg=None, *args, **kwargs): 21 | Exception.__init__(self, msg, *args, **kwargs) 22 | 23 | 24 | class CollectionException(Exception): 25 | def __init__(self, msg=None, *args, **kwargs): 26 | Exception.__init__(self, msg, *args, **kwargs) 27 | 28 | 29 | def migrate(source_client, destination_client, database_mappings): 30 | created_card_ids = [] 31 | all_collections = source_client.collections.get() 32 | 33 | # import queries that placed in collections 34 | for collection_data in all_collections: 35 | 36 | # create collection 37 | collection_id = create_collection(collection_data, destination_client) 38 | 39 | for card_info in source_client.cards.get_by_collection( 40 | collection_data.get('slug')): 41 | create_card(card_info, collection_id) 42 | created_card_ids.append(card_info['id']) 43 | 44 | for card_info in source_client.cards.get(): 45 | if card_info.get('id') not in created_card_ids: 46 | create_card(card_info) 47 | created_card_ids.append(card_info['id']) 48 | 49 | 50 | def create_card(card_info, collection_id=None): 51 | card_name = card_info.get('name', "Question") 52 | try: 53 | dataset_query = card_info.get('dataset_query', None) 54 | if not dataset_query: 55 | raise InvalidCardException( 56 | msg="dataset_query does not exists") 57 | native = dataset_query.get('native', None) 58 | if not native: 59 | raise InvalidCardException( 60 | msg="dataset_query->native does not exists") 61 | sql_query = native.get('query', None) 62 | if not sql_query: 63 | raise InvalidCardException( 64 | msg="dataset_query->native->query does not exists") 65 | template_tags = native.get('template_tags', None) 66 | destination_db_id = database_mappings.get( 67 | card_info['database_id'], None) 68 | destination_client.cards.post(database_id=destination_db_id, 69 | name=card_name, 70 | query=sql_query, 71 | template_tags=template_tags, 72 | collection_id=collection_id) 73 | except InvalidCardException as icex: 74 | logger.info(icex) 75 | except KeyError as ke: 76 | # Probably this is not a native query, skip this 77 | 78 | logger.error(ke) 79 | logger.error("skipping {}.".format(card_name)) 80 | except Exception as any_ex: 81 | logger.error(any_ex) 82 | 83 | 84 | def create_collection(collection_data, destination_client): 85 | collection_id = None 86 | try: 87 | collection_response = destination_client.collections.post( 88 | **collection_data) 89 | collection_id = collection_response.get('id') 90 | except RequestException as rex: 91 | if "already exists" in rex.message: 92 | dest_collections = destination_client.collections.get() 93 | for collection in dest_collections: 94 | if collection["name"] == collection_data["name"]: 95 | collection_id = collection['id'] 96 | break 97 | if not collection_id: 98 | raise CollectionException("Collections cant be created!") 99 | return collection_id 100 | 101 | 102 | def get_database_mappings(source_client, destination_client, 103 | migration_config): 104 | mapping_conf = migration_config.get('mappings') 105 | database_mappings = mapping_conf.get('databases') # must be a list of dict 106 | directions = {} 107 | for index, mapping in enumerate(database_mappings): 108 | # there is no get by name call for databases, yet. 109 | source_db_id = None 110 | destination_db_id = None 111 | 112 | source_databases = source_client.databases.get() 113 | for _db in source_databases: 114 | if _db["name"] == mapping["source"]: 115 | source_db_id = _db.get('id') 116 | break 117 | if not source_db_id: 118 | raise ConfigurationException( 119 | msg="{} not found in source databases".format( 120 | mapping["source"])) 121 | destination_databases = destination_client.databases.get() 122 | for _db in destination_databases: 123 | if _db["name"] == mapping["destination"]: 124 | destination_db_id = _db.get('id') 125 | break 126 | if not destination_db_id: 127 | raise ConfigurationException( 128 | msg="{} not found in destination databases".format( 129 | mapping["source"])) 130 | 131 | directions.update({ 132 | source_db_id: destination_db_id 133 | }) 134 | return directions 135 | 136 | 137 | if __name__ == '__main__': 138 | parser = argparse.ArgumentParser( 139 | prog="metabase_query_migrator", 140 | usage="migrator -c /your/config/file/path.json", 141 | description="Copy / move sql queries from one " 142 | "metabase server to another" 143 | ) 144 | 145 | parser.add_argument('--configuration', '-c', 146 | dest='configuration_file_path', 147 | type=str, 148 | required=True, 149 | help='configuration file path for credentials of ' 150 | 'destination & source metabase servers' 151 | ) 152 | args = parser.parse_args() 153 | 154 | credentials = [] 155 | with open(args.configuration_file_path, 'r') as config_file: 156 | configuration = json.load(config_file) 157 | 158 | source = configuration.get('source') 159 | destination = configuration.get('destination') 160 | 161 | source_client = Client(**source) 162 | destination_client = Client(**destination) 163 | 164 | source_client.authenticate() 165 | destination_client.authenticate() 166 | 167 | # validate mappings 168 | database_mappings = get_database_mappings( 169 | source_client=source_client, destination_client=destination_client, 170 | migration_config=configuration) 171 | 172 | migrate(source_client=source_client, destination_client=destination_client, 173 | database_mappings=database_mappings) 174 | -------------------------------------------------------------------------------- /metabasepy/client.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | import requests 4 | import json 5 | 6 | try: 7 | from urllib import urlencode 8 | except ImportError: 9 | from urllib.parse import urlencode 10 | 11 | 12 | def get_file_export_path(file_name): 13 | from os import getcwd 14 | from os.path import join 15 | return join(getcwd(), file_name) 16 | 17 | 18 | def parse_filename_from_response_header(response): 19 | if type(response) != requests.Response: 20 | raise ValueError("{} is not a valid Response object!") 21 | 22 | content_disposition = response.headers.get('Content-Disposition') 23 | if not content_disposition: 24 | return None 25 | 26 | filenames = re.findall('filename=(.+)', content_disposition) 27 | if not filenames: 28 | return None 29 | selected_filename = filenames[0] 30 | return selected_filename.strip('"').strip("'") 31 | 32 | 33 | class AuthorizationFailedException(Exception): 34 | pass 35 | 36 | 37 | class RequestException(Exception): 38 | def __init__(self, message=None): 39 | self.message = message 40 | 41 | 42 | class Resource(object): 43 | def __init__(self, **kwargs): 44 | self.base_url = kwargs.get('base_url') 45 | self.token = kwargs.get('token') 46 | self.verify = kwargs.get('verify', True) 47 | self.proxies = kwargs.get('proxies') 48 | 49 | def prepare_headers(self): 50 | return { 51 | 'X-Metabase-Session': self.token, 52 | 'Content-Type': 'application/json' 53 | } 54 | 55 | @staticmethod 56 | def validate_response(response): 57 | request_method = response.request.method 58 | status_code = response.status_code 59 | if request_method == "GET": 60 | if status_code != 200: 61 | raise RequestException(message=response.content) 62 | elif request_method == "POST": 63 | if status_code not in [200, 201, 202]: 64 | raise RequestException(message=response.content) 65 | elif request_method == "PUT": 66 | if status_code != 204: 67 | raise RequestException(message=response.content) 68 | elif request_method == "DELETE": 69 | if status_code != 204: 70 | raise RequestException(message=response.content) 71 | 72 | @property 73 | def endpoint(self): 74 | raise NotImplementedError() 75 | 76 | def get(self): 77 | raise NotImplementedError() 78 | 79 | def post(self, **kwargs): 80 | raise NotImplementedError() 81 | 82 | def put(self, **kwargs): 83 | raise NotImplementedError() 84 | 85 | def delete(self, **kwargs): 86 | raise NotImplementedError() 87 | 88 | 89 | class ApiCommand(object): 90 | """ This is a general interface to implement a wrapper of endpoints which 91 | only allows POST methods and not a resource representation. """ 92 | 93 | def __init__(self, **kwargs): 94 | self.base_url = kwargs.get('base_url') 95 | self.token = kwargs.get('token') 96 | self.verify = kwargs.get('verify', True) 97 | self.proxies = kwargs.get('proxies') 98 | 99 | def prepare_headers(self): 100 | return { 101 | 'X-Metabase-Session': self.token, 102 | 'Content-Type': 'application/json' 103 | } 104 | 105 | def post(self, **kwargs): 106 | raise NotImplementedError() 107 | 108 | @staticmethod 109 | def validate_response(response): 110 | status_code = response.status_code 111 | if status_code not in [200, 201, 202]: 112 | raise RequestException(message=response.content) 113 | 114 | @property 115 | def endpoint(self): 116 | raise NotImplementedError() 117 | 118 | 119 | class DatabaseResource(Resource): 120 | 121 | @property 122 | def endpoint(self): 123 | return "{}/api/database".format(self.base_url) 124 | 125 | def get(self, database_id=None): 126 | url = self.endpoint 127 | if database_id: 128 | url = "{}/{}".format(url, database_id) 129 | resp = requests.get( 130 | url=self.endpoint, 131 | headers=self.prepare_headers(), 132 | verify=self.verify, 133 | proxies=self.proxies 134 | ) 135 | Resource.validate_response(response=resp) 136 | return resp.json() 137 | 138 | def get_by_name(self, name): 139 | all_dbs = self.get() 140 | return [db for db in all_dbs if db['name'] == name] 141 | 142 | def delete(self, database_id): 143 | url = "{}/{}".format(self.endpoint, database_id) 144 | resp = requests.delete( 145 | url=url, 146 | headers=self.prepare_headers(), 147 | verify=self.verify, 148 | proxies=self.proxies 149 | ) 150 | Resource.validate_response(resp) 151 | 152 | def post(self, name, engine, host, port, dbname, user, password, ssl=False, 153 | tunnel_port=22): 154 | request_data = { 155 | "name": name, 156 | "engine": engine, 157 | "details": { 158 | "host": host, 159 | "port": port, 160 | "dbname": dbname, 161 | "user": user, 162 | "password": password, 163 | "ssl": ssl, 164 | "tunnel_port": tunnel_port 165 | } 166 | } 167 | resp = requests.post( 168 | url=self.endpoint, 169 | json=request_data, 170 | headers=self.prepare_headers(), 171 | verify=self.verify, 172 | proxies=self.proxies 173 | ) 174 | Resource.validate_response(response=resp) 175 | json_response = resp.json() 176 | return json_response['id'] 177 | 178 | 179 | class CardResource(Resource): 180 | 181 | @property 182 | def endpoint(self): 183 | return "{}/api/card".format(self.base_url) 184 | 185 | def get(self, card_id=None): 186 | url = self.endpoint 187 | if card_id: 188 | url = "{}/{}".format(self.endpoint, card_id) 189 | resp = requests.get( 190 | url=url, 191 | headers=self.prepare_headers(), 192 | verify=self.verify, 193 | proxies=self.proxies 194 | ) 195 | Resource.validate_response(response=resp) 196 | return resp.json() 197 | 198 | def get_by_collection(self, collection_slug): 199 | """ 200 | :param collection_slug: 201 | :return: 202 | """ 203 | url = "{}?f=all&collection={}".format(self.endpoint, collection_slug) 204 | resp = requests.get( 205 | url=url, 206 | headers=self.prepare_headers(), 207 | verify=self.verify, 208 | proxies=self.proxies 209 | ) 210 | Resource.validate_response(response=resp) 211 | return resp.json() 212 | 213 | def post(self, database_id, name, query, **kwargs): 214 | request_data = { 215 | "name": name, 216 | "display": kwargs.get('display', 'scalar'), 217 | "visualization_settings": kwargs.get('visualization_settings', {}), 218 | "dataset_query": { 219 | "database": database_id, 220 | "type": "native", 221 | "native": { 222 | "query": query, 223 | "collection": kwargs.get('collection', None), 224 | "template_tags": kwargs.get('template_tags', {}) 225 | } 226 | }, 227 | "description": kwargs.get('description', None), 228 | "collection_id": kwargs.get('collection_id', None) 229 | } 230 | resp = requests.post( 231 | url=self.endpoint, 232 | json=request_data, 233 | headers=self.prepare_headers(), 234 | verify=self.verify, 235 | proxies=self.proxies 236 | ) 237 | Resource.validate_response(response=resp) 238 | json_response = resp.json() 239 | return json_response['id'] 240 | 241 | def put(self, card_id, **kwargs): 242 | url = "{}/{}".format(self.endpoint, card_id) 243 | resp = requests.put( 244 | url=url, 245 | json=kwargs, 246 | headers=self.prepare_headers(), 247 | proxies=self.proxies 248 | ) 249 | Resource.validate_response(response=resp) 250 | 251 | def delete(self, card_id): 252 | url = "{}/{}".format(self.endpoint, card_id) 253 | resp = requests.delete( 254 | url=url, 255 | headers=self.prepare_headers(), 256 | verify=self.verify, 257 | proxies=self.proxies 258 | ) 259 | Resource.validate_response(response=resp) 260 | 261 | def query(self, card_id, parameters=None): 262 | # TODO : add parameters usage 263 | url = "{}/{}/query".format(self.endpoint, card_id) 264 | resp = requests.post( 265 | url=url, 266 | headers=self.prepare_headers(), 267 | verify=self.verify, 268 | proxies=self.proxies 269 | ) 270 | Resource.validate_response(response=resp) 271 | return resp.json() 272 | 273 | def download(self, card_id, format, parameters=None): 274 | url = "{}/{}/query".format(self.endpoint, card_id) 275 | if format not in ['csv', 'json', 'xlsx']: 276 | raise ValueError('{} format not supported.'.format(format)) 277 | url = "{}/{}".format(url, format) 278 | if parameters: 279 | parameters = urlencode({k: json.dumps(v) 280 | for k, v in parameters.items()}) 281 | resp = requests.post( 282 | url=url, 283 | headers=self.prepare_headers(), 284 | params=parameters, verify=self.verify, 285 | proxies=self.proxies 286 | ) 287 | Resource.validate_response(response=resp) 288 | return resp.json() 289 | 290 | 291 | class CollectionResource(Resource): 292 | 293 | @property 294 | def endpoint(self): 295 | return "{}/api/collection".format(self.base_url) 296 | 297 | def get(self, collection_id=None, archived=False): 298 | url = self.endpoint 299 | if collection_id: 300 | url = "{}/{}".format(self.endpoint, collection_id) 301 | elif archived: 302 | url = "{}?archived=true" 303 | resp = requests.get( 304 | url=url, 305 | headers=self.prepare_headers(), 306 | verify=self.verify, 307 | proxies=self.proxies 308 | ) 309 | Resource.validate_response(response=resp) 310 | return resp.json() 311 | 312 | def post(self, name, color="#000000", **kwargs): 313 | request_data = { 314 | "name": name, 315 | "description": kwargs.get('description'), 316 | "color": color 317 | } 318 | resp = requests.post( 319 | url=self.endpoint, 320 | json=request_data, 321 | headers=self.prepare_headers(), 322 | verify=self.verify, 323 | proxies=self.proxies 324 | ) 325 | Resource.validate_response(response=resp) 326 | return resp.json() 327 | 328 | def delete(self, collection_id): 329 | url = "{}/{}".format(self.endpoint, collection_id) 330 | resp = requests.delete( 331 | url=url, 332 | headers=self.prepare_headers(), 333 | verify=self.verify, 334 | proxies=self.proxies 335 | ) 336 | Resource.validate_response(response=resp) 337 | 338 | 339 | class UserResource(Resource): 340 | 341 | @property 342 | def endpoint(self): 343 | return "{}/api/user".format(self.base_url) 344 | 345 | def get(self, user_id=None): 346 | url = self.endpoint 347 | if user_id: 348 | url = "{}/{}".format(self.endpoint, user_id) 349 | 350 | resp = requests.get( 351 | url=url, 352 | headers=self.prepare_headers(), 353 | verify=self.verify, 354 | proxies=self.proxies 355 | ) 356 | Resource.validate_response(response=resp) 357 | return resp.json() 358 | 359 | def current(self): 360 | url = "{}/current".format(self.endpoint) 361 | resp = requests.get( 362 | url=url, 363 | headers=self.prepare_headers(), 364 | verify=self.verify, 365 | proxies=self.proxies 366 | ) 367 | Resource.validate_response(response=resp) 368 | return resp.json() 369 | 370 | def post(self, first_name, last_name, email, password): 371 | request_data = { 372 | "first_name": first_name, 373 | "last_name": last_name, 374 | "email": email, 375 | "password": password 376 | } 377 | resp = requests.post( 378 | url=self.endpoint, 379 | json=request_data, 380 | headers=self.prepare_headers(), 381 | verify=self.verify, 382 | proxies=self.proxies 383 | ) 384 | Resource.validate_response(response=resp) 385 | json_response = resp.json() 386 | return json_response['id'] 387 | 388 | def delete(self, user_id): 389 | url = "{}/{}".format(self.endpoint, user_id) 390 | resp = requests.delete( 391 | url=url, 392 | headers=self.prepare_headers(), 393 | verify=self.verify, 394 | proxies=self.proxies 395 | ) 396 | Resource.validate_response(response=resp) 397 | 398 | def send_invite(self, user_id): 399 | url = "{}/{}/send_invite".format(self.endpoint, user_id) 400 | resp = requests.post( 401 | url=url, 402 | headers=self.prepare_headers(), 403 | verify=self.verify, 404 | proxies=self.proxies 405 | ) 406 | Resource.validate_response(response=resp) 407 | return resp.json() 408 | 409 | def password(self, user_id, password, old_password): 410 | url = "{}/{}/password".format(self.endpoint, user_id) 411 | request_data = { 412 | "password": password, 413 | "old_password": old_password 414 | } 415 | resp = requests.put( 416 | url=url, 417 | json=request_data, 418 | headers=self.prepare_headers(), 419 | verify=self.verify, 420 | proxies=self.proxies 421 | ) 422 | Resource.validate_response(response=resp) 423 | return resp.json() 424 | 425 | 426 | class UtilityResource(Resource): 427 | 428 | @property 429 | def endpoint(self): 430 | return "{}/api/util".format(self.base_url) 431 | 432 | def logs(self): 433 | url = "{}/logs".format(self.endpoint) 434 | resp = requests.get( 435 | url=url, 436 | headers=self.prepare_headers(), 437 | verify=self.verify, 438 | proxies=self.proxies 439 | ) 440 | Resource.validate_response(response=resp) 441 | return resp.json() 442 | 443 | def random_token(self): 444 | url = "{}/random_token".format(self.endpoint) 445 | resp = requests.get( 446 | url=url, 447 | headers=self.prepare_headers(), 448 | verify=self.verify, 449 | proxies=self.proxies 450 | ) 451 | Resource.validate_response(response=resp) 452 | return resp.json() 453 | 454 | def stats(self): 455 | url = "{}/stats".format(self.endpoint) 456 | resp = requests.get( 457 | url=url, 458 | headers=self.prepare_headers(), 459 | verify=self.verify, 460 | proxies=self.proxies 461 | ) 462 | Resource.validate_response(response=resp) 463 | return resp.json() 464 | 465 | def password_check(self, password): 466 | url = "{}/password_check".format(self.endpoint) 467 | request_data = { 468 | "password": password, 469 | } 470 | resp = requests.post( 471 | url=url, 472 | json=request_data, 473 | headers=self.prepare_headers(), 474 | verify=self.verify, 475 | proxies=self.proxies 476 | ) 477 | Resource.validate_response(response=resp) 478 | return resp.json() 479 | 480 | def connection_pool_info(self): 481 | url = "{}/diagnostic_info/connection_pool_info".format(self.endpoint) 482 | resp = requests.get( 483 | url=url, 484 | headers=self.prepare_headers(), 485 | verify=self.verify, 486 | proxies=self.proxies 487 | ) 488 | Resource.validate_response(response=resp) 489 | return resp.json() 490 | 491 | 492 | class DatasetCommand(ApiCommand): 493 | 494 | @staticmethod 495 | def validate_export_format(export_format_value): 496 | allowed_export_formats = ['api', 'csv', 'json', 'xlsx'] 497 | if export_format_value not in allowed_export_formats: 498 | raise ValueError('{} not supported!'.format(export_format_value)) 499 | 500 | @property 501 | def endpoint(self): 502 | return "{}/api/dataset".format(self.base_url) 503 | 504 | def post(self, database_id, query): 505 | """ Execute a query and retrieve the results in the usual format.""" 506 | request_data = { 507 | "type": "native", 508 | "native": { 509 | "query": query, 510 | "template-tags": {} 511 | }, 512 | "database": database_id, 513 | "parameters": [] 514 | } 515 | resp = requests.post( 516 | url=self.endpoint, 517 | json=request_data, 518 | headers=self.prepare_headers(), 519 | verify=self.verify, 520 | proxies=self.proxies 521 | ) 522 | Resource.validate_response(response=resp) 523 | json_response = resp.json() 524 | return json_response 525 | 526 | def export(self, database_id, query, export_format, full_path=None): 527 | """ redirects dataset query to available export endpoint, 528 | saves it in folder given with to_file_path parameter 529 | or current working directory by default.""" 530 | 531 | query_request_data = { 532 | "type": "native", 533 | "native": { 534 | "query": query, 535 | "template-tags": {} 536 | }, 537 | "database": database_id, 538 | "parameters": [], 539 | } 540 | request_data = { 541 | "query": json.dumps(query_request_data) 542 | } 543 | 544 | headers = self.prepare_headers() 545 | headers.update({'Content-Type': 'application/x-www-form-urlencoded'}) 546 | 547 | DatasetCommand.validate_export_format( 548 | export_format_value=export_format) 549 | command_url = "{command_endpoint}/{export_param}".format( 550 | command_endpoint=self.endpoint, 551 | export_param=export_format 552 | ) 553 | resp = requests.post( 554 | url=command_url, 555 | data=request_data, 556 | headers=headers, 557 | verify=self.verify, 558 | proxies=self.proxies 559 | ) 560 | 561 | if not full_path: 562 | file_name = parse_filename_from_response_header(response=resp) \ 563 | or "metabase_dataset_export.{extension}".format( 564 | extension=export_format) 565 | export_file_path = get_file_export_path(file_name=file_name) 566 | else: 567 | export_file_path = full_path 568 | 569 | file_access_mode = "w" 570 | if type(resp.content) == bytes: 571 | file_access_mode = "wb" 572 | 573 | with open(file=export_file_path, mode=file_access_mode) as f: 574 | f.write(resp.content) 575 | 576 | return export_file_path 577 | 578 | def duration(self, database_id, query): 579 | """ Get historical query execution duration. """ 580 | request_data = { 581 | "type": "native", 582 | "native": { 583 | "query": query, 584 | "template-tags": {} 585 | }, 586 | "database": database_id, 587 | "parameters": [] 588 | } 589 | command_url = "{}/duration".format(self.endpoint) 590 | resp = requests.post( 591 | url=command_url, 592 | json=request_data, 593 | headers=self.prepare_headers(), 594 | verify=self.verify, 595 | proxies=self.proxies 596 | ) 597 | Resource.validate_response(response=resp) 598 | json_response = resp.json() 599 | return json_response 600 | 601 | 602 | class Client(object): 603 | def __init__(self, username, password, base_url, **kwargs): 604 | self.__username = username 605 | self.__passw = password 606 | self.base_url = base_url 607 | self.token = kwargs.get('token') 608 | self.verify = kwargs.get('verify', True) 609 | self.proxies = kwargs.get('proxies') 610 | 611 | def __get_auth_url(self): 612 | return "{}/api/session".format(self.base_url) 613 | 614 | def authenticate(self): 615 | request_data = { 616 | "username": self.__username, 617 | "password": self.__passw 618 | } 619 | request_headers = { 620 | 'Content-Type': 'application/json' 621 | } 622 | resp = requests.post( 623 | url=self.__get_auth_url(), 624 | json=request_data, 625 | headers=request_headers, 626 | verify=self.verify, 627 | proxies=self.proxies 628 | ) 629 | 630 | json_response = resp.json() 631 | if "id" not in json_response: 632 | raise AuthorizationFailedException() 633 | 634 | self.token = json_response['id'] 635 | 636 | @property 637 | def databases(self): 638 | return DatabaseResource(base_url=self.base_url, 639 | token=self.token, 640 | verify=self.verify) 641 | 642 | @property 643 | def cards(self): 644 | return CardResource(base_url=self.base_url, 645 | token=self.token, 646 | verify=self.verify) 647 | 648 | @property 649 | def collections(self): 650 | return CollectionResource(base_url=self.base_url, 651 | token=self.token, 652 | verify=self.verify) 653 | 654 | @property 655 | def users(self): 656 | return UserResource(base_url=self.base_url, 657 | token=self.token, 658 | verify=self.verify) 659 | 660 | @property 661 | def utils(self): 662 | return UtilityResource(base_url=self.base_url, 663 | token=self.token, 664 | verify=self.verify) 665 | 666 | @property 667 | def dataset(self): 668 | return DatasetCommand(base_url=self.base_url, 669 | token=self.token, 670 | verify=self.verify) 671 | --------------------------------------------------------------------------------