├── .gitignore ├── LICENSE ├── README.md ├── apis ├── __init__.py ├── endpoints │ ├── __init__.py │ ├── assets.py │ ├── catalogs.py │ ├── categories.py │ ├── connections.py │ ├── datarequests.py │ ├── glossaryTerms.py │ ├── governanceArtifactTypes.py │ ├── jobs.py │ ├── projects.py │ ├── referenceData.py │ ├── roles.py │ ├── rules.py │ ├── users.py │ └── workflows.py ├── main.py └── usecases │ ├── __init__.py │ ├── assignTerms.py │ ├── categoryTrees.py │ └── itemWithWorkflow.py ├── curl ├── assets │ └── getasset.http ├── authorize.http ├── catalogs │ └── getcatalogs.http ├── categories │ ├── deletecategory.http │ └── reassign_collaborator.http ├── datarequests │ └── deletedatarequest.http ├── referencedata │ └── getrefdata.http ├── reindex.http ├── resync.http ├── resynchdatraclasses.http ├── users │ ├── createuser.http │ ├── getallusers.http │ └── getuserinformation.http └── workflows │ ├── getWorkflowTask.http │ ├── housekeeping.http │ └── updateWorkflowTask.http ├── devRequirements.txt ├── htmlReference └── apis │ ├── endpoints │ ├── assets.html │ ├── catalogs.html │ ├── categories.html │ ├── connections.html │ ├── datarequests.html │ ├── glossaryTerms.html │ ├── governanceArtifactTypes.html │ ├── index.html │ ├── jobs.html │ ├── projects.html │ ├── referenceData.html │ ├── rules.html │ ├── users.html │ └── workflows.html │ ├── index.html │ ├── main.html │ └── usecases │ ├── assignTerms.html │ ├── categoryTrees.html │ ├── index.html │ └── itemWithWorkflow.html ├── notebooks ├── .DS_Store ├── endpoints │ ├── .DS_Store │ ├── assets.ipynb │ ├── catalogs.ipynb │ ├── categories.ipynb │ ├── connections.ipynb │ ├── datarequest.ipynb │ ├── glossaryterms.ipynb │ ├── governanceartifacttypes.ipynb │ ├── jobs.ipynb │ ├── refdata.ipynb │ ├── rules.ipynb │ ├── search.ipynb │ ├── users.ipynb │ └── workflow.ipynb ├── sandbox │ ├── categories_export.csv │ ├── createUsers.py │ ├── create_categories.py │ ├── create_terms.py │ ├── demo_categories.csv │ ├── demo_terms.csv │ ├── 
demo_users.csv │ ├── exportProject.py │ ├── exportUsers.py │ ├── export_categories.py │ ├── export_terms.py │ ├── governance_artifacts.ipynb │ ├── importProject.py │ ├── new_categories.csv │ ├── new_terms.csv │ ├── new_users.csv │ ├── refDataTest.csv │ ├── terms_export.csv │ ├── updateUsers.py │ ├── update_categories.py │ ├── update_terms.py │ ├── user_management.ipynb │ └── users_export.csv ├── tk │ └── ekv.ipynb └── usecases │ ├── TK Demo.ipynb │ ├── batchupdateterm.ipynb │ ├── customasset.ipynb │ ├── customattribute.ipynb │ ├── deletecategory.ipynb │ ├── glossaryTerms with Workflow.ipynb │ └── healthcheck.ipynb ├── olm-utils ├── delete.sh ├── deployment.yaml ├── env.sh ├── preview.sh └── update-env-vars.py ├── payloads ├── asset_relationship_types │ └── createAssetRelationship.json ├── asset_types │ ├── createCustomAssetBook.json │ ├── createCustomAssetRestAPI.json │ ├── createCustomAssetTypeBook.json │ └── createCustomAssetTypeRestAPI.json ├── assets │ ├── addtermtoasset.json │ ├── createassetattribute.json │ ├── patchAssetDescription.json │ └── patchAssetTags.json ├── categories │ └── createCategory.json ├── datarequest │ ├── postnewrequest.json │ └── updaterequest.json ├── glossary_terms │ ├── addTermTags.json │ ├── createTerm.json │ ├── createTermComplete.json │ ├── patchGlossaryTerm.json │ └── updateTerm.json ├── governance_artifact_types │ ├── createcustomattribute.json │ └── patchcustom attribute.json ├── jobs │ └── startJobRun.json ├── reference_data │ ├── createRefData.json │ ├── createRefDataWithTime.json │ ├── createRefDataWithValues.json │ └── createRefDataWithValuesAndTime.json ├── search │ ├── getTerm.json │ ├── sampleQuery.json │ ├── sampleQueryWithAggregation.json │ └── sampleQueryWithSort.json ├── tk │ ├── createCustomAssetEKV.json │ ├── createCustomAssetEKVwithConnectedAsset.json │ ├── createCustomAssetTypeEKV.json │ └── updatecustomattributediagnose.json └── workflows │ ├── deleteTask.json │ ├── discardTask.json │ ├── publishTask.json │ ├── 
queryAllWorkflows.json │ └── updateWorkflowUserTask.json ├── pytest.ini ├── requirements.txt ├── sandbox ├── createCategories.py ├── createTerms.py ├── createUsers.py ├── demo_users.csv ├── exportDataProtectionRules.py ├── exportGovArtifacts.py ├── exportProject.py ├── exportTerms.py ├── exportUsers.py ├── getDemoFiles.sh ├── github.sh ├── importDataProtectionRules.py ├── importGovArtifacts.py ├── importProject.py ├── update-env.py ├── updateCategories.py ├── updateTerms.py └── updateUsers.py ├── tests ├── __init__.py ├── csvs │ └── refDataUpload.csv ├── glossaryTerm_test.py ├── main_test.py ├── payloads │ └── reference_data │ │ └── createRefData.json └── referenceData_test.py └── utilities ├── __init__.py ├── csvProcessing.py ├── helper.py └── subAPIs.py /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This is the repo of scripts required for olm-utils and CP4d demo-builder -------------------------------------------------------------------------------- /apis/__init__.py: -------------------------------------------------------------------------------- 1 | from apis import endpoints 2 | from apis import usecases 3 | from apis.main import MainAPI 4 | -------------------------------------------------------------------------------- /apis/endpoints/__init__.py: -------------------------------------------------------------------------------- 1 | from apis.endpoints.assets import AssetsAPI 2 | from apis.endpoints.catalogs import CatalogsAPI 3 | from apis.endpoints.categories import CategoriesAPI 4 | from apis.endpoints.connections import ConnectionsAPI 5 | from apis.endpoints.datarequests import DataRequestAPI 6 | from apis.endpoints.glossaryTerms import TermsAPI 7 | from apis.endpoints.governanceArtifactTypes import GovArtifactAPI 8 | from 
apis.endpoints.jobs import JobsAPI 9 | from apis.endpoints.projects import ProjectsAPI 10 | from apis.endpoints.rules import RulesAPI 11 | from apis.endpoints.referenceData import RefDataAPI 12 | from apis.endpoints.users import UsersAPI 13 | from apis.endpoints.workflows import WorkflowsAPI 14 | -------------------------------------------------------------------------------- /apis/endpoints/assets.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from utilities import subAPIs 3 | 4 | class AssetsAPI(subAPIs.SubAPI): 5 | """This class is wrapping all functions related to assets and custom assets""" 6 | 7 | # GET functions 8 | def getAssetIds(self, catalogId, assetPath, artifact_type='data_asset'): 9 | ''' 10 | Gets the Asset/Artifact Id from an asset path in a catalog. 11 | Returns one or many assets. 12 | Throws an exception if no assets are found 13 | ''' 14 | resJSON = self.mainAPI.search(f'entity.assets.connection_paths.keyword:{assetPath} AND metadata.artifact_type:{artifact_type} AND entity.assets.catalog_id:{catalogId}') 15 | assert resJSON['size'] > 0, 'No Asset Id found' 16 | return resJSON['rows'][0]['artifact_id'] 17 | 18 | def getAssetById(self, assetId, catalogId): 19 | """ Gets an asset from its artifact id in a catalog 20 | Returns the assets JSON structure if found """ 21 | return self.mainAPI._getJSON(f'/v2/assets/{assetId}?catalog_id={catalogId}') 22 | 23 | def getAssetTypes(self, catalogId): 24 | """ Gets all asset types setup in a catalog 25 | Returns the assets JSON structure if found """ 26 | return self.mainAPI._getJSON(f'/v2/asset_types?catalog_id={catalogId}') 27 | 28 | def getAssetType(self, typeName, catalogId): 29 | """ Get all assets of a given type, e.g. 
data_asset 30 | Returns all assets that match the asset type """ 31 | query = f'asset.name:{typeName}' 32 | payload = { 33 | "query": query, 34 | "limit": 10 35 | } 36 | return self.mainAPI._POST(f'/v2/asset_types/{typeName}/search?catalog_id={catalogId}', payload) 37 | 38 | def getAssetTypeRelationships(self, typeName): 39 | """ Get all assets of a given type, e.g. data_asset 40 | Returns all assets that match the asset type """ 41 | return self.mainAPI._getJSON(f'/v2/asset_types/{typeName}/relationships') 42 | 43 | def getAssetRelationships(self, assetId, catalogId): 44 | """ Get all assets of a given type, e.g. data_asset 45 | Returns all assets that match the asset type """ 46 | return self.mainAPI._getJSON(f'/v2/assets/{assetId}/relationships?catalog_id={catalogId}') 47 | 48 | def getAssetRelationshipTypes(self, assetId, catalogId): 49 | """ Get all assets of a given type, e.g. data_asset 50 | Returns all assets that match the asset type """ 51 | return self.mainAPI._getJSON(f'/v2/assets/{assetId}/relationship_types?catalog_id={catalogId}') 52 | 53 | def getAssetAttributes(self, assetId, catalogId): 54 | """ Get all asset attributess of a given asset """ 55 | return self.mainAPI._getJSON(f'/v2/assets/{assetId}/attributes?catalog_id={catalogId}') 56 | 57 | # PUT functions 58 | def createAssetAttributes(self, attributeName, catalog_id, payloadFile='assets/createassetattribute.json'): 59 | payload = self.mainAPI._loadPayload(payloadFile) 60 | return self.mainAPI._putJSON(f'/v2/asset_types/{attributeName}?catalog_id={catalog_id}', payload, statusCheck=200) 61 | 62 | def createAssetRelationship(self, relName, t_catalog_id, s_asset_id, t_asset_id): 63 | payload = { 64 | "relationship_targets": [ 65 | { 66 | "catalog_id": t_catalog_id, 67 | "asset_id": t_asset_id 68 | } 69 | ] 70 | } 71 | return self.mainAPI._putJSON(f'/v2/assets/{s_asset_id}/relationships/{relName}?catalog_id={t_catalog_id}', payload, statusCheck=207) 72 | 73 | # POST functions 74 | def 
getListOfAllDataAssets(self, catalogId): 75 | """ Get list of all data assets in a given catalog 76 | Returns a list of all data assets """ 77 | payload = {'query': '*:*'} 78 | return self.mainAPI._POST(f'/v2/asset_types/data_asset/search?catalog_id={catalogId}', payload) 79 | 80 | def getListOfAllAssetsByType(self, catalogId, assetType): 81 | """ Get a list of all assets of a given type in a given catalog 82 | Returns a list of all assets that match the asset type """ 83 | payload = {'query': '*:*'} 84 | return self.mainAPI._POST(f'/v2/asset_types/{assetType}/search?catalog_id={catalogId}', payload) 85 | 86 | def getListOfBooksByAuthor(self, catalogId, assetType, author): 87 | """ Get a list of Books by a certain author in a given catalog 88 | Applies to custom type Book """ 89 | payload = {'query': f'book.author.last_name:{author}'} 90 | return self.mainAPI._POST(f'/v2/asset_types/{assetType}/search?catalog_id={catalogId}', payload) 91 | 92 | def createAsset(self, catalogId, payload=None, payloadFile='asset_types/createCustomAssetBook.json'): # example: book from Watson API Doc 93 | """Creates a custom asset in a catalog from the payloadfile provided""" 94 | if not payload and payloadFile: 95 | payload = self.mainAPI._loadPayload(payloadFile) 96 | assert payload, 'No payload passed in' 97 | return self.mainAPI._POST(f'/v2/assets?catalog_id={catalogId}', payload, statusCheck=201) 98 | 99 | def createAssetAttribute(self, assetId, catalogId, payloadFile='assets/createassetattribute.json'): 100 | """Creates an attributes to an asset in a catalog from the payloadfile provided""" 101 | payload = self.mainAPI._loadPayload(payloadFile) 102 | return self.mainAPI._POST(f'/v2/assets/{assetId}/attributes?catalog_id={catalogId}', payload, statusCheck=201) 103 | 104 | def createCustomAssetType(self, catalogId, payloadFile='asset_types/createCustomAssetTypeBook.json'): # example: book from Watson API Doc 105 | """Creates a custom asset type in a catalog from the payloadfile 
provided""" 106 | payload = self.mainAPI._loadPayload(payloadFile) 107 | return self.mainAPI._POST(f'/v2/asset_types?catalog_id={catalogId}', payload, statusCheck=201) 108 | 109 | def createAssetRelationshipType(self, payloadFile='asset_relationship_types/createAssetRelationship.json'): 110 | """Creates a custom asset type in a catalog from the payloadfile provided""" 111 | payload = self.mainAPI._loadPayload(payloadFile) 112 | return self.mainAPI._POST(f'/v2/asset_relationship_types', payload, statusCheck=201) 113 | 114 | 115 | # PATCH functions 116 | def updateAsset(self, assetId, catalogId, payload=None, payloadFile=None, statusCheck=200): 117 | """ Updates a custom asset in a catalog from the payloadfile provided 118 | follow https://tools.ietf.org/html/rfc6902 """ 119 | if payloadFile: 120 | payload = self.mainAPI._loadPayload(payloadFile) 121 | assert payload, 'No payload passed in' 122 | self.mainAPI._PATCH(f'/v2/assets/{assetId}?catalog_id={catalogId}', payload, statusCheck=statusCheck) 123 | 124 | def addTagToAsset(self, catalogId, assetId, tag): 125 | """ Adds a Tag to an asset """ 126 | payload = [{'op': 'add', 'path': '/metadata/tags/-', 'value': tag}] 127 | self.updateAsset(assetId, catalogId, payload) 128 | 129 | def removeTagFromAsset(self, catalogId, assetId, idx): 130 | """ Removes a tag from an asset 131 | Idx specifies the positional argument for the tag in the tag list 132 | could be evaluated as: idx = res["metadata"]["tags"].index('TAGNAME')""" 133 | payload = [{'op': 'remove', 'path': f'/metadata/tags/{idx}'}] 134 | self.updateAsset(assetId, catalogId, payload) 135 | 136 | def patchAssetAttribute(self, assetId, catalogId, attribute_key, payloadFile='asset_types/addtermtoasset.json'): 137 | """Creates a attributes to an asset in a catalog from the payloadfile provided""" 138 | payload = self.mainAPI._loadPayload(payloadFile) 139 | return self.mainAPI._PATCH(f'/v2/assets/{assetId}/attributes/{attribute_key}?catalog_id={catalogId}', payload, 
statusCheck=200) 140 | 141 | # DELETE functions 142 | def deleteAsset(self, assetId, catalogId): 143 | """Deletes an asset in a catalog""" 144 | return self.mainAPI._DELETE(f'/v2/assets/{assetId}?catalog_id={catalogId}', statusCheck=204) 145 | 146 | def deleteCustomAssetType(self, assetType, catalogId): 147 | """Deletes a custom asset type in a catalog""" 148 | return self.mainAPI._DELETE(f'/v2/asset_types/{assetType}?catalog_id={catalogId}', statusCheck=204) 149 | -------------------------------------------------------------------------------- /apis/endpoints/catalogs.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from utilities import subAPIs 3 | 4 | class CatalogsAPI(subAPIs.SubAPI): 5 | """ This class represents all functions related to catalogs """ 6 | 7 | # GET functions 8 | def getDefaultCatalog(self): 9 | """ Returns the default catalog """ 10 | return self.mainAPI._getJSON('/v2/catalogs/default') 11 | 12 | def getPlatformAssetCatalog(self): 13 | """ Returns the Platform Assets catalog """ 14 | return self.mainAPI._getJSON('/v2/catalogs/ibm-global-catalog') 15 | 16 | def getDefaultCatalogId(self): 17 | """ Gets the Default Catalog 18 | Returns catalog_id if default catalog was found 19 | Returns an exception if default catalog was not found """ 20 | resJSON = self.getDefaultCatalog() 21 | try: 22 | return resJSON['metadata']['guid'] 23 | except: 24 | raise ValueError('No Default Catalog found') 25 | 26 | def getCatalogs(self): 27 | """ Returns all catalogs """ 28 | return self.mainAPI._getJSON('/v2/catalogs') 29 | 30 | def getCatalog(self, catalog_id): 31 | """ Returns one catalog """ 32 | return self.mainAPI._getJSON(f'/v2/catalogs/{catalog_id}') 33 | 34 | def getCatalogProperties(self, catalog_id): 35 | """ Returns all catalog properties """ 36 | return self.mainAPI._getJSON(f'/v2/catalogs/{catalog_id}/properties') 37 | 38 | def getCatalogMembers(self, catalog_id): 39 | """ Returns all catalog 
members """ 40 | return self.mainAPI._getJSON(f'/v2/catalogs/{catalog_id}/members') 41 | 42 | def getCatalogMembershipByUserId(self, catalog_id, member_id): 43 | """ Returns catalogs membership for a user """ 44 | return self.mainAPI._getJSON(f'/v2/catalogs/{catalog_id}/members/{member_id}') 45 | 46 | def getCatalogNames(self): 47 | """ Gets a list of all Catalog names 48 | Returns catalog names for all catalogs found """ 49 | resJSON = self.mainAPI._getJSON('/v2/catalogs') 50 | logging.info(resJSON['catalogs']) 51 | catlist = [] 52 | for obj in resJSON['catalogs']: 53 | logging.info(f'Catalog Name is: {obj["entity"]["name"]} + GUID is: {obj["metadata"]["guid"]}') 54 | catlist.append(obj['entity']['name']) 55 | return catlist 56 | 57 | def getCatalogIdByName(self, catalogName): 58 | """ Gets a Catalog by a given name 59 | Returns catalog_id if catalog name was found 60 | Returns an exception if catalog name was not found """ 61 | # Invoke Endpoint 62 | resJSON = self.getCatalogs() 63 | catalogId = [obj for obj in resJSON['catalogs'] if(obj['entity']['name'] == catalogName)] 64 | logging.debug(catalogId) 65 | try: 66 | return catalogId[0]['metadata']['guid'] 67 | except: 68 | raise ValueError('No Catalog found') 69 | -------------------------------------------------------------------------------- /apis/endpoints/categories.py: -------------------------------------------------------------------------------- 1 | from logging import debug, info 2 | from utilities import csvProcessing, subAPIs 3 | import ast 4 | 5 | class CategoriesAPI(subAPIs.SubAPI): 6 | """This class represents all functions related to categories""" 7 | 8 | # GET functions 9 | def getListOfCategories(self): 10 | """Get a list of all categories""" 11 | return self.mainAPI.search('metadata.artifact_type:category') 12 | 13 | def getListOfTopLevelCategories(self): 14 | """Get a list of all top level categories""" 15 | return self.mainAPI.search('metadata.artifact_type:category NOT 
categories.primary_category_name:*') 16 | 17 | 18 | def getListOfCategoriesForParentById(self, parentId): 19 | """Get a list of all categories underneath one parent category as indicated by its id""" 20 | return self.mainAPI.search(f'metadata.artifact_type:category AND categories.primary_category_id:{parentId}') 21 | 22 | def getCategoryByName(self, category): 23 | """Get a Category by a given name 24 | This function may return many categories""" 25 | return self.mainAPI.search(f'metadata.artifact_type:category AND metadata.name.keyword:{category}') 26 | 27 | def getCategoryByPath(self, catpath): 28 | """Get a Category by a given path 29 | Path should follow the structure /top/sub1/sub2 30 | This function returns exactly one category""" 31 | cat = catpath.split('/') 32 | if catpath.startswith("/"): 33 | cat.pop(0) 34 | rescat = '' 35 | parentId = '' 36 | for lvl in cat: 37 | rescat = '' 38 | if lvl == cat[0]: 39 | cats = self.getListOfTopLevelCategories()["rows"] 40 | for c in cats: 41 | if lvl == c["metadata"]["name"]: 42 | parentId = c["artifact_id"] 43 | rescat = c 44 | else: 45 | cats = self.getListOfCategoriesForParentById(parentId)["rows"] 46 | for c in cats: 47 | if lvl == c["metadata"]["name"]: 48 | parentId = c["artifact_id"] 49 | rescat = c 50 | return rescat 51 | 52 | # DELETE functions 53 | # TODO: delete other artifacts as well 54 | def deleteCategory(self, guid): 55 | """Delete a category by a given id 56 | Currently requires that there are no artifacts stored in that category anymore""" 57 | return self.mainAPI._DELETE(f'/v3/categories/{guid}') 58 | 59 | # POST functions 60 | def createCategory(self, payloadFile='categories/createCategory.json'): 61 | """Create a new term from a json payload file""" 62 | payload = self.mainAPI._loadPayload(payloadFile) 63 | endpoint = f'/v3/categories' 64 | return self.mainAPI._POST(endpoint, payload, statusCheck=201) 65 | 66 | def createCategory(self, categoryName, categoryShortDescription, categoryLongDescription): 
67 | """Create a new category from a name, short and long description""" 68 | payload = { 69 | 'long_description': categoryLongDescription, 70 | 'name': categoryName, 71 | 'short_description': categoryShortDescription 72 | } 73 | endpoint = f'/v3/categories' 74 | return self.mainAPI._POST(endpoint, payload, statusCheck=201) 75 | 76 | def categories2Table(self, categoriesJSON): 77 | """creates a table from a list of categories 78 | Args: 79 | categoriesJSON (dict): JSON formatted output of the WKC API for a list of categories 80 | Returns: 81 | pandas.DataFrame: table of categories 82 | """ 83 | assert categoriesJSON['size'] > 0, 'No Categories found' 84 | return csvProcessing.items2Table(self._addCategory2Table, categoriesJSON['rows']) 85 | 86 | def _addCategory2Table(self, table, categoriesJSON): 87 | debug(categoriesJSON) 88 | artifacts = categoriesJSON['entity']['artifacts'] 89 | metadata = categoriesJSON['metadata'] 90 | debug(metadata) 91 | row = self._creatCategoriesRow(artifacts, metadata) 92 | row = self._addDescriptionIfPresent(row, metadata) 93 | return table.append(row, ignore_index=True) 94 | 95 | def _creatCategoriesRow(self, artifacts, metadata): 96 | return { 97 | 'artifact_id': artifacts['artifact_id'], 98 | 'name': metadata['name'], 99 | 'steward_ids': metadata['steward_ids'], 100 | 'tags': metadata['tags'] 101 | } 102 | 103 | def _addDescriptionIfPresent(self, row, metadata): 104 | if 'description' in metadata.keys(): 105 | row['description'] = metadata['description'] 106 | return row 107 | 108 | def categories2CSV(self, categoriesJSON, filePath): 109 | """creates a csv file of terms 110 | Args: 111 | itemsJSON (dict): JSON formatted output of the WKC API for a list of terms 112 | filePath (str): name or filepath for the creation of the csv file 113 | """ 114 | csvProcessing.items2CSV(self._addCategory2Table, categoriesJSON['rows'], filePath) 115 | 116 | # Functions for mass category updates 117 | def _getCategoryPayload(self, categoryRow, 
revision, steward_ids, tags): 118 | """Construct a category payload from some of its parameters""" 119 | return { 120 | 'long_description': categoryRow['description'], 121 | 'name': categoryRow['name'], 122 | 'steward_ids': steward_ids, 123 | 'tags': tags, 124 | } 125 | 126 | def _updateCategoryFromRow(self, categoryRow): 127 | """Updates a category with the content from the table""" 128 | steward_ids = ast.literal_eval(categoryRow['steward_ids']) 129 | tags = ast.literal_eval(categoryRow['tags']) 130 | version_id = self.mainAPI.getVersionId(categoryRow['artifact_id']) 131 | revision = self.getCategory(categoryRow['artifact_id'], version_id)["metadata"]["revision"] 132 | payload = self._getCategoryPayload(categoryRow, revision, steward_ids, tags) 133 | self.updateCategory(categoryRow['artifact_id'], payload, version_id) 134 | 135 | def updateCategoriesFromTable(self, categoryTable): 136 | """Iterates over the terms contained in a table""" 137 | for _, categoryRow in categoryTable.iterrows(): 138 | self._updateCategoryFromRow(categoryRow) 139 | 140 | def getCategory(self, artifact_id, version_id=None): 141 | """ Get a category 142 | If version id is provided then that version is returned, else the latest version is returned""" 143 | if not version_id: 144 | version_id = self.mainAPI.getVersionId(artifact_id) 145 | return self.mainAPI._getJSON(f'/v3/categories/{artifact_id}') 146 | 147 | # PATCH functions 148 | def updateCategory(self, artifact_id, payload, version_id=None, skip_workflow=True): 149 | """Update a category with a payload provided""" 150 | if not version_id: 151 | version_id = self.mainAPI.getVersionId(artifact_id) 152 | self.mainAPI._PATCH(f'/v3/categories/{artifact_id}', payload) 153 | -------------------------------------------------------------------------------- /apis/endpoints/connections.py: -------------------------------------------------------------------------------- 1 | from utilities import subAPIs 2 | 3 | class 
ConnectionsAPI(subAPIs.SubAPI): 4 | """This class represents all functions related to Connections""" 5 | 6 | # GET functions 7 | def getListOfConnections(self, catalog_id): 8 | """Get a list of Connections in a givven catalog""" 9 | return self.mainAPI._getJSON(f'/v2/connections?catalog_id={catalog_id}') 10 | 11 | def getListOfSSLConnections(self, catalog_id, name): 12 | """Get a list of SSL connections in a given catalog""" 13 | return self.mainAPI._getJSON(f'/v2/connections?catalog_id={catalog_id}&entity.properties.ssl=true&entity.name={name}') 14 | 15 | def getConnection(self, connection_id, catalog_id): 16 | """Get a connection by a given id in a given catalog""" 17 | return self.mainAPI._getJSON(f'/v2/connections/{connection_id}?catalog_id={catalog_id}') 18 | 19 | def getConnectionAssets(self, connection_id, catalog_id): 20 | """Get a list of assets that belong to a connection by a given id in a given catalog""" 21 | return self.mainAPI._getJSON(f'/v2/connections/{connection_id}/assets?catalog_id={catalog_id}') 22 | -------------------------------------------------------------------------------- /apis/endpoints/datarequests.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from utilities import subAPIs 3 | 4 | class DataRequestAPI(subAPIs.SubAPI): 5 | """This class represents all functions dealing with Data Requests""" 6 | 7 | # GET functions 8 | def getDataRequests(self, SelectBy=None, Value=None): 9 | """Reading out data requests with or without filtering""" 10 | if not SelectBy: 11 | # Returns all data requests stored in the system 12 | return self.mainAPI._getJSON('/zen-data-ui/v1/datarequest') 13 | else: 14 | # Returns filtered set of data requests 15 | return self.mainAPI._getJSON(f'/zen-data-ui/v1/datarequest?SelectBy={SelectBy}&Value={Value}') 16 | 17 | # DELETE functions 18 | def deleteDataRequest(self, id): 19 | """Deletes a data requests stored in the system by its id""" 20 | return 
self.mainAPI._DELETE(f'/zen-data-ui/v1/datarequest/{id}') 21 | 22 | def _iterateDelete(self, res): 23 | """Iterates over an array of requestObj as contained in response from getDataRequest function""" 24 | for dr in res["requestObj"]: 25 | id = dr["Id"] 26 | logging.info(dr["Title"]) 27 | logging.debug(id) 28 | self.deleteDataRequest(id) 29 | 30 | def deleteAllDataRequestsByState(self, status): 31 | """Gets all data requests of a given status and deletes these""" 32 | res = self.getDataRequests('State', status) 33 | self._iterateDelete(res) 34 | 35 | def deleteAllDataRequestsByUser(self, user): 36 | """Gets all data requests from a given user and deletes these""" 37 | res = self.getDataRequests('AssignedTo', user) 38 | self._iterateDelete(res) 39 | 40 | # POST functions 41 | def createDataRequest(self, payloadFile='datarequest/postnewrequest.json'): 42 | """Create a new data request from a json payload file""" 43 | payload = self.mainAPI._loadPayload(payloadFile) 44 | endpoint = f'/zen-data-ui/v1/datarequest' 45 | return self.mainAPI._POST(endpoint, payload, statusCheck=200) -------------------------------------------------------------------------------- /apis/endpoints/glossaryTerms.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import csv 3 | from logging import debug, info 4 | from utilities import csvProcessing, subAPIs 5 | 6 | class TermsAPI(subAPIs.SubAPI): 7 | """This class represents all functions related to glossary terms""" 8 | def __init__(self, mainAPI=None): 9 | super().__init__(mainAPI) 10 | 11 | # GET functions 12 | def getListOfTerms(self, termName): 13 | """Gets a list of published terms by keyword""" 14 | return self.mainAPI.search(f'metadata.name.keyword:{termName} AND metadata.artifact_type:glosary_term') 15 | 16 | def getListOfTermDrafts(self, limit=200): 17 | """Gets a list of Term drafts""" 18 | return self.mainAPI._getJSON(f'/v3/governance_artifact_types/glossary_term?limit={limit}') 
19 | 20 | def getListOfTermDraftsByCategory(self, category, limit=200): 21 | """Gets a list of Term drafts in a given category""" 22 | res = self.mainAPI._getJSON(f'/v3/governance_artifact_types/glossary_term?limit={limit}') 23 | lst = [] 24 | if res["count"] > 0: 25 | for term in res["resources"]: 26 | if term["parent_category"]["name"] == category: 27 | lst.append(term) 28 | return lst 29 | 30 | def getTerms(self, status, limit): 31 | """Get a list of terms in a given status""" 32 | termsJSON = self.mainAPI._getJSON(f'/v3/glossary_terms?status={status}&limit={limit}') 33 | allTerms = termsJSON['resources'] 34 | while 'next' in termsJSON: 35 | termsJSON = self.mainAPI._getJSON(termsJSON['next']['href']) 36 | allTerms.extend(termsJSON['resources']) 37 | return allTerms 38 | 39 | def getPublishedVersionsOfTerm(self, artifact_id): 40 | """Get the published version of a given term""" 41 | return self.mainAPI._getJSON(f'/v3/glossary_terms/{artifact_id}/versions?status=PUBLISHED') 42 | 43 | def getDraftVersionsOfTerm(self, artifact_id): 44 | """Get the draft version of a given term""" 45 | return self.mainAPI._getJSON(f'/v3/glossary_terms/{artifact_id}/versions?status=DRAFT') 46 | 47 | def getTerm(self, artifact_id, version_id=None): 48 | """ Get a term 49 | If version id is provided then that version is returned, else the latest version is returned""" 50 | if not version_id: 51 | version_id = self.mainAPI.getVersionId(artifact_id) 52 | return self.mainAPI._getJSON(f'/v3/glossary_terms/{artifact_id}/versions/{version_id}?all_parents=true') 53 | 54 | # POST functions 55 | def createTerm(self, payloadFile='glossary_terms/createTerm.json', skip_workflow=True): 56 | """Create a new term from a json payload file""" 57 | payload = self.mainAPI._loadPayload(payloadFile) 58 | endpoint = f'/v3/glossary_terms?skip_workflow_if_possible={skip_workflow}' 59 | return self.mainAPI._POST(endpoint, payload, statusCheck=201) 60 | 61 | def createTerm(self, termAbbreviations, termName, 
termShortDescription, termLongDescription, skip_workflow=True): 62 | """Create a new term from a json payload file""" 63 | payload = [ 64 | { 65 | "abbreviations": termAbbreviations, 66 | "long_description": termLongDescription, 67 | "name": termName, 68 | "short_description": termShortDescription 69 | } 70 | ] 71 | endpoint = f'/v3/glossary_terms?skip_workflow_if_possible={skip_workflow}' 72 | return self.mainAPI._POST(endpoint, payload, statusCheck=201) 73 | 74 | # PATCH functions 75 | def updateTerm(self, artifact_id, payload, version_id=None, skip_workflow=True): 76 | """Update a term with a payload provided""" 77 | if not version_id: 78 | version_id = self.mainAPI.getVersionId(artifact_id) 79 | self.mainAPI._PATCH(f'/v3/glossary_terms/{artifact_id}/versions/{version_id}?skip_workflow_if_possible={skip_workflow}', payload) 80 | 81 | def patchGlossaryTerm(self, artifact_id, custom_attribute_value, version_id=None, skip_workflow=True): 82 | """Update a given term with a custom attribute""" 83 | if not version_id: 84 | version_id = self.mainAPI.getVersionId(artifact_id) 85 | payload = { 86 | 'revision': '1', 87 | 'custom_attributes': [{ 88 | 'custom_attribute_definition_id': '9d32bf51-4c2c-49b1-ab8f-cc82fe90785d', 89 | 'name': 'Enterprise Data ID', 90 | 'values': [{'value': custom_attribute_value}] 91 | }] 92 | } 93 | self.mainAPI._PATCH(f'/v3/glossary_terms/{artifact_id}/versions/{version_id}?skip_workflow_if_possible={skip_workflow}', payload) 94 | info('Term patched') 95 | 96 | def patchTermsWithCustomAttribute(self, filename): 97 | """Patch terms from a list with a custom attribute definition""" 98 | with open(filename, newline='') as csvFile: 99 | reader = csv.DictReader(csvFile, delimiter=',') 100 | for row in reader: 101 | info(row['Asset Name'], row['Parent Category'], row['Enterprise Data ID']) 102 | # Go fetch the artifact_id and version_id for each of the combination of term and parent category 103 | termIds = 
self.mainAPI.search(f'metadata.name.keyword:{row["Asset Name"]} AND categories.primary_category_name.keyword:{row["Parent Category"]} AND metadata.artifact_type:glossary_term') 104 | artifact_id = termIds['artifact_id'] 105 | version_id = termIds['entity']['artifacts']['version_id'] 106 | # Patch the term with the new Enterprise ID 107 | self.patchGlossaryTerm(artifact_id, version_id, row['Enterprise Data ID']) 108 | info('Term updates done!') 109 | 110 | # DELETE functions 111 | def deleteTerm(self, artifact_id, version_id=None, skip_workflow=True): 112 | """Delete a given term""" 113 | if not version_id: 114 | version_id = self.mainAPI.getVersionId(artifact_id) 115 | if skip_workflow: 116 | return self.mainAPI._DELETE(f'/v3/glossary_terms/{artifact_id}/versions/{version_id}?skip_workflow_if_possible={skip_workflow}', statusCheck=204) 117 | else: 118 | return self.mainAPI._DELETE(f'/v3/glossary_terms/{artifact_id}/versions/{version_id}?skip_workflow_if_possible={skip_workflow}', statusCheck=201) 119 | 120 | def deleteAllTermDrafts(self): 121 | """Delete all Draft Terms""" 122 | resJSON = self.mainAPI._getJSON('/v3/governance_artifact_types/glossary_term?limit=200') 123 | # iterate over list of terms 124 | for term in resJSON['resources']: 125 | debug(term) 126 | self.deleteTerm(term["artifact_id"], term["version_id"], skip_workflow=True) 127 | 128 | def deleteAllTermsFromCategory(self, category): 129 | """Delete all Published Terms in a given category""" 130 | res = self.mainAPI.search(f'categories.primary_category_name.keyword:{category} AND metadata.artifact_type:glossary_term') 131 | # iterate over list of terms 132 | for term in res['rows']: 133 | artifact_id = term["entity"]["artifacts"]["artifact_id"] 134 | version_id = term["entity"]["artifacts"]["version_id"] 135 | info(term["metadata"]["name"]) 136 | debug(artifact_id) 137 | debug(version_id) 138 | self.deleteTerm(artifact_id, version_id, skip_workflow=True) 139 | return res 140 | 141 | # Functions for 
mass term updates 142 | def _getTermPayload(self, termRow, revision, steward_ids, tags): 143 | """Construct a term payload from some of its parameters""" 144 | return { 145 | 'revision': revision, 146 | 'long_description': termRow['description'], 147 | 'name': termRow['name'], 148 | 'steward_ids': steward_ids, 149 | 'tags': tags, 150 | } 151 | 152 | def _updateTermFromRow(self, termRow): 153 | """Updates a term with the content from the table""" 154 | steward_ids = ast.literal_eval(termRow['steward_ids']) 155 | tags = ast.literal_eval(termRow['tags']) 156 | version_id = self.mainAPI.getVersionId(termRow['artifact_id']) 157 | revision = self.getTerm(termRow['artifact_id'], version_id)["metadata"]["revision"] 158 | payload = self._getTermPayload(termRow, revision, steward_ids, tags) 159 | self.updateTerm(termRow['artifact_id'], payload, version_id) 160 | 161 | def updateTermsFromTable(self, termTable): 162 | """Iterates over the terms contained in a table""" 163 | for _, termRow in termTable.iterrows(): 164 | self._updateTermFromRow(termRow) 165 | 166 | def _addTerm2Table(self, table, termJSON): 167 | debug(termJSON) 168 | artifacts = termJSON['entity']['artifacts'] 169 | metadata = termJSON['metadata'] 170 | debug(metadata) 171 | row = self._creatTermsRow(artifacts, metadata) 172 | row = self._addDescriptionIfPresent(row, metadata) 173 | return table.append(row, ignore_index=True) 174 | 175 | def _addDescriptionIfPresent(self, row, metadata): 176 | if 'description' in metadata.keys(): 177 | row['description'] = metadata['description'] 178 | return row 179 | 180 | def _creatTermsRow(self, artifacts, metadata): 181 | return { 182 | 'artifact_id': artifacts['artifact_id'], 183 | 'name': metadata['name'], 184 | 'steward_ids': metadata['steward_ids'], 185 | 'tags': metadata['tags'] 186 | } 187 | 188 | def terms2Table(self, termsJSON): 189 | """creates a table from a list of terms 190 | Args: 191 | termsJSON (dict): JSON formatted output of the WKC API for a list of terms 
192 | Returns: 193 | pandas.DataFrame: table of terms 194 | """ 195 | assert termsJSON['size'] > 0, 'No Terms found' 196 | return csvProcessing.items2Table(self._addTerm2Table, termsJSON['rows']) 197 | 198 | def terms2CSV(self, termsJSON, filePath): 199 | """creates a csv file of terms 200 | Args: 201 | itemsJSON (dict): JSON formatted output of the WKC API for a list of terms 202 | filePath (str): name or filepath for the creation of the csv file 203 | """ 204 | csvProcessing.items2CSV(self._addTerm2Table, termsJSON['rows'], filePath) 205 | -------------------------------------------------------------------------------- /apis/endpoints/governanceArtifactTypes.py: -------------------------------------------------------------------------------- 1 | from utilities import subAPIs 2 | 3 | class GovArtifactAPI(subAPIs.SubAPI): 4 | """This class represents all functions related to governance artifact types""" 5 | 6 | # GET functions 7 | def getArtefactByType(self, artifact_type): 8 | """Get the draft artifacts by a given type 9 | Returns or or many artifacts""" 10 | return self.mainAPI._getJSON(f'/v3/governance_artifact_types/{artifact_type}') 11 | 12 | def getCustomAttributes(self, artifactType): 13 | """Get all Custom Attributes for a given artifact type 14 | Returns many objects in a JSON structure """ 15 | return self.mainAPI._getJSON(f'/v3/governance_artifact_types/{artifactType}/custom_attribute_definitions') 16 | 17 | def getCustomAttributeByName(self, artifactType, name): 18 | """Get one Custom Attribute for a given artifcat type by its name 19 | Filters from the list of all custom attributes the one that matches the name provided. 
20 | Returns one or none object""" 21 | res = self.getCustomAttributes(artifactType) 22 | customAttribute = list(filter(lambda x: x["metadata"]["name"] == name, res)) 23 | return customAttribute 24 | 25 | def getActivityLogEntries(self, artifact_type, artifact_id, version_id): 26 | """Get all actvity log entries for a given artifact type 27 | Returns many objects in a JSON structure """ 28 | return self.mainAPI._getJSON(f'/v3/governance_artifact_types/{artifact_type}/{artifact_id}/versions/{version_id}/aggregated_logs') 29 | 30 | def getActivityLogEntry(self, artifact_type, artifact_id, version_id, mod_id): 31 | """Get all actvity log entries for a given artifact type 32 | Returns many objects in a JSON structure """ 33 | return self.mainAPI._getJSON(f'/v3/governance_artifact_types/{artifact_type}/{artifact_id}/versions/{version_id}/aggregated_logs{mod_id}') 34 | 35 | 36 | #TODO: Verify functionality 37 | def exportArtifactsZIP(self, filename, artifact_id_mode="always", artifact_types="all", category_ids="all_top_level"): 38 | """Export a ZIP file containing artifacts export 39 | Requires CP4D 4.x""" 40 | endpoint = f'/v3/governance_artifact_types/export?artifact_id_mode={artifact_id_mode}&artifact_types={artifact_types}&category_ids={category_ids}' 41 | self.mainAPI._getFile(endpoint, filename) 42 | 43 | # POST functions 44 | def createCustomAttribute(self, artifact_type, payloadFile='governance_artifact_types/createCustomAttribute.json'): 45 | """Create a Custom Attribute for a given artifact type from a payload file provided""" 46 | payload = self.mainAPI._loadPayload(payloadFile) 47 | return self.mainAPI._POST(f'/v3/governance_artifact_types/{artifact_type}/custom_attribute_definitions', payload, statusCheck=201) 48 | 49 | def importArtifactsZIP(self, filename): 50 | """Import artifacts from a zip file 51 | Requires CP4D 4.x""" 52 | files = {'file': (filename, open(filename, 'rb'), 'application/x-zip-compressed')} 53 | endpoint = 
f'/v3/governance_artifact_types/import' 54 | self.mainAPI._postFile(endpoint, files=files, contentType='multipart/form-data', statusCheck=202) 55 | 56 | # DELETE functions 57 | def deleteCustomAttribute(self, artifact_type, custom_attribute_definition_id): 58 | """Delete a Custom Attribute of a given artifact type""" 59 | return self.mainAPI._DELETE(f'/v3/governance_artifact_types/{artifact_type}/custom_attribute_definitions/{custom_attribute_definition_id}', statusCheck=200) 60 | -------------------------------------------------------------------------------- /apis/endpoints/jobs.py: -------------------------------------------------------------------------------- 1 | from utilities import subAPIs 2 | 3 | class JobsAPI(subAPIs.SubAPI): 4 | """This class represents all functions related to jobs""" 5 | 6 | # GET functions 7 | def getListOfJobs(self, project_id): 8 | """Get a list of jobs for a given project""" 9 | return self.mainAPI._getJSON(f'/v2/jobs/?project_id={project_id}') 10 | 11 | # GET functions 12 | def getListOfJobRuns(self, project_id, job_id): 13 | """Get a list of jobs for a given project""" 14 | return self.mainAPI._getJSON(f'/v2/jobs/{job_id}/runs?project_id={project_id}') 15 | 16 | def getRunOfAJob(self, job_id, run_id, project_id): 17 | """Get the results of a run from a given job in a given project""" 18 | return self.mainAPI._getJSON(f'/v2/jobs/{job_id}/runs/{run_id}?project_id={project_id}') 19 | 20 | def getLogOfAJobRun(self, job_id, run_id): 21 | """Get the log of a given job and a given run""" 22 | return self.mainAPI._getJSON(f'/v2/jobs/{job_id}/runs/{run_id}/logs') 23 | 24 | # POST functions 25 | def startJobRun(self, job_id, project_id, payloadFile='jobs/startJobRun.json'): 26 | """Start a given job in a given project""" 27 | payload = self.mainAPI._loadPayload(payloadFile) 28 | return self.mainAPI._POST(f'/v2/jobs/{job_id}/runs?project_id={project_id}', payload, statusCheck=201) 29 | 30 | # DELETE functions 31 | def deleteRunOfAJob(self, 
job_id, run_id, project_id): 32 | """Delete the log of a given run of a job in a given project""" 33 | return self.mainAPI._DELETE(f'/v2/jobs/{job_id}/runs/{run_id}?project_id={project_id}', statusCheck=204) 34 | -------------------------------------------------------------------------------- /apis/endpoints/projects.py: -------------------------------------------------------------------------------- 1 | from utilities import subAPIs 2 | import logging 3 | 4 | class ProjectsAPI(subAPIs.SubAPI): 5 | """This class represents all functions related to projects""" 6 | 7 | # GET functions 8 | def getListOfProjects(self): 9 | """Get a list of projects""" 10 | return self.mainAPI._getJSON('/v2/projects') 11 | -------------------------------------------------------------------------------- /apis/endpoints/referenceData.py: -------------------------------------------------------------------------------- 1 | from utilities import subAPIs 2 | import logging 3 | 4 | class RefDataAPI(subAPIs.SubAPI): 5 | """This class represents all functions related to reference data""" 6 | 7 | def getListOfRefData(self, refDataName): 8 | """Get a list of reference data sets""" 9 | return self.mainAPI.search(f'metadata.name.keyword:{refDataName} AND metadata.artifact_type:reference_data') 10 | 11 | # GET functions 12 | def getRefData(self, artifact_id, version_id=None): 13 | """Get reference data from a given set""" 14 | if not version_id: 15 | version_id = self.mainAPI.getVersionId(artifact_id) 16 | allRefdata = self.mainAPI._getJSON(f'/v3/reference_data/{artifact_id}/versions/{version_id}?values_offset=0&values_limit=1') 17 | logging.info(allRefdata) 18 | return self._expandRefDataWithValues(artifact_id, version_id, allRefdata) 19 | 20 | def getRefDataCSV(self, artifact_id, filename, version_id=None): 21 | """Export a CSV file containing reference data values of a given reference data set""" 22 | if not version_id: 23 | version_id = self.mainAPI.getVersionId(artifact_id) 24 | endpoint = 
f'/v3/reference_data/{artifact_id}/versions/{version_id}/values' 25 | self.mainAPI._getFile(endpoint, filename) 26 | 27 | # POST functions 28 | def createRefData(self, payloadFile='reference_data/createRefData.json', skip_workflow=True): 29 | """ Create new reference data 30 | This can create a new set with or without reference data values""" 31 | payload = self.mainAPI._loadPayload(payloadFile) 32 | endpoint = f'/v3/reference_data/?skip_workflow_if_possible={skip_workflow}' 33 | return self.mainAPI._POST(endpoint, payload, statusCheck=201) 34 | 35 | # PUT functions 36 | def loadRefDataFromCSV(self, artifact_id, filename, version_id=None, skip_workflow=True): 37 | """Import reference data values from a csv file into a given reference data set""" 38 | if not version_id: 39 | version_id = self.mainAPI.getVersionId(artifact_id) 40 | files = {'file': (filename, open(filename, 'rb'), 'text/csv')} 41 | endpoint = f'/v3/reference_data/{artifact_id}/versions/{version_id}/values/import?code=code&value=value&description=description&skip_workflow_if_possible={skip_workflow}' 42 | self.mainAPI._putFile(endpoint, files=files, contentType='multipart/form-data', statusCheck=202) 43 | 44 | # DELETE functions 45 | def deleteRefData(self, artifact_id, version_id=None, skip_workflow=True): 46 | """Delete a given reference data set""" 47 | if not version_id: 48 | version_id = self.mainAPI.getVersionId(artifact_id) 49 | endpoint = f'/v3/reference_data/{artifact_id}/versions/{version_id}?skip_workflow_if_possible={skip_workflow}' 50 | self.mainAPI._DELETE(endpoint) 51 | 52 | # Functions for mass reference data extraction 53 | def _expandRefDataWithValues(self, artifact_id, version_id, allRefdata): 54 | """Iterates over the list of reference data values in a given set""" 55 | for i in range(allRefdata['entity']['rds_values_total_counts']): 56 | value = self._extractRefDataValue(f'/v3/reference_data/{artifact_id}/versions/{version_id}?values_offset={i + 1}&values_limit=1') 57 | 
allRefdata['entity']['rds_values']['resources'].extend(value) 58 | return allRefdata 59 | 60 | def _extractRefDataValue(self, endpoint): 61 | """Get a reference data value""" 62 | resJSON = self.mainAPI._getJSON(endpoint) 63 | logging.info(resJSON['entity']['rds_values']) 64 | return resJSON['entity']['rds_values']['resources'] 65 | -------------------------------------------------------------------------------- /apis/endpoints/roles.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from utilities import csvProcessing, subAPIs 3 | import csv 4 | import logging 5 | 6 | class RolesAPI(subAPIs.SubAPI): 7 | """This class represents all functions related to users""" 8 | 9 | # GET functions 10 | def getAllRoles(self): 11 | """Get all roles""" 12 | return self.mainAPI._getJSON('/api/v1/usermgmt/v1/roles') 13 | 14 | # Get all user of a given role 15 | def getRoleByName(self, roleName): 16 | """Get all users that have a given role""" 17 | resJSON = self.mainAPI._getJSON('/icp4d-api/v1/roles') 18 | role = [obj for obj in resJSON['Roles'] if(obj['role_name'] == roleName)] 19 | try: 20 | return role[0]['id'] 21 | except: 22 | raise ValueError('No Role found') 23 | 24 | # POST functions 25 | def addRole(self, payload): 26 | """Add a new role to the system""" 27 | return self.mainAPI._POST('/api/v1/usermgmt/v1/roles', payload) 28 | 29 | # Functions for mass user update 30 | def _getRolePayload(self, roleRow, permissions): 31 | return { 32 | 'role_name': roleRow['role_name'], 33 | 'description': roleRow['description'], 34 | 'permissions': permissions 35 | } 36 | 37 | def _updateRoleFromRow(self, roleRow): 38 | permissions = ast.literal_eval(roleRow['permissions']) 39 | payload = self._getRolePayload(roleRow, permissions) 40 | self.addRole(payload) 41 | 42 | def updateRolesFromTable(self, roleTable): 43 | for _, roleRow in roleTable.iterrows(): 44 | self._updateRoleFromRow(roleRow) 45 | 46 | def _addRole2Table(self, table, 
roleJSON): 47 | logging.debug(roleJSON) 48 | row = self._creatRoleRow(roleJSON) 49 | return table.append(row, ignore_index=True) 50 | 51 | def _creatRoleRow(self, role): 52 | logging.debug("role") 53 | logging.debug(role) 54 | return { 55 | 'role_name': role['role_name'], 56 | 'description': role['description'], 57 | 'permissions': role['permissions'] 58 | } 59 | 60 | def roles2Table(self, rolesJSON): 61 | """creates a table from a list of roles 62 | Args: 63 | usersJSON (dict): JSON formatted output of the WKC API for a list of roles 64 | Returns: 65 | pandas.DataFrame: table of roles 66 | """ 67 | usersFound = len(rolesJSON['UsersInfo']) 68 | assert usersFound > 0, 'No Users found' 69 | return csvProcessing.items2Table(self._addRole2Table, rolesJSON['UsersInfo']) 70 | 71 | def roles2CSV(self, rolesJSON, filePath): 72 | """creates a csv file of roles 73 | Args: 74 | rolesJSON (dict): JSON formatted output of the WKC API for a list of roles 75 | filePath (str): name or filepath for the creation of the csv file 76 | """ 77 | csvProcessing.items2CSV(self._addRole2Table, rolesJSON['UsersInfo'], filePath) 78 | -------------------------------------------------------------------------------- /apis/endpoints/rules.py: -------------------------------------------------------------------------------- 1 | import json 2 | from utilities import subAPIs 3 | 4 | class RulesAPI(subAPIs.SubAPI): 5 | """This class represents all functions related to data protection rules""" 6 | 7 | # GET functions 8 | def getListOfRules(self): 9 | """Get a list of rules""" 10 | return self.mainAPI._getJSON('/v3/enforcement/rules') 11 | 12 | # DELETE functions 13 | def deleteRule(self, rule_id): 14 | """Delete the log of a given rule""" 15 | return self.mainAPI._DELETE(f'/v3/enforcement/rules/{rule_id}', statusCheck=204) 16 | 17 | def exportDataProtectionRules(self, file_name): 18 | return self.mainAPI._getFile('/v3/enforcement/rules/export',file_name ,200) 19 | 20 | def 
importDataProtectionRules(self, dataprotectionrulefile): 21 | payload = json.load(open(dataprotectionrulefile, encoding= 'utf-8' )) 22 | expected = [element for element in payload["rules"] if element['governance_type_id'] != "ResourceControl" ] 23 | payload["rules"]=expected 24 | return self.mainAPI._POST('/v3/enforcement/rules/import',payload,'application/octet-stream',200) 25 | -------------------------------------------------------------------------------- /apis/endpoints/users.py: -------------------------------------------------------------------------------- 1 | import ast 2 | from utilities import csvProcessing, subAPIs 3 | import csv 4 | import logging 5 | 6 | class UsersAPI(subAPIs.SubAPI): 7 | """This class represents all functions related to users""" 8 | 9 | # GET functions 10 | def getAllUsers(self): 11 | """Get all users""" 12 | return self.mainAPI._getJSON('/icp4d-api/v1/users') 13 | 14 | def getUser(self, user_name): 15 | """Get information about a given user""" 16 | return self.mainAPI._getJSON(f'/icp4d-api/v1/users/{user_name}') 17 | 18 | def getUserIdByName(self, user_name): 19 | """Get the id for a given user""" 20 | return self.mainAPI._getJSON(f'/icp4d-api/v1/users/{user_name}')["UserInfo"]["uid"] 21 | 22 | # Get all user of a given role 23 | def getRoleByName(self, roleName): 24 | """Get all users that have a given role""" 25 | resJSON = self.mainAPI._getJSON('/icp4d-api/v1/roles') 26 | role = [obj for obj in resJSON['Roles'] if(obj['role_name'] == roleName)] 27 | try: 28 | return role[0]['id'] 29 | except: 30 | raise ValueError('No Role found') 31 | 32 | # POST functions 33 | def addUser(self, payload): 34 | """Add a new user to the system""" 35 | return self.mainAPI._POST('/icp4d-api/v1/users', payload) 36 | 37 | # Functions for mass user update 38 | def _getUserPayload(self, userRow, user_roles): 39 | return { 40 | 'user_name': userRow['username'], 41 | 'password': userRow['password'], 42 | 'displayName': userRow['displayName'], 43 | 
'user_roles': user_roles, 44 | 'email': userRow['email'] 45 | } 46 | 47 | def _updateUserFromRow(self, userRow): 48 | user_roles = ast.literal_eval(userRow['user_roles']) 49 | payload = self._getUserPayload(userRow, user_roles) 50 | self.addUser(payload) 51 | 52 | def updateUsersFromTable(self, userTable): 53 | for _, userRow in userTable.iterrows(): 54 | self._updateUserFromRow(userRow) 55 | 56 | def _addUser2Table(self, table, userJSON): 57 | logging.debug(userJSON) 58 | row = self._creatUsersRow(userJSON) 59 | return table.append(row, ignore_index=True) 60 | 61 | def _creatUsersRow(self, user): 62 | logging.debug("user") 63 | logging.debug(user) 64 | return { 65 | 'displayName': user["displayName"], 66 | 'email': user["email"], 67 | 'username': user["username"], 68 | 'uid': user["uid"], 69 | 'user_roles': user["user_roles"], 70 | 'permissions': user["permissions"] 71 | } 72 | 73 | def users2Table(self, usersJSON): 74 | """creates a table from a list of users 75 | Args: 76 | usersJSON (dict): JSON formatted output of the WKC API for a list of users 77 | Returns: 78 | pandas.DataFrame: table of users 79 | """ 80 | usersFound = len(usersJSON['UsersInfo']) 81 | assert usersFound > 0, 'No Users found' 82 | return csvProcessing.items2Table(self._addUser2Table, usersJSON['UsersInfo']) 83 | 84 | def users2CSV(self, usersJSON, filePath): 85 | """creates a csv file of users 86 | Args: 87 | usersJSON (dict): JSON formatted output of the WKC API for a list of users 88 | filePath (str): name or filepath for the creation of the csv file 89 | """ 90 | csvProcessing.items2CSV(self._addUser2Table, usersJSON['UsersInfo'], filePath) 91 | -------------------------------------------------------------------------------- /apis/endpoints/workflows.py: -------------------------------------------------------------------------------- 1 | from utilities import subAPIs 2 | 3 | class WorkflowsAPI(subAPIs.SubAPI): 4 | """This class represents all functions related to workflows""" 5 | 6 | # 
GET functions 7 | def getAllWorkflows(self): 8 | """Get all Workflows 9 | Returns JSON structure""" 10 | return self.mainAPI._getJSON('/v3/workflows') 11 | 12 | def getWorkflowById(self, workflow_id): 13 | """Get a given workflow""" 14 | return self.mainAPI._getJSON(f'/v3/workflows/{workflow_id}') 15 | 16 | def getWorkflowUserTasks(self): 17 | """Get all workflows tasks 18 | Returns JSON structure 19 | TaskIds can be queried as this: ['resources'][0]['metadata']['task_id']""" 20 | return self.mainAPI._getJSON(f'/v3/workflow_user_tasks') 21 | 22 | def getWorkflowUserTaskById(self, task_id): 23 | """Get a workflow tasks by a given id 24 | Returns a JSON structure 25 | Contains the possible actions in the "form_properties" object. 26 | That holds enum_values and the id specifies the possible action""" 27 | return self.mainAPI._getJSON(f'/v3/workflow_user_tasks/{task_id}') 28 | 29 | def getWorkflowUserTaskByArtifact(self, artifact_id, version_id): 30 | """Get a workflow task related to a specific artifact""" 31 | return self.mainAPI._getJSON(f'/v3/workflow_user_tasks?artifact_id={artifact_id}&version_id={version_id}') 32 | 33 | def getWorkflowHousekeeping(self): 34 | """you can query for workflows in an inconsistent state using the following call 35 | Be aware that you need to “page” through all currently running workflow instances by repeatedly running the above statement and increasing the offset by 100 each time. The paging is necessary to prevent timeouts. The resources listed in the response show the inconsistencies found. 36 | Also be aware that the total_count reported by the housekeeping endpoint refers to the number of inconsistencies found. However, the limit and offset input parameters refer to the currently active workflow instances. 
37 | See https://www.ibm.com/support/pages/node/6479631""" 38 | return self.mainAPI._getJSON(f'/v3/workflows/housekeeping?limit=100&offset=0') 39 | 40 | 41 | # POST functions 42 | def queryAllWorkflows(self, payloadFile='workflows/queryAllWorkflows.json'): 43 | """Get contents of all workflows""" 44 | payload = self.mainAPI._loadPayload(payloadFile) 45 | return self.mainAPI._POST('/v3/workflows/all/query', payload) 46 | 47 | def updateWorkflowUserTask(self, task_id, payloadFile='workflows/updateWorkflowUserTask.json'): 48 | """Update a given workflow task by a payload provided""" 49 | payload = self.mainAPI._loadPayload(payloadFile) 50 | return self.mainAPI._POST(f'/v3/workflow_user_tasks/{task_id}/actions', payload, statusCheck=204) 51 | 52 | def doWorkflowHousekeeping(self): 53 | """For each "page" that contains an inconsistent workflow you can remove those inconsistent workflows on that page using this call: 54 | Be aware that in case an inconsistent workflow has more than 10 artifacts attached you need to repeatedly invoke the cleanup using above command because on each invocation only 10 artifacts are cleaned up. You need to repeat this until all artifacts managed by the inconsistent workflow have been processed. 55 | See https://www.ibm.com/support/pages/node/6479631""" 56 | return self.mainAPI._POST(f'/v3/workflows/housekeeping?limit=100&offset=0') 57 | -------------------------------------------------------------------------------- /apis/main.py: -------------------------------------------------------------------------------- 1 | """""" 2 | from distutils.log import info 3 | from utilities import helper 4 | from decouple import config 5 | import logging 6 | import json 7 | import os 8 | import requests 9 | from requests.packages import urllib3 10 | 11 | class MainAPI(): 12 | """Watson Knowledge Catalog (WKC) Application Programmer Interface (API). 
class MainAPI():
    """Watson Knowledge Catalog (WKC) Application Programmer Interface (API).
    This class is the main class holding functions to access the CP4D backend"""

    def __init__(self):
        self.baseURL = f'https://{config("TZHOSTNAME")}'
        logging.info(f'URL: {self.baseURL}')
        self.headers = {'cache-control': 'no-cache'}
        self.session = requests.Session()
        self._authorize()
        self.payloadsPath = helper.getPayloadsPath()

    # Authorization
    def _authorize(self):
        """Authorize against the backend, wrapping any failure in ValueError."""
        try:
            return self._tryAuthorize()
        except Exception as e:
            raise ValueError(f'Error authenticating: {e}')

    def _tryAuthorize(self):
        """Send the credentials, check for a successful response and install
        the returned token into the session headers."""
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        res = self.session.post(f'{self.baseURL}/icp4d-api/v1/authorize', headers=self.headers, json={'username': config('WKCUSER'), 'password': config('PASSWORD')}, verify=False)
        assert res.status_code == 200, f'{res.status_code}: {res.text}'
        resJSON = res.json()
        return self._authorizeHeaders(resJSON)

    def _authorizeHeaders(self, resJSON):
        """Inject the bearer token into the default request headers."""
        access_token = resJSON['token']
        self.headers['Authorization'] = f'Bearer {access_token}'
        logging.info(f'HEADERS: {self.headers}')

    # Requests
    def _getJSON(self, endpoint, payload=None, statusCheck=200):
        """GET request that returns a parsed JSON structure."""
        res = self._GET(endpoint, payload, 'application/json', statusCheck)
        logging.debug(f'Response: {res.text}')
        return res.json()

    def _getFile(self, endpoint, filename, statusCheck=200):
        """GET request that streams the response body into a local file."""
        res = self._GET(endpoint, None, 'application/octet-stream', statusCheck)
        logging.debug(res)
        with open(filename, 'wb') as f:
            f.writelines(res)
        logging.info(res.text)

    def _GET(self, endpoint, payload, contentType, statusCheck):
        """The generic GET function; _getJSON or _getFile should be preferred."""
        return self._createRequest(self.session.get, endpoint, contentType, statusCheck, payload=payload)

    def _POST(self, endpoint, payload, contentType='application/json', statusCheck=200):
        """POST request with a JSON payload; returns the parsed body or {}."""
        res = self._createRequest(self.session.post, endpoint, contentType, statusCheck, payload=payload)
        logging.debug(f'Response Headers: {res.headers}')
        logging.debug(f'Response Text: {res.text}')
        return self._extractBody(res)

    def _extractBody(self, res):
        """Return res.json() when the response carries a JSON body, else {}.
        Centralises the logic previously duplicated (and partly missing) in
        _POST and _DELETE."""
        if 'Content-Length' in res.headers:
            if res.headers['Content-Length'] != '0':
                return res.json()
            return {}
        # BUGFIX: use .get() -- a response without a Content-Type header
        # raised KeyError before. NOTE(review): the exact-match comparison
        # misses 'application/json;charset=...' variants -- confirm intent.
        if res.headers.get('Content-Type') == 'application/json':
            return res.json()
        return {}

    def _PATCH(self, endpoint, payload, contentType='application/json', statusCheck=200):
        """PATCH request with a JSON payload; returns nothing."""
        res = self._createRequest(self.session.patch, endpoint, contentType, statusCheck, payload=payload)
        logging.info(res.text)

    def _putFile(self, endpoint, files, contentType='application/json', statusCheck=200):
        """PUT request that uploads files; returns nothing."""
        res = self._createRequest(self.session.put, endpoint, contentType, statusCheck, files=files)
        logging.info(res.text)

    def _putJSON(self, endpoint, payload, contentType='application/json', statusCheck=200):
        """PUT request with a JSON payload; returns the raw response."""
        return self._createRequest(self.session.put, endpoint, contentType, statusCheck, payload=payload)

    def _DELETE(self, endpoint, contentType='application/json', statusCheck=200):
        """DELETE request; returns the parsed body or {}."""
        res = self._createRequest(self.session.delete, endpoint, contentType, statusCheck)
        logging.debug(f'Res: {res}')
        # BUGFIX: a missing Content-Length header raised KeyError before;
        # handle the body the same way as _POST.
        return self._extractBody(res)

    def _createRequest(self, requestFun, endpoint, contentType, statusCheck, payload=None, files=None):
        """Generic request function for all request types.
        Asserts the expected status code and returns the raw response."""
        url = self.baseURL + endpoint
        headers = self._createContentTypeHeaders(contentType)
        self._logAPIFunctionInfo(headers, endpoint, payload, files)
        res = helper.sendRequestWithPayload(requestFun, url, headers, payload=payload, files=files)
        logging.debug(f'Res: {res}')
        assert res.status_code == statusCheck, f'{res.status_code}: {res.text}'
        return res

    def _createContentTypeHeaders(self, contentType):
        """Copy the default headers and set the request content type."""
        headers = self.headers.copy()
        if contentType:
            headers['Content-Type'] = contentType
        return headers

    def _logAPIFunctionInfo(self, headers, endpoint, payload, files):
        """Central logging of the various session parameters."""
        logging.debug(f'URL: {self.baseURL}')
        logging.debug(f'HEADERS: {headers}')
        logging.info(f'{helper.getRequestName()} {endpoint}')
        if payload:
            logging.info(f'PAYLOAD: {payload}')
        if files:
            logging.info(f'FILES: {files}')

    # API functions
    def search(self, searchString, payloadFile=None):
        """Searches WKC.
        Can be based on lucene parser syntax usable for many search needs;
        can also take a complex JSON payload for very specific search needs.
        Args:
            searchString (str): Search phrase. Lucene Parser Syntax can be
                applied for filtering
                (https://lucene.apache.org/core/2_9_4/queryparsersyntax.html)
        Returns:
            dict: Search result with dict_keys(['size', 'rows', 'aggregations'])
        """
        payload = self._loadPayload(payloadFile) if payloadFile else None
        return self._getJSON(f'/v3/search?query={searchString}', payload)

    def executeCAMSHealthCheck(self):
        """Executes a CAMS health check."""
        return self._getJSON('/v2/catalogs/default/healthcheck')

    def executeBGHeartbeat(self):
        """Executes a Business Glossary heartbeat."""
        logging.info('Heartbeat')
        return self._getJSON('/v3/glossary_terms/heartbeat')

    def executeBGHealthCheck(self):
        """Executes a Business Glossary health check."""
        return self._getJSON('/v3/glossary_terms/admin/open-metadata/healthcheck')

    # TODO: Returning a 404
    def executeTenantInitStatusCheck(self):
        """Executes a tenant init status check."""
        return self._getJSON('/v3/glossary_status/tenant_init_status')

    def getVersionId(self, artifact_id):
        """Return the version id of an artifact.
        NOTE(review): raises IndexError when the artifact is unknown -- the
        search answers zero rows then."""
        resJSON = self.search(f'entity.artifacts.artifact_id:{artifact_id}')
        return resJSON['rows'][0]['entity']['artifacts']['version_id']

    def _loadPayload(self, payloadFile):
        """Load a JSON payload from a file under the payloads directory."""
        jsonPath = os.path.join(self.payloadsPath, payloadFile)
        logging.info(f'Loading Payload: {jsonPath}')
        with open(jsonPath, 'r') as f:
            return json.load(f)

    def _postFile(self, endpoint, files, contentType='application/octet-stream', statusCheck=202):
        """POST request that uploads files (e.g. a zip); returns nothing."""
        res = self._createRequest(self.session.post, endpoint, contentType, statusCheck, files=files)
        logging.info(res.text)
# ---- /apis/usecases/__init__.py ----
from apis.usecases.assignTerms import AssignTermsAPI
from apis.usecases.categoryTrees import CategoryTreeAPI


# ---- /apis/usecases/assignTerms.py ----
import apis
from utilities import subAPIs
import logging
import csv

class AssignTermsAPI(subAPIs.SubAPI):
    """This class represents all functions related to assigning terms"""

    def __init__(self, mainAPI=None):
        super().__init__(mainAPI)
        self.assetsAPI = apis.AssetsAPI(self.mainAPI)
        self.catalogsAPI = apis.CatalogsAPI(self.mainAPI)

    def assignTermToColumnWKC(self, parentCategory, term, catalog, schemaAssetPath, column):
        """Assign a glossary term to a column of a catalog asset."""
        catalogId = self.catalogsAPI.getCatalogIdByName(catalog)
        termInfo = self.mainAPI.search(f'metadata.name.keyword:{term} AND categories.primary_category_name.keyword:{parentCategory} AND metadata.artifact_type:glossary_term')
        # BUGFIX: search() answers {'size', 'rows', ...}; the term id and name
        # live inside the first row, not at the top level of the result.
        termRow = termInfo['rows'][0]
        termId = termRow['entity']['artifacts']['artifact_id']
        termName = termRow['metadata']['name']
        assetId = self.assetsAPI.getAssetId(catalog, schemaAssetPath)

        logging.info(f'termid:{termId}')
        payload = [{
            'op': 'add',
            'path': f'/{column}',
            'value': {'column_terms': [{
                'term_id': termId,
                'term_display_name': termName
            }]},
            'attribute': 'column_info'
        }]
        self.mainAPI._PATCH(f'/v2/assets/{assetId}/attributes/column_info?catalog_id={catalogId}', payload)

    def assignTermsToColumnsWKC(self, csvTermSourceFile):
        """Assign terms to asset columns as listed in a CSV file.
        Expects the columns: Parent Category, Term, Catalog,
        Schema/Asset Path, Column. Returns the parsed CSV rows."""
        with open(csvTermSourceFile, newline='') as csvFile:
            reader = csv.DictReader(csvFile, delimiter=',')
            list_of_dicts = list(reader)
        for row in list_of_dicts:
            self.assignTermToColumnWKC(row['Parent Category'], row['Term'], row['Catalog'], row['Schema/Asset Path'], row['Column'])
        return list_of_dicts


# ---- /apis/usecases/categoryTrees.py ----
import apis
from utilities import subAPIs

class CategoryTreeAPI(subAPIs.SubAPI):
    """This class represents all functions related to category tree operations"""

    def __init__(self, mainAPI=None):
        super().__init__(mainAPI)
        self.categoryAPI = apis.endpoints.CategoriesAPI(self.mainAPI)

    def deleteCategoryTree(self, guid):
        """Delete the category guid and, recursively, all of its subcategories."""
        res = self.categoryAPI.getListOfCategoriesForParentById(guid)
        if res["size"] > 0:
            for cat in res["rows"]:
                self.deleteCategoryTree(cat["artifact_id"])
        # Invoke deletion of the category itself (children are gone by now)
        self.categoryAPI.deleteCategory(guid)
23 | The function iterates over all subcategories and extract the Terms""" 24 | if termsJSON is None: 25 | termsJSON = [] 26 | res = self.categoryAPI.getListOfCategoriesForParentById(guid) 27 | if res["size"] > 0: 28 | for cat in res["rows"]: 29 | catId = cat["artifact_id"] 30 | self.selectTermsCategoryTree(catId, termsJSON) 31 | # select all the stuff 32 | termsJSON.append(self.mainAPI.search(f'categories.primary_category_id:{guid} AND metadata.artifact_type:glossary_term')) 33 | return termsJSON 34 | -------------------------------------------------------------------------------- /apis/usecases/itemWithWorkflow.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/CPDemoFramework/fb7bb84f4dec395cf023a633a0c1c0b4d92c2d3a/apis/usecases/itemWithWorkflow.py -------------------------------------------------------------------------------- /curl/assets/getasset.http: -------------------------------------------------------------------------------- 1 | curl -k -X GET https://{hostname}/v2/assets/{asset_id}?catalog_id={catalog_id} -H 'cache-control: no-cache' -H 'content-type: application/json' -H 'Authorization: Bearer {TOKEN}' -------------------------------------------------------------------------------- /curl/authorize.http: -------------------------------------------------------------------------------- 1 | curl -k -X POST https://zen-gov-cpd-zen-gov.apps.ocp45.tec.uk.ibm.com/icp4d-api/v1/authorize -H 'cache-control: no-cache' -H 'content-type: application/json' -d '{"username":"admin","password":"password"}' -------------------------------------------------------------------------------- /curl/catalogs/getcatalogs.http: -------------------------------------------------------------------------------- 1 | curl -k -X GET https://zen-gov-cpd-zen-gov.apps.ocp45.tec.uk.ibm.com/v2/catalogs -H 'cache-control: no-cache' -H 'content-type: application/json' -H 'Authorization: Bearer 
eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImFkbWluIiwicm9sZSI6IkFkbWluIiwicGVybWlzc2lvbnMiOlsiYWNjZXNzX2FkdmFuY2VkX2dvdmVybmFuY2VfY2FwYWJpbGl0aWVzIiwiYWNjZXNzX2FkdmFuY2VkX21hcHBpbmdfY2FwYWJpbGl0aWVzIiwic2lnbl9pbl9vbmx5IiwiYWNjZXNzX2NhdGFsb2ciLCJ2aWV3X2dvdmVybmFuY2VfYXJ0aWZhY3RzIiwiYWRtaW5pc3RyYXRvciIsIm1hbmFnZV9xdWFsaXR5IiwiY2FuX3Byb3Zpc2lvbiIsIm1hbmFnZV9kaXNjb3ZlcnkiLCJtYW5hZ2VfbWV0YWRhdGFfaW1wb3J0IiwidmlydHVhbGl6ZV90cmFuc2Zvcm0iLCJtYW5hZ2VfY2F0YWxvZyIsImF1dGhvcl9nb3Zlcm5hbmNlX2FydGlmYWN0cyIsIm1hbmFnZV9jYXRlZ29yaWVzIiwibWFuYWdlX2luZm9ybWF0aW9uX2Fzc2V0cyIsIm1hbmFnZV9nb3Zlcm5hbmNlX3dvcmtmbG93Iiwidmlld19xdWFsaXR5IiwiYWNjZXNzX2luZm9ybWF0aW9uX2Fzc2V0cyJdLCJncm91cHMiOlsxMDAwMF0sInN1YiI6ImFkbWluIiwiaXNzIjoiS05PWFNTTyIsImF1ZCI6IkRTWCIsInVpZCI6IjEwMDAzMzA5OTkiLCJhdXRoZW50aWNhdG9yIjoiZGVmYXVsdCIsImlhdCI6MTYxOTEyMTE1NywiZXhwIjoxNjE5MTY0MzIxfQ.kQ0tYsfE6oL8rnHmgDy0K-sK6RPCabz5GotMtBQOGtKPfH8C_1KOlsmF_H2Ir90U6VeFWNNgQWxK3jYX_Z_OdsUK0ejauDx4pmGM01XwP5iKo3iEX23abQF-GGs95H30XQpq1WuhQyiFZqGaAK12lYUX3MWBqxlzSv6_aFJsxfewb_UGwCcCkvewpOo6AkywhmRw2A6sV1sFazAy0luPg1iIseUMT9HWsCj0JJcfb7oQz4jTD0z6ukEFflMM2Lzyc2Q_oS-uNSSw6oVfOMDmDFxBy08iCmHYdtdXb7ZnNEnQ4e5BC28xs27WXNe5JUSsK-RA-TaL8ZmOWPw0C98Csw' -------------------------------------------------------------------------------- /curl/categories/deletecategory.http: -------------------------------------------------------------------------------- 1 | curl -k -X DELETE 'https://cpd-cp4d.apps.plnt.tec.uk.ibm.com/v3/categories/a70dd211-eed7-4aba-8908-47a971d9f67a' -H "accept: */*" -H "Content-Type: */*" -H 'Authorization: Bearer 
eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6ImMtWGdjTEhIRmtZdkIzczJHMjlmMkdDOUZzanZuSjhXVFRRQ1FjNmJCVXMifQ.eyJ1c2VybmFtZSI6ImFkbWluIiwicm9sZSI6IkFkbWluIiwicGVybWlzc2lvbnMiOlsiYWRtaW5pc3RyYXRvciIsImNhbl9wcm92aXNpb24iLCJtYW5hZ2VfY2F0YWxvZyIsImNyZWF0ZV9wcm9qZWN0IiwiY3JlYXRlX3NwYWNlIiwiYXV0aG9yX2dvdmVybmFuY2VfYXJ0aWZhY3RzIiwibWFuYWdlX2dvdmVybmFuY2Vfd29ya2Zsb3ciLCJ2aWV3X2dvdmVybmFuY2VfYXJ0aWZhY3RzIiwibWFuYWdlX2NhdGVnb3JpZXMiLCJtYW5hZ2VfcXVhbGl0eSIsIm1hbmFnZV9pbmZvcm1hdGlvbl9hc3NldHMiLCJtYW5hZ2VfZGlzY292ZXJ5IiwibWFuYWdlX21ldGFkYXRhX2ltcG9ydCIsImFjY2Vzc19jYXRhbG9nIiwidmlld19xdWFsaXR5IiwiYWNjZXNzX2FkdmFuY2VkX2dvdmVybmFuY2VfY2FwYWJpbGl0aWVzIiwiYWNjZXNzX2FkdmFuY2VkX21hcHBpbmdfY2FwYWJpbGl0aWVzIiwiYWNjZXNzX2luZm9ybWF0aW9uX2Fzc2V0cyJdLCJncm91cHMiOlsxMDAwMF0sInN1YiI6ImFkbWluIiwiaXNzIjoiS05PWFNTTyIsImF1ZCI6IkRTWCIsInVpZCI6IjEwMDAzMzA5OTkiLCJhdXRoZW50aWNhdG9yIjoiZGVmYXVsdCIsImRpc3BsYXlfbmFtZSI6ImFkbWluIiwiaWF0IjoxNjQzMDUxMzY0LCJleHAiOjE2NDMwOTQ1Mjh9.ff2AJ0X391yMM7ij7crFNMlyubmKzv6FM6-r2eTsElh7pCchNc9vJb9eTx5Up71U6r_vMfqMNvBL93vLKyd4VpJwlVjASxA6-TqGz4GkZgs_cFtxchTOifTSBbXOuGOl-c4n15IkRm-l15E1o_gl9p1FxWNpE7MabAkyVrbmAuE-PoS1iasyRY65JIT6_YJjkifB-Ew72oq4oLCU55SqVkc6vkiyQJa7lgfKEpyVRGZxz__x9NzRzcOVKhyuwSBSLt_UCxsR5My8rIu9Gq6ndGgsMxchz_GJ5q3kISnj06wK7o_Tm1xDG09_gtL9JxXjY2XCScFDgZ-lA_PJPZH6hw' -------------------------------------------------------------------------------- /curl/categories/reassign_collaborator.http: -------------------------------------------------------------------------------- 1 | curl -k -X POST "https://cpd-cp4d.apps.plnt.tec.uk.ibm.com/v3/categories/a70dd211-eed7-4aba-8908-47a971d9f67a/collaborators" -H "accept: */*" -H "Content-Type: */*" -H 'Authorization: Basic aWNwNGQtZGV2OnVBZGR4dDRheHJhMzg' -d '{"principal_id":"1000330999","role":"owner","user_type":"USER"}' -k -------------------------------------------------------------------------------- /curl/datarequests/deletedatarequest.http: 
-------------------------------------------------------------------------------- 1 | curl --location --request DELETE 'https://zen-cpd-zen.apps.10.99.101.91.nip.io/zen-data/v1/datarequest/{id}' --header 'Authorization: Bearer {TOKEN}' -------------------------------------------------------------------------------- /curl/referencedata/getrefdata.http: -------------------------------------------------------------------------------- 1 | curl -k -X GET 'https://zen-gov-cpd-zen-gov.apps.ocp45.tec.uk.ibm.com/v3/search?query=metadata.artifact_type:reference_data AND metadata.name.keyword:Geschlecht' -H 'cache-control: no-cache' -H 'content-type: application/json' -H 'Authorization: Bearer eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImFkbWluIiwicm9sZSI6IkFkbWluIiwicGVybWlzc2lvbnMiOlsiYWNjZXNzX2FkdmFuY2VkX2dvdmVybmFuY2VfY2FwYWJpbGl0aWVzIiwiYWNjZXNzX2FkdmFuY2VkX21hcHBpbmdfY2FwYWJpbGl0aWVzIiwic2lnbl9pbl9vbmx5IiwiYWNjZXNzX2NhdGFsb2ciLCJ2aWV3X2dvdmVybmFuY2VfYXJ0aWZhY3RzIiwiYWRtaW5pc3RyYXRvciIsIm1hbmFnZV9xdWFsaXR5IiwiY2FuX3Byb3Zpc2lvbiIsIm1hbmFnZV9kaXNjb3ZlcnkiLCJtYW5hZ2VfbWV0YWRhdGFfaW1wb3J0IiwidmlydHVhbGl6ZV90cmFuc2Zvcm0iLCJtYW5hZ2VfY2F0YWxvZyIsImF1dGhvcl9nb3Zlcm5hbmNlX2FydGlmYWN0cyIsIm1hbmFnZV9jYXRlZ29yaWVzIiwibWFuYWdlX2luZm9ybWF0aW9uX2Fzc2V0cyIsIm1hbmFnZV9nb3Zlcm5hbmNlX3dvcmtmbG93Iiwidmlld19xdWFsaXR5IiwiYWNjZXNzX2luZm9ybWF0aW9uX2Fzc2V0cyJdLCJncm91cHMiOlsxMDAwMF0sInN1YiI6ImFkbWluIiwiaXNzIjoiS05PWFNTTyIsImF1ZCI6IkRTWCIsInVpZCI6IjEwMDAzMzA5OTkiLCJhdXRoZW50aWNhdG9yIjoiZGVmYXVsdCIsImlhdCI6MTYzNTI3MzYyOCwiZXhwIjoxNjM1MzE2NzkyfQ.VqXcueR6cUlTe7Xu-cie6swk3JNSu8OgZVSrpc9uxEGE9E3oAgN-ArTv3xUFPscEbFla6l6hXPVsPu-HM8Ig1t-B5gIjA8KbmNWwUXUkXt_3M16Fp9E_cUTntyBI_d_fhO6OmQK-YULkw21I9da-n3Ijoy_uRMuuO9hSJh9Mt3Xtz7V5R_wYbc6EZN6i_U2CYvfj4hmyjaSKWhGYcs4Es0BIeIw8j1PppsMl_88RERDode6atjQnbz-CRMaiMRvj_ycwLd6pCrwGOuQvr4SbKUw39MyIAl7ncWCucoOWTwt9Hy07h3LJaFc4rPUhjKSh_HQEksRa5OZvuTYSkRcrxQ' -------------------------------------------------------------------------------- /curl/reindex.http: 
-------------------------------------------------------------------------------- 1 | https:///ibm/iis/common-utils/rest/v1/app/reindex?batchSize=300&solrBatchSize=200&threadCount=6 -------------------------------------------------------------------------------- /curl/resync.http: -------------------------------------------------------------------------------- 1 | curl -k -X GET "https:///v3/glossary_terms/admin/resync?artifact_type=all" -H "accept: application/json" -H "Authorization: bearer " -------------------------------------------------------------------------------- /curl/resynchdatraclasses.http: -------------------------------------------------------------------------------- 1 | curl -k -X GET "https:///v3/glossary_terms/admin/resync?artifact_type=DATACLASS" -H "accept: application/json" -H "Authorization: bearer " -------------------------------------------------------------------------------- /curl/users/createuser.http: -------------------------------------------------------------------------------- 1 | curl -k -X POST -H "Authorization: Bearer {token}" -H "cache-control: no-cache" -d "{\"user_name\":\"{username}\",\"password\":\"{password}\",\"displayName\":\"{display_name}\",\"user_roles\":\"{user_roles}\",\"email\":\"{email}\"}" "https://{cpd_cluster_host}/icp4d-api/v1/users" -------------------------------------------------------------------------------- /curl/users/getallusers.http: -------------------------------------------------------------------------------- 1 | curl -k -X GET -H "Authorization: Bearer {token}" -H "cache-control: no-cache" "https://{cpd_cluster_host}/icp4d-api/v1/users" -------------------------------------------------------------------------------- /curl/users/getuserinformation.http: -------------------------------------------------------------------------------- 1 | curl -k -X GET -H "Authorization: Bearer {token}" -H "cache-control: no-cache" "https://{cpd_cluster_host}/icp4d-api/v1/users/{user_name}" 
-------------------------------------------------------------------------------- /curl/workflows/getWorkflowTask.http: -------------------------------------------------------------------------------- 1 | curl -k "https://$WKC_HOST/v3/workflow_user_tasks?artifact_id=$ARTIFACT_ID&version_id=$VERSION_ID" -H "accept: application/json" -H "Authorization: Bearer $TOKEN" -------------------------------------------------------------------------------- /curl/workflows/updateWorkflowTask.http: -------------------------------------------------------------------------------- 1 | curl -k -X POST -H "Content-Type: application/json" -H "accept: application/json" -H "Authorization: Bearer $TOKEN" "https://$WKC_HOST/v3/workflow_user_tasks/$TASK_ID/actions" -d "{ \"action\": \"complete\", \"form_properties\": [ { \"id\": \"action\", \"value\": \"#publish\" } ]}" -------------------------------------------------------------------------------- /devRequirements.txt: -------------------------------------------------------------------------------- 1 | pdoc3==0.9.2 2 | pytest==6.2.2 -------------------------------------------------------------------------------- /htmlReference/apis/endpoints/projects.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | apis.endpoints.projects API documentation 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 |
20 |
21 |
22 |

Module apis.endpoints.projects

23 |
24 |
25 |
26 | 27 | Expand source code 28 | 29 |
from utilities import subAPIs
 30 | import logging
 31 | 
 32 | class ProjectsAPI(subAPIs.SubAPI):
 33 |   """This class represents all functions related to projects"""
 34 | 
 35 |   # GET functions
 36 |   def getListOfProjects(self):
 37 |     """Get a list of projects"""
 38 |     return self.mainAPI._getJSON('/v2/projects')
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |

Classes

49 |
50 |
51 | class ProjectsAPI 52 | (mainAPI=None) 53 |
54 |
55 |

This class represents all functions related to projects

56 |

Creating a new SubAPI

57 |

Args

58 |
59 |
mainAPI : main.MainAPI, optional
60 |
When a MainAPI is available, then it is used. If not, a new MainAPI is created and saved in self.mainAPI. Defaults to None.
61 |
62 |
63 | 64 | Expand source code 65 | 66 |
class ProjectsAPI(subAPIs.SubAPI):
 67 |   """This class represents all functions related to projects"""
 68 | 
 69 |   # GET functions
 70 |   def getListOfProjects(self):
 71 |     """Get a list of projects"""
 72 |     return self.mainAPI._getJSON('/v2/projects')
73 |
74 |

Ancestors

75 |
    76 |
  • utilities.subAPIs.SubAPI
  • 77 |
78 |

Methods

79 |
80 |
81 | def getListOfProjects(self) 82 |
83 |
84 |

Get a list of projects

85 |
86 | 87 | Expand source code 88 | 89 |
def getListOfProjects(self):
 90 |   """Get a list of projects"""
 91 |   return self.mainAPI._getJSON('/v2/projects')
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 | 122 |
123 | 126 | 127 | -------------------------------------------------------------------------------- /htmlReference/apis/endpoints/rules.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | apis.endpoints.rules API documentation 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 |
20 |
21 |
22 |

Module apis.endpoints.rules

23 |
24 |
25 |
26 | 27 | Expand source code 28 | 29 |
from utilities import subAPIs
 30 | 
 31 | class RulesAPI(subAPIs.SubAPI):
 32 |   """This class represents all functions related to data protection rules"""
 33 | 
 34 |   # GET functions
 35 |   def getListOfRules(self):
 36 |     """Get a list of rules"""
 37 |     return self.mainAPI._getJSON('/v3/enforcement/rules')
 38 | 
 39 |   # DELETE functions
 40 |   def deleteRule(self, rule_id):
 41 |     """Delete a given rule"""
 42 |     return self.mainAPI._DELETE(f'/v3/enforcement/rules/{rule_id}', statusCheck=204)
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |

Classes

53 |
54 |
55 | class RulesAPI 56 | (mainAPI=None) 57 |
58 |
59 |

This class represents all functions related to data protection rules

60 |

Creating a new SubAPI

61 |

Args

62 |
63 |
mainAPI : main.MainAPI, optional
64 |
When a MainAPI is available, then it is used. If not, a new MainAPI is created and saved in self.mainAPI. Defaults to None.
65 |
66 |
67 | 68 | Expand source code 69 | 70 |
class RulesAPI(subAPIs.SubAPI):
 71 |   """This class represents all functions related to data protection rules"""
 72 | 
 73 |   # GET functions
 74 |   def getListOfRules(self):
 75 |     """Get a list of rules"""
 76 |     return self.mainAPI._getJSON('/v3/enforcement/rules')
 77 | 
 78 |   # DELETE functions
 79 |   def deleteRule(self, rule_id):
 80 |     """Delete a given rule"""
 81 |     return self.mainAPI._DELETE(f'/v3/enforcement/rules/{rule_id}', statusCheck=204)
82 |
83 |

Ancestors

84 |
    85 |
  • utilities.subAPIs.SubAPI
  • 86 |
87 |

Methods

88 |
89 |
90 | def deleteRule(self, rule_id) 91 |
92 |
93 |

Delete a given rule

94 |
95 | 96 | Expand source code 97 | 98 |
def deleteRule(self, rule_id):
 99 |   """Delete a given rule"""
100 |   return self.mainAPI._DELETE(f'/v3/enforcement/rules/{rule_id}', statusCheck=204)
101 |
102 |
103 |
104 | def getListOfRules(self) 105 |
106 |
107 |

Get a list of rules

108 |
109 | 110 | Expand source code 111 | 112 |
def getListOfRules(self):
113 |   """Get a list of rules"""
114 |   return self.mainAPI._getJSON('/v3/enforcement/rules')
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 | 146 |
147 | 150 | 151 | -------------------------------------------------------------------------------- /htmlReference/apis/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | apis API documentation 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 |
20 |
21 |
22 |

Package apis

23 |
24 |
25 |
26 | 27 | Expand source code 28 | 29 |
from apis import endpoints
30 | from apis import usecases
31 | from apis.main import MainAPI
32 |
33 |
34 |
35 |

Sub-modules

36 |
37 |
apis.endpoints
38 |
39 |
40 |
41 |
apis.main
42 |
43 |
44 |
45 |
apis.usecases
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 | 73 |
74 | 77 | 78 | -------------------------------------------------------------------------------- /htmlReference/apis/usecases/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | apis.usecases API documentation 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 |
20 |
21 |
22 |

Module apis.usecases

23 |
24 |
25 |
26 | 27 | Expand source code 28 | 29 |
from apis.usecases.assignTerms import AssignTermsAPI
30 | from apis.usecases.categoryTrees import CategoryTreeAPI
31 |
32 |
33 |
34 |

Sub-modules

35 |
36 |
apis.usecases.assignTerms
37 |
38 |
39 |
40 |
apis.usecases.categoryTrees
41 |
42 |
43 |
44 |
apis.usecases.itemWithWorkflow
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 | 77 |
78 | 81 | 82 | -------------------------------------------------------------------------------- /htmlReference/apis/usecases/itemWithWorkflow.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | apis.usecases.itemWithWorkflow API documentation 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 |
20 |
21 |
22 |

Module apis.usecases.itemWithWorkflow

23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 | 48 |
49 | 52 | 53 | -------------------------------------------------------------------------------- /notebooks/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/CPDemoFramework/fb7bb84f4dec395cf023a633a0c1c0b4d92c2d3a/notebooks/.DS_Store -------------------------------------------------------------------------------- /notebooks/endpoints/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/CPDemoFramework/fb7bb84f4dec395cf023a633a0c1c0b4d92c2d3a/notebooks/endpoints/.DS_Store -------------------------------------------------------------------------------- /notebooks/endpoints/datarequest.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stderr", 10 | "output_type": "stream", 11 | "text": [ 12 | "2022-03-12 08:57:04,755 INFO URL: https://datafabric.ibmcloudpack.com:12864\n", 13 | "2022-03-12 08:57:05,333 INFO HEADERS: {'cache-control': 'no-cache', 'Authorization': 'Bearer 
eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IjR2TzJpeGJ1UExCZ2dtZEhpNXpSOVFOQ1gzUldNcFY4Q1hqUGZFaHdyNDAifQ.eyJ1c2VybmFtZSI6ImFkbWluIiwicm9sZSI6IkFkbWluIiwicGVybWlzc2lvbnMiOlsiYWRtaW5pc3RyYXRvciIsImNhbl9wcm92aXNpb24iLCJtYW5hZ2VfY2F0YWxvZyIsImNyZWF0ZV9wcm9qZWN0IiwiY3JlYXRlX3NwYWNlIiwiYXV0aG9yX2dvdmVybmFuY2VfYXJ0aWZhY3RzIiwibWFuYWdlX2dvdmVybmFuY2Vfd29ya2Zsb3ciLCJ2aWV3X2dvdmVybmFuY2VfYXJ0aWZhY3RzIiwibWFuYWdlX2NhdGVnb3JpZXMiLCJtYW5hZ2VfZ2xvc3NhcnkiLCJtYW5hZ2VfcXVhbGl0eSIsIm1hbmFnZV9pbmZvcm1hdGlvbl9hc3NldHMiLCJtYW5hZ2VfZGlzY292ZXJ5IiwibWFuYWdlX21ldGFkYXRhX2ltcG9ydCIsImFjY2Vzc19jYXRhbG9nIiwidmlld19xdWFsaXR5IiwiYWNjZXNzX2luZm9ybWF0aW9uX2Fzc2V0cyJdLCJncm91cHMiOlsxMDAwMF0sInN1YiI6ImFkbWluIiwiaXNzIjoiS05PWFNTTyIsImF1ZCI6IkRTWCIsInVpZCI6IjEwMDAzMzA5OTkiLCJhdXRoZW50aWNhdG9yIjoiZGVmYXVsdCIsImRpc3BsYXlfbmFtZSI6ImFkbWluIiwiaWF0IjoxNjQ3MTA0MjI1LCJleHAiOjE2NDcxNDczODl9.FPRBBL1g86rFhKx05sQlfjeRvQ5YRpA8j0aF-2z5wMvo1QOs5lWeJy2PdFpxCnuhw5HfFgD0lTOs9Xvh9PElKZdM13iSwLNPNff-4U8UTOZH8ntL4XOyBSHLJrHD96NvxSacGslrUz3SfJYFpX602-CPei9-3r1Si13wmuVSmTp5cCLCCZu_MhohIwDfg1-VsI6roSjbzXyQwwrAnqy9mEXV0xTPfRNj29fCzZsfuL9MND9IFc7oknaHUf8XKS4u4aoT5PQYouGMN_xAzfbzGsjgDISXZ_viCg3mnIjes63_ZQg_nuNun-TwEB-isVzArVllYccZBGTWUJCEPZYMPA'}\n" 14 | ] 15 | } 16 | ], 17 | "source": [ 18 | "%load_ext autoreload\n", 19 | "%autoreload 2\n", 20 | "import logging\n", 21 | "logging.basicConfig(format=\"%(asctime)s %(levelname)-7s %(message)s\", level=logging.INFO)\n", 22 | "import pandas\n", 23 | "from pprint import pprint\n", 24 | "import sys\n", 25 | "sys.path.append('../..')\n", 26 | "import apis\n", 27 | "api = apis.endpoints.DataRequestAPI()" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 7, 33 | "metadata": {}, 34 | "outputs": [ 35 | { 36 | "name": "stderr", 37 | "output_type": "stream", 38 | "text": [ 39 | "2022-03-12 08:59:31,690 INFO Loading Payload: /Users/drangar@us.ibm.com/Documents/GitHub/wkc-api/notebooks/endpoints/../../payloads/datarequest/postnewrequest.json\n", 40 | "2022-03-12 
08:59:31,699 INFO POST /zen-data-ui/v1/datarequest\n", 41 | "2022-03-12 08:59:31,699 INFO PAYLOAD: {'Abstract': 'Request description', 'AssignedTo': 'Unassigned', 'DataIntent': 'Request intent', 'DataObj': {'DataFields': [], 'DataSets': [], 'DataSource': {}}, 'Deadline': 'High', 'IssueLink': 'Request http link', 'Project': '', 'RequestedBy': 'admin', 'State': 'New', 'Title': 'Request title', 'project_id': ''}\n" 42 | ] 43 | }, 44 | { 45 | "data": { 46 | "text/plain": [ 47 | "{'_messageCode_': '200', 'message': 'Request For Data Successful '}" 48 | ] 49 | }, 50 | "execution_count": 7, 51 | "metadata": {}, 52 | "output_type": "execute_result" 53 | } 54 | ], 55 | "source": [ 56 | "api.createDataRequest()" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": 8, 62 | "metadata": {}, 63 | "outputs": [ 64 | { 65 | "name": "stderr", 66 | "output_type": "stream", 67 | "text": [ 68 | "2022-03-12 08:59:37,208 INFO GET /zen-data-ui/v1/datarequest\n" 69 | ] 70 | }, 71 | { 72 | "data": { 73 | "text/plain": [ 74 | "{'_messageCode_': 'success',\n", 75 | " 'count': 1,\n", 76 | " 'message': 'Success in getting requests',\n", 77 | " 'requestObj': [{'Id': 4,\n", 78 | " 'UID': '1000330999',\n", 79 | " 'Title': 'Request title',\n", 80 | " 'State': 'New',\n", 81 | " 'AssignedTo': 'Unassigned',\n", 82 | " 'RequestedBy': 'admin',\n", 83 | " 'Project': '',\n", 84 | " 'ProjectID': '',\n", 85 | " 'LastModifiedTime': '2022-03-12T16:59:31.81246529Z',\n", 86 | " 'Abstract': 'Request description',\n", 87 | " 'DataIntent': 'Request intent',\n", 88 | " 'IssueLink': 'Request http link',\n", 89 | " 'Deadline': 'High',\n", 90 | " 'FulfilledBy': '',\n", 91 | " 'DataObj': {'DataSource': {'Shared': False,\n", 92 | " 'User': '',\n", 93 | " 'Password': '',\n", 94 | " 'Name': '',\n", 95 | " 'Description': '',\n", 96 | " 'DataSourcetype': '',\n", 97 | " 'DatabaseType': '',\n", 98 | " 'Server': '',\n", 99 | " 'Host': '',\n", 100 | " 'Port': 0,\n", 101 | " 'JdbcHost': '',\n", 102 | " 
'JdbcPort': 0,\n", 103 | " 'JdbcDatabase': '',\n", 104 | " 'DataSourceTypeID': '',\n", 105 | " 'URL': '',\n", 106 | " 'WebHDFSURL': '',\n", 107 | " 'WebHCatURL': '',\n", 108 | " 'LivyURL': '',\n", 109 | " 'DriverClassName': ''},\n", 110 | " 'DataSets': None,\n", 111 | " 'DataFields': None}}]}" 112 | ] 113 | }, 114 | "execution_count": 8, 115 | "metadata": {}, 116 | "output_type": "execute_result" 117 | } 118 | ], 119 | "source": [ 120 | "# Get all data requests unconditionally\n", 121 | "api.getDataRequests()" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": 3, 127 | "metadata": {}, 128 | "outputs": [ 129 | { 130 | "name": "stderr", 131 | "output_type": "stream", 132 | "text": [ 133 | "2022-03-12 08:12:35,524 INFO GET /zen-data-ui/v1/datarequest?SelectBy=State&Value=Completed\n" 134 | ] 135 | }, 136 | { 137 | "data": { 138 | "text/plain": [ 139 | "{'_messageCode_': 'Success', 'count': 0, 'requestObj': []}" 140 | ] 141 | }, 142 | "execution_count": 3, 143 | "metadata": {}, 144 | "output_type": "execute_result" 145 | } 146 | ], 147 | "source": [ 148 | "# Get all data requests that have Test in their Title\n", 149 | "api.getDataRequests('State','Completed')" 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": 4, 155 | "metadata": {}, 156 | "outputs": [ 157 | { 158 | "name": "stderr", 159 | "output_type": "stream", 160 | "text": [ 161 | "2022-03-12 08:12:40,906 INFO GET /zen-data-ui/v1/datarequest?SelectBy=State&Value=Closed\n" 162 | ] 163 | }, 164 | { 165 | "data": { 166 | "text/plain": [ 167 | "{'_messageCode_': 'Success', 'count': 0, 'requestObj': []}" 168 | ] 169 | }, 170 | "execution_count": 4, 171 | "metadata": {}, 172 | "output_type": "execute_result" 173 | } 174 | ], 175 | "source": [ 176 | "# Get all data requests that are closed\n", 177 | "api.getDataRequests('State','Closed')" 178 | ] 179 | }, 180 | { 181 | "cell_type": "code", 182 | "execution_count": 5, 183 | "metadata": {}, 184 | "outputs": [ 185 | { 
186 | "name": "stderr", 187 | "output_type": "stream", 188 | "text": [ 189 | "2022-03-12 08:12:44,004 INFO GET /zen-data-ui/v1/datarequest?SelectBy=AssignedTo&Value=Unassigned\n" 190 | ] 191 | }, 192 | { 193 | "data": { 194 | "text/plain": [ 195 | "{'_messageCode_': 'Success', 'count': 0, 'requestObj': []}" 196 | ] 197 | }, 198 | "execution_count": 5, 199 | "metadata": {}, 200 | "output_type": "execute_result" 201 | } 202 | ], 203 | "source": [ 204 | "# get all data requests that are Unassigned\n", 205 | "api.getDataRequests('AssignedTo','Unassigned')" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": 6, 211 | "metadata": {}, 212 | "outputs": [ 213 | { 214 | "name": "stderr", 215 | "output_type": "stream", 216 | "text": [ 217 | "2022-03-12 08:12:51,868 INFO GET /zen-data-ui/v1/datarequest?SelectBy=AssignedTo&Value=Unassigned\n" 218 | ] 219 | } 220 | ], 221 | "source": [ 222 | "# Delete all data requests that a Unassigned\n", 223 | "api.deleteAllDataRequestsByUser(\"Unassigned\")" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": 4, 229 | "metadata": {}, 230 | "outputs": [ 231 | { 232 | "name": "stderr", 233 | "output_type": "stream", 234 | "text": [ 235 | "2022-03-12 08:57:34,091 INFO GET /zen-data-ui/v1/datarequest?SelectBy=State&Value=New\n", 236 | "2022-03-12 08:57:34,161 INFO request name\n", 237 | "2022-03-12 08:57:34,169 INFO DELETE /zen-data-ui/v1/datarequest/1\n" 238 | ] 239 | } 240 | ], 241 | "source": [ 242 | "# Delete all data requests that are in Status Completed\n", 243 | "api.deleteAllDataRequestsByState(\"New\")" 244 | ] 245 | }, 246 | { 247 | "cell_type": "code", 248 | "execution_count": 9, 249 | "metadata": {}, 250 | "outputs": [ 251 | { 252 | "name": "stderr", 253 | "output_type": "stream", 254 | "text": [ 255 | "2022-03-12 08:59:48,775 INFO DELETE /zen-data-ui/v1/datarequest/4\n" 256 | ] 257 | }, 258 | { 259 | "data": { 260 | "text/plain": [ 261 | "{'_messageCode_': '200', 'message': 'Request 
to Delete Successful '}" 262 | ] 263 | }, 264 | "execution_count": 9, 265 | "metadata": {}, 266 | "output_type": "execute_result" 267 | } 268 | ], 269 | "source": [ 270 | "# Delete data request with ID 7\n", 271 | "api.deleteDataRequest('4')" 272 | ] 273 | }, 274 | { 275 | "cell_type": "code", 276 | "execution_count": null, 277 | "metadata": {}, 278 | "outputs": [], 279 | "source": [] 280 | } 281 | ], 282 | "metadata": { 283 | "interpreter": { 284 | "hash": "ac2eaa0ea0ebeafcc7822e65e46aa9d4f966f30b695406963e145ea4a91cd4fc" 285 | }, 286 | "kernelspec": { 287 | "display_name": "Python 3.9.5 64-bit ('python@3.9')", 288 | "name": "python3" 289 | }, 290 | "language_info": { 291 | "codemirror_mode": { 292 | "name": "ipython", 293 | "version": 3 294 | }, 295 | "file_extension": ".py", 296 | "mimetype": "text/x-python", 297 | "name": "python", 298 | "nbconvert_exporter": "python", 299 | "pygments_lexer": "ipython3", 300 | "version": "3.9.7" 301 | }, 302 | "metadata": { 303 | "interpreter": { 304 | "hash": "fad7527b2d45399d92cf1691744c463ac1fe118d029136e3c300f68b4fdf27d6" 305 | } 306 | }, 307 | "orig_nbformat": 2 308 | }, 309 | "nbformat": 4, 310 | "nbformat_minor": 2 311 | } 312 | -------------------------------------------------------------------------------- /notebooks/endpoints/rules.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stderr", 10 | "output_type": "stream", 11 | "text": [ 12 | "2022-03-14 13:22:01,031 INFO URL: https://datafabric.ibmcloudpack.com:12864\n", 13 | "2022-03-14 13:22:01,514 INFO HEADERS: {'cache-control': 'no-cache', 'Authorization': 'Bearer 
eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IjR2TzJpeGJ1UExCZ2dtZEhpNXpSOVFOQ1gzUldNcFY4Q1hqUGZFaHdyNDAifQ.eyJ1c2VybmFtZSI6ImFkbWluIiwicm9sZSI6IkFkbWluIiwicGVybWlzc2lvbnMiOlsiYWRtaW5pc3RyYXRvciIsImNhbl9wcm92aXNpb24iLCJtYW5hZ2VfY2F0YWxvZyIsImNyZWF0ZV9wcm9qZWN0IiwiY3JlYXRlX3NwYWNlIiwiYXV0aG9yX2dvdmVybmFuY2VfYXJ0aWZhY3RzIiwibWFuYWdlX2dvdmVybmFuY2Vfd29ya2Zsb3ciLCJ2aWV3X2dvdmVybmFuY2VfYXJ0aWZhY3RzIiwibWFuYWdlX2NhdGVnb3JpZXMiLCJtYW5hZ2VfZ2xvc3NhcnkiLCJtYW5hZ2VfcXVhbGl0eSIsIm1hbmFnZV9pbmZvcm1hdGlvbl9hc3NldHMiLCJtYW5hZ2VfZGlzY292ZXJ5IiwibWFuYWdlX21ldGFkYXRhX2ltcG9ydCIsImFjY2Vzc19jYXRhbG9nIiwidmlld19xdWFsaXR5IiwiYWNjZXNzX2luZm9ybWF0aW9uX2Fzc2V0cyJdLCJncm91cHMiOlsxMDAwMF0sInN1YiI6ImFkbWluIiwiaXNzIjoiS05PWFNTTyIsImF1ZCI6IkRTWCIsInVpZCI6IjEwMDAzMzA5OTkiLCJhdXRoZW50aWNhdG9yIjoiZGVmYXVsdCIsImRpc3BsYXlfbmFtZSI6ImFkbWluIiwiaWF0IjoxNjQ3Mjg5MzIxLCJleHAiOjE2NDczMzI0ODV9.IyWNJWg2gj3FvEG0_JUUpZvrH4gKr5qCkpduIGsFODEfGTbHCZdTRZBeWivx4Z7mzjLM2IZRuwitmVl4C1gK2EDAixTCeP84VMcBuc5SnaPn03BYRQvc89pyXtxO0Hyvlppx4ykfqV4IXUYnIDwhbvaaYOFy5ZJ0vhfk18zqG_tZ0yVh82oCmu2j2FJ4agd-e6fKhTktt-2W20UCDIaZd_A3VwPGZHWNJRh0w5k3k6kpMiNEMb3Qon3GTQ6vIc51q2YdcBiIVWKlGRtnZeEII5pT6Dqcu2lKzW0GMIgSFQ5JxnEAGuOtOnXcNVTT-Zdu9EMwjE6xG0SpmDqirSrNtQ'}\n" 14 | ] 15 | } 16 | ], 17 | "source": [ 18 | "%load_ext autoreload\n", 19 | "%autoreload 2\n", 20 | "import logging\n", 21 | "from pprint import pprint\n", 22 | "logging.basicConfig(format=\"%(asctime)s %(levelname)-7s %(message)s\", level=logging.INFO)\n", 23 | "import sys\n", 24 | "sys.path.append('../..')\n", 25 | "import apis\n", 26 | "api = apis.endpoints.RulesAPI()" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": 4, 32 | "metadata": {}, 33 | "outputs": [ 34 | { 35 | "name": "stderr", 36 | "output_type": "stream", 37 | "text": [ 38 | "2022-03-14 15:00:51,925 INFO GET /v3/enforcement/rules\n" 39 | ] 40 | }, 41 | { 42 | "name": "stdout", 43 | "output_type": "stream", 44 | "text": [ 45 | "{'count': 3,\n", 46 | " 'first': {'href': 
'/v3/enforcement/rules?offset=0'},\n", 47 | " 'last': {'href': '/v3/enforcement/rules?offset=0'},\n", 48 | " 'limit': 50,\n", 49 | " 'offset': 0,\n", 50 | " 'resources': [{'entity': {'action': {'name': 'Transform',\n", 51 | " 'subaction': {'name': 'redactColumns',\n", 52 | " 'parameters': [{'name': 'column_names',\n", 53 | " 'value': ['DOB']}]}},\n", 54 | " 'description': 'Obfuscate date of birth values',\n", 55 | " 'governance_type_id': 'Access',\n", 56 | " 'name': 'Protect DOB',\n", 57 | " 'state': 'active',\n", 58 | " 'trigger': ['$Asset.ColumnName',\n", 59 | " 'CONTAINS',\n", 60 | " ['#DOB']]},\n", 61 | " 'metadata': {'created_at': '2022-02-07T23:51:41.872Z',\n", 62 | " 'creator': '1000331003',\n", 63 | " 'guid': 'f054d10d-583e-4355-adf8-206cbf6024e4',\n", 64 | " 'modifier': '1000331003',\n", 65 | " 'updated_at': '2022-02-07T23:51:41.872Z'}},\n", 66 | " {'entity': {'action': {'name': 'Transform',\n", 67 | " 'subaction': {'name': 'redactDataClasses',\n", 68 | " 'parameters': [{'name': 'dataclass_ids',\n", 69 | " 'value': ['4e07dd80-9270-43f4-a804-f7c7b967870f_29c80e6d-885a-4fc5-94fa-5a4aea922812']}]}},\n", 70 | " 'description': 'Protect all credit card numbers '\n", 71 | " 'using the data privacy advanced '\n", 72 | " 'masking method',\n", 73 | " 'governance_type_id': 'Access',\n", 74 | " 'name': 'Protect credit card numbers',\n", 75 | " 'state': 'active',\n", 76 | " 'trigger': ['$Asset.InferredClassification',\n", 77 | " 'CONTAINS',\n", 78 | " ['$4e07dd80-9270-43f4-a804-f7c7b967870f_29c80e6d-885a-4fc5-94fa-5a4aea922812']]},\n", 79 | " 'metadata': {'created_at': '2022-02-07T23:50:34.264Z',\n", 80 | " 'creator': '1000331003',\n", 81 | " 'guid': '9ff55541-1915-49bb-a76a-8e186203444c',\n", 82 | " 'modifier': '1000331003',\n", 83 | " 'updated_at': '2022-02-07T23:50:34.264Z'}},\n", 84 | " {'entity': {'action': {'name': 'Deny'},\n", 85 | " 'description': 'rule description',\n", 86 | " 'governance_type_id': 'Access',\n", 87 | " 'name': 'demoprotectionrule',\n", 88 | " 
'state': 'active',\n", 89 | " 'trigger': ['$Asset.Tags', 'CONTAINS', ['#demo']]},\n", 90 | " 'metadata': {'created_at': '2022-03-14T22:00:44.530Z',\n", 91 | " 'creator': '1000330999',\n", 92 | " 'guid': 'e4cad3be-d320-4aa1-8868-cbb9a35835e1',\n", 93 | " 'modifier': '1000330999',\n", 94 | " 'updated_at': '2022-03-14T22:00:44.530Z'}}]}\n" 95 | ] 96 | } 97 | ], 98 | "source": [ 99 | "#Get Project ID\n", 100 | "res = api.getListOfRules()\n", 101 | "pprint(res)\n" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 5, 107 | "metadata": {}, 108 | "outputs": [ 109 | { 110 | "name": "stderr", 111 | "output_type": "stream", 112 | "text": [ 113 | "2022-03-14 15:01:50,962 INFO DELETE /v3/enforcement/rules/e4cad3be-d320-4aa1-8868-cbb9a35835e1\n" 114 | ] 115 | }, 116 | { 117 | "data": { 118 | "text/plain": [ 119 | "{}" 120 | ] 121 | }, 122 | "execution_count": 5, 123 | "metadata": {}, 124 | "output_type": "execute_result" 125 | } 126 | ], 127 | "source": [ 128 | "res= api.deleteRule('e4cad3be-d320-4aa1-8868-cbb9a35835e1')\n", 129 | "res" 130 | ] 131 | } 132 | ], 133 | "metadata": { 134 | "interpreter": { 135 | "hash": "ac2eaa0ea0ebeafcc7822e65e46aa9d4f966f30b695406963e145ea4a91cd4fc" 136 | }, 137 | "kernelspec": { 138 | "display_name": "Python 3.9.5 64-bit ('python@3.9')", 139 | "name": "python3" 140 | }, 141 | "language_info": { 142 | "codemirror_mode": { 143 | "name": "ipython", 144 | "version": 3 145 | }, 146 | "file_extension": ".py", 147 | "mimetype": "text/x-python", 148 | "name": "python", 149 | "nbconvert_exporter": "python", 150 | "pygments_lexer": "ipython3", 151 | "version": "3.9.7" 152 | }, 153 | "metadata": { 154 | "interpreter": { 155 | "hash": "fad7527b2d45399d92cf1691744c463ac1fe118d029136e3c300f68b4fdf27d6" 156 | } 157 | }, 158 | "orig_nbformat": 2 159 | }, 160 | "nbformat": 4, 161 | "nbformat_minor": 2 162 | } 163 | -------------------------------------------------------------------------------- 
/notebooks/sandbox/categories_export.csv: -------------------------------------------------------------------------------- 1 | artifact_id,name,steward_ids,tags,description 2 | 4f043e06-e9c6-4b33-a89e-98dbc93a1295,apicat,[],[],updated category long description for POT 3 | 55de4f02-0130-4b9f-a5ec-90567ff66a60,cat1,[],[], cat1 long 4 | da9686ae-6827-4fc0-b851-9185122a05d3,cat5,[],[], cat5 long 5 | d93d184d-6cea-458d-8525-64be89ba578d,cat-1234,[],[],updated fsdfss 6 | adfaa0b1-5162-43eb-a125-23c96fc09974,Property,[],[],updated NaN 7 | f12a4eb2-87cb-4e5d-9f51-4aba4715e20c,cat53,[],[], cat53 long 8 | dfa16ce2-bf03-4743-b3dd-4a85348be6dd,AHU,[],[],updated Air handling unit 9 | b7027f1e-dfbc-409e-b9e9-50bba87e512f,cat13,[],[], cat13 long 10 | d4efb010-d158-4e47-a63d-3750ee9665d2,apicat2,[],[],updated category long description for POT 11 | cb22ab06-23b0-4504-be8b-28aa27dee8c7,cat54,[],[], cat54 long 12 | e39ada11-8338-3704-90e3-681a71e7c839,[uncategorized],[],[],updated This is the system default if a standard category is not assigned. 13 | b066a536-c294-48f0-8311-47f9c71947f6,new-cat-2345,[],[],updated NaN 14 | eebd338d-dbd5-37e4-9e4a-1296de82963a,Locations,[],[],updated Locations Category (LC) is defined as parent for locations reference datasets. 
15 | 932f0cd7-7142-4eeb-aa45-d8de49f2ad65,cat12,[],[], cat12 long 16 | caff718c-1247-4c75-a41b-a75d94b0d739,Employee,[],[],updated Employee information 17 | 578c29ee-6173-4bd9-bd50-19c30b8729f5,cat14,[],[], cat14 long 18 | 88868daf-18e8-4189-883f-09556dae20dc,IBM,[],[],All business information relating to IBM Data and AI 19 | 14b1d5e8-6d2b-49b2-aa7a-8087f716ef58,cat52,[],[], cat52 long 20 | -------------------------------------------------------------------------------- /notebooks/sandbox/createUsers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 3 | import pandas 4 | import sys 5 | sys.path.append('../..') 6 | import apis 7 | userApi = apis.endpoints.UsersAPI() 8 | 9 | usersTable = pandas.read_csv('new_users.csv') 10 | userApi.updateUsersFromTable(usersTable) 11 | -------------------------------------------------------------------------------- /notebooks/sandbox/create_categories.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../..') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | # Create new categories from file 14 | categoriesTable = pandas.read_csv('new_categories.csv') 15 | for index, row in categoriesTable.iterrows(): 16 | categoriesAPI.createCategory(row['name'], row['short_description'], row['long_description']) -------------------------------------------------------------------------------- /notebooks/sandbox/create_terms.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import 
pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../..') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | # Create new terms from file 14 | termsTable = pandas.read_csv('new_terms.csv') 15 | for index, row in termsTable.iterrows(): 16 | termAbbreviations = [row['name'][0:2]] 17 | termsAPI.createTerm(termAbbreviations, row['name'], row['short_description'], row['long_description']) -------------------------------------------------------------------------------- /notebooks/sandbox/demo_categories.csv: -------------------------------------------------------------------------------- 1 | name,short_description,long_description 2 | -------------------------------------------------------------------------------- /notebooks/sandbox/demo_terms.csv: -------------------------------------------------------------------------------- 1 | name,short_description,long_description 2 | -------------------------------------------------------------------------------- /notebooks/sandbox/demo_users.csv: -------------------------------------------------------------------------------- 1 | username,displayName,email,password,user_roles 2 | -------------------------------------------------------------------------------- /notebooks/sandbox/exportProject.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # # Train, promote and deploy Boston house prices prediction model 5 | 6 | # This notebook contains steps and code to demonstrate support of AI Lifecycle features in Cloud Pak for Data. 7 | # It contains steps and code to work with [`cpdctl`](https://github.com/IBM/cpdctl) CLI tool available in IBM github repository. 
8 | # It also introduces commands for getting model and training data, persisting model, deploying model 9 | # and promoting it between deployment spaces. 10 | # 11 | # Some familiarity with Python is helpful. This notebook uses Python 3.7. 12 | # 13 | 14 | # In[1]: 15 | 16 | 17 | # import base64 18 | import json 19 | import os 20 | # import platform 21 | # import requests 22 | # import tarfile 23 | # import zipfile 24 | # from IPython.core.display import display, HTML 25 | from decouple import config 26 | from simple_term_menu import TerminalMenu 27 | import sys 28 | 29 | # ## CPD Credentials 30 | 31 | # In[2]: 32 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 33 | 34 | CPD_USER_NAME = config('WKCUSER') 35 | CPD_USER_PASSWORD = config('PASSWORD') 36 | CPD_URL = config('TZHOSTNAME') 37 | 38 | version_r = os.popen('cpdctl version').read() 39 | 40 | CPDCTL_VERSION = version_r 41 | CPDCTL_VERSION=CPDCTL_VERSION.strip() 42 | 43 | print("cpdctl version: {}".format(CPDCTL_VERSION)) 44 | 45 | 46 | # ### Add CPD profile and context configuration 47 | 48 | # Add "cpd_user" user to the `cpdctl` configuration 49 | 50 | # In[6]: 51 | 52 | os.system('cpdctl config user set cpd_user --username '+CPD_USER_NAME+ ' --password '+CPD_USER_PASSWORD) 53 | 54 | 55 | # Add "cpd" profile to the `cpdctl` configuration 56 | 57 | # In[7]: 58 | 59 | 60 | os.system('cpdctl config profile set cpd --url ' +CPD_URL+' --user cpd_user') 61 | 62 | 63 | # Add "cpd" context to the `cpdctl` configuration 64 | 65 | # In[8]: 66 | 67 | 68 | os.system('cpdctl config context set cpd --profile cpd --user cpd_user') 69 | 70 | 71 | # List available contexts 72 | 73 | # In[9]: 74 | 75 | os.system('cpdctl config context list') 76 | 77 | 78 | # In[10]: 79 | 80 | 81 | os.system('cpdctl config context use cpd') 82 | 83 | 84 | # List available projects in current context 85 | 86 | # In[11]: 87 | 88 | #####################Function to select an existing project##################### 89 | def 
existing_projects(options, service_info): 90 | #########Printing the existing project list menu in the terminal######### 91 | terminal_menu = TerminalMenu(options,title = "Select a project to export. Use Keyboard keys to select.", menu_cursor_style = ("fg_cyan", "bold"), menu_highlight_style =("bold",)) 92 | menu_entry_index = terminal_menu.show() 93 | #########Confirmation message######### 94 | print("The Project "+ service_info[menu_entry_index]['name']+" having Project ID "+service_info[menu_entry_index]['guid']+" will be Exported.\nDo you want to continue?(Y/N)") 95 | confirm=input() 96 | if(confirm=="Y" or confirm=="y"): 97 | return service_info[menu_entry_index]['guid'] # return guid of selected project 98 | elif(confirm=="N" or confirm=="n"): 99 | #########Printing the next step menu in the terminal######### 100 | optionsNo = ["Want to select a different existing project", "Exit from the Menu"] 101 | terminal_menu_No = TerminalMenu(optionsNo,title = "Select the next step. Use Keyboard keys to select.", menu_cursor_style = ("fg_cyan", "bold"), menu_highlight_style =("bold",)) 102 | menu_entry_index_No = terminal_menu_No.show() 103 | if(menu_entry_index_No==0): 104 | return existing_projects(options, service_info) # recursive call to select a project again 105 | else: 106 | return 0 # No project selected 107 | 108 | #####################End of function existing_projects##################### 109 | 110 | ## Access the selected project assets 111 | 112 | # Get cpdctl-demo project ID and show details 113 | 114 | # In[12]: 115 | options = [] 116 | service_info = {} 117 | data = json.loads(os.popen("cpdctl project list --output json").read()) 118 | 119 | entries=data['total_results'] 120 | # print(data) 121 | for i in range(0,entries): 122 | #########creating list of existing projects######### 123 | options.append(data['resources'][i]['entity']['name']) 124 | service_info[i] = { 125 | "name": data['resources'][i]['entity']['name'], 126 | "guid": 
data['resources'][i]['metadata']['guid'] 127 | } 128 | options[i]+=" ("+data['resources'][i]['metadata']['guid']+")" 129 | 130 | PROJECT_ID=existing_projects(options, service_info) #function call to list the existing projects and returning the selected project guid 131 | # print(PROJECT_ID) 132 | if(PROJECT_ID==0): 133 | print("####################################\n\tNo Project Selected!!\n Please Select a Project to Export\n####################################") 134 | sys.exit() 135 | 136 | # In[13]: 137 | 138 | PROJECT_ID=PROJECT_ID.strip() 139 | os.system(' cpdctl project get --project-id '+PROJECT_ID) 140 | 141 | 142 | # Get project details in JSON format and extract it's name 143 | 144 | # In[14]: 145 | 146 | 147 | os.system(' cpdctl project get --project-id '+PROJECT_ID+' --output json') 148 | 149 | 150 | # In[15]: 151 | 152 | 153 | result = os.popen('cpdctl project get --project-id '+PROJECT_ID+' --output json --jmes-query "entity.name" --raw-output').read() 154 | PROJECT_NAME = result 155 | print("{}project ID is: {}".format(PROJECT_NAME, PROJECT_ID)) 156 | 157 | EXPORT = { 158 | "all_assets": True 159 | } 160 | EXPORT_JSON = json.dumps(EXPORT) 161 | print(EXPORT_JSON) 162 | result = os.popen('cpdctl asset export start --project-id '+PROJECT_ID+ ' --assets \''+EXPORT_JSON+'\' --name demo-project-assets --output json --jmes-query "metadata.id" --raw-output').read() 163 | EXPORT_ID = result 164 | print('Export ID: {}'.format(EXPORT_ID)) 165 | EXPORT_ID=EXPORT_ID.strip() 166 | os.system('cpdctl asset export download --project-id '+PROJECT_ID+' --export-id '+EXPORT_ID+' --output-file project-assets.zip --progress') 167 | -------------------------------------------------------------------------------- /notebooks/sandbox/exportUsers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 3 | import pandas 4 | import sys 5 | 
sys.path.append('../..') 6 | import apis 7 | userApi = apis.endpoints.UsersAPI() 8 | 9 | usersJSON = userApi.getAllUsers() 10 | userApi.users2CSV(usersJSON, 'users_export.csv') 11 | usersTable = pandas.read_csv('users_export.csv') 12 | usersTable 13 | -------------------------------------------------------------------------------- /notebooks/sandbox/export_categories.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../..') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | # Get list of categories and export to csv 14 | categoriesJSON = mainAPI.search(f'metadata.artifact_type:category') 15 | categoriesAPI.categories2CSV(categoriesJSON,"export_categories.csv") 16 | categoriesTable = pandas.read_csv('export_categories.csv') 17 | categoriesTable -------------------------------------------------------------------------------- /notebooks/sandbox/export_terms.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../..') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | # Get list of terms 14 | termsJSON = mainAPI.search(f'metadata.artifact_type:glossary_term') 15 | termsAPI.terms2CSV(termsJSON, 'export_terms.csv') -------------------------------------------------------------------------------- /notebooks/sandbox/importProject.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # # Train, promote and deploy Boston house prices prediction model 5 | 6 | # This notebook contains steps and code to demonstrate support of AI Lifecycle features in Cloud Pak for Data. 7 | # It contains steps and code to work with [`cpdctl`](https://github.com/IBM/cpdctl) CLI tool available in IBM github repository. 8 | # It also introduces commands for getting model and training data, persisting model, deploying model 9 | # and promoting it between deployment spaces. 10 | # 11 | # Some familiarity with Python is helpful. This notebook uses Python 3.7. 12 | # 13 | 14 | # In[1]: 15 | 16 | 17 | # import base64 18 | import json 19 | import os 20 | # import platform 21 | # import requests 22 | # import tarfile 23 | # import zipfile 24 | # from IPython.core.display import display, HTML 25 | from decouple import config 26 | 27 | 28 | # ## CPD Credentials 29 | 30 | # In[2]: 31 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 32 | 33 | CPD_USER_NAME = config("WKCUSER") 34 | CPD_USER_PASSWORD = config("PASSWORD") 35 | CPD_URL = config("TZHOSTNAME") 36 | 37 | 38 | version_r = os.popen('cpdctl version').read() 39 | 40 | CPDCTL_VERSION = version_r 41 | CPDCTL_VERSION=CPDCTL_VERSION.split() 42 | 43 | print("cpdctl version: {}".format(CPDCTL_VERSION)) 44 | 45 | 46 | # ### Add CPD profile and context configuration 47 | 48 | # Add "cpd_user" user to the `cpdctl` configuration 49 | 50 | # In[6]: 51 | 52 | 53 | os.system(' cpdctl config user set cpd_user --username '+CPD_USER_NAME+' --password '+CPD_USER_PASSWORD) 54 | 55 | 56 | # Add "cpd" profile to the `cpdctl` configuration 57 | 58 | # In[7]: 59 | 60 | 61 | os.system(' cpdctl config profile set cpd --url '+CPD_URL+' --user cpd_user') 62 | 63 | 64 | # Add "cpd" context to the `cpdctl` configuration 65 | 66 | # In[8]: 67 | 68 | 69 | os.system(' cpdctl config context set cpd 
--profile cpd --user cpd_user') 70 | 71 | 72 | # List available contexts 73 | 74 | # In[9]: 75 | 76 | 77 | os.system(' cpdctl config context list') 78 | 79 | 80 | # In[10]: 81 | 82 | 83 | os.system(' cpdctl config context use cpd') 84 | 85 | 86 | RESTORED_PROJECT_NAME = 'cpdctl-demo-restored-project' 87 | JMES_QUERY = "resources[?entity.name == '{}'].metadata.guid".format(RESTORED_PROJECT_NAME) 88 | result = os.popen('cpdctl project list --output json --jmes-query "'+JMES_QUERY+'"').read() 89 | PROJECT_IDS = json.loads(result) 90 | if PROJECT_IDS: 91 | for project_id in PROJECT_IDS: 92 | print('Deleting project with ID: {}'.format(project_id)) 93 | os.system('cpdctl project delete --project-id "'+project_id+'"') 94 | 95 | 96 | os.system('cpdctl project list --output json') 97 | 98 | import uuid 99 | STORAGE = {"type": "assetfiles", "guid": str(uuid.uuid4())} 100 | STORAGE_JSON = json.dumps(STORAGE) 101 | 102 | result = os.popen('cpdctl project create --name '+RESTORED_PROJECT_NAME+' --output json --raw-output --storage \''+STORAGE_JSON+'\' --jmes-query \'location\'').read() 103 | RESTORED_PROJECT_ID = result.rsplit('/', 1)[-1] 104 | print("The new '{}' project ID is: {}".format(RESTORED_PROJECT_NAME, RESTORED_PROJECT_ID)) 105 | 106 | RESTORED_PROJECT_ID=RESTORED_PROJECT_ID.strip() 107 | result = os.popen('cpdctl asset import start --project-id '+RESTORED_PROJECT_ID+' --import-file project-assets.zip --output json --jmes-query "metadata.id" --raw-output').read() 108 | IMPORT_ID = result 109 | print("The new import ID is: {}".format(IMPORT_ID)) 110 | 111 | os.system('cpdctl asset import get --project-id '+RESTORED_PROJECT_ID+' --import-id '+IMPORT_ID) 112 | 113 | os.system('cpdctl asset search --query \'*:*\' --type-name asset --project-id '+RESTORED_PROJECT_ID) 114 | 115 | -------------------------------------------------------------------------------- /notebooks/sandbox/new_categories.csv: 
-------------------------------------------------------------------------------- 1 | name,short_description,long_description 2 | -------------------------------------------------------------------------------- /notebooks/sandbox/new_terms.csv: -------------------------------------------------------------------------------- 1 | name,short_description,long_description -------------------------------------------------------------------------------- /notebooks/sandbox/new_users.csv: -------------------------------------------------------------------------------- 1 | username,displayName,email,password,user_roles -------------------------------------------------------------------------------- /notebooks/sandbox/refDataTest.csv: -------------------------------------------------------------------------------- 1 | "code","value","description","parent","is_multi_mapped_to_Locations>>Physical Locations" 2 | "US","United States of America","United States of America",,"DAL" 3 | "UK","United Kingdom","United Kingdom",,"LON" 4 | "JP","Japan","Japan",,"TOK" 5 | "EU","European Union","European Union",, 6 | "AU","Australia","Australia",, 7 | "DE","Germany","Germany","EU","FRA" 8 | -------------------------------------------------------------------------------- /notebooks/sandbox/terms_export.csv: -------------------------------------------------------------------------------- 1 | artifact_id,description,name,steward_ids,tags 2 | -------------------------------------------------------------------------------- /notebooks/sandbox/updateUsers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 3 | import pandas 4 | import sys 5 | sys.path.append('../..') 6 | import apis 7 | userApi = apis.endpoints.UsersAPI() 8 | 9 | usersTable = pandas.read_csv('users_export.csv') 10 | userApi.updateUsersFromTable(usersTable) 11 | 
-------------------------------------------------------------------------------- /notebooks/sandbox/update_categories.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../..') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | # Bring categories back to object for bulk updates to catalog 14 | categoriesTable = pandas.read_csv('export_categories.csv') 15 | categoriesAPI.updateCategoriesFromTable(categoriesTable) 16 | -------------------------------------------------------------------------------- /notebooks/sandbox/update_terms.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../..') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | termTable = pandas.read_csv('export_terms.csv') 14 | termsAPI.updateTermsFromTable(termTable) -------------------------------------------------------------------------------- /notebooks/sandbox/users_export.csv: -------------------------------------------------------------------------------- 1 | displayName,email,username,uid,user_roles,permissions 2 | -------------------------------------------------------------------------------- /notebooks/usecases/deletecategory.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "language_info": { 4 | 
"codemirror_mode": { 5 | "name": "ipython", 6 | "version": 3 7 | }, 8 | "file_extension": ".py", 9 | "mimetype": "text/x-python", 10 | "name": "python", 11 | "nbconvert_exporter": "python", 12 | "pygments_lexer": "ipython3", 13 | "version": "3.9.5" 14 | }, 15 | "orig_nbformat": 2, 16 | "kernelspec": { 17 | "name": "python3", 18 | "display_name": "Python 3.9.5 64-bit ('python@3.9')" 19 | }, 20 | "metadata": { 21 | "interpreter": { 22 | "hash": "fad7527b2d45399d92cf1691744c463ac1fe118d029136e3c300f68b4fdf27d6" 23 | } 24 | }, 25 | "interpreter": { 26 | "hash": "ac2eaa0ea0ebeafcc7822e65e46aa9d4f966f30b695406963e145ea4a91cd4fc" 27 | } 28 | }, 29 | "nbformat": 4, 30 | "nbformat_minor": 2, 31 | "cells": [ 32 | { 33 | "cell_type": "code", 34 | "execution_count": 1, 35 | "metadata": {}, 36 | "outputs": [ 37 | { 38 | "output_type": "stream", 39 | "name": "stderr", 40 | "text": [ 41 | "2021-06-22 16:01:57,684 INFO URL: https://zen-gov-cpd-zen-gov.apps.ocp45.tec.uk.ibm.com\n", 42 | "2021-06-22 16:01:58,141 INFO HEADERS: {'cache-control': 'no-cache', 'Authorization': 'Bearer 
eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImFkbWluIiwicm9sZSI6IkFkbWluIiwicGVybWlzc2lvbnMiOlsiYWNjZXNzX2FkdmFuY2VkX2dvdmVybmFuY2VfY2FwYWJpbGl0aWVzIiwiYWNjZXNzX2FkdmFuY2VkX21hcHBpbmdfY2FwYWJpbGl0aWVzIiwic2lnbl9pbl9vbmx5IiwiYWNjZXNzX2NhdGFsb2ciLCJ2aWV3X2dvdmVybmFuY2VfYXJ0aWZhY3RzIiwiYWRtaW5pc3RyYXRvciIsIm1hbmFnZV9xdWFsaXR5IiwiY2FuX3Byb3Zpc2lvbiIsIm1hbmFnZV9kaXNjb3ZlcnkiLCJtYW5hZ2VfbWV0YWRhdGFfaW1wb3J0IiwidmlydHVhbGl6ZV90cmFuc2Zvcm0iLCJtYW5hZ2VfY2F0YWxvZyIsImF1dGhvcl9nb3Zlcm5hbmNlX2FydGlmYWN0cyIsIm1hbmFnZV9jYXRlZ29yaWVzIiwibWFuYWdlX2luZm9ybWF0aW9uX2Fzc2V0cyIsIm1hbmFnZV9nb3Zlcm5hbmNlX3dvcmtmbG93Iiwidmlld19xdWFsaXR5IiwiYWNjZXNzX2luZm9ybWF0aW9uX2Fzc2V0cyJdLCJncm91cHMiOlsxMDAwMF0sInN1YiI6ImFkbWluIiwiaXNzIjoiS05PWFNTTyIsImF1ZCI6IkRTWCIsInVpZCI6IjEwMDAzMzA5OTkiLCJhdXRoZW50aWNhdG9yIjoiZGVmYXVsdCIsImlhdCI6MTYyNDM3MDUxOCwiZXhwIjoxNjI0NDEzNjgyfQ.IWkNtz29OJP6dd-IoGJsF796-m1IFk4PyvKLAeWVxhf-Fk07YbnSas_fsOkhLTQ38AyyqJq1eF22itOhM1jEBO5bEwS92a05WQbDPyYXvYdKrnHSBDkzV1OBJ5Vg4BfklXT2dCouEsZfFZGScRtmhuWIdUI8Wljj9xDV74BcC97LXw0eZZJLjrtakjZX9XDe432ey630lVBzx2-ExLYhewtysOMQ-Oadw_Nu5s4M2ey50688i8g1WjcOy1CuW5-_ZncUQfY2AsDf7Ohv-oMLCFAsqcdA33V66WoK9F3WRw8ilJCfnrFjJPeifRXHMWyGNgQylXY6-0HJtnLi6YlJeA'}\n" 43 | ] 44 | } 45 | ], 46 | "source": [ 47 | "%load_ext autoreload\n", 48 | "%autoreload 2\n", 49 | "import logging\n", 50 | "logging.basicConfig(format=\"%(asctime)s %(levelname)-7s %(message)s\", level=logging.INFO)\n", 51 | "import sys\n", 52 | "sys.path.append('../..')\n", 53 | "import apis\n", 54 | "t_api = apis.endpoints.TermsAPI()" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": 2, 60 | "metadata": {}, 61 | "outputs": [ 62 | { 63 | "output_type": "stream", 64 | "name": "stderr", 65 | "text": [ 66 | "2021-06-22 16:01:58,360 INFO GET /v3/search?query=categories.primary_category_name.keyword:Joerg AND metadata.artifact_type:glossary_term\n" 67 | ] 68 | }, 69 | { 70 | "output_type": "execute_result", 71 | "data": { 72 | "text/plain": [ 73 | "{'size': 0, 
'rows': [], 'aggregations': {}}" 74 | ] 75 | }, 76 | "metadata": {}, 77 | "execution_count": 2 78 | } 79 | ], 80 | "source": [ 81 | "t_api.deleteAllTermsFromCategory('Joerg')" 82 | ] 83 | }, 84 | { 85 | "cell_type": "code", 86 | "execution_count": null, 87 | "metadata": {}, 88 | "outputs": [], 89 | "source": [] 90 | } 91 | ] 92 | } -------------------------------------------------------------------------------- /olm-utils/delete.sh: -------------------------------------------------------------------------------- 1 | oc delete project olm-utils -------------------------------------------------------------------------------- /olm-utils/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Pod 3 | metadata: 4 | name: olm-utils 5 | spec: 6 | containers: 7 | - name: olm-utils 8 | image: icr.io/cpopen/olm-utils:pre-release 9 | imagePullPolicy: Always 10 | ports: 11 | - containerPort: 3100 12 | securityContext: 13 | privileged: true 14 | serviceAccount: olm-utils-sa 15 | -------------------------------------------------------------------------------- /olm-utils/env.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | SERVER= 3 | 4 | # APITOKEN 5 | API_TOKEN= 6 | 7 | # OR 8 | 9 | # Username and password 10 | KUBEADMIN_USER= 11 | KUBEADMIN_PASS= 12 | 13 | # ICR KEY 14 | # Please enter the ICR KEY if the server value is pointing to IBM cloud ROKS cluster. 
15 | ICR_KEY= 16 | 17 | 18 | # SCRIPT 19 | #Pod login and auto login to oc cluster from runutils 20 | if [ -n "$KUBEADMIN_USER" ] && [ -n "$KUBEADMIN_PASS" ] 21 | then 22 | alias oclogin_auto="run_utils login-to-ocp -u ${KUBEADMIN_USER} -p ${KUBEADMIN_PASS} --server=${SERVER}"; 23 | alias pod_login="oc login -u ${KUBEADMIN_USER} -p ${KUBEADMIN_PASS} --server ${SERVER}"; 24 | else 25 | if [ -z "$API_TOKEN" ] 26 | then 27 | echo "Invalid api token, please check env.sh file"; 28 | else 29 | alias pod_login="oc login --token=${API_TOKEN} --server=${SERVER}"; 30 | alias oclogin_auto="run_utils login-to-ocp --token=${API_TOKEN} --server=${SERVER}"; 31 | fi 32 | fi 33 | # Pod login 34 | pod_login 35 | 36 | # Check if the last command executed properly 37 | if [ $? -eq 0 ]; then 38 | echo "Logged in Successfully"; 39 | else 40 | echo "Login Failed"; 41 | fi 42 | 43 | 44 | # Deploy olm_utils to cluster 45 | export PROJECT_NAME='olm-utils' 46 | oc create namespace ${PROJECT_NAME} 47 | oc project ${PROJECT_NAME} 48 | oc create serviceaccount olm-utils-sa 49 | oc adm policy add-cluster-role-to-user cluster-admin system:serviceaccount:olm-utils:olm-utils-sa 50 | oc apply -f deployment.yaml 51 | 52 | # # Setting the aliases 53 | alias run_utils="kubectl exec ${PROJECT_NAME} --"; 54 | alias oclogin="run_utils login-to-ocp"; 55 | alias get_pods="kubectl get pods -n $PROJECT_NAME"; 56 | # alias oclogin_auto="run_utils login-to-ocp --token=${API_TOKEN} --server=${SERVER}" 57 | alias get_preview="kubectl cp $PROJECT_NAME/$PROJECT_NAME:/tmp/work/preview.sh ${CHE_PROJECTS_ROOT}/techzone-demo/olm-utils/preview.sh" 58 | 59 | if [ -n "$ICR_KEY" ] 60 | then 61 | run_utils add_icr_cred_to_global_pull_secret.sh $ICR_KEY 62 | fi 63 | 64 | -------------------------------------------------------------------------------- /olm-utils/preview.sh: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/IBM/CPDemoFramework/fb7bb84f4dec395cf023a633a0c1c0b4d92c2d3a/olm-utils/preview.sh -------------------------------------------------------------------------------- /olm-utils/update-env-vars.py: -------------------------------------------------------------------------------- # Rewrite fixed value lines of env.sh from the four positional CLI arguments. 1 | import sys 2 | server = sys.argv[1]  # argv: server URL, API token, kubeadmin user, kubeadmin password (all required) 3 | api_token = sys.argv[2] 4 | kubeadmin_user = sys.argv[3] 5 | kubeadmin_password = sys.argv[4] 6 | lines = [] 7 | with open("env.sh") as f: 8 | lines = f.readlines() 9 | lines[1] = server + "\n"  # NOTE(review): overwrites env.sh line 2 wholesale — the 'SERVER=' prefix is dropped unless callers pass full 'KEY=value' strings; confirm intended argv format 10 | lines[4] = api_token + "\n" 11 | lines[9] = kubeadmin_user + "\n" 12 | lines[10] = kubeadmin_password + "\n"  # indices are hard-coded to env.sh's current layout (lines 2, 5, 10, 11); reordering env.sh silently corrupts it 13 | 14 | with open("env.sh","w") as f: 15 | f.writelines(lines) 16 | -------------------------------------------------------------------------------- /payloads/asset_relationship_types/createAssetRelationship.json: -------------------------------------------------------------------------------- 1 | { 2 | "end1": { 3 | "default_display_name": "string", 4 | "localized_display_name": {}, 5 | "relationship_name": "string", 6 | "containing_asset_type": "book", 7 | "container_id": "cb898dfa-9e0d-45d3-adc7-4de772d1d17f", 8 | "multiplicity": "ONE", 9 | "on_delete": "CASCADE", 10 | "container_type": "CATALOG", 11 | "qualifiedRelationshipName": "string" 12 | }, 13 | "end2": { 14 | "default_display_name": "string", 15 | "localized_display_name": {}, 16 | "relationship_name": "string", 17 | "containing_asset_type": "book", 18 | "container_id": "cb898dfa-9e0d-45d3-adc7-4de772d1d17f", 19 | "multiplicity": "ONE", 20 | "on_delete": "CASCADE", 21 | "container_type": "CATALOG", 22 | "qualifiedRelationshipName": "string" 23 | } 24 | } -------------------------------------------------------------------------------- /payloads/asset_types/createCustomAssetBook.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "name": "Getting Started with Assets 1", 4 | "description": 
"Describes how to create and use metadata for assets 2", 5 | "tags": ["getting", "started", "documentation"], 6 | "asset_type": "book", 7 | "origin_country": "us", 8 | "rov": { "mode": 0 } 9 | }, 10 | "entity": { 11 | "book": { 12 | "author": { 13 | "first_name": "Douglas", 14 | "last_name": "Adams" 15 | }, 16 | "price": 29.95 17 | }} 18 | } -------------------------------------------------------------------------------- /payloads/asset_types/createCustomAssetRestAPI.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "name": "Rest API 1", 4 | "description": "Represents Rest API 1", 5 | "tags": ["wkc"], 6 | "asset_type": "rest_api", 7 | "origin_country": "us", 8 | "rov": {"mode": 0} 9 | }, 10 | "entity": { "rest_api": { 11 | "product": { 12 | "short_name": "WKC", 13 | "long_name": "Watson Knowledge Catalog" 14 | }, 15 | "type": "GET", 16 | "url2": "https://{hostname}/v3/glossary_terms/api#/", 17 | "version": "1.0" 18 | }} 19 | } -------------------------------------------------------------------------------- /payloads/asset_types/createCustomAssetTypeBook.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "book", 3 | "description": "Book asset type", 4 | "fields": [{ 5 | "key": "author.last_name", 6 | "type": "string", 7 | "facet": false, 8 | "is_array": false, 9 | "search_path": "author.last_name", 10 | "is_searchable_across_types": true 11 | }], 12 | "properties": { "price": { 13 | "type": "number", 14 | "description": "Suggested retail price" 15 | }} 16 | } -------------------------------------------------------------------------------- /payloads/asset_types/createCustomAssetTypeRestAPI.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rest_api", 3 | "description": "REST API asset type", 4 | "fields": [ 5 | { 6 | "key": "product.short_name", 7 | "type": "string", 8 | "facet": false, 9 | 
"is_array": false, 10 | "search_path": "product.short_name", 11 | "is_searchable_across_types": true 12 | } 13 | ], 14 | "properties": { 15 | "type": { 16 | "type": "string", 17 | "description": "GET / POST / etc" 18 | }, 19 | "url2": { 20 | "type": "string", 21 | "description": "URL for the API endpoint", 22 | }, 23 | "version": { 24 | "type": "string", 25 | "description": "Version information", 26 | } 27 | } 28 | } -------------------------------------------------------------------------------- /payloads/assets/addtermtoasset.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "op": "replace", 3 | "path": "/list", 4 | "value": [ 5 | { 6 | "term_display_name":"Versicherte Person", 7 | "term_id":"5d2d5419-0032-4c64-90e2-ce68c6997bb5_e1d44823-b7aa-4a65-bfa3-d32e9180078a" 8 | } 9 | ], 10 | "attribute": "asset_terms" 11 | }] -------------------------------------------------------------------------------- /payloads/assets/createassetattribute.json: -------------------------------------------------------------------------------- 1 | { 2 | "description":"Custom attributes for data asset type", 3 | "fields": [ 4 | { 5 | "key":"alias", 6 | "type":"string", 7 | "facet":false, 8 | "is_array":false, 9 | "is_searchable_across_types":true 10 | }, 11 | { 12 | "key":"environment", 13 | "type":"string", 14 | "facet":false, 15 | "is_array":true, 16 | "is_searchable_across_types":true 17 | } 18 | 19 | ], 20 | "localized_metadata_attributes": { 21 | "name": { 22 | "default": "Custom Properties", 23 | "en": "Custom Properties" 24 | } 25 | }, 26 | "relationships":[ 27 | ], 28 | "properties": { 29 | "alias": { 30 | "type": "string", 31 | "max_length": 100 32 | }, 33 | "environment": { 34 | "type": "string", 35 | "max_length": 50, 36 | "is_array": true 37 | }, 38 | "review_date": { 39 | "type": "date", 40 | "description": "Review date for the data asset" 41 | } 42 | }, 43 | "decorates": [ 44 | { 45 | "asset_type_name": "data_asset" 46 | } 47 
| ] 48 | } -------------------------------------------------------------------------------- /payloads/assets/patchAssetDescription.json: -------------------------------------------------------------------------------- 1 | [{"op": "replace", "path": "/metadata/description", "value": "newDescription1"}] -------------------------------------------------------------------------------- /payloads/assets/patchAssetTags.json: -------------------------------------------------------------------------------- 1 | [{ "op": "add", "path": "/metadata/tags/-", "value": "tag1" }] -------------------------------------------------------------------------------- /payloads/categories/createCategory.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "apicat2", 3 | "short_description": "category short description for POT", 4 | "long_description": "category long description for POT" 5 | } -------------------------------------------------------------------------------- /payloads/datarequest/postnewrequest.json: -------------------------------------------------------------------------------- 1 | { 2 | "Abstract": "Request description", 3 | "AssignedTo": "Unassigned", 4 | "DataIntent": "Request intent", 5 | "DataObj": 6 | { 7 | 8 | "DataFields": [], 9 | "DataSets": [], 10 | "DataSource": {} 11 | }, 12 | "Deadline": "High", 13 | "IssueLink": "Request http link", 14 | "Project": "", 15 | "RequestedBy": "admin", 16 | "State": "New", 17 | "Title": "Request title", 18 | "project_id": "" 19 | 20 | } -------------------------------------------------------------------------------- /payloads/datarequest/updaterequest.json: -------------------------------------------------------------------------------- 1 | { 2 | 3 | "Abstract": "string", 4 | "AssignedTo": "string", 5 | "DataIntent": "string", 6 | "DataObj": 7 | 8 | { 9 | 10 | "DataFields": 11 | 12 | [], 13 | "DataSets": 14 | 15 | [], 16 | "DataSource": 17 | 18 | {} 19 | }, 20 | "Deadline": 
"string", 21 | "FulfilledBy": "string", 22 | "IssueLink": "string", 23 | "Project": "string", 24 | "RequestedBy": "string", 25 | "State": "string", 26 | "Title": "string", 27 | "project_id": "string" 28 | 29 | } -------------------------------------------------------------------------------- /payloads/glossary_terms/addTermTags.json: -------------------------------------------------------------------------------- 1 | { 2 | "revision": "1", 3 | "tags": [ "tag1", "tag2" ] 4 | } 5 | 6 | -------------------------------------------------------------------------------- /payloads/glossary_terms/createTerm.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "abbreviations": [ 4 | "Term" 5 | ], 6 | "long_description": "This is a new business term", 7 | "name": "Demo term", 8 | "short_description": "This is a new business term" 9 | } 10 | ] -------------------------------------------------------------------------------- /payloads/glossary_terms/createTermComplete.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "abbreviations": [ 4 | "string" 5 | ], 6 | "categories": [ 7 | { 8 | "description": "Description about the artifact relationship", 9 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 10 | } 11 | ], 12 | "classifications": [ 13 | { 14 | "description": "Description about the artifact relationship", 15 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 16 | } 17 | ], 18 | "custom_attributes": [ 19 | { 20 | "custom_attribute_definition_id": "string", 21 | "name": "string", 22 | "values": [ 23 | { 24 | "value": {} 25 | } 26 | ] 27 | } 28 | ], 29 | "custom_relationships": [ 30 | { 31 | "definition_id": "string", 32 | "relationships": [ 33 | { 34 | "description": "Description about the artifact relationship", 35 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 36 | } 37 | ], 38 | "reversed": true, 39 | "target_type": "data_class" 40 | } 41 | ], 42 | "data_classes": [ 43 | { 44 | 
"description": "Description about the artifact relationship", 45 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 46 | } 47 | ], 48 | "effective_end_date": "2021-05-05T12:19:42.539Z", 49 | "effective_start_date": "2021-05-05T12:19:42.539Z", 50 | "example": "string", 51 | "has_terms": [ 52 | { 53 | "description": "Description about the artifact relationship", 54 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 55 | } 56 | ], 57 | "has_type_terms": [ 58 | { 59 | "description": "Description about the artifact relationship", 60 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 61 | } 62 | ], 63 | "import_source_created_by": "string", 64 | "import_source_created_on": "2021-05-05T12:19:42.539Z", 65 | "import_source_usage": "string", 66 | "is_a_type_of_terms": [ 67 | { 68 | "description": "Description about the artifact relationship", 69 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 70 | } 71 | ], 72 | "is_of_terms": [ 73 | { 74 | "description": "Description about the artifact relationship", 75 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 76 | } 77 | ], 78 | "long_description": "string", 79 | "name": "Customer Number", 80 | "parent_category": { 81 | "description": "Description about the artifact relationship", 82 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 83 | }, 84 | "reference_copy": true, 85 | "related_terms": [ 86 | { 87 | "description": "Description about the artifact relationship", 88 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 89 | } 90 | ], 91 | "replaced_by_terms": [ 92 | { 93 | "description": "Description about the artifact relationship", 94 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 95 | } 96 | ], 97 | "replaces_terms": [ 98 | { 99 | "description": "Description about the artifact relationship", 100 | "id": "990e33f5-3108-4d45-a530-0307458362d4" 101 | } 102 | ], 103 | "short_description": "string", 104 | "steward_ids": [ 105 | "string" 106 | ], 107 | "synonym_terms": [ 108 | { 109 | "description": "Description about the artifact relationship", 110 | "id": 
"990e33f5-3108-4d45-a530-0307458362d4" 111 | } 112 | ], 113 | "tags": [ 114 | "string" 115 | ], 116 | "workflow_state": "string" 117 | } 118 | ] -------------------------------------------------------------------------------- /payloads/glossary_terms/patchGlossaryTerm.json: -------------------------------------------------------------------------------- 1 | { 2 | "revision": "1", 3 | "custom_attributes": [{ 4 | "custom_attribute_definition_id": "9d32bf51-4c2c-49b1-ab8f-cc82fe90785d", 5 | "name": "Enterprise Data ID", 6 | "values": [{"value": "A0059"}] 7 | }] 8 | } -------------------------------------------------------------------------------- /payloads/glossary_terms/updateTerm.json: -------------------------------------------------------------------------------- 1 | { 2 | "revision": "1", 3 | "short_description": "Datum an dem eine Person geboren wurde", 4 | "long_description": "Datum an dem eine Person geboren wurde" 5 | } 6 | -------------------------------------------------------------------------------- /payloads/governance_artifact_types/createcustomattribute.json: -------------------------------------------------------------------------------- 1 | { 2 | "artifact_types": ["glossary_term"], 3 | "default_value": "12345", 4 | "description": "description of custom attribute definition", 5 | "display_in_child_assets": true, 6 | "hidden": false, 7 | "max_length": 10, 8 | "maximum": 0, 9 | "min_length": 1, 10 | "minimum": 0, 11 | "multiple_values": true, 12 | "name": "My Custom Attribute", 13 | "placeholder": false, 14 | "read_only": false, 15 | "related_artifact_types": [ 16 | "string" 17 | ], 18 | "required": true, 19 | "reverse_name": "Term related to category", 20 | "type": "TEXT" 21 | } -------------------------------------------------------------------------------- /payloads/governance_artifact_types/patchcustom attribute.json: -------------------------------------------------------------------------------- 1 | {"custom_attributes":[ 2 | { 3 | 
"custom_attribute_definition_id":"fdc49eaf-9eb3-4c16-bfd2-cd2028ecabc8", 4 | "name":"txt_val", 5 | "values":[ 6 | 7 | { 8 | "value":"v2" 9 | } 10 | ] 11 | } 12 | ] 13 | , 14 | revision:0 15 | } -------------------------------------------------------------------------------- /payloads/jobs/startJobRun.json: -------------------------------------------------------------------------------- 1 | { 2 | "job_run": { 3 | "configuration": { 4 | "env_variables": ["key1=value1", "key2=value2"] 5 | }}} 6 | -------------------------------------------------------------------------------- /payloads/reference_data/createRefData.json: -------------------------------------------------------------------------------- 1 | { 2 | "long_description": "description of reference data set", 3 | "name": "Gender2", 4 | "type": "TEXT" 5 | } -------------------------------------------------------------------------------- /payloads/reference_data/createRefDataWithTime.json: -------------------------------------------------------------------------------- 1 | { 2 | "effective_end_date": "2021-04-13T18:59:06.782Z", 3 | "effective_start_date": "2021-04-13T18:59:06.782Z", 4 | "long_description": "description of reference data set", 5 | "name": "Gender2", 6 | "type": "TEXT" 7 | } -------------------------------------------------------------------------------- /payloads/reference_data/createRefDataWithValues.json: -------------------------------------------------------------------------------- 1 | { 2 | "long_description": "description of reference data set", 3 | "name": "Gender2", 4 | "rds_values": [ 5 | { 6 | "code": "M", 7 | "description": "Male", 8 | "value": "Male" 9 | }, 10 | { 11 | "code": "F", 12 | "description": "Female", 13 | "value": "Female" 14 | }, 15 | { 16 | "code": "U", 17 | "description": "Unknown", 18 | "value": "Unknown" 19 | }, 20 | { 21 | "code": "D", 22 | "description": "Diverse", 23 | "value": "Diverse" 24 | } 25 | ], 26 | "type": "TEXT" 27 | } 
-------------------------------------------------------------------------------- /payloads/reference_data/createRefDataWithValuesAndTime.json: -------------------------------------------------------------------------------- 1 | { 2 | "effective_start_date": "2021-04-13T18:59:06.782Z", 3 | "effective_end_date": "2021-04-13T18:59:06.782Z", 4 | "long_description": "description of reference data set", 5 | "name": "Gender2", 6 | "rds_values": [ 7 | { 8 | "code": "M", 9 | "description": "Male", 10 | "value": "Male" 11 | }, 12 | { 13 | "code": "F", 14 | "description": "Female", 15 | "value": "Female" 16 | }, 17 | { 18 | "code": "U", 19 | "description": "Unknown", 20 | "value": "Unknown" 21 | }, 22 | { 23 | "code": "D", 24 | "description": "Diverse", 25 | "value": "Diverse" 26 | } 27 | ], 28 | "type": "TEXT" 29 | } -------------------------------------------------------------------------------- /payloads/search/getTerm.json: -------------------------------------------------------------------------------- 1 | { 2 | "from": 0, 3 | "size": 10, 4 | "_source": ["metadata", "entity", "glossary_term", "categories"], 5 | "query": { "bool": { "must": [ 6 | {"term": {"metadata.name.keyword": "Adresse"}}, 7 | {"term": {"categories.primary_category_name.keyword": "Versicherte"}}, 8 | {"term": {"metadata.artifact_type": "glossary_term"}} 9 | ]}} 10 | } -------------------------------------------------------------------------------- /payloads/search/sampleQuery.json: -------------------------------------------------------------------------------- 1 | { 2 | "_source":["provider_type_id", "artifact_id", "metadata.name"], 3 | "query": { 4 | "query_string" : { "query" : "Adresse" } 5 | } 6 | } -------------------------------------------------------------------------------- /payloads/search/sampleQueryWithAggregation.json: -------------------------------------------------------------------------------- 1 | { 2 | "query": { 3 | "query_string" : { 4 | "query" : "Adresse" 5 | } 6 | }, 7 | 
"aggregations" : { 8 | "num_tags" : {"terms" : { "field" : "metadata.tags" }}, 9 | "num_terms" : {"terms" : { "field" : "metadata.terms" }} 10 | } 11 | } -------------------------------------------------------------------------------- /payloads/search/sampleQueryWithSort.json: -------------------------------------------------------------------------------- 1 | { 2 | "_source":["provider_type_id", "artifact_id", "metadata.name"], 3 | "query": { 4 | "query_string" : { 5 | "query" : "Adresse" 6 | } 7 | }, 8 | "sort": [ 9 | {"metadata.modified_on": {"order": "desc","unmapped_type": "date"}} 10 | ] 11 | } -------------------------------------------------------------------------------- /payloads/tk/createCustomAssetEKV.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "name": "EKVName", 4 | "description": "2021-Susi-01", 5 | "tags": ["le_ik:330551143","kt_ik:105177505"], 6 | "asset_type": "ekv", 7 | "origin_country": "de", 8 | "rov": {"mode": 0} 9 | }, 10 | "entity": { "ekv": { 11 | "ekvnr": "2021-Susi-01", 12 | "ekvdatum": "2021-08-21", 13 | "leistnr": "330551143", 14 | "kostnr": "105177505", 15 | "versnr": "3113461811", 16 | "hilfsnr": ["1050040"], 17 | "produkt": ["Pflegeirgendwas"], 18 | "arztnr": "987269119", 19 | "diagschl": ["F.45"], 20 | "diagorg": ["IrgendeineDiagnose"], 21 | "diagmined": ["Die geminte Diagnose"], 22 | "xml": "....."}, 23 | "asset_terms": {"list": [{"term_display_name": "Elektronischer Kostenvoranschlag", 24 | "term_id": "5d2d5419-0032-4c64-90e2-ce68c6997bb5_47011eed-f099-4dac-ac4a-62a0e6635cc1"}]}, 25 | "data_profile": {"data_classification_manual": [{"id": "f19bd342-c1ed-405b-89db-c10a8901d982", 26 | "name": "Personally Identifiable Information (GDPR relevant)", 27 | "global_id": "5d2d5419-0032-4c64-90e2-ce68c6997bb5_f19bd342-c1ed-405b-89db-c10a8901d982"}] 28 | }} 29 | } -------------------------------------------------------------------------------- 
/payloads/tk/createCustomAssetEKVwithConnectedAsset.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "name": "EKVName1", 4 | "description": "2021-Susi-01", 5 | "tags": ["le_ik:330551143","kt_ik:105177505"], 6 | "asset_type": "ekv", 7 | "origin_country": "de", 8 | "rov": {"mode": 0} 9 | }, 10 | "entity": { "ekv": { 11 | "ekvnr": "2021-Susi-01", 12 | "ekvdatum": "2021-08-21", 13 | "leistnr": "330551143", 14 | "kostnr": "105177505", 15 | "versnr": "3113461811", 16 | "hilfsnr": ["1050040"], 17 | "produkt": ["Pflegeirgendwas"], 18 | "arztnr": "987269119", 19 | "diagschl": ["F.45"], 20 | "diagorg": ["IrgendeineDiagnose"], 21 | "diagmined": ["Die geminte Diagnose"], 22 | "xml": "....."}, 23 | "asset_terms": {"list": [{"term_display_name": "Elektronischer Kostenvoranschlag", 24 | "term_id": "5d2d5419-0032-4c64-90e2-ce68c6997bb5_47011eed-f099-4dac-ac4a-62a0e6635cc1"}]}, 25 | "data_profile": {"data_classification_manual": [{"id": "f19bd342-c1ed-405b-89db-c10a8901d982", 26 | "name": "Personally Identifiable Information (GDPR relevant)", 27 | "global_id": "5d2d5419-0032-4c64-90e2-ce68c6997bb5_f19bd342-c1ed-405b-89db-c10a8901d982"}] 28 | }}, 29 | "attachments": [{ 30 | "asset_type": "data_asset", 31 | "name": "remote", 32 | "description": "remote", 33 | "connection_id": "f2ca0a71-2865-4198-be49-b78568bcb490", 34 | "connection_path": "/tkcos/ibm.png", 35 | "is_remote": true, 36 | "is_managed": false, 37 | "is_referenced": false 38 | }] 39 | } -------------------------------------------------------------------------------- /payloads/tk/createCustomAssetTypeEKV.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ekv", 3 | "description": "ekv asset type", 4 | "fields": [ 5 | { 6 | "key": "ekvnr", 7 | "type": "string", 8 | "facet": true, 9 | "is_array": false, 10 | "search_path": "ekvnr", 11 | "is_searchable_across_types": true 12 | }, 13 | { 14 | "key": 
"ekvdatum", 15 | "type": "date", 16 | "facet": true, 17 | "is_array": false, 18 | "search_path": "ekvdatum", 19 | "is_searchable_across_types": true 20 | }, 21 | { 22 | "key": "leistnr", 23 | "type": "string", 24 | "facet": true, 25 | "is_array": false, 26 | "search_path": "leistnr", 27 | "is_searchable_across_types": false 28 | }, 29 | { 30 | "key": "kostnr", 31 | "type": "string", 32 | "facet": true, 33 | "is_array": false, 34 | "search_path": "kostnr", 35 | "is_searchable_across_types": false 36 | }, 37 | { 38 | "key": "versnr", 39 | "type": "string", 40 | "facet": true, 41 | "is_array": false, 42 | "search_path": "versnr", 43 | "is_searchable_across_types": false 44 | }, 45 | { 46 | "key": "hilfsnr", 47 | "type": "string", 48 | "facet": true, 49 | "is_array": true, 50 | "search_path": "hilfsnr", 51 | "is_searchable_across_types": false 52 | }, 53 | { 54 | "key": "produkt", 55 | "type": "string", 56 | "facet": true, 57 | "is_array": true, 58 | "search_path": "produkt", 59 | "is_searchable_across_types": false 60 | }, 61 | { 62 | "key": "arztnr", 63 | "type": "string", 64 | "facet": true, 65 | "is_array": false, 66 | "search_path": "arztnr", 67 | "is_searchable_across_types": false 68 | }, 69 | { 70 | "key": "diagschl", 71 | "type": "string", 72 | "facet": true, 73 | "is_array": true, 74 | "search_path": "diagschl", 75 | "is_searchable_across_types": false 76 | }, 77 | { 78 | "key": "diagorg", 79 | "type": "string", 80 | "facet": true, 81 | "is_array": true, 82 | "search_path": "diagorg", 83 | "is_searchable_across_types": false 84 | }, 85 | { 86 | "key": "diagmined", 87 | "type": "string", 88 | "facet": true, 89 | "is_array": true, 90 | "search_path": "diagmined", 91 | "is_searchable_across_types": false 92 | }, 93 | { 94 | "key": "xml", 95 | "type": "string", 96 | "facet": true, 97 | "is_array": false, 98 | "search_path": "xml", 99 | "is_searchable_across_types": false 100 | } 101 | ], 102 | "properties": { 103 | "ekvnr": { 104 | "type": "string", 105 | 
"description": "Nummer des Kostenvoranschlag" 106 | }, 107 | "ekvdatum": { 108 | "type": "date", 109 | "description": "Datum des Kostenvoranschlag" 110 | }, 111 | "leistnr": { 112 | "type": "string", 113 | "description": "Nummer des Leistungserbringers" 114 | }, 115 | "kostnr": { 116 | "type": "string", 117 | "description": "Nummer des Kostenträgers" 118 | }, 119 | "hilfsnr": { 120 | "type": "string", 121 | "description": "Nummer des Hilfsmittels", 122 | "is_array": true 123 | }, 124 | "produkt": { 125 | "type": "string", 126 | "description": "Bezeichnung des Hilfsmittels", 127 | "is_array": true 128 | }, 129 | "diagschl": { 130 | "type": "string", 131 | "description": "Diagnoseschlüssel", 132 | "is_array": true 133 | }, 134 | "diagorg": { 135 | "type": "string", 136 | "description": "Original beschriebene Diagnose", 137 | "is_array": true 138 | }, 139 | "diagmined": { 140 | "type": "string", 141 | "description": "Per Text Mining ermittelte Diagnose", 142 | "is_array": true 143 | } 144 | } 145 | } -------------------------------------------------------------------------------- /payloads/tk/updatecustomattributediagnose.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "op": "replace", 3 | "path": "/diagmined/0", 4 | "value": "Test" 5 | }] -------------------------------------------------------------------------------- /payloads/workflows/deleteTask.json: -------------------------------------------------------------------------------- 1 | { 2 | "action": "complete", 3 | "form_properties": [ 4 | { 5 | "id": "action", 6 | "value": "#delete" 7 | } 8 | ] 9 | } -------------------------------------------------------------------------------- /payloads/workflows/discardTask.json: -------------------------------------------------------------------------------- 1 | { 2 | "action": "complete", 3 | "form_properties": [ 4 | { 5 | "id": "action", 6 | "value": "?discard" 7 | } 8 | ] 9 | } 
-------------------------------------------------------------------------------- /payloads/workflows/publishTask.json: -------------------------------------------------------------------------------- 1 | { 2 | "action": "complete", 3 | "form_properties": [ 4 | { 5 | "id": "action", 6 | "value": "#publish" 7 | } 8 | ] 9 | } -------------------------------------------------------------------------------- /payloads/workflows/queryAllWorkflows.json: -------------------------------------------------------------------------------- 1 | { 2 | "conditions": [{ 3 | "type": "workflow_id", 4 | "values": ["49c96ea8-9b62-11eb-a697-0a580a800239"] 5 | }] 6 | } -------------------------------------------------------------------------------- /payloads/workflows/updateWorkflowUserTask.json: -------------------------------------------------------------------------------- 1 | { 2 | "action": "complete", 3 | "form_properties": [{ 4 | "id": "action", 5 | "value": "publish" 6 | }] 7 | } -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | log_cli = 1 3 | log_cli_level = INFO 4 | log_cli_format = %(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s) 5 | log_cli_date_format=%Y-%m-%d %H:%M:%S -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pandas 2 | requests 3 | python-decouple 4 | simple_term_menu 5 | -------------------------------------------------------------------------------- /sandbox/createCategories.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../') 7 | from apis import endpoints, MainAPI, 
usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | # Create new categories from file 14 | categoriesTable = pandas.read_csv(sys.argv[1]) 15 | for index, row in categoriesTable.iterrows(): 16 | categoriesAPI.createCategory(row['name'], row['short_description'], row['long_description']) -------------------------------------------------------------------------------- /sandbox/createTerms.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | # Create new terms from file 14 | termsTable = pandas.read_csv(sys.argv[1]) 15 | for index, row in termsTable.iterrows(): 16 | termAbbreviations = [row['name'][0:2]] 17 | termsAPI.createTerm(termAbbreviations, row['name'], row['short_description'], row['long_description']) -------------------------------------------------------------------------------- /sandbox/createUsers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 3 | import pandas 4 | import sys 5 | sys.path.append('../') 6 | import apis 7 | userApi = apis.endpoints.UsersAPI() 8 | 9 | usersTable = pandas.read_csv(sys.argv[1]) 10 | userApi.updateUsersFromTable(usersTable) 11 | -------------------------------------------------------------------------------- /sandbox/demo_users.csv: -------------------------------------------------------------------------------- 1 | 
username,displayName,email,password,user_roles 2 | -------------------------------------------------------------------------------- /sandbox/exportDataProtectionRules.py: -------------------------------------------------------------------------------- 1 | 2 | import logging 3 | import pandas 4 | 5 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 6 | import sys 7 | sys.path.append('../') 8 | from apis import endpoints, MainAPI, usecases 9 | mainAPI = MainAPI() 10 | rulesAPI = endpoints.RulesAPI(mainAPI) 11 | rulesAPI.exportDataProtectionRules(sys.argv[1]) -------------------------------------------------------------------------------- /sandbox/exportGovArtifacts.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | mainAPI = MainAPI() 10 | governanceArtifactAPI = endpoints.GovArtifactAPI(mainAPI) 11 | governanceArtifactAPI.exportArtifactsZIP(sys.argv[1],"always",sys.argv[2],"all_top_level") -------------------------------------------------------------------------------- /sandbox/exportProject.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # # Train, promote and deploy Boston house prices prediction model 5 | 6 | # This notebook contains steps and code to demonstrate support of AI Lifecycle features in Cloud Pak for Data. 7 | # It contains steps and code to work with [`cpdctl`](https://github.com/IBM/cpdctl) CLI tool available in IBM github repository. 8 | # It also introduces commands for getting model and training data, persisting model, deploying model 9 | # and promoting it between deployment spaces. 10 | # 11 | # Some familiarity with Python is helpful. 
This notebook uses Python 3.7. 12 | # 13 | 14 | # In[1]: 15 | 16 | 17 | # import base64 18 | import json 19 | import os 20 | # import platform 21 | # import requests 22 | # import tarfile 23 | # import zipfile 24 | # from IPython.core.display import display, HTML 25 | from decouple import config 26 | from simple_term_menu import TerminalMenu 27 | import sys 28 | 29 | # ## CPD Credentials 30 | 31 | # In[2]: 32 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 33 | 34 | CPD_USER_NAME = config('WKCUSER') 35 | CPD_USER_PASSWORD = config('PASSWORD') 36 | CPD_URL = "https://"+config('TZHOSTNAME') 37 | 38 | version_r = os.popen('cpdctl version').read() 39 | 40 | CPDCTL_VERSION = version_r 41 | CPDCTL_VERSION=CPDCTL_VERSION.strip() 42 | 43 | print("cpdctl version: {}".format(CPDCTL_VERSION)) 44 | 45 | 46 | # ### Add CPD profile and context configuration 47 | 48 | # Add "cpd_user" user to the `cpdctl` configuration 49 | 50 | # In[6]: 51 | 52 | os.system('cpdctl config user set cpd_user --username '+CPD_USER_NAME+ ' --password '+CPD_USER_PASSWORD) 53 | 54 | 55 | # Add "cpd" profile to the `cpdctl` configuration 56 | 57 | # In[7]: 58 | 59 | 60 | os.system('cpdctl config profile set cpd --url ' +CPD_URL+' --user cpd_user') 61 | 62 | 63 | # Add "cpd" context to the `cpdctl` configuration 64 | 65 | # In[8]: 66 | 67 | 68 | os.system('cpdctl config context set cpd --profile cpd --user cpd_user') 69 | 70 | 71 | # List available contexts 72 | 73 | # In[9]: 74 | 75 | os.system('cpdctl config context list') 76 | 77 | 78 | # In[10]: 79 | 80 | 81 | os.system('cpdctl config context use cpd') 82 | 83 | 84 | # List available projects in current context 85 | 86 | # In[11]: 87 | 88 | #####################Function to select an existing project##################### 89 | def existing_projects(options, service_info): 90 | #########Printing the existing project list menu in the terminal######### 91 | terminal_menu = TerminalMenu(options,title = "Select a project to export. 
Use Keyboard keys to select.", menu_cursor_style = ("fg_cyan", "bold"), menu_highlight_style =("bold",)) 92 | menu_entry_index = terminal_menu.show() 93 | #########Confirmation message######### 94 | if(menu_entry_index!=len(options)-1): 95 | confirm=input("The Project "+ service_info[menu_entry_index]['name']+" having Project ID "+service_info[menu_entry_index]['guid']+" will be Exported.\nDo you want to continue?(Y/N)") 96 | if(confirm=="Y" or confirm=="y"): 97 | return service_info[menu_entry_index]['guid'] # return guid of selected project 98 | elif(confirm=="N" or confirm=="n"): 99 | return existing_projects(options, service_info) # recursive call to select a project again 100 | else: 101 | return 0 102 | #####################End of function existing_projects##################### 103 | 104 | ## Access the selected project assets 105 | 106 | # Get cpdctl-demo project ID and show details 107 | 108 | # In[12]: 109 | options = [] 110 | service_info = {} 111 | data = json.loads(os.popen("cpdctl project list --output json").read()) 112 | 113 | entries=data['total_results'] 114 | # print(data) 115 | for i in range(0,entries): 116 | #########creating list of existing projects######### 117 | options.append(data['resources'][i]['entity']['name']) 118 | service_info[i] = { 119 | "name": data['resources'][i]['entity']['name'], 120 | "guid": data['resources'][i]['metadata']['guid'] 121 | } 122 | options[i]+=" ("+data['resources'][i]['metadata']['guid']+")" 123 | options.append("Exit from the Menu") 124 | PROJECT_ID=existing_projects(options, service_info) #function call to list the existing projects and returning the selected project guid 125 | # print(PROJECT_ID) 126 | if(PROJECT_ID==0): 127 | print("####################################\n\tNo Project Selected!!\n Please Select a Project to Export\n####################################") 128 | sys.exit() 129 | 130 | # In[13]: 131 | 132 | PROJECT_ID=PROJECT_ID.strip() 133 | os.system(' cpdctl project get --project-id 
'+PROJECT_ID) 134 | 135 | 136 | # Get project details in JSON format and extract it's name 137 | 138 | # In[14]: 139 | 140 | 141 | os.system(' cpdctl project get --project-id '+PROJECT_ID+' --output json') 142 | 143 | 144 | # In[15]: 145 | 146 | 147 | result = os.popen('cpdctl project get --project-id '+PROJECT_ID+' --output json --jmes-query "entity.name" --raw-output').read() 148 | PROJECT_NAME = result 149 | print("{}project ID is: {}".format(PROJECT_NAME, PROJECT_ID)) 150 | 151 | EXPORT = { 152 | "all_assets": True 153 | } 154 | EXPORT_JSON = json.dumps(EXPORT) 155 | print(EXPORT_JSON) 156 | result = os.popen('cpdctl asset export start --project-id '+PROJECT_ID+ ' --assets \''+EXPORT_JSON+'\' --name demo-project-assets --output json --jmes-query "metadata.id" --raw-output').read() 157 | EXPORT_ID = result 158 | print('Export ID: {}'.format(EXPORT_ID)) 159 | EXPORT_ID=EXPORT_ID.strip() 160 | os.system('cpdctl asset export download --project-id '+PROJECT_ID+' --export-id '+EXPORT_ID+' --output-file '+ sys.argv[1] +' --progress') 161 | -------------------------------------------------------------------------------- /sandbox/exportTerms.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | # Get list of terms 14 | termsJSON = mainAPI.search(f'metadata.artifact_type:glossary_term') 15 | termsAPI.terms2CSV(termsJSON, sys.argv[1]) -------------------------------------------------------------------------------- /sandbox/exportUsers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 
logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO)
import pandas
import sys
sys.path.append('../')
import apis
userApi = apis.endpoints.UsersAPI()

# Export all platform users to the CSV given as first argument, then read it
# back so a broken file surfaces immediately.
usersJSON = userApi.getAllUsers()
userApi.users2CSV(usersJSON, sys.argv[1])
usersTable = pandas.read_csv(sys.argv[1])
# (removed notebook-leftover bare `usersTable` expression - a no-op in a script)
--------------------------------------------------------------------------------
/sandbox/getDemoFiles.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Sparse-clone the demo-artifacts repo and copy one demo's files into the sandbox.
demo_name=$1
project_name=ibmtechzone-demo-artifacts
branch="main"
if [[ $PLAYGROUND_ENVIRONMENT == *"development"* ]]; then
    branch="development"
fi
if [[ $PLAYGROUND_ENVIRONMENT == *"staging"* ]]; then
    branch="staging"
fi

if [[ $PLAYGROUND_ENVIRONMENT == *"techzone"* ]]; then
    branch="techzone"
fi

github_url=https://github.com/IBM/ibmtechzone-demo-artifacts
cd /projects/techzone-demo/sandbox
rm -rf /projects/techzone-demo/sandbox/$project_name
git clone --single-branch --branch $branch --sparse $github_url
cd /projects/techzone-demo/sandbox/$project_name
git sparse-checkout set $demo_name
# cd /projects/techzone-demo/sandbox/

if [ -d "$demo_name" ]; then
    # Control enters here if the demo directory EXISTS in the checkout.
    echo "DemoName Found!"
    cp -a /projects/techzone-demo/sandbox/$project_name/$demo_name/. /projects/techzone-demo/sandbox/
    rm -rf /projects/techzone-demo/sandbox/$project_name
    echo "Files loaded successfully!"
    # NOTE(review): success exits with status 2 (non-zero) while the failure path
    # below falls through with status 0 — confirm callers expect this before changing.
    exit 2
fi
echo "DemoName Not Found!"
echo "File loading failed!"
rm -rf /projects/techzone-demo/sandbox/$project_name

--------------------------------------------------------------------------------
/sandbox/github.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Publish sandbox demo artifacts to the public ibmtechzone-demo-artifacts repo
# (and optionally a private repo first), then open and auto-merge a PR.
demo_name=$1
meta_data=$2
sandbox_username=$3
desc=$4
git_token=$5
private_github_url=$6
private_git_token=$7

isPrivate="false"
branch="main"
github_url=https://github.com/IBM/ibmtechzone-demo-artifacts
project_name=ibmtechzone-demo-artifacts
username=IBM

if [[ $PLAYGROUND_ENVIRONMENT == *"development"* ]]; then
    branch="development"
fi
if [[ $PLAYGROUND_ENVIRONMENT == *"staging"* ]]; then
    branch="staging"
fi
if [[ $PLAYGROUND_ENVIRONMENT == *"techzone"* ]]; then
    branch="techzone"
fi

# A sixth argument (private repo URL) switches on the private-repo flow
if [ "$6" ]; then
    isPrivate="true"
fi


gtd=$git_token #`echo $git_token | base64 -d`


if [ $isPrivate == "true" ]; then
    echo "Pushing demo artifacts to Private repository"
    # Push the demo artifacts to the private git repo:
    # insert the token after the "https://" prefix of the URL
    private_github_url="${private_github_url:0:8}$private_git_token@${private_github_url:8}"
    echo $private_github_url
    cd /projects/techzone-demo/sandbox/
    git clone --sparse $private_github_url $project_name

    if [ $? != 0 ]; then
        echo "Something went wrong in cloning Private Repository: ${private_github_url}"
        exit 1
    fi

    cd /projects/techzone-demo/sandbox/$project_name

    if [ -d "$sandbox_username-$demo_name" ]; then
        # Control enters here if the demo directory already EXISTS.
        echo "DemoName Already Exists in private repo, Choose another name!"
        exit 2
    fi
    mkdir $sandbox_username-$demo_name
    git sparse-checkout set $sandbox_username-$demo_name

    # Add the meta_data and files to the demo directory
    echo $meta_data > $sandbox_username-$demo_name/readme.json

    # add the required files
    cp /projects/techzone-demo/sandbox/governance_artifacts.zip $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/users.json $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/data_protection_rules.json $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/project_assets.zip $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/demo_users.csv $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/groups.json $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/roles.json $sandbox_username-$demo_name/

    git add .
    git commit -am "private demo commit by '$sandbox_username'"
    # NOTE(review): --set-upstream with no branch name relies on push.default — confirm
    git push --set-upstream origin

    status_check=$?

    if [ $status_check != 0 ]; then
        echo "Something went wrong with pushing artifacts to Private Repository"
        exit 1
    fi

fi


cd /projects/techzone-demo/sandbox
rm -rf $project_name
if [ ! -d "$project_name" ]; then
    git clone --single-branch --branch $branch --sparse $github_url
fi

cd /projects/techzone-demo/sandbox/$project_name
git checkout -b $sandbox_username-$demo_name
git sparse-checkout set $sandbox_username-$demo_name

if [ -d "$sandbox_username-$demo_name" ]; then
    # Control enters here if the demo directory already EXISTS.
    echo "DemoName Already Exists, Choose another name!"
    exit 2
fi

mkdir $sandbox_username-$demo_name
git sparse-checkout set $sandbox_username-$demo_name

# Add the meta_data to the demo directory
echo $meta_data > $sandbox_username-$demo_name/readme.json


if [ $isPrivate == "true" ]; then
    # Push only the metadata to the public repo (artifacts already went private)
    if [ $status_check == 0 ]; then
        echo "Successfully pushed Artifacts to Private Repository"
        git add .
        git commit -am "private demo commit by '$sandbox_username'"
        git push https://user_name:$gtd@github.com/$username/$project_name.git
    fi

fi


if [ $isPrivate == "false" ]; then
    echo "Pushing demo artifacts to Public repository"
    # add the required files
    cp /projects/techzone-demo/sandbox/governance_artifacts.zip $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/users.json $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/data_protection_rules.json $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/project_assets.zip $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/demo_users.csv $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/groups.json $sandbox_username-$demo_name/
    cp /projects/techzone-demo/sandbox/roles.json $sandbox_username-$demo_name/

    git add .
    git commit -am "public demo commit by '$sandbox_username'"
    git push https://user_name:$gtd@github.com/$username/$project_name.git

    # FIX: capture the push status once. The original tested $? twice; the second
    # test saw the exit status of the first [ ] command, not the git push.
    push_status=$?

    if [ $push_status == 0 ]; then
        echo "Successfully pushed Artifacts to Public Repository"
    fi

    if [ $push_status != 0 ]; then
        echo "Something went wrong with pushing artifacts to Public Repository"
    fi

fi


# Open a pull request against the artifacts repo
result="$(curl \
    -X POST \
    -H 'Authorization: token '${gtd}'' \
    https://api.github.com/repos/${username}/${project_name}/pulls \
    -d '{"title":"Sandbox Demo by '$sandbox_username'","body":"","head":"'$sandbox_username-$demo_name'","base":"'$branch'"}')"

# FIX: `print obj["number"]` is Python 2 syntax and fails under python3;
# use the function form, which works on both.
requestNumber=$(echo "${result}" | python -c 'import json,sys;obj=json.load(sys.stdin);print(obj["number"])')

re='^[0-9]+$'
if ! [[ $requestNumber =~ $re ]] ; then
    echo "Something went wrong while creating the pull request" >&2; exit 1
fi

echo "Pull Request created successfully and the Number is : $requestNumber"
echo "Auto merging the PR"


merge_status="$(curl --location --request PUT "https://api.github.com/repos/${username}/${project_name}/pulls/$requestNumber/merge" \
    --header "Authorization: Bearer ${gtd}" -w 'HTTP_CODE:%{http_code}')"

status=$(echo "${merge_status}" | grep -o 'HTTP_CODE:[1-4][0-9][0-9]' | sed 's/HTTP_CODE://')

if [ $status == "200" ]; then
    echo "Successfully merged the pull request"
fi
if [ $status != "200" ]; then
    echo "Something went wrong while merging the pull request"
    exit 1
fi

cd /projects/techzone-demo/sandbox
rm -rf $project_name
echo "success"
--------------------------------------------------------------------------------
/sandbox/importDataProtectionRules.py:
--------------------------------------------------------------------------------
import logging
import pandas

logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO)
import sys
sys.path.append('../')
from apis import endpoints, MainAPI, usecases

mainAPI = MainAPI()
rulesAPI = 
endpoints.RulesAPI(mainAPI) 11 | rulesAPI.importDataProtectionRules(sys.argv[1]) -------------------------------------------------------------------------------- /sandbox/importGovArtifacts.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | mainAPI = MainAPI() 10 | governanceArtifactAPI = endpoints.GovArtifactAPI(mainAPI) 11 | governanceArtifactAPI.importArtifactsZIP(sys.argv[1]) -------------------------------------------------------------------------------- /sandbox/importProject.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # # Train, promote and deploy Boston house prices prediction model 5 | 6 | # This notebook contains steps and code to demonstrate support of AI Lifecycle features in Cloud Pak for Data. 7 | # It contains steps and code to work with [`cpdctl`](https://github.com/IBM/cpdctl) CLI tool available in IBM github repository. 8 | # It also introduces commands for getting model and training data, persisting model, deploying model 9 | # and promoting it between deployment spaces. 10 | # 11 | # Some familiarity with Python is helpful. This notebook uses Python 3.7. 
#
# In[1]:


# import base64
import json
import os
# import platform
# import requests
# import tarfile
# import zipfile
# from IPython.core.display import display, HTML
from decouple import config
import sys

# ## CPD Credentials

# In[2]:
#!/usr/bin/python

# Pick the demo project and zip file name from arguments
### python3.8 importProject.py PROJECT_ZIP DEMO_NAME ###

DEMO_NAME=str(sys.argv[2])
PROJECT_ZIP=str(sys.argv[1])+'.zip'
# print(DEMO_NAME)
# print(PROJECT_ZIP)

# print(sys.argv)

### getting credentials from .env file (via python-decouple) ###
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

CPD_USER_NAME = config("WKCUSER")
CPD_USER_PASSWORD = config("PASSWORD")
CPD_URL = "https://"+config('TZHOSTNAME')


version_r = os.popen('cpdctl version').read()

CPDCTL_VERSION = version_r
CPDCTL_VERSION=CPDCTL_VERSION.split()

print("cpdctl version: {}".format(CPDCTL_VERSION))


# ### Add CPD profile and context configuration

# Add "cpd_user" user to the `cpdctl` configuration
# NOTE(review): the password is interpolated into a shell command and is visible
# in the process list while the command runs — consider cpdctl's environment
# variable configuration instead.

# In[6]:


os.system(' cpdctl config user set cpd_user --username '+CPD_USER_NAME+' --password '+CPD_USER_PASSWORD)


# Add "cpd" profile to the `cpdctl` configuration

# In[7]:


os.system(' cpdctl config profile set cpd --url '+CPD_URL+' --user cpd_user')


# Add "cpd" context to the `cpdctl` configuration

# In[8]:


os.system(' cpdctl config context set cpd --profile cpd --user cpd_user')


# List available contexts

# In[9]:


os.system(' cpdctl config context list')


# In[10]:


os.system(' cpdctl config context use cpd')


# Look up (and below, delete) any existing project with the demo's name
JMES_QUERY = "resources[?entity.name == 
'{}'].metadata.guid".format(DEMO_NAME) 100 | result = os.popen('cpdctl project list --output json --jmes-query "'+JMES_QUERY+'"').read() 101 | PROJECT_IDS = json.loads(result) 102 | if PROJECT_IDS: 103 | for project_id in PROJECT_IDS: 104 | print('Deleting project '+DEMO_NAME+' with ID: {}'.format(project_id)) 105 | os.system('cpdctl project delete --project-id "'+project_id+'"') 106 | 107 | 108 | os.system('cpdctl project list --output json') 109 | 110 | import uuid 111 | STORAGE = {"type": "assetfiles", "guid": str(uuid.uuid4())} 112 | STORAGE_JSON = json.dumps(STORAGE) 113 | 114 | result = os.popen('cpdctl project create --name '+DEMO_NAME+' --output json --raw-output --storage \''+STORAGE_JSON+'\' --jmes-query \'location\'').read() 115 | DEMO_PROJECT_ID = result.rsplit('/', 1)[-1] 116 | print("The new '{}' project ID is: {}".format(DEMO_NAME, DEMO_PROJECT_ID)) 117 | 118 | DEMO_PROJECT_ID=DEMO_PROJECT_ID.strip() 119 | result = os.popen('cpdctl asset import start --project-id '+DEMO_PROJECT_ID+' --import-file '+PROJECT_ZIP+' --output json --jmes-query "metadata.id" --raw-output').read() 120 | IMPORT_ID = result 121 | print("The new import ID is: {}".format(IMPORT_ID)) 122 | 123 | os.system('cpdctl asset import get --project-id '+DEMO_PROJECT_ID+' --import-id '+IMPORT_ID) 124 | 125 | os.system('cpdctl asset search --query \'*:*\' --type-name asset --project-id '+DEMO_PROJECT_ID) 126 | 127 | -------------------------------------------------------------------------------- /sandbox/update-env.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | hostname = sys.argv[1] 4 | wkcuser=sys.argv[2] 5 | password=sys.argv[3] 6 | 7 | f = open("../.env", "w") 8 | f.write("TZHOSTNAME="+hostname+"\nWKCUSER="+wkcuser+"\nPASSWORD="+password) 9 | f.close() -------------------------------------------------------------------------------- /sandbox/updateCategories.py: 
-------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | # Bring categories back to object for bulk updates to catalog 14 | categoriesTable = pandas.read_csv(sys.argv[1]) 15 | categoriesAPI.updateCategoriesFromTable(categoriesTable) 16 | -------------------------------------------------------------------------------- /sandbox/updateTerms.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pandas 3 | 4 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 5 | import sys 6 | sys.path.append('../') 7 | from apis import endpoints, MainAPI, usecases 8 | 9 | categoriesAPI = endpoints.CategoriesAPI() 10 | termsAPI = endpoints.TermsAPI() 11 | mainAPI = MainAPI() 12 | refDataAPI = endpoints.RefDataAPI(mainAPI) 13 | termTable = pandas.read_csv(sys.argv[1]) 14 | termsAPI.updateTermsFromTable(termTable) -------------------------------------------------------------------------------- /sandbox/updateUsers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | logging.basicConfig(format="%(asctime)s %(levelname)-7s %(message)s", level=logging.INFO) 3 | import pandas 4 | import sys 5 | sys.path.append('../') 6 | import apis 7 | userApi = apis.endpoints.UsersAPI() 8 | 9 | usersTable = pandas.read_csv(sys.argv[1]) 10 | userApi.updateUsersFromTable(usersTable) 11 | -------------------------------------------------------------------------------- /tests/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/CPDemoFramework/fb7bb84f4dec395cf023a633a0c1c0b4d92c2d3a/tests/__init__.py -------------------------------------------------------------------------------- /tests/csvs/refDataUpload.csv: -------------------------------------------------------------------------------- 1 | "code","value","description","parent" 2 | "4","Keine Angabe","Es wurde keine Angabe zum Geschlecht gemacht", 3 | "3","Divers","Das Geschlecht konnte als Divers identifiziert werden", 4 | "2","Weiblich","Das Geschlecht konnte als Weiblich identifiziert werden", 5 | "1","Männlich","Das Geschlecht konnte als Männlich identifiziert werden", 6 | "0","Unbekannt","Das Geschlecht konnte nicht abgefragt oder erkannt werden", 7 | -------------------------------------------------------------------------------- /tests/glossaryTerm_test.py: -------------------------------------------------------------------------------- 1 | from apis import endpoints, MainAPI 2 | import filecmp 3 | import os 4 | import time 5 | import unittest 6 | 7 | class TermsAPI(unittest.TestCase): 8 | @classmethod 9 | def setUpClass(cls): # prepare something ahead of all tests 10 | cls.mainAPI = MainAPI() 11 | cls.t_api = endpoints.TermsAPI(cls.mainAPI) 12 | cls.w_api = endpoints.WorkflowsAPI(cls.mainAPI) 13 | cls.testString = 'T3st' 14 | 15 | def test_TermsWithWorkflow(self): 16 | res = self.mainAPI.search('T3st') 17 | assert res['size'] == 0, f'Term with name {self.testString} already exists, please delete or rename it manually' 18 | 19 | newTerm = self.t_api.createTerm('glossary_terms/createTerm.json', False)["resources"] 20 | assert len(newTerm) == 1 21 | 22 | artifact_id = newTerm[0]['artifact_id'] 23 | version_id = newTerm[0]['version_id'] 24 | 25 | termJSON = self.t_api.getDraftVersionsOfTerm(artifact_id) 26 | assert termJSON["count"] == 1 27 | #termJSON = t_api.getPublishedVersionsOfTerm(artifact_id) 28 | 29 | res 
= self.w_api.getWorkflowUserTaskByArtifact(artifact_id, version_id)["resources"]
        assert len(res) == 1
        # res["resources"][0]["entity"]["form_properties"]

        # Approve the pending workflow task so the draft term becomes published
        taskId = res[0]["metadata"]["task_id"]
        self.w_api.updateWorkflowUserTask(taskId, 'workflows/publishTask.json')

        pubTerm = self.t_api.getPublishedVersionsOfTerm(artifact_id)["resources"]
        assert len(pubTerm) == 1

        artifact_id = pubTerm[0]["metadata"]['artifact_id']
        version_id = pubTerm[0]["metadata"]['version_id']

        # Deleting with workflow enabled creates another task instead of removing directly
        delTerm = self.t_api.deleteTerm(artifact_id, version_id, False)["resources"]
        assert len(delTerm) == 1

        artifact_id = delTerm[0]['artifact_id']
        version_id = delTerm[0]['version_id']

        res = self.w_api.getWorkflowUserTaskByArtifact(artifact_id, version_id)["resources"]
        assert len(res) == 1
        # res["resources"][0]["entity"]["form_properties"]

        # Approve the delete task to finish removing the term
        taskId = res[0]["metadata"]["task_id"]
        self.w_api.updateWorkflowUserTask(taskId, 'workflows/deleteTask.json')

        time.sleep(1) # necessary to wait here for ref data to be found by getListOfRefData

        res = self.mainAPI.search('T3st')
        assert res['size'] == 0, f'Term with name {self.testString} still exists, please delete or rename it manually'

    def test_TermsWithoutWorkflow(self):
        # Same create/publish/delete round trip, but skipping the workflow (skip flag True)
        res = self.mainAPI.search('T3st')
        assert res['size'] == 0, f'Term with name {self.testString} already exists, please delete or rename it manually'

        newTerm = self.t_api.createTerm('glossary_terms/createTerm.json', True)["resources"]
        assert len(newTerm) == 1

        artifact_id = newTerm[0]['artifact_id']
        version_id = newTerm[0]['version_id']

        pubTerm = self.t_api.getPublishedVersionsOfTerm(artifact_id)["resources"]
        assert len(pubTerm) == 1

        artifact_id = pubTerm[0]["metadata"]['artifact_id']
        version_id = pubTerm[0]["metadata"]['version_id']

        delTerm = self.t_api.deleteTerm(artifact_id, 
version_id, True)

        time.sleep(1) # necessary to wait here for ref data to be found by getListOfRefData

        res = self.mainAPI.search('T3st')
        assert res['size'] == 0, f'Term with name {self.testString} still exists, please delete or rename it manually'
--------------------------------------------------------------------------------
/tests/main_test.py:
--------------------------------------------------------------------------------
import apis
from requests.packages import urllib3
import unittest

class MainAPI(unittest.TestCase):
    """Smoke tests for the platform-level health endpoints of apis.MainAPI.

    Each test passes if the corresponding call completes without raising.
    """
    @classmethod
    def setUpClass(cls): # prepare something ahead of all tests
        cls.api = apis.MainAPI()
        cls.testString = 'T3st'
        cls.uploadFilename = 'tests/csvs/refDataUpload.csv'
        cls.downloadFilename = 'tests/csvs/refDataDownload.csv'

    def setUp(self):
        # Test clusters use self-signed certificates; silence per-request TLS warnings
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    def test_executeCAMSHealthCheck(self):
        self.api.executeCAMSHealthCheck()

    def test_executeBGHeartbeat(self):
        self.api.executeBGHeartbeat()

    def test_executeBGHealthCheck(self):
        self.api.executeBGHealthCheck()

    def test_executeTenantInitStatusCheck(self):
        self.api.executeTenantInitStatusCheck()
--------------------------------------------------------------------------------
/tests/payloads/reference_data/createRefData.json:
--------------------------------------------------------------------------------
{
    "long_description": "test description",
    "name": "T3st",
    "type": "TEXT"
}
--------------------------------------------------------------------------------
/tests/referenceData_test.py:
--------------------------------------------------------------------------------
import apis
import filecmp
import os
import time
import unittest

class RefDataAPI(unittest.TestCase):
    @classmethod
    def setUpClass(cls): # prepare something ahead of all
tests 10 | cls.api = apis.endpoints.RefDataAPI() 11 | cls.testString = 'T3st' 12 | cls.uploadFilename = 'tests/csvs/refDataUpload.csv' 13 | cls.downloadFilename = 'tests/csvs/refDataDownload.csv' 14 | 15 | def test_refData(self): 16 | res = self.api.getListOfRefData(self.testString) 17 | assert res['size'] == 0, f'reference Dataset with name {self.testString} already exists, please delete or rename it manually' 18 | 19 | self.api.createRefData('../tests/payloads/reference_data/createRefData.json') 20 | 21 | time.sleep(1) # necessary to wait here for ref data to be found by getListOfRefData 22 | res = self.api.getListOfRefData(self.testString) 23 | assert res['size'] == 1 24 | artifact_id = res['rows'][0]['artifact_id'] 25 | version_id = res['rows'][0]['entity']['artifacts']['version_id'] 26 | 27 | self.api.getRefData(artifact_id, version_id) 28 | 29 | self.api.loadRefDataFromCSV(artifact_id, self.uploadFilename, version_id) 30 | 31 | self.api.getRefDataCSV(artifact_id, self.downloadFilename, version_id) 32 | assert filecmp.cmp(self.uploadFilename, self.downloadFilename) 33 | os.remove(self.downloadFilename) 34 | 35 | self.api.deleteRefData(artifact_id) 36 | -------------------------------------------------------------------------------- /utilities/__init__.py: -------------------------------------------------------------------------------- 1 | from utilities import csvProcessing 2 | -------------------------------------------------------------------------------- /utilities/csvProcessing.py: -------------------------------------------------------------------------------- 1 | """CSV Processing Functions""" 2 | import logging 3 | import pandas 4 | 5 | def items2CSV(add2TableFun, itemsJSON, filePath): 6 | """creates a csv file from a list of certain WKC API items 7 | Args: 8 | add2TableFun (function): function that contains the logic how a API type object should be added to the pandas table. 
9 | itemsJSON (dict): JSON formatted output of the WKC API for a list of certain WKC API type. 10 | filePath (str): name or filepath for the creation of the csv file 11 | """ 12 | table = items2Table(add2TableFun, itemsJSON) 13 | table.to_csv(filePath, index=False) 14 | 15 | def items2Table(add2TableFun, itemsJSON): 16 | """creates a table from a list of certain WKC API items 17 | Args: 18 | add2TableFun (function): function that contains the logic how a API type object should be added to the pandas table. 19 | itemsJSON (dict): JSON formatted output of the WKC API for a list of certain WKC API type. 20 | Returns: 21 | pandas.DataFrame: table of certain WKC API items 22 | """ 23 | table = pandas.DataFrame() 24 | logging.debug(itemsJSON) 25 | for itemJSON in itemsJSON: 26 | table = add2TableFun(table, itemJSON) 27 | return table 28 | -------------------------------------------------------------------------------- /utilities/helper.py: -------------------------------------------------------------------------------- 1 | """Helper Functions for the WKC API""" 2 | import inspect 3 | import logging 4 | from os.path import dirname 5 | 6 | def getPayloadsPath(): 7 | """Returns the path of the payloads directory. To be precise, it returns ../payloads 8 | Returns: 9 | str: path to the payload directory 10 | """ 11 | repoPath = dirname(dirname(__file__)) 12 | payloadsPath = f'{repoPath}/payloads' 13 | logging.debug(f'Payloads Path: {payloadsPath}') 14 | return payloadsPath 15 | 16 | def _getFunctionName(positionOnStack): 17 | """gets function name when given the position in the function stack when called 18 | Args: 19 | positionOnStack (int): refers to the location in the stack, i.e. 
the nth most recently called function 20 | Returns: 21 | str: function name 22 | """ 23 | return inspect.stack()[positionOnStack][3] # second index refers to the position of the functions name in the stack entry 24 | 25 | def getRequestName(): 26 | """gets HTTP request type when called in main._logAPIFunctionInfo. To be precise, it returns the name of an functions without the first character of the 4th function on the function stack 27 | Returns: 28 | str: requestName 29 | """ 30 | functionName = _getFunctionName(4) 31 | return functionName[1:] # strip '_' 32 | 33 | def sendRequestWithPayload(requestFun, url, headers, payload, files): 34 | """Sends a request, can optionally be enriched with payload, headers and files. 35 | Args: 36 | requestFun (function): HTTP request function from apis.MainAPI 37 | url (str): full URL with endpoint for the HTTP request 38 | headers (dict): JSON formatted headers. Can be None 39 | payload (dict): JSON formatted request payload. Can be None 40 | files (dict): JSON formatted files used for the request. Can be None 41 | Returns: 42 | requests.Response: HTTP response of the request 43 | """ 44 | if payload: 45 | logging.debug('adding json') 46 | return requestFun(url, headers=headers, json=payload, verify=False) 47 | elif files: 48 | return requestFun(url, headers=headers, files=files, verify=False) 49 | else: 50 | return requestFun(url, headers=headers, verify=False) 51 | -------------------------------------------------------------------------------- /utilities/subAPIs.py: -------------------------------------------------------------------------------- 1 | from apis import main 2 | import logging 3 | 4 | class SubAPI(): 5 | """Abstract API class that ensures the same MainAPI's can be used for multiple SubAPI's 6 | """ 7 | def __init__(self, mainAPI=None): 8 | """Creating a new SubAPI 9 | Args: 10 | mainAPI (main.MainAPI, optional): When a MainAPI is available, then it is used. 
If not, a new MainAPI is created and saved in self.mainAPI. Defaults to None. 11 | """ 12 | logging.debug('calling SubAPI init') 13 | logging.debug(mainAPI) 14 | if mainAPI: 15 | self.mainAPI = mainAPI 16 | else: 17 | self.mainAPI = main.MainAPI() 18 | --------------------------------------------------------------------------------