├── .gitignore ├── .vscode └── launch.json ├── BAW_connector ├── BAWExtraction_utils.py ├── BAW_IPM_backup.idp ├── ProcessMining_utils.py ├── README.md ├── config │ ├── BAW_default_fields.json │ └── config_techzone.json ├── pictures │ ├── BAW_config.jpeg │ ├── BAW_inspector.jpeg │ ├── Extraction_config.jpeg │ ├── IPM_config.jpeg │ └── IPM_userprofile.jpeg └── requirements.txt ├── Custom Filters ├── Case_started_after_datetime.js ├── Case_started_between_x_and_y_days_after_a_date.js ├── Case_started_with_these_activities.js ├── Cases_with_N_reworks.js ├── Cases_with_events_since_last_N_days.js ├── Exclude_reworks_except_selfloops.js ├── Keep_cases_where_2_activities_by_same_resource.js ├── OR_on_activities.js ├── Rework_on_this_activity.js ├── This_activity_started_between_date1_and_date2.js └── This_activity_started_more_than_X_days_ago.js ├── Custom Metrics ├── CaseQuality.js ├── CaseSLA.js ├── CaseWorkingDays.js ├── CountActivitytWithReworks.js ├── CountActivitytWithSelfLoops.js ├── CountReworks.js ├── CountReworksforTheseActivities.js ├── CountSelfLoops.js ├── CountSelfLoopsforTheseActivities.js ├── CustomCategory.js ├── NetLeadtime.js ├── PathTime.js ├── README.md └── test_flat.csv ├── Custom Widgets ├── ECharts_examples │ └── frontend.js ├── README.md ├── activity_relationships │ ├── README.md │ ├── activity_relationship.jpg │ ├── activity_relationships.json │ ├── backend.js │ ├── frontend.js │ └── style.css ├── case_category_evolution │ ├── backend.js │ ├── case_category_evolution.json │ └── frontend.js ├── chart widgets before 1.14.3 │ ├── custom_metric_linechart.json │ ├── dimension_leadtime_LC │ │ ├── DimensionLeadtimeLC.jpeg │ │ ├── backend.js │ │ ├── dimension_leadtime_LC.json │ │ ├── frontend_1.14.2.js │ │ ├── frontend_1.14.3 copy.js │ │ ├── frontend_1.14.3.js │ │ ├── index.html │ │ └── view.html │ ├── dimension_leadtime_LC_1.14.2.json │ ├── dimension_linechart │ │ ├── DimensionLineChart.jpg │ │ ├── README.md │ │ ├── Untitled 2.jpg │ │ ├── backend.js │ │ ├── backend_1.14.2.js │ │ ├── backend_console_log.jpg │ │ ├── debugbackend.js │ │ ├── frontend_1.14.2.js │ │ ├── frontend_1.14.3.js │ │ ├── generate_csv.py │ │ ├── test.js │ │ └── view.html │ ├── dimension_linechart_1.14.2.json │ ├── dimension_linechart_custom_metric │ │ ├── backend_1_14_2.js │ │ ├── custom_metric_linechart.jpg │ │ ├── custom_metric_linechart.json │ │ ├── frontend_1_14_2.js │ │ └── view.html │ └── v_barchart │ │ ├── backend.js │ │ ├── frontend.js │ │ ├── v_barchart.json │ │ ├── v_barcharts.jpg │ │ └── view.html └── quartiles_deciles │ ├── backend.js │ ├── frontend.js │ └── quartiles.json ├── Datasets_usecases ├── Bank Account Closure │ ├── README.md │ └── bank_account_closure.zip ├── Client onboarding │ ├── Client_onboarding.zip │ ├── Client_onboarding │ │ ├── Onboard Client_2023-10-05_083936+0000.idp │ │ └── onboard_client.zip │ └── README.md ├── README.md └── Tutorial │ ├── Eventlog sample for Analytics explanation.csv │ ├── IBM - Hands On Tutorial.zip │ └── README.md ├── IBM Process Mining Dashboards ├── README.md └── dist │ └── dashboard-export-job-1.0-SNAPSHOT.jar ├── IBM Process Mining Trimming ├── README.md ├── dist │ └── csv-trimming-job-1.0-SNAPSHOT.jar └── filters.json ├── Installation_on_prem ├── 1_14_4_processmining_taskmining_on_prem_same_server.md ├── Previous releases │ ├── 1_14_1_processmining_taskmining_on_prem_distinct_servers.md │ ├── 1_14_1_processmining_taskmining_on_prem_same_server.md │ ├── 1_14_3_processmining_taskmining_on_prem_same_server.md │ └── 
1_14_3_processmining_taskmining_upgrade_same_server.md └── README.md ├── LICENSE ├── LLM └── chat-with-api │ ├── BasePMTool.py │ ├── CreateLLM.py │ ├── CreateProjectTool.py │ ├── CustomParser.py │ ├── DeleteProjectTool.py │ ├── GetModelStatisticsTool.py │ ├── LoadDataTool.py │ ├── MiningTool.py │ ├── ProjectDetailTool.py │ ├── ProjectsTool.py │ ├── README.md │ ├── SetDataMappingTool.py │ ├── SetDateFormatTool.py │ ├── SuggestDateFormat.py │ ├── SuggestMapping.py │ ├── app.py │ ├── prompts.py │ └── requirements.txt ├── Process Apps ├── BAW BPM │ ├── BAW BPM.pma │ ├── BAW_BPMN_ProcessApp.py │ ├── BAW_IPM_backup.idp │ └── README.md ├── BAW-IBM-Process-Mining-Assets │ ├── 1.14.4 │ │ ├── BAW-IBM-Process-Mining-Assets.zip │ │ └── BAW-IBM-Process-Mining-Assets │ │ │ ├── BAW-Connector │ │ │ ├── BAW_Connector.py │ │ │ ├── README.pdf │ │ │ ├── baw_config.json │ │ │ └── requirements.txt │ │ │ ├── BAW-Process_App │ │ │ └── BAW Business Process Management.pma │ │ │ └── README.pdf │ ├── 1.15.0 │ │ ├── BAW-IBM-Process-Mining-Assets.zip │ │ └── BAW-IBM-Process-Mining-Assets │ │ │ ├── BAW-Connector │ │ │ ├── BAW_Connector.py │ │ │ ├── README.pdf │ │ │ ├── baw_config.json │ │ │ └── requirements.txt │ │ │ ├── BAW-Process_App │ │ │ └── BAW Business Process Management.pma │ │ │ └── README.pdf │ └── 2.0.0 │ │ └── BAW-IBM-Process-Mining-Assets.zip ├── IT_Ticketing_ServiceNow │ ├── README.md │ ├── ServiceNow.jpg │ ├── ServiceNowConnector.py │ ├── Ticketing with ServiceNow.pma │ └── serviceNow1_2023-06-05_075444-0700.idp ├── Jira_ticketing │ ├── .vscode │ │ └── launch.json │ ├── Jira Ticketing.pma │ ├── JiraConnector.py │ ├── JiraConnectorTutorial.ipynb │ ├── JiraSoftware.jpg │ ├── jira_mapping.idp │ └── my_config_template.json ├── Open_apps │ ├── Aha! Ideas Management open.pma │ ├── GitHub Ticketing - Trial open.pma │ └── Zenhub SDLC - Trial open.pma ├── P2P_data_xform_lab │ ├── P2P LAB_2023-08-03_015726-0700.idp │ ├── P2P.zip │ ├── P2P_data_extraction_tutorial.ipynb │ ├── P2P_data_xform_lab.py │ └── README.md ├── Pandas Tuto │ ├── date_utillities.ipynb │ ├── incidents_REST.csv │ └── pandas_basics_process_apps.ipynb ├── README.md ├── testProcessAppWithFile │ ├── README.md │ ├── justatest.csv │ ├── justatest.zip │ └── simplecode.py └── testProcessApps │ ├── pattest_2023-06-02_081520+0000.idp │ └── testprocessapp.py ├── README.md ├── REST APIs ├── IPMClient │ ├── CAFileUpload.py │ ├── COS.ipynb │ ├── COSConfig.json │ ├── CognosAnalytics.json │ ├── CognosAnalyticsClient.py │ ├── CognosAnalyticsExample.json │ ├── CognosAnalytics_ProcessMining_demo.ipynb │ ├── IPMAccount.py │ ├── IPMAlerts.py │ ├── IPMBase.py │ ├── IPMClient.py │ ├── IPMConfigExample.json │ ├── IPMDashboard.py │ ├── IPMOrganization.py │ ├── IPMProject.py │ ├── IPMTenant.py │ ├── IPMWidgets.py │ ├── IPM_CognosAnalytics copy.ipynb │ ├── IPM_CognosAnalytics.ipynb │ ├── IPM_CognosAnalytics.py │ ├── Nextgen.py │ ├── README.md │ ├── Raffaello.py │ ├── __pycache__ │ │ ├── CognosAnalyticsClient.cpython-310.pyc │ │ ├── IPMAccount.cpython-310.pyc │ │ ├── IPMBase.cpython-310.pyc │ │ ├── IPMClient.cpython-310.pyc │ │ ├── IPMDashboard.cpython-310.pyc │ │ ├── IPMOrganization.cpython-310.pyc │ │ ├── IPMProject.cpython-310.pyc │ │ ├── IPMTenant.cpython-310.pyc │ │ └── IPMWidgets.cpython-310.pyc │ ├── acticityStatsFromWidget.csv │ ├── activityStats.csv │ ├── activityStatsFromWidget.csv │ ├── allVariants.csv │ ├── blocked_at_pending_liquidation_request.csv │ ├── completedCasesBizData.csv │ ├── completedCasesDates.csv │ ├── completedCasesStartDate.csv │ ├── data │ │ ├── 
Client Onboarding.idp │ │ └── Client Onboarding.zip │ ├── example.py │ ├── images │ │ └── business_performance.jpg │ ├── json_result_examples │ │ ├── deviations.json │ │ ├── filers2.json │ │ ├── filter-templates.json │ │ ├── filters-OR.json │ │ ├── filters.json │ │ ├── kpi-settings.json │ │ ├── model-statistics.json │ │ ├── project-settings activities-cost.json │ │ ├── project-settings activities-working-time.json │ │ ├── settings.json │ │ ├── status.json │ │ └── variants.json │ ├── objectstorage.js │ ├── patPlay.ipynb │ ├── processStats.csv │ ├── setupTechzone.py │ ├── test.py │ ├── testIPMClient.ipynb │ ├── testPermissionsOrgandTenant.py │ ├── testVariants.py │ ├── transitionStats.csv │ ├── users_to_add.csv │ ├── variants.csv │ └── variants2.csv └── REST_API_Examples │ ├── .vscode │ └── launch.json │ ├── CSV_Alerts.ipynb │ ├── PM-REST-API-oas.json │ ├── Patrick_Curl.md │ ├── ProcessMining_API.py │ ├── Using_Curl.md │ ├── VariablesAlerts.py │ ├── __pycache__ │ └── ProcessMining_API.cpython-310.pyc │ ├── pharoses1_config.json │ ├── techzone.json │ ├── tests.ipynb │ └── view.html └── RPA-bot-generation └── uipath-pluggable-rpabot-generator-sample.js /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | REST API Examples/Pat_config.conf 3 | Process Apps/IT_Ticketing_ServiceNow/FixCloseDate.ipynb 4 | Process Apps/IT_Ticketing_ServiceNow/hold_reason_changes_df.csv 5 | Process Apps/IT_Ticketing_ServiceNow/eventlog.csv 6 | Process Apps/IT_Ticketing_ServiceNow/incident_audit.csv 7 | Process Apps/IT_Ticketing_ServiceNow/instance_state_changes_df.csv 8 | Process Apps/IT_Ticketing_ServiceNow/my_config.json 9 | Process Apps/IT_Ticketing_ServiceNow/Curl.txt 10 | Process Apps/IT_Ticketing_ServiceNow/ServiceNowIT.csv 11 | Process Apps/IT_Ticketing_ServiceNow/state_changes_df.csv 12 | Process Apps/IT_Ticketing_ServiceNow/state_changes.csv 13 | Process Apps/IT_Ticketing_ServiceNow/testProcessApps.zip 14 | Process Apps/IT_Ticketing_ServiceNow/tests.ipynb 15 | Process Apps/IT_Ticketing_ServiceNow/user_groups_df.csv 16 | Process Apps/testProcessApps/justatest.csv 17 | Process Apps/Jira_ticketing/JiraConnector_original.py 18 | Process Apps/Jira_ticketing/JiraConnector copy.py 19 | Process Apps/Jira_ticketing/all_tickets.csv 20 | Process Apps/Jira_ticketing/curl.txt 21 | Process Apps/Jira_ticketing/changelog.csv 22 | Process Apps/Jira_ticketing/IBM Process Mining - Process Apps ETL.pptx 23 | Process Apps/Jira_ticketing/jiraevents.csv 24 | Process Apps/Jira_ticketing/my_config.json 25 | Process Apps/Jira_ticketing/test.csv 26 | Process Apps/Jira_ticketing/test.ipynb 27 | Process Apps/Jira_ticketing/test2.ipynb 28 | Process Apps/Jira_ticketing/tt.csv 29 | Process Apps/Jira_ticketing/uncompress.zip 30 | Process Apps/Jira_ticketing/issues.csv 31 | Custom Widgets/dimension_linechart/test.js 32 | Process Apps/Jira_ticketing/JiraConnectorTutorial.zip 33 | Process Apps/Jira_ticketing/eventlog.csv 34 | Process Apps/P2P_data_xform_lab/__MACOSX/._invoice.csv 35 | Process Apps/P2P_data_xform_lab/__MACOSX/._procurement.csv 36 | Process Apps/P2P_data_xform_lab/__MACOSX/._requisition.csv 37 | Process Apps/P2P_data_xform_lab/procurement.csv 38 | Process Apps/P2P_data_xform_lab/P2Peventlog.csv 39 | Process Apps/P2P_data_xform_lab/requisition.csv 40 | Process Apps/P2P_data_xform_lab/invoice.csv 41 | Process Apps/P2P_data_xform_lab/P2P_data_extraction_tutorial copy.ipynb 42 | Process Apps/P2P_data_xform_lab/proc_and_invoices.csv 43 | Process 
Apps/P2P_data_xform_lab/P2Peventlogtest.csv 44 | Custom Widgets/v_barchart/view copy.html 45 | REST API Examples/test.js 46 | .gitignore 47 | Custom Widgets/activity_relationships/test.js 48 | Custom Widgets/activity_relationships/backend.js 49 | Custom Widgets/activity_relationships/frontend.js 50 | Custom Widgets/activity_relationships/output.json 51 | REST API Examples/Patrick_Curl.md 52 | REST API Examples/PM-REST-API-oas.json 53 | Process Apps/Jira_ticketing/my_config_ibm.json 54 | Custom Widgets/ECharts_examples/frontend timeaxis only year.js 55 | Custom Widgets/ECharts_examples/frontend timeaxis type time.js 56 | Custom Widgets/ECharts_examples/frontend timeaxis.js 57 | REST_API_Examples/pharoses1_config.json 58 | REST_API_Examples/Pat_config.conf 59 | REST_API_Examples/Patrick_Curl.md 60 | REST_API_Examples/view.html 61 | REST_API_Examples/alert_history_Alerts Dashboard_alerts-widget-1.csv 62 | REST_API_Examples/alert_summary_Alerts Dashboard_alerts-widget-1.csv 63 | REST_API_Examples/.vscode/launch.json 64 | REST_API_Examples/__pycache__/ProcessMining_API.cpython-310.pyc 65 | REST_API_Examples/tests.ipynb 66 | IPMClient/__pycache__/* 67 | REST_API_Examples/__pycache__/* 68 | mylog.csv 69 | mysummary.csv 70 | IPMClient/IPMConfig.json 71 | REST_API_Examples/__pycache__/ProcessMining_API.cpython-310.pyc 72 | IPMClient/IPMConfig_raffaello.json 73 | IPMClient/Raffaello.py 74 | IPMClient/Nextgen.py 75 | IPMClient/test.py 76 | IPMClient/testIPMClient.ipynb 77 | IPMClient/IPMConfig_nextgen.json 78 | IPMClient/COS.json 79 | IPMClient/activityStats.csv 80 | IPMClient/activityStatsFromWidget.csv 81 | IPMClient/CAFileUpload.py 82 | IPMClient/CognosAnalytics.json 83 | IPMClient/processStats.csv 84 | IPMClient/completedCases.csv 85 | IPMClient/transitionStats.csv 86 | jupyter.bak 87 | IPMClient/objectstorage.js 88 | .gitignore 89 | IPMClient/acticityStatsFromWidget.csv 90 | IPMClient/allVariants.csv 91 | IPMClient/variants.csv 92 | IPMClient/variants2.csv 93 | IPMClient/COSConfig.json 94 | IPMClient/testVariants.py 95 | IPMClient/completedCasesStartDate.csv 96 | IPMClient/patPlay.ipynb 97 | IPMClient/setupTechzone.py 98 | IPMClient/testPermissionsOrgandTenant.py 99 | IPMClient/blocked_at_pending_liquidation_request.csv 100 | IPMClient/completedCasesBizData.csv 101 | IPMClient/completedCasesDates.csv 102 | IPMClient/IPM_CognosAnalytics copy.ipynb 103 | IPMClient/users_to_add.csv 104 | IPMClient/IPMConfig_nextgen.json 105 | 106 | mySVG.svg 107 | IPMClient/anjana.json 108 | IPMClient/IPM_CognosAnalytics_Anjana.py 109 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "configurations": [ 3 | { 4 | "type": "node-terminal", 5 | "name": "JavaScript Debug Terminal", 6 | "request": "launch", 7 | "cwd": "${workspaceFolder}" 8 | } 9 | ] 10 | } -------------------------------------------------------------------------------- /BAW_connector/BAW_IPM_backup.idp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/BAW_connector/BAW_IPM_backup.idp -------------------------------------------------------------------------------- /BAW_connector/config/BAW_default_fields.json: -------------------------------------------------------------------------------- 1 | { 2 | "process_mining_mapping": { 3 | "process_ID":"piid", 4 | "task_name":"name", 5 | 
"start_date":"startTime", 6 | "end_date":"completionTime", 7 | "owner":"owner", 8 | "team":"teamDisplayName" 9 | }, 10 | "included_task_data": [ 11 | "activationTime", 12 | "atRiskTime", 13 | "completionTime", 14 | "description", 15 | "isAtRisk", 16 | "originator", 17 | "priority", 18 | "startTime", 19 | "state", 20 | "piid", 21 | "priorityName", 22 | "teamDisplayName", 23 | "managerTeamDisplayName", 24 | "tkiid", 25 | "name", 26 | "status", 27 | "owner", 28 | "assignedToDisplayName", 29 | "assignedToType", 30 | "dueTime", 31 | "closeByUser" 32 | ], 33 | "excluded_task_data": [ 34 | "description", 35 | "clientTypes", 36 | "containmentContextID", 37 | "kind", 38 | "externalActivitySnapshotID", 39 | "serviceID", 40 | "serviceSnapshotID", 41 | "serviceType", 42 | "flowObjectID", 43 | "nextTaskId", 44 | "actions", 45 | "teamName", 46 | "teamID", 47 | "managerTeamName", 48 | "managerTeamID", 49 | "displayName", 50 | "processInstanceName", 51 | "assignedTo", 52 | "assignedToID", 53 | "collaboration", 54 | "activationTime", 55 | "lastModificationTime", 56 | "assignedToDisplayName", 57 | "closeByUserFullName" 58 | ] 59 | } -------------------------------------------------------------------------------- /BAW_connector/config/config_techzone.json: -------------------------------------------------------------------------------- 1 | { 2 | "JOB": { 3 | "job_name": "techzone", 4 | "exit": 0 5 | }, 6 | "BAW": { 7 | "root_url": "https://ibmbaw:9443/", 8 | "user": "admin", 9 | "password": "admin", 10 | "password_env_var": "", 11 | "project": "HSS", 12 | "process_name": "Standard HR Open New Position", 13 | "from_date": "2021-01-15T15:16:50Z", 14 | "from_date_criteria": "modifiedAfter", 15 | "to_date": "2022-12-15T15:17:13Z", 16 | "to_date_criteria": "modifiedBefore", 17 | "paging_size": 0, 18 | "status_filter": "", 19 | "loop_rate": 0, 20 | "thread_count": 1, 21 | "instance_limit": 1, 22 | "offset": 0, 23 | "task_data_variables": [], 24 | "export_exposed_variables": false, 25 | "csv_at_each_loop": false, 26 | "trigger_csv_beyond": 500000 27 | }, 28 | "IPM": { 29 | "url": "", 30 | "user_id": "", 31 | "api_key": "", 32 | "org_key": "", 33 | "project_key": "", 34 | "version": "1.13.1+" 35 | } 36 | } -------------------------------------------------------------------------------- /BAW_connector/pictures/BAW_config.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/BAW_connector/pictures/BAW_config.jpeg -------------------------------------------------------------------------------- /BAW_connector/pictures/BAW_inspector.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/BAW_connector/pictures/BAW_inspector.jpeg -------------------------------------------------------------------------------- /BAW_connector/pictures/Extraction_config.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/BAW_connector/pictures/Extraction_config.jpeg -------------------------------------------------------------------------------- /BAW_connector/pictures/IPM_config.jpeg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/BAW_connector/pictures/IPM_config.jpeg -------------------------------------------------------------------------------- /BAW_connector/pictures/IPM_userprofile.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/BAW_connector/pictures/IPM_userprofile.jpeg -------------------------------------------------------------------------------- /BAW_connector/requirements.txt: -------------------------------------------------------------------------------- 1 | aiohttp==3.7.4.post0 2 | async-timeout==3.0.1 3 | attrs==21.2.0 4 | backports.entry-points-selectable==1.1.0 5 | certifi==2021.10.8 6 | chardet==4.0.0 7 | charset-normalizer==2.0.7 8 | decorator==5.1.0 9 | distlib==0.3.3 10 | filelock==3.3.1 11 | idna==3.3 12 | jsonpath-ng==1.5.3 13 | multidict==5.2.0 14 | platformdirs==2.4.0 15 | ply==3.11 16 | PyYAML==6.0 17 | requests==2.26.0 18 | six==1.16.0 19 | tqdm==4.62.3 20 | typing-extensions==3.10.0.2 21 | urllib3==1.26.7 22 | yarl==1.7.0 -------------------------------------------------------------------------------- /Custom Filters/Case_started_after_datetime.js: -------------------------------------------------------------------------------- 1 | // Keep the cases started after the specified datetime. 2 | 3 | // Parameters 4 | var compareDate = '2022-12-17T03:24:00'; 5 | 6 | // Function 7 | var compareTime = new Date(compareDate).getTime(); 8 | var filter = { 9 | keepTrace: function (trace) { 10 | 11 | if (trace.get(0).getStartTime() >= compareTime) 12 | return true; 13 | else 14 | return false; 15 | } 16 | }; -------------------------------------------------------------------------------- /Custom Filters/Case_started_between_x_and_y_days_after_a_date.js: -------------------------------------------------------------------------------- 1 | //Keep the cases started between X and Y days after the specified datetime custom field. 2 | //The custom field’s value is detected on the first event of the case. 3 | //Note: X is the lower bound, Y is the upper. 
4 | 
5 | 
6 | days_X = 30;
7 | days_Y = 60;
8 | var DIMENSION_WITH_DATE = 'INVOICE_DATE';
9 | 
10 | 
11 | days_X_ms = days_X * 24 * 60 * 60 * 1000;
12 | days_Y_ms = days_Y * 24 * 60 * 60 * 1000;
13 | DIMENSION_WITH_DATE = 'attr-custom-' + DIMENSION_WITH_DATE;
14 | 
15 | var filter = {
16 |     keepTrace: function (trace) {
17 |         if (trace.get(0).getStartTime() - trace.get(0).getLongAttributeValue(DIMENSION_WITH_DATE) >= days_X_ms &&
18 |             trace.get(0).getStartTime() - trace.get(0).getLongAttributeValue(DIMENSION_WITH_DATE) < days_Y_ms)
19 |             return true;
20 |         else
21 |             return false;
22 |     }
23 | };
--------------------------------------------------------------------------------
/Custom Filters/Case_started_with_these_activities.js:
--------------------------------------------------------------------------------
1 | //Keep cases where the first activity is one of those inserted in activities
2 | 
3 | var activities = ['Requisition Created', 'Order Item Created'];
4 | 
5 | var filter = {
6 |     keepTrace: function (trace) {
7 |         var start_activity = trace.get(0).getEventClass();
8 |         if (activities.indexOf(start_activity) > -1)
9 |             return true;
10 |         else return false;
11 |     }
12 | };
--------------------------------------------------------------------------------
/Custom Filters/Cases_with_N_reworks.js:
--------------------------------------------------------------------------------
1 | // Keep cases with at least N reworks on any activity (self-loops do not count as reworks).
2 | 
3 | var reworkGoal = 2; // N
4 | 
5 | var filter = {
6 |     keepTrace: function (trace) {
7 |         var activities = [];
8 |         var previous_activity = '';
9 |         var nbr_reworks = 0;
10 |         for (var k = 0; k < trace.size(); k++) {
11 |             var activity = trace.get(k).getEventClass();
12 |             if (previous_activity == activity) // self loop
13 |                 continue;
14 |             if (activities.indexOf(activity) < 0) // first occurrence of activity
15 |                 activities.push(activity);
16 |             else // activity rework, not a self loop
17 |                 nbr_reworks++;
18 |             previous_activity = activity;
19 |         }
20 |         return nbr_reworks >= reworkGoal;
21 |     }
22 | };
--------------------------------------------------------------------------------
/Custom Filters/Cases_with_events_since_last_N_days.js:
--------------------------------------------------------------------------------
1 | 
2 | var delay = 30; // days
3 | 
4 | // Keep cases that had an event since the last 30 days (delay)
5 | var today = new Date().getTime();
6 | var delay_ms = delay*24*3600*1000;
7 | 
8 | var filter = {
9 |     keepTrace: function(trace) {
10 |         var lastEvent = trace.get(trace.size()-1);
11 |         if (today - lastEvent.getStartTime() <= delay_ms)
12 |             return true;
13 |         return false;
14 |     }
15 | };
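The filters above can be smoke-tested outside IBM Process Mining with a small mock of the trace API. The sketch below is illustrative only: it implements just the accessors the scripts in this folder already call (size, get, getEventClass, getStartTime), the event data is invented, and you would extend the mock (getResource, getLongAttributeValue, ...) for the filters that need them.

// Hypothetical local test harness for custom filters (not a product API).
function makeTrace(events) {
    return {
        size: function () { return events.length; },
        get: function (k) {
            var e = events[k];
            return {
                getEventClass: function () { return e.activity; },
                getStartTime: function () { return new Date(e.start).getTime(); }
            };
        }
    };
}

// Paste a filter definition above this line, then exercise it with fake events:
var mockTrace = makeTrace([
    { activity: 'Requisition Created', start: '2023-01-02T09:00:00' },
    { activity: 'Order Item Created', start: '2023-01-03T10:00:00' }
]);
console.log(filter.keepTrace(mockTrace)); // prints true or false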
--------------------------------------------------------------------------------
/Custom Filters/Exclude_reworks_except_selfloops.js:
--------------------------------------------------------------------------------
1 | // Exclude the case if there is a rework on any activity, except for self-loops (self-loops should not be excluded).
2 | 
3 | var filter = {
4 |     keepTrace: function (trace) {
5 |         var activities = [];
6 |         var previous_activity = '';
7 |         for (var k = 0; k < trace.size(); k++) {
8 |             var event = trace.get(k);
9 |             var activity = event.getEventClass();
10 |             if (previous_activity == activity) // self loop
11 |                 continue;
12 |             else if (activities.indexOf(activity) < 0) // first occurrence of activity
13 |                 activities.push(activity);
14 |             else // activity rework, not a self loop
15 |                 return false;
16 |             previous_activity = activity;
17 |         }
18 |         return true;
19 |     }
20 | };
--------------------------------------------------------------------------------
/Custom Filters/Keep_cases_where_2_activities_by_same_resource.js:
--------------------------------------------------------------------------------
1 | // Keep the case if the 2 specified activities are performed by the same resource.
2 | 
3 | var act1 = 'Request created';
4 | var act2 = 'Request completed with account closure';
5 | 
6 | var filter = {
7 |     keepTrace: function (trace) {
8 |         var res1 = '';
9 |         var res2 = '';
10 |         for (var k = 0; k < trace.size(); k++) {
11 |             var event = trace.get(k);
12 |             var activity = event.getEventClass();
13 |             if (activity == act1 && event.getResource() != '') {
14 |                 res1 = event.getResource();
15 |             }
16 |             if (activity == act2 &&
17 |                 event.getResource() != '') {
18 |                 res2 = event.getResource();
19 |                 if (res1 == res2)
20 |                     return true;
21 |             }
22 |         }
23 |         return false;
24 |     }
25 | };
--------------------------------------------------------------------------------
/Custom Filters/OR_on_activities.js:
--------------------------------------------------------------------------------
1 | // Keep cases that include at least one of the activities below
2 | // This is a logical OR. It can't be done with the UI
3 | 
4 | // Adapt this list to your process
5 | var activityList = [
6 |     'Purch Order Line Quantity Changed',
7 |     'Purch Order Line Price Changed',
8 |     'Purch Order Line Payment Terms Changed',
9 |     'Purch Order Line Price Unit Changed',
10 |     'Purch Order Line Info Records Changed',
11 |     'Purch Order Supplier Changed',
12 |     'Purch Order Line Account Category Changed'
13 | ];
14 | 
15 | var filter = {
16 |     keepTrace: function(trace) {
17 |         for(var k = 0 ; k < trace.size(); k++) {
18 |             var event = trace.get(k);
19 |             if (activityList.indexOf(event.getEventClass()) >= 0) // >= 0: do not skip the first activity of the list
20 |                 return true;
21 |         }
22 |         return false;
23 |     }
24 | };
--------------------------------------------------------------------------------
/Custom Filters/Rework_on_this_activity.js:
--------------------------------------------------------------------------------
1 | // Keep cases with a rework on at least one of the activities below (logical OR).
2 | // A rework means the activity occurs more than once in the case.
3 | 
4 | var testedActivities = ['activity1', 'activity2'];
5 | 
6 | var filter = {
7 |     keepTrace: function (trace) {
8 |         var counters = []; // reset for each case
9 |         testedActivities.forEach(function () { counters.push(0); });
10 |         for (var k = 0; k < trace.size(); k++) {
11 |             var index = testedActivities.indexOf(trace.get(k).getEventClass());
12 |             if (index >= 0) counters[index]++;
13 |         }
14 |         // if an activity appears more than once, that's a rework
15 |         return counters.reduce(function (acc, v) { return acc || v > 1; }, false);
16 |     }
17 | };
--------------------------------------------------------------------------------
/Custom Filters/This_activity_started_between_date1_and_date2.js:
--------------------------------------------------------------------------------
1 | // One of these activities started between dateMin and dateMax
2 | // Adapt this list to your process
3 | var activityList = [
4 |     'Purch Order Line Quantity Changed',
5 |     'Purch Order Line Price Changed',
6 |     'Purch Order Line Payment Terms Changed',
7 |     'Purch Order Line Price Unit Changed',
8 |     'Purch Order Line Info Records Changed',
9 |     'Purch Order Supplier Changed',
10 |     'Purch Order Line Account Category Changed'
11 | ];
12 | var dateMin = "2023-01-31 00:00:00";
13 | var dateMax = "2023-02-06 00:00:00"; //dateMax could be today
14 | 
15 | var dateMin_ms = new Date(dateMin).getTime();
16 | var dateMax_ms = new Date(dateMax).getTime();
17 | 
18 | var filter = {
19 |     keepTrace: function(trace) {
20 |         for(var k = 0 ; k < trace.size(); k++) {
21 |             var event = trace.get(k);
22 |             if (activityList.indexOf(event.getEventClass()) >= 0 &&
23 |                 (event.getStartTime() > dateMin_ms) &&
24 |                 (event.getStartTime() < dateMax_ms))
25 |                 return true;
26 |         }
27 |         return false;
28 |     }
29 | };
--------------------------------------------------------------------------------
/Custom Filters/This_activity_started_more_than_X_days_ago.js:
--------------------------------------------------------------------------------
1 | // One of these activities started more than X days ago
2 | // Adapt this list and the delay to your process
3 | var activityList = [
4 |     'Purch Order Line Quantity Changed',
5 |     'Purch Order Line Price Changed',
6 |     'Purch Order Line Payment Terms Changed',
7 |     'Purch Order Line Price Unit Changed',
8 |     'Purch Order Line Info Records Changed',
9 |     'Purch Order Supplier Changed',
10 |     'Purch Order Line Account Category Changed'
11 | ];
12 | var delay = 30; // X, in days
13 | 
14 | var today = new Date().getTime();
15 | var delay_ms = delay*24*3600*1000;
16 | 
17 | var filter = {
18 |     keepTrace: function(trace) {
19 |         for(var k = 0 ; k < trace.size(); k++) {
20 |             var event = trace.get(k);
21 |             if (activityList.indexOf(event.getEventClass()) >= 0 && ((today - event.getStartTime()) > delay_ms))
22 |                 return true;
23 |         }
24 |         return false;
25 |     }
26 | };
--------------------------------------------------------------------------------
/Custom Metrics/CaseQuality.js:
--------------------------------------------------------------------------------
1 | // This metric associates a quality value (0-100) to each case, which would typically decrease if deviations occur.
2 | //
3 | // In the provided example, the quality decreases by 7 every time an event which includes the word 'Change' occurs.
4 | // AUTHOR: LORENZO LUCCHI
5 | 
6 | // COPY FROM HERE
7 | 
8 | // VARIABLES
9 | var wordToMatch = 'Change';
10 | // FUNCTION
11 | 
12 | var customMetric = {
13 |     caseMetric: function (aCase) {
14 |         var quality = 100;
15 |         var changes = 0;
16 |         for (var k = 0; k < aCase.size(); k++) {
17 |             var event = aCase.get(k);
18 |             var activity = event.getEventClass();
19 |             if (activity.indexOf(wordToMatch) > -1) changes++;
20 |         }
21 |         quality -= changes * 7;
22 |         return quality < 0 ? 0 : quality;
23 |     }
24 | };
25 | 
26 | // COPY UNTIL HERE
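For example, with wordToMatch = 'Change', a case containing three events whose names include 'Change' scores 100 - 3*7 = 79; a case with fifteen or more such events is floored at 0.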
--------------------------------------------------------------------------------
/Custom Metrics/CaseSLA.js:
--------------------------------------------------------------------------------
1 | // This metric associates an SLA (double value, in milliseconds) to each case, based on a case attribute.
2 | 
3 | // AUTHORS: LORENZO LUCCHI, PATRICK MEGARD
4 | 
5 | //COPY FROM HERE
6 | // Variables
7 | var fieldName = 'RISK_LEVEL'; // You can change the name of the column
8 | 
9 | // You can replace the keys (1, 2, 3, 4) by strings ('High', 'Medium', 'Low')
10 | // and the value for each key: number of hours per priority level
11 | var SLA_hours_per_priority = {
12 |     1: 3,
13 |     2: 8,
14 |     3: 10,
15 |     4: 20
16 | };
17 | 
18 | // Custom metric
19 | 
20 | var customMetric = {
21 |     caseMetric: function(aCase) {
22 |         // We assume that the priority attribute is in the last event of the case. Adjust if needed.
23 |         var event = aCase.get(aCase.size()-1);
24 |         var priority = 0;
25 |         try {
26 |             // if the cell is empty we'll get an error, hence the 'try/catch'
27 |             // Keep the line that matches the type of the field
28 |             // priority = event.getStringCustomAttributeValue(fieldName);
29 |             priority = event.getIntCustomAttributeValue(fieldName);
30 |             // priority = event.getDoubleCustomAttributeValue(fieldName);
31 |         }
32 |         catch(e){
33 |             // when the value is empty
34 |         }
35 |         var hours = SLA_hours_per_priority[priority];
36 |         return hours ? hours*3600000 : 0; // 0 when the priority is missing or unknown
37 |     }
38 | };
39 | 
40 | // COPY UNTIL HERE
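To make the SLA actionable in a dashboard, a companion metric can label each case against it. The sketch below makes the same assumptions as CaseSLA.js (RISK_LEVEL carried by the last event, times in epoch milliseconds) and additionally assumes the first event is the earliest; the returned labels are illustrative.

// Hypothetical companion metric: compare the actual case lead time to the SLA.
var fieldName = 'RISK_LEVEL';
var SLA_hours_per_priority = { 1: 3, 2: 8, 3: 10, 4: 20 };

var customMetric = {
    caseMetric: function (aCase) {
        var lastEvent = aCase.get(aCase.size() - 1);
        var priority = 0;
        try { priority = lastEvent.getIntCustomAttributeValue(fieldName); } catch (e) {}
        var hours = SLA_hours_per_priority[priority];
        if (!hours) return 'no SLA';
        // For multi-level logs, scan all events for min start / max end as in NetLeadtime.js.
        var leadTimeMs = lastEvent.getEndTime() - aCase.get(0).getStartTime();
        return leadTimeMs > hours * 3600000 ? 'breached' : 'met'; // usable as a dimension
    }
};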
--------------------------------------------------------------------------------
/Custom Metrics/CaseWorkingDays.js:
--------------------------------------------------------------------------------
1 | // Returns the number of working days to complete a case
2 | // Removes week ends
3 | // Removes holidays provided as an array of Date objects
4 | 
5 | // Custom Metric code starts here
6 | // COPY FROM THE LINE BELOW
7 | 
8 | function countWeekendDays(d0, d1, ndays){
9 |     var nsaturdays = Math.floor( (d0.getDay()+ndays) / 7 );
10 |     return 2*nsaturdays + (d0.getDay()==0) - (d1.getDay()==6);
11 | }
12 | 
13 | function excludeWeekendFromVacations(vacations){
14 |     var vacationsWithoutWeekends = [];
15 |     for (var i=0; i < vacations.length; i++){
16 |         // keep only the vacation days that fall on a weekday
17 |         if (vacations[i].getDay()>0 && vacations[i].getDay()<6)
18 |             vacationsWithoutWeekends.push(vacations[i]);
19 |     }
20 |     return vacationsWithoutWeekends;
21 | }
22 | 
23 | function countWorkingDays(startTime, endTime, vacations){
24 |     var millisecondsPerDay = 24 * 60 * 60 * 1000; // Day in milliseconds
25 |     var startDate = new Date(startTime);
26 |     var endDate = new Date(endTime);
27 |     startDate.setUTCHours(0,0,0,0);
28 |     endDate.setUTCHours(0,0,0,0);
29 |     if (startDate.getTime() == endDate.getTime())
30 |         return 1; // case completed within a day (comparing full dates, not just the day of month)
31 | 
32 |     var ndays = (endDate - startDate)/millisecondsPerDay + 1;
33 | 
34 |     // remove weekends
35 |     ndays -= countWeekendDays(startDate, endDate, ndays);
36 | 
37 |     // remove vacation days that are not saturday or sunday
38 |     var vacationsWithoutWeekends = excludeWeekendFromVacations(vacations);
39 |     for (var j=0; j < vacationsWithoutWeekends.length; j++){
40 |         if (startDate < vacationsWithoutWeekends[j] && endDate > vacationsWithoutWeekends[j])
41 |             ndays--;
42 |     }
43 | 
44 |     return ndays;
45 | }
46 | 
47 | // This is for multi-event logs where the latest activity of a case is not always the last one.
48 | // It works also for flat processes.
49 | var customMetric = {
50 |     caseMetric: function(aCase){
51 |         // Ideally we would just set the dates as strings like "2023-1-1", but the JS server does not allow
52 |         // the creation of dates through strings. Careful: Date months are 0-based (0 = January).
53 |         var vacationDays = [new Date(2023,0,1), new Date(2023,4,1), new Date(2023,6,14), new Date(2023,7,15),
54 |             new Date(2023,10,1), new Date(2023,10,11), new Date(2023,11,25)];
55 | 
56 |         var startTime = Number.MAX_VALUE;
57 |         var endTime = 0;
58 |         for(var k = 0 ; k < aCase.size(); k++) {
59 |             var event = aCase.get(k);
60 |             if (event.getStartTime() < startTime){
61 |                 startTime = event.getStartTime();
62 |             }
63 |             if(event.getEndTime() > endTime){
64 |                 endTime = event.getEndTime();
65 |             }
66 |         }
67 | 
68 |         return (countWorkingDays(startTime, endTime, vacationDays));
69 |     }
70 | };
71 | 
72 | // COPY UNTIL THE LINE ABOVE
73 | // Custom Metric code stops here
74 | 
75 | // This section is for testing the code within a javascript development environment
76 | 
77 | let vacationDays = [new Date(2023,0,1), new Date(2023,4,1), new Date(2023,6,14), new Date(2023,7,15),
78 |     new Date(2023,10,1), new Date(2023,10,11), new Date(2023,11,25)]; // same list as in the metric
79 | let startTime = "2023-12-22T10:00:00";
80 | let endTime = "2023-12-26T20:01:00";
81 | let millisecondsPerDay = 24 * 60 * 60 * 1000;
82 | 
83 | startTime = "2018-11-02T15:03:56";
84 | console.log(new Date(startTime));
85 | startTimeMs = 1541171036000; //1541171036000
86 | console.log(new Date(startTimeMs));
87 | 
88 | endTime = "2018-11-05T16:36:39";
89 | endTimeMs = 1541435799000; //1541435799000
90 | console.log(new Date(endTime));
91 | console.log(new Date(endTimeMs));
92 | 
93 | d0 = new Date(startTime);
94 | d1 = new Date(endTime);
95 | console.log(d1-d0);
96 | 
97 | d0.setHours(0,0,0,0); // 1541404800000
98 | console.log(d0);
99 | console.log(d0.getTime()); //1541142000000
100 | console.log("diff" + ((d0.getTime() - 1541142000000)/(60*60*1000)));
101 | 
102 | d1.setHours(0,0,0,0); // 1541142000000
103 | console.log(d1.getTime());
104 | console.log(d1-d0);
105 | console.log((d1-d0)/millisecondsPerDay + 1);
106 | 
107 | var vacationsWithoutWeekends = excludeWeekendFromVacations(vacationDays);
108 | console.log(vacationsWithoutWeekends);
109 | console.log(new Date(startTime));
110 | console.log(new Date(endTime));
111 | console.log(countWorkingDays(startTime, endTime, vacationsWithoutWeekends));
--------------------------------------------------------------------------------
/Custom Metrics/CountActivitytWithReworks.js:
--------------------------------------------------------------------------------
1 | var customMetric = {
2 |     caseMetric: function (aCase) {
3 |         var nbr_activity_with_reworks = 0;
4 |         var activities = [];
5 |         var counters = [];
6 |         for (var k = 0; k < aCase.size(); k++) {
7 |             var activity = aCase.get(k).getEventClass();
8 |             var index = activities.indexOf(activity);
9 |             if (index < 0) {
10 |                 activities.push(activity);
11 |                 counters.push(1);
12 |             }
13 |             else {
14 |                 counters[index] += 1;
15 |             }
16 |         }
17 |         for (var i = 0; i < counters.length; i++)
18 |             if (counters[i] > 1)
19 |                 nbr_activity_with_reworks++;
20 |         return nbr_activity_with_reworks;
21 |     }
22 | };
--------------------------------------------------------------------------------
/Custom Metrics/CountActivitytWithSelfLoops.js:
--------------------------------------------------------------------------------
1 | var customMetric = {
2 |     caseMetric: function (aCase) {
3 |         var activities_with_selfloops = [];
4 |         var previous_activity = '';
5 |         for (var k = 0; k < aCase.size(); k++) {
6 |             var activity = aCase.get(k).getEventClass();
7 |             // a self loop is counted once per activity, however many times it repeats
8 |             if (activity == previous_activity && activities_with_selfloops.indexOf(activity) < 0)
9 |                 activities_with_selfloops.push(activity);
10 |             previous_activity = activity;
11 |         }
12 |         return activities_with_selfloops.length;
13 |     }
14 | };
--------------------------------------------------------------------------------
/Custom Metrics/CountReworks.js:
--------------------------------------------------------------------------------
1 | var customMetric = {
2 |     caseMetric: function (aCase) {
3 |         var nbr_reworks = 0;
4 |         var activities = [];
5 |         var counters = [];
6 |         for (var k = 0; k < aCase.size(); k++) {
7 |             var activity = aCase.get(k).getEventClass();
8 |             var index = activities.indexOf(activity);
9 |             if (index < 0) {
10 |                 activities.push(activity);
11 |                 counters.push(0);
12 |             }
13 |             else {
14 |                 counters[index] += 1;
15 |             }
16 |         }
17 |         for (var i = 0; i < counters.length; i++)
18 |             nbr_reworks += counters[i];
19 |         return nbr_reworks;
20 |     }
21 | };
--------------------------------------------------------------------------------
/Custom Metrics/CountReworksforTheseActivities.js:
--------------------------------------------------------------------------------
1 | var customMetric = {
2 |     caseMetric: function(aCase) {
3 |         var filteredActivities = ['BO Service Closure', 'activity2'];
4 |         var counters = [];
5 |         var nbr_reworks = 0;
6 |         for (var i=0; i< filteredActivities.length; i++)
7 |             counters.push(0);
8 |         for(var k = 0 ; k < aCase.size(); k++) {
9 |             var index = filteredActivities.indexOf(aCase.get(k).getEventClass());
10 |             if (index >= 0) // one of the activities searched
11 |                 counters[index]++;
12 |         }
13 |         for (var j=0; j< counters.length; j++)
14 |             if (counters[j] > 1)
15 |                 nbr_reworks += counters[j];
16 |         return nbr_reworks;
17 |     }
18 | };
--------------------------------------------------------------------------------
/Custom Metrics/CountSelfLoops.js:
--------------------------------------------------------------------------------
1 | var customMetric = {
2 |     caseMetric: function(aCase) {
3 |         var previous_activity = '';
4 |         var nbr_self_loops = 0;
5 | 
6 |         for(var k = 0 ; k < aCase.size(); k++) {
7 |             var activity = aCase.get(k).getEventClass();
8 |             if (previous_activity == activity) // self loop
9 |                 nbr_self_loops++;
10 |             previous_activity = activity;
11 |         }
12 |         return nbr_self_loops;
13 |     }
14 | };
--------------------------------------------------------------------------------
/Custom Metrics/CountSelfLoopsforTheseActivities.js:
--------------------------------------------------------------------------------
1 | var customMetric = {
2 |     caseMetric: function(aCase) {
3 |         var filteredActivities = ['activity1', 'activity2'];
4 |         var previous_activity = '';
5 |         var counters = [];
6 |         var nbr_self_loops = 0;
7 |         for (var i=0; i< filteredActivities.length; i++)
8 |             counters.push(0);
9 |         for(var k = 0 ; k < aCase.size(); k++) {
10 |             var event = aCase.get(k);
11 |             var activity = event.getEventClass();
12 |             var index = filteredActivities.indexOf(activity);
13 |             if (index >= 0){ // one of the activities searched (>= 0: do not skip the first one)
14 |                 if (activity == previous_activity) // self loop
15 |                     counters[index]++;
16 |             }
17 |             previous_activity = activity;
18 |         }
19 |         for (var i=0; i< counters.length; i++)
20 |             nbr_self_loops += counters[i];
21 |         return nbr_self_loops;
22 |     }
23 | };
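A note on the conventions shared by these counters: a self-loop is an immediate repetition of an activity (A → A), while a rework is a later re-occurrence with other activities in between (A → B → A). CountReworks and CountReworksforTheseActivities count every repeated execution, self-loops included, whereas the self-loop metrics count only immediate repetitions.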
--------------------------------------------------------------------------------
/Custom Metrics/CustomCategory.js:
--------------------------------------------------------------------------------
1 | // This metric determines a category for each case, based on a combination of characteristics.
2 | // The metric is then used in filters and widgets.
3 | // You can adapt this metric to your project by changing the activity names and the column names,
4 | // or by changing the logic.
5 | // This example can work as is in the Bank Account Closure project.
6 | var column1 = 'CLOSURE_REASON';
7 | var column2 = 'CLOSURE_TYPE';
8 | var activity1 = 'BO Service Closure';
9 | var activity2 = 'Network Adjustment Requested';
10 | var column1Value = '1 - Client lost';
11 | var column2Value = 'Client Recess';
12 | 
13 | var customMetric = {
14 |     caseMetric: function(aCase) {
15 | 
16 |         var act1RunNumber = 0;
17 |         var act1EndTime = 0;
18 |         var act2StartTime = 0;
19 |         var act1_act2_pathTime = 0;
20 |         var pathTimeThreshold1 = 7*24*3600*1000; // 7 days in millisec
21 |         var pathTimeThreshold2 = 10*24*3600*1000; // 10 days in millisec
22 | 
23 |         if ((aCase.get(0).getStringCustomAttributeValue(column1) != column1Value) || (aCase.get(0).getStringCustomAttributeValue(column2) != column2Value))
24 |             return "NA";
25 | 
26 |         for (var k=0; k < aCase.size(); k++){
27 |             var event = aCase.get(k);
28 |             if (event.getEventClass() == activity1){
29 |                 act1RunNumber++;
30 |                 act1EndTime = event.getEndTime();
31 |             }
32 |             if (event.getEventClass() == activity2 && act1EndTime != 0 && act2StartTime == 0){
33 |                 act2StartTime = event.getStartTime();
34 |                 act1_act2_pathTime = act2StartTime - act1EndTime;
35 | 
36 |                 if ((act1_act2_pathTime > pathTimeThreshold2) && (act1RunNumber > 1))
37 |                     return "long waiting time + rework";
38 | 
39 |                 if ((act1_act2_pathTime > pathTimeThreshold2) && (act1RunNumber == 1))
40 |                     return "long waiting time no rework";
41 | 
42 |                 if ((act1_act2_pathTime > pathTimeThreshold1) && (act1_act2_pathTime < pathTimeThreshold2) && (act1RunNumber == 1))
43 |                     return "medium waiting time no rework";
44 | 
45 |                 if ((act1_act2_pathTime > pathTimeThreshold1) && (act1RunNumber > 1))
46 |                     return "medium waiting time + rework";
47 | 
48 |                 if ((act1_act2_pathTime < pathTimeThreshold1) && (act1RunNumber > 1))
49 |                     return "short waiting time + rework";
50 | 
51 |                 if ((act1_act2_pathTime < pathTimeThreshold1) && (act1RunNumber == 1))
52 |                     return "short waiting time no rework";
53 |             }
54 |         }
55 |         return "NA";
56 |     }
57 | };
58 | 
59 | // COPY UNTIL HERE
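Worked example with the thresholds above: a case with CLOSURE_REASON '1 - Client lost' and CLOSURE_TYPE 'Client Recess', where 'BO Service Closure' ran twice and 'Network Adjustment Requested' started 12 days after its last completion, returns "long waiting time + rework" (12 days > the 10-day threshold, and act1RunNumber > 1).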
--------------------------------------------------------------------------------
/Custom Metrics/NetLeadtime.js:
--------------------------------------------------------------------------------
1 | // NetLeadtime computes the leadtime of a case and removes the non-working hours and week-ends.
2 | // The function assumes that the processes start and end during working hours.
3 | // AUTHOR: Patrick Megard (patrick.megard@fr.ibm.com)
4 | // Feb 3, 2023
5 | 
6 | // Custom Metric code starts here
7 | // COPY FROM THE LINE BELOW
8 | 
9 | 
10 | // Parameter: workingHoursNumber, number of working hours per day.
11 | var workingHoursNumber = 8;
12 | 
13 | function countWeekendDays(d0, d1, ndays){
14 |     var nsaturdays = Math.floor( (d0.getDay()+ndays) / 7 );
15 |     return 2*nsaturdays + (d0.getDay()==0) - (d1.getDay()==6);
16 | }
17 | 
18 | function NetLeadTime(startTime, endTime, workingHours){
19 |     // startTime = process start time in ms (double)
20 |     // endTime = process end time in ms (double)
21 |     // workingHours = number of working hours
22 |     // We consider that start and end are during working hours
23 |     // We remove non-working hours and week-ends
24 |     var millisecondsPerDay = 24 * 60 * 60 * 1000; // Day in milliseconds
25 |     var millisecondsPerNonWorkingHours = (24-workingHours) * 60 * 60 * 1000;
26 |     var d0 = new Date(startTime);
27 |     var d1 = new Date(endTime);
28 |     var D0 = new Date(d0);
29 |     D0.setHours(0);
30 |     var D1 = new Date(d1);
31 |     D1.setHours(0);
32 |     var nbCalendarDays = Math.round((D1 - D0)/millisecondsPerDay);
33 |     var leadTimeMs = d1 - d0;
34 |     if (nbCalendarDays == 0){ // done within a working day
35 |         return leadTimeMs;
36 |     }
37 |     // Remove number of non-working hours for each calendar day
38 |     // Remove number of week-end hours - non-working hours (already removed)
39 |     return leadTimeMs - (millisecondsPerNonWorkingHours * nbCalendarDays) - (countWeekendDays(d0, d1, nbCalendarDays) * (millisecondsPerDay-millisecondsPerNonWorkingHours));
40 | }
41 | 
42 | // This is for multi-event logs where the latest activity of a case is not always the last one.
43 | // It works also for flat processes.
44 | var customMetric = {
45 |     caseMetric: function(aCase) {
46 |         var startTime = Number.MAX_VALUE;
47 |         var endTime = 0;
48 |         for(var k = 0 ; k < aCase.size(); k++) {
49 |             var event = aCase.get(k);
50 |             if (event.getStartTime() < startTime){
51 |                 startTime = event.getStartTime();
52 |             }
53 |             if(event.getEndTime() > endTime){
54 |                 endTime = event.getEndTime();
55 |             }
56 |         }
57 | 
58 |         return (NetLeadTime(startTime, endTime, workingHoursNumber));
59 |     }
60 | };
61 | 
62 | // if you are not using a multi-level process, you can use this version that will be faster
63 | //var customMetric = {
64 | //    caseMetric: function(aCase) {
65 | //        return (NetLeadTime(aCase.get(0).getStartTime(), aCase.get(aCase.size() -1).getEndTime(), workingHoursNumber));
66 | //    }
67 | //};
68 | 
69 | // COPY UNTIL THE LINE ABOVE
70 | // Custom Metric code stops here
71 | 
72 | 
73 | // This section is for testing the code within a javascript development environment
74 | 
75 | let startTime = "2023-02-01T17:00:00";
76 | let endTime = "2023-02-02T09:01:01";
77 | startTime = "2017-07-21T00:00:00";
78 | endTime = "2019-02-21T00:01:01";
79 | //startTime = "2019-02-25T11:56:29";
80 | //endTime = "2019-02-25T15:54:59"
81 | 
82 | let d0 = new Date(startTime);
83 | let d1 = new Date(endTime);
84 | let D0 = new Date(startTime);
85 | D0.setHours(0);
86 | let D1 = new Date(endTime);
87 | let millisecondsPerDay = 24 * 60 * 60 * 1000;
88 | D1.setHours(0);
89 | let nbCalendarDays = Math.round((D1 - D0)/millisecondsPerDay);
90 | 
91 | console.log(d0);
92 | console.log(d1);
93 | console.log("days: %s", nbCalendarDays);
94 | console.log("week ends: %s", countWeekendDays(d0,d1, nbCalendarDays));
95 | console.log("leadtime: %s", d1-d0);
96 | console.log("net leadtime: %s", NetLeadTime(d0,d1, 8));
97 | console.log("difference: %s", d1-d0-NetLeadTime(d0,d1, 8));
--------------------------------------------------------------------------------
/Custom Metrics/PathTime.js:
--------------------------------------------------------------------------------
1 | // Returns the path time, in days, between an activity of list1 and an immediately following activity of list2.
2 | // Returns -1 when no such pair of consecutive activities is found in the case.
3 | var list1 = ['Activity1'];
4 | var list2 = ['Activity2'];
5 | 
6 | var customMetric = {
7 |     caseMetric: function (aCase) {
8 |         var dt1 = 0;
9 |         var dt2 = 0;
10 |         for (var k = 0; k < aCase.size(); k++) {
11 |             var event1 = aCase.get(k);
12 |             if (list1.indexOf(event1.getEventClass()) > -1) {
13 |                 dt1 = event1.getStartTime();
14 |                 if (k + 1 < aCase.size()) {
15 |                     var event2 = aCase.get(k + 1);
16 |                     if (list2.indexOf(event2.getEventClass()) > -1) {
17 |                         dt2 = event2.getStartTime();
18 |                         return ((dt2 - dt1)/(24*60*60*1000));
19 |                     }
20 |                 }
21 |             }
22 |         }
23 |         return -1;
24 |     }
25 | };
--------------------------------------------------------------------------------
/Custom Metrics/README.md:
--------------------------------------------------------------------------------
1 | # Custom Metrics
2 | 
3 | Custom metrics are javascript functions called for each case. The function accesses the events of a case to retrieve values and to compute a new metric. Custom metrics are case-level metrics.
4 | 
5 | - Refer to the documentation for more details.
6 | - Refer also to the Advanced Filters documentation for the list of functions available for each event.
7 | 
8 | 
9 | ## Leadtime without non-working hours
10 | NetLeadtime computes the leadtime of a case and removes the non-working hours and week-ends. It assumes that the activities start and end during working hours.
11 | 
12 | [NetLeadtime.js](NetLeadtime.js)
13 | 
14 | ## Case quality
15 | This metric associates a quality value (0-100) to each case, which would typically decrease if deviations occur.
16 | 
17 | [CaseQuality.js](CaseQuality.js)
18 | 
19 | ## Case SLA
20 | This metric associates an SLA (double value) to each case, based on a case attribute like "priority".
21 | 
22 | [CaseSLA.js](CaseSLA.js)
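Before adapting the metrics above, it can help to start from the smallest useful pattern: scan the events of a case and derive one number. The sketch below reuses only accessors already used in this folder (getStartTime, getEndTime, returning epoch milliseconds); treat it as a starting point rather than a reference implementation.

// Minimal example: case lead time in days, robust to unordered events.
var MS_PER_DAY = 24 * 60 * 60 * 1000;
var customMetric = {
    caseMetric: function (aCase) {
        var start = Number.MAX_VALUE;
        var end = 0;
        for (var k = 0; k < aCase.size(); k++) {
            var event = aCase.get(k);
            if (event.getStartTime() < start) start = event.getStartTime();
            if (event.getEndTime() > end) end = event.getEndTime();
        }
        return (end - start) / MS_PER_DAY;
    }
};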
--------------------------------------------------------------------------------
/Custom Metrics/test_flat.csv:
--------------------------------------------------------------------------------
1 | Activity,Date,POLICY_ID,ENDORSEMENT_NUMBER,RISK_LEVEL,OFFICE_ID,EFFECTIVE_DATE_POL,EXPIRATION_DATE_POL,MANUAL_PREMIUMS_CHG,ENDORSEMENT_TYPE,RESOURCE,YEARLY_PREMIUM
2 | POLICIES: APPROVE,2020-10-04 00:00:00.000,POLICY-01,0,1,OFFICE-901,2020-10-06 00:00:00.000,2021-10-06 00:00:00.000,N,ISSUE,USER-24,592
3 | POLICIES: APPROVE,2020-10-04 00:00:00.000,POLICY-02,0,1,OFFICE-901,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,USER-24,992
4 | POLICIES: APPROVE,2020-10-04 00:00:00.000,POLICY-03,0,1,OFFICE-901,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,APPXYZ,861
5 | POLICIES: APPROVE,2020-10-08 00:00:00.000,POLICY-04,0,1,OFFICE-1061,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,USER-22,706
6 | POLICIES: APPROVE,2020-10-16 00:00:00.000,POLICY-10,0,1,OFFICE-1708,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,APPXYZ,492
7 | POLICIES: APPROVE,2020-10-18 00:00:00.000,POLICY-12,0,1,OFFICE-1710,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,APPXYZ,694
8 | POLICIES: APPROVE,2020-10-23 00:00:00.000,POLICY-13,0,1,OFFICE-1708,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,USER-18,1058
9 | POLICIES: APPROVE,2020-10-03 00:00:00.000,POLICY-18,0,1,OFFICE-957,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,TRXYZ,890
10 | POLICIES: APPROVE,2020-10-04 00:00:00.000,POLICY-19,0,1,OFFICE-752,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,USER-16,538
11 | POLICIES: APPROVE,2020-10-04 00:00:00.000,POLICY-20,0,1,OFFICE-752,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,USER-16,769
12 | POLICIES: APPROVE,2020-10-04 00:00:00.000,POLICY-20,0,2,OFFICE-752,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,USER-16,526
13 | POLICIES: APPROVE,2020-10-04 00:00:00.000,POLICY-20,0,3,OFFICE-752,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,APPXYZ,526
14 | POLICIES: APPROVE,2020-10-08 00:00:00.000,POLICY-20,0,4,OFFICE-1851,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,USER-20,890
15 | POLICIES: APPROVE,2020-10-23 00:00:00.000,POLICY-21,0,1,OFFICE-1060,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,APPXYZ,1080
16 | POLICIES: APPROVE,2020-10-24 00:00:00.000,POLICY-22,0,1,OFFICE-1653,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,APPXYZ,962
17 | POLICIES: APPROVE,2020-10-01 00:00:00.000,POLICY-24,0,1,OFFICE-957,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,APPXYZ,1044
18 | POLICIES: APPROVE,2020-10-02 00:00:00.000,POLICY-25,0,1,OFFICE-1459,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,N,ISSUE,USER-24,692
19 | POLICIES: APPROVE,2020-10-08 00:00:00.000,POLICY-26,0,1,OFFICE-1715,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,Y,ISSUE,USER-22,133
20 | POLICIES: APPROVE,2020-09-28 00:00:00.000,POLICY-27,0,1,OFFICE-1711,2020-10-13 00:00:00.000,2021-10-13 00:00:00.000,Y,ISSUE,APPXYZ,133
--------------------------------------------------------------------------------
/Custom Widgets/ECharts_examples/frontend.js:
--------------------------------------------------------------------------------
1 | return {
2 |     init: function(context){
3 | 
4 |         var widget = document.getElementById(context.scope.widgetId);
5 |         var div = document.createElement('div');
6 |         div.id = context.scope.widgetId + '_div'; // creates a unique div id
7 |         widget.appendChild(div);
8 |         echarts.init(div);
9 |     },
10 | 
11 |     update: function(data, context){
12 | 
13 |         var div = document.getElementById(context.scope.widgetId + '_div');
14 | 
15 |         if(div) {
16 |             var myChart = echarts.getInstanceByDom(div);
17 |             var option = {
18 |                 xAxis: {
19 |                     type: 'category',
20 |                     data: ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
21 |                 },
22 |                 yAxis: {
23 |                     type: 'value'
24 |                 },
25 |                 series: [
26 |                     {
27 |                         data: [150, 230, 224, 218, 135, 147, 260],
28 |                         type: 'line'
29 |                     }
30 |                 ]
31 |             };
32 |             myChart.setOption(option);
33 |         }
34 | 
35 |     },
36 | 
37 |     resize: function(size, context){
38 |         var div = document.getElementById(context.scope.widgetId + '_div');
39 | 
40 |         if(div) {
41 |             var myChart = echarts.getInstanceByDom(div);
42 |             myChart.resize(
43 |                 {
44 |                     height: size.height,
45 |                     width: size.width
46 |                 });
47 |         }
48 |     }
49 | };
--------------------------------------------------------------------------------
/Custom Widgets/README.md:
--------------------------------------------------------------------------------
1 | # IBM Process Mining Custom Widgets
2 | 
3 | ## Dimension_linechart
4 | Displays the evolution of the number of cases along the time axis. The cases are split among the values of a dimension. Example: what is the number of cases for each month, according to the 'CLOSURE REASON' dimension.
5 | 
6 | There is a complete tutorial about developing advanced custom widgets based on this widget: [Tutorial](./dimension_linechart/README.md)
7 | 
8 | 
9 | ![Image](./dimension_linechart/DimensionLineChart.jpg)
10 | 
11 | 
12 | ## Dimension_leadtime_LC
13 | Displays the evolution of the average leadtime of cases along the time axis. The cases are split among the values of a dimension. Example: what is the average leadtime of cases for each month, according to the 'CLOSURE REASON' dimension.
14 | 
15 | ![Image](./dimension_leadtime_LC/DimensionLeadtimeLC.jpeg)
16 | 
17 | ## v-barchart
18 | Displays the values of a dimension; for each value, the number of cases and the average leadtime.
19 | 
20 | ![Image](./v_barchart/v_barcharts.jpg)
21 | 
22 | ## activity relationships
23 | Displays the relationship between all the activities and the average pathtime. This is a nice complement to the activity relationship view.
24 | 
25 | ![Image](./activity_relationships/activity_relationship.jpg)
26 | 
27 | ## Custom Metric Line Chart
28 | Displays a custom metric along time. The timestamp can be chosen like this:
29 | - for an activity, enter STARTTIME or ENDTIME
30 | - for the whole process, enter PROCESS in the ACTIVITY field
31 | ![Image](./dimension_linechart_custom_metric/custom_metric_linechart.jpg)
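All these widgets follow the lifecycle shown in ECharts_examples/frontend.js: init creates a DOM node under context.scope.widgetId, update receives the rows produced by the widget backend, and resize adjusts the rendering. A bare-bones sketch of that contract (the data handling is purely illustrative and depends on the backend you pair it with):

// Minimal custom widget frontend sketch (assumed lifecycle: init/update/resize).
return {
    init: function (context) {
        var widget = document.getElementById(context.scope.widgetId);
        var div = document.createElement('div');
        div.id = context.scope.widgetId + '_div'; // unique div id, as in the ECharts example
        widget.appendChild(div);
    },
    update: function (data, context) {
        var div = document.getElementById(context.scope.widgetId + '_div');
        // Placeholder rendering: show how many rows the backend sent.
        if (div) div.textContent = 'rows received: ' + (data && data.length ? data.length : 0);
    },
    resize: function (size, context) {
        var div = document.getElementById(context.scope.widgetId + '_div');
        if (div) { div.style.width = size.width + 'px'; div.style.height = size.height + 'px'; }
    }
};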
--------------------------------------------------------------------------------
/Custom Widgets/activity_relationships/README.md:
--------------------------------------------------------------------------------
1 | # Activity Relationships Custom Widget
2 | This custom widget displays the predecessors and the successors of all the activities. For each relationship, it shows:
3 | - frequency
4 | - average pathtime
5 | 
6 | This widget is a useful complement to the activity relationship view in IBM Process Mining.
7 | 
8 | Caution: by default the widget computes the calendar duration. It does not take the business hours into consideration.
9 | 
10 | An approximation of the business pathtime (removing non-business hours and week ends) is available, but it does not take into consideration the actual project business hours settings. You might find differences with the waiting time computed by process mining.
--------------------------------------------------------------------------------
/Custom Widgets/activity_relationships/activity_relationship.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Custom Widgets/activity_relationships/activity_relationship.jpg
--------------------------------------------------------------------------------
/Custom Widgets/activity_relationships/style.css:
--------------------------------------------------------------------------------
1 | *{
2 |     color:#333;
3 | }
4 | .table-wrapper {
5 |     margin: 1rem;
6 |     box-shadow: 0px 0px 6px -2px rgb(0 50 72 / 14%);
7 |     border: 1px solid #dae4ee;
8 |     overflow: hidden;
9 |     text-align: right;
10 | }
11 | 
12 | table {
13 |     width: 100%;
14 | }
15 | 
16 | .table-row:nth-child(odd) {
17 |     background: #F3F3F3;
18 | }
19 | 
20 | .table-row:nth-child(odd):hover {
21 |     background: #CCE5E9;
22 | }
23 | 
24 | .table-row:nth-child(even) {
25 |     background: #fbfdff;
26 | }
27 | 
28 | .table-row:nth-child(even):hover {
29 |     background: #CCE5E9;
30 | }
31 | 
32 | .table-row .table-cell:not(:last-child) {
33 |     border-right: 1px solid #DAE4EE;
34 | }
35 | 
36 | .table-cell{
37 |     padding: 0.5rem;
38 | }
39 | 
40 | .table-headings{
41 |     background: #F3F3F3;
42 | 
43 |     color:#222b45;
44 |     line-height:1.5;
45 |     text-align: left;
46 |     font-size:1rem;
47 |     padding: 0.5rem;
48 |     border-bottom: 1px solid #DAE4EE;
49 | }
50 | 
51 | .table-headings:not(:last-child) {
52 |     border-right: 1px solid #DAE4EE;
53 | }
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Custom Widgets/chart widgets before 1.14.3/dimension_leadtime_LC/DimensionLeadtimeLC.jpeg -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_leadtime_LC/index.html: -------------------------------------------------------------------------------- 1 |
2 | -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_leadtime_LC/view.html: -------------------------------------------------------------------------------- 1 |
2 | 3 |
-------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_linechart/DimensionLineChart.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Custom Widgets/chart widgets before 1.14.3/dimension_linechart/DimensionLineChart.jpg -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_linechart/Untitled 2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Custom Widgets/chart widgets before 1.14.3/dimension_linechart/Untitled 2.jpg -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_linechart/backend_console_log.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Custom Widgets/chart widgets before 1.14.3/dimension_linechart/backend_console_log.jpg -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_linechart/debugbackend.js: -------------------------------------------------------------------------------- 1 | 2 | // TESTING THE WIDGET WITH FAKE DATA (not from the process) 3 | function filterAndComputeMetrics_emulate(trace) { 4 | var values = ['a', 'b', 'c', 'd', 'e', 'f']; 5 | var metrics = { 6 | exclude: 0, 7 | value: 0, 8 | eventTime: 0 9 | }; 10 | // emulate event.getStartTime() 11 | datePeriod.fromTime = new Date(datePeriod.fromDate).getTime(); 12 | datePeriod.duration = new Date(datePeriod.toDate).getTime() - datePeriod.fromTime; 13 | metrics.eventTime = datePeriod.fromTime + Math.floor(Math.random() * datePeriod.duration); 14 | 15 | // emulate event.getStringCustomAttributeValue(DIMENSION); 16 | metrics.value = values[Math.floor(Math.random() * values.length)]; 17 | 18 | return metrics; 19 | } 20 | 21 | 22 | function computeScale(groupBy, aDate) { 23 | // Update timesScale array, and push new counters to each dataset value 24 | 25 | var newTimeScale = []; 26 | var appendCounters = []; 27 | aDate.setHours(0, 0, 0, 0); 28 | switch (groupBy) { 29 | case "week": aDate.setDate(aDate.getDate() - aDate.getDay()); break; 30 | case "month": aDate.setMonth(aDate.getMonth(), 1); break; 31 | case "year": aDate.setFullYear(aDate.getFullYear(), 0, 1); break; 32 | } 33 | var eventTime = aDate.getTime(); 34 | 35 | if (timeScale.length == 0) { // first event 36 | timeScale.push(eventTime); 37 | } 38 | else if (eventTime < timeScale[0]) { // create new dates array and concat it with previous, until earliestTimePrevious 39 | aDate.setTime(eventTime); 40 | while (aDate.getTime() < timeScale[0]) { 41 | newTimeScale.push(aDate.getTime()); 42 | switch (groupBy) { 43 | case "day": aDate.setDate(aDate.getDate() + 1); break; 44 | case "week": aDate.setDate(aDate.getDate() + 7); break; 45 | case "month": aDate.setMonth(aDate.getMonth() + 1); break; 46 | case "year": aDate.setFullYear(aDate.getFullYear() + 1); break; 47 | } 48 | appendCounters.push(0); 49 | } 50 | timeScale = newTimeScale.concat(timeScale); 51 | for (k = 0; k < dataset.length; k++) { 52 | dataset[k].counters = appendCounters.concat(dataset[k].counters); 53 | 
} 54 | } 55 | else if (eventTime > timeScale[timeScale.length - 1]) { // last time was the latest, eventTime is later: add days 56 | aDate.setTime(timeScale[timeScale.length - 1]); 57 | while (aDate.getTime() < eventTime) { 58 | switch (groupBy) { 59 | case "day": aDate.setDate(aDate.getDate() + 1); break; 60 | case "week": aDate.setDate(aDate.getDate() + 7); break; 61 | case "month": aDate.setMonth(aDate.getMonth() + 1); break; 62 | case "year": aDate.setFullYear(aDate.getFullYear() + 1); break; 63 | } 64 | timeScale.push(aDate.getTime()); 65 | for (k = 0; k < dataset.length; k++) { 66 | dataset[k].counters.push(0); // add 0 for each new date, to each counter array 67 | } 68 | } 69 | } 70 | return eventTime; 71 | } 72 | 73 | 74 | function update(trace) { 75 | 76 | if (trace.getDiscarded == 1) { 77 | return; 78 | } 79 | 80 | var metrics = filterAndComputeMetrics_emulate(trace); 81 | if (metrics.exclude) 82 | return; 83 | 84 | eventTime = computeScale(GROUPBY, new Date(metrics.eventTime)); 85 | 86 | if (dataset.length == 0) { // first event 87 | dataset.push({ 'value': metrics.value, 'counters': [1] }); 88 | return; 89 | } 90 | 91 | // search the index of 'value' in the dataset array 92 | var valueindex = -1; 93 | for (i = 0; i < dataset.length; i++) { 94 | if (dataset[i].value == metrics.value) { 95 | valueindex = i; 96 | break; 97 | } 98 | } 99 | if (valueindex < 0) {// value not yet added; add it to dataset 100 | valueindex = dataset.push({value: metrics.value, counters: [] }) - 1; 101 | // update counters 102 | for (var i = 0; i < dataset[0].counters.length; i++) 103 | dataset[valueindex].counters.push(0); 104 | } 105 | // update the counters 106 | dataset[valueindex].counters[timeScale.indexOf(eventTime)]++; 107 | } 108 | 109 | // GLOBAL VARIABLES 110 | var timeScale = []; 111 | var dataset = []; 112 | var GROUPBY = 'month'; 113 | var trace = {getDiscarded: 0 }; 114 | 115 | // period of dates to generate random start_times 116 | var datePeriod = { fromDate: "2020-01-01", toDate: "2022-01-01" }; 117 | var numberOfCases = 1000; 118 | 119 | 120 | // EMULATE the call to update() by process mining for all the cases 121 | for (i = 0; i < numberOfCases; i++) { 122 | update(trace); 123 | } 124 | 125 | // CHECK that we compute correctly this.times and this.dataset 126 | var nbevents = 0; 127 | for (var k = 0; k < dataset.length; k++) { 128 | console.log(dataset[k].value); 129 | console.log(dataset[k].counters); 130 | for (var kk = 0; kk < timeScale.length; kk++) 131 | nbevents += dataset[k].counters[kk]; 132 | } 133 | console.log("Number of events: " + nbevents); 134 | console.log("timeScale :" + timeScale); 135 | 136 | -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_linechart/generate_csv.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | def execute(context): 4 | events = [ 5 | {'pid':'p1', 'activity':'A1', 'date':'2023-01-01'}, 6 | {'pid':'p1', 'activity':'A2 request', 'date':'2023-01-02'}, 7 | {'pid':'p2', 'activity':'A1', 'date':'2023-01-02'}, 8 | {'pid':'p2', 'activity':'A3', 'date':'2023-01-04'}, 9 | ] 10 | df = pd.DataFrame(events) 11 | return df -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_linechart/test.js: -------------------------------------------------------------------------------- 1 | let toto = 'attr-custom-metricsMYNAME2'; 2 | let 
toto_display = toto.replace('attr-custom-metrics', ''); 3 | toto_display = toto_display.replace('attr-custom-',''); 4 | console.log(toto_display); -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_linechart/view.html: --------------------------------------------------------------------------------
1 | <div>
2 |   <div>
3 |     <div>
4 |       Total number of cases for top 5 values:
5 |     </div>
6 |     <div>
7 |       {{data.dataset[0].count}}
8 |     </div>
9 |     <div>
10 |       {{data.dataset[0].value.slice(0,10)}}
11 |     </div>
12 |     <div>
13 |       {{data.dataset[1].count}}
14 |     </div>
15 |     <div>
16 |       {{data.dataset[1].value.slice(0,10)}}
17 |     </div>
18 |     <div>
19 |       {{data.dataset[2].count}}
20 |     </div>
21 |     <div>
22 |       {{data.dataset[2].value.slice(0,10)}}
23 |     </div>
24 |   </div>
25 | </div>
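Note: debugbackend.js above has no dependency on the Process Mining runtime (it stubs the trace API with random timestamps and dimension values), so its timeScale and counters logic can be sanity-checked outside the product by running `node debugbackend.js`; it should print each dimension value with its counters array, followed by the total number of generated events (1000) and the computed timeScale.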
-------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_linechart_custom_metric/custom_metric_linechart.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Custom Widgets/chart widgets before 1.14.3/dimension_linechart_custom_metric/custom_metric_linechart.jpg -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/dimension_linechart_custom_metric/view.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Custom Widgets/chart widgets before 1.14.3/dimension_linechart_custom_metric/view.html -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/v_barchart/backend.js: -------------------------------------------------------------------------------- 1 | (function () { 2 | 3 | function filterAndComputeMetrics(trace) { 4 | // Use this metrics variable for your own metrics (add more if needed) 5 | var metrics = { 6 | 'exclude': 0, 7 | 'value': 0, 8 | 'eventTime': 0, 9 | 'leadtime': 0 10 | }; 11 | var firstevent = trace.get(0).getStartTime(); 12 | var lastevent = firstevent; 13 | for (var k = 0; k < trace.size(); k++) { 14 | var event = trace.get(k); 15 | if (event.getStartTime() < firstevent) { 16 | firstevent = event.getStartTime(); 17 | } 18 | else if (event.getStartTime() > lastevent) 19 | lastevent = event.getStartTime(); 20 | 21 | if (event.getEventClass() == ACTIVITY) { // don't break if found, we need parse all the events for the leadtime 22 | metrics.value = event.getStringCustomAttributeValue(DIMENSION); 23 | if (metrics.value == '') { 24 | if (KEEP_EMPTY_VALUES[0] == 'n') { // exclude cases with no value for DIMENSION 25 | metrics.exclude = 1; 26 | } 27 | else { // keep the case and replace the value with None 28 | metrics.value = "None"; 29 | } 30 | } 31 | metrics.eventTime = event.getStartTime(); 32 | } 33 | } 34 | metrics.leadtime = lastevent - firstevent; 35 | //metrics.leadtime = trace.get(trace.size() - 1).getStartTime() - trace.get(0).getStartTime(); 36 | if (metrics.eventTime == 0) metrics.exclude = 1; // ACTIVITY NOT FOUND 37 | return metrics; 38 | } 39 | 40 | return { 41 | init: function (params) { 42 | var groupByChoices = ['day', 'week', 'month', 'year']; 43 | 44 | DIMENSION = params.DIMENSION.replace('attr-custom-', ''); 45 | 46 | ACTIVITY = params.ACTIVITY; // the activity in which we find the value of the dimension 47 | KEEP_EMPTY_VALUES = params.KEEP_EMPTY_VALUES; // "yes"=keep cases when value=='', "no"= exclude these cases 48 | 49 | MAX_NUMBER_OF_VALUES_DISPLAYED = params.MAX_NUMBER_OF_VALUES_DISPLAYED; // show top N values (max is hardcoded at 50) 50 | if (MAX_NUMBER_OF_VALUES_DISPLAYED == '') 51 | MAX_NUMBER_OF_VALUES_DISPLAYED = 50; 52 | else 53 | MAX_NUMBER_OF_VALUES_DISPLAYED = Math.min(50, Number(MAX_NUMBER_OF_VALUES_DISPLAYED)); 54 | 55 | dataset = []; 56 | }, 57 | 58 | update: function (trace) { 59 | 60 | if (trace.getDiscarded == 1) { 61 | return; 62 | } 63 | 64 | var metrics = filterAndComputeMetrics(trace); 65 | if (metrics.exclude) 66 | return; 67 | 68 | if (dataset.length == 0) { // first case 69 | dataset.push({ 70 | 'value': metrics.value, 71 | 'case_count': 1, 72 | 'leadtime_sum': 
metrics.leadtime 73 | }); 74 | return; 75 | } 76 | 77 | // search the index of 'value' in the dataset array 78 | var valueindex = -1; 79 | for (var k = 0; k < dataset.length; k++) { 80 | if (dataset[k].value == metrics.value) { 81 | valueindex = k; 82 | dataset[k].case_count += 1; 83 | dataset[k].leadtime_sum += metrics.leadtime; 84 | break; 85 | } 86 | } 87 | if (valueindex < 0) {// value not yet added; add it to dataset 88 | dataset.push({ 89 | 'value': metrics.value, 90 | 'case_count': 1, 91 | 'leadtime_sum': metrics.leadtime 92 | }); 93 | } 94 | }, 95 | 96 | finalize: function (output) { 97 | output.DIMENSION = DIMENSION; 98 | output.MAX_NUMBER_OF_VALUES_DISPLAYED = MAX_NUMBER_OF_VALUES_DISPLAYED; 99 | output.ACTIVITY = ACTIVITY; 100 | output.dataset = dataset; 101 | } 102 | }; 103 | })(); 104 | 105 | -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/v_barchart/v_barcharts.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Custom Widgets/chart widgets before 1.14.3/v_barchart/v_barcharts.jpg -------------------------------------------------------------------------------- /Custom Widgets/chart widgets before 1.14.3/v_barchart/view.html: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /Custom Widgets/quartiles_deciles/frontend.js: -------------------------------------------------------------------------------- 1 | return { 2 | 3 | 4 | init: function(context){ 5 | var widget = document.getElementById(context.scope.widgetId); 6 | var div = document.createElement('div'); 7 | div.id = context.scope.widgetId + '_div'; // creates a unique div id 8 | widget.appendChild(div); 9 | echarts.init(div); 10 | }, 11 | 12 | update: function(data, context){ 13 | 14 | 15 | const formatDuration = ms => { 16 | if (ms < 0) { 17 | ms = 0; 18 | } 19 | const time = { 20 | d: Math.floor(ms / 86400000), 21 | h: Math.floor(ms / 3600000) % 24, 22 | m: Math.floor(ms / 60000) % 60, 23 | s: Math.floor(ms / 1000) % 60, 24 | ms: Math.floor(ms) % 1000 25 | }; 26 | return Object.entries(time) 27 | .filter(val => val[1] !== 0) 28 | .map(([key, val]) => `${val} ${key}${val !== 1 ? 
'' : ''}`) 29 | .join(', '); 30 | }; 31 | 32 | context.scope.data = data; 33 | // transform the displayed values when data.TYPE == 'DURATION' 34 | if (data.TYPE == 'DURATION'){ 35 | // duration 36 | data.MIN = formatDuration(data.MIN); 37 | data.MAX = formatDuration(data.MAX); 38 | data.Q1 = formatDuration(data.Q1); 39 | data.MEDIAN = formatDuration(data.MEDIAN); 40 | data.Q3 = formatDuration(data.Q3); 41 | data.IQR = formatDuration(data.IQR); 42 | data.LOW_OUTLIER = formatDuration(data.LOW_OUTLIER); 43 | data.HIGH_OUTLIER = formatDuration(data.HIGH_OUTLIER); 44 | data.DECILE_1 = formatDuration(data.DECILE_1); 45 | data.DECILE_9 = formatDuration(data.DECILE_9); 46 | 47 | } 48 | 49 | }, 50 | 51 | resize: function(size, context){ 52 | 53 | } 54 | }; -------------------------------------------------------------------------------- /Datasets_usecases/Bank Account Closure/README.md: -------------------------------------------------------------------------------- 1 | # Bank Account Closure Demo 2 | Download and unzip [IBM - Bank Account Closure.zip](./bank_account_closure.zip) 3 | 4 | When unzipped, you will find 4 ZIP files (event logs) and 4 IDP files (project backups). 5 | 6 | - Create a Process project named Bank Account Closure in an organization named myBank. 7 | - Load the banking_account_closure.csv file. 8 | - Check that you have a backup (IDP) file, and load Banking Account Closure_2022-09-08_143426+0000.idp 9 | - Click Continue on each wizard page until the process is displayed. 10 | 11 | Then create the following projects in the myBank organization and load their CSV and IDP files. 12 | 13 | Naming is important: the project names have to match the parent's activity names: 14 | - Task Mining: BO Service Closure 15 | - Task Mining: Evaluating Request (WITH registered letter) 16 | - Task Mining: Network Service Closure 17 | 18 | -------------------------------------------------------------------------------- /Datasets_usecases/Bank Account Closure/bank_account_closure.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Datasets_usecases/Bank Account Closure/bank_account_closure.zip -------------------------------------------------------------------------------- /Datasets_usecases/Client onboarding/Client_onboarding.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Datasets_usecases/Client onboarding/Client_onboarding.zip -------------------------------------------------------------------------------- /Datasets_usecases/Client onboarding/Client_onboarding/Onboard Client_2023-10-05_083936+0000.idp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Datasets_usecases/Client onboarding/Client_onboarding/Onboard Client_2023-10-05_083936+0000.idp -------------------------------------------------------------------------------- /Datasets_usecases/Client onboarding/Client_onboarding/onboard_client.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Datasets_usecases/Client onboarding/Client_onboarding/onboard_client.zip --------------------------------------------------------------------------------
/Datasets_usecases/Client onboarding/README.md: -------------------------------------------------------------------------------- 1 | # Client Onboarding 2 | - Download [Client_onboarding.zip](./Client_onboarding.zip) 3 | - Unzip the file Client_onboarding.zip 4 | - Create a Process Mining project with a name (for example: ClientOnboarding) 5 | - Load the file onboard_client.zip 6 | - Load the backup file Onboard Client_2023-10-05_083936+0000.idp 7 | 8 | -------------------------------------------------------------------------------- /Datasets_usecases/README.md: -------------------------------------------------------------------------------- 1 | # Process Mining datasets and use cases 2 | These data sets can be loaded into IBM Process Mining. You typically need the event log (CSV) and the backup file (IDP). 3 | 4 | ## Procure To Pay from SAP (Tutorial) 5 | Procure to Pay with SAP shows a typical multi-level process. The end-to-end process is built from 4 distinct SAP processes: procurement, order, materials, invoice. 6 | 7 | [Procure to Pay](./Tutorial/README.md) 8 | 9 | ## Bank Account Closure 10 | Bank Account Closure shows the powerful combination of Process Mining and Task Mining. Some activities, like BO Service Closure, were detailed using Task Mining. 11 | 12 | [Bank Account Closure](./Bank%20Account%20Closure/README.md) 13 | 14 | ## Client Onboarding 15 | [Client onboarding](./Client%20onboarding/R) -------------------------------------------------------------------------------- /Datasets_usecases/Tutorial/Eventlog sample for Analytics explanation.csv: -------------------------------------------------------------------------------- 1 | ProcessID;Activity;StartTime;Resource;Role;TicketType;CustomerRating 2 | A0001;Take in charge;2021-01-01;U001;Support;Product issue; 3 | A0001;Notify customer;2021-01-02;Robot1;Bot;Product issue; 4 | A0001;Information requested;2021-01-13;U003;Development;Product issue; 5 | A0001;Complete;2021-01-14;U001;Support;Product issue;3 6 | A0002;Take in charge;2021-01-05;U002;Financial;Invoice information request; 7 | A0002;Notify customer;2021-01-06;Robot1;Bot;Invoice information request; 8 | A0002;Complete;2021-01-10;U002;Financial;Invoice information request;5 9 | A0003;Take in charge;2021-01-08;U005;Development;DevOps activity request; 10 | A0003;Notify customer;2021-01-09;Robot1;Bot;DevOps activity request; 11 | A0003;Ticket rejected;2021-01-10;U005;Development;DevOps activity request; -------------------------------------------------------------------------------- /Datasets_usecases/Tutorial/IBM - Hands On Tutorial.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Datasets_usecases/Tutorial/IBM - Hands On Tutorial.zip -------------------------------------------------------------------------------- /Datasets_usecases/Tutorial/README.md: -------------------------------------------------------------------------------- 1 | # Procure To Pay (SAP) 2 | Procure to Pay with SAP shows a typical multi-level process. The end-to-end process is built from 4 distinct SAP processes: procurement, order, materials, invoice.
3 | 4 | Download and unzip [IBM - Hands On Tutorial.zip](./IBM%20-%20Hands%20On%20Tutorial.zip) 5 | 6 | - Create a project named P2P and load the event log: Hands_On_tutorial_exercise.zip 7 | - Check that you have a backup file and load: Hands_On_tutorial_exercise_2020-10-23_073658.idp 8 | 9 | Navigate through all the pages of the project creation wizard (accept all pages) until the process is displayed. 10 | 11 | From the Analytics page you can also see several dashboards dedicated to P2P, such as Maverick buying. -------------------------------------------------------------------------------- /IBM Process Mining Dashboards/README.md: -------------------------------------------------------------------------------- 1 | # IBM Process Mining 2 | API client samples 3 | 4 | ## Export dashboard data 5 | 6 | Exporting Analytics dashboard results as CSV to an external S3 storage is a capability introduced with the 1.12.0 release. 7 | 8 | The prerequisites for using this capability are: 9 | - the S3 storage tier set up in the Process Mining server configuration 10 | - dashboards marked as "exportable" in their "Dashboard info" panel in the Analytics platform. 11 | 12 | Usage example: 13 | 14 | java -DserverUrl=<serverUrl> -DuserId=<userId> -DapiKey=<apiKey> -jar dashboard-export-job-1.0-SNAPSHOT.jar 15 | -------------------------------------------------------------------------------- /IBM Process Mining Dashboards/dist/dashboard-export-job-1.0-SNAPSHOT.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/IBM Process Mining Dashboards/dist/dashboard-export-job-1.0-SNAPSHOT.jar -------------------------------------------------------------------------------- /IBM Process Mining Trimming/README.md: -------------------------------------------------------------------------------- 1 | # IBM Process Mining 2 | API client samples 3 | 4 | ## CSV data trimming 5 | 6 | Data trimming, introduced in the 1.12.0 release, truncates a process's data source by retaining only the cases that satisfy the filter conditions specified in the input JSON.
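In an aggregate filter, the `expression` refers to the entries of the `filters` array by their zero-based index. As an illustration, here is a minimal sketch based on the schema of the filters.json shipped in this folder (the activity names are only examples); it keeps the cases that contain "Invoice Cleared" but not "Invoice Registered":

```json
{
  "caseFilters": [
    {
      "filterType": "aggregate",
      "expression": "(0 AND NOT 1)",
      "filters": [
        { "filterType": "attribute", "attributeKey": "attr-activity", "attributeValue": "Invoice Cleared" },
        { "filterType": "attribute", "attributeKey": "attr-activity", "attributeValue": "Invoice Registered" }
      ]
    }
  ]
}
```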
7 | 8 | Usage example: 9 | 10 | java -DserverUrl=<serverUrl> -DuserId=<userId> -DapiKey=<apiKey> -jar csv-trimming-job-1.0-SNAPSHOT.jar 11 | -------------------------------------------------------------------------------- /IBM Process Mining Trimming/dist/csv-trimming-job-1.0-SNAPSHOT.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/IBM Process Mining Trimming/dist/csv-trimming-job-1.0-SNAPSHOT.jar -------------------------------------------------------------------------------- /IBM Process Mining Trimming/filters.json: -------------------------------------------------------------------------------- 1 | { 2 | "caseFilters": [ 3 | { 4 | "filterType": "aggregate", 5 | "expression": "((0 OR (1 AND NOT 2)) OR (3 AND NOT 1) OR (4 AND NOT 1))", 6 | "filters": [ 7 | { 8 | "filterType": "attribute", 9 | "attributeKey": "attr-activity", 10 | "attributeValue": "Invoice Cleared", 11 | "startDate": "${TODAY-92}", 12 | "endDate": "${TODAY}" 13 | }, 14 | { 15 | "filterType": "attribute", 16 | "attributeKey": "attr-activity", 17 | "attributeValue": "Invoice Registered" 18 | }, 19 | { 20 | "filterType": "attribute", 21 | "attributeKey": "attr-activity", 22 | "attributeValue": "Invoice Cleared" 23 | }, 24 | { 25 | "filterType": "attribute", 26 | "attributeKey": "attr-activity", 27 | "attributeValue": "SES Line Registered" 28 | }, 29 | { 30 | "filterType": "attribute", 31 | "attributeKey": "attr-activity", 32 | "attributeValue": "Goods Line Registered" 33 | } 34 | ] 35 | } 36 | ], 37 | "entityFilters": [ 38 | { 39 | "filterType": "aggregate", 40 | "expression": "(0 AND NOT (1 OR 2))", 41 | "filters": [ 42 | { 43 | "filterType": "entity", 44 | "entityKey": "attr-process-1" 45 | }, 46 | { 47 | "filterType": "attribute", 48 | "attributeKey": "attr-activity", 49 | "attributeValue": "Purchase Order Closed" 50 | }, 51 | { 52 | "filterType": "attribute", 53 | "attributeKey": "attr-activity", 54 | "attributeValue": "Purchase Order Line Deleted" 55 | } 56 | ] 57 | } 58 | ] 59 | } -------------------------------------------------------------------------------- /Installation_on_prem/README.md: -------------------------------------------------------------------------------- 1 | # Installation of IBM Process Mining 1.14.4 - Traditional Installation on RedHat 8 2 | 3 | This installation process is intended for a POC: everything is installed as root, with no database password. You can follow the installation documentation to add more security: https://www.ibm.com/docs/en/process-mining/1.14.4?topic=installing-traditional-environments 4 | 5 | The installation process includes both process mining and task mining (optional). 6 | 7 | There are two choices: Process Mining (PM) and Task Mining (TM) on the same server, or on distinct servers. 8 | 9 | ## Installing PM and TM on the same server 10 | This is the easiest option.
11 | [Installing PM and TM on the same server](./1_14_4_processmining_taskmining_on_prem_same_server.md) 12 | 13 | ## Installing PM and TM on distinct servers 14 | A bit more difficult, but largely feasible (not updated since 1.14.1) 15 | [Installing PM and TM on distinct servers](./Previous%20releases/1_14_1_processmining_taskmining_on_prem_distinct_servers.md) 16 | 17 | 18 | -------------------------------------------------------------------------------- /LLM/chat-with-api/BasePMTool.py: -------------------------------------------------------------------------------- 1 | from langchain_core.tools import BaseTool 2 | import requests 3 | import json 4 | import os 5 | 6 | class BasePMTool(BaseTool): 7 | 8 | url :str= os.getenv('PM_API_URL') 9 | user :str = os.getenv('PM_API_USER') 10 | key :str =os.getenv('PM_API_KEY') 11 | 12 | def getToken(self): 13 | keyResponse = requests.post(self.url+"/integration/sign", 14 | headers = {"content-type": "application/json"}, 15 | verify=False, 16 | data=json.dumps({"uid": self.user, 17 | "apiKey": self.key})) 18 | return keyResponse.json()["sign"]; 19 | 20 | def getRequestHeader(self, token): 21 | return {"content-type": "application/json", "Authorization": f"Bearer {token}" } -------------------------------------------------------------------------------- /LLM/chat-with-api/CreateLLM.py: -------------------------------------------------------------------------------- 1 | 2 | import os 3 | from genai.extensions.langchain import LangChainChatInterface 4 | from genai.schema import TextGenerationParameters, TextGenerationReturnOptions 5 | from genai import Client, Credentials 6 | 7 | def createLLM(): 8 | api_key = os.getenv("GENAI_KEY") 9 | api_url = os.getenv("GENAI_API") 10 | 11 | creds = Credentials(api_key, api_endpoint=api_url) 12 | params = TextGenerationParameters(decoding_method="greedy", max_new_tokens=400) 13 | client = Client(credentials=creds) 14 | 15 | llm = LangChainChatInterface(client=client, 16 | model_id="meta-llama/llama-2-70b-chat", parameters=params) 17 | return llm 18 | -------------------------------------------------------------------------------- /LLM/chat-with-api/CreateProjectTool.py: -------------------------------------------------------------------------------- 1 | from BasePMTool import BasePMTool 2 | import requests 3 | from typing import Optional 4 | from langchain_core.callbacks import ( 5 | AsyncCallbackManagerForToolRun, 6 | CallbackManagerForToolRun, 7 | ) 8 | from langchain_core.pydantic_v1 import ( 9 | BaseModel, 10 | Field 11 | ) 12 | import json 13 | 14 | 15 | class CreateProjectInput(BaseModel): 16 | project_name: str = Field(..., description="Name of a process mining project") 17 | 18 | 19 | class CreateProjectTool(BasePMTool): 20 | return_direct: bool = True 21 | name = "create_project" 22 | args_schema = CreateProjectInput 23 | 24 | description = """Use this tool when you need to create a new process mining project.Make sure you use a input format similar to the JSON below: 25 | {{ "project_name": "the project name"}}""" 26 | 27 | def _run(self, project_name: str) -> str: 28 | """Use the tool.""" 29 | 30 | return self.createProject(project_name) 31 | 32 | async def _arun(self, query: str) -> str: 33 | """Use the tool asynchronously.""" 34 | raise NotImplementedError("API does not support async") 35 | 36 | def createProject(self, project_name): 37 | token = self.getToken(); 38 | project_name = str(project_name).strip() 39 | response = requests.post(self.url+f'/integration/processes', 40 | headers = 
self.getRequestHeader(token), 41 | verify=False, 42 | data=json.dumps({"title" : project_name, 43 | "org" : "" })) 44 | jsonresponse = response.json(); 45 | if response.status_code == 200: 46 | return f"The project was created with name {jsonresponse['projectKey']} " ; 47 | else: 48 | if 'data' in jsonresponse: 49 | return f"There was en error creating the project : {jsonresponse['data']}"; 50 | return 'There was en error creating the project' 51 | 52 | -------------------------------------------------------------------------------- /LLM/chat-with-api/CustomParser.py: -------------------------------------------------------------------------------- 1 | from langchain_core.exceptions import OutputParserException 2 | from langchain.agents import AgentOutputParser 3 | from typing import Union 4 | from langchain.schema import AgentAction, AgentFinish 5 | import re 6 | import json 7 | from langchain.output_parsers.json import parse_json_markdown 8 | from prompts import FORMAT_INSTRUCTIONS 9 | 10 | class CustomParser(AgentOutputParser): 11 | 12 | def get_format_instructions(self) -> str: 13 | return FORMAT_INSTRUCTIONS 14 | 15 | def parse(self, text: str) -> AgentAction | AgentFinish: 16 | try: 17 | #indexObservation = text.index('Observation:'); 18 | #if indexObservation > 0: 19 | # text = text[0:indexObservation]; 20 | # this will work IF the text is a valid JSON with action and action_input 21 | response = parse_json_markdown(text) 22 | action, action_input = response["action"], "" 23 | if 'action_input' in response: 24 | action_input = response["action_input"] 25 | 26 | 27 | if action == "Final Answer": 28 | #if action input is a dictionaty 29 | if isinstance(action_input, dict) : 30 | if 'answer' in action_input: 31 | action_input = action_input['answer']; 32 | if 'response' in action_input: 33 | action_input = action_input['response']; 34 | else: 35 | action_input = str(action_input); 36 | 37 | # this means the agent is finished so we call AgentFinish 38 | return AgentFinish({"output": action_input}, text) 39 | else: 40 | # otherwise the agent wants to use an action, so we call AgentAction 41 | return AgentAction(action, action_input, text) 42 | except Exception as e: 43 | print(e) 44 | # sometimes the agent will return a string that is not a valid JSON 45 | # often this happens when the agent is finished 46 | # so we just return the text as the output 47 | return AgentFinish({"output": text}, text) 48 | 49 | @property 50 | def _type(self) -> str: 51 | return "conversational_chat" 52 | 53 | -------------------------------------------------------------------------------- /LLM/chat-with-api/DeleteProjectTool.py: -------------------------------------------------------------------------------- 1 | from BasePMTool import BasePMTool 2 | import requests 3 | from typing import Optional 4 | from langchain_core.callbacks import ( 5 | AsyncCallbackManagerForToolRun, 6 | CallbackManagerForToolRun, 7 | ) 8 | from langchain_core.pydantic_v1 import ( 9 | BaseModel, 10 | Field 11 | ) 12 | import json 13 | 14 | class DeleteProjectInput(BaseModel): 15 | project_name: str = Field(..., description="Name of a process mining project") 16 | 17 | class DeleteProjectTool(BasePMTool): 18 | return_direct: bool = True 19 | name = "delete_project" 20 | args_schema = DeleteProjectInput 21 | 22 | description = """Use this tool when you need to delete a process mining project providing its name.""" 23 | 24 | def _run(self, project_name: str) -> str: 25 | """Use the tool.""" 26 | 27 | return 
self.deleteProject(project_name) 28 | 29 | async def _arun(self, query: str) -> str: 30 | """Use the tool asynchronously.""" 31 | raise NotImplementedError("API does not support async") 32 | 33 | def deleteProject(self, project_name): 34 | token = self.getToken(); 35 | project_name = str(project_name).strip() 36 | response = requests.delete(self.url+f'/integration/processes/{project_name}?org=', 37 | headers = self.getRequestHeader(token), 38 | verify=False) 39 | responsedata = response.json() 40 | if response.status_code == 200: 41 | return f'The project {project_name} was removed' 42 | else: 43 | if 'data' in responsedata: 44 | return f"Error trying to delete the project {project_name} : {responsedata['data']}"; 45 | return f'Error trying to delete the project {project_name}' 46 | 47 | -------------------------------------------------------------------------------- /LLM/chat-with-api/GetModelStatisticsTool.py: -------------------------------------------------------------------------------- 1 | from BasePMTool import BasePMTool 2 | import SetDataMappingTool 3 | import SetDateFormatTool 4 | import os 5 | import time 6 | import requests 7 | from typing import Optional 8 | from langchain_core.callbacks import ( 9 | AsyncCallbackManagerForToolRun, 10 | CallbackManagerForToolRun, 11 | ) 12 | import uuid 13 | from langchain_core.pydantic_v1 import ( 14 | BaseModel, 15 | Field 16 | ) 17 | import json 18 | 19 | 20 | class GetModelStatisticsInput(BaseModel): 21 | project_name: str = Field(..., description="Name of a process mining project") 22 | 23 | class GetModelStatisticsTool(BasePMTool): 24 | return_direct: bool = True 25 | name = "get_statistics" 26 | args_schema = GetModelStatisticsInput 27 | 28 | description = """The tool provides statistics on activities for a specified project. You will need to use the format : 29 | {{ "project_name": "the project name"}}""" 30 | 31 | def _run(self, project_name) -> str: 32 | """Use the tool.""" 33 | return self.getprocessModelStats(project_name) 34 | 35 | async def _arun(self, **params) -> str: 36 | """Use the tool asynchronously.""" 37 | raise NotImplementedError("API does not support async") 38 | 39 | def getprocessModelStats(self, project_name): 40 | """Gets the process model stats""" 41 | try: 42 | token = self.getToken() 43 | response = requests.get(self.url+f'/integration/processes/{project_name}/model-statistics', 44 | headers = self.getRequestHeader(token), 45 | params = { 'org' : ''}, 46 | verify=False) 47 | jsonresponse = response.json() 48 | if response.status_code == 200: 49 | # downloading in progress 50 | data = jsonresponse['data']; 51 | print(data) 52 | res = "Here are the statistics on activities:\n" 53 | for node in data['model']['nodes']: 54 | if node['activityName'] != 'START' and node['activityName'] != 'STOP': 55 | res += f"{node['activityName']} : frequency ({node['statistics']['frequency']}), avg duration ({node['statistics']['avgDuration']}) , cost ({node['statistics']['cost']})\n""" 56 | return res; 57 | else: 58 | if 'data' in jsonresponse: 59 | return jsonresponse['data']; 60 | return 'Error running the tool' 61 | except Exception as e: 62 | return 'Cannot get activity stats on the project. 
' + str(e) -------------------------------------------------------------------------------- /LLM/chat-with-api/MiningTool.py: -------------------------------------------------------------------------------- 1 | from BasePMTool import BasePMTool 2 | import SetDataMappingTool 3 | import SetDateFormatTool 4 | import os 5 | import time 6 | import requests 7 | from typing import Optional 8 | from langchain_core.callbacks import ( 9 | AsyncCallbackManagerForToolRun, 10 | CallbackManagerForToolRun, 11 | ) 12 | import uuid 13 | from langchain_core.pydantic_v1 import ( 14 | BaseModel, 15 | Field 16 | ) 17 | import json 18 | 19 | 20 | class MiningToolInput(BaseModel): 21 | project_name: str = Field(..., description="Name of a process mining project") 22 | 23 | class MiningTool(BasePMTool): 24 | return_direct: bool = True 25 | name = "mine_data" 26 | args_schema = MiningToolInput 27 | 28 | description = """Use this tool when you need perform the mining on a project. Ue the following format: 29 | {{ "project_name": "the project name"}}""" 30 | 31 | def _run(self, project_name) -> str: 32 | """Use the tool.""" 33 | return self.mine(project_name) 34 | 35 | async def _arun(self, **params) -> str: 36 | """Use the tool asynchronously.""" 37 | raise NotImplementedError("API does not support async") 38 | 39 | def mine(self, project_name): 40 | """Loads the object in cloud ot processmining""" 41 | try: 42 | # check there is a 43 | if project_name not in SetDataMappingTool.column_mappings: 44 | return "you should first specify a column mapping for the project" 45 | mapping = SetDataMappingTool.column_mappings[project_name]; 46 | 47 | formattedMapping = {} 48 | dateformat = 'yyyy-MM-dd HH:mm:ss.SSS' 49 | if project_name in SetDateFormatTool.date_formats: 50 | dateformat = SetDateFormatTool.date_formats[project_name]; 51 | formattedMapping[str(mapping['case_id_index'])] ={"id":"attr-process","mask":"","name":mapping['case_id_name']}; 52 | formattedMapping[str(mapping['activity_column_index'])] ={"id":"attr-activity","mask":"","name":mapping['activity_column_name']}; 53 | formattedMapping[str(mapping['start_time_index'])] ={"id":"attr-start-time","mask":dateformat,"name":mapping['start_time_name']}; 54 | 55 | token = self.getToken() 56 | response = requests.post(self.url+f'/integration/csv/{project_name}/create-log', 57 | headers = self.getRequestHeader(token), 58 | params = {'mapping' : json.dumps(formattedMapping), 'org' : ''}, 59 | verify=False) 60 | jsonresponse = response.json() 61 | if response.status_code == 200: 62 | # downloading in progress 63 | jobid = jsonresponse['data']; 64 | done= False; 65 | while not done: 66 | time.sleep(2); 67 | response = requests.get(self.url+f'/integration/csv/job-status/{jobid}', 68 | headers = self.getRequestHeader(token), 69 | verify=False) 70 | data = response.json() 71 | if response.status_code == 200 : 72 | done = data['data'] == 'complete' 73 | if data['data'] == 'error': 74 | if 'errors.providedFieldMappingNotValid' == data['message'] : 75 | return 'The project does not have a mapping defined' 76 | return f"There was a problem mining the project : {data['message']}" ; 77 | else: 78 | return "There was a problem mining the data."; 79 | return "The mining was done"; 80 | else: 81 | if 'data' in jsonresponse: 82 | return jsonresponse['data']; 83 | return 'Error running the tool' 84 | except Exception as e: 85 | return 'Cannot do mining on the project. 
' + str(e) -------------------------------------------------------------------------------- /LLM/chat-with-api/ProjectDetailTool.py: -------------------------------------------------------------------------------- 1 | from langchain_core.tools import BaseTool 2 | from langchain_core.pydantic_v1 import ( 3 | BaseModel, 4 | Field 5 | ) 6 | import requests 7 | from typing import Optional 8 | from langchain_core.callbacks import ( 9 | AsyncCallbackManagerForToolRun, 10 | CallbackManagerForToolRun, 11 | ) 12 | import json 13 | from BasePMTool import BasePMTool 14 | 15 | 16 | class ProjectDetailInput(BaseModel): 17 | project_name: str = Field(..., description="Name of a process mining project") 18 | 19 | class ProjectDetailTool(BasePMTool): 20 | 21 | name = "get_project_detail" 22 | description = """ The tool can provide the owner of the project, the number of cases, 23 | the number of events and the organization of the project. Please note the returned 24 | durations are expressed in milliseconds. 25 | """ 26 | 27 | args_schema = ProjectDetailInput 28 | 29 | def _run(self, project_name: str, 30 | run_manager: Optional[CallbackManagerForToolRun] = None) -> str: 31 | """Use the tool.""" 32 | return self.getProjectFromNameWithAPI(project_name) 33 | 34 | async def _arun(self, query: str) -> str: 35 | """Use the tool asynchronously.""" 36 | raise NotImplementedError("API does not support async") 37 | 38 | def getProjectFromNameWithAPI(self, name): 39 | response = requests.get(self.url+f'/integration/processes', 40 | headers = self.getRequestHeader(self.getToken()), 41 | verify = False) 42 | 43 | if response.status_code == 200: 44 | #might be that the process is incomplete and the API return 400. invoke the API with all projects to check.abs 45 | for p in response.json()['data'] : 46 | if p['projectName'] == name or p['projectTitle'] == name: 47 | return str(p) 48 | return "The project does not exist." 49 | 50 | else: 51 | return "The tool has some issue, try later." -------------------------------------------------------------------------------- /LLM/chat-with-api/ProjectsTool.py: -------------------------------------------------------------------------------- 1 | from BasePMTool import BasePMTool 2 | from langchain_core.tools import BaseTool 3 | import requests 4 | from typing import Optional 5 | from langchain_core.callbacks import ( 6 | AsyncCallbackManagerForToolRun, 7 | CallbackManagerForToolRun, 8 | ) 9 | import json 10 | 11 | class ProjectsTool(BasePMTool): 12 | return_direct: bool = False 13 | name = "get_project_list" 14 | description = "return a list of all process mining projects. " 15 | 16 | def _run(self) -> str: 17 | """Use the tool.""" 18 | return self.getProjectsFromAPI() 19 | 20 | async def _arun(self) -> str: 21 | """Use the tool asynchronously.""" 22 | raise NotImplementedError("API does not support async") 23 | 24 | def getProjectsFromAPI(self): 25 | token = self.getToken(); 26 | response = requests.get(self.url+f'/integration/processes', 27 | verify=False, 28 | headers = self.getRequestHeader(token)) 29 | data = response.json()['data'] 30 | 31 | return str(data) -------------------------------------------------------------------------------- /LLM/chat-with-api/README.md: -------------------------------------------------------------------------------- 1 | ## Introduction 2 | The integration of the Langchain project with ProcessMining allows to invoke a various set of tools to create a proccess mining project with a conversational interface. 
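Each tool in this folder wraps the Process Mining public REST API in the same way as BasePMTool: it first exchanges the user id and API key for a bearer token via /integration/sign, then calls the /integration endpoints with that token. A minimal standalone sketch of that pattern (the endpoint paths and payload keys are the ones BasePMTool and ProjectsTool use; it assumes the environment variables described below are set):

```python
import json
import os

import requests

url = os.getenv('PM_API_URL')

# Exchange the user id and API key for a bearer token (same call as BasePMTool.getToken)
sign = requests.post(url + "/integration/sign",
                     headers={"content-type": "application/json"},
                     verify=False,
                     data=json.dumps({"uid": os.getenv('PM_API_USER'),
                                      "apiKey": os.getenv('PM_API_KEY')}))
token = sign.json()["sign"]

# List the projects, as the get_project_list tool does
projects = requests.get(url + "/integration/processes",
                        verify=False,
                        headers={"content-type": "application/json",
                                 "Authorization": f"Bearer {token}"})
print(projects.json()['data'])
```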
3 | 4 | 5 | ## Pre-requisites 6 | * Python 3.8 or higher 7 | * A running process mining server 8 | * An IBM® Cloud Object Storage account where you place the CSV files you want to use for process mining. The CSV files will be retrieved from a bucket named processmining. 9 | * An IBM® watsonx.ai™ AI studio account to access the foundational model used, here llama-2-70b-chat. 10 | 11 | 12 | ## Setup Pre-Requisites 13 | 14 | Set up the environment variables. 15 | 16 | GENAI_KEY : your watsonx.ai key 17 | GENAI_API : the watsonx.ai endpoint URL 18 | 19 | 20 | PM_API_URL : the URL of your process mining public API 21 | PM_API_USER : your user name in the process mining instance 22 | PM_API_KEY : the API key for process mining 23 | 24 | COS_ENDPOINT : optional COS endpoint; the default is https://s3.us-south.cloud-object-storage.appdomain.cloud 25 | COS_API_KEY_ID : your COS API key 26 | COS_INSTANCE_CRN : your COS instance CRN 27 | 28 | 29 | ### Create a virtual env and install the Python packages 30 | ```shell 31 | python -m venv ~/pm-llm 32 | source ~/pm-llm/bin/activate 33 | pip install -r requirements.txt 34 | ``` 35 | 36 | 37 | ### Run the Chat application 38 | 39 | Open a new terminal: 40 | ``` 41 | python app.py 42 | ``` 43 | 44 | 45 | Then open a browser at this URL: http://127.0.0.1:7860 46 | 47 | Then you can ask questions to the chat bot, such as: 48 | * can you create a project named XY 49 | * how many events in project AB 50 | * can you upload the file AA.csv to the project B 51 | * can you mine the project Z 52 | 53 | -------------------------------------------------------------------------------- /LLM/chat-with-api/SetDataMappingTool.py: -------------------------------------------------------------------------------- 1 | from BasePMTool import BasePMTool 2 | import requests 3 | from typing import Optional 4 | from langchain_core.callbacks import ( 5 | AsyncCallbackManagerForToolRun, 6 | CallbackManagerForToolRun, 7 | ) 8 | from langchain_core.pydantic_v1 import ( 9 | BaseModel, 10 | Field 11 | ) 12 | import json 13 | 14 | class SetDataMappingToolInput(BaseModel): 15 | project_name: str = Field(..., description="Name of a process mining project") 16 | activity_column_name: str = Field(..., description="Name of a column in the CSV representing activity name") 17 | activity_column_index: int = Field(..., description="Index of a column in the CSV representing activity name") 18 | case_id_name: str = Field(..., description="Name of a column in the CSV representing the case id") 19 | case_id_index: int = Field(..., description="Index of a column in the CSV representing the case id") 20 | start_time_name: str = Field(..., description="Name of a column in the CSV representing the start time") 21 | start_time_index: int = Field(..., description="Index of a column in the CSV representing the start time") 22 | 23 | 24 | column_mappings = {} 25 | 26 | class SetDataMappingTool(BasePMTool): 27 | return_direct: bool = True 28 | name = "set_data_mapping" 29 | args_schema = SetDataMappingToolInput 30 | 31 | description = """Use this tool when you need to specify the data mapping for the CSV columns""" 32 | 33 | def _run(self, project_name: str, 34 | activity_column_name: str = '', 35 | activity_column_index: int = -1, 36 | case_id_name: str = '', 37 | case_id_index: int = -1, 38 | start_time_name: str = '', 39 | start_time_index: int = -1 40 | ) -> str: 41 | """Use the tool.""" 42 | mapping = {} 43 | if (project_name in column_mappings): 44 | mapping = column_mappings[project_name]; 45 | 46 | if
activity_column_name != '' : 47 | mapping.update({'activity_column_name' : activity_column_name}) 48 | if case_id_name != '' : 49 | mapping.update({'case_id_name' : case_id_name}) 50 | if start_time_name != '' : 51 | mapping.update({'start_time_name' : start_time_name}) 52 | if start_time_index >=0 : 53 | mapping.update({'start_time_index' : start_time_index}) 54 | if activity_column_index >= 0 : 55 | mapping.update({'activity_column_index' : activity_column_index}) 56 | if case_id_index >= 0 : 57 | mapping.update({'case_id_index' : case_id_index}) 58 | 59 | column_mappings[project_name] = mapping; 60 | 61 | if 'activity_column_name' in mapping: 62 | activity_column_name = mapping['activity_column_name'] 63 | else: 64 | activity_column_name = 'Unknown' 65 | 66 | if 'case_id_name' in mapping: 67 | case_id_name = mapping['case_id_name'] 68 | else: 69 | case_id_name = 'Unknown' 70 | 71 | if 'start_time_name' in mapping: 72 | start_time_name = mapping['start_time_name'] 73 | else: 74 | start_time_name = 'Unknown' 75 | 76 | if 'activity_column_index' in mapping: 77 | activity_column_index = mapping['activity_column_index'] 78 | else: 79 | activity_column_index = 'Unknown' 80 | if 'case_id_index' in mapping: 81 | case_id_index = mapping['case_id_index'] 82 | else: 83 | case_id_index = 'Unknown' 84 | if 'start_time_index' in mapping: 85 | start_time_index = mapping['start_time_index'] 86 | else: 87 | start_time_index = 'Unknown' 88 | 89 | return f"""The current binding is : 90 | Activity name: '{activity_column_name}' (column index: {activity_column_index}) 91 | Start time: '{start_time_name}' (column index: {start_time_index}) 92 | Case ID: '{case_id_name}' (column index: {case_id_index}) """ 93 | 94 | 95 | async def _arun(self, query: str) -> str: 96 | """Use the tool asynchronously.""" 97 | raise NotImplementedError("API does not support async") 98 | 99 | -------------------------------------------------------------------------------- /LLM/chat-with-api/SetDateFormatTool.py: -------------------------------------------------------------------------------- 1 | from BasePMTool import BasePMTool 2 | import requests 3 | from typing import Optional 4 | from langchain_core.callbacks import ( 5 | AsyncCallbackManagerForToolRun, 6 | CallbackManagerForToolRun, 7 | ) 8 | from langchain_core.pydantic_v1 import ( 9 | BaseModel, 10 | Field 11 | ) 12 | import json 13 | 14 | class SetDateFormatToolInput(BaseModel): 15 | project_name: str = Field(..., description="Name of a process mining project") 16 | dateformat: str = Field(..., description="java date time format for the project") 17 | 18 | 19 | date_formats = {} 20 | 21 | class SetDateFormatTool(BasePMTool): 22 | return_direct: bool = True 23 | name = "set_date_format" 24 | args_schema = SetDateFormatToolInput 25 | 26 | description = """Use this tool when you need to set the date format that should be used for a project""" 27 | 28 | def _run(self, project_name: str, 29 | date_format: str , 30 | ) -> str: 31 | """Use the tool.""" 32 | date_formats[project_name] = date_format; 33 | 34 | return f"""The format for dates will be {date_format}""" 35 | 36 | 37 | async def _arun(self, query: str) -> str: 38 | """Use the tool asynchronously.""" 39 | raise NotImplementedError("API does not support async") 40 | 41 | -------------------------------------------------------------------------------- /LLM/chat-with-api/SuggestDateFormat.py: -------------------------------------------------------------------------------- 1 | from langchain_core.prompts import 
ChatPromptTemplate 2 | import prompts 3 | from CreateLLM import createLLM 4 | 5 | def suggestDateFormat(data:str): 6 | prompt = ChatPromptTemplate.from_template(prompts.DATE_FORMAT_SUGGESTION) 7 | model = createLLM() 8 | chain = prompt | model 9 | 10 | return chain.invoke({"data": data}).content 11 | -------------------------------------------------------------------------------- /LLM/chat-with-api/SuggestMapping.py: -------------------------------------------------------------------------------- 1 | from langchain_core.prompts import ChatPromptTemplate 2 | import prompts 3 | from CreateLLM import createLLM 4 | 5 | def suggestMapping(columns:str): 6 | print(columns, type(columns)) 7 | prompt = ChatPromptTemplate.from_template(prompts.MAPPING_SUGGESTION) 8 | model = createLLM() 9 | chain = prompt | model 10 | 11 | return chain.invoke({"columns": columns}).content 12 | -------------------------------------------------------------------------------- /LLM/chat-with-api/app.py: -------------------------------------------------------------------------------- 1 | import gradio as gr 2 | from CreateLLM import createLLM 3 | from ProjectDetailTool import ProjectDetailTool 4 | from ProjectsTool import ProjectsTool 5 | from CreateProjectTool import CreateProjectTool 6 | from DeleteProjectTool import DeleteProjectTool 7 | from MiningTool import MiningTool 8 | from SetDataMappingTool import SetDataMappingTool 9 | from SetDateFormatTool import SetDateFormatTool 10 | from GetModelStatisticsTool import GetModelStatisticsTool 11 | 12 | 13 | from LoadDataTool import LoadDataTool 14 | from CustomParser import CustomParser 15 | import prompts 16 | 17 | from langchain.agents import initialize_agent, AgentType 18 | from langchain.memory import ConversationBufferWindowMemory 19 | from langchain.memory import ChatMessageHistory 20 | 21 | from SuggestMapping import suggestMapping 22 | 23 | import urllib3 24 | urllib3.disable_warnings() 25 | 26 | import os 27 | 28 | 29 | def checkEnvironment(): 30 | if not 'GENAI_KEY' in os.environ: 31 | print('Please set env variable GENAI_KEY to your IBM Generative AI key') 32 | exit() 33 | if not 'GENAI_API' in os.environ: 34 | print('Please set env variable GENAI_API to your IBM Generative AI endpoint URL') 35 | exit() 36 | if not 'PM_API_URL' in os.environ: 37 | print('Please set env variable PM_API_URL to process mining API URL') 38 | exit() 39 | if not 'PM_API_USER' in os.environ: 40 | print('Please set env variable PM_API_USER to process mining API USER') 41 | exit() 42 | if not 'PM_API_KEY' in os.environ: 43 | print('Please set env variable PM_API_KEY to process mining API key') 44 | exit() 45 | if not 'COS_API_KEY_ID' in os.environ: 46 | print('Please set env variable COS_API_KEY_ID to Cloud Object Storage API Key') 47 | exit() 48 | if not 'COS_INSTANCE_CRN' in os.environ: 49 | print('Please set env variable COS_INSTANCE_CRN to Cloud Object Storage Intance CRN') 50 | exit() 51 | 52 | 53 | def initializeLLMAgent(): 54 | 55 | 56 | llm = createLLM() 57 | 58 | tools = [MiningTool(),CreateProjectTool(), GetModelStatisticsTool(), DeleteProjectTool(), 59 | ProjectDetailTool(), 60 | LoadDataTool(), SetDataMappingTool(), SetDateFormatTool()]; 61 | 62 | memory = ConversationBufferWindowMemory( 63 | memory_key="chat_history", k=5, return_messages=True, output_key="output" 64 | ) 65 | 66 | pm_agent = initialize_agent(tools, llm, 67 | agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION, verbose=True, 68 | early_stopping_method="generate", 69 | memory=memory, 70 | agent_kwargs = 
{ 71 | 'output_parser': CustomParser(), 72 | 'prefix':prompts.PREFIX, 73 | 'format_instructions': prompts.FORMAT_INSTRUCTIONS, 74 | 'suffix': prompts.SUFFIX 75 | }) 76 | 77 | return pm_agent; 78 | 79 | 80 | checkEnvironment() 81 | 82 | print("Creating the LLMAgent") 83 | 84 | pm_agent = initializeLLMAgent() 85 | 86 | def add_text(history, text): 87 | history = history + [(text, None)] 88 | return history, "" 89 | 90 | def bot(history): 91 | response = infer(history[-1][0]) 92 | history[-1][1] = response['result'] 93 | return history 94 | 95 | def infer(question): 96 | query = f'[INST] {question}[/INST]' 97 | response={} 98 | res = pm_agent.invoke({'input' : query}); 99 | response['result'] =res['output'] 100 | return response 101 | 102 | 103 | css=""" 104 | #col-container {max-width: 700px; margin-left: auto; margin-right: auto;} 105 | """ 106 | 107 | title = """
108 | <div>
109 |   <h1>Chat with IBM Process Mining</h1>
110 |   <p>This sample allows you to interact with IBM Process Mining projects:</p>
111 |   <ul>
112 |     <li>how many events in project XX</li>
113 |     <li>how many cases in project XX</li>
114 |     <li>who is the owner of project XX</li>
115 |     <li>can you create a project named YY</li>
116 |     <li>can you delete the project named YY</li>
117 |     <li>can you upload the file myfile.csv to the project named YY</li>
118 |     <li>please do the mining of project A</li>
119 |   </ul>
120 | </div>
121 |
    122 | """ 123 | 124 | with gr.Blocks(css=css, title='Chat with IBM Process Mining') as demo: 125 | with gr.Column(elem_id="col-container"): 126 | gr.HTML(title) 127 | 128 | chatbot = gr.Chatbot() 129 | question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter ") 130 | submit_btn = gr.Button("Send message") 131 | 132 | question.submit(add_text, [chatbot, question], [chatbot, question]).then( 133 | bot, chatbot, chatbot 134 | ) 135 | 136 | submit_btn.click(add_text, [chatbot, question], [chatbot, question]).then( 137 | bot, chatbot, chatbot 138 | ) 139 | 140 | demo.launch() 141 | -------------------------------------------------------------------------------- /LLM/chat-with-api/prompts.py: -------------------------------------------------------------------------------- 1 | from langchain.agents.structured_chat.prompt import FORMAT_INSTRUCTIONS 2 | 3 | PREFIX = """<>Assistant is a expert JSON builder designed to assist with a wide range of tasks. 4 | 5 | To answer the question of the user, the assistant can use tools. Tools available to Assistant are: 6 | 7 | :<>""" 8 | FORMAT_INSTRUCTIONS = """RESPONSE FORMAT INSTRUCTIONS 9 | ---------------------------- 10 | 11 | When responding to me, please output a response in one of two formats: 12 | 13 | **Option 1:** 14 | Use this if you want the human to use a tool. 15 | Markdown code snippet formatted in the following schema: 16 | 17 | ```json 18 | {{{{ 19 | "action": string, \\\\ The action to take. Must be one of {tool_names} 20 | "action_input": string \\\\ The input to the action 21 | }}}} 22 | ``` 23 | 24 | **Option #2:** 25 | Use this if you want to respond directly to the human. Markdown code snippet formatted in the following schema: 26 | 27 | ```json 28 | {{{{ 29 | "action": "Final Answer", 30 | "action_input": string \\\\ You should put what you want to return to use here in a human readable text. 31 | }}}} 32 | ```""" 33 | 34 | SUFFIX = """Begin! Remember, all actions must be formatted as markdown JSON strings. 35 | Question: {input} 36 | Thought:{agent_scratchpad}""" 37 | 38 | DATE_FORMAT_SUGGESTION= """Provide a date format (java) for the data below, do not provide explanations and answer like : 'a suggested data format is': {data}""" 39 | 40 | MAPPING_SUGGESTION = """with the columns of the csv file: {columns}, suggest the column index and column name of the column that best corresponds to 'activity name', the 'start time ' and the 'case id'. 41 | For every suggestion give the column name and index in the cvs, if you do not find a corespondent column do not make suggestion. 
You must answer starting with :'here is a suggestion for column binding:', do not add any other observation.'""" -------------------------------------------------------------------------------- /LLM/chat-with-api/requirements.txt: -------------------------------------------------------------------------------- 1 | aiofiles==23.2.1 2 | aiohttp==3.9.5 3 | aiolimiter==1.1.0 4 | aiosignal==1.3.1 5 | altair==5.2.0 6 | annotated-types==0.6.0 7 | anyio==4.3.0 8 | attrs==23.2.0 9 | certifi==2024.2.2 10 | charset-normalizer==3.3.2 11 | click==8.1.7 12 | colorama==0.4.6 13 | contourpy==1.2.0 14 | cycler==0.12.1 15 | dataclasses-json==0.6.4 16 | fastapi==0.110.0 17 | ffmpy==0.3.2 18 | filelock==3.13.1 19 | fonttools==4.49.0 20 | frozenlist==1.4.1 21 | fsspec==2024.2.0 22 | gradio==4.19.2 23 | gradio_client==0.10.1 24 | h11==0.14.0 25 | httpcore==1.0.4 26 | httpx==0.26.0 27 | httpx-sse==0.3.1 28 | huggingface-hub==0.21.3 29 | ibm-cos-sdk==2.13.4 30 | ibm-cos-sdk-core==2.13.4 31 | ibm-cos-sdk-s3transfer==2.13.4 32 | ibm-generative-ai==2.2.0 33 | idna==3.6 34 | importlib_resources==6.1.2 35 | Jinja2==3.1.3 36 | jmespath==1.0.1 37 | jsonpatch==1.33 38 | jsonpointer==2.4 39 | jsonschema==4.21.1 40 | jsonschema-specifications==2023.12.1 41 | kiwisolver==1.4.5 42 | langchain==0.1.9 43 | langchain-community==0.0.24 44 | langchain-core==0.1.28 45 | langsmith==0.1.10 46 | markdown-it-py==3.0.0 47 | MarkupSafe==2.1.5 48 | marshmallow==3.21.0 49 | matplotlib==3.8.3 50 | mdurl==0.1.2 51 | multidict==6.0.5 52 | mypy-extensions==1.0.0 53 | numpy==1.26.4 54 | openapi-pydantic==0.4.0 55 | orjson==3.9.15 56 | packaging==23.2 57 | pandas==2.2.1 58 | pillow==10.2.0 59 | pydantic==2.6.3 60 | pydantic_core==2.16.3 61 | pydub==0.25.1 62 | Pygments==2.17.2 63 | pyparsing==3.1.1 64 | python-dateutil==2.9.0 65 | python-multipart==0.0.9 66 | pytz==2024.1 67 | PyYAML==6.0.1 68 | referencing==0.33.0 69 | regex==2023.12.25 70 | requests==2.31.0 71 | rich==13.7.1 72 | rpds-py==0.18.0 73 | ruff==0.3.0 74 | semantic-version==2.10.0 75 | shellingham==1.5.4 76 | six==1.16.0 77 | sniffio==1.3.1 78 | SQLAlchemy==2.0.27 79 | starlette==0.36.3 80 | tenacity==8.2.3 81 | tomlkit==0.12.0 82 | toolz==0.12.1 83 | tqdm==4.66.2 84 | typer==0.9.0 85 | typing-inspect==0.9.0 86 | typing_extensions==4.10.0 87 | tzdata==2024.1 88 | urllib3==2.1.0 89 | uvicorn==0.27.1 90 | websockets==11.0.3 91 | yarl==1.9.4 92 | -------------------------------------------------------------------------------- /Process Apps/BAW BPM/BAW BPM.pma: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW BPM/BAW BPM.pma -------------------------------------------------------------------------------- /Process Apps/BAW BPM/BAW_IPM_backup.idp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW BPM/BAW_IPM_backup.idp -------------------------------------------------------------------------------- /Process Apps/BAW BPM/README.md: -------------------------------------------------------------------------------- 1 | # BAW BPM Process App 2 | 3 | WARNING: Since version 1.14.4 (March 2024), a new version of the BAW process app has been released by the Process Mining development team. 
For more information, read the official documentation: [BAW App](https://www.ibm.com/docs/en/process-mining/1.14.4?topic=SSWR2IP_1.14.4/process-mining-documentation/user-manuals/accelerator/Using_the_custom_Process_App_for_BAW.htm) 4 | 5 | You can download the asset from this link: [Download](https://github.com/nikhil-ps-ibm/processmining_externalFilesbyDocs/blob/BAWAssets/Process%20Apps/BAW-IBM-Process-Mining-Assets.zip) 6 | 7 | ## OBSOLETE 8 | This process app relies on the BAW BPM REST API to fetch instances and tasks from the BAW database, and to create an event for each BPD instance task. 9 | 10 | This is a simplified and limited version of the BAW Accelerator https://github.com/IBM/ibm-process-mining-BAW-accelerator. The standalone version should be used whenever you need to fetch several thousand tasks: you can leverage multi-threading to increase the speed by a factor of 10, you can recover from errors, and so forth. 11 | 12 | 13 | ## BAW BPM Process App Overview 14 | A Process App is created in IBM Process Mining through a simple UI wizard: 15 | - Name the process app, provide a short description, etc. 16 | - Upload the python file that is executed 17 | - Upload a process mining backup file (IDP) that defines the mapping, and optionally KPIs, filters, custom metrics, costs, dashboards, etc. 18 | - Define the process app input parameters that the user will set to connect and extract the data 19 | 20 | ## Python file 21 | The python program entirely defines what the process app does to connect to the data source and to create the event log. IBM Process Mining will call the function ```execute(context)``` that you must declare and define. 22 | 23 | ```python 24 | def execute(context): 25 | # Get the input parameters 26 | config = context['config'] 27 | 28 | # Get the data from the source (ex: call BAW REST API to fetch instances and tasks for each instance) 29 | event_list = everything_you_need_to_do_to_get_the_events(config) 30 | 31 | # Create and return a Pandas dataframe that contains all the events 32 | return(pd.DataFrame(event_list)) 33 | ``` 34 | 35 | ## Tips 36 | You need to test your python code as a standalone program before loading it into the process app: 37 | - Create a ```default_config``` object for testing your app 38 | - Add a ```__main__``` 39 | 40 | The process app can display exceptions in the process app UI, but you need to raise ```ProcessAppException``` to see these messages. We import ProcessAppException from process_app, but this package is not yet available externally. For convenience, we redefine this class when running as standalone, such that our code can run unchanged as standalone or when loaded in IBM Process Mining. 41 | 42 | ```python 43 | # When loaded into Process Mining, the import works. 
44 | # To run/debug this program as standalone code, we redefine the ProcessAppException class below 45 | try: 46 | from process_app import ProcessAppException 47 | except ImportError: 48 | class ProcessAppException(Exception): 49 | def __init__(self, message): 50 | self.message = message 51 | super().__init__(self.message) 52 | def getMessage(self): 53 | return self.message 54 | 55 | def execute(context): 56 | # Get the input parameters 57 | config = context['config'] 58 | 59 | # Example: check that the input parameter from_date matches the expected date format 60 | try: 61 | dateObject = datetime.datetime.strptime(config['from_date'], date_format) 62 | config['from_date'] = dateObject.strftime(baw_date_format) 63 | # If the date validation goes wrong 64 | except Exception as e: # raise a ProcessAppException so the process app UI can display the message 65 | raise ProcessAppException("Incorrect date format, should be like 2022-10-08: " + str(e)) 66 | 67 | # Get the data from the source (ex: call BAW REST API to fetch instances and tasks for each instance) 68 | event_list = everything_you_need_to_do_to_get_the_events(config) 69 | 70 | # Create and return a Pandas dataframe that contains all the events 71 | return(pd.DataFrame(event_list)) 72 | 73 | if __name__ == "__main__": 74 | context = {'config': default_config} 75 | df = execute(context) 76 | print(df) 77 | ``` 78 | 79 | 80 | 81 | 82 | 83 | -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.14.4/BAW-IBM-Process-Mining-Assets.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW-IBM-Process-Mining-Assets/1.14.4/BAW-IBM-Process-Mining-Assets.zip -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.14.4/BAW-IBM-Process-Mining-Assets/BAW-Connector/README.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW-IBM-Process-Mining-Assets/1.14.4/BAW-IBM-Process-Mining-Assets/BAW-Connector/README.pdf -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.14.4/BAW-IBM-Process-Mining-Assets/BAW-Connector/baw_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "root_url": "", 3 | "process_name": "", 4 | "project": "", 5 | "from_date": "2011-08-04T03:12:53Z", 6 | "from_date_criteria": "", 7 | "to_date": "2024-08-04T03:12:53Z", 8 | "to_date_criteria": "", 9 | "status_filter": "Active,Completed,Failed,Terminated,Suspended,Late,At_Risk", 10 | "instance_limit": 10, 11 | "offset": 0, 12 | "thread_count": 1, 13 | "csvpath": "", 14 | "csvfilename": "", 15 | "logfile": "", 16 | "user_name": "", 17 | "password": "" 18 | } -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.14.4/BAW-IBM-Process-Mining-Assets/BAW-Connector/requirements.txt: -------------------------------------------------------------------------------- 1 | aiohttp==3.9.3 2 | aioresponses==0.7.6 3 | aiosignal==1.3.1 4 | attrs==23.2.0 5 | certifi==2023.11.17 6 | charset-normalizer==3.3.2 7 | colorama==0.4.6 8 | croniter==2.0.1 9 | frozenlist==1.4.1 10 | idna==3.6 11 | iniconfig==2.0.0 12 | 
multidict==6.0.4 13 | packaging==23.2 14 | pluggy==1.4.0 15 | pytest==7.4.4 16 | pytest-mock==3.12.0 17 | python-dateutil==2.8.2 18 | pytz==2024.1 19 | requests==2.31.0 20 | six==1.16.0 21 | tqdm==4.66.1 22 | urllib3==2.1.0 23 | yarl==1.9.4 24 | -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.14.4/BAW-IBM-Process-Mining-Assets/BAW-Process_App/BAW Business Process Management.pma: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW-IBM-Process-Mining-Assets/1.14.4/BAW-IBM-Process-Mining-Assets/BAW-Process_App/BAW Business Process Management.pma -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.14.4/BAW-IBM-Process-Mining-Assets/README.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW-IBM-Process-Mining-Assets/1.14.4/BAW-IBM-Process-Mining-Assets/README.pdf -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.15.0/BAW-IBM-Process-Mining-Assets.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW-IBM-Process-Mining-Assets/1.15.0/BAW-IBM-Process-Mining-Assets.zip -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.15.0/BAW-IBM-Process-Mining-Assets/BAW-Connector/README.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW-IBM-Process-Mining-Assets/1.15.0/BAW-IBM-Process-Mining-Assets/BAW-Connector/README.pdf -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.15.0/BAW-IBM-Process-Mining-Assets/BAW-Connector/baw_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "root_url": "", 3 | "process_name": "", 4 | "project": "", 5 | "from_date": "2011-08-04T03:12:53Z", 6 | "from_date_criteria": "", 7 | "to_date": "2024-08-04T03:12:53Z", 8 | "to_date_criteria": "", 9 | "status_filter": "Active,Completed,Failed,Terminated,Suspended,Late,At_Risk", 10 | "instance_limit": 10, 11 | "offset": 0, 12 | "thread_count": 1, 13 | "csvpath": "", 14 | "csvfilename": "", 15 | "logfile": "", 16 | "user_name": "" 17 | } 18 | -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.15.0/BAW-IBM-Process-Mining-Assets/BAW-Connector/requirements.txt: -------------------------------------------------------------------------------- 1 | aiohttp==3.9.5 2 | aioresponses==0.7.6 3 | aiosignal==1.3.1 4 | attrs==23.2.0 5 | certifi==2023.11.17 6 | charset-normalizer==3.3.2 7 | colorama==0.4.6 8 | croniter==2.0.1 9 | frozenlist==1.4.1 10 | idna==3.7 11 | iniconfig==2.0.0 12 | multidict==6.0.4 13 | packaging==23.2 14 | pluggy==1.4.0 15 | pytest==7.4.4 16 | pytest-mock==3.12.0 17 | python-dateutil==2.8.2 18 | pytz==2024.1 19 | requests==2.31.0 20 | six==1.16.0 21 | tqdm==4.66.4 22 | 
urllib3==2.1.0 23 | yarl==1.9.4 24 | -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.15.0/BAW-IBM-Process-Mining-Assets/BAW-Process_App/BAW Business Process Management.pma: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW-IBM-Process-Mining-Assets/1.15.0/BAW-IBM-Process-Mining-Assets/BAW-Process_App/BAW Business Process Management.pma -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/1.15.0/BAW-IBM-Process-Mining-Assets/README.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW-IBM-Process-Mining-Assets/1.15.0/BAW-IBM-Process-Mining-Assets/README.pdf -------------------------------------------------------------------------------- /Process Apps/BAW-IBM-Process-Mining-Assets/2.0.0/BAW-IBM-Process-Mining-Assets.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/BAW-IBM-Process-Mining-Assets/2.0.0/BAW-IBM-Process-Mining-Assets.zip -------------------------------------------------------------------------------- /Process Apps/IT_Ticketing_ServiceNow/README.md: -------------------------------------------------------------------------------- 1 | # ServiceNow Ticketing Process App 2 | 3 | Author: Patrick Megard, patrick.megard@fr.ibm.com 4 | 5 | To create a process app, upload ```ServiceNowConnector.py``` in the process app builder, and ```serviceNow1_2023-06-05_075444-0700.idp``` as a backup. 6 | 7 | Note: So far, the connector has only been tested using the ServiceNow development trial instance, which includes a few incidents. We need to access real systems to finalize the development. 8 | 9 | ## ServiceNow tables 10 | The connector fetches data using the ServiceNow REST API. The following tables are used: 11 | - INCIDENT 12 | - SYS_USER 13 | - SYS_USER_GROUP 14 | - SYS_AUDIT 15 | 16 | The INCIDENT table returns a snapshot of each incident with the main possible incident statuses and dates (create, open, on hold, resolve, close). 17 | 18 | When the AUDIT feature is enabled for the INCIDENT table, changes are logged. We use these changes to add historical status changes that are not visible in INCIDENT, and to add ticket assignment changes. Additional changes, such as priority or category, could be added. 19 | 20 | ## Process mining settings and dashboards 21 | Due to the lack of data, we have not yet created specific filters and dashboards.
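
For reference, fetching these tables boils down to paging through the standard ServiceNow Table API with `requests`. Below is a minimal sketch of that loop; the instance URL and credentials are placeholders, and the actual connector selects specific fields and handles errors and retries:

```python
import requests

BASE_URL = "https://dev12345.service-now.com"  # placeholder trial instance
AUTH = ("admin", "password")                   # placeholder credentials

def fetch_table(table, query="", page_size=1000):
    """Yield every record of a ServiceNow table, one page at a time."""
    offset = 0
    while True:
        params = {"sysparm_limit": page_size,
                  "sysparm_offset": offset,
                  "sysparm_query": query}
        resp = requests.get(f"{BASE_URL}/api/now/table/{table}",
                            auth=AUTH, params=params)
        resp.raise_for_status()
        rows = resp.json()["result"]
        if not rows:
            return
        yield from rows
        offset += page_size

# Incident snapshots, plus the audit trail restricted to incident records,
# which is where the historical status and assignment changes come from
incidents = list(fetch_table("incident"))
audits = list(fetch_table("sys_audit", query="tablename=incident"))
```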
22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /Process Apps/IT_Ticketing_ServiceNow/ServiceNow.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/IT_Ticketing_ServiceNow/ServiceNow.jpg -------------------------------------------------------------------------------- /Process Apps/IT_Ticketing_ServiceNow/Ticketing with ServiceNow.pma: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/IT_Ticketing_ServiceNow/Ticketing with ServiceNow.pma -------------------------------------------------------------------------------- /Process Apps/IT_Ticketing_ServiceNow/serviceNow1_2023-06-05_075444-0700.idp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/IT_Ticketing_ServiceNow/serviceNow1_2023-06-05_075444-0700.idp -------------------------------------------------------------------------------- /Process Apps/Jira_ticketing/.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Python Debugger: Current File", 9 | "type": "debugpy", 10 | "request": "launch", 11 | "program": "${file}", 12 | "console": "integratedTerminal" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /Process Apps/Jira_ticketing/Jira Ticketing.pma: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/Jira_ticketing/Jira Ticketing.pma -------------------------------------------------------------------------------- /Process Apps/Jira_ticketing/JiraSoftware.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/Jira_ticketing/JiraSoftware.jpg -------------------------------------------------------------------------------- /Process Apps/Jira_ticketing/jira_mapping.idp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/Jira_ticketing/jira_mapping.idp -------------------------------------------------------------------------------- /Process Apps/Jira_ticketing/my_config_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "url":"https:///rest/api/2/", 3 | "user":"", 4 | "token":"", 5 | "maxResults":100, 6 | "startAt":0, 7 | "projects":"", 8 | "minimalIssueNumber":100, 9 | "from_date":"2000-01-01", 10 | "to_date":"2023-07-01" 11 | } -------------------------------------------------------------------------------- /Process Apps/Open_apps/Aha! 
Ideas Management open.pma: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/Open_apps/Aha! Ideas Management open.pma -------------------------------------------------------------------------------- /Process Apps/Open_apps/GitHub Ticketing - Trial open.pma: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/Open_apps/GitHub Ticketing - Trial open.pma -------------------------------------------------------------------------------- /Process Apps/Open_apps/Zenhub SDLC - Trial open.pma: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/Open_apps/Zenhub SDLC - Trial open.pma -------------------------------------------------------------------------------- /Process Apps/P2P_data_xform_lab/P2P LAB_2023-08-03_015726-0700.idp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/P2P_data_xform_lab/P2P LAB_2023-08-03_015726-0700.idp -------------------------------------------------------------------------------- /Process Apps/P2P_data_xform_lab/P2P.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/P2P_data_xform_lab/P2P.zip -------------------------------------------------------------------------------- /Process Apps/P2P_data_xform_lab/P2P_data_xform_lab.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from zipfile import ZipFile 3 | import time 4 | 5 | def execute(context): 6 | 7 | # Unzip the file that includes the 3 files that we need to transform and merge 8 | myFileUploadName = context["fileUploadName"] 9 | with ZipFile(myFileUploadName, 'r') as f: 10 | f.extractall() 11 | 12 | # The 3 files are now stored in the Process Mining Server, in a directory dedicated to the process app, where the python code is stored and executed 13 | # as well as where the ZIP file loaded by the user is stored. 
14 | # Therefore, all the unzipped files are accessible in the current directory 15 | # sleep a while until all files are extracted 16 | time.sleep(5) 17 | 18 | requisitions = pd.read_csv('./requisition.csv') 19 | create_requisitions = requisitions.copy() 20 | create_requisitions['activity'] = 'Requisition Created' 21 | create_requisitions.rename(columns={'Create_Date': 'datetime', 'Create_User': 'user', 22 | 'Create_Role': 'role', 'Create_Type': 'type', 'Create_Source': 'source'}, inplace=True) 23 | create_requisitions.drop(['Release_DateTime', 'Release_User', 'Release_Role', 24 | 'Release_Type', 'Release_Source'], axis=1, inplace=True) 25 | 26 | release_requisitions = requisitions.copy() 27 | release_requisitions['activity'] = 'Requisition Released' 28 | release_requisitions.rename(columns={'Release_DateTime': 'datetime', 'Release_User': 'user', 29 | 'Release_Role': 'role', 'Release_Type': 'type', 'Release_Source': 'source'}, inplace=True) 30 | release_requisitions.drop(['Create_Date', 'Create_User', 'Create_Role', 31 | 'Create_Type', 'Create_Source'], axis=1, inplace=True) 32 | # If some requisition cases are not complete, we remove rows where the requisition release date is NaN, as in 33 | # this case, the activity did not yet occur. This is actually never the case. 34 | release_requisitions = release_requisitions[release_requisitions['datetime'].notna()] 35 | 36 | # procurements 37 | procurements = pd.read_csv('./procurement.csv', low_memory=False) 38 | 39 | # invoices 40 | invoices = pd.read_csv('./invoice.csv') 41 | 42 | # Merging invoice.csv information into procurement.csv 43 | procurements = procurements.merge(invoices, on="Invoice_ID", how="left") 44 | 45 | # Finally we append the requisition and the procurement event logs to create the final event log. 
Again, we can remove the events with a null `datetime`. 46 | P2P_events = pd.concat([create_requisitions, release_requisitions, procurements]) 47 | # removing rows with no datetime if any 48 | P2P_events = P2P_events[P2P_events["datetime"].notna()] 49 | P2P_events = P2P_events.convert_dtypes() # applying the best known types 50 | # Reordering columns to simplify mapping 51 | P2P_events = P2P_events[['activity','datetime', 'user', 'role', 'type', 52 | 'source', 'Req_ID','Req_Header', 'Req_Line', 'PO_Header', 'PO_Line', 'PO_ID', 'MatDoc_Header', 53 | 'MatDoc_Line', 'MatDoc_Year', 'MatDoc_ID', 'gr_h_y', 'Invoice_ID', 54 | 'rses_h', 'rses_l', 'rses_y', 'mandt', 'bukrs', 'xblnr', 'fl_h', 'fl_y', 55 | 'value_old', 'value_new', 'clear_doc', 'qmnum', 'data_gr_effettiva', 56 | 'usertype', 'Order_Type', 'Purchasing_Group', 'Purch_Group_Type', 57 | 'Material_Group_Area', 'Accounting_Type', 'Order_Vendor', 58 | 'Order_Source', 'Department', 'Order_Amount', 'Material', 59 | 'lead_time_material', 'Material_Type', 'Purch_Group_Area', 60 | 'Requisition_Plant', 'Order_Plant', 'Material_Plant', 'data_gr_ordine', 61 | 'data_gr_stat', 'data_gr_ipo', 'Paid_Amount', 'Paid_Vendor', 62 | 'split_ordine', 'split_riga_ordine', 'missmatch_riga_oda', 63 | 'check_riga_gagm', 'consegna_ipotetica', 'consegna_oda_ipotetica', 64 | '_consegna_statistica_ipotetica_', 'pay_delay', 'pay_type', 65 | 'Req_Required_Vendor', 'Material_Group', 'Invoice_Date', 66 | 'Requisition_Vendor', 'Purchase_Organization', 'insert_date', 67 | 'Invoice_Header', 'Invoice_Year', 'Invoice_Amount', 'Invoice_Vendor', 68 | 'Invoice_Due_Date', 'Invoice_Vendor_City']] 69 | 70 | return(P2P_events) 71 | 72 | 73 | if __name__ == "__main__": 74 | 75 | context = {'fileUploadName':'P2P.zip'} 76 | df = execute(context) 77 | df.to_csv('P2Peventlog.csv', index=None) -------------------------------------------------------------------------------- /Process Apps/P2P_data_xform_lab/README.md: -------------------------------------------------------------------------------- 1 | # Process App Example: transforming P2P CSV files 2 | The file [P2P.zip](P2P.zip) contains 3 CSV files extracted from a Procure to Pay application (like SAP). 3 | 4 | Each file in [P2P.zip](P2P.zip) includes events (steps) from the processes involved in the end-to-end P2P process: 5 | - procurement.csv 6 | - requisition.csv 7 | - invoice.csv 8 | 9 | The data transformation python [code](P2P_data_xform_lab.py) shows how to extract these files from [P2P.zip](P2P.zip), and how to use Pandas to transform these data sources into a Process Mining event log, by creating specific fields and merging tables. 10 | 11 | A complete tutorial is available in this [Jupyter Notebook](./P2P_data_extraction_tutorial.ipynb). You can experiment easily in the notebook.
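
The core pattern in the code above — turning one snapshot row with several timestamp columns into one event row per activity — can be summarized in a few lines. Here is a toy sketch of that pattern with made-up data (not the lab files):

```python
import pandas as pd

# Toy snapshot table: one row per requisition, one timestamp column per step
snapshot = pd.DataFrame({
    "Req_ID": ["R1", "R2"],
    "Create_Date": ["2023-01-01", "2023-01-02"],
    "Release_DateTime": ["2023-01-03", None],
})

events = []
for activity, column in [("Requisition Created", "Create_Date"),
                         ("Requisition Released", "Release_DateTime")]:
    # One frame per activity: keep the case id, rename the timestamp column
    step = snapshot[["Req_ID", column]].rename(columns={column: "datetime"})
    step["activity"] = activity
    events.append(step)

# One row per event; drop steps that have not happened yet (null datetime)
event_log = pd.concat(events).dropna(subset=["datetime"])
print(event_log.sort_values(["Req_ID", "datetime"]))
```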
-------------------------------------------------------------------------------- /Process Apps/Pandas Tuto/date_utillities.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "2023-01-07 18:52:54\n", 13 | "2022-05-30 18:52:54\n", 14 | "2022-05-30T18:52:54\n" 15 | ] 16 | } 17 | ], 18 | "source": [ 19 | "import time\n", 20 | "from datetime import datetime as dt, timedelta\n", 21 | "import random\n", 22 | "\n", 23 | "d = \"2023-01-07T18:52:54\"\n", 24 | "t = time.strptime(d, \"%Y-%m-%dT%H:%M:%S\")\n", 25 | "deltadays = timedelta(days=random.randint(-365,0))\n", 26 | "dd = dt.strptime(d,\"%Y-%m-%dT%H:%M:%S\")\n", 27 | "print(dd)\n", 28 | "ddd = dd+deltadays\n", 29 | "print(ddd)\n", 30 | "print(dt.strftime(ddd, \"%Y-%m-%dT%H:%M:%S\"))" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 44, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "from datetime import datetime as dt, timedelta\n", 40 | "# Correct date format errors present in the original file\n", 41 | "def change_the_date(thedate, addDays) : # thedate is a string 2023-01-07T18:52:54\n", 42 | " # create a date\n", 43 | " date = dt.strptime(thedate,\"%Y-%m-%dT%H:%M:%S\")\n", 44 | " delta = timedelta(days=addDays)\n", 45 | " date = date+delta\n", 46 | " # return the string\n", 47 | " return dt.strftime(date, \"%Y-%m-%dT%H:%M:%S\")" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 47, 53 | "metadata": {}, 54 | "outputs": [ 55 | { 56 | "data": { 57 | "text/plain": [ 58 | "'2022-10-09T18:52:54'" 59 | ] 60 | }, 61 | "execution_count": 47, 62 | "metadata": {}, 63 | "output_type": "execute_result" 64 | } 65 | ], 66 | "source": [ 67 | "change_the_date(\"2023-01-07T18:52:54\", -3)" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "# when using df.apply:\n", 77 | "# df['EVENTTIME'] = df['EVENTTIME'].apply(change_the_date,addDays=shiftDays)\n", 78 | "\n" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": 7, 84 | "metadata": {}, 85 | "outputs": [ 86 | { 87 | "data": { 88 | "text/plain": [ 89 | "'15_03_2024_10_43_25'" 90 | ] 91 | }, 92 | "execution_count": 7, 93 | "metadata": {}, 94 | "output_type": "execute_result" 95 | } 96 | ], 97 | "source": [ 98 | "from datetime import datetime\n", 99 | "\n", 100 | "now = datetime.now()\n", 101 | "now.strftime(\"%d_%m_%Y_%H_%M_%S\")" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 3, 107 | "metadata": {}, 108 | "outputs": [ 109 | { 110 | "data": { 111 | "text/plain": [ 112 | "datetime.datetime(2018, 12, 31, 15, 57, 44)" 113 | ] 114 | }, 115 | "execution_count": 3, 116 | "metadata": {}, 117 | "output_type": "execute_result" 118 | } 119 | ], 120 | "source": [ 121 | "from datetime import datetime as dt\n", 122 | "dt.fromtimestamp(1546268264000.0/1000)\n" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": 23, 128 | "metadata": {}, 129 | "outputs": [ 130 | { 131 | "data": { 132 | "text/plain": [ 133 | "'2020/18'" 134 | ] 135 | }, 136 | "execution_count": 23, 137 | "metadata": {}, 138 | "output_type": "execute_result" 139 | } 140 | ], 141 | "source": [ 142 | "import time\n", 143 | "from datetime import datetime as dt, timedelta\n", 144 | "import datetime\n", 145 | "import 
random\n", 146 | "\n", 147 | "d = dt.fromtimestamp(1587987987)\n", 148 | "d = d.replace(day=30, hour=0, minute=0, second=0)\n", 149 | "\"%s/%s\" % (dt.date(d).isocalendar().year, dt.date(d).isocalendar().week)\n" 150 | ] 151 | } 152 | ], 153 | "metadata": { 154 | "kernelspec": { 155 | "display_name": "Python 3", 156 | "language": "python", 157 | "name": "python3" 158 | }, 159 | "language_info": { 160 | "codemirror_mode": { 161 | "name": "ipython", 162 | "version": 3 163 | }, 164 | "file_extension": ".py", 165 | "mimetype": "text/x-python", 166 | "name": "python", 167 | "nbconvert_exporter": "python", 168 | "pygments_lexer": "ipython3", 169 | "version": "3.10.5" 170 | }, 171 | "orig_nbformat": 4, 172 | "vscode": { 173 | "interpreter": { 174 | "hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49" 175 | } 176 | } 177 | }, 178 | "nbformat": 4, 179 | "nbformat_minor": 2 180 | } 181 | -------------------------------------------------------------------------------- /Process Apps/README.md: -------------------------------------------------------------------------------- 1 | ## Process Apps (public and open source) 2 | 3 | These process apps are delivered as python source code that you can use to create your own process app. 4 | - BAW BPM: [BAW BPM](BAW%20BPM) 5 | - ServiceNow Ticketing: [ServiceNow](IT_Ticketing_ServiceNow) 6 | - Jira Ticketing: [Jira Ticketing](Jira_ticketing) 7 | - Redmine Ticketing: [Redmine](https://github.com/IBM/japan-technology/tree/main/usecases/process-mining/ProcessApps/Redmine_ticketing) -- Thanks to Tatsuya.Miyazaki@ibm.com 8 | 9 | These process apps are delivered as a .pma binary file that can be imported into IBM Process Mining. The python code can be downloaded from the imported process app. 10 | - Aha! Ideas Management: [Aha! Ideas Management](./Open_apps/Aha!%20Ideas%20Management%20open.pma) 11 | - GitHub Ticketing: [GitHub Ticketing](./Open_apps/GitHub%20Ticketing%20-%20Trial%20open.pma) 12 | - Zenhub Software Development Lifecycle: [Zenhub SDLC](./Open_apps/Zenhub%20SDLC%20-%20Trial%20open.pma) 13 | 14 | ## Process Apps (IBM private and open source) 15 | - Salesforce opportunity management process app: [Salesforce](https://github.ibm.com/automation-base-pak/pm-process-apps/tree/main/salesforce-opportunity-management) -- Thanks to emmanuel.tissandier@fr.ibm.com. This asset is still in the IBM github enterprise repository. Please contact Emmanuel Tissandier or patrick.megard@fr.ibm.com to get it. 16 | 17 | ## Process Apps: How to 18 | - Process app test: [testProcessApps](testProcessApps) 19 | - Process app with uploaded file test: [more](./testProcessAppWithFile) 20 | - Jira Ticketing Tutorial: [JiraConnectorTutorial notebook](./Jira_ticketing/JiraConnectorTutorial.ipynb) 21 | 22 | Pandas is required to create process apps. This tutorial (under construction) gathers useful pandas code to save you time: 23 | - Pandas tutorial: [Pandas Tutorial](Pandas%20Tuto/pandas_basics_process_apps.ipynb) 24 | -------------------------------------------------------------------------------- /Process Apps/testProcessAppWithFile/README.md: -------------------------------------------------------------------------------- 1 | # Simple Process App that uses a ZIP file loaded by the user 2 | In this example, the user will upload a ZIP file that contains one or several files to process in order to create the event log. 3 | 4 | To keep it very simple, here we are uploading a ZIP file that just contains a simple CSV file ready to be ingested.
A more sophisticated example will be developed later on. 5 | 6 | - Create a process app 7 | - Load the simplecode.py (execution code) 8 | 9 | - Create a process with this process app 10 | - Upload the file 'justatest.zip' 11 | 12 | The program will retrieve the justatest.zip file that is located on the server, in the process app directory. We unzip the file: justatest.csv is now located in the same directory. 13 | 14 | We can read this file with ```pd.read_csv('./justatest.csv')``` 15 | -------------------------------------------------------------------------------- /Process Apps/testProcessAppWithFile/justatest.csv: -------------------------------------------------------------------------------- 1 | processid,activity,date 2 | p1,analyze request,2023-01-01 3 | p1,approve request,2023-01-02 4 | p2,approve request,2023-01-02 5 | p2,reject request,2023-01-04 6 | -------------------------------------------------------------------------------- /Process Apps/testProcessAppWithFile/justatest.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/testProcessAppWithFile/justatest.zip -------------------------------------------------------------------------------- /Process Apps/testProcessAppWithFile/simplecode.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from zipfile import ZipFile 3 | 4 | def execute(context): 5 | 6 | myFileUploadName = context["fileUploadName"] 7 | with ZipFile(myFileUploadName, 'r') as f: 8 | f.extractall() 9 | df = pd.read_csv('./justatest.csv') 10 | return(df) 11 | 12 | if __name__ == "__main__": 13 | 14 | df = execute({'fileUploadName': 'justatest.zip'}) # the key the process app provides at runtime 15 | df.to_csv('justatest.csv') -------------------------------------------------------------------------------- /Process Apps/testProcessApps/pattest_2023-06-02_081520+0000.idp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/Process Apps/testProcessApps/pattest_2023-06-02_081520+0000.idp -------------------------------------------------------------------------------- /Process Apps/testProcessApps/testprocessapp.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | def execute(context): 4 | justatest = [ 5 | {'processid':'p1', 'activity':'analyze request', 'date':'2023-01-01'}, 6 | {'processid':'p1', 'activity':'approve request', 'date':'2023-01-02'}, 7 | {'processid':'p2', 'activity':'approve request', 'date':'2023-01-02'}, 8 | {'processid':'p2', 'activity':'reject request', 'date':'2023-01-04'}, 9 | ] 10 | df = pd.DataFrame(justatest) 11 | return(df) 12 | 13 | if __name__ == "__main__": 14 | 15 | df = execute({}) 16 | df.to_csv('justatest.csv') -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # IBM Process Mining 2 | IBM Process Mining assets are available to clients, partners, and IBMers. 3 | 4 | ## Process Apps 5 | Process Apps can be developed by IBM, consultants, partners, and clients. They provide connectors, filters, KPIs, and dashboards that address a specific use case. 6 | Don't hesitate to use pull requests if you want to contribute a process app useful to other process mining practitioners.
7 | 8 | This repository contains process apps you can upload to your process mining environment. 9 | 10 | Check the [list of available process apps](./Process%20Apps/README.md) 11 | 12 | [BAW-IBM-Process-Mining-Assets](./Process%20Apps/BAW-IBM-Process-Mining-Assets/) contains the process app documented and supported in the product. I found several issues with clients using this connector/process app, so I recommend using [BAW_connector](./BAW_connector/) instead. 13 | 14 | ## Use cases - Examples 15 | Feel free to create process mining projects from these [examples](./Datasets_usecases/README.md), and to contribute new ones. 16 | 17 | ## Custom Accelerators (public) 18 | Accelerators are programs used to create the event log from external data sources. They include the connection to the data source, the data transformation, and, optionally, the upload of the final CSV into a process mining project. Accelerators can be multi-threaded and can provide recovery mechanisms. They are helpful when collecting large amounts of data that require several hours of connection time. 19 | - BAW Accelerator for BPM: [python](./BAW_connector/README.md) 20 | - BAW Accelerator for Case (no code): https://github.com/MalekJabri/BAWAccelerator 21 | 22 | ## Custom Accelerators (IBM consultants only) 23 | These accelerators are available to IBMers only. 24 | - SAP AP, SAP P2P, SAP O2C with Talend: https://github.ibm.com/automation-base-pak/ibm-process-mining-etl/ 25 | - Maximo (contact laurence_may@uk.ibm.com) 26 | 27 | 28 | ## Installation scripts 29 | Traditional installation of process mining and task mining on premises: [Installation scripts](./Installation_on_prem/README.md). 30 | Following this script might accelerate the installation process for POCs. Note: this script is for version 1.14. Version 1.15 adds MonetDB for nextgen. 31 | 32 | ## REST APIs 33 | [REST APIs](./REST%20APIs/) contains [IPMClient](./REST%20APIs/IPMClient/), a powerful python library that drastically simplifies the use of the Process Mining REST API in a python program. Most REST APIs are implemented, and you can easily request analytics results, create projects, upload data, create users, and more, with a few lines of python code. 34 | 35 | ## LLM 36 | [LLM](./LLM/chat-with-api/) is an experiment by Emmanuel Tissandier: you interact with a bot that interprets your request using an LLM and calls the appropriate action to provide the answer. This is an interesting example of using the Process Mining REST API and LangChain. 37 | 38 | ## Hands-on Labs 39 | 40 | IBM Process Mining and Task Mining hands-on labs are updated to work with every major release. Click [here](https://ibm.box.com/v/PROC-TASK-MINING-LABS-1-14) to download the labs. The labs were designed to run as-is on the IBM Tech Zone Environment (current version 1.14.1), which includes Process Mining, Task Mining, and Task Mining Client VMs, and is available to IBMers and IBM Business Partners. Click [here](https://techzone.ibm.com/collection/process-mining-with-task-mining-demo-and-etl) to access the Tech Zone Environment. 41 | 42 | For questions regarding the labs or the Tech Zone Environment, contact pacholsk@calibm.com 43 | 44 | # IBM Process Mining Technical Assets 45 | These assets illustrate how JavaScript developers can customize IBM Process Mining components. 46 | 47 | ## Custom Metrics (JavaScript) 48 | Custom metrics are developed in JavaScript to create case-level metrics from the event data in each case.
49 | Examples of custom metrics: [Custom Metrics](./Custom%20Metrics/) 50 | 51 | ## Custom Filters (JavaScript) 52 | Custom filters are developed in JavaScript to create sophisticated filters from the event data in each case. 53 | Examples of custom filters: [Custom Filters](./Custom%20Filters/) 54 | 55 | ## Custom Widgets (JavaScript) 56 | Custom widgets are developed in JavaScript to add new widgets to dashboards. 57 | Examples of custom widgets: [Custom Widgets](./Custom%20Widgets/) 58 | 59 | Read this [tutorial](./Custom%20Widgets/dimension_linechart/README.md) to learn how to create advanced custom widgets with charts that could be useful in any project. 60 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/COS.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Uploading data to IBM Cloud Object Storage\n", 8 | "\n", 9 | "This example shows how you can upload any kind of data to IBM Cloud Object Storage (S3).\n", 10 | "\n", 11 | "The configuration file COSConfig.json includes the JSON credentials dict that you can retrieve from the COS (S3) service.\n", 12 | "\n", 13 | "Example of credentials in COSConfig.json: \n" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": null, 19 | "metadata": {}, 20 | "outputs": [], 21 | "source": [ 22 | "\n", 23 | "{\n", 24 | " \"apikey\": \"w_e7iClXZv1L4EZN4OfpGPUsQhKdnaaaaaaaaaaaaa\",\n", 25 | " \"endpoints\": \"https://control.cloud-object-storage.cloud.ibm.com/v2/endpoints\",\n", 26 | " \"iam_apikey_description\": \"Auto-generated for key crn:v1:bluemix:public:cloud-object-storage:global:a/36c46e8f45ad4d8395e2955c8b5a4cf4:cd24c66b-2121-437a-8942-aaaaaaaaa:resource-key:8e081733-cf6c-49ba-975f-aaaaaaaaaa\",\n", 27 | " \"iam_apikey_id\": \"ApiKey-4345baee-6f71-4ad8-a308-aaaaaaaaa\",\n", 28 | " \"iam_apikey_name\": \"cloud-object-storage-cos-standard-aaaaaaaaaaa\",\n", 29 | " \"iam_role_crn\": \"crn:v1:bluemix:public:iam::::serviceRole:Writer\",\n", 30 | " \"iam_serviceid_crn\": \"crn:v1:bluemix:public:iam-identity::a/36c46e8f45ad4d8395e2955c8b5a4cf4::serviceid:ServiceId-c90ea459-1570-4883-831c-aaaaaaaaaaa\",\n", 31 | " \"resource_instance_id\": \"crn:v1:bluemix:public:cloud-object-storage:global:a/36c46e8f45ad4d8395e2955c8b5a4cf4:cd24c66b-2121-437a-8942-aaaaaaaaaa::\"\n", 32 | "}" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": 2, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "from ibm_botocore.client import Config\n", 42 | "import ibm_boto3\n", 43 | "import json\n", 44 | "\n", 45 | "\n", 46 | "COSConfigFilename = './COSConfig.json'\n", 47 | "with open(COSConfigFilename, 'r') as file:\n", 48 | " serviceCredential = json.load(file) \n", 49 | "\n", 50 | "\n", 51 | "s3 = ibm_boto3.client(\"s3\", ibm_api_key_id=serviceCredential['apikey'],\n", 52 | " ibm_service_instance_id=serviceCredential['resource_instance_id'],\n", 53 | " config=Config(signature_version=\"oauth\"),\n", 54 | " endpoint_url='https://s3.eu-gb.cloud-object-storage.appdomain.cloud'\n", 55 | ")\n", 56 | "\n", 57 | "s3.list_buckets()\n", 58 | "\n", 59 | "s3.upload_file(Filename='data/Client Onboarding.idp',Bucket=serviceCredential['iam_apikey_name'],Key='Client Onboarding.idp')\n", 60 | "s3.upload_file(Filename='data/Client Onboarding.zip',Bucket=serviceCredential['iam_apikey_name'],Key='Client Onboarding.zip')" 61 | ] 62 | } 63 | ], 64 | "metadata": { 65 | 
"kernelspec": { 66 | "display_name": "Python 3", 67 | "language": "python", 68 | "name": "python3" 69 | }, 70 | "language_info": { 71 | "codemirror_mode": { 72 | "name": "ipython", 73 | "version": 3 74 | }, 75 | "file_extension": ".py", 76 | "mimetype": "text/x-python", 77 | "name": "python", 78 | "nbconvert_exporter": "python", 79 | "pygments_lexer": "ipython3", 80 | "version": "3.10.5" 81 | } 82 | }, 83 | "nbformat": 4, 84 | "nbformat_minor": 2 85 | } 86 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/COSConfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "apikey": "w_e7iClXZv1L4EZN4OfpGPUsQhKdnR3jgVFhD6uVTcNG", 3 | "endpoints": "https://control.cloud-object-storage.cloud.ibm.com/v2/endpoints", 4 | "iam_apikey_description": "Auto-generated for key crn:v1:bluemix:public:cloud-object-storage:global:a/36c46e8f45ad4d8395e2955c8b5a4cf4:cd24c66b-2121-437a-8942-ad6c8f27ca4d:resource-key:8e081733-cf6c-49ba-975f-48fbf6030c2a", 5 | "iam_apikey_id": "ApiKey-4345baee-6f71-4ad8-a308-b822ea0a8566", 6 | "iam_apikey_name": "cloud-object-storage-cos-standard-patrickmegard", 7 | "iam_role_crn": "crn:v1:bluemix:public:iam::::serviceRole:Writer", 8 | "iam_serviceid_crn": "crn:v1:bluemix:public:iam-identity::a/36c46e8f45ad4d8395e2955c8b5a4cf4::serviceid:ServiceId-c90ea459-1570-4883-831c-f05f9875701b", 9 | "resource_instance_id": "crn:v1:bluemix:public:cloud-object-storage:global:a/36c46e8f45ad4d8395e2955c8b5a4cf4:cd24c66b-2121-437a-8942-ad6c8f27ca4d::" 10 | } -------------------------------------------------------------------------------- /REST APIs/IPMClient/CognosAnalytics.json: -------------------------------------------------------------------------------- 1 | { 2 | "url": "http://pm-patrick-process-miner.fyre.ibm.com:9300/api/v1/", 3 | "CAMAPILoginKey": "AWlDNzY5MTJCQzg1MDU0NTQ5OTlFM0Y4ODQ5MzA5NTY4RXiiziKhbFA4nCbGAI0I+T8zFu1R", 4 | "CAMNamespace": "", 5 | "CAMUsername": "", 6 | "CAMPassword":"" 7 | } -------------------------------------------------------------------------------- /REST APIs/IPMClient/CognosAnalyticsExample.json: -------------------------------------------------------------------------------- 1 | { 2 | "url": "http://MY_COGNOS_URL:9300/api/v1/", 3 | "CAMAPILoginKey": "AWlAZERTYUIOMDU0NTQ5OTlFM0Y4ODQ5MzA5NTY4RXiiziKhbFA4nCbGAI0I+T8zFu1R", 4 | "CAMNamespace": "", 5 | "CAMUsername": "", 6 | "CAMPassword":"" 7 | } -------------------------------------------------------------------------------- /REST APIs/IPMClient/IPMAccount.py: -------------------------------------------------------------------------------- 1 | import IPMBase as ipmb 2 | import IPMTenant as ipmt 3 | import json 4 | 5 | # Requires Admin permission; 'Owners' only can run this class 6 | class Account(ipmb.Base): 7 | def __init__(self, tenant, jsondata): 8 | ipmb.Base.__init__(self) 9 | self.tenant = tenant 10 | self.username = jsondata['username'] 11 | self.email = jsondata['username'] 12 | self.lastname = jsondata['lastName'] 13 | self.firstname = jsondata['firstName'] 14 | self.key = jsondata['accountId'] 15 | self.jsondata = jsondata 16 | dataExample = { 17 | "firstName": "John", 18 | "lastName": "Sam", 19 | "country": "IN", 20 | "email": "john.sam@ibm.com", 21 | "agentEnabled": True, 22 | "technicalUser": True, 23 | "active": True, 24 | "password": "string", 25 | "username": "john.sam" 26 | } 27 | 28 | def patch(self, jsondata): 29 | exampleData = { 30 | "firstName": "John", 31 | "lastName": "Sam", 32 | "country": 
"IN", 33 | "email": "admin@ibm.com", 34 | "agentEnabled": True, 35 | "technicalUser": True, 36 | "active": True, 37 | "password": "string" 38 | } 39 | self.username = jsondata['username'] 40 | self.email = jsondata['username'] 41 | self.lastname = jsondata['lastName'] 42 | self.firstname = jsondata['firstname'] 43 | if self.sendPatchRequest( 44 | url=f'{self.getURL()}/user-management/integration/tenants/{self.tenant.key}/accounts', 45 | verify=self.verify, 46 | headers=self.getHeaders(), 47 | params={}, 48 | data=json.dumps(self.data), 49 | functionName='patch account'): 50 | 51 | self.status = True 52 | self.jsondata = self.getResponseData() 53 | return self 54 | 55 | 56 | def getHeaders(self): return {"content-type": "application/json", "Authorization": "Bearer %s" % self.tenant.client.token } 57 | def getURL(self): return self.tenant.client.url -------------------------------------------------------------------------------- /REST APIs/IPMClient/IPMConfigExample.json: -------------------------------------------------------------------------------- 1 | { 2 | "url":"https://MYPROCESSMINING.com", 3 | "userid":"firstname.lastname", 4 | "apikey":"iuiyugk87eqvd1180" 5 | } -------------------------------------------------------------------------------- /REST APIs/IPMClient/IPMDashboard.py: -------------------------------------------------------------------------------- 1 | import IPMBase as ipmb 2 | import IPMWidgets as ipmw 3 | 4 | class Dashboard(ipmb.Base): 5 | def __init__(self, project, jsondata): 6 | ipmb.Base.__init__(self) 7 | self.project = project 8 | self.name = jsondata['name'] 9 | self.id = jsondata['id'] 10 | self.jsondata = jsondata 11 | self.widgets = None 12 | 13 | def getHeaders(self): 14 | return self.project.getHeaders() 15 | 16 | def getURL(self): 17 | return self.project.client.url 18 | 19 | def retrieveWidgets(self): 20 | params = {'org' : self.project.orgkey} 21 | headers = self.getHeaders() 22 | url = "%s/analytics/integration/dashboard/%s/%s/list" % (self.getURL(), self.project.key, self.id) 23 | if self.sendGetRequest( 24 | url=url, 25 | verify=self.verify, 26 | params=params, 27 | headers=headers, 28 | functionName='retrieve widgets' 29 | ): 30 | self.widgets = [] 31 | for jsonWidget in self.getResponseData()['widgets']: 32 | widget = ipmw.Widget(self, jsonWidget) 33 | self.widgets.append(widget) 34 | return self.widgets 35 | 36 | def getWidgets(self): 37 | if self.widgets: 38 | return self.widgets 39 | else: 40 | return self.retrieveWidgets() 41 | 42 | def getWidgetByName(self, name): 43 | widgets = self.getWidgets() 44 | for widget in widgets: 45 | if widget.name == name: 46 | return widget 47 | self._setResponseKO() 48 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/IPMTenant.py: -------------------------------------------------------------------------------- 1 | import IPMBase as ipmb 2 | 3 | class Tenant(ipmb.Base): 4 | def __init__(self, client, jsondata): 5 | ipmb.Base.__init__(self) 6 | self.client = client 7 | self.name = jsondata['name'] 8 | self.key = jsondata['tenantId'] 9 | self.accounts = None 10 | 11 | def getHeaders(self): return {"content-type": "application/json", "Authorization": "Bearer %s" % self.client.token } 12 | def getURL(self): return self.client.url 13 | 14 | # ACCOUNT MANAGEMENT 15 | 16 | def getAccounts(self)->list: 17 | if self.accounts: return self.accounts 18 | return self.client.retrieveAccounts(self) 19 | 20 | def getAccountByUserName(self, username): 21 | accounts = 
self.getAccounts() 22 | for account in accounts: 23 | if account.username == username: 24 | return account 25 | self._setResponseKO() 26 | 27 | def _removeAccount(self, account): 28 | accounts = self.getAccounts() 29 | for index, item in enumerate(accounts): 30 | if item == account: 31 | self.accounts.pop(index) 32 | return account -------------------------------------------------------------------------------- /REST APIs/IPMClient/IPMWidgets.py: -------------------------------------------------------------------------------- 1 | import IPMBase as ipmb 2 | import pandas as pd 3 | from datetime import datetime 4 | 5 | class Widget(ipmb.Base): 6 | def __init__(self, dashboard, jsonWidget): 7 | ipmb.Base.__init__(self) 8 | self.dashboard = dashboard 9 | self.name = jsonWidget['title'] 10 | self.jsondata = jsonWidget 11 | self.values = None 12 | self.dataframe = None 13 | 14 | 15 | def getHeaders(self): 16 | return self.dashboard.getHeaders() 17 | 18 | def getURL(self): 19 | return self.dashboard.project.client.url 20 | 21 | def getValues(self): 22 | if self.values: return self.values 23 | else: return self.retrieveValues() 24 | 25 | def retrieveValues(self): 26 | params = {'org': self.dashboard.project.orgkey} 27 | headers = self.getHeaders() 28 | url = "%s/analytics/integration/dashboard/%s/%s/%s/retrieve" % (self.getURL(), self.dashboard.project.key, self.dashboard.id, self.name) 29 | if self.sendGetRequest(url=url, verify=self.verify, params=params, headers=headers, functionName='retrieve widget values'): 30 | self.values = self.getResponseData() 31 | return self.values 32 | 33 | def toDataFrame(self): 34 | if self.values: 35 | self.dataframe = pd.DataFrame(self.values) 36 | return self.dataframe 37 | 38 | def toCSV(self, filename, replace=True): 39 | if replace: 40 | filename = filename+'.csv' 41 | else: 42 | filename = filename+'_'+datetime.now().strftime("%d_%m_%Y_%H_%M_%S")+'.csv' 43 | self.dataframe.to_csv(filename, index=None) -------------------------------------------------------------------------------- /REST APIs/IPMClient/Nextgen.py: -------------------------------------------------------------------------------- 1 | import IPMClient as ipm 2 | import json 3 | import sys 4 | import os 5 | 6 | def main(argv): 7 | 8 | getConfigFrom = 'FILE' 9 | configFileName = './IPMConfig_nextgen.json' 10 | 11 | # update the configuration with your environment 12 | # retrieve from OS variables 13 | if getConfigFrom == 'OS': 14 | url: str = os.getenv('PM_API_URL') 15 | userid: str = os.getenv('PM_API_USER') 16 | apikey: str = os.getenv('PM_API_KEY') 17 | elif getConfigFrom == 'VARS': 18 | # or update these variables 19 | url = 'PROCESSMININGURL' 20 | userid = 'PROCESSMININGUSERID' 21 | apikey = 'USER_APIKEY' 22 | elif getConfigFrom == 'FILE': 23 | # or load a file that contains JSON config 24 | with open(configFileName, 'r') as file: 25 | config = json.load(file) 26 | url = config['url'] 27 | userid = config['userid'] 28 | apikey = config['apikey'] 29 | 30 | client = ipm.Client(url, userid, apikey) 31 | client.setTrace(True, 1) 32 | project = client.getProjectByName('BAC') 33 | dashboards = project.getDashboards() 34 | dashboard = project.getDashboardByName('test') 35 | widget = dashboard.getWidgetByName('alerts') 36 | values = widget.retrieveValues() 37 | stats1 = project.retrieveModelStatistics() 38 | 39 | with open('json_result_examples/filters.json', 'r') as file: 40 | jsonfilter = json.load(file) 41 | 
filters = jsonfilter['filters'] 42 | stat2 = project.retrieveModelStatistics(filters) 43 | #filter = project.createFilterAttribute('activity', 'Authorization Requested', True, 'ANY') 44 | #stats2 = project.retrieveModelStatistics([filter]) 45 | print('done') 46 | 47 | 48 | if __name__ == "__main__": 49 | main(sys.argv) -------------------------------------------------------------------------------- /REST APIs/IPMClient/Raffaello.py: -------------------------------------------------------------------------------- 1 | import IPMClient as ipm 2 | import json 3 | import sys 4 | import os 5 | 6 | def main(argv): 7 | 8 | getConfigFrom = 'FILE' 9 | configFileName = './IPMConfig_Raffaello.json' 10 | 11 | # update the configuration with your environment 12 | # retrieve from OS variables 13 | if getConfigFrom == 'OS': 14 | url: str = os.getenv('PM_API_URL') 15 | userid: str = os.getenv('PM_API_USER') 16 | apikey: str = os.getenv('PM_API_KEY') 17 | elif getConfigFrom == 'VARS': 18 | # or update these variables 19 | url = 'PROCESSMININGURL' 20 | userid = 'PROCESSMININGUSERID' 21 | apikey = 'USER_APIKEY' 22 | elif getConfigFrom == 'FILE': 23 | # or load a file that contains JSON config 24 | with open(configFileName, 'r') as file: 25 | config = json.load(file) 26 | url = config['url'] 27 | userid = config['userid'] 28 | apikey = config['apikey'] 29 | 30 | client = ipm.Client(url, userid, apikey) 31 | client.setTrace(True, 1) 32 | newAccountJSON = { 33 | "firstName": "prenom", 34 | "lastName": "nom", 35 | "country": "FR", 36 | "email": "prenom.nom@ibm.com", 37 | "agentEnabled": True, 38 | "technicalUser": False, # user can't log into the UI if True. Only for REST API 39 | "active": True, 40 | "password": "Prenom12345!", 41 | "username": "prenom.nom@ibm.com" 42 | } 43 | organization = client.getOrganizationByName('Demo Kit - myInvenio') 44 | tenants = client.getTenants() 45 | client.setCurrentTenant(tenants[0]) 46 | newAccount = client.createAccount(newAccountJSON) 47 | organization.addAccount(newAccount) 48 | 49 | print('done') 50 | 51 | 52 | if __name__ == "__main__": 53 | main(sys.argv) -------------------------------------------------------------------------------- /REST APIs/IPMClient/__pycache__/CognosAnalyticsClient.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/__pycache__/CognosAnalyticsClient.cpython-310.pyc -------------------------------------------------------------------------------- /REST APIs/IPMClient/__pycache__/IPMAccount.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/__pycache__/IPMAccount.cpython-310.pyc -------------------------------------------------------------------------------- /REST APIs/IPMClient/__pycache__/IPMBase.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/__pycache__/IPMBase.cpython-310.pyc -------------------------------------------------------------------------------- /REST APIs/IPMClient/__pycache__/IPMClient.cpython-310.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/__pycache__/IPMClient.cpython-310.pyc -------------------------------------------------------------------------------- /REST APIs/IPMClient/__pycache__/IPMDashboard.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/__pycache__/IPMDashboard.cpython-310.pyc -------------------------------------------------------------------------------- /REST APIs/IPMClient/__pycache__/IPMOrganization.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/__pycache__/IPMOrganization.cpython-310.pyc -------------------------------------------------------------------------------- /REST APIs/IPMClient/__pycache__/IPMProject.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/__pycache__/IPMProject.cpython-310.pyc -------------------------------------------------------------------------------- /REST APIs/IPMClient/__pycache__/IPMTenant.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/__pycache__/IPMTenant.cpython-310.pyc -------------------------------------------------------------------------------- /REST APIs/IPMClient/__pycache__/IPMWidgets.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/__pycache__/IPMWidgets.cpython-310.pyc -------------------------------------------------------------------------------- /REST APIs/IPMClient/acticityStatsFromWidget.csv: -------------------------------------------------------------------------------- 1 | attr-activity,# cases,avg duration,median duration 2 | Request created,116329.0,2.6708261112113796E7,60000.0 3 | BO Service Closure,96306.0,1.8981998641957682E8,1.1008E8 4 | Pending Request for Reservation Closure,94035.0,932217.8232425128,128000.0 5 | Pending Liquidation Request,93683.0,1.0170696969599992E8,7.9243E7 6 | Network Service Closure,93501.0,6.701648622577694E7,148000.0 7 | Request completed with account closure,88424.0,188.41038632045266,0.0 8 | Authorization Requested,45677.0,8.501452610147665E7,2852000.0 9 | Evaluating Request (NO registered letter),41123.0,8.521241563115543E7,2042000.0 10 | Request deleted,13958.0,0.0,0.0 11 | Back-Office Adjustment Requested ,10063.0,5.9311920314471826E7,1.7255E7 12 | Network Adjustment Requested,8275.0,2.6684533072499454E8,2.0527E7 13 | Evaluating Request (WITH registered letter),7115.0,5.735765101897395E7,2972000.0 14 | Pending Request for acquittance of heirs,7094.0,3.815564068226679E7,1.3966E7 15 | Pending Request for Network Information,1338.0,4.259866526857977E8,3.63733E8 16 | Request completed with customer recovery,263.0,0.0,0.0 17 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/activityStats.csv: 
-------------------------------------------------------------------------------- 1 | activityName,frequency,avgDuration,medianDuration,minDuration,maxDuration,caseRepetition,avgRepetition,overallCost 2 | START,0,0,0,0,0,0,0.0,0.0 3 | Request completed with account closure,88424,188,0,0,16660000,0,0.0,44212.0 4 | Back-Office Adjustment Requested ,10303,59311920,17255000,0,1405280000,167,2.437125748502994,82424.0 5 | Pending Request for Reservation Closure,94921,932217,128000,0,1202824000,761,2.164257555847569,47460.5 6 | BO Service Closure,105225,189819986,110080000,0,9628864000,6902,2.2922341350333237,420900.0 7 | Authorization Requested,46415,85014526,2852000,0,12561547000,688,2.072674418604651,130426.14999995955 8 | Network Adjustment Requested,10607,266845330,20527000,0,13315129000,1771,2.3167701863354035,5303.5 9 | Pending Liquidation Request,93684,101706969,79243000,0,2783321000,1,2.0,46842.0 10 | Pending Request for acquittance of heirs,7094,38155640,13966000,0,531118000,0,0.0,35470.0 11 | Request created,116337,26708261,60000,0,14601676000,8,2.0,174505.5 12 | Evaluating Request (NO registered letter),41123,85212415,2042000,0,11227048000,0,0.0,596283.5 13 | Network Service Closure,93508,67016486,148000,0,14041996000,7,2.0,187016.0 14 | Request completed with customer recovery,263,0,0,0,0,0,0.0,131.5 15 | Request deleted,13958,0,0,0,0,0,0.0,6979.0 16 | Evaluating Request (WITH registered letter),7115,57357651,2972000,0,8362130000,0,0.0,69798.15000000261 17 | Pending Request for Network Information,1359,425986652,363733000,0,4532178000,21,2.0,679.5 18 | STOP,0,0,0,0,0,0,0.0,0.0 19 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/activityStatsFromWidget.csv: -------------------------------------------------------------------------------- 1 | attr-activity,# cases,avg duration,median duration 2 | Request completed with account closure,88409.0,0.0,0.0 3 | Pending Request for Reservation Closure,88308.0,725782.8107634813,128000.0 4 | Request created,88236.0,1.5602418555369312E7,61000.0 5 | Pending Liquidation Request,88199.0,8.842668567670773E7,7.9951E7 6 | BO Service Closure,87024.0,2.0089272252138776E8,1.21773E8 7 | Network Service Closure,82324.0,6.249403176241936E7,155000.0 8 | Authorization Requested,41319.0,7.826502158890495E7,2939000.0 9 | Evaluating Request (NO registered letter),35570.0,7.715263472026953E7,2089000.0 10 | Back-Office Adjustment Requested ,8299.0,6.86588543838861E7,3.1034E7 11 | Pending Request for acquittance of heirs,7078.0,3.824189248375238E7,1.40445E7 12 | Network Adjustment Requested,6938.0,2.3435662860671338E8,2.3237E7 13 | Evaluating Request (WITH registered letter),6278.0,5.7749423861102246E7,3039500.0 14 | Pending Request for Network Information,1121.0,3.9516226403508866E8,3.974985E8 15 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/blocked_at_pending_liquidation_request.csv: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/data/Client Onboarding.idp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/data/Client Onboarding.idp -------------------------------------------------------------------------------- /REST APIs/IPMClient/data/Client Onboarding.zip: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/data/Client Onboarding.zip -------------------------------------------------------------------------------- /REST APIs/IPMClient/example.py: -------------------------------------------------------------------------------- 1 | import IPMClient as ipm 2 | import json 3 | import sys 4 | import os 5 | 6 | def main(argv): 7 | 8 | getConfigFrom = 'FILE' 9 | configFileName = 'IPMClient/IPMConfig.json' 10 | 11 | # update the configuration with your environment 12 | # retrieve from OS variables 13 | if getConfigFrom == 'OS': 14 | url: str = os.getenv('PM_API_URL') 15 | userid: str = os.getenv('PM_API_USER') 16 | apikey: str = os.getenv('PM_API_KEY') 17 | elif getConfigFrom == 'VARS': 18 | # or update these variables 19 | url = 'PROCESSMININGURL' 20 | userid = 'PROCESSMININGUSERID' 21 | apikey = 'USER_APIKEY' 22 | elif getConfigFrom == 'FILE': 23 | # or load a file that contains JSON config 24 | with open(configFileName, 'r') as file: 25 | config = json.load(file) 26 | url = config['url'] 27 | userid = config['userid'] 28 | apikey = config['apikey'] 29 | 30 | client = ipm.Client(url, userid, apikey) 31 | client.setTrace(True, 0) 32 | 33 | testOrg2 = client.createOrganization('anOrg2', 'this is anOrg2') 34 | if client.isResponseKO(): return 35 | 36 | testOrg3 = client.getOrganizationByName('anOrg3') 37 | if testOrg3: 38 | client.deleteOrganization(testOrg3) 39 | 40 | testOrg3 = client.createOrganization('anOrg3', 'this is anOrg3') 41 | COProject3 = client.createProject('CO3', testOrg3.key) 42 | COProject3.uploadCSVApplyBackupRunMining('IPMClient/data/Client Onboarding.zip', 'IPMClient/data/Client Onboarding.idp') 43 | 44 | newaccountdata = { 45 | "firstName": "John", 46 | "lastName": "Sam", 47 | "country": "FR", 48 | "email": "john.sam@ibm.com", 49 | "agentEnabled": True, 50 | "technicalUser": False, 51 | "active": True, 52 | "password": "John12345!", 53 | "username": "john.sam" 54 | } 55 | account = client.createAccount(newaccountdata) 56 | testOrg2.addAccount(account) 57 | testOrg3.addAccount(account) 58 | #testOrg2.removeAccount(account) 59 | client.deleteAccount(account) 60 | 61 | 62 | print('done') 63 | 64 | 65 | if __name__ == "__main__": 66 | main(sys.argv) -------------------------------------------------------------------------------- /REST APIs/IPMClient/images/business_performance.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/IPMClient/images/business_performance.jpg -------------------------------------------------------------------------------- /REST APIs/IPMClient/json_result_examples/deviations.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "deviationType": "INSERTED_ACTIVITY", 4 | "activityName": "Network Adjustment Requested", 5 | "caseCount": 235, 6 | "caseRatio": 0.04583577140628048, 7 | "steps": 9.4, 8 | "throughput": 2965027412.765956, 9 | "cost": 17.34370212765958 10 | }, 11 | { 12 | "deviationType": "INSERTED_ACTIVITY", 13 | "activityName": "Back-Office Adjustment Requested ", 14 | "caseCount": 586, 15 | "caseRatio": 0.11429685976204408, 16 | "steps": 8.100682593856662, 17 | "throughput": 1384646122.866893, 18 | "cost": 19.99723549488053 19 | }, 20 | { 21 | "deviationType": 
"INSERTED_START_ACTIVITY", 22 | "activityName": "Network Service Closure", 23 | "caseCount": 2, 24 | "caseRatio": 0.0003900916715428126, 25 | "steps": 6.0, 26 | "throughput": 1077619000.0, 27 | "cost": 10.31 28 | }, 29 | { 30 | "deviationType": "INSERTED_RELATION", 31 | "startActivity": "Pending Request for Reservation Closure", 32 | "endActivity": "Request completed with account closure", 33 | "caseCount": 5, 34 | "caseRatio": 0.0009752291788570314, 35 | "steps": 6.4, 36 | "throughput": 587166600.0, 37 | "cost": 11.51 38 | }, 39 | { 40 | "deviationType": "INSERTED_RELATION", 41 | "startActivity": "Pending Request for Reservation Closure", 42 | "endActivity": "Pending Request for Reservation Closure", 43 | "caseCount": 1, 44 | "caseRatio": 0.0001950458357714063, 45 | "steps": 8.0, 46 | "throughput": 922615000.0, 47 | "cost": 12.31 48 | }, 49 | { 50 | "deviationType": "INSERTED_RELATION", 51 | "startActivity": "Authorization Requested", 52 | "endActivity": "Pending Request for Reservation Closure", 53 | "caseCount": 21, 54 | "caseRatio": 0.004095962551199532, 55 | "steps": 6.142857142857143, 56 | "throughput": 995399142.8571428, 57 | "cost": 9.238571428571431 58 | }, 59 | { 60 | "deviationType": "INSERTED_RELATION", 61 | "startActivity": "BO Service Closure", 62 | "endActivity": "Pending Liquidation Request", 63 | "caseCount": 4, 64 | "caseRatio": 0.0007801833430856252, 65 | "steps": 6.0, 66 | "throughput": 669821750.0, 67 | "cost": 11.31 68 | }, 69 | { 70 | "deviationType": "INSERTED_RELATION", 71 | "startActivity": "Request created", 72 | "endActivity": "Authorization Requested", 73 | "caseCount": 11, 74 | "caseRatio": 0.0021455041934854693, 75 | "steps": 6.1818181818181825, 76 | "throughput": 896791000.0, 77 | "cost": 10.582727272727272 78 | } 79 | ] -------------------------------------------------------------------------------- /REST APIs/IPMClient/json_result_examples/filers2.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "filterType": "attribute", 4 | "label": "Activity is \"Goods Received\"", 5 | "attributeValue": "Goods Received", 6 | "attributeKey": "process", 7 | "matchCases": true, 8 | "timeSpanFrom": "2017-01-13", 9 | "matchingRule": "ANY", 10 | "timeSpanTo": "2022-01-13", 11 | "operator": "LT", 12 | "timeUnit": "DAYS", 13 | "timeValue": 1, 14 | "inclusiveTimeSpan": true 15 | }, 16 | { 17 | "filterType": "kpi", 18 | "label": "Retain cases within KPI boundaries (Activity wait time,Activity service time)", 19 | "kpis": [ 20 | "service-range", 21 | "waittime-range" 22 | ], 23 | "retainCritical": false 24 | }, 25 | { 26 | "filterType": "pattern", 27 | "label": ".* path time under 4 days", 28 | "attributeKey": "activity", 29 | "pattern": ".*", 30 | "matchCases": true, 31 | "matchingRule": "ANY", 32 | "operator": "LT", 33 | "timeValue": 4, 34 | "timeUnit": "DAYS" 35 | }, 36 | { 37 | "filterType": "outliers", 38 | "label": "Include Outliers", 39 | "matchCases": true, 40 | "includeServiceTime": true, 41 | "includeWaitTime": true, 42 | "includeLeadTime": true, 43 | "range": "both", 44 | "activity": null, 45 | "outlierLevel": 2.5 46 | }, 47 | { 48 | "filterType": "parallelism", 49 | "label": "Include parallelism between Invoice Check and Wait", 50 | "sourceActivity": "Invoice Check", 51 | "targetActivity": "Wait", 52 | "matchCases": true 53 | }, 54 | { 55 | "filterType": "relation", 56 | "label": "Invoice Check directly follows Wait", 57 | "sourceActivity": "Invoice Check", 58 | "targetActivity": "Wait", 59 | "matchCases": true 60 | 
}, 61 | { 62 | "filterType": "rework", 63 | "label": "Rework of Invoice Check", 64 | "sourceActivity": "Invoice Check", 65 | "matchCases": true 66 | }, 67 | { 68 | "filterType": "throughput", 69 | "label": "Case lead time under 86400000 MILLISECONDS", 70 | "operator": "LTE", 71 | "timeUnit": "MILLISECONDS", 72 | "timeValue": 86400000, 73 | "matchCases": true 74 | }, 75 | { 76 | "filterType": "timespan", 77 | "label": " from Sep 14, 2016 until Sep 15, 2016", 78 | "startTime": "2016-09-14T00:00:00Z", 79 | "endTime": "2016-09-14T23:59:59Z", 80 | "inclusiveTimeSpan": false, 81 | "matchCases": true 82 | }, 83 | { 84 | "filterType": "variant", 85 | "variant": "1578805807", 86 | "label": "NewVariant", 87 | "matchCases": true 88 | }, 89 | { 90 | "filterType": "running", 91 | "label": "Exclude running cases", 92 | "excludeCases": "running" 93 | }, 94 | { 95 | "filterType": "simulation", 96 | "label": "Exclude simulated cases", 97 | "excludeCases": "simulated" 98 | }, 99 | { 100 | "filterType": "conformant", 101 | "label": "Exclude non conformant cases", 102 | "excludeCases": "nonConformance" 103 | }, 104 | { 105 | "filterType": "customMetric", 106 | "label": "metricName = Pending Liquidation Request", 107 | "customMetricName": "metricName", 108 | "operator": "E", 109 | "customMetricValue": "Pending Liquidation Request", 110 | "matchCases": true 111 | } 112 | ] -------------------------------------------------------------------------------- /REST APIs/IPMClient/json_result_examples/filter-templates.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "templateId": "65965cee9b06410ce8355e7a", 4 | "name": "No Sim", 5 | "filters": [ 6 | { 7 | "filterType": "running", 8 | "label": "Exclude running cases", 9 | "excludeCases": "running" 10 | }, 11 | { 12 | "filterType": "simulation", 13 | "label": "Exclude simulated cases", 14 | "excludeCases": "simulated" 15 | } 16 | ], 17 | "owner": "task.miner", 18 | "shared": true 19 | }, 20 | { 21 | "templateId": "65965cee9b06410ce8355e7b", 22 | "name": "Only Sim", 23 | "filters": [ 24 | { 25 | "filterType": "running", 26 | "label": "Exclude running cases", 27 | "excludeCases": "running" 28 | }, 29 | { 30 | "filterType": "simulation", 31 | "label": "Exclude as-is cases", 32 | "excludeCases": "asIs" 33 | } 34 | ], 35 | "owner": "task.miner", 36 | "shared": true 37 | }, 38 | { 39 | "templateId": "65965cee9b06410ce8355e7c", 40 | "name": "Branch 6", 41 | "filters": [ 42 | { 43 | "filterType": "running", 44 | "label": "Exclude running cases", 45 | "excludeCases": "running" 46 | }, 47 | { 48 | "filterType": "attribute", 49 | "label": "INSTITUTE_NR is \"6\"", 50 | "attributeKey": "INSTITUTE_NR", 51 | "attributeValue": "6", 52 | "matchCases": true, 53 | "matchingRule": "ANY", 54 | "inclusiveTimeSpan": false 55 | } 56 | ], 57 | "owner": "task.miner", 58 | "shared": true 59 | }, 60 | { 61 | "templateId": "65965cee9b06410ce8355e7d", 62 | "name": "As-Is", 63 | "filters": [ 64 | { 65 | "filterType": "simulation", 66 | "label": "Exclude simulated cases", 67 | "excludeCases": "simulated" 68 | } 69 | ], 70 | "owner": "task.miner", 71 | "shared": false 72 | }, 73 | { 74 | "templateId": "65965cee9b06410ce8355e7e", 75 | "name": "As Is", 76 | "filters": [ 77 | { 78 | "filterType": "simulation", 79 | "label": "Exclude simulated cases", 80 | "excludeCases": "simulated" 81 | } 82 | ], 83 | "owner": "task.miner", 84 | "shared": false 85 | }, 86 | { 87 | "templateId": "65df194f83b8cc46751ccb94", 88 | "name": "To-Be", 89 | "filters": [ 90 | { 
91 | "filterType": "simulation", 92 | "label": "Exclude as-is cases", 93 | "excludeCases": "asIs" 94 | } 95 | ], 96 | "owner": "task.miner", 97 | "shared": false 98 | }, 99 | { 100 | "templateId": "65df1acba619fe3260f1f042", 101 | "name": "clientlost", 102 | "filters": [ 103 | { 104 | "filterType": "attribute", 105 | "label": "CLOSURE_REASON is \"1 - Client lost\"", 106 | "attributeKey": "CLOSURE_REASON", 107 | "attributeValue": "1 - Client lost", 108 | "matchCases": true, 109 | "matchingRule": "ANY", 110 | "inclusiveTimeSpan": false 111 | } 112 | ], 113 | "owner": "task.miner", 114 | "shared": false 115 | }, 116 | { 117 | "templateId": "65df1afca619fe3260f1f196", 118 | "name": "relationschanged", 119 | "filters": [ 120 | { 121 | "filterType": "attribute", 122 | "label": "CLOSURE_REASON is \"6 - Relationship changed\"", 123 | "attributeKey": "CLOSURE_REASON", 124 | "attributeValue": "6 - Relationship changed", 125 | "matchCases": true, 126 | "matchingRule": "ANY", 127 | "inclusiveTimeSpan": false 128 | } 129 | ], 130 | "owner": "task.miner", 131 | "shared": false 132 | }, 133 | { 134 | "templateId": "65df2646a619fe3260f202d6", 135 | "name": "top5", 136 | "filters": [ 137 | { 138 | "filterType": "variant", 139 | "label": "top5", 140 | "variant": "1311839531|-442488819|-1361806382|358704624|-587405032", 141 | "matchCases": true 142 | } 143 | ], 144 | "owner": "task.miner", 145 | "shared": false 146 | } 147 | ] -------------------------------------------------------------------------------- /REST APIs/IPMClient/json_result_examples/filters-OR.json: -------------------------------------------------------------------------------- 1 | { 2 | "filters": [ 3 | { 4 | "filterType": "attribute", 5 | "label": "CLOSURE_REASON is \"1 - Client lost\", \"2 - Keep bank account. Same dip\"", 6 | "attributeKey": "CLOSURE_REASON", 7 | "attributeValue": "1 - Client lost|2 - Keep bank account. 
Same dip", 8 | "matchCases": true, 9 | "matchingRule": "ANY", 10 | "inclusiveTimeSpan": false 11 | } 12 | ] 13 | } -------------------------------------------------------------------------------- /REST APIs/IPMClient/json_result_examples/filters.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "filterType": "running", 4 | "label": "Exclude running cases", 5 | "excludeCases": "running" 6 | }, 7 | { 8 | "filterType": "attribute", 9 | "label": "CLOSURE_REASON is \"1 - Client lost\"", 10 | "attributeKey": "CLOSURE_REASON", 11 | "attributeValue": "1 - Client lost", 12 | "matchCases": true, 13 | "matchingRule": "ANY", 14 | "inclusiveTimeSpan": false 15 | }, 16 | { 17 | "filterType": "attribute", 18 | "label": "Activity is \"Authorization Requested\"", 19 | "attributeKey": "activity", 20 | "attributeValue": "Authorization Requested", 21 | "matchCases": true, 22 | "matchingRule": "ANY", 23 | "inclusiveTimeSpan": false 24 | }, 25 | { 26 | "filterType": "customMetric", 27 | "label": "countActivities > 5", 28 | "matchCases": true, 29 | "customMetricName": "COUNTACTIVITIES", 30 | "customMetricDataType": "DOUBLE", 31 | "operator": "GT", 32 | "customMetricValue": "5" 33 | } 34 | ] -------------------------------------------------------------------------------- /REST APIs/IPMClient/json_result_examples/kpi-settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "process": { 3 | "kpis": [ 4 | { 5 | "name": "case-throughput", 6 | "warningThreshold": 1.0, 7 | "criticalThreshold": 19.0, 8 | "warningThresholdUnit": "DAYS", 9 | "criticalThresholdUnit": "DAYS", 10 | "unitType": "timestamp" 11 | }, 12 | { 13 | "name": "case-cost", 14 | "warningThreshold": 16.0, 15 | "criticalThreshold": 17.0, 16 | "warningThresholdUnit": "MONEY", 17 | "criticalThresholdUnit": "MONEY", 18 | "unitType": "double" 19 | } 20 | ] 21 | }, 22 | "activities": [ 23 | { 24 | "activityName": "default", 25 | "kpis": [ 26 | { 27 | "name": "activity-throughput", 28 | "warningThreshold": 1.0, 29 | "criticalThreshold": 2.0, 30 | "warningThresholdUnit": "DAYS", 31 | "criticalThresholdUnit": "DAYS", 32 | "unitType": "timestamp" 33 | }, 34 | { 35 | "name": "activity-duration", 36 | "warningThreshold": 1.0, 37 | "criticalThreshold": 2.0, 38 | "warningThresholdUnit": "DAYS", 39 | "criticalThresholdUnit": "DAYS", 40 | "unitType": "timestamp" 41 | }, 42 | { 43 | "name": "activity-wait-queue", 44 | "warningThreshold": 1.0, 45 | "criticalThreshold": 2.0, 46 | "warningThresholdUnit": "DAYS", 47 | "criticalThresholdUnit": "DAYS", 48 | "unitType": "timestamp" 49 | }, 50 | { 51 | "name": "resource-allocation", 52 | "warningThreshold": 0.33, 53 | "criticalThreshold": 0.66, 54 | "warningThresholdUnit": "percent", 55 | "criticalThresholdUnit": "percent", 56 | "unitType": "double" 57 | } 58 | ] 59 | }, 60 | { 61 | "activityName": "BO Service Closure", 62 | "kpis": [ 63 | { 64 | "name": "activity-throughput", 65 | "warningThreshold": 24.0, 66 | "criticalThreshold": 30.0, 67 | "warningThresholdUnit": "HOURS", 68 | "criticalThresholdUnit": "HOURS", 69 | "unitType": "timestamp" 70 | }, 71 | { 72 | "name": "activity-duration", 73 | "warningThreshold": 20.0, 74 | "criticalThreshold": 30.0, 75 | "warningThresholdUnit": "HOURS", 76 | "criticalThresholdUnit": "HOURS", 77 | "unitType": "timestamp" 78 | }, 79 | { 80 | "name": "activity-wait-queue", 81 | "warningThreshold": 1.0, 82 | "criticalThreshold": 2.0, 83 | "warningThresholdUnit": "DAYS", 84 | 
"criticalThresholdUnit": "DAYS", 85 | "unitType": "timestamp" 86 | }, 87 | { 88 | "name": "resource-allocation", 89 | "warningThreshold": 0.33, 90 | "criticalThreshold": 0.66, 91 | "warningThresholdUnit": "percent", 92 | "criticalThresholdUnit": "percent", 93 | "unitType": "double" 94 | } 95 | ] 96 | } 97 | ] 98 | } -------------------------------------------------------------------------------- /REST APIs/IPMClient/json_result_examples/project-settings activities-cost.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "activity": "Pending Request for acquittance of heirs", 4 | "cost": 2.0, 5 | "type": "MANUAL", 6 | "endDate": null 7 | }, 8 | { 9 | "activity": "BO Service Closure", 10 | "cost": 1.0, 11 | "type": "MANUAL", 12 | "endDate": null 13 | }, 14 | { 15 | "activity": "BO Service Closure", 16 | "cost": 0.2, 17 | "type": "AUTOMATIC", 18 | "endDate": null 19 | }, 20 | { 21 | "activity": "Back-Office Adjustment Requested ", 22 | "cost": 2.0, 23 | "type": "MANUAL", 24 | "endDate": null 25 | }, 26 | { 27 | "activity": "Network Service Closure", 28 | "cost": 0.1, 29 | "type": "AUTOMATIC", 30 | "endDate": null 31 | }, 32 | { 33 | "activity": "__DEFAULT__", 34 | "cost": 0.5, 35 | "type": "BOTH", 36 | "endDate": null 37 | } 38 | ] -------------------------------------------------------------------------------- /REST APIs/IPMClient/json_result_examples/project-settings activities-working-time.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "activity": "Authorization Requested", 4 | "type": "BOTH", 5 | "value": 0.33, 6 | "endDate": null 7 | }, 8 | { 9 | "activity": "BO Service Closure", 10 | "type": "BOTH", 11 | "value": 1.0, 12 | "endDate": null 13 | }, 14 | { 15 | "activity": "BO Service Closure", 16 | "type": "AUTOMATIC", 17 | "value": 0.016666666666667, 18 | "endDate": null 19 | }, 20 | { 21 | "activity": "Evaluating Request (WITH registered letter)", 22 | "type": "BOTH", 23 | "value": 1.33, 24 | "endDate": null 25 | }, 26 | { 27 | "activity": "Pending Request for acquittance of heirs", 28 | "type": "BOTH", 29 | "value": 1.0, 30 | "endDate": null 31 | }, 32 | { 33 | "activity": "Back-Office Adjustment Requested ", 34 | "type": "BOTH", 35 | "value": 2.0, 36 | "endDate": null 37 | }, 38 | { 39 | "activity": "Evaluating Request (NO registered letter)", 40 | "type": "BOTH", 41 | "value": 2.0, 42 | "endDate": null 43 | }, 44 | { 45 | "activity": "Request created", 46 | "type": "BOTH", 47 | "value": 1.0, 48 | "endDate": null 49 | }, 50 | { 51 | "activity": "Network Service Closure", 52 | "type": "BOTH", 53 | "value": 1.5, 54 | "endDate": null 55 | }, 56 | { 57 | "activity": "__DEFAULT__", 58 | "type": "BOTH", 59 | "value": 0.0, 60 | "endDate": null 61 | } 62 | ] -------------------------------------------------------------------------------- /REST APIs/IPMClient/json_result_examples/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "computeDurationInBusinessHours": true, 3 | "excludeWeekends": true, 4 | "businessHours": "8-18", 5 | "selectedCalendars": [], 6 | "automatedFieldTruthValues": "Robot", 7 | "automatedActivityAttribute": "attr-custom-AUTOMATIC", 8 | "customExcludedFields": [ 9 | "attr-custom-AUTOMATIC", 10 | "attr-custom-CE_STATE", 11 | "attr-custom-INSTITUTE_NR" 12 | ], 13 | "timeZoneOffset": "+0000", 14 | "defaultPage": "Model", 15 | "displayReferenceActivities": false, 16 | "keepDefinedRoles": true, 17 | "defaultMetric": 
"avg", 18 | "projectCurrency": "EUR", 19 | "enableRunningTime": true, 20 | "modelRelationDetails": 1, 21 | "modelActivityDetail": "100", 22 | "blueWorksliveProcess": null, 23 | "defaultBucketLimit": 2048 24 | } -------------------------------------------------------------------------------- /REST APIs/IPMClient/json_result_examples/status.json: -------------------------------------------------------------------------------- 1 | { 2 | "status": "READY", 3 | "updatedWithParent": true, 4 | "lastUpdateWithParentTs": 1709119802017, 5 | "message": "Event log updated on 2024-01-04T07:24:25.854Z" 6 | } -------------------------------------------------------------------------------- /REST APIs/IPMClient/processStats.csv: -------------------------------------------------------------------------------- 1 | minThroughputTime,maxThroughputTime,avgThroughputTime,stdThroughputTime,medianThroughputTime,minArrivalRate,maxArrivalRate,avgArrivalRate,minTime,maxTime,filteredCases,filteredEvents,totalCases,totalEvents 2 | 0,50103159000,1611148418,2659370632,902080000,1,675,222.87954110898667,1492694540000,1551906966000,116566,730336,116566,730336 3 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/setupTechzone.py: -------------------------------------------------------------------------------- 1 | import IPMClient as ipm 2 | import pandas as pd 3 | import json 4 | 5 | ipmConfig = { 6 | "url":"https://pharoses1.fyre.ibm.com", 7 | "userid":"task.miner", 8 | "apikey":"8a5kga87eqvd1180" 9 | } 10 | 11 | 12 | ipmConfig = { 13 | "url":"https://useast.services.cloud.techzone.ibm.com:27464", 14 | "userid":"maintenance.admin", 15 | "apikey":"k0rea0pg4c6ro2nq" 16 | } 17 | 18 | ipmConfig = { 19 | "url":"https://useast.services.cloud.techzone.ibm.com:46936", 20 | "userid":"maintenance.admin", 21 | "apikey":"k0rea0pg4c6ro2nq" 22 | } 23 | 24 | ipmConfig = { 25 | "url":"https://useast.services.cloud.techzone.ibm.com:41330", 26 | "userid":"maintenance.admin", 27 | "apikey":"k0rea0pg4c6ro2nq" 28 | } 29 | 30 | ipmConfig = { 31 | "url":"https://useast.services.cloud.techzone.ibm.com:43997", 32 | "userid":"maintenance.admin", 33 | "apikey":"k0rea0pg4c6ro2nq" 34 | } 35 | 36 | 37 | 38 | ipmClient = ipm.Client(ipmConfig['url'], ipmConfig['userid'], ipmConfig['apikey']) 39 | co = ipmClient.getProjectByName('CO') 40 | if co: 41 | ipmClient.deleteProject(co) 42 | org = ipmClient.getOrganizationByName('Finance') 43 | co = ipmClient.createProject(org, 'CO') 44 | co.uploadCSVApplyBackupRunMining('IPMClient/data/Client Onboarding.zip', 'IPMClient/data/Client Onboarding.idp') 45 | 46 | ## Manage users 47 | groups = ipmClient.retrieveGroups() 48 | owners = ipmClient.getGroupByName(groups, 'Owners') 49 | users = [ 50 | {"firstName": "User1", 51 | "lastName": "Lab", 52 | "country": "FR", 53 | "email": "User1.Lab@ibm.com", 54 | "agentEnabled": True, 55 | "technicalUser": False, 56 | "active": True, 57 | "password": "Passw0rd!", 58 | "username": "user1.lab"}, 59 | {"firstName": "User2", 60 | "lastName": "Lab", 61 | "country": "FR", 62 | "email": "User2.Lab@ibm.com", 63 | "agentEnabled": True, 64 | "technicalUser": False, 65 | "active": True, 66 | "password": "Passw0rd!", 67 | "username": "user2.lab"}, 68 | {"firstName": "User3", 69 | "lastName": "Lab", 70 | "country": "FR", 71 | "email": "User3.Lab@ibm.com", 72 | "agentEnabled": True, 73 | "technicalUser": False, 74 | "active": True, 75 | "password": "Passw0rd!", 76 | "username": "user3.lab"}, 77 | {"firstName": "User4", 78 | "lastName": 
"Lab", 79 | "country": "FR", 80 | "email": "User4.Lab@ibm.com", 81 | "agentEnabled": True, 82 | "technicalUser": False, 83 | "active": True, 84 | "password": "Passw0rd!", 85 | "username": "user4.lab"}, 86 | {"firstName": "User5", 87 | "lastName": "Lab", 88 | "country": "FR", 89 | "email": "User5.Lab@ibm.com", 90 | "agentEnabled": True, 91 | "technicalUser": False, 92 | "active": True, 93 | "password": "Passw0rd!", 94 | "username": "user5.lab"}, 95 | {"firstName": "User6", 96 | "lastName": "Lab", 97 | "country": "FR", 98 | "email": "User6.Lab@ibm.com", 99 | "agentEnabled": True, 100 | "technicalUser": False, 101 | "active": True, 102 | "password": "Passw0rd!", 103 | "username": "user6.lab"}, 104 | {"firstName": "User7", 105 | "lastName": "Lab", 106 | "country": "FR", 107 | "email": "User7.Lab@ibm.com", 108 | "agentEnabled": True, 109 | "technicalUser": False, 110 | "active": True, 111 | "password": "Passw0rd!", 112 | "username": "user7.lab"} 113 | ] 114 | for user in users: 115 | account = ipmClient.createAccount(user) 116 | ipmClient.addAccountToGroup(account, owners['groupId']) 117 | org.addAccount(account) 118 | 119 | print('DONE: set the dashboards to Shared') -------------------------------------------------------------------------------- /REST APIs/IPMClient/test.py: -------------------------------------------------------------------------------- 1 | import IPMClient as ipm 2 | import json 3 | import sys 4 | import os 5 | 6 | def main(argv): 7 | 8 | getConfigFrom = 'FILE' 9 | configFileName = 'IPMClient/IPMConfig.json' 10 | 11 | # update the clienturation with your environment 12 | # retrieve from OS variables 13 | if getConfigFrom == 'OS': 14 | url :str= os.getenv('PM_API_URL') 15 | userid :str = os.getenv('PM_API_USER') 16 | apikey :str =os.getenv('PM_API_KEY') 17 | elif getConfigFrom == 'VARS': 18 | # or update these variables 19 | url = 'PROCESSMININGURL' 20 | userid = 'PROCESSMINGUSERID' 21 | apikey= 'USER_APIKEY' 22 | elif getConfigFrom == 'FILE': 23 | # or load a file that contains JSON config 24 | with open(configFileName, 'r') as file: 25 | config = json.load(file) 26 | url = config['url'] 27 | userid = config['userid'] 28 | apikey = config['apikey'] 29 | 30 | client = ipm.Client(url, userid, apikey) 31 | project = client.getProjectByKey('bank-account-closure') 32 | svg = project.retrieveSVG() 33 | with open('mySVG.svg', 'w') as file: 34 | file.write(svg) 35 | file.close() 36 | 37 | project._dumpToFile = True 38 | #result = project.retrieveKPISettings() 39 | #result = project.retrieveModelStatistics() 40 | #result = project.retrieveDeviations() 41 | #result = project.retrieveFilters() 42 | #project.dumpJsonToFile(result, 'filters') 43 | 44 | #result = project.retrieveTemplates() 45 | #project.retrieveStatus() 46 | #result = project.retrieveVariants() 47 | #result = project.retrieveSettings() 48 | #r = project.retrieveSettingsActivityCost() 49 | #r = project.retrieveSettingsActivityWorkingTime() 50 | #project.dumpJsonToFile(r, 'project-settings activities-working-time') 51 | with open('json_result_examples/filters.json', 'r') as file: 52 | filters = json.load(file) 53 | #stat2 = project.retrieveModelStatistics(filters) 54 | # To apply a transient filter, create an ARRAY of JSON (filters) see examples 55 | #stats1 = project.retrieveModelStatistics() 56 | #ActStats = project.getActivityStatistics(stats1) 57 | #BOStats = project.getActivityStatistics(stats1,'BO Service Closure') 58 | #TransStats = project.getTransitionStatistics(stats1) 59 | #StartStats = 
project.getTransitionStatistics(stats1, 'START', 'Request created') 60 | #processStats = project.getProcessStatistics(stats1) 61 | #stats2 = project.retrieveModelStatistics([filter]) 62 | #diff = stats1['processAnalysis']['filteredCases'] - stats2['processAnalysis']['filteredCases'] 63 | #deviations = project.retrieveDeviations() 64 | #deviations = project.retrieveDeviations(filters) 65 | #kpis = project.retrieveKpiStatus() 66 | #kpis = project.retrieveKpiStatus(filters) 67 | #customMetrics = project.retrieveCustomMetrics() 68 | # ERROR: project.setActivityCost returns 400 69 | #result = project.setActivityCost('BO Service Closure', 100, 'Manual', '2019-11-14T00:00:00.000Z') 70 | ''' 71 | groups = client.retrieveGroups() 72 | owners = client.getGroupByName(groups, 'Owners') 73 | 74 | accountData = { 75 | "firstName": "John", 76 | "lastName": "Sam", 77 | "country": "IN", 78 | "email": "john.sam@ibm.com", 79 | "agentEnabled": True, 80 | "technicalUser": False, 81 | "active": True, 82 | "password": "John12345!", 83 | "username": "john.sam" 84 | } 85 | 86 | account = client.createAccount(accountData) 87 | client.addAccountToGroup(account, owners['groupId']) 88 | org1 = client.getOrganizationByName('myBank') 89 | org2 = client.getOrganizationByName('MAPFRE') 90 | org1.addAccount(account) 91 | org2.addAccount(account) 92 | client.deleteAccount(account) 93 | ''' 94 | print('done') 95 | 96 | 97 | if __name__ == "__main__": 98 | main(sys.argv) -------------------------------------------------------------------------------- /REST APIs/IPMClient/testIPMClient.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import IPMClient as ipm\n", 10 | "import json\n", 11 | "import sys\n", 12 | "import os\n", 13 | "import pandas as pd\n", 14 | "\n", 15 | "\n", 16 | "getConfigFrom = 'FILE'\n", 17 | "configFileName = './IPMConfig.json'\n", 18 | "\n", 19 | "# update the configuration with your environment\n", 20 | "# retrieve from OS variables\n", 21 | "if getConfigFrom == 'OS':\n", 22 | "    url: str = os.getenv('PM_API_URL')\n", 23 | "    userid: str = os.getenv('PM_API_USER')\n", 24 | "    apikey: str = os.getenv('PM_API_KEY')\n", 25 | "elif getConfigFrom == 'VARS':\n", 26 | "# or update these variables\n", 27 | "    url = 'PROCESSMININGURL'\n", 28 | "    userid = 'PROCESSMININGUSERID'\n", 29 | "    apikey = 'USER_APIKEY'\n", 30 | "elif getConfigFrom == 'FILE':\n", 31 | "# or load a file that contains JSON config\n", 32 | "    with open(configFileName, 'r') as file:\n", 33 | "        config = json.load(file)\n", 34 | "        url = config['url']\n", 35 | "        userid = config['userid']\n", 36 | "        apikey = config['apikey']\n", 37 | "\n", 38 | "client = ipm.Client(url, userid, apikey)\n", 39 | "client.setTrace(True, 0)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": null, 45 | "metadata": {}, 46 | "outputs": [], 47 | "source": [ 48 | "project = client.getProjectByKey('bank-account-closure')\n", 49 | "stats = project.retrieveModelStatistics()\n", 50 | "project.getTransitionStatistics(stats)" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "project.getActivityStatistics(stats)" 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": null, 65 | "metadata": {}, 66 | "outputs": [], 67 | "source": [ 68 | "project.getProcessStatistics(stats)" 69 | ] 70 | }, 
71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "project.retrieveDeviations()" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "dashboard = project.getDashboardByName('test REST API')\n", 87 | "widget = dashboard.getWidgetByName('porting-cases-with-deviation')\n", 88 | "widget.retrieveValues()\n", 89 | "widget.toDataFrame()\n", 90 | "widget.toCSV('test-widget-table-1.csv', True)\n" 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": {}, 96 | "source": [ 97 | "## Create a project, load data" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": null, 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "org1 = client.getOrganizationByName('myBank')\n", 107 | "COProject = client.createProject(name='CO', organization=org1)\n" 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": null, 113 | "metadata": {}, 114 | "outputs": [], 115 | "source": [ 116 | "\n", 117 | "COProject.uploadCSVApplyBackupRunMining('./data/Client Onboarding.zip', './data/Client Onboarding.idp')\n" 118 | ] 119 | } 120 | ], 121 | "metadata": { 122 | "kernelspec": { 123 | "display_name": "Python 3", 124 | "language": "python", 125 | "name": "python3" 126 | }, 127 | "language_info": { 128 | "codemirror_mode": { 129 | "name": "ipython", 130 | "version": 3 131 | }, 132 | "file_extension": ".py", 133 | "mimetype": "text/x-python", 134 | "name": "python", 135 | "nbconvert_exporter": "python", 136 | "pygments_lexer": "ipython3", 137 | "version": "3.10.5" 138 | } 139 | }, 140 | "nbformat": 4, 141 | "nbformat_minor": 2 142 | } 143 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/testPermissionsOrgandTenant.py: -------------------------------------------------------------------------------- 1 | import IPMClient as ipm 2 | import pandas as pd 3 | import json 4 | 5 | 6 | 7 | ipmConfig = { 8 | "url":"https://pm-patrick-process-miner.fyre.ibm.com", 9 | "userid":"task.miner", 10 | "apikey":"d8org223anqr2kvu" 11 | } 12 | 13 | ipmConfig = { 14 | "url":"https://pm-patrick-process-miner.fyre.ibm.com", 15 | "userid":"maintenance.admin", 16 | "apikey":"k5siu71a93c61asf" 17 | } 18 | 19 | ipmConfig = { 20 | "url":"https://pharoses1.fyre.ibm.com", 21 | "userid":"task.miner", 22 | "apikey":"8a5kga87eqvd1180" 23 | } 24 | 25 | 26 | 27 | ipmClient = ipm.Client(ipmConfig['url'], ipmConfig['userid'], ipmConfig['apikey']) 28 | 29 | 30 | CO = ipmClient.createProject('CO', '') 31 | CO.uploadCSVApplyBackupRunMining('IPMClient/data/Client Onboarding.zip', 'IPMClient/data/Client Onboarding.idp') 32 | CO_ADMIN_NO_ORG = ipmClient.getProjectByName('CO_ADMIN_NO_ORG') 33 | dashboards = CO.retrieveDashboards() 34 | stats = CO.retrieveModelStatistics() 35 | 36 | tenants = ipmClient.retrieveTenants() 37 | accounts = ipmClient.retrieveAccounts(tenants[1]) # not permitted for Owners only for admin 38 | organizations = ipmClient.retrieveOrganizations() 39 | CO = ipmClient.getProjectByName('CO') 40 | if CO: 41 | ipmClient.deleteProject(CO) 42 | user = ipmClient.getAccountByUserName('user1.lab') 43 | if not user: 44 | user = ipmClient.createAccount( 45 | {"firstName": "User1", 46 | "lastName": "Lab", 47 | "country": "FR", 48 | "email": "User1.Lab@ibm.com", 49 | "agentEnabled": True, 50 | "technicalUser": False, 51 | "active": True, 52 | "password": "Passw0rd!", 53 | 
"username": "user1.lab"}) 54 | if user: 55 | ipmClient.deleteAccount(user) 56 | print('done') 57 | ''' 58 | ipmClient = ipm.Client(ipmConfig['url'], ipmConfig['userid'], ipmConfig['apikey']) 59 | ipmClient.retrieveProjects() 60 | groups = ipmClient.retrieveGroups() 61 | MultiTenantAdministrators = ipmClient.getGroupByName(groups, 'MultiTenantAdministrators') 62 | Owners = ipmClient.getGroupByName(groups, 'Owners') 63 | 64 | accounts=ipmClient.retrieveAccounts() 65 | ''' 66 | 67 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/testVariants.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import json 3 | import IPMClient as ipm 4 | import os 5 | 6 | os.chdir('IPMClient') 7 | 8 | ipmConfigFilename = './IPMConfig.json' 9 | with open(ipmConfigFilename, 'r') as file: 10 | ipmConfig = json.load(file) 11 | 12 | ipmClient = ipm.Client(ipmConfig['url'], ipmConfig['userid'], ipmConfig['apikey']) 13 | ipmProject = ipmClient.getProjectByName('Bank Account Closure') 14 | res = ipmProject.retrieveSettingsActivityCost() 15 | print(res) 16 | #variants = ipmProject.retrieveVariants(300) 17 | #df = pd.DataFrame(variants) 18 | #df.keys() 19 | #df=df[['steps', 'id', 'frequency', 20 | # 'subProcessFrequency', 'ratio', 'avgDuration', 'activityNames', 21 | # 'minTime', 'maxTime', 'totalCost', 'avgCost']] 22 | #df.to_csv('allVariants.csv', index=None) 23 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/transitionStats.csv: -------------------------------------------------------------------------------- 1 | sourceActivity,targetActivity,frequency,avgDuration,medianDuration,minDuration,maxDuration,parallelFrequency,caseRepetition,avgRepetition 2 | START,Request created,116325,0,0,0,0,0,0,0.0 3 | Request completed with account closure,STOP,88409,0,0,0,0,0,0,0.0 4 | Back-Office Adjustment Requested ,Request completed with account closure,7622,0,0,0,0,0,0,0.0 5 | Pending Request for Reservation Closure,Pending Liquidation Request,93611,74045,0,0,920208000,15,0,0.0 6 | BO Service Closure,Pending Request for Reservation Closure,92819,7772,0,0,161509000,0,204,2.0588235294117645 7 | BO Service Closure,Network Adjustment Requested,9478,10208213,0,0,8354796000,0,1612,2.3114143920595533 8 | Authorization Requested,BO Service Closure,44560,7110249,0,0,1880315000,0,0,0.0 9 | Network Adjustment Requested,BO Service Closure,8928,7535465,0,0,1129105000,0,1539,2.3112410656270304 10 | Pending Liquidation Request,Request completed with account closure,73595,0,0,0,0,13,0,0.0 11 | Pending Liquidation Request,Back-Office Adjustment Requested ,9081,1487575,0,0,842569000,0,0,0.0 12 | Pending Liquidation Request,Pending Request for acquittance of heirs,7029,1847381,0,0,383904000,2,0,0.0 13 | Pending Request for acquittance of heirs,Request completed with account closure,7079,0,0,0,0,0,0,0.0 14 | Request created,BO Service Closure,5715,4086280,0,0,2712944000,0,0,0.0 15 | Request created,Evaluating Request (NO registered letter),41115,1961847,0,0,6284881000,0,0,0.0 16 | Request created,Network Service Closure,47652,15546345,0,0,14408265000,0,0,0.0 17 | Request created,Request deleted,10780,0,0,0,0,0,0,0.0 18 | Request created,Evaluating Request (WITH registered letter),6931,3055681,0,0,2040188000,0,0,0.0 19 | Request created,Pending Request for Network Information,1332,18757972,0,0,1112812000,0,0,0.0 20 | Evaluating Request (NO registered letter),Network Service 
Closure,40039,78133364,1066000,0,10929838000,0,0,0.0 21 | Evaluating Request (NO registered letter),Request completed with customer recovery,236,0,0,0,0,0,0,0.0 22 | Network Service Closure,BO Service Closure,44651,6725010,0,0,1545996000,0,3,2.0 23 | Network Service Closure,Authorization Requested,45583,1911858,0,0,5987061000,0,0,0.0 24 | Request completed with customer recovery,STOP,263,0,0,0,0,0,0,0.0 25 | Request deleted,STOP,13954,0,0,0,0,0,0,0.0 26 | Evaluating Request (WITH registered letter),Network Service Closure,5753,57362579,1457000,0,7758899000,0,0,0.0 27 | Pending Request for Network Information,Pending Request for Reservation Closure,1126,1955,0,0,238000,0,2,2.0 28 | -------------------------------------------------------------------------------- /REST APIs/IPMClient/users_to_add.csv: -------------------------------------------------------------------------------- 1 | firstName,lastName,country,email,agentEnabled,technicalUser,active,password,username 2 | User1,Lab,FR,User1.Lab@ibm.com,True,False,True,Passw0rd!,user1.lab 3 | User2,Lab,FR,User2.Lab@ibm.com,True,False,True,Passw0rd!,user2.lab 4 | User3,Lab,FR,User3.Lab@ibm.com,True,False,True,Passw0rd!,user3.lab 5 | User4,Lab,FR,User4.Lab@ibm.com,True,False,True,Passw0rd!,user4.lab 6 | User5,Lab,FR,User5.Lab@ibm.com,True,False,True,Passw0rd!,user5.lab 7 | User6,Lab,FR,User6.Lab@ibm.com,True,False,True,Passw0rd!,user6.lab 8 | User7,Lab,FR,User7.Lab@ibm.com,True,False,True,Passw0rd!,user7.lab 9 | -------------------------------------------------------------------------------- /REST APIs/REST_API_Examples/.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Python Debugger: Current File with Arguments", 9 | "type": "debugpy", 10 | "request": "launch", 11 | "program": "${file}", 12 | "console": "integratedTerminal", 13 | "args": "pharoses1_config.json" 14 | } 15 | ] 16 | } -------------------------------------------------------------------------------- /REST APIs/REST_API_Examples/Using_Curl.md: -------------------------------------------------------------------------------- 1 | # EXAMPLES OF REST API CALLS USING CURL 2 | 3 | Replace the elements like <url> with your values 4 | 5 | You first need to get a signature: 6 | ``` 7 | curl -X POST -k '<url>/integration/sign' -H "Content-Type: application/json" --data '{"uid" : "<userid>", "apiKey" : "<apikey>"}' 8 | ``` 9 | returns a key = <key> that you use in the other calls 10 | 11 | ## Uploading an event log 12 | Upload CSV into a project 13 | ``` 14 | curl -k -X POST '<url>/integration/csv/<projectkey>/upload?org=<orgkey>' -F file=@<eventlog>.zip -H "accept: application/json" -H "Authorization: Bearer <key>" 15 | ``` 16 | returns a job = <jobkey> 17 | 18 | You need to wait until the <jobkey> is complete: 19 | ``` 20 | curl -k '<url>/integration/csv/job-status/<jobkey>' -H "accept: application/json" -H "Authorization: Bearer <key>" 21 | ``` 22 | 23 | Then 'refresh' the event log 24 | ``` 25 | curl -k -X POST '<url>/integration/csv/<projectkey>/create-log?org=<orgkey>' -H "accept: application/json" -H "Authorization: Bearer <key>" 26 | ``` 27 | returns a job = <jobkey> 28 | 29 | loop/wait until the job is complete 30 | ``` 31 | curl -k '<url>/integration/csv/job-status/<jobkey>' -H "accept: application/json" -H "Authorization: Bearer <key>" 32 | ``` 33 | 34 | ## Querying data from the event log 35 | ``` 36 | curl -k -X POST '<url>/analytics/integration/newbawextract/query?org=ca2b2685' -H "Authorization: Bearer <key>" --header 'Content-Type: application/x-www-form-urlencoded' --data-urlencode 'params={ "query": "SELECT count(*) FROM EVENTLOG" }' 37 | ``` 38 | 39 | ## Trimming events (using Bank Account Closure) 40 | 41 | Replace <url>, <orgkey>, <userid>, <apikey> 42 | And <key> returned by the command below: 43 | curl -X POST -k '<url>/integration/sign' -H "Content-Type: application/json" --data '{"uid" : "<userid>", "apiKey" : "<apikey>"}' 44 | 45 | 46 | curl -k -X POST '<url>/integration/csv/bank-account-closure/trimming?org=<orgkey>' -H "Authorization: Bearer <key>" --header 'Content-Type: application/json' --data '{"caseFilters": [{"filterType": "attribute","attributeKey": "attr-CLOSURE_TYPE","attributeValue": "Client Recess"}]}' -------------------------------------------------------------------------------- /REST APIs/REST_API_Examples/__pycache__/ProcessMining_API.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/IBM/processmining/b0a64a50ca97f44d67b78f789ec11d0a52afb198/REST APIs/REST_API_Examples/__pycache__/ProcessMining_API.cpython-310.pyc -------------------------------------------------------------------------------- /REST APIs/REST_API_Examples/pharoses1_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "url":"https://pharoses1.fyre.ibm.com", 3 | "user_id": "task.miner", 4 | "api_key":"8a5kga87eqvd1180", 5 | "project_key": "procure-to-pay", 6 | "org_key": "" 7 | } -------------------------------------------------------------------------------- /REST APIs/REST_API_Examples/techzone.json: -------------------------------------------------------------------------------- 1 | { 2 | "url":"", 3 | "user_id": "maintenance.admin", 4 | 
"api_key":"8a5kga87eqvd1180" 5 | } -------------------------------------------------------------------------------- /REST APIs/REST_API_Examples/view.html: -------------------------------------------------------------------------------- 1 | --------------------------------------------------------------------------------