├── GeneratedData
│   ├── Step2Model
│   │   └── Data of Step 2.txt
│   ├── Step1.1Data
│   │   └── Data of Step 1.1.txt
│   ├── Step1.2Data
│   │   └── Data of Step 1.2.txt
│   └── Step4.1Data
│       └── Data of Step 4.1.txt
├── EssentialData
│   ├── APGrid.mat
│   ├── SSGrid.mat
│   ├── David_lobe.mat
│   ├── MTM_newCase.xlsx
│   ├── posteriorCov.mat
│   ├── posteriorMean.mat
│   ├── ProbabilityGrid.mat
│   ├── muAndSigmaForAgentModel.mat
│   ├── posteriorCovConvergence.mat
│   ├── muAndSigmaForOptimization.mat
│   ├── MTM_newCase_partial.csv
│   ├── MTM_newCase.csv
│   └── MTM_newCaseIncludingThePointsofUncertainty.csv
├── ResultDisplay
│   ├── Fig-David.pdf
│   └── Fig-David.png
├── step1.2_SortData.py
├── step4.2_SortDataForProbabilisticLobes.py
├── step4.3_GetProbabilisticLobeDiagram.py
├── step4.1_SampleForProbabilisticLobes.py
├── step1.1_SampleForAgent.py
├── step2_TrainAgentModel.py
├── README.md
├── step5_final.py
├── step3_BSLD.py
└── FDM_function.py

/GeneratedData/Step2Model/Data of Step 2.txt:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/GeneratedData/Step1.1Data/Data of Step 1.1.txt:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/GeneratedData/Step1.2Data/Data of Step 1.2.txt:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/GeneratedData/Step4.1Data/Data of Step 4.1.txt:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/EssentialData/APGrid.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/EssentialData/APGrid.mat
--------------------------------------------------------------------------------
/EssentialData/SSGrid.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/EssentialData/SSGrid.mat
--------------------------------------------------------------------------------
/EssentialData/David_lobe.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/EssentialData/David_lobe.mat
--------------------------------------------------------------------------------
/ResultDisplay/Fig-David.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/ResultDisplay/Fig-David.pdf
--------------------------------------------------------------------------------
/ResultDisplay/Fig-David.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/ResultDisplay/Fig-David.png
--------------------------------------------------------------------------------
/EssentialData/MTM_newCase.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/EssentialData/MTM_newCase.xlsx
--------------------------------------------------------------------------------
/EssentialData/posteriorCov.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/EssentialData/posteriorCov.mat -------------------------------------------------------------------------------- /EssentialData/posteriorMean.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/EssentialData/posteriorMean.mat -------------------------------------------------------------------------------- /EssentialData/ProbabilityGrid.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/EssentialData/ProbabilityGrid.mat -------------------------------------------------------------------------------- /EssentialData/muAndSigmaForAgentModel.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/EssentialData/muAndSigmaForAgentModel.mat -------------------------------------------------------------------------------- /EssentialData/posteriorCovConvergence.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/EssentialData/posteriorCovConvergence.mat -------------------------------------------------------------------------------- /EssentialData/muAndSigmaForOptimization.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gengxiangc/BSLD/HEAD/EssentialData/muAndSigmaForOptimization.mat -------------------------------------------------------------------------------- /EssentialData/MTM_newCase_partial.csv: -------------------------------------------------------------------------------- 1 | 0.64,0.25,0 2 | 0.64,0.25,0 3 | 0.66,0.25,0 4 | 0.68,0.25,0 5 | 0.7,0.25,0 6 | 0.72,0.25,0 7 | 0.74,0.25,0 8 | 0.76,0.25,0 9 | 0.78,0.25,0 10 | 0.8,0.25,0 11 | 0.64,0.5,1 12 | 0.66,0.5,0 13 | 0.68,0.5,0 14 | 0.7,0.5,0 15 | 0.72,0.5,0 16 | 0.74,0.5,0 17 | 0.76,0.5,0 18 | 0.78,0.5,0 19 | 0.8,0.5,0 20 | 0.68,0.75,0 21 | 0.7,0.75,0 22 | 0.72,0.75,0 23 | 0.74,0.75,0 24 | 0.76,0.75,0 25 | 0.78,0.75,1 26 | 0.8,0.75,1 27 | 0.66,1,1 28 | 0.68,1,1 29 | 0.7,1,1 30 | 0.72,1,0 31 | 0.74,1,0 32 | 0.76,1,1 33 | 0.78,1,1 34 | 0.72,1.25,0 35 | 0.74,1.25,1 36 | 0.72,1.5,0 37 | 0.74,1.5,1 38 | 0.72,1.75,0 39 | 0.72,2,0 40 | 0.72,2.5,1 41 | -------------------------------------------------------------------------------- /step1.2_SortData.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Feb 1 2021 4 | 5 | This code is part of the supplement materials of the submmited manuscript: 6 | 'Physics-informed Bayesian Inference for Milling Stability Analysis'. 
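A note on the data layout this script assumes: each FDM run in step 1.1
contributes one full grid of spectral radii, so the stacked file consists of
800 blocks of gridNumber rows, and row (gridNumber * j + i) is grid point i
of block j. This script regroups the rows so that agentdata<i>.mat collects
all 800 samples belonging to grid point i.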
7 | 8 | 9 | """ 10 | '''Integrate data from each grid point''' 11 | import numpy as np 12 | import os 13 | import scipy.io as sio 14 | 15 | size = 800 16 | gridNumber= 27 * 85 17 | if not os.path.exists('GeneratedData/Step1.2Data'): 18 | os.makedirs('GeneratedData/Step1.2Data') 19 | 20 | alldata = sio.loadmat('GeneratedData/Step1.1Data/all_spectral_radius-MTMcase.mat')['data'] 21 | for i in range(0, gridNumber): 22 | temp = [] 23 | for j in range(0, size): 24 | if j == 0: 25 | temp = alldata[gridNumber * j + i, :] 26 | else: 27 | # print(gridNumber * j + i) 28 | temp = np.vstack((temp,alldata[gridNumber * j + i, :])) 29 | 30 | sio.savemat('GeneratedData/Step1.2Data/agentdata' + str(i) + '.mat', {'data': temp}) 31 | 32 | print("step1.2 is over, please run the step2") -------------------------------------------------------------------------------- /step4.2_SortDataForProbabilisticLobes.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Feb 1 2021 4 | 5 | This code is part of the supplement materials of the submmited manuscript: 6 | 'Physics-informed Bayesian Inference for Milling Stability Analysis'. 7 | 8 | 9 | """ 10 | '''The sampling points for each grid point are put together''' 11 | import numpy as np 12 | import scipy.io as sio 13 | import os 14 | 15 | size = 500 16 | gridNumber = 101 * 51 17 | if not os.path.exists('GeneratedData/Step4.2Data'): 18 | os.makedirs('GeneratedData/Step4.2Data') 19 | 20 | alldata = sio.loadmat('GeneratedData/Step4.1Data/all_spectral_radius-' + str(size) + '-forMTMcasePro.mat')['data'] 21 | for i in range(0, gridNumber): 22 | temp = [] 23 | for j in range(0, size): 24 | if j == 0: 25 | temp = alldata[gridNumber * j + i, :] 26 | else: 27 | print(gridNumber * j + i) 28 | temp = np.vstack((temp, alldata[gridNumber * j + i, :])) 29 | 30 | sio.savemat('GeneratedData/Step4.2Data/agentdata' + str(i) + '.mat', {'data': temp}) 31 | 32 | print("step4.2 is over, please run the step4.3") -------------------------------------------------------------------------------- /step4.3_GetProbabilisticLobeDiagram.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Feb 1 2021 4 | 5 | This code is part of the supplement materials of the submmited manuscript: 6 | 'Physics-informed Bayesian Inference for Milling Stability Analysis'. 7 | 8 | 9 | """ 10 | 11 | '''Monte Carlo''' 12 | import pandas as pd 13 | import matplotlib.pyplot as plt 14 | import numpy as np 15 | import scipy.io as scio 16 | 17 | size = 500 18 | 19 | PRO = np.zeros((51, 101)) 20 | ss = np.linspace(4800, 13200, 101) 21 | ap = np.linspace(0, 4, 51) 22 | SS,AP = np.meshgrid(ss, ap) 23 | 24 | # Chatter if spectral radius is larger than 1. 
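# For each grid point (ss, ap), the chatter probability is estimated by
# Monte Carlo: PRO[j, i] ~= N_chatter / size, counting the posterior samples
# whose spectral radius (column 10 of agentdata<k>.mat) exceeds 1.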
25 | for i in range(101): 26 | for j in range(51): 27 | sum = 0 28 | temp = scio.loadmat('GeneratedData/Step4.2Data/agentdata' + str(i*51 + j) + '.mat')['data'] 29 | for k in range(size): 30 | if temp[k, 10] > 1: 31 | sum = sum + 1 32 | PRO[j, i] = sum / size 33 | print(i) 34 | 35 | plt.contourf(SS, AP, PRO) 36 | 37 | plt.contour(SS, AP, PRO) 38 | 39 | contour = plt.contour(SS, AP, PRO, [0.5] , colors = 'k') 40 | 41 | plt.clabel(contour, fontsize=10, colors = ('k', 'r')) 42 | 43 | case5_exp = np.array(pd.read_csv('EssentialData/MTM_newCase.csv', sep=',')) 44 | markers = ['o', 'x', 'o'] 45 | colors = ['y', 'r', 'b'] 46 | for i in range(case5_exp.shape[0]): 47 | plt.scatter(case5_exp[i, 0] * 10000, case5_exp[i, 1], c = colors[int(case5_exp[i, 2])], marker = markers[int(case5_exp[i, 2])], s = 30) 48 | 49 | plt.show() 50 | 51 | scio.savemat('EssentialData/SSGrid.mat', {'data': SS}) 52 | scio.savemat('EssentialData/APGrid.mat', {'data': AP}) 53 | scio.savemat('EssentialData/ProbabilityGrid.mat', {'data': PRO}) 54 | 55 | print("step4.3 is over, please run the step5_final") -------------------------------------------------------------------------------- /EssentialData/MTM_newCase.csv: -------------------------------------------------------------------------------- 1 | 0.5,0.25,0 2 | 0.52,0.25,0 3 | 0.54,0.25,0 4 | 0.56,0.25,0 5 | 0.58,0.25,0 6 | 0.6,0.25,0 7 | 0.62,0.25,0 8 | 0.64,0.25,0 9 | 0.66,0.25,0 10 | 0.68,0.25,0 11 | 0.7,0.25,0 12 | 0.72,0.25,0 13 | 0.74,0.25,0 14 | 0.76,0.25,0 15 | 0.78,0.25,0 16 | 0.8,0.25,0 17 | 0.5,0.5,1 18 | 0.52,0.5,1 19 | 0.54,0.5,0 20 | 0.56,0.5,0 21 | 0.58,0.5,0 22 | 0.6,0.5,1 23 | 0.62,0.5,1 24 | 0.64,0.5,1 25 | 0.66,0.5,0 26 | 0.68,0.5,0 27 | 0.7,0.5,0 28 | 0.72,0.5,0 29 | 0.74,0.5,0 30 | 0.76,0.5,0 31 | 0.78,0.5,0 32 | 0.8,0.5,0 33 | 0.5,0.75,1 34 | 0.52,0.75,1 35 | 0.54,0.75,0 36 | 0.56,0.75,0 37 | 0.6,0.75,1 38 | 0.68,0.75,0 39 | 0.7,0.75,0 40 | 0.72,0.75,0 41 | 0.74,0.75,0 42 | 0.76,0.75,0 43 | 0.78,0.75,1 44 | 0.8,0.75,1 45 | 0.52,1,1 46 | 0.54,1,1 47 | 0.58,1,1 48 | 0.66,1,1 49 | 0.68,1,1 50 | 0.7,1,1 51 | 0.72,1,0 52 | 0.74,1,0 53 | 0.76,1,1 54 | 0.78,1,1 55 | 0.56,1.25,1 56 | 0.72,1.25,0 57 | 0.74,1.25,1 58 | 0.72,1.5,0 59 | 0.74,1.5,1 60 | 0.72,1.75,0 61 | 0.72,2,0 62 | 0.72,2.5,1 63 | 0.84,0.25,0 64 | 0.84,0.5,1 65 | 0.88,0.25,0 66 | 0.88,0.5,1 67 | 0.88,0.75,1 68 | 0.92,0.25,0 69 | 0.92,0.5,0 70 | 0.92,0.75,0 71 | 0.92,1,1 72 | 0.96,0.25,0 73 | 0.96,0.5,0 74 | 0.96,0.75,0 75 | 0.96,1,0 76 | 0.96,1.5,1 77 | 1,0.25,0 78 | 1,0.5,0 79 | 1,0.75,0 80 | 1,1,0 81 | 1,1.25,0 82 | 1,1.5,0 83 | 1,1.75,1 84 | 1.05,0.25,0 85 | 1.05,0.5,0 86 | 1.05,0.75,0 87 | 1.05,1,0 88 | 1.05,1.25,0 89 | 1.05,1.5,0 90 | 1.1,0.25,0 91 | 1.1,0.5,0 92 | 1.1,0.75,0 93 | 1.1,1,0 94 | 1.1,1.25,0 95 | 1.1,1.5,0 96 | 1.1,1.75,0 97 | 1.1,2,1 98 | 1.15,0.25,0 99 | 1.15,0.5,0 100 | 1.15,0.75,0 101 | 1.15,1,0 102 | 1.15,1.25,0 103 | 1.15,1.5,1 104 | 1.15,1.75,1 105 | 1.2,0.25,0 106 | 1.2,0.5,0 107 | 1.2,0.75,0 108 | 1.2,1,0 109 | 1.2,1.25,1 110 | 1.2,1.5,1 111 | 1.25,0.25,0 112 | 1.25,0.5,0 113 | 1.25,0.75,0 114 | 1.25,1,1 115 | 1.3,0.25,0 116 | 1.3,0.5,0 117 | 1.3,0.75,1 118 | 1.3,1,1 119 | -------------------------------------------------------------------------------- /EssentialData/MTM_newCaseIncludingThePointsofUncertainty.csv: -------------------------------------------------------------------------------- 1 | 0.5,0.25,0 2 | 0.52,0.25,0 3 | 0.54,0.25,0 4 | 0.56,0.25,0 5 | 0.58,0.25,0 6 | 0.6,0.25,0 7 | 0.62,0.25,0 8 | 0.64,0.25,0 9 | 0.66,0.25,0 10 | 0.68,0.25,0 11 | 0.7,0.25,0 12 | 
0.72,0.25,0 13 | 0.74,0.25,0 14 | 0.76,0.25,0 15 | 0.78,0.25,0 16 | 0.8,0.25,0 17 | 0.5,0.5,1 18 | 0.52,0.5,1 19 | 0.54,0.5,0 20 | 0.56,0.5,0 21 | 0.58,0.5,0 22 | 0.6,0.5,1 23 | 0.62,0.5,1 24 | 0.64,0.5,1 25 | 0.66,0.5,0 26 | 0.68,0.5,0 27 | 0.7,0.5,0 28 | 0.72,0.5,0 29 | 0.74,0.5,0 30 | 0.76,0.5,0 31 | 0.78,0.5,0 32 | 0.8,0.5,0 33 | 0.5,0.75,1 34 | 0.52,0.75,1 35 | 0.54,0.75,0 36 | 0.56,0.75,0 37 | 0.58,0.75,2 38 | 0.6,0.75,1 39 | 0.66,0.75,2 40 | 0.68,0.75,0 41 | 0.7,0.75,0 42 | 0.72,0.75,0 43 | 0.74,0.75,0 44 | 0.76,0.75,0 45 | 0.78,0.75,1 46 | 0.8,0.75,1 47 | 0.52,1,1 48 | 0.54,1,1 49 | 0.56,1,2 50 | 0.58,1,1 51 | 0.66,1,1 52 | 0.68,1,1 53 | 0.7,1,1 54 | 0.72,1,0 55 | 0.74,1,0 56 | 0.76,1,1 57 | 0.78,1,1 58 | 0.56,1.25,1 59 | 0.72,1.25,0 60 | 0.74,1.25,1 61 | 0.72,1.5,0 62 | 0.74,1.5,1 63 | 0.72,1.75,0 64 | 0.72,2,0 65 | 0.72,2.5,1 66 | 0.84,0.25,0 67 | 0.84,0.5,1 68 | 0.88,0.25,0 69 | 0.88,0.5,1 70 | 0.88,0.75,1 71 | 0.92,0.25,0 72 | 0.92,0.5,0 73 | 0.92,0.75,0 74 | 0.92,1,1 75 | 0.96,0.25,0 76 | 0.96,0.5,0 77 | 0.96,0.75,0 78 | 0.96,1,0 79 | 0.96,1.5,1 80 | 1,0.25,0 81 | 1,0.5,0 82 | 1,0.75,0 83 | 1,1,0 84 | 1,1.25,0 85 | 1,1.5,0 86 | 1,1.75,1 87 | 1.05,0.25,0 88 | 1.05,0.5,0 89 | 1.05,0.75,0 90 | 1.05,1,0 91 | 1.05,1.25,0 92 | 1.05,1.5,0 93 | 1.1,0.25,0 94 | 1.1,0.5,0 95 | 1.1,0.75,0 96 | 1.1,1,0 97 | 1.1,1.25,0 98 | 1.1,1.5,0 99 | 1.1,1.75,0 100 | 1.1,2,1 101 | 1.15,0.25,0 102 | 1.15,0.5,0 103 | 1.15,0.75,0 104 | 1.15,1,0 105 | 1.15,1.25,0 106 | 1.15,1.5,1 107 | 1.15,1.75,1 108 | 1.2,0.25,0 109 | 1.2,0.5,0 110 | 1.2,0.75,0 111 | 1.2,1,0 112 | 1.2,1.25,1 113 | 1.2,1.5,1 114 | 1.25,0.25,0 115 | 1.25,0.5,0 116 | 1.25,0.75,0 117 | 1.25,1,1 118 | 1.3,0.25,0 119 | 1.3,0.5,0 120 | 1.3,0.75,1 121 | 1.3,1,1 122 | 0.96,1.25,2 123 | 0.72,2.25,2 124 | 1.05,1.75,2 125 | 1.05,2,2 126 | 1.05,2.25,2 127 | 1.05,2.5,2 128 | -------------------------------------------------------------------------------- /step4.1_SampleForProbabilisticLobes.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Feb 1 2021 4 | 5 | This code is part of the supplement materials of the submmited manuscript: 6 | 'Physics-informed Bayesian Inference for Milling Stability Analysis'. 7 | 8 | 9 | """ 10 | import os 11 | import numpy as np 12 | import FDM_function as MF 13 | import time 14 | import scipy.io as sio 15 | 16 | '''Samples are taken for drawing probabilistic Lobes diagram''' 17 | size = 500 18 | if not os.path.exists('GeneratedData/Step4.1Data'): 19 | os.makedirs('GeneratedData/Step4.1Data') 20 | 21 | postCov = sio.loadmat('EssentialData/posteriorCov.mat')['data'] 22 | postMean = sio.loadmat('EssentialData/posteriorMean.mat')['data'] 23 | 24 | postCov = 0.01 * postCov 25 | postMean = postMean.T 26 | postMean = postMean[0, :] 27 | data = np.random.multivariate_normal(mean = postMean, cov = postCov, size = size) 28 | data[:, 0:2] = data[:, 0:2] * 1000 29 | 30 | data = np.matrix(data) 31 | 32 | ''' 33 | Note: This step takes some time because of FDM calculation. 34 | After obtaining posterior distribution of parameters, 35 | the spectral_radius can also be obtained by surrogate model. 
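For example, for grid points covered by the step 2 surrogates, the trained
networks in GeneratedData/Step2Model could be evaluated on the posterior
samples instead of re-running the FDM at those points.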
36 | ''' 37 | for i in range(size): 38 | localtime = time.asctime(time.localtime(time.time())) 39 | print('Index: ', i, ' in ', size, ', Time', localtime) 40 | _, _, _, spectral_radius = MF.FDM( 41 | 'SampleAtAllGridPoint', 42 | data[i, 0], 43 | data[i, 1], 44 | data[i, 2], 45 | data[i, 3], 46 | data[i, 4], 47 | data[i, 5], 48 | data[i, 6], 49 | data[i, 7]) 50 | 51 | extra_feature = np.repeat(data[i], len(spectral_radius), axis=0) 52 | data_temp = np.hstack((extra_feature, spectral_radius)) 53 | if i == 0: 54 | data_save = data_temp 55 | if i > 0: 56 | data_save = np.vstack((data_temp, data_save)) 57 | 58 | ##if the calculation is complete, each step can be deleted 59 | sio.savemat('GeneratedData/Step4.1Data/' + str(i) + '_spectral_radius.mat', {'data': data_temp}) 60 | 61 | #this is what we need in the end 62 | sio.savemat('GeneratedData/Step4.1Data/all_spectral_radius-' + str(size) + '-forMTMcasePro.mat', {'data': data_save}) 63 | print("step4.1 is over, please run the step4.2") -------------------------------------------------------------------------------- /step1.1_SampleForAgent.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Feb 1 2021 4 | 5 | This code is part of the supplement materials of the submmited manuscript: 6 | 'Physics-informed Bayesian Inference for Milling Stability Analysis'. 7 | 8 | Data From: 9 | [1] Hajdu D, Borgioli F, Michiels W, et al. Robust stability of milling operations 10 | based on pseudospectral approach[J]. International Journal of Machine Tools and 11 | Manufacture, 2020, 149: 103516. 12 | 13 | """ 14 | '''Samples at each grid point for agent model''' 15 | import sobol_seq 16 | import os 17 | import numpy as np 18 | import FDM_function as MF 19 | import time 20 | import scipy.io as sio 21 | 22 | # size of samples 23 | size = 800 24 | if not os.path.exists('GeneratedData/Step1.1Data'): 25 | os.makedirs('GeneratedData/Step1.1Data') 26 | 27 | ''' 28 | Load initial model parameters from Hajdu D 29 | Note that the variance in muAndSigmaForAgentModel is larger than 30 | that in muAndSigmaForOptimization for wider samples for agent model, 31 | A : wx, wy, cx, cy, ks, ky, kt, kr 32 | ''' 33 | A = sio.loadmat('EssentialData/muAndSigmaForAgentModel.mat')['data'] 34 | A = np.matrix(A) 35 | 36 | mu = np.repeat(A[:, 0].T, size, axis = 0) 37 | sigma = np.repeat(A[:, 1].T, size, axis = 0) 38 | 39 | 40 | # Sobol Sampling 41 | data_ = sobol_seq.i4_sobol_generate_std_normal(8, size) 42 | data = np.multiply(data_, sigma) + mu 43 | 44 | 45 | # This step takes some time because of FDM calculation 46 | # Matlab sampling can be more quick 47 | for i in range(size): 48 | localtime = time.asctime(time.localtime(time.time())) 49 | print('Index: ', i, ' in ', size, ', Time', localtime) 50 | _, _, _, spectral_radius = MF.FDM( 51 | 'SampleForAgent', 52 | data[i, 0], 53 | data[i, 1], 54 | data[i, 2], 55 | data[i, 3], 56 | data[i, 4], 57 | data[i, 5], 58 | data[i, 6], 59 | data[i, 7]) 60 | 61 | extra_feature = np.repeat(data[i], len(spectral_radius), axis = 0) 62 | data_temp = np.hstack((extra_feature, spectral_radius)) 63 | if i == 0: 64 | data_save = data_temp 65 | if i > 0: 66 | data_save = np.vstack((data_temp, data_save)) 67 | 68 | ##if the calculation is complete, each step can be deleted 69 | sio.savemat('GeneratedData/Step1.1Data/' + str(i) + '_spectral_radius.mat', {'data': data_temp}) 70 | 71 | # this is what we need in the final step 72 | 
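# Layout of data_save, following the hstack above: columns 0-7 hold the
# sampled parameters (wx, wy, cx, cy, ks, ky, kt, kr), column 8 the spindle
# speed, column 9 the axial depth, and column 10 the spectral radius from FDM.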
sio.savemat('GeneratedData/Step1.1Data/all_spectral_radius-MTMcase.mat', {'data': data_save})
73 | print("step1.1 is over, please run the step1.2")
--------------------------------------------------------------------------------
/step2_TrainAgentModel.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Feb 1 2021
4 | 
5 | This code is part of the supplementary materials of the submitted manuscript:
6 | 'Physics-informed Bayesian Inference for Milling Stability Analysis'.
7 | 
8 | Note:
9 | the agent model can be trained per grid point (as done here), or as one big model
10 | 
11 | """
12 | import torch
13 | import torch.nn.functional as F
14 | import torch.nn as nn
15 | import scipy.io as scio
16 | import torch.optim as optim
17 | import os
18 | from decimal import Decimal
19 | 
20 | size = 800
21 | gridNumber = 27 * 85
22 | 
23 | class myNN(torch.nn.Module):
24 |     def __init__(self):
25 |         super(myNN, self).__init__()
26 |         self.fc1 = nn.Linear(8, 20)
27 |         self.fc2 = nn.Linear(20, 40)
28 |         self.fc3 = nn.Linear(40, 40)
29 |         self.fc4 = nn.Linear(40, 1)
30 |         self.y_prediction = None
31 | 
32 |     def forward(self, x):
33 |         h1 = torch.sigmoid(self.fc1(x))
34 |         h2 = torch.sigmoid(self.fc2(h1))
35 |         h3 = F.relu(self.fc3(h2))
36 |         self.y_prediction = F.relu(self.fc4(h3))
37 |         return self.y_prediction
38 | 
39 |     def loss(self, y):
40 |         return torch.norm(self.y_prediction - y) / y.size()[0]
41 | 
42 | if __name__ == "__main__":
43 |     if not os.path.exists('GeneratedData/Step2Model'):
44 |         os.makedirs('GeneratedData/Step2Model')
45 | 
46 |     for i in range(gridNumber):
47 |         print('Training model : ', i, ' in ', gridNumber, 'grid')
48 |         temp = scio.loadmat('GeneratedData/Step1.2Data/agentdata' + str(i) + '.mat')['data']
49 |         temp[:, 0:2] = temp[:, 0:2] / 1000
50 |         # The natural frequencies are divided by 1000 to ease neural network training
51 |         x = torch.tensor(temp[:, 0:8], dtype = torch.float32)
52 |         ss = temp[0, 8]
53 |         ap = Decimal(temp[0, 9] * 1000).quantize(Decimal("0.01"), rounding = "ROUND_HALF_UP")
54 |         y = torch.tensor(temp[:, 10], dtype = torch.float32)
55 |         y = y.reshape(size, 1)
56 | 
57 |         tempmodel = myNN()
58 |         optimizer = optim.Adam(tempmodel.parameters(), lr = 0.01)
59 |         epoch = 0
60 |         while epoch < 3000:
61 |             tempmodel.forward(x)
62 |             loss = tempmodel.loss(y)
63 |             # print(loss)
64 |             optimizer.zero_grad()
65 |             loss.backward()
66 |             optimizer.step()
67 |             epoch += 1
68 | 
69 |         torch.save(tempmodel, 'GeneratedData/Step2Model/ss' + str(ss) + 'ap' + str(ap) + 'model.pkl')
70 | 
71 |     print("step2 is over, please run the step3")
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Bayesian Stability Lobe Diagram (BSLD)
2 | 
3 | ## Introduction
4 | 
5 | This code is a simplified step-by-step implementation for better understanding of the submitted manuscript: **Physics-informed Bayesian Inference for Milling Stability Analysis**. Please contact cgx@nuaa.edu.cn if there is any mistake or confusion.
6 | 
7 | **1**. Due to the randomness of the sampling, the results of each run may be slightly different.
8 | You can run `step5_final.py` to get the result (Fig. 8 in the manuscript).
9 | 
10 | ![Fig-David](ResultDisplay/Fig-David.png)
11 | 
12 | **2**. To illustrate the algorithm clearly, the total procedure is separated into 8 steps.
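In brief, steps 1–2 build and train the surrogate (agent) models, step 3 performs the Bayesian inference itself, and steps 4–5 propagate the posterior into the probabilistic lobe diagram.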
Run the following steps in sequence to get the data for the intermediate steps and the final result:
13 | 
14 | 1. `step1.1_SampleForAgent.py` Samples 800 points from the prior distributions to train the surrogate models of the spectral radius. A Sobol sampling strategy is adopted here for efficient space-density sampling. The model parameters come from [David](https://www.sciencedirect.com/science/article/pii/S0890695519310995). The spectral radius is calculated by the [FDM](https://www.sciencedirect.com/science/article/pii/S089069551000012X). The default parameters of the stability lobe diagram are defined in the function `FDM_function`.
15 | 2. `step1.2_SortData.py` Reorganizes the sampled data from 800 groups of SLDs into 2295 groups of spectral radii (the SLD grid has 27*85=2295 points).
16 | 3. `step2_TrainAgentModel.py` Trains 2295 surrogate models using the datasets $[\mathbf{w}, \lambda]$. The surrogate models are simple multi-layer perceptrons in PyTorch.
17 | 4. `step3_BSLD.py` Infers the posterior distribution using the Laplace approximation. The mode of the distribution, $\mathbf{w}_{*}$, is obtained by maximizing the posterior function using gradient descent in PyTorch. Note that the Hessian matrix is calculated using the auto-grad graph of PyTorch. The experimental training data used in this step is `MTM_newCase_partial.csv`.
18 | 5. `step4.1_SampleForProbabilisticLobes.py` Samples 500 points from the posterior distribution. Note that this sampling strategy is distribution-density sampling rather than the space-density sampling of step 1. This step takes some time because of the FDM calculation. The spectral radius for the posterior samples can also be obtained from the trained surrogate models.
19 | 6. `step4.2_SortDataForProbabilisticLobes.py` Organizes the dataset, in the same way as step 2.
20 | 7. `step4.3_GetProbabilisticLobeDiagram.py` Calculates the number $N_{\text{chatter}}$ of unstable samples based on the value of the spectral radius.
21 | 8. `step5_final.py` Plots the probabilistic SLD using iso-probability boundaries.
22 | 
23 | **3**. We have also provided all trained surrogate models in `GeneratedData/Step2Model`, so you can directly run `step3_BSLD.py` to infer the posterior distribution of the parameters.
24 | 
25 | **4**. The following important packages need to be configured in order to run the code:
26 | 
27 | ```
28 | pytorch 1.6.0
29 | sobol_seq (pip install git+https://github.com/naught101/sobol_seq@v0.2.0#egg=sobol_seq)
30 | ```
31 | 
32 | 
33 | ### Acknowledgement
34 | Here, we would also like to greatly acknowledge the help of Dr. [David](https://www.sciencedirect.com/science/article/pii/S0890695519310995) in sharing the data for the case study.
35 | 
--------------------------------------------------------------------------------
/step5_final.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Feb 1 2021
4 | 
5 | This code is part of the supplementary materials of the submitted manuscript:
6 | 'Physics-informed Bayesian Inference for Milling Stability Analysis'.
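This script overlays, in one figure: the probabilistic lobe diagram from
step 4.3, the deterministic lobe computed from the prior mean parameters,
David's reference lobe, and the experimental and training points.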
7 | 8 | 9 | """ 10 | import numpy as np 11 | import matplotlib 12 | import matplotlib.pyplot as plt 13 | import FDM_function as MF 14 | import scipy.io as sio 15 | import pandas as pd 16 | 17 | plt.rc('font', family = 'serif', size = 12) 18 | matplotlib.rcParams['mathtext.fontset'] = 'cm' # Set Formula Font STIX 19 | 20 | Title = 'David' 21 | 22 | PrioriArgument = sio.loadmat('EssentialData/muAndSigmaForOptimization.mat')['data'] 23 | 24 | #'''The lobe diagram before optimization was calculated''' 25 | matrix_spindle_speed1, matrix_axis_depth1, matrix_eigenvalues1, _ = MF.FDM( 26 | 'SampleAtAllGridPoint', PrioriArgument[0, 0] * 1000, PrioriArgument[1, 0] * 1000, PrioriArgument[2, 0], PrioriArgument[3, 0], 27 | PrioriArgument[4, 0], PrioriArgument[5, 0], PrioriArgument[6, 0], PrioriArgument[7, 0]) 28 | 29 | '''Loading probability lobe profile data, rotation speed, cutting depth, probability''' 30 | SS = sio.loadmat('EssentialData/SSGrid.mat')['data'] 31 | AP = sio.loadmat('EssentialData/APGrid.mat')['data'] 32 | PRO = sio.loadmat('EssentialData/ProbabilityGrid.mat')['data'] 33 | 34 | '''Load David's raw data''' 35 | lobe_x = sio.loadmat('EssentialData/David_lobe.mat')['x'][0] 36 | lobe_y = sio.loadmat('EssentialData/David_lobe.mat')['y'][0] 37 | lobe_david = np.vstack((lobe_x, lobe_y)).T 38 | lobe_david = lobe_david[np.argsort(lobe_david[:, 0])] 39 | 40 | fig, ax = plt.subplots(figsize = (10.8, 6)) 41 | 42 | '''Probability lobes''' 43 | factor = 1 44 | cs = ax.contourf(SS, AP, PRO * factor, np.linspace(0, 1, 20), 45 | cmap = plt.cm.GnBu, 46 | alpha = 0.7, 47 | linestyles = None) 48 | 49 | cb = fig.colorbar(cs) 50 | cb.set_ticks([0.2 * factor, 0.4 * factor, 0.6 * factor, 0.8 * factor, 1.0 * factor, ]) 51 | cb.set_ticklabels(('0.2', '0.4', '0.6', '0.8', '1.0')) 52 | cb.set_label('Probability of Chatter') 53 | cb.ax.tick_params(labelsize=12) 54 | 55 | cs = ax.contour(SS, AP, PRO, [0.5], colors = 'r') 56 | plt.clabel(cs, fontsize = 10, colors = ('k', 'r'), fmt = '%1.2f') 57 | 58 | '''Draw a theoretical lobes diagram''' 59 | ax.contour(matrix_spindle_speed1, matrix_axis_depth1 * 1000, matrix_eigenvalues1, [1], colors = 'k', linestyles = '-') 60 | 61 | '''Draw the optimized picture''' 62 | # ax.contour(matrix_spindle_speed2, matrix_axis_depth2 * 1000, matrix_eigenvalues2, [1, ], colors='r') 63 | 64 | '''draw the results of David's experiment''' 65 | plt.plot(lobe_david[:, 0], lobe_david[:, 1] * 1000, 'b-', label = 'David') 66 | plt.plot([5000, 5000], [0.1, 0.1], 'k-', label = 'Original') 67 | plt.plot([5000, 5000], [0.1, 0.1], 'r-', label = 'BSLD($p=0.5$)') 68 | 69 | '''The experimental data''' 70 | case5_exp = np.array(pd.read_csv('EssentialData/MTM_newCaseIncludingThePointsofUncertainty.csv', sep = ',')) 71 | markers = ['o', 'x', '^'] 72 | colors = ['k', 'k', 'w'] 73 | states = ['Stable', 'Unstable', 'Marginal'] 74 | edgecolors = ['k', 'k', 'k'] 75 | for i in range(case5_exp.shape[0]): 76 | plt.scatter(case5_exp[i, 0] * 10000, case5_exp[i, 1], 77 | color = colors[int(case5_exp[i, 2])], 78 | marker = markers[int(case5_exp[i, 2])], 79 | edgecolors = edgecolors[int(case5_exp[i, 2])], linewidths=1.5, 80 | s = 50) 81 | # Prepare Marker 82 | plt.scatter(5000, 0.25, c = colors[int(0)], marker = markers[int(0)], s = 50, label = states[int(0)]) 83 | plt.scatter(5000, 0.5, c = colors[int(1)], marker = markers[int(1)], s = 50, label = states[int(1)]) 84 | plt.scatter(5800, 0.75, c = colors[int(2)], marker = markers[int(2)], s = 50, label = states[int(2)], 85 | edgecolors = edgecolors[int(2)], linewidths = 
1.5) 86 | 87 | '''Training data''' 88 | case5_train_data = np.array(pd.read_csv('EssentialData/MTM_newCase_partial.csv', sep = ',')) 89 | for i in range(case5_train_data.shape[0]): 90 | if i == 0: 91 | plt.scatter(case5_train_data[i, 0] * 10000, case5_train_data[i, 1], 92 | c = '', marker = 's', s = 90, edgecolors = 'r', linewidths = 0.75, label = 'Traning Samples') 93 | else: 94 | plt.scatter(case5_train_data[i, 0] * 10000, case5_train_data[i, 1], 95 | c = '', marker = 's', s = 90, edgecolors = 'r', linewidths = 0.75) 96 | 97 | size = 12 98 | ax.set_xlabel('Spindle speed [rev/min]') 99 | ax.set_ylabel('Axial depth [mm]') 100 | plt.yticks() 101 | plt.xticks() 102 | plt.tight_layout() 103 | ax = plt.gca() 104 | plt.legend(loc = 'upper left', ncol = 2) 105 | plt.show() 106 | plt.savefig('ResultDisplay/Fig-' + str(Title) + '.svg', format = 'svg') 107 | plt.savefig('ResultDisplay/Fig-' + str(Title) + '.pdf', format = 'pdf') 108 | plt.savefig('ResultDisplay/Fig-' + str(Title) + '.png', format = 'png') 109 | 110 | print("All Over") -------------------------------------------------------------------------------- /step3_BSLD.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Feb 1 2021 4 | 5 | This code is part of the supplement materials of the submmited manuscript: 6 | 'Physics-informed Bayesian Inference for Milling Stability Analysis'. 7 | 8 | """ 9 | import numpy as np 10 | import pandas as pd 11 | import scipy.io as sio 12 | import torch 13 | import matplotlib.pyplot as plt 14 | from decimal import Decimal 15 | import matplotlib 16 | from step2_TrainAgentModel import myNN 17 | 18 | plt.rc('font', family = 'serif', size = 10) 19 | matplotlib.rcParams['mathtext.fontset'] = 'cm' # Set Formula Font STIX 20 | 21 | '''Import experimental cutting data''' 22 | case5_exp = np.array(pd.read_csv('EssentialData/MTM_newCase_partial.csv', sep = ',')) # Part of the experimental data was used for optimization 23 | caseob_exp = np.array(pd.read_csv('EssentialData/MTM_newCase.csv', sep = ',')) 24 | 25 | '''Prior distribution''' 26 | PrioriArgument = sio.loadmat('EssentialData/muAndSigmaForOptimization.mat')['data'] 27 | 28 | A = np.matrix(PrioriArgument) 29 | 30 | mu_0 = A[:, 0] 31 | sigma_0 = np.multiply(A[:, 1], np.eye(8)) 32 | mu_0 = torch.tensor(mu_0) 33 | sigma_0 = torch.tensor(sigma_0) 34 | 35 | # Initial Parameters 36 | x = torch.tensor(mu_0.float(), requires_grad = True) 37 | 38 | def likelihoodF(w): 39 | likelihood = -0.5 * torch.mm((w - mu_0).t(), torch.inverse(sigma_0)) 40 | likelihood = torch.mm(likelihood, (w - mu_0)) 41 | prior = likelihood 42 | 43 | for i in range(case5_exp.shape[0]): 44 | tempstr = 'GeneratedData/Step2Model/ss' + str(round(case5_exp[i, 0] * 10000, 2)) + \ 45 | 'ap' +str(Decimal(case5_exp[i, 1]).quantize(Decimal("0.01"), rounding = "ROUND_HALF_UP")) + 'model.pkl' 46 | model = torch.load(tempstr) 47 | fi = 1.0 / (1.0 + torch.exp(4.0 - 4.0 * model.forward(w.t()))) 48 | #'1.5','1.0','1.0' are the adjustable weight to punish the result 49 | likelihood = likelihood + 1.5 / case5_exp.shape[0] * (1.0 * case5_exp[i, 2] * torch.log(fi) + 1.0 * (1 - case5_exp[i, 2]) * torch.log(1 - fi)) 50 | 51 | posterior = -likelihood 52 | return posterior , prior 53 | 54 | optimizer = torch.optim.Adagrad([x,] , lr = 0.002) 55 | 56 | era = 500 57 | allx = x.detach().numpy() 58 | t1, t2 = likelihoodF(x) 59 | allposterior = t1.detach().numpy() 60 | allprior = t2.detach().numpy() 61 | for step in range(era): 62 | 
posterior,prior = likelihoodF(x) 63 | optimizer.zero_grad() 64 | posterior.backward(retain_graph = True) 65 | optimizer.step() 66 | allx = np.hstack((allx, x.detach().numpy())) 67 | allposterior = np.hstack((allposterior, posterior.detach().numpy())) 68 | allprior = np.hstack((allprior, prior.detach().numpy())) 69 | if step % 50 == 0: 70 | print ('step : {}, w1 = {}, w2 = {}, w3 = {}, w4 = {}, w5 = {}, w6 = {}, w7 = {}, w8 = {}, posterior = {}'. 71 | format(step, 72 | Decimal(x.tolist()[0][0]).quantize(Decimal("0.001"), rounding = "ROUND_HALF_UP"), 73 | Decimal(x.tolist()[1][0]).quantize(Decimal("0.001"), rounding = "ROUND_HALF_UP"), 74 | Decimal(x.tolist()[2][0]).quantize(Decimal("0.001"), rounding = "ROUND_HALF_UP"), 75 | Decimal(x.tolist()[3][0]).quantize(Decimal("0.001"), rounding = "ROUND_HALF_UP"), 76 | Decimal(x.tolist()[4][0]).quantize(Decimal("0.001"), rounding = "ROUND_HALF_UP"), 77 | Decimal(x.tolist()[5][0]).quantize(Decimal("0.001"), rounding = "ROUND_HALF_UP"), 78 | Decimal(x.tolist()[6][0]).quantize(Decimal("0.001"), rounding = "ROUND_HALF_UP"), 79 | Decimal(x.tolist()[7][0]).quantize(Decimal("0.001"), rounding = "ROUND_HALF_UP"), 80 | Decimal(posterior.tolist()[0][0]).quantize(Decimal("0.001"), rounding = "ROUND_HALF_UP"))) 81 | 82 | '''The covariance matrix is calculated by Laplace approximation''' 83 | xt = torch.tensor(x.detach().numpy(), requires_grad = True) 84 | F, _ = likelihoodF(xt) 85 | # Keep the graph for calculating the second derivative 86 | dydx = torch.autograd.grad(F, xt, create_graph=True, retain_graph=True) 87 | 88 | d2ydx2 = torch.tensor([]) 89 | for anygrad in dydx[0]: 90 | d2ydx2 = torch.cat((d2ydx2, torch.autograd.grad(anygrad, xt, retain_graph=True)[0]), 1) 91 | 92 | d2ydx2 = d2ydx2.detach().numpy() 93 | # The covariance matrix is obtained by Laplace approximation 94 | Cov = np.linalg.inv(d2ydx2) 95 | variance = [] 96 | for i in range(8): 97 | variance = np.hstack((variance, Cov[i, i])) 98 | 99 | 100 | xx = x.detach().numpy() 101 | 102 | sio.savemat('EssentialData/posteriorCovConvergence.mat', {'parameters': allx, 103 | 'posterior' : allposterior}) 104 | 105 | sio.savemat('EssentialData/posteriorCov.mat', {'data': Cov}) 106 | sio.savemat('EssentialData/posteriorMean.mat', {'data': xx}) 107 | 108 | print("step3 is over, please run the step4.1") -------------------------------------------------------------------------------- /FDM_function.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Feb 1 2021 4 | 5 | This code is part of the supplement materials of the submmited manuscript: 6 | 'Physics-informed Bayesian Inference for Milling Stability Analysis'. 
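FDM(purpose, w_x_in, w_y_in, c_x_in, c_y_in, g_x_in, g_y_in, Kt_in, Kn_in)
sweeps the (spindle speed, axial depth) grid selected by `purpose`
('SampleForAgent' or 'SampleAtAllGridPoint') and returns the spindle-speed
grid, the depth grid, the maximum spectral radius at each grid point, and
the same results flattened into rows of [ss, ap, spectral radius].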
7 | 8 | 9 | """ 10 | import numpy as np 11 | import math 12 | import matplotlib.pyplot as plt 13 | from scipy.linalg import expm 14 | from matplotlib import colors 15 | import pandas as pd 16 | 17 | def FDM(purpose, w_x_in, w_y_in, c_x_in, c_y_in, g_x_in, g_y_in, Kt_in, Kn_in): #Input: Model parameters 18 | 19 | N = 2 # Blade number 20 | Kt = Kt_in * 1e8 # Tangential cutting force coefficient (N/m^2) 21 | Kn = Kn_in * 1e8 # Normal cutting force coefficient (N/m^2) 22 | 23 | '''Tool modal parameters''' 24 | w_x = w_x_in * 2 * np.pi # The natural frequency in the X direction, the angular frequency, (rad/s) 25 | c_x = c_x_in # The damping ratio in the X direction (1) 26 | k_x = g_x_in * 1e6 # Modal stiffness in the X direction (N/m) 27 | m_x = k_x / (w_x ** 2) # X direction modal mass (kg), m_t = stiff/(w^2) 28 | 29 | w_y = w_y_in * 2 * np.pi # The natural frequency in the Y direction, the angular frequency, (rad/s) 30 | c_y = c_y_in # The damping ratio in the Y direction (1) 31 | k_y = g_y_in * 1e6 # Modal stiffness in the Y direction (N/m) 32 | m_y = k_y / (w_y ** 2) # Y direction modal mass (kg) m_t = stiff/(w^2) 33 | 34 | '''Discrete parameter setting''' 35 | m = 20 # Number of discrete units per cutting cycle 36 | D = np.eye(2 * m + 4, k=-2) # The simplified dimension of the 2-DOF transfer matrix is [2*m+4, 2*m+4] 37 | D[:, 0:4] = 0 38 | D[4, 0] = 1 39 | D[5, 1] = 1 40 | 41 | '''Information about Angle of entry and Angle of exit''' 42 | aD = 0.5 # The ratio of cutting width to tool diameter, '1' denotes full knife 43 | up_or_down = -1 # '1' denotes up milling, '-1' denote down milling 44 | if up_or_down == 1: 45 | phi_start = 0 # Angle of entry 46 | phi_exit = math.acos(1 - 2 * aD) # Angle of exit 47 | if up_or_down == -1: 48 | phi_start = math.acos(2 * aD - 1) # Angle of entry 49 | phi_exit = np.pi # Angle of exit 50 | 51 | '''In order to optimize, the data of the training agent model needs to be sampled under the speed and cutting depth of the experimental points''' 52 | if purpose == 'SampleForAgent': 53 | ap_step = 26 # Discrete steps of cutting depth 26*65 54 | ss_step = 84 # Speed discrete steps 55 | ap_start = 0.25e-3 # Start point of cutting depth 56 | ap_end = 3.5e-3 # End point of cutting depth 57 | ss_start = 4.8e3 # Starting point of rotation 58 | ss_end = 13.2e3 # End point of cutting rotation 59 | '''Sample for lobes diagram''' 60 | if purpose == 'SampleAtAllGridPoint': 61 | ap_step = 50 62 | ss_step = 100 63 | ap_start = 0e-3 64 | ap_end = 4e-3 65 | ss_start = 4.8e3 66 | ss_end = 13.2e3 67 | 68 | '''A two-degree-of-freedom modal parameter matrix is constructed''' 69 | # Mq.. + Cq. 
+ Kq = F 70 | M = np.matrix([[m_x, 0], 71 | [0, m_y]]) 72 | C = np.matrix([[2 * m_x * c_x * w_x, 0], 73 | [0, 2 * m_y * c_y * w_y]]) 74 | K = np.matrix([[m_x * w_x ** 2, 0], 75 | [0, m_y * w_y ** 2]]) 76 | 77 | '''Discrete cutting force''' 78 | h_xx = np.zeros(m + 1) 79 | h_yy = np.zeros(m + 1) 80 | h_yx = np.zeros(m + 1) 81 | h_xy = np.zeros(m + 1) 82 | for i in range(m + 1): 83 | delta_t = 2 * np.pi / N / m 84 | for j in range(N): 85 | phi = i * delta_t + j * 2 * np.pi / N 86 | if (phi >= phi_start) * (phi <= phi_exit): 87 | g = 1 # In the cutting zone 88 | else: 89 | g = 0 # Not in the cutting zone 90 | h_xx[i] = h_xx[i] + g * (Kt * math.cos(phi) + Kn * math.sin(phi)) * math.sin(phi) 91 | h_xy[i] = h_xy[i] + g * (Kt * math.cos(phi) + Kn * math.sin(phi)) * math.cos(phi) 92 | h_yx[i] = h_yx[i] + g * (-Kt * math.sin(phi) + Kn * math.cos(phi)) * math.sin(phi) 93 | h_yy[i] = h_yy[i] + g * (-Kt * math.sin(phi) + Kn * math.cos(phi)) * math.cos(phi) 94 | 95 | '''Construct the coefficient matrix A for the equation of state''' 96 | A0_11 = -M.I @ C / 2 97 | A0_12 = M.I 98 | A0_21 = C @ M.I @ C / 4 - K 99 | A0_22 = -C @ M.I / 2 100 | A0_11_12 = np.hstack((A0_11, A0_12)) 101 | A0_21_22 = np.hstack((A0_21, A0_22)) 102 | A0 = np.vstack((A0_11_12, A0_21_22)) 103 | I = np.eye(len(A0)) 104 | invA0 = A0.I 105 | 106 | data = [] 107 | matrix_spindle_speed = np.zeros((ss_step + 1, ap_step + 1)) 108 | matrix_axis_depth = np.zeros((ss_step + 1, ap_step + 1)) 109 | matrix_eigenvalues = np.zeros((ss_step + 1, ap_step + 1)) 110 | for x in range(ss_step + 1): 111 | ss = ss_start + x * (ss_end - ss_start) / ss_step 112 | tau = 60 / ss / N 113 | dt = tau / m 114 | 115 | Fi0 = expm(A0 * dt) 116 | Fi1 = invA0 @ (Fi0 - I) 117 | Fi2 = invA0 @ (Fi0 * dt - Fi1) 118 | Fi3 = invA0 @ (Fi0 * dt * dt - 2 * Fi2) 119 | 120 | for y in range(ap_step + 1): 121 | 122 | ap = ap_start + y * (ap_end - ap_start) / ap_step 123 | Fi = np.eye(2 * m + 4) 124 | for i in range(m): 125 | A0k = np.matrix([[0, 0, 0, 0], 126 | [0, 0, 0, 0], 127 | [-ap * h_xx[i + 1], -ap * h_xy[i + 1], 0, 0], 128 | [-ap * h_yx[i + 1], -ap * h_yy[i + 1], 0, 0]]) 129 | A1k = np.matrix([[0, 0, 0, 0], 130 | [0, 0, 0, 0], 131 | [ap * (h_xx[i + 1] - h_xx[i]) / dt, ap * (h_xy[i + 1] - h_xy[i]) / dt, 0, 0], 132 | [ap * (h_yx[i + 1] - h_yx[i]) / dt, ap * (h_yy[i + 1] - h_yy[i]) / dt, 0, 0]]) 133 | F01 = Fi2 @ A0k / dt + Fi3 @ A1k / dt 134 | Fkp1 = (Fi1 - Fi2 / dt) @ A0k + (Fi2 - Fi3 / dt) @ A1k 135 | invOfImFkp1 = (I - Fkp1).I 136 | D[0:4, 0:4] = invOfImFkp1 @ (Fi0 + F01) 137 | D[0:4, 2 * m: 2 * m + 2] = -invOfImFkp1 @ Fkp1[0:4, 0:2] 138 | D[0:4, 2 * m + 2: 2 * m + 4] = -invOfImFkp1 @ F01[0:4, 0:2] 139 | Fi = D @ Fi 140 | 141 | print('Spindle Speed: ', x, ' in ', ss_step, 142 | 'Axis depth: ', y, ' in ', ap_step) 143 | 144 | eigenvalues, eigenvectors = np.linalg.eig(Fi) 145 | matrix_spindle_speed[x, y] = ss 146 | matrix_axis_depth[x, y] = ap 147 | matrix_eigenvalues[x, y] = max(abs(eigenvalues)) 148 | data.append([ss, ap, max(abs(eigenvalues))]) 149 | 150 | return matrix_spindle_speed, matrix_axis_depth, matrix_eigenvalues, np.matrix(data) 151 | 152 | '''Draw the lobes diagram''' 153 | if __name__ == '__main__': 154 | Title = 'MTM case' 155 | matrix_spindle_speed, matrix_axis_depth, matrix_eigenvalues, _ = FDM('SampleAtAllGridPoint', 156 | 782.7, 752.8, 157 | 0.0184, 0.0186, 158 | 6.5616, 4.8852, 159 | 10.95, 1.76) 160 | fig, ax = plt.subplots() 161 | cs = ax.contourf(matrix_spindle_speed, 162 | matrix_axis_depth * 1000, 163 | matrix_eigenvalues, 164 | np.linspace(0, 10, 
100), 165 | norm=colors.LogNorm()) 166 | ax.contour(matrix_spindle_speed, matrix_axis_depth * 1000, matrix_eigenvalues, [1, 10]) 167 | 168 | fontfamily = 'NSimSun' 169 | font = {'family': fontfamily, 170 | 'size': 12, 171 | 'weight': 23} 172 | 173 | caseob_exp = np.array(pd.read_csv('EssentialData/MTM_newCase.csv', sep=',')) 174 | markers = ['o', 'x', 'o'] 175 | colors = ['y', 'r', 'b'] 176 | for i in range(caseob_exp.shape[0]): 177 | plt.scatter(caseob_exp[i, 0] * 10000, caseob_exp[i, 1], c=colors[int(caseob_exp[i, 2])], 178 | marker=markers[int(caseob_exp[i, 2])], s=30) 179 | 180 | ax.set_xlabel('Spindle speed [rev/min]', fontproperties=fontfamily, size=12) 181 | ax.set_ylabel('Axis depth [mm]', fontproperties=fontfamily, size=12) 182 | plt.yticks(fontproperties=fontfamily, size=12) 183 | plt.xticks(fontproperties=fontfamily, size=12) 184 | ax.set_title(Title, fontproperties=fontfamily, size=12) 185 | plt.legend(prop=font) 186 | plt.tight_layout() 187 | 188 | ax = plt.gca() 189 | ax.xaxis.get_major_formatter().set_powerlimits((0, 1)) 190 | ax.yaxis.get_major_formatter().set_powerlimits((0, 1)) 191 | plt.legend(prop=font) 192 | plt.show() --------------------------------------------------------------------------------