├── README.md
├── genSample.py
├── gmm.data
├── gmm.py
├── main.py
└── sample.data

/README.md:
--------------------------------------------------------------------------------
1 | # gmm-em-clustering
2 | An EM-algorithm implementation of Gaussian mixture model (GMM) clustering.
3 | 
4 | # Related Article
5 | [A Python Implementation of the EM Algorithm for Gaussian Mixture Models](http://www.codebelief.com/article/2017/11/gmm-em-algorithm-implementation-by-python/)
6 | 
7 | # Test Results
8 | ![](http://static.codebelief.com/2017/11/24/gmm.png)
9 | 
--------------------------------------------------------------------------------
/genSample.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | 
4 | cov1 = np.mat("0.3 0;0 0.1")
5 | cov2 = np.mat("0.2 0;0 0.3")
6 | mu1 = np.array([0, 1])
7 | mu2 = np.array([2, 1])
8 | 
9 | sample = np.zeros((100, 2))
10 | sample[:30, :] = np.random.multivariate_normal(mean=mu1, cov=cov1, size=30)
11 | sample[30:, :] = np.random.multivariate_normal(mean=mu2, cov=cov2, size=70)
12 | np.savetxt("sample.data", sample)
13 | 
14 | plt.plot(sample[:30, 0], sample[:30, 1], "bo")
15 | plt.plot(sample[30:, 0], sample[30:, 1], "rs")
16 | plt.show()
17 | 
--------------------------------------------------------------------------------
/gmm.data:
--------------------------------------------------------------------------------
1 | 3.600000 79.000000
2 | 1.800000 54.000000
3 | 3.333000 74.000000
4 | 2.283000 62.000000
5 | 4.533000 85.000000
6 | 2.883000 55.000000
7 | 4.700000 88.000000
8 | 3.600000 85.000000
9 | 1.950000 51.000000
10 | 4.350000 85.000000
11 | 1.833000 54.000000
12 | 3.917000 84.000000
13 | 4.200000 78.000000
14 | 1.750000 47.000000
15 | 4.700000 83.000000
16 | 2.167000 52.000000
17 | 1.750000 62.000000
18 | 4.800000 84.000000
19 | 1.600000 52.000000
20 | 4.250000 79.000000
21 | 1.800000 51.000000
22 | 1.750000 47.000000
23 | 3.450000 78.000000
24 | 3.067000 69.000000
25 | 4.533000 74.000000
26 | 3.600000 83.000000
27 | 1.967000 55.000000
28 | 4.083000 76.000000
29 | 3.850000 78.000000
30 | 4.433000 79.000000
31 | 4.300000 73.000000
32 | 4.467000 77.000000
33 | 3.367000 66.000000
34 | 4.033000 80.000000
35 | 3.833000 74.000000
36 | 2.017000 52.000000
37 | 1.867000 48.000000
38 | 4.833000 80.000000
39 | 1.833000 59.000000
40 | 4.783000 90.000000
41 | 4.350000 80.000000
42 | 1.883000 58.000000
43 | 4.567000 84.000000
44 | 1.750000 58.000000
45 | 4.533000 73.000000
46 | 3.317000 83.000000
47 | 3.833000 64.000000
48 | 2.100000 53.000000
49 | 4.633000 82.000000
50 | 2.000000 59.000000
51 | 4.800000 75.000000
52 | 4.716000 90.000000
53 | 1.833000 54.000000
54 | 4.833000 80.000000
55 | 1.733000 54.000000
56 | 4.883000 83.000000
57 | 3.717000 71.000000
58 | 1.667000 64.000000
59 | 4.567000 77.000000
60 | 4.317000 81.000000
61 | 2.233000 59.000000
62 | 4.500000 84.000000
63 | 1.750000 48.000000
64 | 4.800000 82.000000
65 | 1.817000 60.000000
66 | 4.400000 92.000000
67 | 4.167000 78.000000
68 | 4.700000 78.000000
69 | 2.067000 65.000000
70 | 4.700000 73.000000
71 | 4.033000 82.000000
72 | 1.967000 56.000000
73 | 4.500000 79.000000
74 | 4.000000 71.000000
75 | 1.983000 62.000000
76 | 5.067000 76.000000
77 | 2.017000 60.000000
78 | 4.567000 78.000000
79 | 3.883000 76.000000
80 | 3.600000 83.000000
81 | 4.133000 75.000000
82 | 4.333000 82.000000
83 | 4.100000 70.000000
84 | 2.633000 65.000000
85 | 4.067000 73.000000
86 | 4.933000 88.000000
87 | 3.950000 76.000000
88 | 4.517000 80.000000
89 | 2.167000 48.000000
90 | 4.000000 86.000000
91 | 2.200000 60.000000
92 | 4.333000 90.000000
93 | 1.867000
50.000000 94 | 4.817000 78.000000 95 | 1.833000 63.000000 96 | 4.300000 72.000000 97 | 4.667000 84.000000 98 | 3.750000 75.000000 99 | 1.867000 51.000000 100 | 4.900000 82.000000 101 | 2.483000 62.000000 102 | 4.367000 88.000000 103 | 2.100000 49.000000 104 | 4.500000 83.000000 105 | 4.050000 81.000000 106 | 1.867000 47.000000 107 | 4.700000 84.000000 108 | 1.783000 52.000000 109 | 4.850000 86.000000 110 | 3.683000 81.000000 111 | 4.733000 75.000000 112 | 2.300000 59.000000 113 | 4.900000 89.000000 114 | 4.417000 79.000000 115 | 1.700000 59.000000 116 | 4.633000 81.000000 117 | 2.317000 50.000000 118 | 4.600000 85.000000 119 | 1.817000 59.000000 120 | 4.417000 87.000000 121 | 2.617000 53.000000 122 | 4.067000 69.000000 123 | 4.250000 77.000000 124 | 1.967000 56.000000 125 | 4.600000 88.000000 126 | 3.767000 81.000000 127 | 1.917000 45.000000 128 | 4.500000 82.000000 129 | 2.267000 55.000000 130 | 4.650000 90.000000 131 | 1.867000 45.000000 132 | 4.167000 83.000000 133 | 2.800000 56.000000 134 | 4.333000 89.000000 135 | 1.833000 46.000000 136 | 4.383000 82.000000 137 | 1.883000 51.000000 138 | 4.933000 86.000000 139 | 2.033000 53.000000 140 | 3.733000 79.000000 141 | 4.233000 81.000000 142 | 2.233000 60.000000 143 | 4.533000 82.000000 144 | 4.817000 77.000000 145 | 4.333000 76.000000 146 | 1.983000 59.000000 147 | 4.633000 80.000000 148 | 2.017000 49.000000 149 | 5.100000 96.000000 150 | 1.800000 53.000000 151 | 5.033000 77.000000 152 | 4.000000 77.000000 153 | 2.400000 65.000000 154 | 4.600000 81.000000 155 | 3.567000 71.000000 156 | 4.000000 70.000000 157 | 4.500000 81.000000 158 | 4.083000 93.000000 159 | 1.800000 53.000000 160 | 3.967000 89.000000 161 | 2.200000 45.000000 162 | 4.150000 86.000000 163 | 2.000000 58.000000 164 | 3.833000 78.000000 165 | 3.500000 66.000000 166 | 4.583000 76.000000 167 | 2.367000 63.000000 168 | 5.000000 88.000000 169 | 1.933000 52.000000 170 | 4.617000 93.000000 171 | 1.917000 49.000000 172 | 2.083000 57.000000 173 | 4.583000 77.000000 174 | 3.333000 68.000000 175 | 4.167000 81.000000 176 | 4.333000 81.000000 177 | 4.500000 73.000000 178 | 2.417000 50.000000 179 | 4.000000 85.000000 180 | 4.167000 74.000000 181 | 1.883000 55.000000 182 | 4.583000 77.000000 183 | 4.250000 83.000000 184 | 3.767000 83.000000 185 | 2.033000 51.000000 186 | 4.433000 78.000000 187 | 4.083000 84.000000 188 | 1.833000 46.000000 189 | 4.417000 83.000000 190 | 2.183000 55.000000 191 | 4.800000 81.000000 192 | 1.833000 57.000000 193 | 4.800000 76.000000 194 | 4.100000 84.000000 195 | 3.966000 77.000000 196 | 4.233000 81.000000 197 | 3.500000 87.000000 198 | 4.366000 77.000000 199 | 2.250000 51.000000 200 | 4.667000 78.000000 201 | 2.100000 60.000000 202 | 4.350000 82.000000 203 | 4.133000 91.000000 204 | 1.867000 53.000000 205 | 4.600000 78.000000 206 | 1.783000 46.000000 207 | 4.367000 77.000000 208 | 3.850000 84.000000 209 | 1.933000 49.000000 210 | 4.500000 83.000000 211 | 2.383000 71.000000 212 | 4.700000 80.000000 213 | 1.867000 49.000000 214 | 3.833000 75.000000 215 | 3.417000 64.000000 216 | 4.233000 76.000000 217 | 2.400000 53.000000 218 | 4.800000 94.000000 219 | 2.000000 55.000000 220 | 4.150000 76.000000 221 | 1.867000 50.000000 222 | 4.267000 82.000000 223 | 1.750000 54.000000 224 | 4.483000 75.000000 225 | 4.000000 78.000000 226 | 4.117000 79.000000 227 | 4.083000 78.000000 228 | 4.267000 78.000000 229 | 3.917000 70.000000 230 | 4.550000 79.000000 231 | 4.083000 70.000000 232 | 2.417000 54.000000 233 | 4.183000 86.000000 234 | 2.217000 50.000000 235 | 4.450000 90.000000 
236 | 1.883000 54.000000
237 | 1.850000 54.000000
238 | 4.283000 77.000000
239 | 3.950000 79.000000
240 | 2.333000 64.000000
241 | 4.150000 75.000000
242 | 2.350000 47.000000
243 | 4.933000 86.000000
244 | 2.900000 63.000000
245 | 4.583000 85.000000
246 | 3.833000 82.000000
247 | 2.083000 57.000000
248 | 4.367000 82.000000
249 | 2.133000 67.000000
250 | 4.350000 74.000000
251 | 2.200000 54.000000
252 | 4.450000 83.000000
253 | 3.567000 73.000000
254 | 4.500000 73.000000
255 | 4.150000 88.000000
256 | 3.817000 80.000000
257 | 3.917000 71.000000
258 | 4.450000 83.000000
259 | 2.000000 56.000000
260 | 4.283000 79.000000
261 | 4.767000 78.000000
262 | 4.533000 84.000000
263 | 1.850000 58.000000
264 | 4.250000 83.000000
265 | 1.983000 43.000000
266 | 2.250000 60.000000
267 | 4.750000 75.000000
268 | 4.117000 81.000000
269 | 2.150000 46.000000
270 | 4.417000 90.000000
271 | 1.817000 46.000000
272 | 4.467000 74.000000
273 | 
--------------------------------------------------------------------------------
/gmm.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # ----------------------------------------------------
3 | # Copyright (c) 2017, Wray Zheng. All Rights Reserved.
4 | # Distributed under the BSD License.
5 | # ----------------------------------------------------
6 | 
7 | import numpy as np
8 | import matplotlib.pyplot as plt
9 | from scipy.stats import multivariate_normal
10 | 
11 | DEBUG = True
12 | 
13 | ######################################################
14 | # Debug printing helper
15 | # Output is controlled by the global DEBUG flag
16 | ######################################################
17 | def debug(*args, **kwargs):
18 |     global DEBUG
19 |     if DEBUG:
20 |         print(*args, **kwargs)
21 | 
22 | 
23 | ######################################################
24 | # Gaussian density function of the k-th component
25 | # Returns the density of every sample under this component
26 | # as a 1-D array
27 | ######################################################
28 | def phi(Y, mu_k, cov_k):
29 |     norm = multivariate_normal(mean=mu_k, cov=cov_k)
30 |     return norm.pdf(Y)
31 | 
32 | 
33 | ######################################################
34 | # E-step: compute each component's responsibility for every sample
35 | # Y is the sample matrix, one sample per row (a column vector when there is a single feature)
36 | # mu is the array of means, one row of per-feature means per component
37 | # cov is the array of covariance matrices, alpha is the array of mixture weights
38 | ######################################################
39 | def getExpectation(Y, mu, cov, alpha):
40 |     # Number of samples
41 |     N = Y.shape[0]
42 |     # Number of components
43 |     K = alpha.shape[0]
44 | 
45 |     # A single Gaussian component or a single sample would make the return types inconsistent,
46 |     # so both the number of samples and the number of components must be greater than 1
47 |     assert N > 1, "There must be more than one sample!"
48 |     assert K > 1, "There must be more than one gaussian model!"
49 | 
50 |     # Responsibility matrix: rows are samples, columns are components
51 |     gamma = np.mat(np.zeros((N, K)))
52 | 
53 |     # Probability of each sample under each component: rows are samples, columns are components
54 |     prob = np.zeros((N, K))
55 |     for k in range(K):
56 |         prob[:, k] = phi(Y, mu[k], cov[k])
57 |     prob = np.mat(prob)
58 | 
59 |     # Responsibility of each component for each sample
60 |     for k in range(K):
61 |         gamma[:, k] = alpha[k] * prob[:, k]
62 |     for i in range(N):
63 |         gamma[i, :] /= np.sum(gamma[i, :])
64 |     return gamma
65 | 
66 | 
67 | ######################################################
68 | # M-step: update the model parameters
69 | # Y is the sample matrix, gamma is the responsibility matrix
70 | ######################################################
71 | def maximize(Y, gamma):
72 |     # Number of samples and number of features
73 |     N, D = Y.shape
74 |     # Number of components
75 |     K = gamma.shape[1]
76 | 
77 |     # Initialize the parameters
78 |     mu = np.zeros((K, D))
79 |     cov = []
80 |     alpha = np.zeros(K)
81 | 
82 |     # Update the parameters of each component
83 |     for k in range(K):
84 |         # Sum of the k-th component's responsibilities over all samples
85 |         Nk = np.sum(gamma[:, k])
86 |         # Update mu:
87 |         # responsibility-weighted mean of each feature
88 |         mu[k, :] = np.sum(np.multiply(Y, gamma[:, k]), axis=0) / Nk
89 |         # Update cov
90 |         cov_k = (Y - mu[k]).T * np.multiply((Y - mu[k]), gamma[:, k]) / Nk
91 |         cov.append(cov_k)
92 |         # Update alpha
93 |         alpha[k] = Nk / N
94 |     cov = np.array(cov)
95 |     return mu, cov, alpha
96 | 
97 | 
98 | ######################################################
99 | # Data preprocessing:
100 | # scale every feature to the range [0, 1]
101 | ######################################################
102 | def scale_data(Y):
103 |     # Scale each feature dimension separately
104 |     for i in range(Y.shape[1]):
105 |         max_ = Y[:, i].max()
106 |         min_ = Y[:, i].min()
107 |         Y[:, i] = (Y[:, i] - min_) / (max_ - min_)
108 |     debug("Data scaled.")
109 |     return Y
110 | 
111 | 
112 | ######################################################
113 | # Initialize the model parameters
114 | # shape is a pair describing the data: (number of samples, number of features)
115 | # K is the number of components
116 | ######################################################
117 | def init_params(shape, K):
118 |     N, D = shape
119 |     mu = np.random.rand(K, D)
120 |     cov = np.array([np.eye(D)] * K)
121 |     alpha = np.array([1.0 / K] * K)
122 |     debug("Parameters initialized.")
123 |     debug("mu:", mu, "cov:", cov, "alpha:", alpha, sep="\n")
124 |     return mu, cov, alpha
125 | 
126 | 
127 | ######################################################
128 | # EM algorithm for the Gaussian mixture model
129 | # Given the sample matrix Y, estimate the model parameters
130 | # K is the number of components
131 | # times is the number of iterations
132 | ######################################################
133 | def GMM_EM(Y, K, times):
134 |     Y = scale_data(Y)
135 |     mu, cov, alpha = init_params(Y.shape, K)
136 |     for i in range(times):
137 |         gamma = getExpectation(Y, mu, cov, alpha)
138 |         mu, cov, alpha = maximize(Y, gamma)
139 |     debug("{sep} Result {sep}".format(sep="-" * 20))
140 |     debug("mu:", mu, "cov:", cov, "alpha:", alpha, sep="\n")
141 |     return mu, cov, alpha
142 | 
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # ----------------------------------------------------
3 | # Copyright (c) 2017, Wray Zheng. All Rights Reserved.
4 | # Distributed under the BSD License.
5 | # ---------------------------------------------------- 6 | 7 | import matplotlib.pyplot as plt 8 | from gmm import * 9 | 10 | # 设置调试模式 11 | DEBUG = True 12 | 13 | # 载入数据 14 | Y = np.loadtxt("gmm.data") 15 | matY = np.matrix(Y, copy=True) 16 | 17 | # 模型个数,即聚类的类别个数 18 | K = 2 19 | 20 | # 计算 GMM 模型参数 21 | mu, cov, alpha = GMM_EM(matY, K, 100) 22 | 23 | # 根据 GMM 模型,对样本数据进行聚类,一个模型对应一个类别 24 | N = Y.shape[0] 25 | # 求当前模型参数下,各模型对样本的响应度矩阵 26 | gamma = getExpectation(matY, mu, cov, alpha) 27 | # 对每个样本,求响应度最大的模型下标,作为其类别标识 28 | category = gamma.argmax(axis=1).flatten().tolist()[0] 29 | # 将每个样本放入对应类别的列表中 30 | class1 = np.array([Y[i] for i in range(N) if category[i] == 0]) 31 | class2 = np.array([Y[i] for i in range(N) if category[i] == 1]) 32 | 33 | # 绘制聚类结果 34 | plt.plot(class1[:, 0], class1[:, 1], 'rs', label="class1") 35 | plt.plot(class2[:, 0], class2[:, 1], 'bo', label="class2") 36 | plt.legend(loc="best") 37 | plt.title("GMM Clustering By EM Algorithm") 38 | plt.show() 39 | -------------------------------------------------------------------------------- /sample.data: -------------------------------------------------------------------------------- 1 | -7.068508871962037032e-01 8.704213984933941717e-01 2 | -7.051058525701757451e-02 8.753081466186661830e-01 3 | 1.197910801263905589e-01 1.067935477381781739e+00 4 | 2.984001879249929545e-01 1.006928438162818296e+00 5 | -5.525917366188564106e-01 1.452949538697631438e+00 6 | -5.275893933338875463e-01 1.177243954037594076e+00 7 | 2.289028031679322117e-01 6.414516006606454379e-01 8 | 3.121322023051925285e-02 1.752059772540235372e+00 9 | 3.381813993563539400e-01 1.016754493153939620e+00 10 | -6.984909362582623071e-01 1.422804634517532474e+00 11 | -5.989854011795203714e-01 1.335462563916165468e-01 12 | -8.792931702476153299e-01 1.078729838707628952e+00 13 | 1.904315971635754112e-01 1.049917360685198142e+00 14 | -1.792953829925339193e-01 5.830477625580643419e-01 15 | 7.782437352984861514e-03 1.094613387298581708e+00 16 | -1.827027626992727971e-01 1.448073736323527427e+00 17 | 4.587286850186203524e-01 9.036055121097352760e-01 18 | -5.375682398897051184e-02 1.218083240105583887e+00 19 | 4.527886506719832060e-01 1.063925081016519725e+00 20 | 3.506623854928935802e-01 3.309308921076906662e-01 21 | -1.730049519794575552e+00 9.756744278262029502e-01 22 | 3.034214164220715160e-02 1.786080280256484576e+00 23 | -2.282858183357621140e-01 9.223754793328212687e-01 24 | -3.326906312754344119e-01 6.918287219624045248e-01 25 | 9.509185559965710466e-01 1.160910856139863556e+00 26 | -2.208011034617315682e-01 1.359519182015863414e+00 27 | 5.014308343418812930e-01 8.616809371827346409e-01 28 | 4.062119791802221158e-01 9.978441261611491475e-01 29 | -3.261808473677009212e-01 1.253851956706640625e+00 30 | -9.384122266458547190e-02 1.087380584398571326e+00 31 | 2.831734380739727719e+00 4.200087358898976220e-01 32 | 2.392848910128904993e+00 6.857786098881671899e-01 33 | 1.959663826594718605e+00 1.190996879860474866e+00 34 | 1.970967483724908709e+00 5.798748135277649318e-01 35 | 2.532190890158702246e+00 1.866146517258099546e+00 36 | 1.637845456716563675e+00 6.377873337528057185e-01 37 | 2.411315668442148397e+00 -2.475776840458854267e-01 38 | 1.439002569761470784e+00 9.368488018010180385e-01 39 | 2.358087679214174948e+00 1.223425832600814500e+00 40 | 2.111092573033652720e+00 7.867535330511843394e-01 41 | 1.728202078440840506e+00 1.152215345125059187e+00 42 | 1.904640852085592861e+00 1.008566583866177258e+00 43 | 2.067521842423878375e+00 1.527922351327991812e+00 44 | 
1.660784931656968944e+00 8.395967994740269891e-01 45 | 1.072524732757967669e+00 6.351047189763145973e-01 46 | 1.281055239263188206e+00 1.597868318722888592e+00 47 | 2.305335222877380907e+00 2.841155758641304985e+00 48 | 1.384387452767090965e+00 9.532042487190902635e-01 49 | 1.804966021245087315e+00 1.368415649573023085e+00 50 | 2.077906347542996190e+00 1.717921751565865129e-01 51 | 2.279244234819231885e+00 1.582067276809969059e+00 52 | 1.858904948631792564e+00 1.620160253711492526e+00 53 | 2.286144450629103098e+00 1.344314754198359996e+00 54 | 1.494054823017956224e+00 6.398421789210342325e-01 55 | 1.538074461123730030e+00 1.007109380215525318e+00 56 | 1.373919437671518473e+00 2.162545560277426837e+00 57 | 1.739143580085863672e+00 1.388985981547359305e+00 58 | 1.670816903781698226e+00 7.965758876659547738e-01 59 | 1.779317953674169672e+00 -5.343512181678944373e-02 60 | 2.287867041326709927e+00 1.233466798487238503e+00 61 | 2.281895870137802262e+00 1.527882261921060358e+00 62 | 2.502896659097889831e+00 1.015893748900247306e+00 63 | 2.280744329081264343e+00 8.214490826679817781e-01 64 | 2.282899347855093186e+00 1.372847051372260596e+00 65 | 2.149837998496872071e+00 1.323841565813230980e+00 66 | 2.455665918748586307e+00 -3.808671528944282958e-01 67 | 2.334322009603155390e+00 9.216128437928703399e-01 68 | 2.095235210199232423e+00 9.497803947286705961e-01 69 | 2.204160338254386620e+00 3.806190411821878117e-01 70 | 1.878675246333841420e+00 1.180168828457327734e+00 71 | 2.004009946888315685e+00 6.396277159333193518e-01 72 | 1.964927164536159898e+00 1.672050593238606497e+00 73 | 2.406702339413223868e+00 6.838656030207713732e-01 74 | 1.787782246327375146e+00 8.758888319788539212e-01 75 | 2.081583222663557997e+00 1.348737068609703771e+00 76 | 2.415455664157045934e+00 1.042962400926586763e+00 77 | 2.357343059546415542e+00 1.004297894587993012e+00 78 | 1.928703731339916905e+00 1.289569436705197969e+00 79 | 2.364034982348631075e+00 9.637983015228904771e-01 80 | 9.076277299739994309e-01 7.046148128668310306e-01 81 | 1.243012188046264122e+00 1.376656364340384187e+00 82 | 2.355059115542109449e+00 1.467798201263690983e+00 83 | 2.116454616334744188e+00 2.276102175094805169e+00 84 | 1.898435445943431832e+00 3.454880206766344219e-01 85 | 1.868793611966695467e+00 1.020289850047294111e+00 86 | 2.307000792071562056e+00 5.165685374093851312e-01 87 | 1.310486999420669374e+00 8.622761698258895047e-01 88 | 2.383734636983776856e+00 7.416780401234546183e-01 89 | 2.640347998228197657e+00 1.911101973987915592e+00 90 | 1.392075116365584897e+00 7.279548047994565119e-01 91 | 1.818266794704792355e+00 1.941464266900982949e+00 92 | 2.150705450888527270e+00 8.450794412236889430e-01 93 | 1.613292951111350515e+00 1.702687167278020386e+00 94 | 2.728004636017816953e+00 -1.930614190935879826e-01 95 | 1.166268729043721031e+00 1.352888013516711396e+00 96 | 2.130808245208590801e+00 1.169413040794330838e+00 97 | 1.695602844750024873e+00 1.440074864639362850e+00 98 | 1.094397437693858333e+00 1.083555686804543949e+00 99 | 1.785766985812844210e+00 1.419783722472153231e-01 100 | 1.935898443684904935e+00 9.941907941449651398e-01 101 | --------------------------------------------------------------------------------
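For a quick end-to-end check on the synthetic two-cluster data above, the routines in gmm.py can be applied to sample.data in the same way main.py handles gmm.data. A minimal sketch, assuming it is run from the repository root so that gmm.py and sample.data are importable/loadable from the current directory:

```python
# -*- coding: utf-8 -*-
# Cluster the synthetic data set produced by genSample.py with the GMM/EM code above.
import numpy as np
import matplotlib.pyplot as plt
from gmm import GMM_EM, getExpectation

Y = np.loadtxt("sample.data")
matY = np.matrix(Y, copy=True)

K = 2                                  # two Gaussian components, matching genSample.py
mu, cov, alpha = GMM_EM(matY, K, 100)  # scales matY in place and runs 100 EM iterations

# Assign each sample to the component with the largest responsibility.
gamma = getExpectation(matY, mu, cov, alpha)
category = np.asarray(gamma.argmax(axis=1)).flatten()

class1 = Y[category == 0]
class2 = Y[category == 1]

plt.plot(class1[:, 0], class1[:, 1], "bo", label="class1")
plt.plot(class2[:, 0], class2[:, 1], "rs", label="class2")
plt.legend(loc="best")
plt.title("GMM Clustering of sample.data")
plt.show()
```

Because init_params draws the initial means at random, the component labels (class1/class2) may swap between runs even though the recovered clusters stay the same.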