├── Chapter02
│   ├── encript_decript_strings.py
│   ├── lagged_fibonacci_algorithm.py
│   ├── learmouth_lewis_generator.py
│   ├── linear_congruential_generator.py
│   ├── random_generation.py
│   ├── random_password_generator.py
│   └── uniformity_test.py
├── Chapter03
│   ├── binomial_distribution.py
│   ├── colosseum.jpg
│   ├── image_augmentation.py
│   ├── normal_distribution.py
│   ├── power_analysis.py
│   └── uniform_distribution.py
├── Chapter04
│   ├── central_limit_theorem.py
│   ├── cross_entropy.py
│   ├── cross_entropy_loss_function.py
│   ├── numerical_integration.py
│   ├── sensitivity_analysis.py
│   └── simulating_pi.py
├── Chapter05
│   ├── schelling_model.py
│   ├── simulating_random_walk.py
│   └── weather_forecasting.py
├── Chapter06
│   ├── bootstrap_estimator.py
│   ├── bootstrap_regression.py
│   ├── jakknife_estimator.py
│   ├── kfold_cross_validation.py
│   └── permutation_test.py
├── Chapter07
│   ├── GradientDescent.py
│   ├── Newton-Raphson.py
│   ├── SciPyOptimize.py
│   ├── gaussian_mixtures.py
│   └── simulated_annealing.py
├── Chapter08
│   ├── cellular_automata.py
│   ├── genetic_algorithm.py
│   └── symbolic_regression.py
├── Chapter09
│   ├── AMZN.csv
│   ├── amazon_stock_montecarlo_simulation.py
│   ├── standard_brownian_motion.py
│   └── value_at_risk.py
├── Chapter10
│   ├── Concrete_data.xlsx
│   ├── airfoil_self_noise.dat
│   ├── airfoil_self_noise.py
│   └── concrete_quality.py
├── Chapter11
│   ├── montecarlo_tasks_scheduling.py
│   ├── tiny_forest-management.py
│   └── tiny_forest_management_modified.py
├── Chapter12
│   ├── UAV_WiFi.xlsx
│   ├── UAV_detector.py
│   ├── fault.dataset.xlsx
│   └── gearbox_fault_diagnosis.py
├── LICENSE
└── README.md

/Chapter02/encript_decript_strings.py:
--------------------------------------------------------------------------------
1 | from cryptography.fernet import Fernet
2 | 
3 | title = "Simulation Modeling with Python"
4 | 
5 | secret_key = Fernet.generate_key()
6 | 
7 | fernet_obj = Fernet(secret_key)
8 | 
9 | enc_title = fernet_obj.encrypt(title.encode())
10 | 
11 | print("My last book title = ", title)
12 | print("Title encrypted = ", enc_title)
13 | 
14 | # decrypt() expects the token bytes; decode the result to get the string back
15 | dec_title = fernet_obj.decrypt(enc_title).decode()
16 | 
17 | print("Title decrypted = ", dec_title)
--------------------------------------------------------------------------------
/Chapter02/lagged_fibonacci_algorithm.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | # additive lagged Fibonacci generator: x_k = (x_{k-1} + x_{k-2}) mod 2**32
3 | x0=1
4 | x1=1
5 | m=2**32
6 | 
7 | for i in range (1,101):
8 |     x= np.mod((x0+x1), m)
9 |     x0=x1
10 |     x1=x
11 |     print(x)
--------------------------------------------------------------------------------
/Chapter02/learmouth_lewis_generator.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | a = 75   # multiplier (the classic Learmonth-Lewis choice is a = 7**5 = 16807)
3 | c = 0
4 | m = 2**(31) -1
5 | x = 0.1
6 | 
7 | for i in range(1,100):
8 |     x= np.mod((a*x+c),m)
9 |     u = x/m
10 |     print(u)
--------------------------------------------------------------------------------
/Chapter02/linear_congruential_generator.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | a = 2
3 | c = 4
4 | m = 5
5 | x = 3
6 | 
7 | for i in range(1,17):
8 |     x= np.mod((a*x+c),m)
9 |     print(x)
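10 | 
11 | # NOTE (added): a minimal sketch of measuring this generator's period; with a
12 | # modulus of m = 5 the state can take at most 5 distinct values, so the
13 | # sequence must cycle quickly (here the states repeat with a short period).
14 | seen = []
15 | x = 3
16 | while x not in seen:
17 |     seen.append(x)
18 |     x = np.mod((a*x+c), m)
19 | print("Period of this LCG = ", len(seen))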
--------------------------------------------------------------------------------
/Chapter02/random_generation.py:
--------------------------------------------------------------------------------
1 | import random
2 | 
3 | for i in range(20):
4 |     print('%05.4f' % random.random(), end=' ')
5 | print()
6 | 
7 | random.seed(1)
8 | 
9 | for i in range(20):
10 |     print('%05.4f' % random.random(), end=' ')
11 | print()
12 | 
13 | for i in range(20):
14 |     print('%6.4f' %random.uniform(1, 100), end=' ')
15 | print()
16 | 
17 | 
18 | for i in range(20):
19 |     print(random.randint(-100, 100), end=' ')
20 | print()
21 | 
22 | for i in range(20):
23 |     print(random.randrange(0, 100,5), end=' ')
24 | print()
25 | 
26 | CitiesList = ['Rome','New York','London','Berlin','Moscow', 'Los Angeles','Paris','Madrid','Tokyo','Toronto']
27 | for i in range(10):
28 |     CitiesItem = random.choice(CitiesList)
29 |     print ("Randomly selected item from Cities list is - ", CitiesItem)
30 | 
31 | DataList = range(10,100,10)
32 | print("Initial Data List = ",DataList)
33 | DataSample = random.sample(DataList,k=5)
34 | print("Sample Data List = ",DataSample)
--------------------------------------------------------------------------------
/Chapter02/random_password_generator.py:
--------------------------------------------------------------------------------
1 | import string
2 | import random
3 | 
4 | 
5 | char_set = list(string.ascii_letters + string.digits + "()!$%^&*@#")
6 | 
7 | password_length = int(input("How long should your password be?: "))
8 | 
9 | random.shuffle(char_set)
10 | 
11 | password = []
12 | for i in range(password_length):
13 |     password.append(random.choice(char_set))
14 | 
15 | random.shuffle(password)
16 | 
17 | print("".join(password))
18 | 
19 | 
--------------------------------------------------------------------------------
/Chapter02/uniformity_test.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | a = 75
3 | c = 0
4 | m = 2**(31) -1
5 | x = 0.1
6 | u=np.array([])
7 | 
8 | for i in range(0,100):
9 |     x= np.mod((a*x+c),m)
10 |     u= np.append(u,x/m)
11 |     print(u[i])
12 | 
13 | N=100
14 | s=20
15 | Ns =N/s
16 | S = np.arange(0, 1, 0.05)
17 | counts = np.empty(S.shape, dtype=int)
18 | V=0
19 | for i in range(0,20):
20 |     counts[i] = len(np.where((u >= S[i]) & (u < S[i]+0.05))[0])
21 |     V=V+(counts[i]-Ns)**2 / Ns
22 | 
23 | print("R = ",counts)
24 | print("V = ", V)
25 | 
26 | import matplotlib.pyplot as plt
27 | Ypos = np.arange(len(counts))
28 | 
29 | plt.bar(Ypos,counts)
30 | plt.show()
--------------------------------------------------------------------------------
/Chapter03/binomial_distribution.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | 
4 | 
5 | N = 1000
6 | n = 10
7 | p = 0.5
8 | 
9 | P1 = np.random.binomial(n,p,N)
10 | 
11 | 
12 | plt.figure()
13 | plt.hist(P1, density=True, alpha=0.8, histtype='bar', color = 'green', ec='black')
14 | plt.show()
15 | 
16 | 
--------------------------------------------------------------------------------
/Chapter03/colosseum.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Hands-On-Simulation-Modeling-with-Python-Second-Edition/799f10caa2702e4074893fd4c5145e9aa42b8719/Chapter03/colosseum.jpg
--------------------------------------------------------------------------------
/Chapter03/image_augmentation.py:
--------------------------------------------------------------------------------
1 | from keras.preprocessing.image import load_img
2 | from keras.preprocessing.image import img_to_array
3 | from keras.preprocessing.image import ImageDataGenerator
4 | 
5 | image_generation = ImageDataGenerator(
6 |     rotation_range=10,
7 |     width_shift_range=0.1,
8 |     height_shift_range=0.1,
9 |     shear_range=0.1,
10 |     zoom_range=0.1,
11 |     horizontal_flip=True,
12 |     fill_mode='nearest')
13 | 
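14 | # NOTE (added): flow() below writes the augmented images into the folder
15 | # given by save_to_dir; create 'AugImage' up front so the first write does
16 | # not fail when the directory is missing.
17 | import os
18 | os.makedirs('AugImage', exist_ok=True)
19 | 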
20 | source_img = load_img('colosseum.jpg')
21 | x = img_to_array(source_img)
22 | x = x.reshape((1,) + x.shape)
23 | 
24 | 
25 | i = 0
26 | for batch in image_generation.flow(x, batch_size=1,
27 |                 save_to_dir='AugImage', save_prefix='new_image', save_format='jpeg'):
28 |     i += 1
29 |     if i > 50:
30 |         break
31 | 
32 | 
--------------------------------------------------------------------------------
/Chapter03/normal_distribution.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | import seaborn as sns
4 | 
5 | mu = 5
6 | sigma =2
7 | 
8 | P1 = np.random.normal(mu, sigma, 1000)
9 | 
10 | mu = 10
11 | sigma =2
12 | 
13 | P2 = np.random.normal(mu, sigma, 1000)
14 | 
15 | mu = 15
16 | sigma =2
17 | 
18 | P3 = np.random.normal(mu, sigma, 1000)
19 | 
20 | mu = 10
21 | sigma =2
22 | 
23 | P4 = np.random.normal(mu, sigma, 1000)
24 | 
25 | mu = 10
26 | sigma =1
27 | 
28 | P5 = np.random.normal(mu, sigma, 1000)
29 | 
30 | mu = 10
31 | sigma =0.5
32 | 
33 | P6 = np.random.normal(mu, sigma, 1000)
34 | 
35 | 
36 | Plot1 = sns.histplot(P1,stat="density", kde=True, color="g")
37 | Plot2 = sns.histplot(P2,stat="density", kde=True, color="b")
38 | Plot3 = sns.histplot(P3,stat="density", kde=True, color="y")
39 | 
40 | plt.figure()
41 | Plot4 = sns.histplot(P4,stat="density", kde=True, color="g")
42 | Plot5 = sns.histplot(P5,stat="density", kde=True, color="b")
43 | Plot6 = sns.histplot(P6,stat="density", kde=True, color="y")
44 | plt.show()
45 | 
--------------------------------------------------------------------------------
/Chapter03/power_analysis.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | import statsmodels.stats.power as ssp
4 | 
5 | 
6 | stat_power = ssp.TTestPower()
7 | sample_size = stat_power.solve_power(effect_size=0.5, nobs = None, alpha=0.05, power=0.8)
8 | print('Sample Size: {:.2f}'.format(sample_size))
9 | 
10 | power = stat_power.solve_power(effect_size = 0.5,nobs=33,
11 |                                alpha = 0.05, power = None)
12 | print('Power = {:.2f}'.format(power))
13 | 
14 | effect_sizes = np.array([0.2, 0.5, 0.8,1])
15 | sample_sizes = np.array(range(5, 500))
16 | 
17 | stat_power.plot_power(dep_var='nobs', nobs=sample_sizes,
18 |                       effect_size=effect_sizes)
19 | plt.xlabel('Sample Size')
20 | plt.ylabel('Power')
21 | plt.show()
--------------------------------------------------------------------------------
/Chapter03/uniform_distribution.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | a=1
4 | b=100
5 | N=100
6 | X1=np.random.uniform(a,b,N)
7 | plt.plot(X1)
8 | plt.show()
9 | plt.figure()
10 | plt.hist(X1, density=True, histtype='stepfilled', alpha=0.2)
11 | plt.show()
12 | 
13 | a=1
14 | b=100
15 | N=10000
16 | X2=np.random.uniform(a,b,N)
17 | 
18 | plt.figure()
19 | plt.plot(X2)
20 | plt.show()
21 | 
22 | plt.figure()
23 | plt.hist(X2, density=True, histtype='stepfilled', alpha=0.2)
24 | plt.show()
25 | 
26 | 
--------------------------------------------------------------------------------
/Chapter04/central_limit_theorem.py:
--------------------------------------------------------------------------------
1 | import random
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | a=1
5 | b=100
6 | N=10000
7 | DataPop=list(np.random.uniform(a,b,N))
8 | plt.hist(DataPop, density=True, histtype='stepfilled', alpha=0.2)
9 | plt.show()
10 | 
11 | SamplesMeans = []
12 | for i in range(0,1000):
13 |     DataExtracted = random.sample(DataPop,k=100)
14 |     DataExtractedMean = np.mean(DataExtracted)
15 |     SamplesMeans.append(DataExtractedMean)
16 | plt.figure()
17 | plt.hist(SamplesMeans, density=True, histtype='stepfilled', alpha=0.2)
18 | plt.show()
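19 | 
20 | # NOTE (added): a minimal numerical check of the theorem; the standard
21 | # deviation of the sample means should be close to the population standard
22 | # deviation divided by sqrt(k), where k = 100 is the sample size used above.
23 | print("Population std = ", np.std(DataPop))
24 | print("Std of sample means = ", np.std(SamplesMeans))
25 | print("Predicted std (pop/sqrt(100)) = ", np.std(DataPop)/np.sqrt(100))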
--------------------------------------------------------------------------------
/Chapter04/cross_entropy.py:
--------------------------------------------------------------------------------
1 | from matplotlib import pyplot
2 | from math import log2
3 | 
4 | events = ['A', 'B', 'C','D']
5 | p = [0.70, 0.05,0.10,0.15]
6 | q = [0.45, 0.10, 0.20,0.25]
7 | print(f'P = {sum(p):.3f}',f'Q = {sum(q):.3f}')
8 | 
9 | pyplot.subplot(2,1,1)
10 | pyplot.bar(events, p)
11 | pyplot.subplot(2,1,2)
12 | pyplot.bar(events, q)
13 | pyplot.show()
14 | 
15 | def cross_entropy(p, q):
16 |     return -sum([p*log2(q) for p,q in zip(p,q)])
17 | 
18 | h_pq = cross_entropy(p, q)
19 | print(f'H(P, Q) = {h_pq:.3f} bits')
20 | 
21 | 
--------------------------------------------------------------------------------
/Chapter04/cross_entropy_loss_function.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | 
3 | y = np.array([1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0])
4 | p = np.array([0.8, 0.1, 0.9, 0.2, 0.8, 0.1, 0.7, 0.3, 0.6, 0.4])
5 | 
6 | ce_loss = -sum(y*np.log(p)+(1-y)*np.log(1-p))
7 | 
8 | ce_loss = ce_loss/len(p)
9 | print(f'Cross Entropy Loss = {ce_loss:.3f} nats')
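10 | 
11 | # NOTE (added): a minimal cross-check, assuming scikit-learn is available;
12 | # log_loss computes the same mean binary cross-entropy (in nats) from the
13 | # true labels and the predicted probabilities.
14 | from sklearn.metrics import log_loss
15 | print(f'sklearn log_loss = {log_loss(y, p):.3f} nats')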
--------------------------------------------------------------------------------
/Chapter04/numerical_integration.py:
--------------------------------------------------------------------------------
1 | import random
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | 
5 | random.seed(2)
6 | f = lambda x: x**2
7 | a = 0.0
8 | 
9 | b = 3.0
10 | NumSteps = 1000000
11 | XIntegral=[]
12 | YIntegral=[]
13 | XRectangle=[]
14 | YRectangle=[]
15 | 
16 | ymin = f(a)
17 | ymax = ymin
18 | for i in range(NumSteps):
19 |     x = a + (b - a) * float(i) / NumSteps
20 |     y = f(x)
21 |     if y < ymin: ymin = y
22 |     if y > ymax: ymax = y
23 | 
24 | A = (b - a) * (ymax - ymin)
25 | N = 1000000
26 | M = 0
27 | for k in range(N):
28 |     x = a + (b - a) * random.random()
29 |     y = ymin + (ymax - ymin) * random.random()
30 |     if y <= f(x):
31 |         M += 1
32 |         XIntegral.append(x)
33 |         YIntegral.append(y)
34 |     else:
35 |         XRectangle.append(x)
36 |         YRectangle.append(y)
37 | NumericalIntegral = M / N * A
38 | print ("Numerical integration = " + str(NumericalIntegral))
39 | 
40 | XLin=np.linspace(a,b)
41 | YLin=[]
42 | for x in XLin:
43 |     YLin.append(f(x))
44 | 
45 | plt.axis ([0, b, 0, f(b)])
46 | plt.plot (XLin,YLin, color="red" , linewidth="4")
47 | plt.scatter(XIntegral, YIntegral, color="blue", marker =".")
48 | plt.scatter(XRectangle, YRectangle, color="yellow", marker =".")
49 | plt.title ("Numerical Integration using Monte Carlo method")
50 | plt.show()
--------------------------------------------------------------------------------
/Chapter04/sensitivity_analysis.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import math
3 | from sensitivity import SensitivityAnalyzer
4 | 
5 | def my_func(x_1, x_2,x_3):
6 |     return math.log(x_1/ x_2 + x_3)
7 | 
8 | x_1=np.arange(10, 100, 10)
9 | x_2=np.arange(1, 10, 1)
10 | x_3=np.arange(1, 10, 1)
11 | 
12 | sa_dict = {'x_1':x_1.tolist(),'x_2':x_2.tolist(),'x_3':x_3.tolist()}
13 | 
14 | sa_model = SensitivityAnalyzer(sa_dict, my_func)
15 | plot = sa_model.plot()
16 | styled_df = sa_model.styled_dfs()
17 | 
--------------------------------------------------------------------------------
/Chapter04/simulating_pi.py:
--------------------------------------------------------------------------------
1 | import math
2 | import random
3 | import numpy as np
4 | import matplotlib.pyplot as plt
5 | 
6 | N = 10000
7 | M = 0
8 | 
9 | XCircle=[]
10 | YCircle=[]
11 | XSquare=[]
12 | YSquare=[]
13 | 
14 | for p in range(N):
15 |     x=random.random()
16 |     y=random.random()
17 |     if(x**2+y**2 <= 1):
18 |         M+=1
19 |         XCircle.append(x)
20 |         YCircle.append(y)
21 |     else:
22 |         XSquare.append(x)
23 |         YSquare.append(y)
24 | 
25 | Pi = 4*M/N
26 | 
27 | print("N=%d M=%d Pi=%.2f" %(N,M,Pi))
28 | 
29 | XLin=np.linspace(0,1)
30 | YLin=[]
31 | for x in XLin:
32 |     YLin.append(math.sqrt(1-x**2))
33 | 
34 | plt.axis ("equal")
35 | plt.grid (which="major")
36 | plt.plot (XLin , YLin, color="red" , linewidth="4")
37 | plt.scatter(XCircle, YCircle, color="yellow", marker =".")
38 | plt.scatter(XSquare, YSquare, color="blue" , marker =".")
39 | plt.title ("Monte Carlo method for Pi estimation")
40 | 
41 | plt.show()
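42 | 
43 | # NOTE (added): the Monte Carlo estimate converges as 1/sqrt(N); a minimal
44 | # check of the absolute error of this run against math.pi:
45 | print("Absolute error = %.4f" % abs(math.pi - Pi))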
--------------------------------------------------------------------------------
/Chapter05/schelling_model.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | from random import random
3 | 
4 | class SchAgent:
5 | 
6 |     def __init__(self, type):
7 |         self.type = type
8 |         self.ag_location()
9 | 
10 |     def ag_location(self):
11 |         self.location = random(), random()
12 | 
13 |     def euclidean_distance(self, new):
14 |         eu_dist = ((self.location[0] - new.location[0])**2 \
15 |                    + (self.location[1] - new.location[1])**2)**(1/2)
16 |         return eu_dist
17 | 
18 |     def satisfaction(self, agents):
19 |         eu_dist = []
20 |         for agent in agents:
21 |             if self != agent:
22 |                 eu_distance = self.euclidean_distance(agent)
23 |                 eu_dist.append((eu_distance, agent))
24 |         eu_dist.sort()
25 |         neigh_agent = [agent for k, agent in eu_dist[:neigh_num]]
26 |         neigh_itself = sum(self.type == agent.type for agent in neigh_agent)
27 |         return neigh_itself >= neigh_threshold
28 | 
29 |     def update(self, agents):
30 |         while not self.satisfaction(agents):
31 |             self.ag_location()
32 | 
33 | 
34 | def grid_plot(agents, step):
35 |     x_A, y_A = [], []
36 |     x_B, y_B = [], []
37 |     for agent in agents:
38 |         x, y = agent.location
39 |         if agent.type == 0:
40 |             x_A.append(x)
41 |             y_A.append(y)
42 |         else:
43 |             x_B.append(x)
44 |             y_B.append(y)
45 |     fig, ax = plt.subplots(figsize=(10, 10))
46 |     ax.plot(x_A, y_A, '^', markerfacecolor='b',markersize= 10)
47 |     ax.plot(x_B, y_B, 'o', markerfacecolor='r',markersize= 10)
48 |     ax.set_title(f'Step number = {step}')
49 |     plt.show()
50 | 
51 | 
52 | num_agents_A = 500
53 | num_agents_B = 500
54 | neigh_num = 8
55 | neigh_threshold = 4
56 | 
57 | agents = [SchAgent(0) for i in range(num_agents_A)]
58 | agents.extend(SchAgent(1) for i in range(num_agents_B))
59 | 
60 | step = 0
61 | k=0
62 | while (k<(num_agents_A + num_agents_B)):
63 |     print('Step number = ', step)
64 |     grid_plot(agents, step)
65 |     step += 1
66 |     k=0
67 |     for agent in agents:
68 |         old_location = agent.location
69 |         agent.update(agents)
70 |         if agent.location == old_location:
71 |             k=k+1
72 | else:
73 |     print(f'Satisfied agents with {neigh_threshold/neigh_num*100} \
74 | % of similar neighbors')
--------------------------------------------------------------------------------
/Chapter05/simulating_random_walk.py:
--------------------------------------------------------------------------------
1 | from random import seed
2 | from random import random
3 | from matplotlib import pyplot
4 | seed(1)
5 | RWPath = list()
6 | RWPath.append(-1 if random() < 0.5 else 1)
7 | for i in range(1, 1000):
8 |     ZNValue = -1 if random() < 0.5 else 1
9 |     XNValue = RWPath[i-1] + ZNValue
10 |     RWPath.append(XNValue)
11 | pyplot.plot(RWPath)
12 | pyplot.show()
--------------------------------------------------------------------------------
/Chapter05/weather_forecasting.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | 
4 | np.random.seed(3)
5 | StatesData = ["Sunny","Rainy"]
6 | 
7 | TransitionStates = [["SuSu","SuRa"],["RaRa","RaSu"]]
8 | TransitionMatrix = [[0.80,0.20],[0.25,0.75]]
9 | 
10 | 
11 | WeatherForecasting = list()
12 | NumDays = 365
13 | TodayPrediction = StatesData[0]
14 | 
15 | print("Weather initial condition =",TodayPrediction)
16 | 
17 | 
18 | for i in range(1, NumDays):
19 | 
20 |     if TodayPrediction == "Sunny":
21 |         TransCondition = np.random.choice(TransitionStates[0],replace=True,p=TransitionMatrix[0])
22 |         if TransCondition == "SuSu":
23 |             pass
24 |         else:
25 |             TodayPrediction = "Rainy"
26 | 
27 | 
28 | 
29 |     elif TodayPrediction == "Rainy":
30 |         TransCondition = np.random.choice(TransitionStates[1],replace=True,p=TransitionMatrix[1])
31 |         if TransCondition == "RaRa":
32 |             pass
33 |         else:
34 |             TodayPrediction = "Sunny"
35 | 
36 | 
37 |     WeatherForecasting.append(TodayPrediction)
38 |     print(TodayPrediction)
39 | 
40 | 
41 | plt.plot(WeatherForecasting)
42 | plt.show()
43 | 
44 | plt.figure()
45 | plt.hist(WeatherForecasting)
46 | plt.show()
--------------------------------------------------------------------------------
/Chapter06/bootstrap_estimator.py:
--------------------------------------------------------------------------------
1 | import random
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | 
5 | PopData = list()
6 | 
7 | random.seed(7)
8 | 
9 | for i in range(1000):
10 |     DataElem = 50 * random.random()
11 |     PopData.append(DataElem)
12 | 
13 | 
14 | PopSample = random.choices(PopData, k=100)
15 | 
16 | PopSampleMean = list()
17 | for i in range(10000):
18 |     SampleI = random.choices(PopData, k=100)
19 |     PopSampleMean.append(np.mean(SampleI))
20 | 
21 | plt.hist(PopSampleMean)
22 | plt.show()
23 | 
24 | MeanPopSampleMean = np.mean(PopSampleMean)
25 | print("The mean of the Bootstrap estimator is ",MeanPopSampleMean)
26 | 
27 | MeanPopData = np.mean(PopData)
28 | print("The mean of the population is ",MeanPopData)
29 | 
30 | MeanPopSample = np.mean(PopSample)
31 | print("The mean of the simple random sample is ",MeanPopSample)
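32 | 
33 | # NOTE (added): the bootstrap distribution also gives an interval estimate; a
34 | # minimal sketch of a 95% percentile confidence interval for the mean.
35 | ci_low, ci_high = np.percentile(PopSampleMean, [2.5, 97.5])
36 | print("95% bootstrap CI for the mean = [", ci_low, ",", ci_high, "]")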
--------------------------------------------------------------------------------
/Chapter06/bootstrap_regression.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from sklearn.linear_model import LinearRegression
3 | import matplotlib.pyplot as plt
4 | import pandas as pd
5 | import seaborn as sns
6 | 
7 | 
8 | x = np.linspace(0, 1, 100)
9 | y = x + (np.random.rand(len(x)))
10 | 
11 | for i in range(30):
12 |     x=np.append(x, np.random.choice(x))
13 |     y=np.append(y, np.random.choice(y))
14 | 
15 | x=x.reshape(-1, 1)
16 | y=y.reshape(-1, 1)
17 | 
18 | reg_model = LinearRegression().fit(x, y)
19 | 
20 | r_sq = reg_model.score(x, y)
21 | print(f"R squared = {r_sq}")
22 | 
23 | alpha=float(reg_model.coef_[0][0])   # coef_ has shape (1, 1) because y is 2-D
24 | print(f"slope: {reg_model.coef_}")
25 | beta=float(reg_model.intercept_[0])
26 | print(f"intercept: {reg_model.intercept_}")
27 | 
28 | y_pred = reg_model.predict(x)
29 | 
30 | plt.scatter(x, y)
31 | plt.plot(x, y_pred, linewidth=2)
32 | plt.xlabel('x')
33 | plt.ylabel('y')
34 | plt.show()
35 | 
36 | boot_slopes = []
37 | boot_interc = []
38 | r_sqs= []
39 | n_boots = 500
40 | num_sample = len(x)
41 | data = pd.DataFrame({'x': x[:,0],'y': y[:,0]})
42 | 
43 | plt.figure()
44 | for k in range(n_boots):
45 |     sample = data.sample(n=num_sample, replace=True)
46 |     x_temp=sample['x'].values.reshape(-1, 1)
47 |     y_temp=sample['y'].values.reshape(-1, 1)
48 |     reg_model = LinearRegression().fit(x_temp, y_temp)
49 |     r_sqs_temp = reg_model.score(x_temp, y_temp)
50 |     r_sqs.append(r_sqs_temp)
51 |     boot_interc.append(float(reg_model.intercept_[0]))
52 |     boot_slopes.append(float(reg_model.coef_[0][0]))
53 |     y_pred_temp = reg_model.predict(x_temp)
54 |     plt.plot(x_temp, y_pred_temp, color='grey', alpha=0.2)
55 | 
56 | plt.scatter(x, y)
57 | plt.plot(x, y_pred, linewidth=2)
58 | plt.xlabel('x')
59 | plt.ylabel('y')
60 | plt.show()
61 | 
62 | sns.histplot(data=boot_slopes, kde=True)
63 | plt.show()
64 | sns.histplot(data=boot_interc, kde=True)
65 | plt.show()
66 | 
67 | plt.plot(r_sqs)
68 | 
69 | max_r_sq=max(r_sqs)
70 | print(f"Max R squared = {max_r_sq}")
71 | 
72 | pos_max_r_sq=r_sqs.index(max(r_sqs))
73 | print(f"Boot of the best Regression model = {pos_max_r_sq}")
74 | 
75 | max_slope=boot_slopes[pos_max_r_sq]
76 | print(f"Slope of the best Regression model = {max_slope}")
77 | 
78 | max_interc=boot_interc[pos_max_r_sq]
79 | print(f"Intercept of the best Regression model = {max_interc}")
--------------------------------------------------------------------------------
/Chapter06/jakknife_estimator.py:
--------------------------------------------------------------------------------
1 | import random
2 | import statistics
3 | import matplotlib.pyplot as plt
4 | 
5 | PopData = list()
6 | 
7 | random.seed(5)
8 | 
9 | for i in range(100):
10 |     DataElem = 10 * random.random()
11 |     PopData.append(DataElem)
12 | 
13 | 
14 | def CVCalc(Dat):
15 |     CVCalc = statistics.stdev(Dat)/statistics.mean(Dat)
16 |     return CVCalc
17 | 
18 | CVPopData = CVCalc(PopData)
19 | print(CVPopData)
20 | 
21 | N = len(PopData)
22 | JackVal = list()
23 | PseudoVal = list()
24 | for i in range(N-1):
25 |     JackVal.append(0)
26 | for i in range(N):
27 |     PseudoVal.append(0)
28 | 
29 | for i in range(N):
30 |     for j in range(N):
31 |         if(j < i):
32 |             JackVal[j] = PopData[j]
33 |         else:
34 |             if(j > i):
35 |                 JackVal[j-1]= PopData[j]
36 |     PseudoVal[i] = N*CVCalc(PopData)-(N-1)*CVCalc(JackVal)
37 | 
38 | plt.hist(PseudoVal)
39 | plt.show()
40 | 
41 | MeanPseudoVal=statistics.mean(PseudoVal)
42 | print(MeanPseudoVal)
43 | VariancePseudoVal=statistics.variance(PseudoVal)
44 | print(VariancePseudoVal)
45 | VarJack = statistics.variance(PseudoVal)/N
46 | print(VarJack)
--------------------------------------------------------------------------------
/Chapter06/kfold_cross_validation.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from sklearn.model_selection import KFold
3 | 
4 | StartedData=np.arange(10,110,10)
5 | print(StartedData)
6 | 
7 | 
8 | kfold = KFold(n_splits=5, shuffle=True, random_state=1)  # keyword arguments required in current scikit-learn
9 | 
10 | for TrainData, TestData in kfold.split(StartedData):
11 |     print("Train Data :", StartedData[TrainData],"Test Data :", StartedData[TestData])
12 | 
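13 | # NOTE (added): a minimal sketch of reusing the same splitter for model
14 | # evaluation, assuming scikit-learn's cross_val_score and a toy linear target;
15 | # each fold trains on 8 values and scores (R^2) on the 2 held-out values.
16 | from sklearn.linear_model import LinearRegression
17 | from sklearn.model_selection import cross_val_score
18 | X = StartedData.reshape(-1, 1)
19 | y = 2.0 * StartedData
20 | scores = cross_val_score(LinearRegression(), X, y, cv=kfold)
21 | print("Cross-validation scores :", scores)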
--------------------------------------------------------------------------------
/Chapter06/permutation_test.py:
--------------------------------------------------------------------------------
1 | from sklearn.datasets import load_iris
2 | import numpy as np
3 | from sklearn import tree
4 | from sklearn.model_selection import permutation_test_score
5 | import matplotlib.pyplot as plt
6 | import seaborn as sns
7 | 
8 | data = load_iris()
9 | X = data.data
10 | y = data.target
11 | 
12 | np.random.seed(0)
13 | X_nc_data = np.random.normal(size=(len(X), 4))
14 | 
15 | 
16 | 
17 | clf = tree.DecisionTreeClassifier(random_state=1)
18 | 
19 | p_test_iris = permutation_test_score(
20 |     clf, X, y, scoring="accuracy", n_permutations=1000
21 | )
22 | 
23 | print(f"Score of iris flower classification = {p_test_iris[0]}")
24 | print(f"P_value of permutation test for iris dataset = {p_test_iris[2]}")
25 | 
26 | p_test_nc_data = permutation_test_score(
27 |     clf, X_nc_data, y, scoring="accuracy", n_permutations=1000
28 | )
29 | 
30 | print(f"Score of non-correlated data classification = {p_test_nc_data[0]}")
31 | print(f"P_value of permutation test for non-correlated dataset = {p_test_nc_data[2]}")
32 | 
33 | pbox1=sns.histplot(data=p_test_iris[1], kde=True)
34 | plt.axvline(p_test_iris[0],linestyle="-", color='r')
35 | plt.axvline(p_test_iris[2],linestyle="--", color='b')
36 | pbox1.set(xlim=(0,1))
37 | plt.show()
38 | 
39 | pbox2=sns.histplot(data=p_test_nc_data[1], kde=True)
40 | plt.axvline(p_test_nc_data[0], color="r",linestyle="-")
41 | plt.axvline(p_test_nc_data[2], color="b",linestyle="--")
42 | pbox2.set(xlim=(0,1))
43 | plt.show()
--------------------------------------------------------------------------------
/Chapter07/GradientDescent.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | 
4 | x = np.linspace(-1,3,100)
5 | y=x**2-2*x+1
6 | 
7 | fig = plt.figure()
8 | axdef = fig.add_subplot(1, 1, 1)
9 | axdef.spines['left'].set_position('center')
10 | axdef.spines['bottom'].set_position('zero')
11 | axdef.spines['right'].set_color('none')
12 | axdef.spines['top'].set_color('none')
13 | axdef.xaxis.set_ticks_position('bottom')
14 | axdef.yaxis.set_ticks_position('left')
15 | 
16 | plt.plot(x,y, 'r')
17 | plt.show()
18 | 
19 | Gradf = lambda x: 2*x-2
20 | 
21 | ActualX = 3
22 | LearningRate = 0.01
23 | PrecisionValue = 0.000001
24 | PreviousStepSize = 1
25 | MaxIteration = 10000
26 | IterationCounter = 0
27 | 
28 | 
29 | while PreviousStepSize > PrecisionValue and IterationCounter < MaxIteration:
30 |     PreviousX = ActualX
31 |     ActualX = ActualX - LearningRate * Gradf(PreviousX)
32 |     PreviousStepSize = abs(ActualX - PreviousX)
33 |     IterationCounter = IterationCounter+1
34 |     print("Number of iterations = ",IterationCounter,"\nActual value of x is = ",ActualX)
35 | 
36 | print("X value of f(x) minimum = ", ActualX)
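37 | 
38 | # NOTE (added): a minimal sketch of the method's sensitivity to the learning
39 | # rate; the same descent with a larger rate (0.1) should reach the analytic
40 | # minimum x = 1 of f(x) = x**2 - 2*x + 1 in far fewer iterations.
41 | x_test = 3
42 | lr = 0.1
43 | for k in range(MaxIteration):
44 |     x_new = x_test - lr * Gradf(x_test)
45 |     if abs(x_new - x_test) < PrecisionValue:
46 |         break
47 |     x_test = x_new
48 | print("lr = 0.1 -> iterations =", k + 1, ", x =", x_test)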
--------------------------------------------------------------------------------
/Chapter07/Newton-Raphson.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | 
4 | x = np.linspace(0,3,100)
5 | y=x**3 -2*x**2 -x + 2
6 | 
7 | fig = plt.figure()
8 | axdef = fig.add_subplot(1, 1, 1)
9 | axdef.spines['left'].set_position('center')
10 | axdef.spines['bottom'].set_position('zero')
11 | axdef.spines['right'].set_color('none')
12 | axdef.spines['top'].set_color('none')
13 | axdef.xaxis.set_ticks_position('bottom')
14 | axdef.yaxis.set_ticks_position('left')
15 | 
16 | plt.plot(x,y, 'r')
17 | plt.show()
18 | 
19 | print('Value of x at the minimum of the function', x[np.argmin(y)])
20 | 
21 | FirstDerivative = lambda x: 3*x**2-4*x -1
22 | SecondDerivative = lambda x: 6*x-4
23 | 
24 | ActualX = 3
25 | PrecisionValue = 0.000001
26 | PreviousStepSize = 1
27 | MaxIteration = 10000
28 | IterationCounter = 0
29 | 
30 | 
31 | while PreviousStepSize > PrecisionValue and IterationCounter < MaxIteration:
32 |     PreviousX = ActualX
33 |     ActualX = ActualX - FirstDerivative(PreviousX)/ SecondDerivative(PreviousX)
34 |     PreviousStepSize = abs(ActualX - PreviousX)
35 |     IterationCounter = IterationCounter+1
36 |     print("Number of iterations = ",IterationCounter,"\nActual value of x is = ",ActualX)
37 | 
38 | print("X value of f(x) minimum = ", ActualX)
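39 | 
40 | # NOTE (added): the Newton step x - f'(x)/f''(x) uses curvature information,
41 | # so near a minimum with f''(x) > 0 convergence is quadratic; from x = 3 the
42 | # iterates should settle on x ~ 1.5486, the stationary point where
43 | # f'(x) = 3*x**2 - 4*x - 1 vanishes.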
--------------------------------------------------------------------------------
/Chapter07/SciPyOptimize.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from scipy.optimize import minimize
3 | import matplotlib.pyplot as plt
4 | from matplotlib import cm
5 | from matplotlib.ticker import LinearLocator, FormatStrFormatter
6 | from mpl_toolkits.mplot3d import Axes3D
7 | 
8 | def matyas(x):
9 |     return 0.26*(x[0]**2+x[1]**2)-0.48*x[0]*x[1]
10 | 
11 | #def booth(x):
12 | #    return (x[0]+2*x[1]-7)**2+(2*x[0]+x[1]-5)**2
13 | 
14 | x = np.linspace(-10,10,100)
15 | y = np.linspace(-10,10,100)
16 | x, y = np.meshgrid(x, y)
17 | z = matyas([x,y])
18 | 
19 | fig = plt.figure()
20 | # fig.gca(projection='3d') was removed in Matplotlib 3.6; add_subplot is the
21 | # current way to create a 3D axes
22 | ax = fig.add_subplot(projection='3d')
23 | surf = ax.plot_surface(x, y, z, rstride=1, cstride=1,
24 |                        cmap=cm.RdBu,linewidth=0, antialiased=False)
25 | 
26 | ax.zaxis.set_major_locator(LinearLocator(10))
27 | ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))
28 | 
29 | fig.colorbar(surf, shrink=0.5, aspect=10)
30 | 
31 | plt.show()
32 | 
33 | 
34 | 
35 | x0 = np.array([-10, 10])
36 | NelderMeadOptimizeResults = minimize(matyas, x0, method='nelder-mead',
37 |                                      options={'xatol': 1e-8, 'disp': True})
38 | 
39 | print(NelderMeadOptimizeResults.x)
40 | 
41 | 
42 | 
43 | x0 = np.array([-10, 10])
44 | PowellOptimizeResults = minimize(matyas, x0, method='Powell',
45 |                                  options={'xtol': 1e-8, 'disp': True})
46 | 
47 | print(PowellOptimizeResults.x)
--------------------------------------------------------------------------------
/Chapter07/gaussian_mixtures.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import seaborn as sns
3 | from matplotlib import pyplot as plt
4 | from sklearn.mixture import GaussianMixture
5 | import pandas as pd
6 | 
7 | mean_1=25
8 | st_1=9
9 | mean_2=50
10 | st_2=5
11 | 
12 | n_dist_1 = np.random.normal(loc=mean_1, scale=st_1, size=3000)
13 | n_dist_2 = np.random.normal(loc=mean_2, scale=st_2, size=7000)
14 | 
15 | dist_merged = np.hstack((n_dist_1, n_dist_2))
16 | 
17 | sns.set_style("white")
18 | sns.histplot(data=dist_merged, kde=True)
19 | plt.show()
20 | 
21 | dist_merged_res = dist_merged.reshape((len(dist_merged), 1))
22 | gm_model = GaussianMixture(n_components=2, init_params='kmeans')
23 | gm_model.fit(dist_merged_res)
24 | 
25 | print(f"Initial distribution means = {mean_1,mean_2}")
26 | print(f"Initial distribution standard deviation = {st_1,st_2}")
27 | 
28 | print(f"GM_model distribution means = {gm_model.means_}")
29 | print(f"GM_model distribution standard deviation = {np.sqrt(gm_model.covariances_)}")
30 | 
31 | dist_labels = gm_model.predict(dist_merged_res)
32 | 
33 | sns.set_style("white")
34 | data_pred=pd.DataFrame({'data':dist_merged, 'label':dist_labels})
35 | sns.histplot(data = data_pred, x = "data", kde = True, hue = "label")
36 | plt.show()
37 | 
38 | label_0 = np.zeros(3000, dtype=int)
39 | label_1 = np.ones(7000, dtype=int)
40 | labels_merged = np.hstack((label_0, label_1))
41 | data_init=pd.DataFrame({'data':dist_merged, 'label':labels_merged})
42 | 
43 | sns.set_style("white")
44 | sns.histplot(data = data_init, x = "data", kde = True, hue = "label")
45 | plt.show()
--------------------------------------------------------------------------------
/Chapter07/simulated_annealing.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | 
4 | x= np.linspace(0,10,1000)
5 | 
6 | 
7 | def cost_function(x):
8 |     return x*np.sin(2.1*x+1)
9 | 
10 | plt.plot(x,cost_function(x))
11 | plt.xlabel('X')
12 | plt.ylabel('Cost Function')
13 | plt.show()
14 | 
15 | temp = 2000
16 | iter = 2000
17 | step_size = 0.1
18 | np.random.seed(15)
19 | xi = np.random.uniform(min(x), max(x))
20 | E_xi = cost_function(xi)
21 | xit, E_xit = xi, E_xi
22 | cost_func_eval = []
23 | acc_prob = 1
24 | 
25 | for i in range(iter):
26 |     xstep = xit + np.random.randn() * step_size
27 |     E_step = cost_function(xstep)
28 |     if E_step < E_xi:
29 |         xi, E_xi = xstep, E_step
30 |         cost_func_eval.append(E_xi)
31 |         print('Iteration = ',i, 'x_min = ',xi,'Global Minimum =', E_xi,
32 |               'Acceptance Probability =', acc_prob)
33 |     diff_energy = E_step - E_xit
34 |     t = temp /(i + 1)
35 |     acc_prob = np.exp(-diff_energy/ t)
36 |     # Metropolis criterion: accept a worse move with probability acc_prob by
37 |     # comparing against a uniform draw in [0, 1) (rand, not randn)
38 |     if diff_energy < 0 or np.random.rand() < acc_prob:
39 |         xit, E_xit = xstep, E_step
40 | 
41 | 
42 | plt.plot(cost_func_eval, 'bs--')
43 | plt.xlabel('Improvement Step')
44 | plt.ylabel('Cost Function improvement')
45 | plt.show()
--------------------------------------------------------------------------------
/Chapter08/cellular_automata.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | 
4 | cols_num=100
5 | rows_num=100
6 | wolfram_rule=126
7 | bin_rule = np.array([int(_) for _ in np.binary_repr(wolfram_rule, 8)])
8 | print('Binary rule is:',bin_rule)
9 | 
10 | cell_state = np.zeros((rows_num, cols_num),dtype=np.int8)
11 | cell_state[0, :] = np.random.randint(0,2,cols_num)
12 | 
13 | update_window= np.array([[4], [2], [1]])
14 | for j in range(rows_num - 1):
15 |     update = np.vstack((np.roll(cell_state[j, :], 1), cell_state[j, :],
16 |                         np.roll(cell_state[j, :], -1))).astype(np.int8)
17 |     rule_up = np.sum(update * update_window, axis=0).astype(np.int8)
18 |     cell_state[j + 1, :] = bin_rule[7 - rule_up]
19 | 
20 | 
21 | ca_img= plt.imshow(cell_state,cmap=plt.cm.binary)
22 | plt.show()
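23 | 
24 | # NOTE (added): rule 126 in binary is 01111110; each three-cell neighborhood
25 | # is encoded as an integer 0-7 by the window weights [4, 2, 1], and the new
26 | # cell state is looked up as bin_rule[7 - code], matching Wolfram's numbering.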
--------------------------------------------------------------------------------
/Chapter08/genetic_algorithm.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | 
3 | var_values = [1,-3,4.5,2]
4 | num_coeff = 4
5 | pop_chrom = 10
6 | sel_rate = 5
7 | 
8 | pop_size = (pop_chrom,num_coeff)
9 | pop_new = np.random.uniform(low=-10.0, high=10.0, size=pop_size)
10 | print(pop_new)
11 | 
12 | num_gen = 100
13 | for k in range(num_gen):
14 |     fitness = np.sum(pop_new *var_values, axis=1)
15 |     par_sel = np.empty((sel_rate, pop_new.shape[1]))
16 |     print("Current generation = ", k)
17 |     print("Best fitness value : ", np.max(fitness))
18 | 
19 |     for i in range(sel_rate):
20 |         sel_id = np.where(fitness == np.max(fitness))
21 |         sel_id = sel_id[0][0]
22 |         par_sel[i, :] = pop_new[sel_id, :]
23 |         fitness[sel_id]=np.min(fitness)
24 | 
25 |     offspring_size=(pop_chrom-sel_rate, num_coeff)
26 |     offspring = np.empty(offspring_size)
27 |     crossover_length = int(offspring_size[1]/2)
28 | 
29 |     for j in range(offspring_size[0]):
30 |         par1_id = np.random.randint(0,par_sel.shape[0])
31 |         par2_id = np.random.randint(0,par_sel.shape[0])
32 |         offspring[j, 0:crossover_length] = par_sel[par1_id, 0:crossover_length]
33 |         offspring[j, crossover_length:] = par_sel[par2_id, crossover_length:]
34 | 
35 | 
36 |     for m in range(offspring.shape[0]):
37 |         mut_val = np.random.uniform(-1.0, 1.0)
38 |         mut_id = np.random.randint(0,par_sel.shape[1])
39 |         offspring[m, mut_id] = offspring[m, mut_id] + mut_val
40 | 
41 |     pop_new[0:par_sel.shape[0], :] = par_sel
42 |     pop_new[par_sel.shape[0]:, :] = offspring
43 | 
44 | 
45 | fitness = np.sum(pop_new *var_values, axis=1)
46 | best_id = np.where(fitness == np.max(fitness))
47 | print("Optimized coefficient values = ", pop_new[best_id, :])
48 | print("Maximum value of y = ", fitness[best_id])
--------------------------------------------------------------------------------
/Chapter08/symbolic_regression.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | from mpl_toolkits.mplot3d import Axes3D
4 | from gplearn.genetic import SymbolicRegressor
5 | 
6 | x = np.arange(-1, 1, 1/10.)
7 | y = np.arange(-1, 1, 1/10.)
8 | x, y = np.meshgrid(x, y)
9 | f_values = x**2 + y**2
10 | 
11 | fig = plt.figure()
12 | # Axes3D(fig) no longer attaches the axes to the figure in recent Matplotlib;
13 | # add_subplot is the supported way to get a 3D axes
14 | ax = fig.add_subplot(projection='3d')
15 | ax.plot_surface(x, y, f_values)
16 | plt.xlabel('x')
17 | plt.ylabel('y')
18 | plt.show()
19 | 
20 | 
21 | input_train = np.random.uniform(-1, 1, 200).reshape(100, 2)
22 | output_train = input_train[:, 0]**2 + input_train[:, 1]**2
23 | 
24 | input_test = np.random.uniform(-1, 1, 200).reshape(100, 2)
25 | output_test = input_test[:, 0]**2 + input_test[:, 1]**2
26 | 
27 | function_set = ['add', 'sub', 'mul']
28 | 
29 | sr_model = SymbolicRegressor(population_size=1000,function_set=function_set,
30 |                              generations=10, stopping_criteria=0.001,
31 |                              p_crossover=0.7, p_subtree_mutation=0.1,
32 |                              p_hoist_mutation=0.05, p_point_mutation=0.1,
33 |                              max_samples=0.9, verbose=1,
34 |                              parsimony_coefficient=0.01, random_state=1)
35 | sr_model.fit(input_train, output_train)
36 | 
37 | print(sr_model._program)
38 | print('R2:',sr_model.score(input_test,output_test))
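39 | 
40 | # NOTE (added): with this target the evolved program should be equivalent to
41 | # add(mul(X0, X0), mul(X1, X1)), i.e. x**2 + y**2; the R2 score printed above
42 | # on the held-out test set measures how closely the formula was recovered.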
--------------------------------------------------------------------------------
/Chapter09/amazon_stock_montecarlo_simulation.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | import matplotlib.pyplot as plt
4 | from scipy.stats import norm
5 | from pandas.plotting import register_matplotlib_converters
6 | 
7 | register_matplotlib_converters()
8 | 
9 | AmznData = pd.read_csv('AMZN.csv',header=0, usecols=['Date', 'Close'],parse_dates=True,index_col='Date')
10 | print(AmznData.info())
11 | print(AmznData.head())
12 | print(AmznData.tail())
13 | print(AmznData.describe())
14 | 
15 | plt.figure(figsize=(10,5))
16 | plt.plot(AmznData)
17 | plt.show()
18 | 
19 | AmznDataPctChange = AmznData.pct_change()
20 | 
21 | AmznLogReturns = np.log(1 + AmznDataPctChange)
22 | print(AmznLogReturns.tail(10))
23 | 
24 | plt.figure(figsize=(10,5))
25 | plt.plot(AmznLogReturns)
26 | plt.show()
27 | 
28 | MeanLogReturns = np.array(AmznLogReturns.mean())
29 | 
30 | VarLogReturns = np.array(AmznLogReturns.var())
31 | 
32 | StdevLogReturns = np.array(AmznLogReturns.std())
33 | 
34 | 
35 | Drift = MeanLogReturns - (0.5 * VarLogReturns)
36 | print("Drift = ",Drift)
37 | 
38 | NumIntervals = 2515
39 | 
40 | Iterations = 20
41 | 
42 | np.random.seed(7)
43 | SBMotion = norm.ppf(np.random.rand(NumIntervals, Iterations))
44 | 
45 | 
46 | 
47 | DailyReturns = np.exp(Drift + StdevLogReturns * SBMotion)
48 | 
49 | 
50 | StartStockPrices = AmznData.iloc[0]
51 | 
52 | StockPrice = np.zeros_like(DailyReturns)
53 | 
54 | StockPrice[0] = StartStockPrices
55 | 
56 | for t in range(1, NumIntervals):
57 | 
58 |     StockPrice[t] = StockPrice[t - 1] * DailyReturns[t]
59 | 
60 | 
61 | 
62 | plt.figure(figsize=(10,5))
63 | 
64 | plt.plot(StockPrice)
65 | 
66 | AMZNTrend = np.array(AmznData.iloc[:, 0:1])
67 | 
68 | plt.plot(AMZNTrend,'k*')
69 | 
70 | plt.show()
--------------------------------------------------------------------------------
/Chapter09/standard_brownian_motion.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | 
4 | np.random.seed(4)
5 | 
6 | n = 1000
7 | 
8 | # np.math was removed in NumPy 2.0; np.sqrt handles scalars as well
9 | sqn = 1/np.sqrt(n)
10 | 
11 | z_values = np.random.randn(n)
12 | 
13 | Yk = 0
14 | 
15 | sb_motion=list()
16 | 
17 | for k in range(n):
18 |     Yk = Yk + sqn*z_values[k]
19 |     sb_motion.append(Yk)
20 | 
21 | plt.plot(sb_motion)
22 | plt.show()
--------------------------------------------------------------------------------
/Chapter09/value_at_risk.py:
--------------------------------------------------------------------------------
1 | import datetime as dt
2 | import numpy as np
3 | import pandas_datareader.data as wb
4 | import matplotlib.pyplot as plt
5 | from scipy.stats import norm
6 | 
7 | StockList = ['ADBE','CSCO','IBM','NVDA','MSFT','HPQ']
8 | StartDay = dt.datetime(2021, 1, 1)
9 | EndDay = dt.datetime(2021, 12, 31)
10 | 
11 | StockData = wb.DataReader(StockList, 'yahoo',StartDay,EndDay)  # note: the 'yahoo' endpoint of pandas-datareader may no longer be available
12 | StockClose = StockData["Adj Close"]
13 | print(StockClose.describe())
14 | 
15 | 
16 | fig, axs = plt.subplots(3, 2, figsize=(20,10))
17 | axs[0, 0].plot(StockClose['ADBE'])
18 | axs[0, 0].set_title('ADBE')
19 | axs[0, 1].plot(StockClose['CSCO'])
20 | axs[0, 1].set_title('CSCO')
21 | axs[1, 0].plot(StockClose['IBM'])
22 | axs[1, 0].set_title('IBM')
23 | axs[1, 1].plot(StockClose['NVDA'])
24 | axs[1, 1].set_title('NVDA')
25 | axs[2, 0].plot(StockClose['MSFT'])
26 | axs[2, 0].set_title('MSFT')
27 | axs[2, 1].plot(StockClose['HPQ'])
28 | axs[2, 1].set_title('HPQ')
29 | 
30 | 
31 | StockReturns = StockClose.pct_change()
32 | print(StockReturns.tail(15))
33 | 
34 | PortvolioValue = 1000000000.00
35 | ConfidenceValue = 0.95
36 | MeanStockRet = np.mean(StockReturns)
37 | StdStockRet = np.std(StockReturns)
38 | 
39 | WorkingDays2021 = 252.
40 | AnnualizedMeanStockRet = MeanStockRet/WorkingDays2021 41 | AnnualizedStdStockRet = StdStockRet/np.sqrt(WorkingDays2021) 42 | 43 | INPD = norm.ppf(1-ConfidenceValue,AnnualizedMeanStockRet,AnnualizedStdStockRet) 44 | VaR = PortvolioValue*INPD 45 | 46 | RoundVaR=np.round_(VaR,2) 47 | 48 | for i in range(len(StockList)): 49 | print("Value-at-Risk for", StockList[i], "is equal to ",RoundVaR[i]) 50 | -------------------------------------------------------------------------------- /Chapter10/Concrete_data.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PacktPublishing/Hands-On-Simulation-Modeling-with-Python-Second-Edition/799f10caa2702e4074893fd4c5145e9aa42b8719/Chapter10/Concrete_data.xlsx -------------------------------------------------------------------------------- /Chapter10/airfoil_self_noise.dat: -------------------------------------------------------------------------------- 1 | 800 0 0.3048 71.3 0.00266337 126.201 2 | 1000 0 0.3048 71.3 0.00266337 125.201 3 | 1250 0 0.3048 71.3 0.00266337 125.951 4 | 1600 0 0.3048 71.3 0.00266337 127.591 5 | 2000 0 0.3048 71.3 0.00266337 127.461 6 | 2500 0 0.3048 71.3 0.00266337 125.571 7 | 3150 0 0.3048 71.3 0.00266337 125.201 8 | 4000 0 0.3048 71.3 0.00266337 123.061 9 | 5000 0 0.3048 71.3 0.00266337 121.301 10 | 6300 0 0.3048 71.3 0.00266337 119.541 11 | 8000 0 0.3048 71.3 0.00266337 117.151 12 | 10000 0 0.3048 71.3 0.00266337 115.391 13 | 12500 0 0.3048 71.3 0.00266337 112.241 14 | 16000 0 0.3048 71.3 0.00266337 108.721 15 | 500 0 0.3048 55.5 0.00283081 126.416 16 | 630 0 0.3048 55.5 0.00283081 127.696 17 | 800 0 0.3048 55.5 0.00283081 128.086 18 | 1000 0 0.3048 55.5 0.00283081 126.966 19 | 1250 0 0.3048 55.5 0.00283081 126.086 20 | 1600 0 0.3048 55.5 0.00283081 126.986 21 | 2000 0 0.3048 55.5 0.00283081 126.616 22 | 2500 0 0.3048 55.5 0.00283081 124.106 23 | 3150 0 0.3048 55.5 0.00283081 123.236 24 | 4000 0 0.3048 55.5 0.00283081 121.106 25 | 5000 0 0.3048 55.5 0.00283081 119.606 26 | 6300 0 0.3048 55.5 0.00283081 117.976 27 | 8000 0 0.3048 55.5 0.00283081 116.476 28 | 10000 0 0.3048 55.5 0.00283081 113.076 29 | 12500 0 0.3048 55.5 0.00283081 111.076 30 | 200 0 0.3048 39.6 0.00310138 118.129 31 | 250 0 0.3048 39.6 0.00310138 119.319 32 | 315 0 0.3048 39.6 0.00310138 122.779 33 | 400 0 0.3048 39.6 0.00310138 124.809 34 | 500 0 0.3048 39.6 0.00310138 126.959 35 | 630 0 0.3048 39.6 0.00310138 128.629 36 | 800 0 0.3048 39.6 0.00310138 129.099 37 | 1000 0 0.3048 39.6 0.00310138 127.899 38 | 1250 0 0.3048 39.6 0.00310138 125.499 39 | 1600 0 0.3048 39.6 0.00310138 124.049 40 | 2000 0 0.3048 39.6 0.00310138 123.689 41 | 2500 0 0.3048 39.6 0.00310138 121.399 42 | 3150 0 0.3048 39.6 0.00310138 120.319 43 | 4000 0 0.3048 39.6 0.00310138 119.229 44 | 5000 0 0.3048 39.6 0.00310138 117.789 45 | 6300 0 0.3048 39.6 0.00310138 116.229 46 | 8000 0 0.3048 39.6 0.00310138 114.779 47 | 10000 0 0.3048 39.6 0.00310138 112.139 48 | 12500 0 0.3048 39.6 0.00310138 109.619 49 | 200 0 0.3048 31.7 0.00331266 117.195 50 | 250 0 0.3048 31.7 0.00331266 118.595 51 | 315 0 0.3048 31.7 0.00331266 122.765 52 | 400 0 0.3048 31.7 0.00331266 125.045 53 | 500 0 0.3048 31.7 0.00331266 127.315 54 | 630 0 0.3048 31.7 0.00331266 129.095 55 | 800 0 0.3048 31.7 0.00331266 129.235 56 | 1000 0 0.3048 31.7 0.00331266 127.365 57 | 1250 0 0.3048 31.7 0.00331266 124.355 58 | 1600 0 0.3048 31.7 0.00331266 122.365 59 | 2000 0 0.3048 31.7 0.00331266 122.375 60 | 2500 0 0.3048 31.7 0.00331266 120.755 61 | 3150 0 0.3048 31.7 
0.00331266 119.135 62 | 4000 0 0.3048 31.7 0.00331266 118.145 63 | 5000 0 0.3048 31.7 0.00331266 115.645 64 | 6300 0 0.3048 31.7 0.00331266 113.775 65 | 8000 0 0.3048 31.7 0.00331266 110.515 66 | 10000 0 0.3048 31.7 0.00331266 108.265 67 | 800 1.5 0.3048 71.3 0.00336729 127.122 68 | 1000 1.5 0.3048 71.3 0.00336729 125.992 69 | 1250 1.5 0.3048 71.3 0.00336729 125.872 70 | 1600 1.5 0.3048 71.3 0.00336729 126.632 71 | 2000 1.5 0.3048 71.3 0.00336729 126.642 72 | 2500 1.5 0.3048 71.3 0.00336729 124.512 73 | 3150 1.5 0.3048 71.3 0.00336729 123.392 74 | 4000 1.5 0.3048 71.3 0.00336729 121.762 75 | 5000 1.5 0.3048 71.3 0.00336729 119.632 76 | 6300 1.5 0.3048 71.3 0.00336729 118.122 77 | 8000 1.5 0.3048 71.3 0.00336729 115.372 78 | 10000 1.5 0.3048 71.3 0.00336729 113.492 79 | 12500 1.5 0.3048 71.3 0.00336729 109.222 80 | 16000 1.5 0.3048 71.3 0.00336729 106.582 81 | 315 1.5 0.3048 39.6 0.00392107 121.851 82 | 400 1.5 0.3048 39.6 0.00392107 124.001 83 | 500 1.5 0.3048 39.6 0.00392107 126.661 84 | 630 1.5 0.3048 39.6 0.00392107 128.311 85 | 800 1.5 0.3048 39.6 0.00392107 128.831 86 | 1000 1.5 0.3048 39.6 0.00392107 127.581 87 | 1250 1.5 0.3048 39.6 0.00392107 125.211 88 | 1600 1.5 0.3048 39.6 0.00392107 122.211 89 | 2000 1.5 0.3048 39.6 0.00392107 122.101 90 | 2500 1.5 0.3048 39.6 0.00392107 120.981 91 | 3150 1.5 0.3048 39.6 0.00392107 119.111 92 | 4000 1.5 0.3048 39.6 0.00392107 117.741 93 | 5000 1.5 0.3048 39.6 0.00392107 116.241 94 | 6300 1.5 0.3048 39.6 0.00392107 114.751 95 | 8000 1.5 0.3048 39.6 0.00392107 112.251 96 | 10000 1.5 0.3048 39.6 0.00392107 108.991 97 | 12500 1.5 0.3048 39.6 0.00392107 106.111 98 | 400 3 0.3048 71.3 0.00425727 127.564 99 | 500 3 0.3048 71.3 0.00425727 128.454 100 | 630 3 0.3048 71.3 0.00425727 129.354 101 | 800 3 0.3048 71.3 0.00425727 129.494 102 | 1000 3 0.3048 71.3 0.00425727 129.004 103 | 1250 3 0.3048 71.3 0.00425727 127.634 104 | 1600 3 0.3048 71.3 0.00425727 126.514 105 | 2000 3 0.3048 71.3 0.00425727 125.524 106 | 2500 3 0.3048 71.3 0.00425727 124.024 107 | 3150 3 0.3048 71.3 0.00425727 121.514 108 | 4000 3 0.3048 71.3 0.00425727 120.264 109 | 5000 3 0.3048 71.3 0.00425727 118.134 110 | 6300 3 0.3048 71.3 0.00425727 116.134 111 | 8000 3 0.3048 71.3 0.00425727 114.634 112 | 10000 3 0.3048 71.3 0.00425727 110.224 113 | 400 3 0.3048 55.5 0.00452492 126.159 114 | 500 3 0.3048 55.5 0.00452492 128.179 115 | 630 3 0.3048 55.5 0.00452492 129.569 116 | 800 3 0.3048 55.5 0.00452492 129.949 117 | 1000 3 0.3048 55.5 0.00452492 129.329 118 | 1250 3 0.3048 55.5 0.00452492 127.329 119 | 1600 3 0.3048 55.5 0.00452492 124.439 120 | 2000 3 0.3048 55.5 0.00452492 123.069 121 | 2500 3 0.3048 55.5 0.00452492 122.439 122 | 3150 3 0.3048 55.5 0.00452492 120.189 123 | 4000 3 0.3048 55.5 0.00452492 118.689 124 | 5000 3 0.3048 55.5 0.00452492 117.309 125 | 6300 3 0.3048 55.5 0.00452492 115.679 126 | 8000 3 0.3048 55.5 0.00452492 113.799 127 | 10000 3 0.3048 55.5 0.00452492 112.169 128 | 315 3 0.3048 39.6 0.00495741 123.312 129 | 400 3 0.3048 39.6 0.00495741 125.472 130 | 500 3 0.3048 39.6 0.00495741 127.632 131 | 630 3 0.3048 39.6 0.00495741 129.292 132 | 800 3 0.3048 39.6 0.00495741 129.552 133 | 1000 3 0.3048 39.6 0.00495741 128.312 134 | 1250 3 0.3048 39.6 0.00495741 125.802 135 | 1600 3 0.3048 39.6 0.00495741 122.782 136 | 2000 3 0.3048 39.6 0.00495741 120.532 137 | 2500 3 0.3048 39.6 0.00495741 120.162 138 | 3150 3 0.3048 39.6 0.00495741 118.922 139 | 4000 3 0.3048 39.6 0.00495741 116.792 140 | 5000 3 0.3048 39.6 0.00495741 115.792 141 | 6300 3 0.3048 39.6 0.00495741 114.042 
142 | 8000 3 0.3048 39.6 0.00495741 110.652 143 | 315 3 0.3048 31.7 0.00529514 123.118 144 | 400 3 0.3048 31.7 0.00529514 125.398 145 | 500 3 0.3048 31.7 0.00529514 127.548 146 | 630 3 0.3048 31.7 0.00529514 128.698 147 | 800 3 0.3048 31.7 0.00529514 128.708 148 | 1000 3 0.3048 31.7 0.00529514 126.838 149 | 1250 3 0.3048 31.7 0.00529514 124.838 150 | 1600 3 0.3048 31.7 0.00529514 122.088 151 | 2000 3 0.3048 31.7 0.00529514 120.088 152 | 2500 3 0.3048 31.7 0.00529514 119.598 153 | 3150 3 0.3048 31.7 0.00529514 118.108 154 | 4000 3 0.3048 31.7 0.00529514 115.608 155 | 5000 3 0.3048 31.7 0.00529514 113.858 156 | 6300 3 0.3048 31.7 0.00529514 109.718 157 | 250 4 0.3048 71.3 0.00497773 126.395 158 | 315 4 0.3048 71.3 0.00497773 128.175 159 | 400 4 0.3048 71.3 0.00497773 129.575 160 | 500 4 0.3048 71.3 0.00497773 130.715 161 | 630 4 0.3048 71.3 0.00497773 131.615 162 | 800 4 0.3048 71.3 0.00497773 131.755 163 | 1000 4 0.3048 71.3 0.00497773 131.015 164 | 1250 4 0.3048 71.3 0.00497773 129.395 165 | 1600 4 0.3048 71.3 0.00497773 126.645 166 | 2000 4 0.3048 71.3 0.00497773 124.395 167 | 2500 4 0.3048 71.3 0.00497773 123.775 168 | 3150 4 0.3048 71.3 0.00497773 121.775 169 | 4000 4 0.3048 71.3 0.00497773 119.535 170 | 5000 4 0.3048 71.3 0.00497773 117.785 171 | 6300 4 0.3048 71.3 0.00497773 116.165 172 | 8000 4 0.3048 71.3 0.00497773 113.665 173 | 10000 4 0.3048 71.3 0.00497773 110.905 174 | 12500 4 0.3048 71.3 0.00497773 107.405 175 | 250 4 0.3048 39.6 0.00579636 123.543 176 | 315 4 0.3048 39.6 0.00579636 126.843 177 | 400 4 0.3048 39.6 0.00579636 128.633 178 | 500 4 0.3048 39.6 0.00579636 130.173 179 | 630 4 0.3048 39.6 0.00579636 131.073 180 | 800 4 0.3048 39.6 0.00579636 130.723 181 | 1000 4 0.3048 39.6 0.00579636 128.723 182 | 1250 4 0.3048 39.6 0.00579636 126.343 183 | 1600 4 0.3048 39.6 0.00579636 123.213 184 | 2000 4 0.3048 39.6 0.00579636 120.963 185 | 2500 4 0.3048 39.6 0.00579636 120.233 186 | 3150 4 0.3048 39.6 0.00579636 118.743 187 | 4000 4 0.3048 39.6 0.00579636 115.863 188 | 5000 4 0.3048 39.6 0.00579636 113.733 189 | 1250 0 0.2286 71.3 0.00214345 128.144 190 | 1600 0 0.2286 71.3 0.00214345 129.134 191 | 2000 0 0.2286 71.3 0.00214345 128.244 192 | 2500 0 0.2286 71.3 0.00214345 128.354 193 | 3150 0 0.2286 71.3 0.00214345 127.834 194 | 4000 0 0.2286 71.3 0.00214345 125.824 195 | 5000 0 0.2286 71.3 0.00214345 124.304 196 | 6300 0 0.2286 71.3 0.00214345 122.044 197 | 8000 0 0.2286 71.3 0.00214345 118.024 198 | 10000 0 0.2286 71.3 0.00214345 118.134 199 | 12500 0 0.2286 71.3 0.00214345 117.624 200 | 16000 0 0.2286 71.3 0.00214345 114.984 201 | 20000 0 0.2286 71.3 0.00214345 114.474 202 | 315 0 0.2286 55.5 0.00229336 119.540 203 | 400 0 0.2286 55.5 0.00229336 121.660 204 | 500 0 0.2286 55.5 0.00229336 123.780 205 | 630 0 0.2286 55.5 0.00229336 126.160 206 | 800 0 0.2286 55.5 0.00229336 127.530 207 | 1000 0 0.2286 55.5 0.00229336 128.290 208 | 1250 0 0.2286 55.5 0.00229336 127.910 209 | 1600 0 0.2286 55.5 0.00229336 126.790 210 | 2000 0 0.2286 55.5 0.00229336 126.540 211 | 2500 0 0.2286 55.5 0.00229336 126.540 212 | 3150 0 0.2286 55.5 0.00229336 125.160 213 | 4000 0 0.2286 55.5 0.00229336 123.410 214 | 5000 0 0.2286 55.5 0.00229336 122.410 215 | 6300 0 0.2286 55.5 0.00229336 118.410 216 | 315 0 0.2286 39.6 0.00253511 121.055 217 | 400 0 0.2286 39.6 0.00253511 123.565 218 | 500 0 0.2286 39.6 0.00253511 126.195 219 | 630 0 0.2286 39.6 0.00253511 128.705 220 | 800 0 0.2286 39.6 0.00253511 130.205 221 | 1000 0 0.2286 39.6 0.00253511 130.435 222 | 1250 0 0.2286 39.6 0.00253511 129.395 223 | 1600 
0 0.2286 39.6 0.00253511 127.095 224 | 2000 0 0.2286 39.6 0.00253511 125.305 225 | 2500 0 0.2286 39.6 0.00253511 125.025 226 | 3150 0 0.2286 39.6 0.00253511 124.625 227 | 4000 0 0.2286 39.6 0.00253511 123.465 228 | 5000 0 0.2286 39.6 0.00253511 122.175 229 | 6300 0 0.2286 39.6 0.00253511 117.465 230 | 315 0 0.2286 31.7 0.0027238 120.595 231 | 400 0 0.2286 31.7 0.0027238 123.635 232 | 500 0 0.2286 31.7 0.0027238 126.675 233 | 630 0 0.2286 31.7 0.0027238 129.465 234 | 800 0 0.2286 31.7 0.0027238 130.725 235 | 1000 0 0.2286 31.7 0.0027238 130.595 236 | 1250 0 0.2286 31.7 0.0027238 128.805 237 | 1600 0 0.2286 31.7 0.0027238 125.625 238 | 2000 0 0.2286 31.7 0.0027238 123.455 239 | 2500 0 0.2286 31.7 0.0027238 123.445 240 | 3150 0 0.2286 31.7 0.0027238 123.445 241 | 4000 0 0.2286 31.7 0.0027238 122.035 242 | 5000 0 0.2286 31.7 0.0027238 120.505 243 | 6300 0 0.2286 31.7 0.0027238 116.815 244 | 400 2 0.2286 71.3 0.00293031 125.116 245 | 500 2 0.2286 71.3 0.00293031 126.486 246 | 630 2 0.2286 71.3 0.00293031 127.356 247 | 800 2 0.2286 71.3 0.00293031 128.216 248 | 1000 2 0.2286 71.3 0.00293031 128.956 249 | 1250 2 0.2286 71.3 0.00293031 128.816 250 | 1600 2 0.2286 71.3 0.00293031 127.796 251 | 2000 2 0.2286 71.3 0.00293031 126.896 252 | 2500 2 0.2286 71.3 0.00293031 127.006 253 | 3150 2 0.2286 71.3 0.00293031 126.116 254 | 4000 2 0.2286 71.3 0.00293031 124.086 255 | 5000 2 0.2286 71.3 0.00293031 122.816 256 | 6300 2 0.2286 71.3 0.00293031 120.786 257 | 8000 2 0.2286 71.3 0.00293031 115.996 258 | 10000 2 0.2286 71.3 0.00293031 113.086 259 | 400 2 0.2286 55.5 0.00313525 122.292 260 | 500 2 0.2286 55.5 0.00313525 124.692 261 | 630 2 0.2286 55.5 0.00313525 126.842 262 | 800 2 0.2286 55.5 0.00313525 128.492 263 | 1000 2 0.2286 55.5 0.00313525 129.002 264 | 1250 2 0.2286 55.5 0.00313525 128.762 265 | 1600 2 0.2286 55.5 0.00313525 126.752 266 | 2000 2 0.2286 55.5 0.00313525 124.612 267 | 2500 2 0.2286 55.5 0.00313525 123.862 268 | 3150 2 0.2286 55.5 0.00313525 123.742 269 | 4000 2 0.2286 55.5 0.00313525 122.232 270 | 5000 2 0.2286 55.5 0.00313525 120.472 271 | 6300 2 0.2286 55.5 0.00313525 118.712 272 | 315 2 0.2286 39.6 0.00346574 120.137 273 | 400 2 0.2286 39.6 0.00346574 122.147 274 | 500 2 0.2286 39.6 0.00346574 125.157 275 | 630 2 0.2286 39.6 0.00346574 127.417 276 | 800 2 0.2286 39.6 0.00346574 129.037 277 | 1000 2 0.2286 39.6 0.00346574 129.147 278 | 1250 2 0.2286 39.6 0.00346574 128.257 279 | 1600 2 0.2286 39.6 0.00346574 125.837 280 | 2000 2 0.2286 39.6 0.00346574 122.797 281 | 2500 2 0.2286 39.6 0.00346574 121.397 282 | 3150 2 0.2286 39.6 0.00346574 121.627 283 | 4000 2 0.2286 39.6 0.00346574 120.227 284 | 5000 2 0.2286 39.6 0.00346574 118.827 285 | 6300 2 0.2286 39.6 0.00346574 116.417 286 | 315 2 0.2286 31.7 0.00372371 120.147 287 | 400 2 0.2286 31.7 0.00372371 123.417 288 | 500 2 0.2286 31.7 0.00372371 126.677 289 | 630 2 0.2286 31.7 0.00372371 129.057 290 | 800 2 0.2286 31.7 0.00372371 130.307 291 | 1000 2 0.2286 31.7 0.00372371 130.307 292 | 1250 2 0.2286 31.7 0.00372371 128.677 293 | 1600 2 0.2286 31.7 0.00372371 125.797 294 | 2000 2 0.2286 31.7 0.00372371 123.037 295 | 2500 2 0.2286 31.7 0.00372371 121.407 296 | 3150 2 0.2286 31.7 0.00372371 121.527 297 | 4000 2 0.2286 31.7 0.00372371 120.527 298 | 5000 2 0.2286 31.7 0.00372371 118.267 299 | 6300 2 0.2286 31.7 0.00372371 115.137 300 | 500 4 0.2286 71.3 0.00400603 126.758 301 | 630 4 0.2286 71.3 0.00400603 129.038 302 | 800 4 0.2286 71.3 0.00400603 130.688 303 | 1000 4 0.2286 71.3 0.00400603 131.708 304 | 1250 4 0.2286 71.3 0.00400603 
131.718 305 | 1600 4 0.2286 71.3 0.00400603 129.468 306 | 2000 4 0.2286 71.3 0.00400603 126.218 307 | 2500 4 0.2286 71.3 0.00400603 124.338 308 | 3150 4 0.2286 71.3 0.00400603 124.108 309 | 4000 4 0.2286 71.3 0.00400603 121.728 310 | 5000 4 0.2286 71.3 0.00400603 121.118 311 | 6300 4 0.2286 71.3 0.00400603 118.618 312 | 8000 4 0.2286 71.3 0.00400603 112.848 313 | 10000 4 0.2286 71.3 0.00400603 113.108 314 | 12500 4 0.2286 71.3 0.00400603 114.258 315 | 16000 4 0.2286 71.3 0.00400603 112.768 316 | 20000 4 0.2286 71.3 0.00400603 109.638 317 | 400 4 0.2286 55.5 0.0042862 123.274 318 | 500 4 0.2286 55.5 0.0042862 127.314 319 | 630 4 0.2286 55.5 0.0042862 129.964 320 | 800 4 0.2286 55.5 0.0042862 131.864 321 | 1000 4 0.2286 55.5 0.0042862 132.134 322 | 1250 4 0.2286 55.5 0.0042862 131.264 323 | 1600 4 0.2286 55.5 0.0042862 128.264 324 | 2000 4 0.2286 55.5 0.0042862 124.254 325 | 2500 4 0.2286 55.5 0.0042862 122.384 326 | 3150 4 0.2286 55.5 0.0042862 122.394 327 | 4000 4 0.2286 55.5 0.0042862 120.654 328 | 5000 4 0.2286 55.5 0.0042862 120.034 329 | 6300 4 0.2286 55.5 0.0042862 117.154 330 | 8000 4 0.2286 55.5 0.0042862 112.524 331 | 315 4 0.2286 39.6 0.00473801 122.229 332 | 400 4 0.2286 39.6 0.00473801 123.879 333 | 500 4 0.2286 39.6 0.00473801 127.039 334 | 630 4 0.2286 39.6 0.00473801 129.579 335 | 800 4 0.2286 39.6 0.00473801 130.469 336 | 1000 4 0.2286 39.6 0.00473801 129.969 337 | 1250 4 0.2286 39.6 0.00473801 128.339 338 | 1600 4 0.2286 39.6 0.00473801 125.319 339 | 2000 4 0.2286 39.6 0.00473801 121.659 340 | 2500 4 0.2286 39.6 0.00473801 119.649 341 | 3150 4 0.2286 39.6 0.00473801 120.419 342 | 4000 4 0.2286 39.6 0.00473801 119.159 343 | 5000 4 0.2286 39.6 0.00473801 117.649 344 | 6300 4 0.2286 39.6 0.00473801 114.249 345 | 8000 4 0.2286 39.6 0.00473801 113.129 346 | 250 4 0.2286 31.7 0.00509068 120.189 347 | 315 4 0.2286 31.7 0.00509068 123.609 348 | 400 4 0.2286 31.7 0.00509068 126.149 349 | 500 4 0.2286 31.7 0.00509068 128.939 350 | 630 4 0.2286 31.7 0.00509068 130.349 351 | 800 4 0.2286 31.7 0.00509068 130.869 352 | 1000 4 0.2286 31.7 0.00509068 129.869 353 | 1250 4 0.2286 31.7 0.00509068 128.119 354 | 1600 4 0.2286 31.7 0.00509068 125.229 355 | 2000 4 0.2286 31.7 0.00509068 122.089 356 | 2500 4 0.2286 31.7 0.00509068 120.209 357 | 3150 4 0.2286 31.7 0.00509068 120.229 358 | 4000 4 0.2286 31.7 0.00509068 118.859 359 | 5000 4 0.2286 31.7 0.00509068 115.969 360 | 6300 4 0.2286 31.7 0.00509068 112.699 361 | 400 5.3 0.2286 71.3 0.0051942 127.700 362 | 500 5.3 0.2286 71.3 0.0051942 129.880 363 | 630 5.3 0.2286 71.3 0.0051942 131.800 364 | 800 5.3 0.2286 71.3 0.0051942 133.480 365 | 1000 5.3 0.2286 71.3 0.0051942 134.000 366 | 1250 5.3 0.2286 71.3 0.0051942 133.380 367 | 1600 5.3 0.2286 71.3 0.0051942 130.460 368 | 2000 5.3 0.2286 71.3 0.0051942 125.890 369 | 2500 5.3 0.2286 71.3 0.0051942 123.740 370 | 3150 5.3 0.2286 71.3 0.0051942 123.120 371 | 4000 5.3 0.2286 71.3 0.0051942 120.330 372 | 5000 5.3 0.2286 71.3 0.0051942 118.050 373 | 6300 5.3 0.2286 71.3 0.0051942 116.920 374 | 8000 5.3 0.2286 71.3 0.0051942 114.900 375 | 10000 5.3 0.2286 71.3 0.0051942 111.350 376 | 250 5.3 0.2286 39.6 0.00614329 127.011 377 | 315 5.3 0.2286 39.6 0.00614329 129.691 378 | 400 5.3 0.2286 39.6 0.00614329 131.221 379 | 500 5.3 0.2286 39.6 0.00614329 132.251 380 | 630 5.3 0.2286 39.6 0.00614329 132.011 381 | 800 5.3 0.2286 39.6 0.00614329 129.491 382 | 1000 5.3 0.2286 39.6 0.00614329 125.581 383 | 1250 5.3 0.2286 39.6 0.00614329 125.721 384 | 1600 5.3 0.2286 39.6 0.00614329 123.081 385 | 2000 5.3 0.2286 39.6 
0.00614329 117.911 386 | 2500 5.3 0.2286 39.6 0.00614329 116.151 387 | 3150 5.3 0.2286 39.6 0.00614329 118.441 388 | 4000 5.3 0.2286 39.6 0.00614329 115.801 389 | 5000 5.3 0.2286 39.6 0.00614329 115.311 390 | 6300 5.3 0.2286 39.6 0.00614329 112.541 391 | 200 7.3 0.2286 71.3 0.0104404 138.758 392 | 250 7.3 0.2286 71.3 0.0104404 139.918 393 | 315 7.3 0.2286 71.3 0.0104404 139.808 394 | 400 7.3 0.2286 71.3 0.0104404 139.438 395 | 500 7.3 0.2286 71.3 0.0104404 136.798 396 | 630 7.3 0.2286 71.3 0.0104404 133.768 397 | 800 7.3 0.2286 71.3 0.0104404 130.748 398 | 1000 7.3 0.2286 71.3 0.0104404 126.838 399 | 1250 7.3 0.2286 71.3 0.0104404 127.358 400 | 1600 7.3 0.2286 71.3 0.0104404 125.728 401 | 2000 7.3 0.2286 71.3 0.0104404 122.708 402 | 2500 7.3 0.2286 71.3 0.0104404 122.088 403 | 3150 7.3 0.2286 71.3 0.0104404 120.458 404 | 4000 7.3 0.2286 71.3 0.0104404 119.208 405 | 5000 7.3 0.2286 71.3 0.0104404 115.298 406 | 6300 7.3 0.2286 71.3 0.0104404 115.818 407 | 200 7.3 0.2286 55.5 0.0111706 135.234 408 | 250 7.3 0.2286 55.5 0.0111706 136.384 409 | 315 7.3 0.2286 55.5 0.0111706 136.284 410 | 400 7.3 0.2286 55.5 0.0111706 135.924 411 | 500 7.3 0.2286 55.5 0.0111706 133.174 412 | 630 7.3 0.2286 55.5 0.0111706 130.934 413 | 800 7.3 0.2286 55.5 0.0111706 128.444 414 | 1000 7.3 0.2286 55.5 0.0111706 125.194 415 | 1250 7.3 0.2286 55.5 0.0111706 125.724 416 | 1600 7.3 0.2286 55.5 0.0111706 123.354 417 | 2000 7.3 0.2286 55.5 0.0111706 120.354 418 | 2500 7.3 0.2286 55.5 0.0111706 118.994 419 | 3150 7.3 0.2286 55.5 0.0111706 117.134 420 | 4000 7.3 0.2286 55.5 0.0111706 117.284 421 | 5000 7.3 0.2286 55.5 0.0111706 113.144 422 | 6300 7.3 0.2286 55.5 0.0111706 111.534 423 | 200 7.3 0.2286 39.6 0.0123481 130.989 424 | 250 7.3 0.2286 39.6 0.0123481 131.889 425 | 315 7.3 0.2286 39.6 0.0123481 132.149 426 | 400 7.3 0.2286 39.6 0.0123481 132.039 427 | 500 7.3 0.2286 39.6 0.0123481 130.299 428 | 630 7.3 0.2286 39.6 0.0123481 128.929 429 | 800 7.3 0.2286 39.6 0.0123481 126.299 430 | 1000 7.3 0.2286 39.6 0.0123481 122.539 431 | 1250 7.3 0.2286 39.6 0.0123481 123.189 432 | 1600 7.3 0.2286 39.6 0.0123481 121.059 433 | 2000 7.3 0.2286 39.6 0.0123481 117.809 434 | 2500 7.3 0.2286 39.6 0.0123481 116.559 435 | 3150 7.3 0.2286 39.6 0.0123481 114.309 436 | 4000 7.3 0.2286 39.6 0.0123481 114.079 437 | 5000 7.3 0.2286 39.6 0.0123481 111.959 438 | 6300 7.3 0.2286 39.6 0.0123481 110.839 439 | 200 7.3 0.2286 31.7 0.0132672 128.679 440 | 250 7.3 0.2286 31.7 0.0132672 130.089 441 | 315 7.3 0.2286 31.7 0.0132672 130.239 442 | 400 7.3 0.2286 31.7 0.0132672 130.269 443 | 500 7.3 0.2286 31.7 0.0132672 128.169 444 | 630 7.3 0.2286 31.7 0.0132672 126.189 445 | 800 7.3 0.2286 31.7 0.0132672 123.209 446 | 1000 7.3 0.2286 31.7 0.0132672 119.099 447 | 1250 7.3 0.2286 31.7 0.0132672 120.509 448 | 1600 7.3 0.2286 31.7 0.0132672 119.039 449 | 2000 7.3 0.2286 31.7 0.0132672 115.309 450 | 2500 7.3 0.2286 31.7 0.0132672 114.709 451 | 3150 7.3 0.2286 31.7 0.0132672 113.229 452 | 4000 7.3 0.2286 31.7 0.0132672 112.639 453 | 5000 7.3 0.2286 31.7 0.0132672 111.029 454 | 6300 7.3 0.2286 31.7 0.0132672 110.689 455 | 800 0 0.1524 71.3 0.0015988 125.817 456 | 1000 0 0.1524 71.3 0.0015988 127.307 457 | 1250 0 0.1524 71.3 0.0015988 128.927 458 | 1600 0 0.1524 71.3 0.0015988 129.667 459 | 2000 0 0.1524 71.3 0.0015988 128.647 460 | 2500 0 0.1524 71.3 0.0015988 128.127 461 | 3150 0 0.1524 71.3 0.0015988 129.377 462 | 4000 0 0.1524 71.3 0.0015988 128.857 463 | 5000 0 0.1524 71.3 0.0015988 126.457 464 | 6300 0 0.1524 71.3 0.0015988 125.427 465 | 8000 0 0.1524 71.3 
0.0015988 122.527 466 | 10000 0 0.1524 71.3 0.0015988 120.247 467 | 12500 0 0.1524 71.3 0.0015988 117.087 468 | 16000 0 0.1524 71.3 0.0015988 113.297 469 | 500 0 0.1524 55.5 0.00172668 120.573 470 | 630 0 0.1524 55.5 0.00172668 123.583 471 | 800 0 0.1524 55.5 0.00172668 126.713 472 | 1000 0 0.1524 55.5 0.00172668 128.583 473 | 1250 0 0.1524 55.5 0.00172668 129.953 474 | 1600 0 0.1524 55.5 0.00172668 130.183 475 | 2000 0 0.1524 55.5 0.00172668 129.673 476 | 2500 0 0.1524 55.5 0.00172668 127.763 477 | 3150 0 0.1524 55.5 0.00172668 127.753 478 | 4000 0 0.1524 55.5 0.00172668 127.233 479 | 5000 0 0.1524 55.5 0.00172668 125.203 480 | 6300 0 0.1524 55.5 0.00172668 123.303 481 | 8000 0 0.1524 55.5 0.00172668 121.903 482 | 10000 0 0.1524 55.5 0.00172668 119.253 483 | 12500 0 0.1524 55.5 0.00172668 117.093 484 | 16000 0 0.1524 55.5 0.00172668 112.803 485 | 500 0 0.1524 39.6 0.00193287 119.513 486 | 630 0 0.1524 39.6 0.00193287 124.403 487 | 800 0 0.1524 39.6 0.00193287 127.903 488 | 1000 0 0.1524 39.6 0.00193287 130.033 489 | 1250 0 0.1524 39.6 0.00193287 131.023 490 | 1600 0 0.1524 39.6 0.00193287 131.013 491 | 2000 0 0.1524 39.6 0.00193287 129.633 492 | 2500 0 0.1524 39.6 0.00193287 126.863 493 | 3150 0 0.1524 39.6 0.00193287 125.603 494 | 4000 0 0.1524 39.6 0.00193287 125.343 495 | 5000 0 0.1524 39.6 0.00193287 123.453 496 | 6300 0 0.1524 39.6 0.00193287 121.313 497 | 8000 0 0.1524 39.6 0.00193287 120.553 498 | 10000 0 0.1524 39.6 0.00193287 115.413 499 | 500 0 0.1524 31.7 0.00209405 121.617 500 | 630 0 0.1524 31.7 0.00209405 125.997 501 | 800 0 0.1524 31.7 0.00209405 129.117 502 | 1000 0 0.1524 31.7 0.00209405 130.987 503 | 1250 0 0.1524 31.7 0.00209405 131.467 504 | 1600 0 0.1524 31.7 0.00209405 130.817 505 | 2000 0 0.1524 31.7 0.00209405 128.907 506 | 2500 0 0.1524 31.7 0.00209405 125.867 507 | 3150 0 0.1524 31.7 0.00209405 124.207 508 | 4000 0 0.1524 31.7 0.00209405 123.807 509 | 5000 0 0.1524 31.7 0.00209405 122.397 510 | 6300 0 0.1524 31.7 0.00209405 119.737 511 | 8000 0 0.1524 31.7 0.00209405 117.957 512 | 630 2.7 0.1524 71.3 0.00243851 127.404 513 | 800 2.7 0.1524 71.3 0.00243851 127.394 514 | 1000 2.7 0.1524 71.3 0.00243851 128.774 515 | 1250 2.7 0.1524 71.3 0.00243851 130.144 516 | 1600 2.7 0.1524 71.3 0.00243851 130.644 517 | 2000 2.7 0.1524 71.3 0.00243851 130.114 518 | 2500 2.7 0.1524 71.3 0.00243851 128.334 519 | 3150 2.7 0.1524 71.3 0.00243851 127.054 520 | 4000 2.7 0.1524 71.3 0.00243851 126.534 521 | 5000 2.7 0.1524 71.3 0.00243851 124.364 522 | 6300 2.7 0.1524 71.3 0.00243851 121.944 523 | 8000 2.7 0.1524 71.3 0.00243851 120.534 524 | 10000 2.7 0.1524 71.3 0.00243851 116.724 525 | 12500 2.7 0.1524 71.3 0.00243851 113.034 526 | 16000 2.7 0.1524 71.3 0.00243851 110.364 527 | 500 2.7 0.1524 39.6 0.00294804 121.009 528 | 630 2.7 0.1524 39.6 0.00294804 125.809 529 | 800 2.7 0.1524 39.6 0.00294804 128.829 530 | 1000 2.7 0.1524 39.6 0.00294804 130.589 531 | 1250 2.7 0.1524 39.6 0.00294804 130.829 532 | 1600 2.7 0.1524 39.6 0.00294804 130.049 533 | 2000 2.7 0.1524 39.6 0.00294804 128.139 534 | 2500 2.7 0.1524 39.6 0.00294804 125.589 535 | 3150 2.7 0.1524 39.6 0.00294804 122.919 536 | 4000 2.7 0.1524 39.6 0.00294804 121.889 537 | 5000 2.7 0.1524 39.6 0.00294804 121.499 538 | 6300 2.7 0.1524 39.6 0.00294804 119.209 539 | 8000 2.7 0.1524 39.6 0.00294804 116.659 540 | 10000 2.7 0.1524 39.6 0.00294804 112.589 541 | 12500 2.7 0.1524 39.6 0.00294804 108.649 542 | 400 5.4 0.1524 71.3 0.00401199 124.121 543 | 500 5.4 0.1524 71.3 0.00401199 126.291 544 | 630 5.4 0.1524 71.3 0.00401199 128.971 
545 | 800 5.4 0.1524 71.3 0.00401199 131.281 546 | 1000 5.4 0.1524 71.3 0.00401199 133.201 547 | 1250 5.4 0.1524 71.3 0.00401199 134.111 548 | 1600 5.4 0.1524 71.3 0.00401199 133.241 549 | 2000 5.4 0.1524 71.3 0.00401199 131.111 550 | 2500 5.4 0.1524 71.3 0.00401199 127.591 551 | 3150 5.4 0.1524 71.3 0.00401199 123.311 552 | 4000 5.4 0.1524 71.3 0.00401199 121.431 553 | 5000 5.4 0.1524 71.3 0.00401199 120.061 554 | 6300 5.4 0.1524 71.3 0.00401199 116.411 555 | 400 5.4 0.1524 55.5 0.00433288 126.807 556 | 500 5.4 0.1524 55.5 0.00433288 129.367 557 | 630 5.4 0.1524 55.5 0.00433288 131.807 558 | 800 5.4 0.1524 55.5 0.00433288 133.097 559 | 1000 5.4 0.1524 55.5 0.00433288 132.127 560 | 1250 5.4 0.1524 55.5 0.00433288 130.777 561 | 1600 5.4 0.1524 55.5 0.00433288 130.567 562 | 2000 5.4 0.1524 55.5 0.00433288 128.707 563 | 2500 5.4 0.1524 55.5 0.00433288 124.077 564 | 3150 5.4 0.1524 55.5 0.00433288 121.587 565 | 4000 5.4 0.1524 55.5 0.00433288 119.737 566 | 5000 5.4 0.1524 55.5 0.00433288 118.757 567 | 6300 5.4 0.1524 55.5 0.00433288 117.287 568 | 8000 5.4 0.1524 55.5 0.00433288 114.927 569 | 315 5.4 0.1524 39.6 0.00485029 125.347 570 | 400 5.4 0.1524 39.6 0.00485029 127.637 571 | 500 5.4 0.1524 39.6 0.00485029 129.937 572 | 630 5.4 0.1524 39.6 0.00485029 132.357 573 | 800 5.4 0.1524 39.6 0.00485029 132.757 574 | 1000 5.4 0.1524 39.6 0.00485029 130.507 575 | 1250 5.4 0.1524 39.6 0.00485029 127.117 576 | 1600 5.4 0.1524 39.6 0.00485029 126.267 577 | 2000 5.4 0.1524 39.6 0.00485029 124.647 578 | 2500 5.4 0.1524 39.6 0.00485029 120.497 579 | 3150 5.4 0.1524 39.6 0.00485029 119.137 580 | 4000 5.4 0.1524 39.6 0.00485029 117.137 581 | 5000 5.4 0.1524 39.6 0.00485029 117.037 582 | 6300 5.4 0.1524 39.6 0.00485029 116.677 583 | 315 5.4 0.1524 31.7 0.00525474 125.741 584 | 400 5.4 0.1524 31.7 0.00525474 127.781 585 | 500 5.4 0.1524 31.7 0.00525474 129.681 586 | 630 5.4 0.1524 31.7 0.00525474 131.471 587 | 800 5.4 0.1524 31.7 0.00525474 131.491 588 | 1000 5.4 0.1524 31.7 0.00525474 128.241 589 | 1250 5.4 0.1524 31.7 0.00525474 123.991 590 | 1600 5.4 0.1524 31.7 0.00525474 123.761 591 | 2000 5.4 0.1524 31.7 0.00525474 122.771 592 | 2500 5.4 0.1524 31.7 0.00525474 119.151 593 | 3150 5.4 0.1524 31.7 0.00525474 118.291 594 | 4000 5.4 0.1524 31.7 0.00525474 116.181 595 | 5000 5.4 0.1524 31.7 0.00525474 115.691 596 | 6300 5.4 0.1524 31.7 0.00525474 115.591 597 | 315 7.2 0.1524 71.3 0.00752039 128.713 598 | 400 7.2 0.1524 71.3 0.00752039 130.123 599 | 500 7.2 0.1524 71.3 0.00752039 132.043 600 | 630 7.2 0.1524 71.3 0.00752039 134.853 601 | 800 7.2 0.1524 71.3 0.00752039 136.023 602 | 1000 7.2 0.1524 71.3 0.00752039 134.273 603 | 1250 7.2 0.1524 71.3 0.00752039 132.513 604 | 1600 7.2 0.1524 71.3 0.00752039 130.893 605 | 2000 7.2 0.1524 71.3 0.00752039 128.643 606 | 2500 7.2 0.1524 71.3 0.00752039 124.353 607 | 3150 7.2 0.1524 71.3 0.00752039 116.783 608 | 4000 7.2 0.1524 71.3 0.00752039 119.343 609 | 5000 7.2 0.1524 71.3 0.00752039 118.343 610 | 6300 7.2 0.1524 71.3 0.00752039 116.603 611 | 8000 7.2 0.1524 71.3 0.00752039 113.333 612 | 10000 7.2 0.1524 71.3 0.00752039 110.313 613 | 250 7.2 0.1524 39.6 0.00909175 127.488 614 | 315 7.2 0.1524 39.6 0.00909175 130.558 615 | 400 7.2 0.1524 39.6 0.00909175 132.118 616 | 500 7.2 0.1524 39.6 0.00909175 132.658 617 | 630 7.2 0.1524 39.6 0.00909175 133.198 618 | 800 7.2 0.1524 39.6 0.00909175 132.358 619 | 1000 7.2 0.1524 39.6 0.00909175 128.338 620 | 1250 7.2 0.1524 39.6 0.00909175 122.428 621 | 1600 7.2 0.1524 39.6 0.00909175 120.058 622 | 2000 7.2 0.1524 39.6 0.00909175 
120.228 623 | 2500 7.2 0.1524 39.6 0.00909175 117.478 624 | 3150 7.2 0.1524 39.6 0.00909175 111.818 625 | 4000 7.2 0.1524 39.6 0.00909175 114.258 626 | 5000 7.2 0.1524 39.6 0.00909175 113.288 627 | 6300 7.2 0.1524 39.6 0.00909175 112.688 628 | 8000 7.2 0.1524 39.6 0.00909175 111.588 629 | 10000 7.2 0.1524 39.6 0.00909175 110.868 630 | 200 9.9 0.1524 71.3 0.0193001 134.319 631 | 250 9.9 0.1524 71.3 0.0193001 135.329 632 | 315 9.9 0.1524 71.3 0.0193001 135.459 633 | 400 9.9 0.1524 71.3 0.0193001 135.079 634 | 500 9.9 0.1524 71.3 0.0193001 131.279 635 | 630 9.9 0.1524 71.3 0.0193001 129.889 636 | 800 9.9 0.1524 71.3 0.0193001 128.879 637 | 1000 9.9 0.1524 71.3 0.0193001 126.349 638 | 1250 9.9 0.1524 71.3 0.0193001 122.679 639 | 1600 9.9 0.1524 71.3 0.0193001 121.789 640 | 2000 9.9 0.1524 71.3 0.0193001 120.779 641 | 2500 9.9 0.1524 71.3 0.0193001 119.639 642 | 3150 9.9 0.1524 71.3 0.0193001 116.849 643 | 4000 9.9 0.1524 71.3 0.0193001 115.079 644 | 5000 9.9 0.1524 71.3 0.0193001 114.569 645 | 6300 9.9 0.1524 71.3 0.0193001 112.039 646 | 200 9.9 0.1524 55.5 0.0208438 131.955 647 | 250 9.9 0.1524 55.5 0.0208438 133.235 648 | 315 9.9 0.1524 55.5 0.0208438 132.355 649 | 400 9.9 0.1524 55.5 0.0208438 131.605 650 | 500 9.9 0.1524 55.5 0.0208438 127.815 651 | 630 9.9 0.1524 55.5 0.0208438 127.315 652 | 800 9.9 0.1524 55.5 0.0208438 126.565 653 | 1000 9.9 0.1524 55.5 0.0208438 124.665 654 | 1250 9.9 0.1524 55.5 0.0208438 121.635 655 | 1600 9.9 0.1524 55.5 0.0208438 119.875 656 | 2000 9.9 0.1524 55.5 0.0208438 119.505 657 | 2500 9.9 0.1524 55.5 0.0208438 118.365 658 | 3150 9.9 0.1524 55.5 0.0208438 115.085 659 | 4000 9.9 0.1524 55.5 0.0208438 112.945 660 | 5000 9.9 0.1524 55.5 0.0208438 112.065 661 | 6300 9.9 0.1524 55.5 0.0208438 110.555 662 | 200 9.9 0.1524 39.6 0.0233328 127.315 663 | 250 9.9 0.1524 39.6 0.0233328 128.335 664 | 315 9.9 0.1524 39.6 0.0233328 128.595 665 | 400 9.9 0.1524 39.6 0.0233328 128.345 666 | 500 9.9 0.1524 39.6 0.0233328 126.835 667 | 630 9.9 0.1524 39.6 0.0233328 126.465 668 | 800 9.9 0.1524 39.6 0.0233328 126.345 669 | 1000 9.9 0.1524 39.6 0.0233328 123.835 670 | 1250 9.9 0.1524 39.6 0.0233328 120.555 671 | 1600 9.9 0.1524 39.6 0.0233328 118.545 672 | 2000 9.9 0.1524 39.6 0.0233328 117.925 673 | 2500 9.9 0.1524 39.6 0.0233328 116.295 674 | 3150 9.9 0.1524 39.6 0.0233328 113.525 675 | 4000 9.9 0.1524 39.6 0.0233328 112.265 676 | 5000 9.9 0.1524 39.6 0.0233328 111.135 677 | 6300 9.9 0.1524 39.6 0.0233328 109.885 678 | 200 9.9 0.1524 31.7 0.0252785 127.299 679 | 250 9.9 0.1524 31.7 0.0252785 128.559 680 | 315 9.9 0.1524 31.7 0.0252785 128.809 681 | 400 9.9 0.1524 31.7 0.0252785 128.939 682 | 500 9.9 0.1524 31.7 0.0252785 127.179 683 | 630 9.9 0.1524 31.7 0.0252785 126.049 684 | 800 9.9 0.1524 31.7 0.0252785 125.539 685 | 1000 9.9 0.1524 31.7 0.0252785 122.149 686 | 1250 9.9 0.1524 31.7 0.0252785 118.619 687 | 1600 9.9 0.1524 31.7 0.0252785 117.119 688 | 2000 9.9 0.1524 31.7 0.0252785 116.859 689 | 2500 9.9 0.1524 31.7 0.0252785 114.729 690 | 3150 9.9 0.1524 31.7 0.0252785 112.209 691 | 4000 9.9 0.1524 31.7 0.0252785 111.459 692 | 5000 9.9 0.1524 31.7 0.0252785 109.949 693 | 6300 9.9 0.1524 31.7 0.0252785 108.689 694 | 200 12.6 0.1524 71.3 0.0483159 128.354 695 | 250 12.6 0.1524 71.3 0.0483159 129.744 696 | 315 12.6 0.1524 71.3 0.0483159 128.484 697 | 400 12.6 0.1524 71.3 0.0483159 127.094 698 | 500 12.6 0.1524 71.3 0.0483159 121.664 699 | 630 12.6 0.1524 71.3 0.0483159 123.304 700 | 800 12.6 0.1524 71.3 0.0483159 123.054 701 | 1000 12.6 0.1524 71.3 0.0483159 122.044 702 | 1250 
12.6 0.1524 71.3 0.0483159 120.154 703 | 1600 12.6 0.1524 71.3 0.0483159 120.534 704 | 2000 12.6 0.1524 71.3 0.0483159 117.504 705 | 2500 12.6 0.1524 71.3 0.0483159 115.234 706 | 3150 12.6 0.1524 71.3 0.0483159 113.334 707 | 4000 12.6 0.1524 71.3 0.0483159 108.034 708 | 5000 12.6 0.1524 71.3 0.0483159 108.034 709 | 6300 12.6 0.1524 71.3 0.0483159 107.284 710 | 200 12.6 0.1524 39.6 0.0584113 114.750 711 | 250 12.6 0.1524 39.6 0.0584113 115.890 712 | 315 12.6 0.1524 39.6 0.0584113 116.020 713 | 400 12.6 0.1524 39.6 0.0584113 115.910 714 | 500 12.6 0.1524 39.6 0.0584113 114.900 715 | 630 12.6 0.1524 39.6 0.0584113 116.550 716 | 800 12.6 0.1524 39.6 0.0584113 116.560 717 | 1000 12.6 0.1524 39.6 0.0584113 114.670 718 | 1250 12.6 0.1524 39.6 0.0584113 112.160 719 | 1600 12.6 0.1524 39.6 0.0584113 110.780 720 | 2000 12.6 0.1524 39.6 0.0584113 109.520 721 | 2500 12.6 0.1524 39.6 0.0584113 106.880 722 | 3150 12.6 0.1524 39.6 0.0584113 106.260 723 | 4000 12.6 0.1524 39.6 0.0584113 104.500 724 | 5000 12.6 0.1524 39.6 0.0584113 104.130 725 | 6300 12.6 0.1524 39.6 0.0584113 103.380 726 | 800 0 0.0508 71.3 0.000740478 130.960 727 | 1000 0 0.0508 71.3 0.000740478 129.450 728 | 1250 0 0.0508 71.3 0.000740478 128.560 729 | 1600 0 0.0508 71.3 0.000740478 129.680 730 | 2000 0 0.0508 71.3 0.000740478 131.060 731 | 2500 0 0.0508 71.3 0.000740478 131.310 732 | 3150 0 0.0508 71.3 0.000740478 135.070 733 | 4000 0 0.0508 71.3 0.000740478 134.430 734 | 5000 0 0.0508 71.3 0.000740478 134.430 735 | 6300 0 0.0508 71.3 0.000740478 133.040 736 | 8000 0 0.0508 71.3 0.000740478 130.890 737 | 10000 0 0.0508 71.3 0.000740478 128.740 738 | 12500 0 0.0508 71.3 0.000740478 125.220 739 | 800 0 0.0508 55.5 0.00076193 124.336 740 | 1000 0 0.0508 55.5 0.00076193 125.586 741 | 1250 0 0.0508 55.5 0.00076193 127.076 742 | 1600 0 0.0508 55.5 0.00076193 128.576 743 | 2000 0 0.0508 55.5 0.00076193 131.456 744 | 2500 0 0.0508 55.5 0.00076193 133.956 745 | 3150 0 0.0508 55.5 0.00076193 134.826 746 | 4000 0 0.0508 55.5 0.00076193 134.946 747 | 5000 0 0.0508 55.5 0.00076193 134.556 748 | 6300 0 0.0508 55.5 0.00076193 132.796 749 | 8000 0 0.0508 55.5 0.00076193 130.156 750 | 10000 0 0.0508 55.5 0.00076193 127.636 751 | 12500 0 0.0508 55.5 0.00076193 125.376 752 | 800 0 0.0508 39.6 0.000791822 126.508 753 | 1000 0 0.0508 39.6 0.000791822 127.638 754 | 1250 0 0.0508 39.6 0.000791822 129.148 755 | 1600 0 0.0508 39.6 0.000791822 130.908 756 | 2000 0 0.0508 39.6 0.000791822 132.918 757 | 2500 0 0.0508 39.6 0.000791822 134.938 758 | 3150 0 0.0508 39.6 0.000791822 135.938 759 | 4000 0 0.0508 39.6 0.000791822 135.308 760 | 5000 0 0.0508 39.6 0.000791822 134.308 761 | 6300 0 0.0508 39.6 0.000791822 131.918 762 | 8000 0 0.0508 39.6 0.000791822 128.518 763 | 10000 0 0.0508 39.6 0.000791822 125.998 764 | 12500 0 0.0508 39.6 0.000791822 123.988 765 | 800 0 0.0508 31.7 0.000812164 122.790 766 | 1000 0 0.0508 31.7 0.000812164 126.780 767 | 1250 0 0.0508 31.7 0.000812164 129.270 768 | 1600 0 0.0508 31.7 0.000812164 131.010 769 | 2000 0 0.0508 31.7 0.000812164 133.010 770 | 2500 0 0.0508 31.7 0.000812164 134.870 771 | 3150 0 0.0508 31.7 0.000812164 135.490 772 | 4000 0 0.0508 31.7 0.000812164 134.110 773 | 5000 0 0.0508 31.7 0.000812164 133.230 774 | 6300 0 0.0508 31.7 0.000812164 130.340 775 | 8000 0 0.0508 31.7 0.000812164 126.590 776 | 10000 0 0.0508 31.7 0.000812164 122.450 777 | 12500 0 0.0508 31.7 0.000812164 119.070 778 | 1600 4.2 0.0508 71.3 0.00142788 124.318 779 | 2000 4.2 0.0508 71.3 0.00142788 129.848 780 | 2500 4.2 0.0508 71.3 0.00142788 131.978 
781 | 3150 4.2 0.0508 71.3 0.00142788 133.728 782 | 4000 4.2 0.0508 71.3 0.00142788 133.598 783 | 5000 4.2 0.0508 71.3 0.00142788 132.828 784 | 6300 4.2 0.0508 71.3 0.00142788 129.308 785 | 8000 4.2 0.0508 71.3 0.00142788 125.268 786 | 10000 4.2 0.0508 71.3 0.00142788 121.238 787 | 12500 4.2 0.0508 71.3 0.00142788 117.328 788 | 1000 4.2 0.0508 39.6 0.00152689 125.647 789 | 1250 4.2 0.0508 39.6 0.00152689 128.427 790 | 1600 4.2 0.0508 39.6 0.00152689 130.197 791 | 2000 4.2 0.0508 39.6 0.00152689 132.587 792 | 2500 4.2 0.0508 39.6 0.00152689 133.847 793 | 3150 4.2 0.0508 39.6 0.00152689 133.587 794 | 4000 4.2 0.0508 39.6 0.00152689 131.807 795 | 5000 4.2 0.0508 39.6 0.00152689 129.777 796 | 6300 4.2 0.0508 39.6 0.00152689 125.717 797 | 8000 4.2 0.0508 39.6 0.00152689 120.397 798 | 10000 4.2 0.0508 39.6 0.00152689 116.967 799 | 800 8.4 0.0508 71.3 0.00529514 127.556 800 | 1000 8.4 0.0508 71.3 0.00529514 129.946 801 | 1250 8.4 0.0508 71.3 0.00529514 132.086 802 | 1600 8.4 0.0508 71.3 0.00529514 133.846 803 | 2000 8.4 0.0508 71.3 0.00529514 134.476 804 | 2500 8.4 0.0508 71.3 0.00529514 134.226 805 | 3150 8.4 0.0508 71.3 0.00529514 131.966 806 | 4000 8.4 0.0508 71.3 0.00529514 126.926 807 | 5000 8.4 0.0508 71.3 0.00529514 121.146 808 | 400 8.4 0.0508 55.5 0.00544854 121.582 809 | 500 8.4 0.0508 55.5 0.00544854 123.742 810 | 630 8.4 0.0508 55.5 0.00544854 126.152 811 | 800 8.4 0.0508 55.5 0.00544854 128.562 812 | 1000 8.4 0.0508 55.5 0.00544854 130.722 813 | 1250 8.4 0.0508 55.5 0.00544854 132.252 814 | 1600 8.4 0.0508 55.5 0.00544854 133.032 815 | 2000 8.4 0.0508 55.5 0.00544854 133.042 816 | 2500 8.4 0.0508 55.5 0.00544854 131.542 817 | 3150 8.4 0.0508 55.5 0.00544854 128.402 818 | 4000 8.4 0.0508 55.5 0.00544854 122.612 819 | 5000 8.4 0.0508 55.5 0.00544854 115.812 820 | 400 8.4 0.0508 39.6 0.00566229 120.015 821 | 500 8.4 0.0508 39.6 0.00566229 122.905 822 | 630 8.4 0.0508 39.6 0.00566229 126.045 823 | 800 8.4 0.0508 39.6 0.00566229 128.435 824 | 1000 8.4 0.0508 39.6 0.00566229 130.195 825 | 1250 8.4 0.0508 39.6 0.00566229 131.205 826 | 1600 8.4 0.0508 39.6 0.00566229 130.965 827 | 2000 8.4 0.0508 39.6 0.00566229 129.965 828 | 2500 8.4 0.0508 39.6 0.00566229 127.465 829 | 3150 8.4 0.0508 39.6 0.00566229 123.965 830 | 4000 8.4 0.0508 39.6 0.00566229 118.955 831 | 400 8.4 0.0508 31.7 0.00580776 120.076 832 | 500 8.4 0.0508 31.7 0.00580776 122.966 833 | 630 8.4 0.0508 31.7 0.00580776 125.856 834 | 800 8.4 0.0508 31.7 0.00580776 128.246 835 | 1000 8.4 0.0508 31.7 0.00580776 129.516 836 | 1250 8.4 0.0508 31.7 0.00580776 130.156 837 | 1600 8.4 0.0508 31.7 0.00580776 129.296 838 | 2000 8.4 0.0508 31.7 0.00580776 127.686 839 | 2500 8.4 0.0508 31.7 0.00580776 125.576 840 | 3150 8.4 0.0508 31.7 0.00580776 122.086 841 | 4000 8.4 0.0508 31.7 0.00580776 118.106 842 | 200 11.2 0.0508 71.3 0.014072 125.941 843 | 250 11.2 0.0508 71.3 0.014072 127.101 844 | 315 11.2 0.0508 71.3 0.014072 128.381 845 | 400 11.2 0.0508 71.3 0.014072 129.281 846 | 500 11.2 0.0508 71.3 0.014072 130.311 847 | 630 11.2 0.0508 71.3 0.014072 133.611 848 | 800 11.2 0.0508 71.3 0.014072 136.031 849 | 1000 11.2 0.0508 71.3 0.014072 136.941 850 | 1250 11.2 0.0508 71.3 0.014072 136.191 851 | 1600 11.2 0.0508 71.3 0.014072 135.191 852 | 2000 11.2 0.0508 71.3 0.014072 133.311 853 | 2500 11.2 0.0508 71.3 0.014072 130.541 854 | 3150 11.2 0.0508 71.3 0.014072 127.141 855 | 4000 11.2 0.0508 71.3 0.014072 122.471 856 | 200 11.2 0.0508 39.6 0.0150478 125.010 857 | 250 11.2 0.0508 39.6 0.0150478 126.430 858 | 315 11.2 0.0508 39.6 0.0150478 128.990 
859 | 400 11.2 0.0508 39.6 0.0150478 130.670 860 | 500 11.2 0.0508 39.6 0.0150478 131.960 861 | 630 11.2 0.0508 39.6 0.0150478 133.130 862 | 800 11.2 0.0508 39.6 0.0150478 133.790 863 | 1000 11.2 0.0508 39.6 0.0150478 132.430 864 | 1250 11.2 0.0508 39.6 0.0150478 130.050 865 | 1600 11.2 0.0508 39.6 0.0150478 126.540 866 | 2000 11.2 0.0508 39.6 0.0150478 124.420 867 | 2500 11.2 0.0508 39.6 0.0150478 122.170 868 | 3150 11.2 0.0508 39.6 0.0150478 119.670 869 | 4000 11.2 0.0508 39.6 0.0150478 115.520 870 | 200 15.4 0.0508 71.3 0.0264269 123.595 871 | 250 15.4 0.0508 71.3 0.0264269 124.835 872 | 315 15.4 0.0508 71.3 0.0264269 126.195 873 | 400 15.4 0.0508 71.3 0.0264269 126.805 874 | 500 15.4 0.0508 71.3 0.0264269 127.285 875 | 630 15.4 0.0508 71.3 0.0264269 129.645 876 | 800 15.4 0.0508 71.3 0.0264269 131.515 877 | 1000 15.4 0.0508 71.3 0.0264269 131.865 878 | 1250 15.4 0.0508 71.3 0.0264269 130.845 879 | 1600 15.4 0.0508 71.3 0.0264269 130.065 880 | 2000 15.4 0.0508 71.3 0.0264269 129.285 881 | 2500 15.4 0.0508 71.3 0.0264269 127.625 882 | 3150 15.4 0.0508 71.3 0.0264269 125.715 883 | 4000 15.4 0.0508 71.3 0.0264269 122.675 884 | 5000 15.4 0.0508 71.3 0.0264269 119.135 885 | 6300 15.4 0.0508 71.3 0.0264269 115.215 886 | 8000 15.4 0.0508 71.3 0.0264269 112.675 887 | 200 15.4 0.0508 55.5 0.0271925 122.940 888 | 250 15.4 0.0508 55.5 0.0271925 124.170 889 | 315 15.4 0.0508 55.5 0.0271925 125.390 890 | 400 15.4 0.0508 55.5 0.0271925 126.500 891 | 500 15.4 0.0508 55.5 0.0271925 127.220 892 | 630 15.4 0.0508 55.5 0.0271925 129.330 893 | 800 15.4 0.0508 55.5 0.0271925 130.430 894 | 1000 15.4 0.0508 55.5 0.0271925 130.400 895 | 1250 15.4 0.0508 55.5 0.0271925 130.000 896 | 1600 15.4 0.0508 55.5 0.0271925 128.200 897 | 2000 15.4 0.0508 55.5 0.0271925 127.040 898 | 2500 15.4 0.0508 55.5 0.0271925 125.630 899 | 3150 15.4 0.0508 55.5 0.0271925 123.460 900 | 4000 15.4 0.0508 55.5 0.0271925 120.920 901 | 5000 15.4 0.0508 55.5 0.0271925 117.110 902 | 6300 15.4 0.0508 55.5 0.0271925 112.930 903 | 200 15.4 0.0508 39.6 0.0282593 121.783 904 | 250 15.4 0.0508 39.6 0.0282593 122.893 905 | 315 15.4 0.0508 39.6 0.0282593 124.493 906 | 400 15.4 0.0508 39.6 0.0282593 125.353 907 | 500 15.4 0.0508 39.6 0.0282593 125.963 908 | 630 15.4 0.0508 39.6 0.0282593 127.443 909 | 800 15.4 0.0508 39.6 0.0282593 128.423 910 | 1000 15.4 0.0508 39.6 0.0282593 127.893 911 | 1250 15.4 0.0508 39.6 0.0282593 126.743 912 | 1600 15.4 0.0508 39.6 0.0282593 124.843 913 | 2000 15.4 0.0508 39.6 0.0282593 123.443 914 | 2500 15.4 0.0508 39.6 0.0282593 122.413 915 | 3150 15.4 0.0508 39.6 0.0282593 120.513 916 | 4000 15.4 0.0508 39.6 0.0282593 118.113 917 | 5000 15.4 0.0508 39.6 0.0282593 114.453 918 | 6300 15.4 0.0508 39.6 0.0282593 109.663 919 | 200 15.4 0.0508 31.7 0.0289853 119.975 920 | 250 15.4 0.0508 31.7 0.0289853 121.225 921 | 315 15.4 0.0508 31.7 0.0289853 122.845 922 | 400 15.4 0.0508 31.7 0.0289853 123.705 923 | 500 15.4 0.0508 31.7 0.0289853 123.695 924 | 630 15.4 0.0508 31.7 0.0289853 124.685 925 | 800 15.4 0.0508 31.7 0.0289853 125.555 926 | 1000 15.4 0.0508 31.7 0.0289853 124.525 927 | 1250 15.4 0.0508 31.7 0.0289853 123.255 928 | 1600 15.4 0.0508 31.7 0.0289853 121.485 929 | 2000 15.4 0.0508 31.7 0.0289853 120.835 930 | 2500 15.4 0.0508 31.7 0.0289853 119.945 931 | 3150 15.4 0.0508 31.7 0.0289853 118.045 932 | 4000 15.4 0.0508 31.7 0.0289853 115.635 933 | 5000 15.4 0.0508 31.7 0.0289853 112.355 934 | 6300 15.4 0.0508 31.7 0.0289853 108.185 935 | 200 19.7 0.0508 71.3 0.0341183 118.005 936 | 250 19.7 0.0508 71.3 0.0341183 119.115 
937 | 315 19.7 0.0508 71.3 0.0341183 121.235 938 | 400 19.7 0.0508 71.3 0.0341183 123.865 939 | 500 19.7 0.0508 71.3 0.0341183 126.995 940 | 630 19.7 0.0508 71.3 0.0341183 128.365 941 | 800 19.7 0.0508 71.3 0.0341183 124.555 942 | 1000 19.7 0.0508 71.3 0.0341183 121.885 943 | 1250 19.7 0.0508 71.3 0.0341183 121.485 944 | 1600 19.7 0.0508 71.3 0.0341183 120.575 945 | 2000 19.7 0.0508 71.3 0.0341183 120.055 946 | 2500 19.7 0.0508 71.3 0.0341183 118.385 947 | 3150 19.7 0.0508 71.3 0.0341183 116.225 948 | 4000 19.7 0.0508 71.3 0.0341183 113.045 949 | 200 19.7 0.0508 39.6 0.036484 125.974 950 | 250 19.7 0.0508 39.6 0.036484 127.224 951 | 315 19.7 0.0508 39.6 0.036484 129.864 952 | 400 19.7 0.0508 39.6 0.036484 130.614 953 | 500 19.7 0.0508 39.6 0.036484 128.444 954 | 630 19.7 0.0508 39.6 0.036484 120.324 955 | 800 19.7 0.0508 39.6 0.036484 119.174 956 | 1000 19.7 0.0508 39.6 0.036484 118.904 957 | 1250 19.7 0.0508 39.6 0.036484 118.634 958 | 1600 19.7 0.0508 39.6 0.036484 117.604 959 | 2000 19.7 0.0508 39.6 0.036484 117.724 960 | 2500 19.7 0.0508 39.6 0.036484 116.184 961 | 3150 19.7 0.0508 39.6 0.036484 113.004 962 | 4000 19.7 0.0508 39.6 0.036484 108.684 963 | 2500 0 0.0254 71.3 0.000400682 133.707 964 | 3150 0 0.0254 71.3 0.000400682 137.007 965 | 4000 0 0.0254 71.3 0.000400682 138.557 966 | 5000 0 0.0254 71.3 0.000400682 136.837 967 | 6300 0 0.0254 71.3 0.000400682 134.987 968 | 8000 0 0.0254 71.3 0.000400682 129.867 969 | 10000 0 0.0254 71.3 0.000400682 130.787 970 | 12500 0 0.0254 71.3 0.000400682 133.207 971 | 16000 0 0.0254 71.3 0.000400682 130.477 972 | 20000 0 0.0254 71.3 0.000400682 123.217 973 | 2000 0 0.0254 55.5 0.00041229 127.623 974 | 2500 0 0.0254 55.5 0.00041229 130.073 975 | 3150 0 0.0254 55.5 0.00041229 130.503 976 | 4000 0 0.0254 55.5 0.00041229 133.223 977 | 5000 0 0.0254 55.5 0.00041229 135.803 978 | 6300 0 0.0254 55.5 0.00041229 136.103 979 | 8000 0 0.0254 55.5 0.00041229 136.163 980 | 10000 0 0.0254 55.5 0.00041229 134.563 981 | 12500 0 0.0254 55.5 0.00041229 131.453 982 | 16000 0 0.0254 55.5 0.00041229 125.683 983 | 20000 0 0.0254 55.5 0.00041229 121.933 984 | 1600 0 0.0254 39.6 0.000428464 124.156 985 | 2000 0 0.0254 39.6 0.000428464 130.026 986 | 2500 0 0.0254 39.6 0.000428464 131.836 987 | 3150 0 0.0254 39.6 0.000428464 133.276 988 | 4000 0 0.0254 39.6 0.000428464 135.346 989 | 5000 0 0.0254 39.6 0.000428464 136.536 990 | 6300 0 0.0254 39.6 0.000428464 136.826 991 | 8000 0 0.0254 39.6 0.000428464 135.866 992 | 10000 0 0.0254 39.6 0.000428464 133.376 993 | 12500 0 0.0254 39.6 0.000428464 129.116 994 | 16000 0 0.0254 39.6 0.000428464 124.986 995 | 1000 0 0.0254 31.7 0.000439472 125.127 996 | 1250 0 0.0254 31.7 0.000439472 127.947 997 | 1600 0 0.0254 31.7 0.000439472 129.267 998 | 2000 0 0.0254 31.7 0.000439472 130.697 999 | 2500 0 0.0254 31.7 0.000439472 132.897 1000 | 3150 0 0.0254 31.7 0.000439472 135.227 1001 | 4000 0 0.0254 31.7 0.000439472 137.047 1002 | 5000 0 0.0254 31.7 0.000439472 138.607 1003 | 6300 0 0.0254 31.7 0.000439472 138.537 1004 | 8000 0 0.0254 31.7 0.000439472 137.207 1005 | 10000 0 0.0254 31.7 0.000439472 134.227 1006 | 12500 0 0.0254 31.7 0.000439472 128.977 1007 | 16000 0 0.0254 31.7 0.000439472 125.627 1008 | 2000 4.8 0.0254 71.3 0.000848633 128.398 1009 | 2500 4.8 0.0254 71.3 0.000848633 130.828 1010 | 3150 4.8 0.0254 71.3 0.000848633 133.378 1011 | 4000 4.8 0.0254 71.3 0.000848633 134.928 1012 | 5000 4.8 0.0254 71.3 0.000848633 135.468 1013 | 6300 4.8 0.0254 71.3 0.000848633 134.498 1014 | 8000 4.8 0.0254 71.3 0.000848633 131.518 1015 | 10000 
4.8 0.0254 71.3 0.000848633 127.398 1016 | 12500 4.8 0.0254 71.3 0.000848633 127.688 1017 | 16000 4.8 0.0254 71.3 0.000848633 124.208 1018 | 20000 4.8 0.0254 71.3 0.000848633 119.708 1019 | 1600 4.8 0.0254 55.5 0.000873218 121.474 1020 | 2000 4.8 0.0254 55.5 0.000873218 125.054 1021 | 2500 4.8 0.0254 55.5 0.000873218 129.144 1022 | 3150 4.8 0.0254 55.5 0.000873218 132.354 1023 | 4000 4.8 0.0254 55.5 0.000873218 133.924 1024 | 5000 4.8 0.0254 55.5 0.000873218 135.484 1025 | 6300 4.8 0.0254 55.5 0.000873218 135.164 1026 | 8000 4.8 0.0254 55.5 0.000873218 132.184 1027 | 10000 4.8 0.0254 55.5 0.000873218 126.944 1028 | 12500 4.8 0.0254 55.5 0.000873218 125.094 1029 | 16000 4.8 0.0254 55.5 0.000873218 124.394 1030 | 20000 4.8 0.0254 55.5 0.000873218 121.284 1031 | 500 4.8 0.0254 39.6 0.000907475 116.366 1032 | 630 4.8 0.0254 39.6 0.000907475 118.696 1033 | 800 4.8 0.0254 39.6 0.000907475 120.766 1034 | 1000 4.8 0.0254 39.6 0.000907475 122.956 1035 | 1250 4.8 0.0254 39.6 0.000907475 125.026 1036 | 1600 4.8 0.0254 39.6 0.000907475 125.966 1037 | 2000 4.8 0.0254 39.6 0.000907475 128.916 1038 | 2500 4.8 0.0254 39.6 0.000907475 131.236 1039 | 3150 4.8 0.0254 39.6 0.000907475 133.436 1040 | 4000 4.8 0.0254 39.6 0.000907475 134.996 1041 | 5000 4.8 0.0254 39.6 0.000907475 135.426 1042 | 6300 4.8 0.0254 39.6 0.000907475 134.336 1043 | 8000 4.8 0.0254 39.6 0.000907475 131.346 1044 | 10000 4.8 0.0254 39.6 0.000907475 126.066 1045 | 500 4.8 0.0254 31.7 0.000930789 116.128 1046 | 630 4.8 0.0254 31.7 0.000930789 120.078 1047 | 800 4.8 0.0254 31.7 0.000930789 122.648 1048 | 1000 4.8 0.0254 31.7 0.000930789 125.348 1049 | 1250 4.8 0.0254 31.7 0.000930789 127.408 1050 | 1600 4.8 0.0254 31.7 0.000930789 128.718 1051 | 2000 4.8 0.0254 31.7 0.000930789 130.148 1052 | 2500 4.8 0.0254 31.7 0.000930789 132.588 1053 | 3150 4.8 0.0254 31.7 0.000930789 134.268 1054 | 4000 4.8 0.0254 31.7 0.000930789 135.328 1055 | 5000 4.8 0.0254 31.7 0.000930789 135.248 1056 | 6300 4.8 0.0254 31.7 0.000930789 132.898 1057 | 8000 4.8 0.0254 31.7 0.000930789 127.008 1058 | 630 9.5 0.0254 71.3 0.00420654 125.726 1059 | 800 9.5 0.0254 71.3 0.00420654 127.206 1060 | 1000 9.5 0.0254 71.3 0.00420654 129.556 1061 | 1250 9.5 0.0254 71.3 0.00420654 131.656 1062 | 1600 9.5 0.0254 71.3 0.00420654 133.756 1063 | 2000 9.5 0.0254 71.3 0.00420654 134.976 1064 | 2500 9.5 0.0254 71.3 0.00420654 135.956 1065 | 3150 9.5 0.0254 71.3 0.00420654 136.166 1066 | 4000 9.5 0.0254 71.3 0.00420654 134.236 1067 | 5000 9.5 0.0254 71.3 0.00420654 131.186 1068 | 6300 9.5 0.0254 71.3 0.00420654 127.246 1069 | 400 9.5 0.0254 55.5 0.0043284 120.952 1070 | 500 9.5 0.0254 55.5 0.0043284 123.082 1071 | 630 9.5 0.0254 55.5 0.0043284 125.452 1072 | 800 9.5 0.0254 55.5 0.0043284 128.082 1073 | 1000 9.5 0.0254 55.5 0.0043284 130.332 1074 | 1250 9.5 0.0254 55.5 0.0043284 132.202 1075 | 1600 9.5 0.0254 55.5 0.0043284 133.062 1076 | 2000 9.5 0.0254 55.5 0.0043284 134.052 1077 | 2500 9.5 0.0254 55.5 0.0043284 134.152 1078 | 3150 9.5 0.0254 55.5 0.0043284 133.252 1079 | 4000 9.5 0.0254 55.5 0.0043284 131.582 1080 | 5000 9.5 0.0254 55.5 0.0043284 128.412 1081 | 6300 9.5 0.0254 55.5 0.0043284 124.222 1082 | 200 9.5 0.0254 39.6 0.00449821 116.074 1083 | 250 9.5 0.0254 39.6 0.00449821 116.924 1084 | 315 9.5 0.0254 39.6 0.00449821 119.294 1085 | 400 9.5 0.0254 39.6 0.00449821 121.154 1086 | 500 9.5 0.0254 39.6 0.00449821 123.894 1087 | 630 9.5 0.0254 39.6 0.00449821 126.514 1088 | 800 9.5 0.0254 39.6 0.00449821 129.014 1089 | 1000 9.5 0.0254 39.6 0.00449821 130.374 1090 | 1250 9.5 0.0254 
39.6 0.00449821 130.964 1091 | 1600 9.5 0.0254 39.6 0.00449821 131.184 1092 | 2000 9.5 0.0254 39.6 0.00449821 131.274 1093 | 2500 9.5 0.0254 39.6 0.00449821 131.234 1094 | 3150 9.5 0.0254 39.6 0.00449821 129.934 1095 | 4000 9.5 0.0254 39.6 0.00449821 127.864 1096 | 5000 9.5 0.0254 39.6 0.00449821 125.044 1097 | 6300 9.5 0.0254 39.6 0.00449821 120.324 1098 | 200 9.5 0.0254 31.7 0.00461377 119.146 1099 | 250 9.5 0.0254 31.7 0.00461377 120.136 1100 | 315 9.5 0.0254 31.7 0.00461377 122.766 1101 | 400 9.5 0.0254 31.7 0.00461377 124.756 1102 | 500 9.5 0.0254 31.7 0.00461377 126.886 1103 | 630 9.5 0.0254 31.7 0.00461377 129.006 1104 | 800 9.5 0.0254 31.7 0.00461377 130.746 1105 | 1000 9.5 0.0254 31.7 0.00461377 131.346 1106 | 1250 9.5 0.0254 31.7 0.00461377 131.446 1107 | 1600 9.5 0.0254 31.7 0.00461377 131.036 1108 | 2000 9.5 0.0254 31.7 0.00461377 130.496 1109 | 2500 9.5 0.0254 31.7 0.00461377 130.086 1110 | 3150 9.5 0.0254 31.7 0.00461377 128.536 1111 | 4000 9.5 0.0254 31.7 0.00461377 126.736 1112 | 5000 9.5 0.0254 31.7 0.00461377 124.426 1113 | 6300 9.5 0.0254 31.7 0.00461377 120.726 1114 | 250 12.7 0.0254 71.3 0.0121808 119.698 1115 | 315 12.7 0.0254 71.3 0.0121808 122.938 1116 | 400 12.7 0.0254 71.3 0.0121808 125.048 1117 | 500 12.7 0.0254 71.3 0.0121808 126.898 1118 | 630 12.7 0.0254 71.3 0.0121808 128.878 1119 | 800 12.7 0.0254 71.3 0.0121808 130.348 1120 | 1000 12.7 0.0254 71.3 0.0121808 131.698 1121 | 1250 12.7 0.0254 71.3 0.0121808 133.048 1122 | 1600 12.7 0.0254 71.3 0.0121808 134.528 1123 | 2000 12.7 0.0254 71.3 0.0121808 134.228 1124 | 2500 12.7 0.0254 71.3 0.0121808 134.058 1125 | 3150 12.7 0.0254 71.3 0.0121808 133.758 1126 | 4000 12.7 0.0254 71.3 0.0121808 131.808 1127 | 5000 12.7 0.0254 71.3 0.0121808 128.978 1128 | 6300 12.7 0.0254 71.3 0.0121808 125.398 1129 | 8000 12.7 0.0254 71.3 0.0121808 120.538 1130 | 10000 12.7 0.0254 71.3 0.0121808 114.418 1131 | 250 12.7 0.0254 39.6 0.0130253 121.547 1132 | 315 12.7 0.0254 39.6 0.0130253 123.537 1133 | 400 12.7 0.0254 39.6 0.0130253 125.527 1134 | 500 12.7 0.0254 39.6 0.0130253 127.127 1135 | 630 12.7 0.0254 39.6 0.0130253 128.867 1136 | 800 12.7 0.0254 39.6 0.0130253 130.217 1137 | 1000 12.7 0.0254 39.6 0.0130253 130.947 1138 | 1250 12.7 0.0254 39.6 0.0130253 130.777 1139 | 1600 12.7 0.0254 39.6 0.0130253 129.977 1140 | 2000 12.7 0.0254 39.6 0.0130253 129.567 1141 | 2500 12.7 0.0254 39.6 0.0130253 129.027 1142 | 3150 12.7 0.0254 39.6 0.0130253 127.847 1143 | 4000 12.7 0.0254 39.6 0.0130253 126.537 1144 | 5000 12.7 0.0254 39.6 0.0130253 125.107 1145 | 6300 12.7 0.0254 39.6 0.0130253 123.177 1146 | 8000 12.7 0.0254 39.6 0.0130253 120.607 1147 | 10000 12.7 0.0254 39.6 0.0130253 116.017 1148 | 200 17.4 0.0254 71.3 0.016104 112.506 1149 | 250 17.4 0.0254 71.3 0.016104 113.796 1150 | 315 17.4 0.0254 71.3 0.016104 115.846 1151 | 400 17.4 0.0254 71.3 0.016104 117.396 1152 | 500 17.4 0.0254 71.3 0.016104 119.806 1153 | 630 17.4 0.0254 71.3 0.016104 122.606 1154 | 800 17.4 0.0254 71.3 0.016104 124.276 1155 | 1000 17.4 0.0254 71.3 0.016104 125.816 1156 | 1250 17.4 0.0254 71.3 0.016104 126.356 1157 | 1600 17.4 0.0254 71.3 0.016104 126.406 1158 | 2000 17.4 0.0254 71.3 0.016104 126.826 1159 | 2500 17.4 0.0254 71.3 0.016104 126.746 1160 | 3150 17.4 0.0254 71.3 0.016104 126.536 1161 | 4000 17.4 0.0254 71.3 0.016104 125.586 1162 | 5000 17.4 0.0254 71.3 0.016104 123.126 1163 | 6300 17.4 0.0254 71.3 0.016104 119.916 1164 | 8000 17.4 0.0254 71.3 0.016104 115.466 1165 | 200 17.4 0.0254 55.5 0.0165706 109.951 1166 | 250 17.4 0.0254 55.5 0.0165706 110.491 
1167 | 315 17.4 0.0254 55.5 0.0165706 111.911 1168 | 400 17.4 0.0254 55.5 0.0165706 115.461 1169 | 500 17.4 0.0254 55.5 0.0165706 119.621 1170 | 630 17.4 0.0254 55.5 0.0165706 122.411 1171 | 800 17.4 0.0254 55.5 0.0165706 123.091 1172 | 1000 17.4 0.0254 55.5 0.0165706 126.001 1173 | 1250 17.4 0.0254 55.5 0.0165706 129.301 1174 | 1600 17.4 0.0254 55.5 0.0165706 126.471 1175 | 2000 17.4 0.0254 55.5 0.0165706 125.261 1176 | 2500 17.4 0.0254 55.5 0.0165706 124.931 1177 | 3150 17.4 0.0254 55.5 0.0165706 124.101 1178 | 4000 17.4 0.0254 55.5 0.0165706 121.771 1179 | 5000 17.4 0.0254 55.5 0.0165706 118.941 1180 | 6300 17.4 0.0254 55.5 0.0165706 114.861 1181 | 200 17.4 0.0254 39.6 0.0172206 114.044 1182 | 250 17.4 0.0254 39.6 0.0172206 114.714 1183 | 315 17.4 0.0254 39.6 0.0172206 115.144 1184 | 400 17.4 0.0254 39.6 0.0172206 115.444 1185 | 500 17.4 0.0254 39.6 0.0172206 117.514 1186 | 630 17.4 0.0254 39.6 0.0172206 124.514 1187 | 800 17.4 0.0254 39.6 0.0172206 135.324 1188 | 1000 17.4 0.0254 39.6 0.0172206 138.274 1189 | 1250 17.4 0.0254 39.6 0.0172206 131.364 1190 | 1600 17.4 0.0254 39.6 0.0172206 127.614 1191 | 2000 17.4 0.0254 39.6 0.0172206 126.644 1192 | 2500 17.4 0.0254 39.6 0.0172206 124.154 1193 | 3150 17.4 0.0254 39.6 0.0172206 123.564 1194 | 4000 17.4 0.0254 39.6 0.0172206 122.724 1195 | 5000 17.4 0.0254 39.6 0.0172206 119.854 1196 | 200 17.4 0.0254 31.7 0.0176631 116.146 1197 | 250 17.4 0.0254 31.7 0.0176631 116.956 1198 | 315 17.4 0.0254 31.7 0.0176631 118.416 1199 | 400 17.4 0.0254 31.7 0.0176631 120.766 1200 | 500 17.4 0.0254 31.7 0.0176631 127.676 1201 | 630 17.4 0.0254 31.7 0.0176631 136.886 1202 | 800 17.4 0.0254 31.7 0.0176631 139.226 1203 | 1000 17.4 0.0254 31.7 0.0176631 131.796 1204 | 1250 17.4 0.0254 31.7 0.0176631 128.306 1205 | 1600 17.4 0.0254 31.7 0.0176631 126.846 1206 | 2000 17.4 0.0254 31.7 0.0176631 124.356 1207 | 2500 17.4 0.0254 31.7 0.0176631 124.166 1208 | 3150 17.4 0.0254 31.7 0.0176631 123.466 1209 | 4000 17.4 0.0254 31.7 0.0176631 121.996 1210 | 5000 17.4 0.0254 31.7 0.0176631 117.996 1211 | 315 22.2 0.0254 71.3 0.0214178 115.857 1212 | 400 22.2 0.0254 71.3 0.0214178 117.927 1213 | 500 22.2 0.0254 71.3 0.0214178 117.967 1214 | 630 22.2 0.0254 71.3 0.0214178 120.657 1215 | 800 22.2 0.0254 71.3 0.0214178 123.227 1216 | 1000 22.2 0.0254 71.3 0.0214178 134.247 1217 | 1250 22.2 0.0254 71.3 0.0214178 140.987 1218 | 1600 22.2 0.0254 71.3 0.0214178 131.817 1219 | 2000 22.2 0.0254 71.3 0.0214178 127.197 1220 | 2500 22.2 0.0254 71.3 0.0214178 126.097 1221 | 3150 22.2 0.0254 71.3 0.0214178 124.127 1222 | 4000 22.2 0.0254 71.3 0.0214178 123.917 1223 | 5000 22.2 0.0254 71.3 0.0214178 125.727 1224 | 6300 22.2 0.0254 71.3 0.0214178 123.127 1225 | 8000 22.2 0.0254 71.3 0.0214178 121.657 1226 | 200 22.2 0.0254 39.6 0.0229028 116.066 1227 | 250 22.2 0.0254 39.6 0.0229028 117.386 1228 | 315 22.2 0.0254 39.6 0.0229028 120.716 1229 | 400 22.2 0.0254 39.6 0.0229028 123.416 1230 | 500 22.2 0.0254 39.6 0.0229028 129.776 1231 | 630 22.2 0.0254 39.6 0.0229028 137.026 1232 | 800 22.2 0.0254 39.6 0.0229028 137.076 1233 | 1000 22.2 0.0254 39.6 0.0229028 128.416 1234 | 1250 22.2 0.0254 39.6 0.0229028 126.446 1235 | 1600 22.2 0.0254 39.6 0.0229028 122.216 1236 | 2000 22.2 0.0254 39.6 0.0229028 121.256 1237 | 2500 22.2 0.0254 39.6 0.0229028 121.306 1238 | 3150 22.2 0.0254 39.6 0.0229028 120.856 1239 | 4000 22.2 0.0254 39.6 0.0229028 119.646 1240 | 5000 22.2 0.0254 39.6 0.0229028 118.816 1241 | 630 0 0.1016 71.3 0.00121072 124.155 1242 | 800 0 0.1016 71.3 0.00121072 126.805 1243 | 1000 0 
0.1016 71.3 0.00121072 128.825 1244 | 1250 0 0.1016 71.3 0.00121072 130.335 1245 | 1600 0 0.1016 71.3 0.00121072 131.725 1246 | 2000 0 0.1016 71.3 0.00121072 132.095 1247 | 2500 0 0.1016 71.3 0.00121072 132.595 1248 | 3150 0 0.1016 71.3 0.00121072 131.955 1249 | 4000 0 0.1016 71.3 0.00121072 130.935 1250 | 5000 0 0.1016 71.3 0.00121072 130.795 1251 | 6300 0 0.1016 71.3 0.00121072 129.395 1252 | 8000 0 0.1016 71.3 0.00121072 125.465 1253 | 10000 0 0.1016 71.3 0.00121072 123.305 1254 | 12500 0 0.1016 71.3 0.00121072 119.375 1255 | 630 0 0.1016 55.5 0.00131983 126.170 1256 | 800 0 0.1016 55.5 0.00131983 127.920 1257 | 1000 0 0.1016 55.5 0.00131983 129.800 1258 | 1250 0 0.1016 55.5 0.00131983 131.430 1259 | 1600 0 0.1016 55.5 0.00131983 132.050 1260 | 2000 0 0.1016 55.5 0.00131983 132.540 1261 | 2500 0 0.1016 55.5 0.00131983 133.040 1262 | 3150 0 0.1016 55.5 0.00131983 131.780 1263 | 4000 0 0.1016 55.5 0.00131983 129.500 1264 | 5000 0 0.1016 55.5 0.00131983 128.360 1265 | 6300 0 0.1016 55.5 0.00131983 127.730 1266 | 8000 0 0.1016 55.5 0.00131983 124.450 1267 | 10000 0 0.1016 55.5 0.00131983 121.930 1268 | 12500 0 0.1016 55.5 0.00131983 119.910 1269 | 630 0 0.1016 39.6 0.00146332 125.401 1270 | 800 0 0.1016 39.6 0.00146332 128.401 1271 | 1000 0 0.1016 39.6 0.00146332 130.781 1272 | 1250 0 0.1016 39.6 0.00146332 132.271 1273 | 1600 0 0.1016 39.6 0.00146332 133.261 1274 | 2000 0 0.1016 39.6 0.00146332 133.251 1275 | 2500 0 0.1016 39.6 0.00146332 132.611 1276 | 3150 0 0.1016 39.6 0.00146332 130.961 1277 | 4000 0 0.1016 39.6 0.00146332 127.801 1278 | 5000 0 0.1016 39.6 0.00146332 126.021 1279 | 6300 0 0.1016 39.6 0.00146332 125.631 1280 | 8000 0 0.1016 39.6 0.00146332 122.341 1281 | 10000 0 0.1016 39.6 0.00146332 119.561 1282 | 630 0 0.1016 31.7 0.00150092 126.413 1283 | 800 0 0.1016 31.7 0.00150092 129.053 1284 | 1000 0 0.1016 31.7 0.00150092 131.313 1285 | 1250 0 0.1016 31.7 0.00150092 133.063 1286 | 1600 0 0.1016 31.7 0.00150092 133.553 1287 | 2000 0 0.1016 31.7 0.00150092 133.153 1288 | 2500 0 0.1016 31.7 0.00150092 132.003 1289 | 3150 0 0.1016 31.7 0.00150092 129.973 1290 | 4000 0 0.1016 31.7 0.00150092 126.933 1291 | 5000 0 0.1016 31.7 0.00150092 124.393 1292 | 6300 0 0.1016 31.7 0.00150092 124.253 1293 | 8000 0 0.1016 31.7 0.00150092 120.193 1294 | 10000 0 0.1016 31.7 0.00150092 115.893 1295 | 800 3.3 0.1016 71.3 0.00202822 131.074 1296 | 1000 3.3 0.1016 71.3 0.00202822 131.434 1297 | 1250 3.3 0.1016 71.3 0.00202822 132.304 1298 | 1600 3.3 0.1016 71.3 0.00202822 133.664 1299 | 2000 3.3 0.1016 71.3 0.00202822 134.034 1300 | 2500 3.3 0.1016 71.3 0.00202822 133.894 1301 | 3150 3.3 0.1016 71.3 0.00202822 132.114 1302 | 4000 3.3 0.1016 71.3 0.00202822 128.704 1303 | 5000 3.3 0.1016 71.3 0.00202822 127.054 1304 | 6300 3.3 0.1016 71.3 0.00202822 124.904 1305 | 8000 3.3 0.1016 71.3 0.00202822 121.234 1306 | 10000 3.3 0.1016 71.3 0.00202822 116.694 1307 | 630 3.3 0.1016 55.5 0.002211 126.599 1308 | 800 3.3 0.1016 55.5 0.002211 129.119 1309 | 1000 3.3 0.1016 55.5 0.002211 131.129 1310 | 1250 3.3 0.1016 55.5 0.002211 132.769 1311 | 1600 3.3 0.1016 55.5 0.002211 133.649 1312 | 2000 3.3 0.1016 55.5 0.002211 133.649 1313 | 2500 3.3 0.1016 55.5 0.002211 132.889 1314 | 3150 3.3 0.1016 55.5 0.002211 130.629 1315 | 4000 3.3 0.1016 55.5 0.002211 127.229 1316 | 5000 3.3 0.1016 55.5 0.002211 124.839 1317 | 6300 3.3 0.1016 55.5 0.002211 123.839 1318 | 8000 3.3 0.1016 55.5 0.002211 120.569 1319 | 10000 3.3 0.1016 55.5 0.002211 115.659 1320 | 630 3.3 0.1016 39.6 0.00245138 127.251 1321 | 800 3.3 0.1016 39.6 
0.00245138 129.991 1322 | 1000 3.3 0.1016 39.6 0.00245138 131.971 1323 | 1250 3.3 0.1016 39.6 0.00245138 133.211 1324 | 1600 3.3 0.1016 39.6 0.00245138 133.071 1325 | 2000 3.3 0.1016 39.6 0.00245138 132.301 1326 | 2500 3.3 0.1016 39.6 0.00245138 130.791 1327 | 3150 3.3 0.1016 39.6 0.00245138 128.401 1328 | 4000 3.3 0.1016 39.6 0.00245138 124.881 1329 | 5000 3.3 0.1016 39.6 0.00245138 122.371 1330 | 6300 3.3 0.1016 39.6 0.00245138 120.851 1331 | 8000 3.3 0.1016 39.6 0.00245138 118.091 1332 | 10000 3.3 0.1016 39.6 0.00245138 115.321 1333 | 630 3.3 0.1016 31.7 0.00251435 128.952 1334 | 800 3.3 0.1016 31.7 0.00251435 131.362 1335 | 1000 3.3 0.1016 31.7 0.00251435 133.012 1336 | 1250 3.3 0.1016 31.7 0.00251435 134.022 1337 | 1600 3.3 0.1016 31.7 0.00251435 133.402 1338 | 2000 3.3 0.1016 31.7 0.00251435 131.642 1339 | 2500 3.3 0.1016 31.7 0.00251435 130.392 1340 | 3150 3.3 0.1016 31.7 0.00251435 128.252 1341 | 4000 3.3 0.1016 31.7 0.00251435 124.852 1342 | 5000 3.3 0.1016 31.7 0.00251435 122.082 1343 | 6300 3.3 0.1016 31.7 0.00251435 120.702 1344 | 8000 3.3 0.1016 31.7 0.00251435 117.432 1345 | 630 6.7 0.1016 71.3 0.00478288 131.448 1346 | 800 6.7 0.1016 71.3 0.00478288 134.478 1347 | 1000 6.7 0.1016 71.3 0.00478288 136.758 1348 | 1250 6.7 0.1016 71.3 0.00478288 137.658 1349 | 1600 6.7 0.1016 71.3 0.00478288 136.678 1350 | 2000 6.7 0.1016 71.3 0.00478288 134.568 1351 | 2500 6.7 0.1016 71.3 0.00478288 131.458 1352 | 3150 6.7 0.1016 71.3 0.00478288 124.458 1353 | 500 6.7 0.1016 55.5 0.0052139 129.343 1354 | 630 6.7 0.1016 55.5 0.0052139 133.023 1355 | 800 6.7 0.1016 55.5 0.0052139 135.953 1356 | 1000 6.7 0.1016 55.5 0.0052139 137.233 1357 | 1250 6.7 0.1016 55.5 0.0052139 136.883 1358 | 1600 6.7 0.1016 55.5 0.0052139 133.653 1359 | 2000 6.7 0.1016 55.5 0.0052139 129.653 1360 | 2500 6.7 0.1016 55.5 0.0052139 124.273 1361 | 400 6.7 0.1016 39.6 0.00578076 128.295 1362 | 500 6.7 0.1016 39.6 0.00578076 130.955 1363 | 630 6.7 0.1016 39.6 0.00578076 133.355 1364 | 800 6.7 0.1016 39.6 0.00578076 134.625 1365 | 1000 6.7 0.1016 39.6 0.00578076 134.515 1366 | 1250 6.7 0.1016 39.6 0.00578076 132.395 1367 | 1600 6.7 0.1016 39.6 0.00578076 127.375 1368 | 2000 6.7 0.1016 39.6 0.00578076 122.235 1369 | 315 6.7 0.1016 31.7 0.00592927 126.266 1370 | 400 6.7 0.1016 31.7 0.00592927 128.296 1371 | 500 6.7 0.1016 31.7 0.00592927 130.206 1372 | 630 6.7 0.1016 31.7 0.00592927 132.116 1373 | 800 6.7 0.1016 31.7 0.00592927 132.886 1374 | 1000 6.7 0.1016 31.7 0.00592927 131.636 1375 | 1250 6.7 0.1016 31.7 0.00592927 129.256 1376 | 1600 6.7 0.1016 31.7 0.00592927 124.346 1377 | 2000 6.7 0.1016 31.7 0.00592927 120.446 1378 | 200 8.9 0.1016 71.3 0.0103088 133.503 1379 | 250 8.9 0.1016 71.3 0.0103088 134.533 1380 | 315 8.9 0.1016 71.3 0.0103088 136.583 1381 | 400 8.9 0.1016 71.3 0.0103088 138.123 1382 | 500 8.9 0.1016 71.3 0.0103088 138.523 1383 | 630 8.9 0.1016 71.3 0.0103088 138.423 1384 | 800 8.9 0.1016 71.3 0.0103088 137.813 1385 | 1000 8.9 0.1016 71.3 0.0103088 135.433 1386 | 1250 8.9 0.1016 71.3 0.0103088 132.793 1387 | 1600 8.9 0.1016 71.3 0.0103088 128.763 1388 | 2000 8.9 0.1016 71.3 0.0103088 124.233 1389 | 2500 8.9 0.1016 71.3 0.0103088 123.623 1390 | 3150 8.9 0.1016 71.3 0.0103088 123.263 1391 | 4000 8.9 0.1016 71.3 0.0103088 120.243 1392 | 5000 8.9 0.1016 71.3 0.0103088 116.723 1393 | 6300 8.9 0.1016 71.3 0.0103088 117.253 1394 | 200 8.9 0.1016 39.6 0.0124596 133.420 1395 | 250 8.9 0.1016 39.6 0.0124596 134.340 1396 | 315 8.9 0.1016 39.6 0.0124596 135.380 1397 | 400 8.9 0.1016 39.6 0.0124596 135.540 1398 | 500 8.9 
0.1016 39.6 0.0124596 133.790 1399 | 630 8.9 0.1016 39.6 0.0124596 131.920 1400 | 800 8.9 0.1016 39.6 0.0124596 130.940 1401 | 1000 8.9 0.1016 39.6 0.0124596 129.580 1402 | 1250 8.9 0.1016 39.6 0.0124596 127.710 1403 | 1600 8.9 0.1016 39.6 0.0124596 123.820 1404 | 2000 8.9 0.1016 39.6 0.0124596 119.040 1405 | 2500 8.9 0.1016 39.6 0.0124596 119.190 1406 | 3150 8.9 0.1016 39.6 0.0124596 119.350 1407 | 4000 8.9 0.1016 39.6 0.0124596 116.220 1408 | 5000 8.9 0.1016 39.6 0.0124596 113.080 1409 | 6300 8.9 0.1016 39.6 0.0124596 113.110 1410 | 200 12.3 0.1016 71.3 0.0337792 130.588 1411 | 250 12.3 0.1016 71.3 0.0337792 131.568 1412 | 315 12.3 0.1016 71.3 0.0337792 137.068 1413 | 400 12.3 0.1016 71.3 0.0337792 139.428 1414 | 500 12.3 0.1016 71.3 0.0337792 140.158 1415 | 630 12.3 0.1016 71.3 0.0337792 135.368 1416 | 800 12.3 0.1016 71.3 0.0337792 127.318 1417 | 1000 12.3 0.1016 71.3 0.0337792 127.928 1418 | 1250 12.3 0.1016 71.3 0.0337792 126.648 1419 | 1600 12.3 0.1016 71.3 0.0337792 124.748 1420 | 2000 12.3 0.1016 71.3 0.0337792 122.218 1421 | 2500 12.3 0.1016 71.3 0.0337792 121.318 1422 | 3150 12.3 0.1016 71.3 0.0337792 120.798 1423 | 4000 12.3 0.1016 71.3 0.0337792 118.018 1424 | 5000 12.3 0.1016 71.3 0.0337792 116.108 1425 | 6300 12.3 0.1016 71.3 0.0337792 113.958 1426 | 200 12.3 0.1016 55.5 0.0368233 132.304 1427 | 250 12.3 0.1016 55.5 0.0368233 133.294 1428 | 315 12.3 0.1016 55.5 0.0368233 135.674 1429 | 400 12.3 0.1016 55.5 0.0368233 136.414 1430 | 500 12.3 0.1016 55.5 0.0368233 133.774 1431 | 630 12.3 0.1016 55.5 0.0368233 124.244 1432 | 800 12.3 0.1016 55.5 0.0368233 125.114 1433 | 1000 12.3 0.1016 55.5 0.0368233 125.484 1434 | 1250 12.3 0.1016 55.5 0.0368233 124.214 1435 | 1600 12.3 0.1016 55.5 0.0368233 121.824 1436 | 2000 12.3 0.1016 55.5 0.0368233 118.564 1437 | 2500 12.3 0.1016 55.5 0.0368233 117.054 1438 | 3150 12.3 0.1016 55.5 0.0368233 116.914 1439 | 4000 12.3 0.1016 55.5 0.0368233 114.404 1440 | 5000 12.3 0.1016 55.5 0.0368233 112.014 1441 | 6300 12.3 0.1016 55.5 0.0368233 110.124 1442 | 200 12.3 0.1016 39.6 0.0408268 128.545 1443 | 250 12.3 0.1016 39.6 0.0408268 129.675 1444 | 315 12.3 0.1016 39.6 0.0408268 129.415 1445 | 400 12.3 0.1016 39.6 0.0408268 128.265 1446 | 500 12.3 0.1016 39.6 0.0408268 122.205 1447 | 630 12.3 0.1016 39.6 0.0408268 121.315 1448 | 800 12.3 0.1016 39.6 0.0408268 122.315 1449 | 1000 12.3 0.1016 39.6 0.0408268 122.435 1450 | 1250 12.3 0.1016 39.6 0.0408268 121.165 1451 | 1600 12.3 0.1016 39.6 0.0408268 117.875 1452 | 2000 12.3 0.1016 39.6 0.0408268 114.085 1453 | 2500 12.3 0.1016 39.6 0.0408268 113.315 1454 | 3150 12.3 0.1016 39.6 0.0408268 113.055 1455 | 4000 12.3 0.1016 39.6 0.0408268 110.905 1456 | 5000 12.3 0.1016 39.6 0.0408268 108.625 1457 | 6300 12.3 0.1016 39.6 0.0408268 107.985 1458 | 200 12.3 0.1016 31.7 0.0418756 124.987 1459 | 250 12.3 0.1016 31.7 0.0418756 125.857 1460 | 315 12.3 0.1016 31.7 0.0418756 124.717 1461 | 400 12.3 0.1016 31.7 0.0418756 123.207 1462 | 500 12.3 0.1016 31.7 0.0418756 118.667 1463 | 630 12.3 0.1016 31.7 0.0418756 119.287 1464 | 800 12.3 0.1016 31.7 0.0418756 120.037 1465 | 1000 12.3 0.1016 31.7 0.0418756 119.777 1466 | 1250 12.3 0.1016 31.7 0.0418756 118.767 1467 | 1600 12.3 0.1016 31.7 0.0418756 114.477 1468 | 2000 12.3 0.1016 31.7 0.0418756 110.447 1469 | 2500 12.3 0.1016 31.7 0.0418756 110.317 1470 | 3150 12.3 0.1016 31.7 0.0418756 110.307 1471 | 4000 12.3 0.1016 31.7 0.0418756 108.407 1472 | 5000 12.3 0.1016 31.7 0.0418756 107.147 1473 | 6300 12.3 0.1016 31.7 0.0418756 107.267 1474 | 200 15.6 0.1016 71.3 0.0437259 
130.898 1475 | 250 15.6 0.1016 71.3 0.0437259 132.158 1476 | 315 15.6 0.1016 71.3 0.0437259 133.808 1477 | 400 15.6 0.1016 71.3 0.0437259 134.058 1478 | 500 15.6 0.1016 71.3 0.0437259 130.638 1479 | 630 15.6 0.1016 71.3 0.0437259 122.288 1480 | 800 15.6 0.1016 71.3 0.0437259 124.188 1481 | 1000 15.6 0.1016 71.3 0.0437259 124.438 1482 | 1250 15.6 0.1016 71.3 0.0437259 123.178 1483 | 1600 15.6 0.1016 71.3 0.0437259 121.528 1484 | 2000 15.6 0.1016 71.3 0.0437259 119.888 1485 | 2500 15.6 0.1016 71.3 0.0437259 118.998 1486 | 3150 15.6 0.1016 71.3 0.0437259 116.468 1487 | 4000 15.6 0.1016 71.3 0.0437259 113.298 1488 | 200 15.6 0.1016 39.6 0.0528487 123.514 1489 | 250 15.6 0.1016 39.6 0.0528487 124.644 1490 | 315 15.6 0.1016 39.6 0.0528487 122.754 1491 | 400 15.6 0.1016 39.6 0.0528487 120.484 1492 | 500 15.6 0.1016 39.6 0.0528487 115.304 1493 | 630 15.6 0.1016 39.6 0.0528487 118.084 1494 | 800 15.6 0.1016 39.6 0.0528487 118.964 1495 | 1000 15.6 0.1016 39.6 0.0528487 119.224 1496 | 1250 15.6 0.1016 39.6 0.0528487 118.214 1497 | 1600 15.6 0.1016 39.6 0.0528487 114.554 1498 | 2000 15.6 0.1016 39.6 0.0528487 110.894 1499 | 2500 15.6 0.1016 39.6 0.0528487 110.264 1500 | 3150 15.6 0.1016 39.6 0.0528487 109.254 1501 | 4000 15.6 0.1016 39.6 0.0528487 106.604 1502 | 5000 15.6 0.1016 39.6 0.0528487 106.224 1503 | 6300 15.6 0.1016 39.6 0.0528487 104.204 1504 |
--------------------------------------------------------------------------------
/Chapter10/airfoil_self_noise.py:
--------------------------------------------------------------------------------
import pandas as pd

# NASA airfoil self-noise data: frequency, angle of attack, chord length,
# free-stream velocity, suction-side displacement thickness, and the scaled
# sound pressure level (SSP), which is the quantity to predict
ASNNames = ['Frequency', 'AngleAttack', 'ChordLength', 'FSVelox', 'SSDT', 'SSP']

# The .dat file is whitespace-separated with no header row
ASNData = pd.read_csv('airfoil_self_noise.dat', sep=r'\s+', names=ASNNames)

print(ASNData.head(20))
print(ASNData.info())

BasicStats = ASNData.describe()
BasicStats = BasicStats.transpose()
print(BasicStats)

from sklearn.preprocessing import MinMaxScaler

# Rescale every column to the [0, 1] range
ScalerObject = MinMaxScaler()
ASNDataScaled = ScalerObject.fit_transform(ASNData)
ASNDataScaled = pd.DataFrame(ASNDataScaled, columns=ASNNames)

summary = ASNDataScaled.describe()
summary = summary.transpose()
print(summary)

import matplotlib.pyplot as plt

boxplot = ASNDataScaled.boxplot(column=ASNNames)
plt.show()

CorASNData = ASNDataScaled.corr(method='pearson')
with pd.option_context('display.max_rows', None, 'display.max_columns', CorASNData.shape[1]):
    print(CorASNData)

plt.matshow(CorASNData)
plt.xticks(range(len(CorASNData.columns)), CorASNData.columns)
plt.yticks(range(len(CorASNData.columns)), CorASNData.columns)
plt.colorbar()
plt.show()

from sklearn.model_selection import train_test_split

X = ASNDataScaled.drop('SSP', axis=1)
print('X shape = ', X.shape)
Y = ASNDataScaled['SSP']
print('Y shape = ', Y.shape)

X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.30, random_state=5)
print('X train shape = ', X_train.shape)
print('X test shape = ', X_test.shape)
print('Y train shape = ', Y_train.shape)
print('Y test shape = ', Y_test.shape)

# Linear Regression
from sklearn.linear_model import LinearRegression

LModel = LinearRegression()
LModel.fit(X_train, Y_train)

Y_predLM = LModel.predict(X_test)

from sklearn.metrics import mean_squared_error

MseLM = mean_squared_error(Y_test, Y_predLM)
print('Linear Regression Model')
print(MseLM)

# MLP Regressor Model
from sklearn.neural_network import MLPRegressor

MLPRegModel = MLPRegressor(hidden_layer_sizes=(50,), activation='relu', solver='lbfgs',
                           tol=1e-4, max_iter=10000, random_state=1)
MLPRegModel.fit(X_train, Y_train)

Y_predMLPReg = MLPRegModel.predict(X_test)

MseMLP = mean_squared_error(Y_test, Y_predMLPReg)
print('SKLearn Neural Network Model')
print(MseMLP)

# Plot a comparison diagram; the dashed line is the perfect-prediction diagonal y = x
plt.figure(1)
plt.subplot(121)
plt.scatter(Y_test, Y_predMLPReg)
plt.plot((0, 1), (0, 1), "r--")
plt.xlabel("Actual values")
plt.ylabel("Predicted values")
plt.title("SKLearn Neural Network Model")

plt.subplot(122)
plt.scatter(Y_test, Y_predLM)
plt.plot((0, 1), (0, 1), "r--")
plt.xlabel("Actual values")
plt.ylabel("Predicted values")
plt.title("SKLearn Linear Regression Model")
plt.show()
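
The comparison above rests on a single 70/30 split, so the two MSE values can shift with the choice of random_state. A minimal sketch, not part of the repository, of how the same two models could instead be compared with 5-fold cross-validation; it reuses the X, Y, LModel, and MLPRegModel objects defined in airfoil_self_noise.py:

from sklearn.model_selection import cross_val_score
import numpy as np

for name, model in [('Linear Regression', LModel), ('MLP Regressor', MLPRegModel)]:
    # cross_val_score negates the MSE so that higher is better; flip the sign back
    mse_scores = -cross_val_score(model, X, Y, scoring='neg_mean_squared_error', cv=5)
    print(name, ': mean MSE = %.5f, std = %.5f' % (mse_scores.mean(), mse_scores.std()))
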
--------------------------------------------------------------------------------
/Chapter10/concrete_quality.py:
--------------------------------------------------------------------------------
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import Dense
from sklearn.metrics import r2_score

features_names = ['Cement', 'BFS', 'FLA', 'Water', 'SP', 'CA', 'FA', 'Age', 'CCS']
# Note the capital C: the file shipped in this repository is Concrete_data.xlsx,
# and the lowercase name fails on case-sensitive filesystems
concrete_data = pd.read_excel('Concrete_data.xlsx', names=features_names)

summary = concrete_data.describe()
print(summary)

sns.set(style="ticks")
sns.boxplot(data=concrete_data)
plt.show()

scaler = MinMaxScaler()
scaled_data = scaler.fit_transform(concrete_data)
scaled_data = pd.DataFrame(scaled_data, columns=features_names)

summary = scaled_data.describe()
print(summary)

sns.boxplot(data=scaled_data)
plt.show()

# The first eight columns are the inputs; the ninth (CCS) is the target
input_data = pd.DataFrame(scaled_data.iloc[:, :8])
output_data = pd.DataFrame(scaled_data.iloc[:, 8])

inp_train, inp_test, out_train, out_test = train_test_split(input_data, output_data, test_size=0.30, random_state=1)
print(inp_train.shape)
print(inp_test.shape)
print(out_train.shape)
print(out_test.shape)

model = Sequential()
model.add(Dense(20, input_dim=8, activation='relu'))
model.add(Dense(10, activation='relu'))
model.add(Dense(10, activation='relu'))
model.add(Dense(1, activation='linear'))
# Accuracy is a classification metric and is meaningless for regression,
# so only the mean squared error loss is tracked
model.compile(optimizer='adam', loss='mean_squared_error')
model.fit(inp_train, out_train, epochs=1000, verbose=1)

model.summary()

output_pred = model.predict(inp_test)

print('Coefficient of determination = ')
print(r2_score(out_test, output_pred))
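
Training for a fixed 1,000 epochs risks overfitting on a dataset of this size. A minimal sketch, not part of the repository, of how the fit call above could be replaced so that training stops once the validation loss stops improving; it assumes the model, inp_train, and out_train objects from concrete_quality.py:

from keras.callbacks import EarlyStopping

# Stop once the validation loss has not improved for 50 consecutive epochs
early_stop = EarlyStopping(monitor='val_loss', patience=50, restore_best_weights=True)

model.fit(inp_train, out_train,
          validation_split=0.2,   # hold out 20% of the training rows for validation
          epochs=1000, verbose=1,
          callbacks=[early_stop])
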
--------------------------------------------------------------------------------
/Chapter11/montecarlo_tasks_scheduling.py:
--------------------------------------------------------------------------------
import pandas as pd
import random
import numpy as np
import matplotlib.pyplot as plt

N = 10000

TotalTime = []

T = np.empty(shape=(N, 6))

# Triangular distribution for each task: [minimum, most likely, maximum] duration
TaskTimes = [[3, 5, 8],
             [2, 4, 7],
             [3, 5, 9],
             [4, 6, 10],
             [3, 5, 9],
             [2, 6, 8]]

# Lh[i] is the CDF value at the mode, which selects between the two branches
# of the triangular inverse CDF
Lh = []
for i in range(6):
    Lh.append((TaskTimes[i][1] - TaskTimes[i][0]) / (TaskTimes[i][2] - TaskTimes[i][0]))

for p in range(N):
    for i in range(6):
        # Inverse-transform sampling from the triangular distribution
        trand = random.random()
        if trand < Lh[i]:
            T[p][i] = TaskTimes[i][0] + np.sqrt(trand * (TaskTimes[i][1] - TaskTimes[i][0]) * (TaskTimes[i][2] - TaskTimes[i][0]))
        else:
            T[p][i] = TaskTimes[i][2] - np.sqrt((1 - trand) * (TaskTimes[i][2] - TaskTimes[i][1]) * (TaskTimes[i][2] - TaskTimes[i][0]))
    # Tasks 2/3 and tasks 4/5 run in parallel, so only the longer of each pair counts
    TotalTime.append(T[p][0] + np.maximum(T[p][1], T[p][2]) + np.maximum(T[p][3], T[p][4]) + T[p][5])

Data = pd.DataFrame(T, columns=['Task1', 'Task2', 'Task3', 'Task4', 'Task5', 'Task6'])

pd.set_option('display.max_columns', None)
print(Data.describe())

hist = Data.hist(bins=10)
plt.show()

print("Minimum project completion time = ", np.amin(TotalTime))
print("Mean project completion time = ", np.mean(TotalTime))
print("Maximum project completion time = ", np.amax(TotalTime))
--------------------------------------------------------------------------------
/Chapter11/tiny_forest-management.py:
--------------------------------------------------------------------------------
import mdptoolbox.example

# Default tiny forest problem: 3 states, 2 actions (wait, cut)
P, R = mdptoolbox.example.forest()

print(P[0])
print(P[1])

print(R[:, 0])
print(R[:, 1])

gamma = 0.9

PolIterModel = mdptoolbox.mdp.PolicyIteration(P, R, gamma)
PolIterModel.run()

print(PolIterModel.V)        # value function of the optimal policy
print(PolIterModel.policy)   # optimal action for each state
print(PolIterModel.iter)     # number of iterations to converge
print(PolIterModel.time)     # CPU time used
--------------------------------------------------------------------------------
/Chapter11/tiny_forest_management_modified.py:
--------------------------------------------------------------------------------
import mdptoolbox.example

# Same tiny forest problem, but with the fire probability raised
# from the default 0.1 to 0.8
P, R = mdptoolbox.example.forest(3, 4, 2, 0.8)

print(P[0])
print(P[1])

print(R[:, 0])
print(R[:, 1])

gamma = 0.9

PolIterModel = mdptoolbox.mdp.PolicyIteration(P, R, gamma)
PolIterModel.run()

print(PolIterModel.V)
print(PolIterModel.policy)
print(PolIterModel.iter)
print(PolIterModel.time)
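
Both forest-management scripts solve the MDP with policy iteration. A minimal sketch, not part of the repository, of cross-checking the result with pymdptoolbox's value-iteration solver; it reuses the P, R, and gamma defined above, and for a problem this small the two solvers are expected to agree:

import mdptoolbox.mdp

ValIterModel = mdptoolbox.mdp.ValueIteration(P, R, gamma)
ValIterModel.run()

print(ValIterModel.policy)   # expected to match PolIterModel.policy
print(ValIterModel.V)
print(ValIterModel.iter)
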
--------------------------------------------------------------------------------
/Chapter12/UAV_WiFi.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Hands-On-Simulation-Modeling-with-Python-Second-Edition/799f10caa2702e4074893fd4c5145e9aa42b8719/Chapter12/UAV_WiFi.xlsx
--------------------------------------------------------------------------------
/Chapter12/UAV_detector.py:
--------------------------------------------------------------------------------
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
import matplotlib.pyplot as plt
from sklearn.feature_selection import SelectKBest, chi2

data = pd.read_excel('UAV_WiFi.xlsx')

print(data.info())

DataStatCat = data.astype('object').describe()
print(DataStatCat)

X = data.drop('target', axis=1)
print('X shape = ', X.shape)
Y = data['target']
print('Y shape = ', Y.shape)

X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.30, random_state=1)
print('X train shape = ', X_train.shape)
print('X test shape = ', X_test.shape)
print('Y train shape = ', Y_train.shape)
print('Y test shape = ', Y_test.shape)


SVC_model = SVC(gamma='scale', random_state=0).fit(X_train, Y_train)
SVC_model_score = SVC_model.score(X_test, Y_test)
print('Support Vector Classification Model Score = ', SVC_model_score)

# boxplot of the first 5 columns before scaling
first_5_columns = X.iloc[:, 0:5]
plt.figure(figsize=(10, 5))
first_5_columns.boxplot()

# min-max scaling (chi2 feature selection requires non-negative inputs)
X_scaled = (X - X.min()) / (X.max() - X.min())

first_5_columns = X_scaled.iloc[:, 0:5]
plt.figure(figsize=(10, 5))
first_5_columns.boxplot()

best_input_columns = SelectKBest(chi2, k=10).fit(X_scaled, Y)
sel_index = best_input_columns.get_support()
best_X = X_scaled.loc[:, sel_index]

feature_selected = best_X.columns.values.tolist()
print("The best 10 features selected are:", feature_selected)

X_train, X_test, Y_train, Y_test = train_test_split(best_X, Y, test_size=0.30, random_state=1)
print('X train shape = ', X_train.shape)
print('X test shape = ', X_test.shape)
print('Y train shape = ', Y_train.shape)
print('Y test shape = ', Y_test.shape)

SVC_model = SVC(gamma='auto', random_state=0).fit(X_train, Y_train)
SVC_model_score = SVC_model.score(X_test, Y_test)
print('Support Vector Classification Model Score = ', SVC_model_score)
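
# --- Optional breakdown (a sketch, not part of the original script) ----------
# A single accuracy score can hide class imbalance. This sketch, which assumes
# the SVC_model, X_test, and Y_test variables defined above, breaks the score
# down into a confusion matrix and per-class precision/recall.
from sklearn.metrics import classification_report, confusion_matrix

Y_pred = SVC_model.predict(X_test)
print(confusion_matrix(Y_test, Y_pred))
print(classification_report(Y_test, Y_pred))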
--------------------------------------------------------------------------------
/Chapter12/fault.dataset.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Hands-On-Simulation-Modeling-with-Python-Second-Edition/799f10caa2702e4074893fd4c5145e9aa42b8719/Chapter12/fault.dataset.xlsx
--------------------------------------------------------------------------------
/Chapter12/gearbox_fault_diagnosis.py:
--------------------------------------------------------------------------------
import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.neural_network import MLPClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.inspection import DecisionBoundaryDisplay

data = pd.read_excel('fault.dataset.xlsx')

print(data.head(10))

print(data.info())

DataStat = data.describe()
print(DataStat)

DataStatCat = data.astype('object').describe()
print(DataStatCat)

fig, axes = plt.subplots(1, 2, figsize=(18, 10))
sns.boxplot(ax=axes[0], x='state', y='a1', data=data)
sns.boxplot(ax=axes[1], x='state', y='a2', data=data)
plt.ylim(-40, 40)
plt.show()

X = data.drop('state', axis=1)
print('X shape = ', X.shape)
Y = data['state']
print('Y shape = ', Y.shape)

X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.30, random_state=1)
print('X train shape = ', X_train.shape)
print('X test shape = ', X_test.shape)
print('Y train shape = ', Y_train.shape)
print('Y test shape = ', Y_test.shape)

lr_model = LogisticRegression(random_state=0).fit(X_train, Y_train)
lr_model_score = lr_model.score(X_test, Y_test)
print('Logistic Regression Model Score = ', lr_model_score)

# DecisionBoundaryDisplay works with exactly two input features (a1, a2 here)
ax1 = DecisionBoundaryDisplay.from_estimator(
    lr_model, X_train, response_method="predict",
    alpha=0.5)
ax1.ax_.scatter(X_train.iloc[:, 0], X_train.iloc[:, 1], c=Y_train, edgecolor="k")
plt.show()

rm_model = RandomForestClassifier(max_depth=2, random_state=0).fit(X_train, Y_train)
rm_model_score = rm_model.score(X_test, Y_test)
print('Random Forest Model Score = ', rm_model_score)

ax2 = DecisionBoundaryDisplay.from_estimator(
    rm_model, X_train, response_method="predict",
    alpha=0.5)
ax2.ax_.scatter(X_train.iloc[:, 0], X_train.iloc[:, 1], c=Y_train, edgecolor="k")
plt.show()

mlp_model = MLPClassifier(random_state=1, max_iter=300).fit(X_train, Y_train)
mlp_model_score = mlp_model.score(X_test, Y_test)
print('Artificial Neural Network Model Score = ', mlp_model_score)

ax3 = DecisionBoundaryDisplay.from_estimator(
    mlp_model, X_train, response_method="predict",
    alpha=0.5)
ax3.ax_.scatter(X_train.iloc[:, 0], X_train.iloc[:, 1], c=Y_train, edgecolor="k")
plt.show()

kn_model = KNeighborsClassifier(n_neighbors=2).fit(X_train, Y_train)
kn_model_score = kn_model.score(X_test, Y_test)
print('K-nearest neighbors Model Score = ', kn_model_score)

ax4 = DecisionBoundaryDisplay.from_estimator(
    kn_model, X_train, response_method="predict",
    alpha=0.5)
ax4.ax_.scatter(X_train.iloc[:, 0], X_train.iloc[:, 1], c=Y_train, edgecolor="k")
plt.show()
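
# --- Optional comparison (a sketch, not part of the original script) ---------
# The four scores above come from a single 70/30 split. For a more robust
# comparison (assuming the estimators and the X, Y variables defined above),
# 5-fold cross-validation averages the accuracy over several splits.
# cross_val_score clones each estimator, so passing already-fitted models is fine.
from sklearn.model_selection import cross_val_score

for name, estimator in [('Logistic Regression', lr_model),
                        ('Random Forest', rm_model),
                        ('Neural Network', mlp_model),
                        ('K-nearest neighbors', kn_model)]:
    scores = cross_val_score(estimator, X, Y, cv=5)
    print(name, 'mean CV score =', scores.mean())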
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2022 Packt

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Hands-On Simulation Modeling with Python, Second Edition

Hands-On Simulation Modeling with Python, Second Edition, published by Packt
--------------------------------------------------------------------------------