├── README
├── test_extract.py
├── progress_bar.py
├── simpson.py
├── rainflow_damage.py
├── gamma_lanczos.py
├── extractMaxS.py
└── CalcDamage.py

/README:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/test_extract.py:
--------------------------------------------------------------------------------
import unittest
import sys, os
import extractMaxS

class Extract(unittest.TestCase):
    def test_1(self):
        N = extractMaxS.Nf(90.0, -0.5)
        self.assertAlmostEqual(N, 100000, -4)

def suite():
    suite1 = unittest.makeSuite(Extract)
    return unittest.TestSuite([suite1])

if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(Extract)
    unittest.TextTestRunner(verbosity=2).run(suite)
--------------------------------------------------------------------------------
/progress_bar.py:
--------------------------------------------------------------------------------
class ProgressBar:
    """From http://code.activestate.com/recipes/168639/"""
    def __init__(self, minValue = 0, maxValue = 10, totalWidth=12):
        self.progBar = "[]"    # This holds the progress bar string
        self.min = minValue
        self.max = maxValue
        self.span = maxValue - minValue
        self.width = totalWidth
        self.amount = 0        # When amount == max, we are 100% done
        self.updateAmount(0)   # Build progress bar string

    def updateAmount(self, newAmount = 0):
        if newAmount < self.min: newAmount = self.min
        if newAmount > self.max: newAmount = self.max
        self.amount = newAmount

        # Figure out the new percent done, round to an integer
        diffFromMin = float(self.amount - self.min)
        percentDone = (diffFromMin / float(self.span)) * 100.0
        percentDone = round(percentDone)
        percentDone = int(percentDone)

        # Figure out how many hash bars the percentage should be
        allFull = self.width - 2
        numHashes = (percentDone / 100.0) * allFull
        numHashes = int(round(numHashes))

        # build a progress bar with hashes and spaces
        self.progBar = "[" + '#'*numHashes + ' '*(allFull-numHashes) + "]"

        # figure out where to put the percentage, roughly centered
        percentPlace = (len(self.progBar) // 2) - len(str(percentDone))
        percentString = str(percentDone) + "%"

        # slice the percentage into the bar
        self.progBar = (self.progBar[0:percentPlace] + percentString
                        + self.progBar[percentPlace+len(percentString):])

    def __str__(self):
        return str(self.progBar)
--------------------------------------------------------------------------------
/simpson.py:
--------------------------------------------------------------------------------
"""
Closed Simpson's rule for
    \int_a^b f(x) dx
Divide [a,b] iteratively into h, h/2, h/4, h/8, ... step sizes; and,
for each step size, evaluate f(x) at a+h, a+3h, a+5h, a+7h, ..., b-3h,
b-h, noting that other points have already been sampled.

At each iteration step, data are sampled only where necessary so that
the total data is represented by adding sampled points from all
previous steps:
    step 1:  h    a---------------b
    step 2:  h/2  a-------^-------b
    step 3:  h/4  a---^-------^---b
    step 4:  h/8  a-^---^---^---^-b
    total:        a-^-^-^-^-^-^-^-b
So, for step size of h/n, there are n intervals, and the data are
sampled at the boundaries including the 2 end points.

If old = Trapezoid formula for an old step size 2h, then Trapezoid
formula for the new step size h is obtained by
    new = old/2 + h{f(a+h) + f(a+3h) + f(a+5h) + f(a+7h) +...+ f(b-3h)
                    + f(b-h)}
Also, Simpson formula for the new step size h is given by
    simpson = (4 new - old)/3
"""
def close_enough(u, v, TOL):
    if abs(u-v) <= TOL*abs(u) and abs(u-v) <= TOL*abs(v):
        return True
    else:
        return False

def closedpoints(func, a, b, TOL=1e-6):         # f(x)=func(x)
    h = b - a
    old2 = old = h * (func(a) + func(b)) / 2.0
    count = 0
    while 1:
        h = h / 2.0
        x, sum = a + h, 0
        while x < b:
            sum = sum + func(x)
            x = x + 2 * h
        new = old / 2.0 + h * sum
        new2 = (4 * new - old) / 3.0
        if abs(new2 - old2) < TOL * (1 + abs(old2)) and count > 4: return new2
        old = new       # Trapezoid
        old2 = new2     # Simpson
        count = count + 1
        #print 'closedpoints(%d): Trapezoid=%s, Simpson=%s' % (count, new, new2)
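
if __name__ == '__main__':
    # A minimal sanity check (added here; not part of the original recipe):
    # integrate sin(x) over [0, pi], whose exact value is 2.  Only the standard
    # library is assumed; closedpoints() is used exactly as defined above.
    import math
    approx = closedpoints(math.sin, 0.0, math.pi)
    print 'closedpoints(sin, 0, pi) = %r (exact value is 2)' % (approx,)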
--------------------------------------------------------------------------------
/rainflow_damage.py:
--------------------------------------------------------------------------------
import math
import operator
import numpy as NP
import scipy.integrate
import scipy.special
import csv
import simpson
import logging

def read_element_stress_PSD(fname):
    stress_PSD = []
    f = open(fname,'r')
    rdr = csv.reader(f)
    for freq, s_PSD in rdr:
        stress_PSD.append((float(freq), float(s_PSD)))
    f.close()
    return stress_PSD

class StressPSD(object):
    def __init__(self, stress_PSD):
        self.stress_PSD = dict(stress_PSD)
        sorted_lst = sorted(self.stress_PSD.iteritems())
        f = NP.array(map(operator.itemgetter(0), sorted_lst))
        vals = NP.array(map(operator.itemgetter(1), sorted_lst))
        self.M0 = scipy.integrate.trapz(vals, x = f)
        self.M2 = scipy.integrate.trapz(f*f*vals, x = f)
        self.M4 = scipy.integrate.trapz(f*f*f*f*vals, x = f)
        self.nu_0_plus = math.sqrt(self.M2/self.M0)           # rate of zero crossings
        self.nu_p = math.sqrt(self.M4/self.M2)                # rate of peaks
        self.alpha = self.nu_0_plus/self.nu_p                 # irregularity factor
        self.epsilon = math.sqrt(1.0-self.alpha*self.alpha)   # spectral width parameter

class Material(object):
    def __init__(self, A, m):
        self.A = A
        self.m = m

SQRT2 = math.sqrt(2.0)

def damage(s_psd, T, material):
    '''
    Wirsching and Light empirical equivalent narrow-band model.
    Note that the RMS stress is converted to ksi since the material data is in ksi.
    Note also that A is based on amplitude, not range.
    '''
    d_nb = (s_psd.nu_0_plus*T/material.A)*pow((SQRT2*math.sqrt(s_psd.M0)/1000.0),material.m)*scipy.special.gamma(material.m/2.0+1.0)
    a_m = 0.926 - 0.033*material.m
    b_m = 1.587*material.m - 2.323
    _lambda = a_m + (1 - a_m)*pow(1 - s_psd.epsilon, b_m)
    return _lambda*d_nb
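
def _demo():
    # A minimal, hand-checkable sketch (added; not part of the original module).
    # Assume a flat stress PSD of 1.0 between 10 Hz and 20 Hz, with PSD units of
    # psi^2/Hz so that the /1000.0 inside damage() converts the RMS value to ksi.
    # With only two points the trapezoid rule gives M0 = 10, M2 = 2500 and
    # M4 = 850000 exactly, so the derived quantities can be verified by hand.
    psd = StressPSD([(10.0, 1.0), (20.0, 1.0)])
    print 'M0=%g, M2=%g, M4=%g' % (psd.M0, psd.M2, psd.M4)
    print 'nu_0+=%.3f Hz, nu_p=%.3f Hz, alpha=%.3f, epsilon=%.3f' % (psd.nu_0_plus, psd.nu_p, psd.alpha, psd.epsilon)
    mtrl = Material(A=pow(10.0, 24.49), m=9.62)
    print 'damage over one hour: %s' % (damage(psd, 3600.0, mtrl),)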

if __name__ == '__main__':
    # element.csv is expected to hold plain 'frequency,PSD' rows with no header,
    # matching read_element_stress_PSD() above.
    stress_PSD = read_element_stress_PSD('element.csv')
    mtrl = Material(A=pow(10.0,24.49), m = 9.62)
    s_PSD = StressPSD(stress_PSD)
    print 'damage: %s' % (damage(s_PSD, 3600.0, mtrl))
--------------------------------------------------------------------------------
/gamma_lanczos.py:
--------------------------------------------------------------------------------
# Copyright (c) 2010 the authors listed at the following URL, and/or
# the authors of referenced articles or incorporated external code:
# http://en.literateprograms.org/Gamma_function_with_the_Lanczos_approximation_(Python)?action=history&offset=20090111205238
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Retrieved from: http://en.literateprograms.org/Gamma_function_with_the_Lanczos_approximation_(Python)?oldid=15934

from cmath import *

g = 7
lanczos_coef = [ \
    0.99999999999980993,
    676.5203681218851,
    -1259.1392167224028,
    771.32342877765313,
    -176.61502916214059,
    12.507343278686905,
    -0.13857109526572012,
    9.9843695780195716e-6,
    1.5056327351493116e-7]

def gamma(z):
    z = complex(z)
    if z.real < 0.5:
        # Euler reflection formula for the left half of the plane
        return pi / (sin(pi*z)*gamma(1-z))
    else:
        z -= 1
        x = lanczos_coef[0] + \
            sum(lanczos_coef[i]/(z+i)
                for i in range(1, g+2))
        t = z + g + 0.5
        return sqrt(2*pi) * t**(z+0.5) * exp(-t) * x

if __name__ == '__main__':
    print gamma(5.81).real
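    # Cross-check against the standard library (added; not part of the retrieved
    # file).  math.gamma needs Python 2.7 or later; for real z > 0 the two
    # evaluations should agree to many significant digits.
    import math
    for z in (0.5, 1.0, 5.81, 9.62/2.0 + 1.0):
        print 'gamma(%s): lanczos %r vs math.gamma %r' % (z, gamma(z).real, math.gamma(z))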
--------------------------------------------------------------------------------
/extractMaxS.py:
--------------------------------------------------------------------------------
from odbAccess import *
from abaqusConstants import *
import sys, getopt, os, string
import math

def getPath(job_id):
    odbPath = job_id + '.odb'
    new_odbPath = None
    print odbPath
    if isUpgradeRequiredForOdb(upgradeRequiredOdbPath=odbPath):
        print "Upgrade required"
        path,file = os.path.split(odbPath)
        file = 'upgraded_'+file
        new_odbPath = os.path.join(path,file)
        upgradeOdb(existingOdbPath=odbPath, upgradedOdbPath=new_odbPath)
        odbPath = new_odbPath
    else:
        print "Upgrade not required"
    return odbPath

def Nf(Smax, R):
    '''
    Calculate Nf for Inconel 625, room temperature
    '''
    log_Nf = 24.49 - 9.62*math.log10(Smax*pow((1-R),0.42))
    N = math.pow(10, log_Nf)
    return N

def life_at_f(f, Smax, T, R):
    # Three-band approximation for a Gaussian random response: ~68.27% of the
    # f*T cycles are assumed to occur at the 1-sigma stress level, ~27.18% at
    # 2-sigma and ~4.28% at 3-sigma, and the damage fractions are summed per
    # Miner's rule.
    n_1sigma = 0.6827*f*T
    N_1sigma = Nf(Smax, R)
    n_2sigma = 0.2718*f*T
    N_2sigma = Nf(2*Smax, R)
    n_3sigma = 0.0428*f*T
    N_3sigma = Nf(3*Smax, R)
    life = n_1sigma/N_1sigma + n_2sigma/N_2sigma + n_3sigma/N_3sigma
    return life

def calc_accumulated_damage(max_values, T, R):
    sum = 0.0
    for f, Smax in max_values:
        sum = sum + life_at_f(f, Smax, T, R)
    return sum
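
def _demo_damage():
    # A hand-checkable sketch (added; the numbers are assumed, not taken from any
    # real model): a single resonance at 100 Hz with a 1-sigma Mises stress of
    # 20 ksi, fully reversed (R = -1), over a 60 s dwell.  Each term of
    # life_at_f() can be checked against Nf() directly before running the real
    # ODB extraction below.
    f, Smax, T, R = 100.0, 20.0, 60.0, -1
    for level in (1, 2, 3):
        print '%d-sigma: Nf(%g ksi) = %g cycles' % (level, level*Smax, Nf(level*Smax, R))
    print 'accumulated damage: %g' % (calc_accumulated_damage([(f, Smax)], T, R),)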

def odbMaxStress(job_id):

    odbPath = getPath(job_id)
    odb = openOdb(path=odbPath)

    MaxValues = {}
    for instance_name in odb.rootAssembly.instances.keys():
        MaxValues[instance_name] = []

    # retrieve steps from the odb
    keys = odb.steps.keys()
    for key in keys:
        print key
        step = odb.steps[key]

        frames = step.frames

        for i in range(len(frames)):
        #for i in range(0,3):
            frame = frames[i]
            print 'Id = %d, Frequency = %f\n'%(frame.frameId,frame.frameValue)
            freq = frame.frameValue
            try:
                stress = frame.fieldOutputs['S']
                for val in MaxValues.itervalues():
                    val.append((freq, -1.0e20))

                # Doesn't make much sense to use an invariant on the stress in an RR analysis, but I need something
                for stressValue in stress.values:
                    MaxValues[stressValue.instance.name][-1] = (freq, max(stressValue.mises, MaxValues[stressValue.instance.name][-1][1]))

            except KeyError:
                print "fieldOutputs does not have S at frame %s" % (frame.frameId,)

    odb.close()
    instances_to_delete = []
    def max_stress(a, b):
        if b[1] > a[1]:
            return b
        else:
            return a
    for instance_name, vals in MaxValues.iteritems():
        maximum = reduce(max_stress, vals)
        # Deleting the instances without any values.
        # Only true for a random response analysis, adjust to taste
        if maximum[1] < 0.0:
            instances_to_delete.append(instance_name)
    for instance_name in instances_to_delete:
        print "deleting %s" % (instance_name,)
        del MaxValues[instance_name]

    dest = job_id + "_MaxStress.txt"
    output = open(dest,"w")
    output.write('instance,f (Hz),S (ksi)\n')
    def convert_stress(s):
        # Square root, then psi -> ksi; non-positive values get a small placeholder.
        if s > 0.0:
            return math.sqrt(s)/1000.0
        else:
            return 0.0001
    for instance_name, vals in sorted(MaxValues.iteritems()):
        # Convert to ksi, take square root
        vals = [(f, convert_stress(val)) for f, val in vals]
        for f, val in vals:
            output.write('%s,%f,%f\n' % (instance_name, f, val))
        T = 3600.0  # Duration of test in seconds
        R = -1      # Stress ratio Smin/Smax; -1 means fully reversed
        output.write("Accumulated damage for instance %s: %s\n" % (instance_name, calc_accumulated_damage(vals, T, R),))
    output.close()

if __name__ == '__main__':
    # Get command line arguments.

    usage = "usage: abaqus python extractMaxS.py <JobID>"
    optlist, args = getopt.getopt(sys.argv[1:],'')
    JobID = args[0]
    if not JobID:
        print usage
        sys.exit(0)
    odbPath = JobID + '.odb'
    if not os.path.exists(odbPath):
        print "odb %s does not exist!" % odbPath
        sys.exit(0)
    excluded_instances = ['ASSY_6-1-1',]  # defined but not currently used
    odbMaxStress(JobID)
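
# Typical invocation (sketch; 'my_rr_job' is a hypothetical job name):
#   abaqus python extractMaxS.py my_rr_job
# This writes my_rr_job_MaxStress.txt with one 'instance,f (Hz),S (ksi)' row per
# frequency, followed by an accumulated-damage line for each instance.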
--------------------------------------------------------------------------------
/CalcDamage.py:
--------------------------------------------------------------------------------
from odbAccess import *
from abaqusConstants import *
import sys, getopt, os, string
import math
import rainflow_damage
import progress_bar

# getPath, Nf, life_at_f and calc_accumulated_damage are the same helpers as in
# extractMaxS.py.
def getPath(job_id):
    odbPath = job_id + '.odb'
    new_odbPath = None
    print odbPath
    if isUpgradeRequiredForOdb(upgradeRequiredOdbPath=odbPath):
        print "Upgrade required"
        path,file = os.path.split(odbPath)
        file = 'upgraded_'+file
        new_odbPath = os.path.join(path,file)
        upgradeOdb(existingOdbPath=odbPath, upgradedOdbPath=new_odbPath)
        odbPath = new_odbPath
    else:
        print "Upgrade not required"
    return odbPath

def Nf(Smax, R):
    '''
    Calculate Nf for Inconel 625, room temperature
    '''
    log_Nf = 24.49 - 9.62*math.log10(Smax*pow((1-R),0.42))
    N = math.pow(10, log_Nf)
    return N

def life_at_f(f, Smax, T, R):
    n_1sigma = 0.6827*f*T
    N_1sigma = Nf(Smax, R)
    n_2sigma = 0.2718*f*T
    N_2sigma = Nf(2*Smax, R)
    n_3sigma = 0.0428*f*T
    N_3sigma = Nf(3*Smax, R)
    life = n_1sigma/N_1sigma + n_2sigma/N_2sigma + n_3sigma/N_3sigma
    return life

def calc_accumulated_damage(max_values, T, R):
    sum = 0.0
    for f, Smax in max_values:
        sum = sum + life_at_f(f, Smax, T, R)
    return sum

def odbMaxStress(job_id, exposure_time):

    odbPath = getPath(job_id)
    odb = openOdb(path=odbPath)

    MaxValues = {}
    for instance_name in odb.rootAssembly.instances.keys():
        MaxValues[instance_name] = {}

    # retrieve steps from the odb
    keys = odb.steps.keys()
    for key in keys:
        step = odb.steps[key]

        frames = step.frames

        sys.stdout.write('Working on step %s\n' % (key,))
        pb = progress_bar.ProgressBar(maxValue = len(frames), totalWidth = 60)
        for i in range(len(frames)):
        # for i in range(0,13):
            frame = frames[i]
            freq = frame.frameValue
            try:
                stress = frame.fieldOutputs['S']

                # Doesn't make much sense to use an invariant on the stress in an RR analysis, but I need something
                for stressValue in stress.values:
                    instance = MaxValues[stressValue.instance.name]
                    if instance.has_key(stressValue.elementLabel):
                        element = instance[stressValue.elementLabel]
                        if element.has_key(freq):
                            element[freq] = max(stressValue.mises, element[freq])
                        else:
                            element[freq] = stressValue.mises
                    else:
                        # Seed the per-element map with the current frequency so the
                        # first frame's value is not dropped.
                        instance[stressValue.elementLabel] = {freq: stressValue.mises}

            except KeyError:
                print "fieldOutputs does not have S at frame %s" % (frame.frameId,)

            pb.updateAmount(i)
            sys.stdout.write('\r%s' % str(pb))
            sys.stdout.flush()
        sys.stdout.write('\n')
    odb.close()
    instances_to_delete = []
    def max_stress(a, b):
        if b > a:
            return b
        else:
            return a
    for instance_name, elements in MaxValues.iteritems():
        max_of_max = -1.0e20
        for element in elements.itervalues():
            maximum = reduce(max_stress, element.itervalues())
            #print 'maximum: %s' % (maximum,)
            # Deleting the instances without any values.
            # Only true for a random response analysis, adjust to taste
            max_of_max = max(maximum, max_of_max)
            #print 'max_of_max: %s' % (max_of_max,)
        if max_of_max < 0.0:
            instances_to_delete.append(instance_name)
    for instance_name in instances_to_delete:
        #print "deleting %s" % (instance_name,)
        del MaxValues[instance_name]

    dest = job_id + "_MaxStress.txt"
    output = open(dest,"w")
    output.write('instance,f (Hz),S (ksi)\n')
    mtrl = rainflow_damage.Material(A=pow(10.0,24.49), m = 9.62)
    for instance_name, elements in sorted(MaxValues.iteritems()):
        sys.stdout.write('Working on instance %s\n' % (instance_name,))
        damages = []
        pb = progress_bar.ProgressBar(maxValue = len(elements), totalWidth = 60)
        for i, (element_label, frequency_data) in enumerate(elements.iteritems()):
            #print 'element_label: %s, frequency_data: %s' % (element_label, frequency_data)
            vals = [(freq, val) for freq, val in frequency_data.iteritems()]
            vals.sort()
            #print 'vals: %s' % (vals,)
            s_PSD = rainflow_damage.StressPSD(vals)
            damages.append(rainflow_damage.damage(s_PSD, T=exposure_time, material = mtrl))
            pb.updateAmount(i)
            sys.stdout.write('\r%s' % str(pb))
            sys.stdout.flush()
        sys.stdout.write('\n')
        damages.sort(reverse=True) # Descending
        # Report the ten worst element damages for each instance
        output.write("Accumulated damage for instance %s: %s\n" % (instance_name, damages[:10]))
    output.close()

if __name__ == '__main__':
    # Get command line arguments.

    usage = "usage: abaqus python CalcDamage.py <JobID> <exposure time in seconds>"
    optlist, args = getopt.getopt(sys.argv[1:],'')
    JobID = args[0]
    T = float(args[1])
    if not JobID:
        print usage
        sys.exit(0)
    odbPath = JobID + '.odb'
    if not os.path.exists(odbPath):
        print "odb %s does not exist!" % odbPath
        sys.exit(0)
    excluded_instances = ['ASSY_6-1-1',]  # defined but not currently used
    odbMaxStress(JobID, T)
--------------------------------------------------------------------------------