├── laminar.bat
├── turbulent.bat
├── experimental.png
├── .gitignore
├── LICENSE
├── laminar.py
├── common.py
└── turbulent.py

/laminar.bat:
--------------------------------------------------------------------------------
python laminar.py data/laminar-square
pause
--------------------------------------------------------------------------------
/turbulent.bat:
--------------------------------------------------------------------------------
python turbulent.py data/turbulent-square
pause
--------------------------------------------------------------------------------
/experimental.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/batterseapower/cfd-processing/master/experimental.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Build artifacts
*.pyc

# Output graphs
*.png

# OS junk
.DS_Store
Thumbs.db
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
Copyright Neil Mitchell 2006-2007.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright
  notice, this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above
  copyright notice, this list of conditions and the following
  disclaimer in the documentation and/or other materials provided
  with the distribution.

* Neither the name of Neil Mitchell nor the names of other
  contributors may be used to endorse or promote products derived
  from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/laminar.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

from glob import glob
import sys
import os.path
import re
from math import sqrt, log, cos, cosh, tanh, sinh, pi

from common import *


directory = sys.argv[1]


# Parse out width
if "square" in directory:
    width = 0.05
else:
    m = re.search(r"width([\d\.]+)", directory)
    if m is None:
        print "Could not determine duct width: add widthN.M to the directory name"
        sys.exit(1)
    else:
        width = float(m.group(1))

wall_to_wall_distance = 0.05
height = 0.05
length = 0.5
d_h = (width * height) / (2 * (width + height)) # Hydraulic diameter (as written this is A/P, i.e. one quarter of the conventional D_h = 4A/P)

density = 1.18415
mu = 0.00001855
pressure = 0.05

c = width / 2
b = height / 2
pressure_gradient = -pressure / length


q_infinite_sum = infinite_sum(lambda n: tanh(0.5 * n * pi) / pow(n, 5))
q = pow(b, 4) / (6 * mu) * pressure_gradient * -(1 - 192 / pow(pi, 5) * q_infinite_sum)
vm = q / pow(b, 2)
z = 0

def velocity_theory(position):
    pres_infinite_sum = infinite_sum(lambda n: pow(-1, (n - 1) / 2.0) * (1 - cosh(n * pi * z / (2 * b)) / cosh(n * pi * c / (2 * b))) * cos(n * pi * (position - (wall_to_wall_distance / 2)) / (2 * b)) / pow(float(n), 3))
    return -16 * pow(b, 2) / (mu * pow(pi, 3)) * pressure_gradient * pres_infinite_sum


# Print some information about the model
printconstants("Model constants:", [("width", width), ("height", height), ("length", length), ("d_h", d_h), ("density", density), ("mu", mu), ("c", c), ("b", b), ("pressure gradient", pressure_gradient), ("q", q), ("vm", vm)])


# Pull in data
sim_velocity_profiles = {}
sim_scaled_velocity_profiles = {}
sim_scaled_nonhydralic_velocity_profiles = {}
for file in glob(os.path.join(directory, "*.csv")):
    print ""
    print "#", file

    # pl29sf1.13base0.1distance.csv
    # r"pl(\d+)sf([\d\.]+)base([\d\.]+)distance"
    file_base = os.path.basename(os.path.splitext(file)[0])

    # Extract data from CSV
    velocity_max, velocity_average, velocity_profile = extractdata(file)

    # Trim data down to size (especially important if the velocity profile was built from position-indexed information)
    wall_to_center_distance = wall_to_wall_distance / 2.0
    half_velocity_profile = [(position, velocity) for position, velocity in velocity_profile if 0.0 < position <= wall_to_center_distance]

    # Build data for graphing
    sim_velocity_profiles[file_base] = velocity_profile
    sim_scaled_velocity_profiles[file_base] = [(velocity / velocity_average, 2 * position / d_h) for position, velocity in half_velocity_profile]
    sim_scaled_nonhydralic_velocity_profiles[file_base] = [(1 - (position / wall_to_center_distance), velocity / velocity_max) for position, velocity in half_velocity_profile]


# Find the theoretical velocity profile
all_positions = unions([set([position for position, _ in velocity_profile]) for velocity_profile in sim_velocity_profiles.values()])
theory_velocity_profile = sorted([(position, velocity_theory(position)) for position in all_positions])


# Guess at best model
print ""
print "# Simulation average squared error"
for sim, velocity_profile in sim_velocity_profiles.items():
    print " ", sim, "=", average_error(velocity_profile, velocity_theory)


# OK, let's go:
# http://matplotlib.sourceforge.net/users/pyplot_tutorial.html
import matplotlib.pyplot as plt

# profile:
plt.clf()
plt.figure(figsize=(10, 8), dpi=80)

plt.xlabel("x")
plt.ylabel("u")

for sim, velocity_profile in [("theory", theory_velocity_profile)] + sim_velocity_profiles.items():
    positions, velocities = zip(*velocity_profile)
    plt.plot(positions, velocities, label=sim)

plt.legend(loc="lower center")
plt.savefig(os.path.join(directory, "velocity-profile"))

# scaled velocity:
plt.clf()
plt.figure(figsize=(10, 8), dpi=80)

plt.xlabel("u/u_avg")
plt.ylabel("2y/D_H")

for sim, scaled_velocity_profile in sim_scaled_velocity_profiles.items():
    scaled_velocity, scaled_y = zip(*scaled_velocity_profile)
    plt.plot(scaled_velocity, scaled_y, label=sim)

plt.legend(loc="upper left")
plt.savefig(os.path.join(directory, "scaled-velocity-profile"))

# scaled non-hydraulic velocity:
plt.clf()
plt.figure(figsize=(10, 8), dpi=80)

plt.xlabel("2y/h")
plt.ylabel("v/v_max")

for sim, scaled_nonhydralic_velocity_profile in sim_scaled_nonhydralic_velocity_profiles.items():
    scaled_position, scaled_velocity = zip(*scaled_nonhydralic_velocity_profile)
    plt.plot(scaled_position, scaled_velocity, label=sim)

plt.legend(loc="lower left")
plt.savefig(os.path.join(directory, "scaled-nonhydralic-velocity-profile"))
--------------------------------------------------------------------------------
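Note on the theory used in laminar.py: the series evaluated by velocity_theory appears to be the classic Fourier-series solution for fully developed laminar flow in a rectangular duct, evaluated on the mid-plane z = 0 with the cross-stream coordinate y measured from the wall. Written out in the script's own symbols (b = height/2, c = width/2, W = wall_to_wall_distance, dp/dx = pressure_gradient), and with the sums truncated by infinite_sum at odd n < 40 or once a term drops below 1e-6, the quantities computed are:

\[
u(y) \;=\; -\frac{16 b^2}{\mu \pi^3}\,\frac{dp}{dx}
\sum_{n=1,3,5,\dots} \frac{(-1)^{(n-1)/2}}{n^3}
\left[1 - \frac{\cosh\!\big(n\pi z/(2b)\big)}{\cosh\!\big(n\pi c/(2b)\big)}\right]
\cos\!\left(\frac{n\pi\,(y - W/2)}{2b}\right)
\]

\[
q \;=\; -\frac{b^4}{6\mu}\,\frac{dp}{dx}
\left[1 - \frac{192}{\pi^5} \sum_{n=1,3,5,\dots} \frac{\tanh(n\pi/2)}{n^5}\right],
\qquad v_m = \frac{q}{b^2}
\]

These equations are read off the code above as a reading aid; they are not quoted from any reference shipped with the repository.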
/common.py:
--------------------------------------------------------------------------------
import csv
import sys

def concat(xss):
    return sum(xss, [])

def drop(n, xs):
    for x in xs:
        if n > 0:
            n = n - 1
        else:
            yield x

def unions(sets):
    allset = set()
    for aset in sets:
        allset.update(aset)
    return allset

def printconstants(title, ks):
    print title
    for name, value in ks:
        if type(value) == type(0.0):
            value = "%.6g" % value

        print " ", name, "=", value

def infinite_sum(term):
    n = 1
    result = 0.0
    contribution = sys.maxint
    while abs(contribution) > 0.000001 and n < 40:
        contribution = term(n)
        #print n, "=", "%.6g" % contribution
        result += contribution
        n += 2 # Only want odd terms

    return result

def average_error(observed, model):
    return sum([pow(model(x) - y, 2) for x, y in observed]) / len(observed)

def extractdata(file, extra_stats=lambda _: []):
    # "Position [0.0, 1.0, 0.0] (m)-point 1 (m)","Velocity: Magnitude-point 1 (m/s)"
    # 0.0,35.15489668712291
    # 2.499999993688107E-7,35.15489668712291
    reject_count = 0
    raw_velocity_positions = []

    # Skip header row
    reader = csv.reader(open(file))
    header = reader.next()
    is_wall_distance = "Wall Distance" in header[0]

    if is_wall_distance:
        print "WARNING: using wall-distance position information, results may be inaccurate"

    # Input body of data
    all_rows = list(reader)
    last_position = None
    for row in all_rows:
        # Verify record is correct length
        if len(row) != 2:
            reject_count = reject_count + 1
            continue

        # Verify we can parse as numbers
        try:
            position, velocity = float(row[0]), float(row[1])

            # If reading a file containing wall distance, we only want half of the profile
            # or we get weird discontinuities in the output
            if is_wall_distance and position < last_position:
                # Diagnostics to help the user figure out discontinuities
                broke_at = len(raw_velocity_positions)
                expected_break_at = len(all_rows) / 2
                row_different = expected_break_at - broke_at

                print "Breaking wall-distance series at", position, "<", last_position
                print "Broke after", broke_at, "rows, expecting", expected_break_at, "rows"
                if row_different > 1:
                    print "CRITICAL WARNING: the number of rows we got is less than expected by", row_different, "so the series is probably truncated"
                elif row_different < -1:
                    print "CRITICAL WARNING: the number of rows we got exceeds expectations by", abs(row_different), "so the series may contain false readings"

                break
            else:
                last_position = position

            raw_velocity_positions.append((position, velocity))
        except ValueError:
            reject_count = reject_count + 1

    # Tell user about problems with the input data
    if reject_count != 0:
        print "Rejected", reject_count, "malformed row(s)"

    if is_wall_distance:
        # Build the velocity profile
        min_velocity_positions = {}
        max_velocity_positions = {}
        for position, velocity in raw_velocity_positions:
            # Record the minimum and maximum positions that experienced a given velocity
            min_velocity_positions[velocity] = min(min_velocity_positions.get(velocity, sys.maxint), position)
            max_velocity_positions[velocity] = max(max_velocity_positions.get(velocity, -sys.maxint), position)

        # Use the average position
        velocity_profile = sorted([((min_velocity_positions[velocity] + max_velocity_positions[velocity]) / 2, velocity) for velocity in min_velocity_positions.keys()])
    else:
        velocity_profile = []

        # For a position-indexed file we can simply sort the data by position and look for runs,
        # which is more reliable
        velocity_in_run = None
        first_position_in_run = None
        for position, velocity in sorted(raw_velocity_positions):
            if velocity != velocity_in_run:
                if velocity_in_run is not None:
                    velocity_profile.append(((position + first_position_in_run) / 2, velocity_in_run))

                first_position_in_run = position
                velocity_in_run = velocity

        if velocity_in_run is not None:
            velocity_profile.append(((position + first_position_in_run) / 2, velocity_in_run))

        velocity_profile.sort()

    # Determine average velocity
    distance_accumulator = 0.0
    velocity_average_accumulator = 0.0
    last_position = 0.0
    for position, velocity in sorted(raw_velocity_positions):
        distance = abs(position - last_position)
        distance_accumulator = distance_accumulator + distance
        velocity_average_accumulator = velocity_average_accumulator + velocity * distance

        last_position = position

    velocity_average = velocity_average_accumulator / distance_accumulator
    velocity_max = max([velocity for position, velocity in velocity_profile])

    # Intermediate results
    positions, velocities = zip(*raw_velocity_positions)
    printconstants("Simulation summary:", [("average velocity", velocity_average),
                                           ("minimum velocity", min(velocities)),
                                           ("maximum velocity", max(velocities)),
                                           ("minimum position", min(positions)),
                                           ("maximum position", max(positions))] + extra_stats(positions))

    return velocity_max, velocity_average, velocity_profile
--------------------------------------------------------------------------------
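A minimal usage sketch for extractdata, assuming a hypothetical CSV path (the simulation CSV exports themselves are not part of this listing); the expected input is the two-column position/velocity layout shown in the comment at the top of extractdata, and the call signature and return values are taken from the function above:

# Hypothetical example; "data/laminar-square/example.csv" is a made-up path.
from common import extractdata

velocity_max, velocity_average, velocity_profile = extractdata("data/laminar-square/example.csv")
print "maximum velocity =", velocity_max
print "average velocity =", velocity_average
for position, velocity in velocity_profile[:3]:
    print position, velocity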
velocity", max(velocities)), 148 | ("minimum position", min(positions)), 149 | ("maximum position", max(positions))] + extra_stats(positions)) 150 | 151 | return velocity_max, velocity_average, velocity_profile 152 | -------------------------------------------------------------------------------- /turbulent.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from glob import glob 4 | import sys 5 | import os.path 6 | import re 7 | from math import sqrt, log 8 | 9 | from common import * 10 | 11 | 12 | directory = sys.argv[1] 13 | 14 | 15 | # Parse out width 16 | if "square" in directory: 17 | width = 0.05 18 | else: 19 | m = re.search(r"width([\d\.]+)", directory) 20 | if m is None: 21 | print "Count not determine duct width: add widthN.M to the file name" 22 | sys.exit(1) 23 | else: 24 | width = float(m.group(1)) 25 | 26 | wall_to_wall_distance = 0.05 27 | height = 0.05 28 | d_h = (width * height) / (2 * (width + height)) # Hydralic diameter 29 | 30 | density = 1.18415 31 | mu = 0.00001855 32 | 33 | 34 | def uplus_theory(yplus): 35 | # Computation Methods for Fluid Dynamics, p298 36 | if yplus <= 5: 37 | return yplus 38 | else: 39 | k = 0.41 40 | b = 5 41 | return (1 / k) * log(yplus) + b 42 | 43 | 44 | # Print some information about the model 45 | printconstants("Model constants:", [("width", width), ("height", height), ("d_h", d_h), ("density", density), ("mu", mu)]) 46 | 47 | 48 | # Pull in data 49 | sim_wall_functions = {} 50 | sim_scaled_velocity_profiles = {} 51 | sim_scaled_nonhydralic_velocity_profiles = {} 52 | for file in glob(os.path.join(directory, "*.csv")): 53 | print "" 54 | print "#", file 55 | 56 | # pl29sf1.13base0.1distance.csv 57 | # r"pl(\d+)sf([\d\.]+)base([\d\.]+)distance" 58 | file_base = os.path.basename(os.path.splitext(file)[0]) 59 | 60 | # Parse out wall shear stress 61 | m = re.search(r"tw([\d\.]+)", file_base) 62 | if m is None: 63 | print "Count not determine wall shear stress: add twN.M to the file name" 64 | sys.exit(1) 65 | else: 66 | #wall_shear_stress = 111.9 67 | wall_shear_stress = float(m.group(1)) 68 | 69 | # Setup physical constants for this model 70 | ustar = sqrt(wall_shear_stress / density) 71 | d_v = mu / ustar 72 | 73 | def yplus(wall_distance): 74 | return wall_distance / d_v 75 | 76 | def uplus(velocity): 77 | return velocity / ustar 78 | 79 | # Print some information about the simulation 80 | printconstants("Simulation constants:", [("wall shear stress", wall_shear_stress), ("ustar", ustar), ("d_v", d_v)]) 81 | 82 | # Extract data from CSV 83 | velocity_max, velocity_average, velocity_profile = extractdata(file, extra_stats = lambda positions: [("minimum non-zero y+", yplus(min([position for position in positions if position > 0.0])))]) 84 | 85 | # Trim data down to size (especially important if the velocity profile was built from position-indexed information) 86 | wall_to_center_distance = wall_to_wall_distance / 2.0 87 | half_velocity_profile = [(position, velocity) for position, velocity in velocity_profile if 0.0 < position <= wall_to_center_distance] 88 | 89 | # Build data for graphing 90 | sim_wall_functions[file_base] = [(yplus(position), uplus(velocity)) for position, velocity in half_velocity_profile] 91 | sim_scaled_velocity_profiles[file_base] = [(velocity / velocity_average, 2 * position / d_h) for position, velocity in half_velocity_profile] 92 | sim_scaled_nonhydralic_velocity_profiles[file_base] = [(1 - (position / wall_to_center_distance), velocity / 
velocity_max) for position, velocity in half_velocity_profile] 93 | 94 | 95 | # Find the theoretical wall function 96 | all_yplus = unions([set([yplus for yplus, _ in wall_function]) for wall_function in sim_wall_functions.values()]) 97 | theory_wall_function = sorted([(yplus, uplus_theory(yplus)) for yplus in all_yplus]) 98 | 99 | 100 | # Compute best fit to the theory 101 | print "" 102 | print "# Simulation average squared error" 103 | for sim, wall_function in sim_wall_functions.items(): 104 | print " ", sim, "=", average_error(wall_function, uplus_theory) 105 | 106 | 107 | # OK, lets go: 108 | # http://matplotlib.sourceforge.net/users/pyplot_tutorial.html 109 | import matplotlib.pyplot as plt 110 | 111 | # y+ vs u+: 112 | plt.clf() 113 | plt.figure(figsize=(10, 8), dpi=80) 114 | 115 | plt.xlabel("y+") 116 | plt.ylabel("u+") 117 | 118 | for sim, wall_function in [("theory", theory_wall_function)] + sim_wall_functions.items(): 119 | yplus, uplus = zip(*wall_function) 120 | plt.semilogx(yplus, uplus, label=sim) 121 | 122 | plt.legend(loc="upper left") 123 | plt.savefig(os.path.join(directory, "yplus-vs-uplus")) 124 | 125 | plt.axis([0.1, 110, 0, 25]) 126 | plt.savefig(os.path.join(directory, "yplus-vs-uplus-experimental")) 127 | 128 | # scaled velocity: 129 | plt.clf() 130 | plt.figure(figsize=(10, 8), dpi=80) 131 | 132 | plt.xlabel("u/u_avg") 133 | plt.ylabel("2y/D_H") 134 | 135 | for sim, scaled_velocity_profile in sim_scaled_velocity_profiles.items(): 136 | scaled_velocity, scaled_y = zip(*scaled_velocity_profile) 137 | plt.plot(scaled_velocity, scaled_y, label=sim) 138 | 139 | plt.legend(loc="upper left") 140 | plt.savefig(os.path.join(directory, "scaled-velocity-profile")) 141 | 142 | # scaled non-hydralic velocity: 143 | plt.clf() 144 | plt.figure(figsize=(10, 8), dpi=80) 145 | 146 | plt.xlabel("2y/h") 147 | plt.ylabel("v/v_max") 148 | 149 | for sim, scaled_nonhydralic_velocity_profiles in sim_scaled_nonhydralic_velocity_profiles.items(): 150 | scaled_position, scaled_velocity = zip(*scaled_nonhydralic_velocity_profiles) 151 | plt.plot(scaled_position, scaled_velocity, label=sim) 152 | 153 | plt.legend(loc="lower left") 154 | plt.savefig(os.path.join(directory, "scaled-nonhydralic-velocity-profile")) 155 | 156 | 157 | # Build the superimposed data image 158 | from PIL import * 159 | from PIL import Image 160 | 161 | lift_point = lambda f: lambda p1, p2: (f(p1[0], p2[0]), f(p1[1], p2[1])) 162 | minus_point = lift_point(lambda x1, x2: x1 - x2) 163 | divide_point = lift_point(lambda x1, x2: x1 / float(x2)) 164 | multiply_point = lift_point(lambda x1, x2: x1 * x2) 165 | 166 | # Discover the boxes in both images that we want to superimpose 167 | experimental = Image.open("experimental.png") 168 | (experimental_width, experimental_height) = experimental.size 169 | experimental_bottom_left = (88, 108) # x = 0.1, y = 0 170 | experimental_top_right = (experimental_width - 82, experimental_height - 16) # x = 100, y = 25 171 | 172 | mine = Image.open(os.path.join(directory, "yplus-vs-uplus-experimental.png")) 173 | (mine_width, mine_height) = mine.size 174 | mine_bottom_left = (127, 81) 175 | mine_top_right = (mine_width - 111, mine_height - 81) 176 | 177 | # Begin by scaling the experimental data correctly so the axis-enclosed boxes look the same 178 | scale = divide_point(minus_point(mine_top_right, mine_bottom_left), minus_point(experimental_top_right, experimental_bottom_left)) 179 | experimental = experimental.resize(multiply_point(scale, experimental.size)) 180 | 
#experimental.save("yplus-vs-uplus-experimental-SCALED.png", "PNG") 181 | 182 | # Now work out the transform to shift the experimental data so the origins match 183 | #print mine_bottom_left, "-", multiply_point(scale, experimental_bottom_left) 184 | transform = minus_point(mine_bottom_left, multiply_point(scale, experimental_bottom_left)) 185 | #print transform 186 | experimental = experimental.offset(int(round(transform[0])), -int(round(transform[1])) + 8) # FIXME: fudge factor! 187 | 188 | blended = Image.blend(mine, experimental.crop((0, 0, mine.size[0], mine.size[1])), 0.3) 189 | blended.save(os.path.join(directory, "yplus-vs-uplus-experimental.png"), "PNG") --------------------------------------------------------------------------------
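Note on the theory used in turbulent.py: uplus_theory implements the usual two-layer law of the wall, transcribed here directly from the code rather than quoted from the cited textbook:

\[
u^+ \;=\;
\begin{cases}
y^+ & y^+ \le 5 \\[4pt]
\dfrac{1}{\kappa}\ln y^+ + B & y^+ > 5
\end{cases}
\qquad \kappa = 0.41,\quad B = 5
\]

with u^+ = u / u_* and y^+ = y / d_v, where u_* = sqrt(tau_w / rho) is the script's ustar and d_v = mu / ustar is its viscous length scale. (The conventional viscous length is nu / u_* = mu / (rho u_*); whether omitting the density factor in d_v is intentional is not clear from the source.)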