├── mkdir_function.py ├── output ├── 1 │ ├── 6_variable_windows.py │ ├── 18_filter_outlier_noise_Detrend.py │ ├── 14_filter_outlier_noise_2std.py │ ├── 13_filter_outlier_noise_Detrend.py │ ├── 7_De_trend_Median_filt.py │ ├── 10_De_trend_5to95_heights.py │ ├── 15_filter_outlier_noise_Asmooth插值.py │ ├── 16_filter_outlier_noise_Median_filt.py │ ├── 24_initial_ground_estimate_final_ground_points.py │ ├── 0_view_22.py │ ├── 17_filter_outlier_noise_median+average_filt_10times.py │ ├── 12_filter_outlier_noise_150m.py │ ├── 0_view_19.py │ ├── 0_view_21.py │ ├── 0_view_23.py │ ├── 0_view_20.py │ ├── 22_initial_ground_estimate_ground_points.py │ ├── 11_De_trend_median+average_filt_10times.py │ ├── 5_4326_proj_4550.py │ ├── 4_get_signal.py │ ├── 9_De_trend_Asmooth插值.py │ ├── 21_initial_ground_estimate_cutOff_upperbound.py │ ├── 20_initial_ground_estimate_potential_ground_points.py │ ├── 0_view_24.py │ ├── 19_initial_ground_estimate_cutOff_lowerbound.py │ ├── 8_De_trend_ref_DEM_limit.py │ ├── 3_hist_Gauss_fit.py │ ├── 23_initial_ground_estimate_refine_ground.py │ ├── interp_function.py │ ├── compare_height_function.py │ └── filter_function.py ├── mkdir_function.py └── 2_open3d_k_20_histogram_x为t.py ├── 1_variable_windows.py ├── LICENSE ├── command.txt ├── Readme.txt ├── README.md └── 1_extract_and_norm_t_h_3rdd=0.py /mkdir_function.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mon May 23 23:19:03 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | def mkdir(path): 9 | import os 10 | # 去除首位空格 11 | path=path.strip() 12 | # 去除尾部 \ 符号 13 | path=path.rstrip("\\") 14 | # 判断路径是否存在 15 | isExists=os.path.exists(path) 16 | if not isExists: 17 | # 如果不存在则创建目录 18 | os.makedirs(path) 19 | return True 20 | else: 21 | # 如果目录存在则不创建 22 | return False 23 | 24 | 25 | mkpath = './output' 26 | # 调用函数 27 | 28 | mkdir(mkpath) -------------------------------------------------------------------------------- /output/mkdir_function.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mon May 23 23:19:03 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | def mkdir(path): 9 | import os 10 | # 去除首位空格 11 | path=path.strip() 12 | # 去除尾部 \ 符号 13 | path=path.rstrip("\\") 14 | # 判断路径是否存在 15 | isExists=os.path.exists(path) 16 | if not isExists: 17 | # 如果不存在则创建目录 18 | os.makedirs(path) 19 | return True 20 | else: 21 | # 如果目录存在则不创建 22 | return False 23 | 24 | 25 | mkpath = './output' 26 | # 调用函数 27 | 28 | mkdir(mkpath) -------------------------------------------------------------------------------- /1_variable_windows.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 5 02:07:30 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import math 9 | import numpy as np 10 | 11 | a = math.log(1 - 21 / (51 - 5)) / (-28114) 12 | # print(a) 13 | 14 | # 读取 bin 所有点坐标(longitude, elevation) 15 | p_xyz = np.genfromtxt("20200925_gt1l_d_time_h.txt",delimiter = ",") 16 | length = len(p_xyz) 17 | Window = 5 + 46 * (1 - math.exp( -1 * a * length)) 18 | # print(Window) 19 | 20 | arry = np.zeros(2) 21 | arry[0] = a 22 | arry[1] = Window 23 | print(arry) 24 | 25 | np.savetxt("./output/2_Window.txt", arry, delimiter=",") -------------------------------------------------------------------------------- /output/1/6_variable_windows.py: -------------------------------------------------------------------------------- 1 | # -*- 
coding: utf-8 -*- 2 | """ 3 | Created on Thu May 5 02:07:30 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import math 9 | import numpy as np 10 | import os 11 | 12 | a = math.log(1 - 21 / (51 - 5)) / (-28114) 13 | # print(a) 14 | 15 | # 读取 bin 所有点坐标(longitude, elevation) 16 | # 读取 ATL03_3and4 的所有点 coordinate 和 attribute 17 | path = './' 18 | dirs = os.listdir(path) 19 | for dir in dirs: 20 | if dir[-12:] == "d_time_h.txt": 21 | p_xyz = np.genfromtxt(dir,delimiter = ",") 22 | length = len(p_xyz) 23 | Window = 5 + 46 * (1 - math.exp( -1 * a * length)) 24 | # print(Window) 25 | 26 | arry = np.zeros(2) 27 | arry[0] = a 28 | arry[1] = Window 29 | print(arry) 30 | 31 | np.savetxt("./2_Window.txt", arry, delimiter=",") -------------------------------------------------------------------------------- /output/1/18_filter_outlier_noise_Detrend.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 19 18:56:48 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 | import csv 10 | from compare_height_function import de_trend_signal_with_XYH 11 | 12 | # 读取 Asmooth surface 13 | Asmooth_2 = np.genfromtxt("./12_Asmooth_2.txt",delimiter = ",") 14 | len_Asmooth_2 = len(Asmooth_2) 15 | 16 | # 读取 within_150m_signal 17 | signal = np.genfromtxt("./8_within_150m_signal.txt",delimiter = ",") 18 | len_signal = len(signal) 19 | 20 | # 获取 int_unique 的 dist 切片 21 | # 形式为[int_unique_dist,array[signal_info],array[signal_info]] 22 | detrended = de_trend_signal_with_XYH(signal, Asmooth_2) 23 | print("len_detrended",len(detrended)) 24 | 25 | # 存储 26 | with open('./13_detrended.txt','w') as out: 27 | csv_out=csv.writer(out) 28 | # csv_out.writerow(['X','Y']) 29 | for row in detrended: 30 | csv_out.writerow(row) -------------------------------------------------------------------------------- /output/1/14_filter_outlier_noise_2std.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 19 11:12:15 2022 4 | 5 | @author: longj 6 | """ 7 | ''' 8 | 2. 如果 detrended signal 的标准差大于 10m 9 | 从 signal dataset 去除任何【低于 Asmooth】 【2倍 detrennd signal 标准差】的 signal value 10 | ''' 11 | 12 | import numpy as np 13 | import csv 14 | 15 | 16 | # 读取 De_trend 17 | De_trend = list(np.genfromtxt("./9_De_trend.txt",delimiter = ",")) 18 | len_De_trend = len(De_trend) 19 | 20 | # 取出 height 21 | detrend_ph = [] 22 | for i in range(len_De_trend): 23 | detrend_ph.append(De_trend[i][2]) 24 | 25 | #print(list(detrend_ph)[0:20]) 26 | # 求 height 的 std 27 | detrend_ph_std = np.std(detrend_ph, ddof=1) 28 | 29 | arry = np.zeros(2) 30 | arry[0] = detrend_ph_std 31 | arry[1] = len_De_trend 32 | print("Detrend_signal_photons的标准方差(std)=",arry[0]) 33 | 34 | if arry[0]<10: 35 | print("标准方差小于 10 m,本步骤无需从 signal photons 中去除 outlier noise") 36 | 37 | np.savetxt("./9_detrend_ph_std.txt", arry, delimiter=",") 38 | -------------------------------------------------------------------------------- /output/1/13_filter_outlier_noise_Detrend.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 19 11:12:15 2022 4 | 5 | @author: longj 6 | """ 7 | ''' 8 | 2. 
如果 detrended signal 的标准差大于 10m 9 | 从 signal dataset 去除任何【低于 Asmooth】 【2倍 detrennd signal 标准差】的 signal value 10 | ''' 11 | 12 | import numpy as np 13 | import csv 14 | from compare_height_function import de_trend_signal_with_XYH 15 | from interp_function import save_df 16 | 17 | # 读取 within_150m_signal 18 | signal = np.genfromtxt("./8_within_150m_signal.txt",delimiter = ",") 19 | len_signal = len(signal) 20 | 21 | # 读取 Asmooth_2 22 | Asmooth_2 = np.genfromtxt("./7_Asmooth_2.txt",delimiter = ",") 23 | len_signal = len(Asmooth_2) 24 | 25 | # DE-trend signal dataset 26 | De_trend = de_trend_signal_with_XYH(signal, Asmooth_2) 27 | 28 | 29 | print("De_trend",len(De_trend)) 30 | 31 | 32 | # 存储 33 | #save_df(De_trend,'./2_De_trend.txt') 34 | 35 | with open('./9_De_trend.txt','w') as out: 36 | csv_out=csv.writer(out) 37 | # csv_out.writerow(['X','Y']) 38 | for row in De_trend: 39 | csv_out.writerow(row) -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Boost Software License - Version 1.0 - August 17th, 2003 2 | 3 | Permission is hereby granted, free of charge, to any person or organization 4 | obtaining a copy of the software and accompanying documentation covered by 5 | this license (the "Software") to use, reproduce, display, distribute, 6 | execute, and transmit the Software, and to prepare derivative works of the 7 | Software, and to permit third-parties to whom the Software is furnished to 8 | do so, all subject to the following: 9 | 10 | The copyright notices in the Software and this entire statement, including 11 | the above license grant, this restriction and the following disclaimer, 12 | must be included in all copies of the Software, in whole or in part, and 13 | all derivative works of the Software, unless such copies or derivative 14 | works are solely in the form of machine-executable object code generated by 15 | a source language processor. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT 20 | SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE 21 | FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, 22 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 24 | -------------------------------------------------------------------------------- /output/1/7_De_trend_Median_filt.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Tue May 10 00:22:31 2022 4 | 5 | @author: longj 6 | """ 7 | import numpy as np 8 | import csv 9 | from filter_function import median_filter_with_XY 10 | ''' 11 | ************ 3.3 Median filter ************ 12 | # 运行 1 次 ws = Window Size 的中值滤波 13 | # 输出surface: Asmooth 14 | 15 | 16 | *************** 中值滤波 *************** 17 | # 对 Asmooth_original surface 进行中值滤波, Asmooth_median 用于存储本轮滤波后的高程 18 | # 将每一像素点的灰度值设置为该点某邻域窗口内的所有像素点灰度值的中值. 
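A minimal sketch of the 1-D sliding-window median described above, assuming the input is a plain list of heights; the repository's actual routine is median_filter_with_XY in filter_function.py, which also carries the projected X/Y columns through:

    import numpy as np

    def median_filter_1d(heights, ws):
        # ws is assumed odd so each window has a centre sample
        half = ws // 2
        return [float(np.median(heights[i - half:i + half + 1]))
                for i in range(half, len(heights) - half)]

    # e.g. median_filter_1d([1, 9, 2, 8, 3], 3) -> [2.0, 8.0, 3.0]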
19 | ''' 20 | 21 | # 读取 signal 所有点 coordinate 和 attribute 22 | interp_A_coord = list(np.genfromtxt("./1_coord_projed_allP.txt",delimiter = ",")) 23 | 24 | # 输出的 surface 只保留 dist(横轴) 和 height(纵轴) 25 | original_surface = [] 26 | XY = [] 27 | for i in range(len(interp_A_coord)): 28 | original_surface.append((interp_A_coord[i][3],interp_A_coord[i][2])) 29 | XY.append((interp_A_coord[i][4],interp_A_coord[i][5])) 30 | 31 | # 读取计算的 window_size 32 | window_size = np.genfromtxt("./2_Window.txt",delimiter = ",")[1] 33 | window_size = round(window_size) 34 | if window_size % 2 == 0: 35 | window_size = window_size + 1 36 | 37 | Asmooth_0 = median_filter_with_XY(original_surface,window_size,XY) 38 | # Asmooth_len = len(Asmooth) 39 | 40 | # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface) 41 | with open('./3_Asmooth_0.txt','w') as out: 42 | csv_out=csv.writer(out) 43 | # csv_out.writerow(['X','Y']) 44 | for row in Asmooth_0: 45 | csv_out.writerow(row) 46 | -------------------------------------------------------------------------------- /output/1/10_De_trend_5to95_heights.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Tue May 10 16:12:11 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 | import csv 10 | 11 | ''' 12 | ************ 3.6 保留百分位高度 5 - 95 的光子 ************ 13 | ''' 14 | p_xyz = np.genfromtxt("./5_pchip_interpolated.txt",delimiter = ",") 15 | arry_len = len(p_xyz) 16 | height = np.zeros(arry_len) 17 | for i in range(arry_len): 18 | height[i] = p_xyz[i][1] 19 | 20 | # height_5to95 存储 height threshold 上下界 21 | height_5to95 = [] 22 | height_5to95.append(min(height)) 23 | height_5to95.append(max(height)) 24 | print("max_h, min_h\n",height_5to95) 25 | 26 | height_5_value = height_5to95[0] + 0.05 * (height_5to95[1] - height_5to95[0]) 27 | height_5to95.append(height_5_value) 28 | height_95_value = height_5to95[1] - 0.05 * (height_5to95[1] - height_5to95[0]) 29 | height_5to95.append(height_95_value) 30 | print("max_h, min_h, 5%_h, 95%_h\n",height_5to95) 31 | 32 | p_height_5to95 = [] 33 | for i in range(arry_len): 34 | if height[i]>height_5to95[2] and height[i]signal[i][2] or signal[i-1][2] == signal[i][2]: 38 | continue 39 | #X_original.append(Asmooth[i][0]) 40 | #Y_original.append(Asmooth[i][1]) 41 | height_original.append(signal[i][2]) 42 | dist_original.append(signal[i][3]) 43 | ''' 44 | # 进行 pchip 插值,可选: 45 | # 窗口大小多次处理 46 | # 一次处理完所有点 47 | ''' 48 | coord_interp = interp_2D_XYH(signal_len,signal_len,height_original,dist_original) 49 | save_df(coord_interp,'./10_Asmooth_3_pchip_interp(Asmooth_0).txt') 50 | -------------------------------------------------------------------------------- /output/1/16_filter_outlier_noise_Median_filt.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 19 17:49:30 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 | import csv 10 | from filter_function import median_filter_good 11 | ''' 12 | ************ 3.3 Median filter ************ 13 | # 运行 1 次 ws = Window Size 的中值滤波 14 | # 输出surface: Asmooth 15 | 16 | 17 | *************** 中值滤波 *************** 18 | # 对 Asmooth_original surface 进行中值滤波, Asmooth_median 用于存储本轮滤波后的高程 19 | # 将每一像素点的灰度值设置为该点某邻域窗口内的所有像素点灰度值的中值. 
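Note on the window size used below (an illustrative arithmetic example, not a value taken from the data): the window read from 2_Window.txt is rounded and, if even, incremented to the next odd integer so the median kernel has a centre sample; for example, a stored window of 20.3 rounds to 20 and is bumped to 21. The same adjustment appears in 7_De_trend_Median_filt.py.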
20 | ''' 21 | 22 | # 读取 Asmooth_3 Surface 的所有点 23 | Asmooth_3 = list(np.genfromtxt("./10_Asmooth_3_pchip_interp(Asmooth_0).txt",delimiter = ",")) 24 | ''' 25 | interp_A_ph = [] 26 | for i in range(len(Asmooth_3)): 27 | interp_A_ph.append(interp_A_coord[i][1]) 28 | ''' 29 | # Window_size 30 | window_size = np.genfromtxt("./2_Window.txt",delimiter = ",")[1] 31 | window_size = round(window_size) 32 | if window_size % 2 == 0: 33 | window_size = window_size + 1 34 | #print(window_size) 35 | 36 | Asmooth_4 = median_filter_good(Asmooth_3,window_size) 37 | Asmooth_len = len(Asmooth_4) 38 | 39 | """ 40 | # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface) 41 | p0_Asmooth = [] 42 | begin_index = int((window_size-1)/2) 43 | #print(begin_index) 44 | 45 | for i in range(Asmooth_len): 46 | ''' 47 | 输出为 (proj_X, proj_Y, median_filtered_height, along_distance) 48 | ''' 49 | p0_Asmooth.append((interp_A_coord[begin_index + i][0],Asmooth[i])) 50 | """ 51 | with open('./11_Asmooth_1.txt','w') as out: 52 | csv_out=csv.writer(out) 53 | # csv_out.writerow(['X','Y']) 54 | for row in Asmooth_4: 55 | csv_out.writerow(row) 56 | -------------------------------------------------------------------------------- /output/1/24_initial_ground_estimate_final_ground_points.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mon May 23 12:22:52 2022 4 | 5 | @author: longj 6 | """ 7 | import numpy as np 8 | import matplotlib.pyplot as plt 9 | import csv 10 | from interp_function import interp_linear_dist_ph,save_df_4col,interp_2D_XYH,interp_2D_XYH 11 | from compare_height_function import de_trend_signal_with_XYH,compare_2_layers_7col_,slice_unique_int_dist 12 | import matplotlib 13 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 14 | matplotlib.rcParams['axes.unicode_minus'] =False 15 | 16 | 17 | ''' 18 | --------------- 提取 linear interp surface 上方 photons ---------- 19 | ''' 20 | ground = np.genfromtxt("./18_refined_ground.txt",delimiter = ",") 21 | Z = [] 22 | dist = [] 23 | for i in range(len(ground)): 24 | Z.append(ground[i][2]) 25 | dist.append(ground[i][3]) 26 | ''' 27 | plt.plot(dist,Z,'c.',label = 'ground points') 28 | ''' 29 | surface_len = int(dist[-1]-dist[0]) 30 | surface = interp_linear_dist_ph(surface_len, Z, dist) 31 | ''' 32 | e = surface_2[1] 33 | f = surface_2[0] 34 | print("height范围",min(e),max(e)) 35 | print("dist范围",min(f),max(f)) 36 | 37 | plt.plot(f,e,'y-',label = 'linear interplation') 38 | ''' 39 | surface_ = [] 40 | # 转置 linear_interplote_surface 41 | for i in range(len(surface[0])): 42 | surface_.append((surface[0][i],surface[1][i])) 43 | 44 | ground_points = compare_2_layers_7col_(ground, surface_,0.5)[0] 45 | ground_points = compare_2_layers_7col_(ground_points, surface_,-0.5)[1] 46 | 47 | # 存储 48 | with open('./19_ground_points.txt','w') as out: 49 | csv_out=csv.writer(out) 50 | # csv_out.writerow(['X','Y']) 51 | for row in ground_points: 52 | csv_out.writerow(row) 53 | 54 | -------------------------------------------------------------------------------- /output/1/0_view_22.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 12 09:37:42 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | ''' 9 | ************** 查看 De-trend 平面 *************** 10 | 11 | ''' 12 | import numpy as np 13 | import matplotlib.pyplot as plt 14 | from pylab import * 15 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 16 | 
matplotlib.rcParams['axes.unicode_minus'] =False 17 | ''' 18 | ************************************************************ 19 | ''' 20 | 21 | # 读取 De-trend_p 22 | Asmooth_2 = np.genfromtxt("./17_ground.txt",delimiter = ",") 23 | len_Asmooth_2 = len(Asmooth_2) 24 | 25 | d_height = np.zeros(len_Asmooth_2) 26 | d_dist_along = np.zeros(len_Asmooth_2) 27 | 28 | 29 | for i in range(len_Asmooth_2): 30 | d_height[i] = Asmooth_2[i][2] 31 | d_dist_along[i] = Asmooth_2[i][3]# - Asmooth_2[0][3] 32 | 33 | ''' 34 | ************************************************************ 35 | ''' 36 | 37 | signal_ = np.genfromtxt("./13_detrended.txt",delimiter = ",") 38 | len_signal_ = len(signal_) 39 | 40 | f_height = np.zeros(len_signal_) 41 | f_dist_along = np.zeros(len_signal_) 42 | 43 | for i in range(len_signal_): 44 | f_height[i] = signal_[i][2] 45 | f_dist_along[i] = signal_[i][3]# - signal[0][3] 46 | 47 | ''' 48 | 49 | ************************************************************ 50 | ''' 51 | #用subplot()方法绘制多幅图形 52 | #plt.figure(figsize=(6,10),dpi=80) 53 | plt.figure(figsize=(18,6),dpi=80) 54 | 55 | plt.title("title") 56 | plt.xlabel("沿轨道距离 (m)") 57 | plt.ylabel("高程 (m)") 58 | plt.scatter(f_dist_along, f_height, color = 'silver',marker = '.', label='De-trended 信号光子') 59 | #plt.plot(s_dist_along, s_height, 'b', label='下边界') 60 | plt.plot(d_dist_along, d_height, 'bo', label='可能的 ground 光子') 61 | plt.legend() 62 | 63 | 64 | plt.savefig('potential_ground_photons_1.png') -------------------------------------------------------------------------------- /output/1/17_filter_outlier_noise_median+average_filt_10times.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 19 17:56:02 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 | import csv 10 | from filter_function import median_filter_good,average_filter_good 11 | 12 | ''' 13 | ************** 中值滤波 + 移动平均窗口滤波 10次 ************** 14 | ''' 15 | 16 | 17 | # 读取 window_size 18 | window_size = list(np.genfromtxt("./2_Window.txt",delimiter = ","))[1] 19 | 20 | # 读取 relief 21 | height_5and95 = list(np.genfromtxt("./6_relief.txt",delimiter = ",")) 22 | relief = height_5and95[3] - height_5and95[2] 23 | 24 | # 计算 SmoothSize 25 | SmoothSize = 2 * window_size 26 | if relief > 200 and (relief < 400 or relief == 400): 27 | SmoothSize = int(SmoothSize / 2) 28 | elif relief > 400 and (relief < 900 or relief == 900): 29 | SmoothSize = int(SmoothSize / 3) 30 | elif relief > 900: 31 | SmoothSize = int(SmoothSize / 4) 32 | else: 33 | SmoothSize = int(SmoothSize) 34 | # SmoothSize 必须为奇数,因为滤算子波需要奇数长度 35 | if SmoothSize % 2 == 0: 36 | SmoothSize = SmoothSize + 1 37 | 38 | 39 | # 读取 4_Asmooth_4 的所有点 40 | Asmooth_1 = list(np.genfromtxt("./11_Asmooth_1.txt",delimiter = ",")) 41 | 42 | print("len(Asmooth_1)",len(Asmooth_1)) 43 | print("SmoothSize",SmoothSize) 44 | 45 | average_filtered_surface = Asmooth_1 46 | for i in range(10): 47 | median_filtered_surface = median_filter_good(average_filtered_surface,SmoothSize) 48 | average_filtered_surface = average_filter_good(median_filtered_surface , SmoothSize) 49 | 50 | # 输出 51 | Asmooth_5 = average_filtered_surface 52 | 53 | print("len(Asmooth_5)",len(Asmooth_5)) 54 | 55 | # 存储 Asmooth_2 56 | with open('./12_Asmooth_2.txt','w') as out: 57 | csv_out=csv.writer(out) 58 | # csv_out.writerow(['X','Y']) 59 | for row in Asmooth_5: 60 | csv_out.writerow(row) -------------------------------------------------------------------------------- /command.txt: 
-------------------------------------------------------------------------------- 1 | # 下方 CMD 命令为使用 ATL03 数据反演地表面的脚本顺序运行命令。 2 | # 经测试,待安装好 anaconda 环境后即可顺利运行。 3 | 4 | # ---------- CMD 开始 ------------ 5 | conda activate python37(anaconda 环境) 6 | 7 | E: 8 | 9 | cd E:./script0830 10 | 11 | python 1_extract_and_norm_t_h_3rdd=0.py 12 | 13 | cd ./output 14 | 15 | python 2_open3d_k_20_histogram_x为t.py 16 | 17 | cd ./1 18 | 19 | python 3_hist_Gauss_fit.py 20 | 21 | python 4_get_signal.py 22 | 23 | python 5_4326_proj_4550.py 24 | 25 | python 6_variable_windows.py 26 | 27 | python 7_De_trend_Median_filt.py 28 | 29 | python 8_De_trend_ref_DEM_limit.py 30 | 31 | python 9_De_trend_Asmooth插值.py 32 | 33 | python 10_De_trend_5to95_heights.py 34 | 35 | python 11_De_trend_median+average_filt_10times.py 36 | 37 | python 12_filter_outlier_noise_150m.py 38 | 39 | python 13_filter_outlier_noise_Detrend.py 40 | 41 | python 14_filter_outlier_noise_2std.py 42 | 43 | python 15_filter_outlier_noise_Asmooth插值.py 44 | 45 | python 16_filter_outlier_noise_Median_filt.py 46 | 47 | python 17_filter_outlier_noise_median+average_filt_10times.py 48 | 49 | python 18_filter_outlier_noise_Detrend.py 50 | 51 | python 19_initial_ground_estimate_cutOff_lowerbound.py 52 | 53 | python 0_view_19.py 54 | 55 | python 20_initial_ground_estimate_potential_ground_points.py 56 | 57 | python 0_view_20.py 58 | 59 | python 21_initial_ground_estimate_cutOff_upperbound.py 60 | 61 | python 0_view_21.py 62 | 63 | python 22_initial_ground_estimate_ground_points.py 64 | 65 | python 0_view_22.py 66 | 67 | python 23_initial_ground_estimate_refine_ground.py 68 | 69 | python 0_view_23.py 70 | 71 | python 24_initial_ground_estimate_final_ground_points.py 72 | 73 | python 0_view_24.py 74 | 75 | exit 76 | # ---------- CMD 结束 ------------ 77 | 78 | # 该 CMD 命令将所有脚本完全运行结束,时间约花费 30 分钟。 79 | # 倘若换为其他 ATL03 数据,则脚本 3_hist_Gauss_fit.py 中的参数暂时需要手动改变。 80 | # 使用数学方法自动加载脚本 3_hist_Gauss_fit.py 参数的新版本脚本后续将在 github 更新。 81 | 82 | # 此源码压缩包内容为 version_1 版本,未使用类、继承、多态等程序设计中提高代码利用率的方法。将在 version_2 版本优化代码,预计代码量将减少 1/5,运行速度也将进一步提高。 -------------------------------------------------------------------------------- /output/1/12_filter_outlier_noise_150m.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 19 11:12:15 2022 4 | 5 | @author: longj 6 | """ 7 | ''' 8 | ************** 4.7 从 signal 去除 noise *************** 9 | 1. 去除 signal 中超过 Asmooth 150 m 的光子 10 | 2. 如果 detrended signal 的标准差大于 10m 11 | 从 signal dataset 去除任何【低于 Asmooth】 【2倍 detrennd signal 标准差】的 signal value 12 | 3. 使用剩余的 signal photons 用 pchip 插值计算 1 个新 Asmooth surface, 13 | 然后使用 Window_size 进行 median 滤波, 14 | 再使用 SmoothSize 进行 【median fliter + average filter】 10 次。 15 | 4. 
De-trend 信号光子操作: 16 | 从 Asmooth 表面高度值中减去信号高度值来去除信号光子的趋势。 17 | 使用【去趋势】的高度进行 surface finding。 18 | ''' 19 | 20 | ''' 21 | # 去除上界(Asmooth + 150m)外 signal photons 的方法 22 | 23 | Asmooth 24 | 25 | interp_Asmooth 26 | x 轴 interp_Asmooth_surface_along-track_distance 27 | y 轴_1 interp_Asmooth_surface_height 28 | y 轴_2 interp_Asmooth_surface_height + 150 m 29 | 30 | signal photons 31 | x 轴_1 signal_along-track_distance 32 | x 轴_2 round(signal_along-track_distance) # 原因:与 interp_Asmooth 的 unit 统一 33 | y 轴 signal_height 34 | 35 | 去除上界外 signal photons 即相同 x 比较 y_2 36 | ''' 37 | import numpy as np 38 | import csv 39 | from compare_height_function import compare_2_layers_ 40 | 41 | 42 | # 读取 Asmooth surface 43 | Asmooth_2 = np.genfromtxt("./7_Asmooth_2.txt",delimiter = ",") 44 | len_Asmooth_2 = len(Asmooth_2) 45 | # Asmooth surface 可使用的最远 dist 46 | #As_2_min_dist = 40500000 47 | #As_2_max_dist = 41750000 48 | 49 | # 读取 All_signal_p 50 | signal = list(np.genfromtxt("./1_coord_projed_allP.txt",delimiter = ",")) 51 | len_signal = len(signal) 52 | print("len_signal",len_signal) 53 | 54 | ''' 55 | # 这一函数不仅会比较两个 layer 的 height, 56 | # 而且是取的当 all_point 数据集中每个单元多个光子 中最低值height 与surface height 比较 57 | ''' 58 | within_150m_signal = compare_2_layers_(signal, Asmooth_2, 150)[0] 59 | 60 | 61 | with open('./8_within_150m_signal.txt','w') as out: 62 | csv_out=csv.writer(out) 63 | # csv_out.writerow(['X','Y']) 64 | for row in within_150m_signal: 65 | csv_out.writerow(row) -------------------------------------------------------------------------------- /output/1/0_view_19.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 12 09:37:42 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | ''' 9 | ************** 查看 De-trend 平面 *************** 10 | 11 | ''' 12 | import numpy as np 13 | import matplotlib.pyplot as plt 14 | from pylab import * 15 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 16 | matplotlib.rcParams['axes.unicode_minus'] =False 17 | ''' 18 | ************************************************************ 19 | ''' 20 | 21 | # 读取 De-trend_p 22 | Asmooth_2 = np.genfromtxt("./14_lowerbound.txt",delimiter = ",") 23 | len_Asmooth_2 = len(Asmooth_2) 24 | 25 | # d_X = np.zeros(len_Asmooth_2) 26 | # d_Y = np.zeros(len_Asmooth_2) 27 | d_height = np.zeros(len_Asmooth_2) 28 | d_dist_along = np.zeros(len_Asmooth_2) 29 | 30 | 31 | for i in range(len_Asmooth_2): 32 | d_height[i] = Asmooth_2[i][2] 33 | d_dist_along[i] = Asmooth_2[i][3]# - Asmooth_2[0][3] 34 | 35 | ''' 36 | ************************************************************ 37 | ''' 38 | signal = np.genfromtxt("./14_ground.txt",delimiter = ",") 39 | len_signal = len(signal) 40 | 41 | s_height = np.zeros(len_signal) 42 | s_dist_along = np.zeros(len_signal) 43 | 44 | 45 | for i in range(len_signal): 46 | s_height[i] = signal[i][2] 47 | s_dist_along[i] = signal[i][3]# - signal[0][3] 48 | 49 | ''' 50 | ************************************************************ 51 | ''' 52 | 53 | signal_ = np.genfromtxt("./13_detrended.txt",delimiter = ",") 54 | len_signal_ = len(signal_) 55 | 56 | f_height = np.zeros(len_signal_) 57 | f_dist_along = np.zeros(len_signal_) 58 | 59 | for i in range(len_signal_): 60 | f_height[i] = signal_[i][2] 61 | f_dist_along[i] = signal_[i][3]# - signal[0][3] 62 | ''' 63 | 64 | ************************************************************ 65 | ''' 66 | #用subplot()方法绘制多幅图形 67 | #plt.figure(figsize=(6,10),dpi=80) 68 | plt.figure(figsize=(18,6),dpi=80) 69 | 70 | 71 | 
plt.title("title") 72 | plt.xlabel("沿轨道距离 (m)") 73 | plt.ylabel("高程 (m)") 74 | plt.scatter(f_dist_along, f_height, color = 'silver',marker = '.', label='De-trended 信号光子') 75 | plt.plot(s_dist_along, s_height, 'bo', label='初步估计的 ground 光子') 76 | plt.plot(d_dist_along, d_height, 'r', label='下边界') 77 | plt.legend() 78 | 79 | 80 | plt.savefig('下边界.png') -------------------------------------------------------------------------------- /output/1/0_view_21.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 12 09:37:42 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | ''' 9 | ************** 查看 De-trend 平面 *************** 10 | 11 | ''' 12 | import numpy as np 13 | import matplotlib.pyplot as plt 14 | from pylab import * 15 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 16 | matplotlib.rcParams['axes.unicode_minus'] =False 17 | ''' 18 | ************************************************************ 19 | ''' 20 | 21 | # 读取 De-trend_p 22 | Asmooth_2 = np.genfromtxt("./16_upperbound.txt",delimiter = ",") 23 | len_Asmooth_2 = len(Asmooth_2) 24 | 25 | # d_X = np.zeros(len_Asmooth_2) 26 | # d_Y = np.zeros(len_Asmooth_2) 27 | d_height = np.zeros(len_Asmooth_2) 28 | d_dist_along = np.zeros(len_Asmooth_2) 29 | 30 | 31 | for i in range(len_Asmooth_2): 32 | d_height[i] = Asmooth_2[i][2] 33 | d_dist_along[i] = Asmooth_2[i][3]# - Asmooth_2[0][3] 34 | 35 | 36 | ''' 37 | ************************************************************ 38 | ''' 39 | signal = np.genfromtxt("./16_top.txt",delimiter = ",") 40 | len_signal = len(signal) 41 | 42 | s_height = np.zeros(len_signal) 43 | s_dist_along = np.zeros(len_signal) 44 | 45 | 46 | for i in range(len_signal): 47 | s_height[i] = signal[i][2] 48 | s_dist_along[i] = signal[i][3]# - signal[0][3] 49 | 50 | 51 | ''' 52 | ************************************************************ 53 | ''' 54 | 55 | signal_ = np.genfromtxt("./13_detrended.txt",delimiter = ",") 56 | len_signal_ = len(signal_) 57 | 58 | f_height = np.zeros(len_signal_) 59 | f_dist_along = np.zeros(len_signal_) 60 | 61 | for i in range(len_signal_): 62 | f_height[i] = signal_[i][2] 63 | f_dist_along[i] = signal_[i][3]# - signal[0][3] 64 | 65 | ''' 66 | 67 | ************************************************************ 68 | ''' 69 | #用subplot()方法绘制多幅图形 70 | #plt.figure(figsize=(6,10),dpi=80) 71 | plt.figure(figsize=(18,6),dpi=80) 72 | 73 | plt.title("title") 74 | plt.xlabel("沿轨道距离 (m)") 75 | plt.ylabel("高程 (m)") 76 | plt.scatter(f_dist_along, f_height, color = 'silver',marker = '.', label='De-trended 信号光子') 77 | plt.plot(s_dist_along, s_height, 'bo', label='初步估计的 ground 光子') 78 | plt.plot(d_dist_along, d_height, 'r', label='上边界') 79 | plt.legend() 80 | 81 | 82 | plt.savefig('上边界.png') -------------------------------------------------------------------------------- /Readme.txt: -------------------------------------------------------------------------------- 1 | # 记得放ATL03文件到此目录 2 | 3 | 4 | # 算法理论基础参考:2019_ICESat-2 ATBD for ATL08_r002_v2.pdf 5 | 6 | 7 | # 下方 CMD 命令为使用 ATL03 数据反演地表面的脚本顺序运行命令。 8 | ## 经测试,待安装好 anaconda 环境后即可顺利运行。 9 | 10 | ## anaconda 环境 11 | ### 语言:Python 3.7 12 | ### 库:sys, os, shutil, numpy, csv, pandas, scipy, h5py, open3d, math, matplotlib, pyproj, osgeo, gdal, gdalconst 13 | 14 | ## ---------- CMD 开始 ------------ 15 | conda activate python37(anaconda 环境) 16 | 17 | E: 18 | 19 | cd E:./script0830 20 | 21 | python 1_extract_and_norm_t_h_3rdd=0.py 22 | 23 | cd ./output 24 | 25 | python 
2_open3d_k_20_histogram_x为t.py 26 | 27 | cd ./1 28 | 29 | python 3_hist_Gauss_fit.py 30 | 31 | python 4_get_signal.py 32 | 33 | python 5_4326_proj_4550.py 34 | 35 | python 6_variable_windows.py 36 | 37 | python 7_De_trend_Median_filt.py 38 | 39 | python 8_De_trend_ref_DEM_limit.py 40 | 41 | python 9_De_trend_Asmooth插值.py 42 | 43 | python 10_De_trend_5to95_heights.py 44 | 45 | python 11_De_trend_median+average_filt_10times.py 46 | 47 | python 12_filter_outlier_noise_150m.py 48 | 49 | python 13_filter_outlier_noise_Detrend.py 50 | 51 | python 14_filter_outlier_noise_2std.py 52 | 53 | python 15_filter_outlier_noise_Asmooth插值.py 54 | 55 | python 16_filter_outlier_noise_Median_filt.py 56 | 57 | python 17_filter_outlier_noise_median+average_filt_10times.py 58 | 59 | python 18_filter_outlier_noise_Detrend.py 60 | 61 | python 19_initial_ground_estimate_cutOff_lowerbound.py 62 | 63 | python 0_view_19.py 64 | 65 | python 20_initial_ground_estimate_potential_ground_points.py 66 | 67 | python 0_view_20.py 68 | 69 | python 21_initial_ground_estimate_cutOff_upperbound.py 70 | 71 | python 0_view_21.py 72 | 73 | python 22_initial_ground_estimate_ground_points.py 74 | 75 | python 0_view_22.py 76 | 77 | python 23_initial_ground_estimate_refine_ground.py 78 | 79 | python 0_view_23.py 80 | 81 | python 24_initial_ground_estimate_final_ground_points.py 82 | 83 | python 0_view_24.py 84 | 85 | exit 86 | ## ---------- CMD 结束 ------------ 87 | 88 | ## 该 CMD 命令将所有脚本完全运行结束,时间约花费 30 分钟。 89 | ## 倘若换为其他 ATL03 数据,则脚本 3_hist_Gauss_fit.py 中的参数暂时需要手动改变。 90 | ## 使用数学方法自动加载脚本 3_hist_Gauss_fit.py 参数的新版本脚本后续将在 github 更新。 91 | 92 | ## 此源码压缩包内容为 version_1 版本,未使用类、继承、多态等程序设计中提高代码利用率的方法。将在 version_2 版本优化代码,预计代码量将减少 1/5,运行速度也将进一步提高。 -------------------------------------------------------------------------------- /output/1/0_view_23.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 12 09:37:42 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | ''' 9 | ************** 查看 De-trend 平面 *************** 10 | 11 | ''' 12 | import numpy as np 13 | import matplotlib.pyplot as plt 14 | from pylab import * 15 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 16 | matplotlib.rcParams['axes.unicode_minus'] =False 17 | ''' 18 | ************************************************************ 19 | ''' 20 | 21 | # 读取 De-trend_p 22 | Asmooth_2 = np.genfromtxt("./18_ground_final.txt",delimiter = ",") 23 | len_Asmooth_2 = len(Asmooth_2) 24 | 25 | # d_X = np.zeros(len_Asmooth_2) 26 | # d_Y = np.zeros(len_Asmooth_2) 27 | d_height = np.zeros(len_Asmooth_2) 28 | d_dist_along = np.zeros(len_Asmooth_2) 29 | 30 | 31 | for i in range(len_Asmooth_2): 32 | d_height[i] = Asmooth_2[i][1] 33 | d_dist_along[i] = Asmooth_2[i][0]# - Asmooth_2[0][3] 34 | 35 | 36 | ''' 37 | ************************************************************ 38 | ''' 39 | signal = np.genfromtxt("./18_refined_ground.txt",delimiter = ",") 40 | len_signal = len(signal) 41 | 42 | s_height = np.zeros(len_signal) 43 | s_dist_along = np.zeros(len_signal) 44 | 45 | 46 | for i in range(len_signal): 47 | s_height[i] = signal[i][2] 48 | s_dist_along[i] = signal[i][3]# - signal[0][3] 49 | 50 | 51 | ''' 52 | ************************************************************ 53 | ''' 54 | 55 | signal_ = np.genfromtxt("./13_detrended.txt",delimiter = ",") 56 | len_signal_ = len(signal_) 57 | 58 | f_height = np.zeros(len_signal_) 59 | f_dist_along = np.zeros(len_signal_) 60 | 61 | for i in range(len_signal_): 62 | f_height[i] = 
signal_[i][2] 63 | f_dist_along[i] = signal_[i][3]# - signal[0][3] 64 | 65 | ''' 66 | 67 | ************************************************************ 68 | ''' 69 | #用subplot()方法绘制多幅图形 70 | #plt.figure(figsize=(6,10),dpi=80) 71 | plt.figure(figsize=(18,6),dpi=80) 72 | 73 | plt.title("title") 74 | plt.xlabel("沿轨道距离 (m)") 75 | plt.ylabel("高程 (m)") 76 | plt.scatter(f_dist_along, f_height, color = 'silver',marker = '.', label='De-trended 信号光子') 77 | plt.plot(s_dist_along, s_height, 'bo', label='地面点') 78 | plt.plot(d_dist_along, d_height, 'r-', label='精细化的地面') 79 | plt.legend() 80 | 81 | 82 | plt.savefig('地面点平滑表面.png') -------------------------------------------------------------------------------- /output/1/0_view_20.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 12 09:37:42 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | ''' 9 | ************** 查看 De-trend 平面 *************** 10 | 11 | ''' 12 | import numpy as np 13 | import matplotlib.pyplot as plt 14 | from pylab import * 15 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 16 | matplotlib.rcParams['axes.unicode_minus'] =False 17 | ''' 18 | ************************************************************ 19 | ''' 20 | 21 | # 读取 De-trend_p 22 | Asmooth_2 = np.genfromtxt("./15_potential_ground.txt",delimiter = ",") 23 | len_Asmooth_2 = len(Asmooth_2) 24 | 25 | # d_X = np.zeros(len_Asmooth_2) 26 | # d_Y = np.zeros(len_Asmooth_2) 27 | d_height = np.zeros(len_Asmooth_2) 28 | d_dist_along = np.zeros(len_Asmooth_2) 29 | 30 | 31 | for i in range(len_Asmooth_2): 32 | d_height[i] = Asmooth_2[i][2] 33 | d_dist_along[i] = Asmooth_2[i][3]# - Asmooth_2[0][3] 34 | 35 | ''' 36 | ************************************************************ 37 | ''' 38 | signal = np.genfromtxt("./14_lowerbound.txt",delimiter = ",") 39 | len_signal = len(signal) 40 | 41 | s_height = np.zeros(len_signal) 42 | s_dist_along = np.zeros(len_signal) 43 | 44 | 45 | for i in range(len_signal): 46 | s_height[i] = signal[i][2] 47 | s_dist_along[i] = signal[i][3]# - signal[0][3] 48 | 49 | ''' 50 | ************************************************************ 51 | ''' 52 | 53 | signal_ = np.genfromtxt("./13_detrended.txt",delimiter = ",") 54 | len_signal_ = len(signal_) 55 | 56 | f_height = np.zeros(len_signal_) 57 | f_dist_along = np.zeros(len_signal_) 58 | 59 | for i in range(len_signal_): 60 | f_height[i] = signal_[i][2] 61 | f_dist_along[i] = signal_[i][3]# - signal[0][3] 62 | ''' 63 | 64 | ************************************************************ 65 | ''' 66 | #用subplot()方法绘制多幅图形 67 | #plt.figure(figsize=(6,10),dpi=80) 68 | plt.figure(figsize=(18,6),dpi=80) 69 | 70 | plt.title("title") 71 | plt.xlabel("沿轨道距离 (m)") 72 | plt.ylabel("高程 (m)") 73 | plt.scatter(f_dist_along, f_height, color = 'silver',marker = '.', label='De-trended 信号光子') 74 | plt.plot(s_dist_along, s_height, 'b', label='下边界') 75 | plt.plot(d_dist_along, d_height, 'ro', label='可能的 ground 光子') 76 | plt.legend() 77 | 78 | 79 | plt.savefig('potential_ground_photons.png') -------------------------------------------------------------------------------- /output/1/22_initial_ground_estimate_ground_points.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mon May 23 12:22:52 2022 4 | 5 | @author: longj 6 | """ 7 | import numpy as np 8 | import matplotlib.pyplot as plt 9 | import csv 10 | from interp_function import interp_linear_dist_ph,save_df 11 | 
from compare_height_function import compare_2_layers_7col_ 12 | import matplotlib 13 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 14 | matplotlib.rcParams['axes.unicode_minus'] =False 15 | 16 | 17 | ''' 18 | # ---------------- linear 插值 ------------------- 19 | ''' 20 | # 读取 within_150m_signal 的所有点 XYH 和 along-track_dist 21 | lowerbound = np.genfromtxt("./16_upperbound.txt",delimiter = ",") 22 | along_dist = [] 23 | lb_Z = [] 24 | 25 | for i in range(len(lowerbound)): 26 | along_dist.append(lowerbound[i][3]) 27 | lb_Z.append(lowerbound[i][2]) 28 | 29 | 30 | dist_lowerbound = int(along_dist[-1] - along_dist[0]) 31 | interp_surface = interp_linear_dist_ph(dist_lowerbound, lb_Z, along_dist) 32 | 33 | 34 | # 存储 35 | save_df(interp_surface,'./17_linear_interp_surface.txt') 36 | 37 | ''' 38 | --------------- 提取 linear interp surface 上方 photons ---------- 39 | ''' 40 | surface = np.genfromtxt("./17_linear_interp_surface.txt",delimiter = ",") 41 | #np.genfromtxt('',delimiter = ",") 42 | signal = np.genfromtxt("./15_potential_ground.txt",delimiter = ",") 43 | #potential_ground = compare_2_layers_2col(detrended, interp_surface, 0)[1] 44 | surface_0 = surface[0][0] 45 | surface__1 = surface[-1][0] 46 | #print(surface_0,"********************") 47 | 48 | for i in range(len(signal)): 49 | #print(i,slice_signal[i][0],surface_0) 50 | if signal[i][3]>surface_0: 51 | print(">> ",i,signal[i][3],surface_0) 52 | s_overlay_begin_idx = i 53 | break 54 | 55 | for i in range(1,len(signal)): 56 | if signal[-i][3]> ",len(signal)-i,signal[-i][3],surface__1) 58 | #print(len(signal)-i+1,signal[-i+1][0],surface__1) 59 | s_overlay_end_idx = len(signal)- i 60 | break 61 | 62 | ''' 63 | --------------- 提取 upperbound下方光子 ---------- 64 | ''' 65 | signal = signal[s_overlay_begin_idx:s_overlay_end_idx] 66 | 67 | result = compare_2_layers_7col_(signal,surface,0) 68 | lower_set = result[0] 69 | 70 | # 存储 71 | with open('./17_ground.txt','w') as out: 72 | csv_out=csv.writer(out) 73 | # csv_out.writerow(['X','Y']) 74 | for row in lower_set: 75 | csv_out.writerow(row) 76 | -------------------------------------------------------------------------------- /output/1/11_De_trend_median+average_filt_10times.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Tue May 10 23:12:37 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 | import csv 10 | from filter_function import average_filter_with_empty,median_filter_with_empty 11 | 12 | ''' 13 | ------------------------------------------ 14 | ***** 中值滤波 + 移动平均窗口滤波 10次 ***** 15 | ****************** Main ****************** 16 | ------------------------------------------ 17 | ''' 18 | 19 | # 读取 window_size 20 | window_size = list(np.genfromtxt("./2_Window.txt",delimiter = ","))[1] 21 | 22 | # 读取 relief 23 | height_5and95 = list(np.genfromtxt("./6_relief.txt",delimiter = ",")) 24 | relief = height_5and95[3] - height_5and95[2] 25 | 26 | # 计算 SmoothSize 27 | SmoothSize = 2 * window_size 28 | if relief > 200 and (relief < 400 or relief == 400): 29 | SmoothSize = int(SmoothSize / 2) 30 | elif relief > 400 and (relief < 900 or relief == 900): 31 | SmoothSize = int(SmoothSize / 3) 32 | elif relief > 900: 33 | SmoothSize = int(SmoothSize / 4) 34 | else: 35 | SmoothSize = int(SmoothSize) 36 | # SmoothSize 必须为奇数,因为滤算子波需要奇数长度 37 | if SmoothSize % 2 == 0: 38 | SmoothSize = SmoothSize + 1 39 | ''' 40 | # 读取 p_height_5to95 的所有点 XYH 和 along-track_dist 41 | Asmooth_1 = 
list(np.genfromtxt("./output/6_p_height_5to95.txt",delimiter = ",")) 42 | # 提取 (along-track_dist,height) 成为一个单独数组,对其进行中值滤波和均值滤波 43 | 44 | 45 | print("len(Asmooth_1)",len(Asmooth_1)) 46 | print("SmoothSize",SmoothSize) 47 | # 进行10次 中值滤波 + 均值滤波 48 | 49 | ''' 50 | # 读取 p_height_5to95 的所有点 XYH 和 along-track_dist 51 | Asmooth_1_ = list(np.genfromtxt("./6_p_height_5to95.txt",delimiter = ",")) 52 | # 提取 (along-track_dist,height) 成为一个单独数组,对其进行中值滤波和均值滤波 53 | 54 | Asmooth_1 = [] 55 | for i in range(len(Asmooth_1_)): 56 | Asmooth_1.append((Asmooth_1_[i][0],Asmooth_1_[i][1])) 57 | 58 | print("len(Asmooth_1)",len(Asmooth_1)) 59 | print("SmoothSize",SmoothSize) 60 | # 进行10次 中值滤波 + 均值滤波 61 | 62 | average_filtered_surface = Asmooth_1 63 | for i in range(10): 64 | median_filtered_surface = median_filter_with_empty(average_filtered_surface,SmoothSize) 65 | average_filtered_surface = average_filter_with_empty(median_filtered_surface,SmoothSize) 66 | # 输出 67 | Asmooth_2 = average_filtered_surface 68 | print("len(Asmooth_2)",len(Asmooth_2)) 69 | 70 | # 存储 Asmooth_2 71 | with open('./7_Asmooth_2.txt','w') as out: 72 | csv_out=csv.writer(out) 73 | # csv_out.writerow(['X','Y']) 74 | for row in Asmooth_2: 75 | csv_out.writerow(row) 76 | -------------------------------------------------------------------------------- /output/1/5_4326_proj_4550.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sun May 8 15:28:14 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 | import csv 10 | from pyproj import Transformer 11 | import os 12 | ''' 13 | *************** 坐标投影函数 **************** 14 | # 梨树县玉米分布 tiff 为 CGCS2000 / 3-degree Gauss-Kruger CM 123E 坐标系 15 | # 将 photons 的 (longitude, latitude) 投影为 CGCS2000 投影坐标系 (X, Y) 16 | ''' 17 | def lonLat_to_CGCS2000_proj(lats_tu, lons_tu, from_epsg="epsg:4326", to_epsg="epsg:4550"): 18 | 19 | transformer = Transformer.from_crs(from_epsg, to_epsg) 20 | # process points at a time in a tuple 21 | x,y = transformer.transform(lats_tu, lons_tu) 22 | return x,y 23 | 24 | 25 | ''' 26 | ----------------------------------------------------- 27 | ************** Main() ************* 28 | ----------------------------------------------------- 29 | ''' 30 | 31 | ''' 32 | # 读取 bin 所有点坐标(longitude, latitude, elevation) 33 | # p_xyz = np.genfromtxt("Signal_p_xyz_250.txt",delimiter = ",") 34 | # arry_len = len(p_xyz) 35 | 36 | 37 | # 用数组存 (longitude, latitude, elevation) 38 | lon_arry = np.zeros(arry_len) 39 | lat_arry = np.zeros(arry_len) 40 | 41 | for i in range(arry_len): 42 | lon_arry[i] = p_xyz[i][0] 43 | lat_arry[i] = p_xyz[i][1] 44 | ''' 45 | 46 | ''' 47 | ************ 3.1 连接2个数据集作 input ************ 48 | # (1) DRAGANN 滤波获得的 signal 光子;(2) ATL03 signal_config_ph 为 3-4 的点 49 | ''' 50 | # 读取 signal_DRAGANN 的所有点 coordinate 和 attribute 51 | signal = list(np.genfromtxt("./Signal_p_xyz.txt",delimiter = ",")) 52 | 53 | # 读取 ATL03_3and4 的所有点 coordinate 和 attribute 54 | path = './' 55 | dirs = os.listdir(path) 56 | for dir in dirs: 57 | if dir[-9:] == "3and4.txt": 58 | conf_3and4 = list(np.genfromtxt(dir,delimiter = ",")) 59 | 60 | signal.extend(conf_3and4) 61 | 62 | # 获取列表的第二个元素 63 | def takeForth(elem): 64 | return elem[3] 65 | 66 | signal.sort(key=takeForth) 67 | arry_len = len(signal) 68 | 69 | # 用数组存 (longitude, latitude, elevation) 70 | lon_arry = np.zeros(arry_len) 71 | lat_arry = np.zeros(arry_len) 72 | 73 | for i in range(arry_len): 74 | lon_arry[i] = signal[i][0] 75 | lat_arry[i] = signal[i][1] 76 | 
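# A minimal usage sketch of the helper defined above (illustrative coordinates only,
# assuming pyproj is installed): lonLat_to_CGCS2000_proj takes latitudes first, then
# longitudes, and returns the two projected coordinate arrays in metres, e.g.
#     x, y = lonLat_to_CGCS2000_proj((43.5, 43.6), (124.3, 124.4))
# Note that the loop below swaps the returned order when assembling Projected_XY.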
77 | # 将 array 转化为 tuple 78 | lon_tu = tuple(lon_arry) 79 | lat_tu = tuple(lat_arry) 80 | 81 | # 进行点地理坐标投影 82 | Projected_XY = [] 83 | proj = lonLat_to_CGCS2000_proj(lat_tu, lon_tu) 84 | for i in range(arry_len): 85 | Projected_XY.append((proj[1][i],proj[0][i])) 86 | 87 | 88 | # 存储 X,Y,height,maize_flag 89 | coord_projed = [] 90 | for i in range(arry_len): 91 | coord_projed.append((signal[i][0],signal[i][1],signal[i][2],signal[i][3],Projected_XY[i][0],Projected_XY[i][1])) 92 | with open('./1_coord_projed_allP.txt','w') as out: 93 | csv_out=csv.writer(out) 94 | # csv_out.writerow(['X','Y']) 95 | for row in coord_projed: 96 | csv_out.writerow(row) 97 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 2022 - Undergraduate Thesis Source Code 2 | 3 | This repository contains a complete workflow for extracting ground elevation from ATL03 photon data, based on the ATL08 land and vegetation height product algorithms from ICESat-2. The project reproduces the theoretical basis and implements it using Python scripts for batch processing of surface elevation inversion. 4 | 5 | ## 📚 Algorithm Reference 6 | 7 | - Theoretical basis: `2019_ICESat-2 ATBD for ATL08_r002_v2.pdf` 8 | 9 | ## 🚀 Execution Instructions (Windows CMD) 10 | 11 | Below are the sequential commands to run scripts for surface inversion using ATL03 data. Tested and verified in the Anaconda environment. 12 | 13 | > Note: All scripts assume you are in the `E:/script0830` directory. 14 | 15 | ### ✅ Environment Setup 16 | 17 | - Language: Python 3.7 18 | - Required packages: 19 | ``` 20 | sys, os, shutil, numpy, csv, pandas, scipy, h5py, open3d, 21 | math, matplotlib, pyproj, osgeo, gdal, gdalconst 22 | ``` 23 | 24 | ### ▶️ CMD Commands 25 | 26 | ```cmd 27 | conda activate python37 28 | 29 | E: 30 | 31 | cd E:/script0830 32 | 33 | python 1_extract_and_norm_t_h_3rdd=0.py 34 | 35 | cd ./output 36 | 37 | python 2_open3d_k_20_histogram_x为t.py 38 | 39 | cd ./1 40 | 41 | python 3_hist_Gauss_fit.py 42 | python 4_get_signal.py 43 | python 5_4326_proj_4550.py 44 | python 6_variable_windows.py 45 | python 7_De_trend_Median_filt.py 46 | python 8_De_trend_ref_DEM_limit.py 47 | python 9_De_trend_Asmooth插值.py 48 | python 10_De_trend_5to95_heights.py 49 | python 11_De_trend_median+average_filt_10times.py 50 | python 12_filter_outlier_noise_150m.py 51 | python 13_filter_outlier_noise_Detrend.py 52 | python 14_filter_outlier_noise_2std.py 53 | python 15_filter_outlier_noise_Asmooth插值.py 54 | python 16_filter_outlier_noise_Median_filt.py 55 | python 17_filter_outlier_noise_median+average_filt_10times.py 56 | python 18_filter_outlier_noise_Detrend.py 57 | python 19_initial_ground_estimate_cutOff_lowerbound.py 58 | python 0_view_19.py 59 | python 20_initial_ground_estimate_potential_ground_points.py 60 | python 0_view_20.py 61 | python 21_initial_ground_estimate_cutOff_upperbound.py 62 | python 0_view_21.py 63 | python 22_initial_ground_estimate_ground_points.py 64 | python 0_view_22.py 65 | python 23_initial_ground_estimate_refine_ground.py 66 | python 0_view_23.py 67 | python 24_initial_ground_estimate_final_ground_points.py 68 | python 0_view_24.py 69 | 70 | exit 71 | ``` 72 | 73 | > ⏱️ Estimated runtime for all scripts: ~30 minutes. 74 | 75 | ## Notes 76 | 77 | - When using different ATL03 datasets, manual adjustment may be needed for parameters in `3_hist_Gauss_fit.py`. 
78 | - A future update will automate this parameter tuning. 79 | - This is version 1 of the code and does not yet use object-oriented features such as classes or inheritance. 80 | - Version 2 will include optimized code structure, reducing code size by ~20% and improving execution speed. 81 | 82 | ## Visual Examples 83 | 84 | ![Gauss_fit](https://github.com/juejue123/2022-/assets/82886491/aa275da1-6b80-4601-ad6c-6584158d637d) 85 | ![Final Ground Points](https://github.com/juejue123/2022-/assets/82886491/b6d4b2ef-9f20-472d-bc8d-e2b867226310) 86 | -------------------------------------------------------------------------------- /output/1/4_get_signal.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Tue May 3 00:13:06 2022 4 | 5 | @author: longj 6 | """ 7 | import numpy as np 8 | import csv 9 | import os 10 | import shutil 11 | ''' 12 | --------------- function --------------- 13 | ''' 14 | 15 | def extract_p_Id(extract_len, threshold): 16 | ls_signal = [] 17 | for i in range(extract_len): 18 | if tuple_id_pnum[i][1] > threshold: 19 | ls_signal.append(tuple_id_pnum[i]) 20 | # 存 list 21 | with open('./Signal_Id_pnum.txt','w') as out: 22 | csv_out=csv.writer(out) 23 | # csv_out.writerow(['Id','pnum_in_r']) 24 | for row in ls_signal: 25 | csv_out.writerow(row) 26 | return ls_signal 27 | 28 | 29 | ''' 30 | --------------- 选出 PNumInRadius 大于 threshold 的点 ID --------------- 31 | ''' 32 | # 读取 r(P=20)_pnum 即第一列id,第二列PNumInRadius的txt 33 | #id_pnum = np.genfromtxt("./output/r(P=20)_pnum.txt",delimiter = ",") 34 | id_pnum = np.genfromtxt("./id_r.txt",delimiter = ",") 35 | extract_len = len(id_pnum) 36 | 37 | # 创建元组list 38 | tuple_id_pnum = [tuple(x) for x in id_pnum.tolist()] 39 | # print("len tuple_id_pnuml",len(tuple_id_pnum)) 40 | 41 | # 提取符合要求的 isolated 点 Id 42 | threshold = np.genfromtxt("./parameters.txt",delimiter = ",")[6] 43 | ls_signal = extract_p_Id(extract_len, threshold) 44 | # print("len ls_signal",len(ls_signal)) 45 | 46 | # 按 Id_ 排序 47 | sorted_ls_signal = sorted(ls_signal) 48 | # for i in range(4): 49 | # print(sorted_ls_signal[i]) 50 | 51 | ''' 52 | ----------------- 根据唯一标识 Id_ 提取相应点的 xyz differencial 坐标 -------------------- 53 | ''' 54 | work_dir = os.getcwd() 55 | path = work_dir 56 | dirs = os.listdir(path) 57 | # 查找 [-12:] 为 d_time_h.txt 的文件,取得dir 58 | for dir in dirs: 59 | if dir[-12:] == "d_time_h.txt": 60 | # 根据确定为 signal 点的 Id_ 提取 signal 点的 xyz differencial 坐标 61 | p_d_th = np.genfromtxt(dir,delimiter = ",") 62 | if dir[-9:] == '3and4.txt': 63 | # 读取 1个 bin 的点 Id_ 和其对应的 xyz 64 | p_xyz = list(np.genfromtxt(dir,delimiter = ",")) 65 | 66 | # print("len(p_xyz)",len(p_xyz)) 67 | 68 | # 根据确定为 signal 点的 Id_ 提取 signal 点的 coordinate 坐标以及其他属性信息 69 | extracted_Id = 0 70 | ls_xyz = [] 71 | ls_d_th = [] 72 | stop_extract = len(ls_signal) 73 | for i in range(len(p_xyz)): 74 | Id_ = sorted_ls_signal[extracted_Id][0] 75 | if i == Id_ and i < stop_extract: 76 | ls_xyz.append(p_xyz[i]) 77 | ls_d_th.append(p_d_th[i]) 78 | extracted_Id = extracted_Id + 1 79 | 80 | # 输出 coordinate 坐标以及其他属性信息 81 | #with open("./Signal_p_xyz_" + str(threshold) + ".txt",'w') as out: 82 | with open("./Signal_p_xyz.txt",'w') as out: 83 | csv_out=csv.writer(out) 84 | # 1st col:'longitude', 2nd col:'latitude', 3rd col:'height', 85 | # 4th col:'photon signal confidence', 5th col:'along-track distance' 86 | for row in ls_xyz: 87 | csv_out.writerow(row) 88 | 89 | 90 | #with open("./Signal_normalized_" + str(threshold) + ".txt",'w') as out: 
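# The active call below writes the normalized (delta_time, height) pairs of the extracted
# signal photons to ./Signal_normalized.txt (see the column comment that follows); the
# commented-out variant above appears to be an earlier version that embedded the threshold
# value in the output file name.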
91 | with open("./Signal_normalized.txt",'w') as out: 92 | csv_out=csv.writer(out) 93 | # 1st col:'normalized_delts_time', 2nd col:'normalized_height' 94 | for row in ls_d_th: 95 | csv_out.writerow(row) -------------------------------------------------------------------------------- /output/1/9_De_trend_Asmooth插值.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Tue May 10 01:53:13 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 | import pandas as pd 10 | import scipy 11 | from scipy import interpolate 12 | import matplotlib.pyplot as plt 13 | from interp_function import interp_2D_XYH,save_df 14 | 15 | ''' 16 | ************ 3.5 去除超出 ref_dem_limit 阈值的 Asmooth ************ 17 | # 使用的插值方法是 pchip 形状保存分段立方隐含插值多项式 18 | ''' 19 | 20 | # (非必须) 计算 window_size 21 | window_size = list(np.genfromtxt("./2_Window.txt",delimiter = ","))[1] 22 | window_size = round(window_size) 23 | if window_size % 2 == 0: 24 | window_size = window_size + 1 25 | 26 | # (非必须) 计算 SmoothSize 27 | #SmoothSize = int(2 * 1000 * window_size) 28 | SmoothSize = int(2 * 100 * window_size) 29 | 30 | # 读取 1_Asmooth 中各项属性 31 | Asmooth = list(np.genfromtxt("./4_Asmooth_1.txt",delimiter = ",")) 32 | Asmooth_len = len(Asmooth) 33 | 34 | height_original = [] 35 | dist_original = [] 36 | 37 | for i in range(1,Asmooth_len): 38 | ''' 39 | if round(Asmooth[i][0],3) == round(Asmooth[i-1][0],3): 40 | # 取 height 低的 signal 点 41 | if Asmooth[i-1][2]>Asmooth[i][2] or Asmooth[i-1][2] == Asmooth[i][2]: 42 | continue 43 | #X_original.append(Asmooth[i][0]) 44 | #Y_original.append(Asmooth[i][1]) 45 | ''' 46 | height_original.append(Asmooth[i][1]) 47 | dist_original.append(Asmooth[i][0]) 48 | ''' 49 | # 进行 pchip 插值,可选: 50 | # 窗口大小多次处理 51 | # 一次处理完所有点 52 | ''' 53 | print("开始进行 pchip 插值") 54 | coord_interp = interp_2D_XYH(Asmooth_len, Asmooth_len, height_original, dist_original) 55 | save_df(coord_interp,'./5_pchip_interpolated.txt') 56 | 57 | ''' 58 | 已完成第 0 段,范围 0 90000 59 | 已完成第 1 段,范围 90000 180000 60 | 已完成第 2 段,范围 180000 270000 61 | 已完成第 3 段,范围 270000 360000 62 | 已完成第 4 段,范围 360000 450000 63 | 已完成第 5 段,范围 450000 540000 64 | 已完成第 6 段,范围 540000 630000 65 | 已完成第 7 段,范围 630000 720000 66 | 已完成第 8 段,范围 720000 810000 67 | 已完成第 9 段,范围 810000 900000 68 | 已完成第 10 段,范围 900000 990000 69 | 已完成第 11 段,范围 990000 1080000 70 | 已完成第 12 段,范围 1080000 1170000 71 | 已完成第 13 段,范围 1170000 1260000 72 | 已完成第 14 段,范围 1260000 1350000 73 | 已完成第 15 段,范围 1350000 1440000 74 | 已完成第 16 段,范围 1440000 1530000 75 | 已完成第 17 段,范围 1530000 1620000 76 | 已完成第 18 段,范围 1620000 1710000 77 | 已完成小尾巴部分,范围 1710000 1779564 78 | Asmooth 长度: 1779564 79 | X_interp 长度: 1779564 80 | ***************************** 81 | Asmooth 前10个记录: 82 | [array([6.32362045e+05, 4.78289371e+06, 3.29900000e+02, 2.30000000e+01]), array([6.32362041e+05, 4.78289374e+06, 3.29900000e+02, 2.40000000e+01]), array([6.32362037e+05, 4.78289378e+06, 3.29900000e+02, 2.50000000e+01]), array([6.32362033e+05, 4.78289381e+06, 3.29900000e+02, 2.60000000e+01]), array([6.32362029e+05, 4.78289385e+06, 3.29900000e+02, 2.70000000e+01]), array([6.32362025e+05, 4.78289388e+06, 3.29900000e+02, 2.80000000e+01]), array([6.32362021e+05, 4.78289392e+06, 3.29900000e+02, 2.90000000e+01]), array([6.32362018e+05, 4.78289395e+06, 3.29900000e+02, 3.00000000e+01]), array([6.32362014e+05, 4.78289399e+06, 3.29900000e+02, 3.10000000e+01]), array([6.32362010e+05, 4.78289402e+06, 3.29900000e+02, 3.20000000e+01])] 83 | ----------------------------- 84 | X_interp 
前10个记录: 85 | [632362.0382252822, 632362.0316136912, 632362.0250015891, 632362.0183889789, 632362.0117758618, 632362.00516224, 632361.9985481163, 632361.9919334917, 632361.9853183687, 632361.9787027492] 86 | 87 | ''' -------------------------------------------------------------------------------- /output/1/21_initial_ground_estimate_cutOff_upperbound.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mon May 23 17:24:08 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 | import matplotlib.pyplot as plt 10 | import csv 11 | from filter_function import median_filter_with_empty_7col ,average_filter_with_empty_7col,average_filter_with_offset_7col 12 | from compare_height_function import compare_2_layers_7col 13 | import matplotlib 14 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 15 | matplotlib.rcParams['axes.unicode_minus'] =False 16 | 17 | 18 | # --------------------- 参数 ------------------------------------------------- 19 | # 读取 window_size 20 | window_size = int(list(np.genfromtxt("./2_Window.txt",delimiter = ","))[1]) 21 | 22 | # medianSpan 必须为奇数,因为滤算子波需要奇数长度 23 | if window_size % 2 == 0: 24 | window_size = window_size + 1 25 | 26 | medianSpan = int(window_size * 2 / 3) 27 | 28 | # medianSpan 必须为奇数,因为滤算子波需要奇数长度 29 | if medianSpan % 2 == 0: 30 | medianSpan = medianSpan + 1 31 | 32 | 33 | # --------------------- 5.1 cutOff ------------------------------------------------- 34 | # 读取 4_potential_ground 的所有点 XYH 和 along-track_dist 35 | top = np.genfromtxt("./15_potential_ground.txt",delimiter = ",") 36 | len_top = len(top) 37 | 38 | print('-------- Finding top points -----------------------') 39 | print("len_top",len_top) 40 | print("medianSpan",medianSpan) 41 | 42 | # 进行 3 次 中值滤波 + 均值滤波 43 | original_begin_index = 0 44 | for i in range(3): 45 | print(i+1,"轮") 46 | # new_ground = extract_dist_ph(ground) 47 | # cutOff = medianfilter(ground), medianSpan 48 | cutOff_1 = median_filter_with_empty_7col(top,medianSpan) 49 | original_begin_index = original_begin_index + int((medianSpan - 1)/2) 50 | print("> len(cutOff_1)", len(cutOff_1)) 51 | print(">> original_begin_index",original_begin_index) 52 | # cutOff = smoothfilter(cutOff), Window 53 | cutOff_2 = average_filter_with_empty_7col(cutOff_1 , window_size) 54 | original_begin_index = original_begin_index + int((window_size - 1)/2) 55 | print("> len(cutOff_2)", len(cutOff_2)) 56 | print(">> original_begin_index",original_begin_index,'\n') 57 | del cutOff_1 58 | top = compare_2_layers_7col(top, cutOff_2, 1)[0] 59 | del cutOff_2 60 | 61 | 62 | print("结束 initially ground photons 提取") 63 | print("len_ground",len(top),'\n') 64 | print('-------- Finding lowerbound -----------------------') 65 | 66 | 67 | # 存储 68 | with open('./16_top.txt','w') as out: 69 | csv_out=csv.writer(out) 70 | # csv_out.writerow(['X','Y']) 71 | for row in top: 72 | csv_out.writerow(row) 73 | 74 | # ------------------------ 5.2 lowerbound ---------------------------------- 75 | # 函数输入格式 76 | new_top = top 77 | original_begin_index = 0 78 | # Median 79 | upperbound = median_filter_with_empty_7col(new_top, medianSpan) 80 | original_begin_index = original_begin_index + int((medianSpan - 1)/2) 81 | print(i,"> len(upperbound)", len(upperbound)) 82 | print(">> original_begin_index",original_begin_index) 83 | 84 | # Smooth 85 | upperbound_final = average_filter_with_offset_7col(upperbound, window_size, 1) 86 | print(i,"> len(upperbound)", len(upperbound_final)) 87 | print(">> 
original_begin_index",original_begin_index) 88 | 89 | # 存储 90 | with open('./16_upperbound.txt','w') as out: 91 | csv_out=csv.writer(out) 92 | # csv_out.writerow(['X','Y']) 93 | for row in upperbound_final: 94 | csv_out.writerow(row) -------------------------------------------------------------------------------- /output/1/20_initial_ground_estimate_potential_ground_points.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mon May 23 12:22:52 2022 4 | 5 | @author: longj 6 | """ 7 | import numpy as np 8 | import matplotlib.pyplot as plt 9 | import csv 10 | from interp_function import interp_linear_dist_ph,save_df 11 | from compare_height_function import compare_2_layers_7col_ 12 | import matplotlib 13 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 14 | matplotlib.rcParams['axes.unicode_minus'] =False 15 | 16 | 17 | ''' 18 | # ---------------- linear 插值 ------------------- 19 | ''' 20 | # 读取 within_150m_signal 的所有点 XYH 和 along-track_dist 21 | lowerbound = np.genfromtxt("./14_lowerbound.txt",delimiter = ",") 22 | along_dist = [] 23 | lb_Z = [] 24 | 25 | for i in range(len(lowerbound)): 26 | along_dist.append(lowerbound[i][3]) 27 | lb_Z.append(lowerbound[i][2]) 28 | 29 | 30 | dist_lowerbound = int(along_dist[-1] - along_dist[0]) 31 | interp_surface = interp_linear_dist_ph(dist_lowerbound, lb_Z, along_dist) 32 | 33 | 34 | # 存储 35 | save_df(interp_surface,'./15_linear_interp_surface.txt') 36 | 37 | ''' 38 | --------------- 提取 linear interp surface 上方 photons ---------- 39 | ''' 40 | surface = np.genfromtxt("./15_linear_interp_surface.txt",delimiter = ",") 41 | #np.genfromtxt('',delimiter = ",") 42 | signal = np.genfromtxt("./14_ground.txt",delimiter = ",") 43 | #potential_ground = compare_2_layers_2col(detrended, interp_surface, 0)[1] 44 | surface_0 = surface[0][0] 45 | surface__1 = surface[-1][0] 46 | #print(surface_0,"********************") 47 | 48 | for i in range(len(signal)): 49 | #print(i,slice_signal[i][0],surface_0) 50 | if signal[i][3]>surface_0: 51 | print(">> ",i,signal[i][3],surface_0) 52 | s_overlay_begin_idx = i 53 | break 54 | 55 | for i in range(1,len(signal)): 56 | if signal[-i][3]> ",len(signal)-i,signal[-i][3],surface__1) 58 | #print(len(signal)-i+1,signal[-i+1][0],surface__1) 59 | s_overlay_end_idx = len(signal)- i 60 | break 61 | 62 | signal = signal[s_overlay_begin_idx:s_overlay_end_idx] 63 | """ 64 | dist= [] 65 | height = [] 66 | for i in range(len(signal)): 67 | dist.append(signal[i][0]) 68 | height.append(signal[i][1]) 69 | 70 | plt.plot(dist,height,'.') 71 | 72 | 73 | """ 74 | result = compare_2_layers_7col_(signal,surface,0) 75 | lower_set = result[1] 76 | # upper_set = result[1] 77 | 78 | plt.xlabel('沿轨距离 (m)') 79 | plt.ylabel('高程 (m)') 80 | 81 | dist= [] 82 | height = [] 83 | for i in range(len(lower_set)): 84 | dist.append(lower_set[i][3]) 85 | height.append(lower_set[i][2]) 86 | 87 | plt.plot(dist,height,'.',label = 'De-trended 信号光子') 88 | 89 | surface_x= [] 90 | surface_y = [] 91 | for i in range(len(surface)): 92 | surface_x.append(surface[i][0]) 93 | surface_y.append(surface[i][1]) 94 | plt.plot(surface_x,surface_y,'r',label = 'lowerbound 线性插值平面') 95 | 96 | 97 | # 存储 98 | with open('./15_potential_ground.txt','w') as out: 99 | csv_out=csv.writer(out) 100 | # csv_out.writerow(['X','Y']) 101 | for row in lower_set: 102 | csv_out.writerow(row) 103 | ''' 104 | # 存储 105 | with open('./output/result_upper_set.txt','w') as out: 106 | csv_out=csv.writer(out) 107 | # 
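# NOTE: the tail-side boundary loop earlier in this file prints as
# "if signal[-i][3]> ",len(signal)-i,..." in this copy -- the text between a "<"
# and the following ">" appears to have been stripped. A plausible reconstruction,
# assuming it mirrors the head-side loop and looks for the last photon whose
# along-track distance still lies on the interpolated lowerbound surface:
#
#     for i in range(1, len(signal)):
#         if signal[-i][3] < surface__1:
#             print(">> ", len(signal) - i, signal[-i][3], surface__1)
#             s_overlay_end_idx = len(signal) - i
#             break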
csv_out.writerow(['X','Y']) 108 | for row in upper_set: 109 | csv_out.writerow(row) 110 | ''' 111 | -------------------------------------------------------------------------------- /output/1/0_view_24.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Thu May 12 09:37:42 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | ''' 9 | ************** 查看 De-trend 平面 *************** 10 | 11 | ''' 12 | import numpy as np 13 | import matplotlib.pyplot as plt 14 | from pylab import * 15 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 16 | matplotlib.rcParams['axes.unicode_minus'] =False 17 | ''' 18 | ************************************************************ 19 | ''' 20 | 21 | # 读取 De-trend_p 22 | Asmooth_1 = np.genfromtxt("./19_ground_points.txt",delimiter = ",") 23 | len_Asmooth_1 = len(Asmooth_1) 24 | 25 | m_height = np.zeros(len_Asmooth_1) 26 | m_dist_along = np.zeros(len_Asmooth_1) 27 | m_ph = np.zeros(len_Asmooth_1) 28 | 29 | for i in range(len_Asmooth_1): 30 | m_height[i] = Asmooth_1[i][2] 31 | m_dist_along[i] = Asmooth_1[i][3]# - Asmooth_2[0][3] 32 | m_ph[i] = Asmooth_1[i][6] 33 | 34 | ''' 35 | ************************************************************ 36 | ''' 37 | 38 | # 读取 De-trend_p 39 | Asmooth_2 = np.genfromtxt("./17_ground.txt",delimiter = ",") 40 | len_Asmooth_2 = len(Asmooth_2) 41 | 42 | d_height = np.zeros(len_Asmooth_2) 43 | d_dist_along = np.zeros(len_Asmooth_2) 44 | 45 | for i in range(len_Asmooth_2): 46 | d_height[i] = Asmooth_2[i][2] 47 | d_dist_along[i] = Asmooth_2[i][3]# - Asmooth_2[0][3] 48 | 49 | 50 | ''' 51 | ************************************************************ 52 | ''' 53 | 54 | signal_ = np.genfromtxt("./13_detrended.txt",delimiter = ",") 55 | len_signal_ = len(signal_) 56 | 57 | f_height = np.zeros(len_signal_) 58 | f_dist_along = np.zeros(len_signal_) 59 | 60 | for i in range(len_signal_): 61 | f_height[i] = signal_[i][2] 62 | f_dist_along[i] = signal_[i][3]# - signal[0][3] 63 | 64 | ''' 65 | 66 | ************************************************************ 67 | ''' 68 | signal = np.genfromtxt("./8_within_150m_signal.txt",delimiter = ",") 69 | len_signal = len(signal_) 70 | 71 | s_height = np.zeros(len_signal) 72 | s_dist_along = np.zeros(len_signal) 73 | 74 | for i in range(len_signal): 75 | s_height[i] = signal[i][2] 76 | s_dist_along[i] = signal[i][3]# - signal[0][3] 77 | 78 | ''' 79 | 80 | ************************************************************ 81 | ''' 82 | signal1 = np.genfromtxt("./20200830_gt1l_coord_attribute.txt",delimiter = ",") 83 | len_signal1 = len(signal1) 84 | 85 | n_height = np.zeros(len_signal1) 86 | n_dist_along = np.zeros(len_signal1) 87 | 88 | for i in range(len_signal1): 89 | n_height[i] = signal1[i][2] 90 | n_dist_along[i] = signal1[i][3]# - signal[0][3] 91 | 92 | ''' 93 | 94 | ************************************************************ 95 | ''' 96 | #用subplot()方法绘制多幅图形 97 | #plt.figure(figsize=(6,10),dpi=80) 98 | plt.figure(figsize=(18,14),dpi=80) 99 | plt.figure(1) 100 | ax1 = plt.subplot(311) 101 | plt.title("title") 102 | plt.xlabel("沿轨道距离 (m)") 103 | plt.ylabel("高程 (m)") 104 | plt.scatter(n_dist_along, n_height, color = 'silver',marker = '.', label='所有光子') 105 | plt.scatter(s_dist_along, s_height, color = 'blue',marker = '.', label='信号光子') 106 | plt.plot(m_dist_along, m_ph, 'r.', label='最终地表面点') 107 | plt.legend() 108 | 109 | ax1 = plt.subplot(312) 110 | plt.title("title") 111 | plt.xlabel("沿轨道距离 (m)") 112 | plt.ylabel("高程 (m)") 113 | 
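# NOTE: the element-by-element copy loops above can be written as NumPy slices;
# a minimal, behaviour-equivalent sketch (assumes the 7-column ground file and the
# 4-column photon files read above):
#     m_height, m_dist_along, m_ph = Asmooth_1[:, 2], Asmooth_1[:, 3], Asmooth_1[:, 6]
#     s_height, s_dist_along = signal[:, 2], signal[:, 3]
# Also note that len_signal above is taken from signal_ rather than signal, which looks
# like a typo and can truncate (or over-run) the within-150m arrays.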
plt.scatter(f_dist_along, f_height, color = 'blue',marker = '.', label='De-trended 信号光子') 114 | #plt.plot(d_dist_along, d_height, 'b.', label='地面点') 115 | plt.plot(m_dist_along, m_height, 'r.', label='最终地表面点') 116 | plt.legend() 117 | 118 | 119 | plt.savefig('最终地表面点.png') -------------------------------------------------------------------------------- /output/2_open3d_k_20_histogram_x为t.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Apr 15 19:19:31 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import os 9 | import open3d as o3d 10 | import pandas as pd 11 | import numpy as np 12 | import sys 13 | import csv 14 | import math 15 | from mkdir_function import mkdir 16 | import shutil 17 | ''' 18 | ************* kd tree ************* 19 | kd-树是 k-dimention tree的缩写,是对数据点在k维空间中划分的一种数据结构, 20 | 主要应用于多维空间关键数据的搜索(如范围搜索和最近邻搜索) 21 | 22 | kd tree 是一种空间划分树,把整个空间划分为特定的几个部分,然后在特定空间的部分内 23 | 进行相关搜索操作。 24 | ''' 25 | 26 | # ----------------------------------------------------------- 27 | # *********************** function **************************** 28 | # ----------------------------------------------------------- 29 | 30 | 31 | 32 | ''' 33 | ************* k近邻搜索 *************** 34 | Search_knn_vector_3d,返回查询点的k个最近邻索引列表,这些相邻的点存储在一个数组中。 35 | ''' 36 | 37 | ''' 38 | ************ 计算搜索半径 r ************ 39 | 公式 P / Ntotal = V / Vtotal 40 | 其中 V = π * r^^2 41 | Vtotal = 1 42 | ''' 43 | def calculat_search_r(pcd_vector, P = 20, Ntotal = 1): 44 | r = math.sqrt(P / Ntotal / 3.1415926) 45 | return r 46 | 47 | ''' 48 | ************* 半径邻域搜索 ************* 49 | Search_radius_vector_3d,查询所有和查询点距离小于给定半径的点。 50 | ''' 51 | def kd_search_radius(pcd_vector, P = 20, folder_name='default'): 52 | pcd_tree = o3d.geometry.KDTreeFlann(pcd_vector) 53 | id_ = list(range(len(pcd_vector.points))) 54 | Ntotal = len(pcd_vector.points) 55 | r = calculat_search_r(pcd_vector, P, Ntotal) 56 | num_in_r = [] 57 | for i in range(Ntotal): 58 | #建立半径搜索 59 | [k2, idx2, _] = pcd_tree.search_radius_vector_3d(pcd_vector.points[i],r) 60 | num_in_r.append([k2, idx2, _][0]) 61 | statistic_occurence(num_in_r,folder_name) 62 | 63 | # 输出 r(P=20)_pnum.txt 。第一列为 Id_,第二列为 pnum。 64 | dict_ = {'point_ID':id_, 'num_in_r':num_in_r} 65 | df = pd.DataFrame(dict_) 66 | df['num_in_r'].hist(bins=100) 67 | mkdir('./'+ folder_name) 68 | df.to_csv('./'+ folder_name +'/id_r.txt',index=False,header=False) 69 | return 70 | 71 | ''' 72 | ************* 记r(P=20)对应的频数 ************* 73 | 使用 dict() 74 | ''' 75 | def statistic_occurence(num_in_r,folder_name='default'): 76 | 77 | se = pd.Series(num_in_r) 78 | countDict = dict(se.value_counts()) 79 | # print(countDict) 80 | 81 | # 输出 r(P=20)_poccurence.txt 。第一列为 r(P=20),第二列为 pnum的Occurence。 82 | mkdir('./'+ folder_name) 83 | with open('./'+ folder_name +'/r_poccurence.txt',"w") as f: 84 | writer=csv.writer(f) 85 | for key,value in countDict.items(): 86 | writer.writerow([key,value]) 87 | 88 | return countDict 89 | 90 | # ----------------------------------------------------------- 91 | # *********************** main() **************************** 92 | # ----------------------------------------------------------- 93 | 94 | ''' 95 | ************* 读取txt ************* 96 | Open3D不能直接读取txt点云,可以通过numpy读取点坐标(分隔符为","), 97 | 再转成三维向量进行可视化。 98 | ''' 99 | 100 | # txt数据读取 101 | path = './' 102 | dirs = os.listdir(path) 103 | i = 1 104 | for dir in dirs: 105 | if dir[-12:] == "d_time_h.txt": 106 | # 挨个读取 bin 数据 107 | pcd = np.genfromtxt(dir,delimiter = ",") 108 
| 109 | # 初始化open3d vector 110 | pcd_vector = o3d.geometry.PointCloud() 111 | 112 | # 加载点坐标 113 | pcd_vector.points = o3d.utility.Vector3dVector(pcd[:, : 3]) 114 | 115 | # 输出点云个数 116 | #print(len(pcd_vector.points)) 117 | 118 | # KD树邻域搜索 P= 20 119 | # kd_search_knn(pcd_vector, 20) 120 | 121 | # KD树半径搜索 r(P = 20) 122 | kd_search_radius(pcd_vector, 5,str(i)) 123 | for dir_ in dirs: 124 | if dir_[0:12] == dir[0:12]: 125 | shutil.move(path + dir_, path + '/' + str(i)+'/'+dir_) 126 | #shutil.move(path + dir, path + '/' + str(i)+'/'+dir) 127 | i = i + 1 128 | -------------------------------------------------------------------------------- /output/1/19_initial_ground_estimate_cutOff_lowerbound.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Tue May 3 15:27:03 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 | import csv 10 | import matplotlib.pyplot as plt 11 | from filter_function import median_filter_with_empty_7col ,average_filter_with_empty_7col,average_filter_with_offset_7col 12 | from compare_height_function import compare_2_layers_7col 13 | ''' 14 | def extract_dist_ph(signal): 15 | dist_ph = [] 16 | for i in range(len(signal)): 17 | dist_ph.append((signal[i][3],signal[i][2])) 18 | return dist_ph 19 | ''' 20 | # --------------------- 参数 ------------------------------------------------- 21 | # 读取 window_size 22 | window_size = int(list(np.genfromtxt("./2_Window.txt",delimiter = ","))[1]) 23 | 24 | # medianSpan 必须为奇数,因为滤算子波需要奇数长度 25 | if window_size % 2 == 0: 26 | window_size = window_size + 1 27 | 28 | medianSpan = int(window_size * 2 / 3) 29 | 30 | # medianSpan 必须为奇数,因为滤算子波需要奇数长度 31 | if medianSpan % 2 == 0: 32 | medianSpan = medianSpan + 1 33 | 34 | # ---------------------- 4.1 cutOff ---------------------------------------- 35 | # 读取 within_150m_signal 的所有点 XYH 和 along-track_dist 36 | ground = np.genfromtxt("./13_detrended.txt",delimiter = ",") 37 | len_ground = len(ground) 38 | 39 | dz = [] 40 | distan = [] 41 | for i in range(len(ground)): 42 | dz.append(ground[i][2]) 43 | distan.append(ground[i][3]) 44 | 45 | 46 | print('-------- Finding ground points -----------------------') 47 | print("len_ground",len_ground) 48 | print("medianSpan",medianSpan) 49 | 50 | # 进行 5 次 中值滤波 + 均值滤波 51 | original_begin_index = 0 52 | for i in range(5): 53 | print(i+1,"轮") 54 | # new_ground = extract_dist_ph(ground) 55 | # cutOff = medianfilter(ground), medianSpan 56 | cutOff_1 = median_filter_with_empty_7col(ground,medianSpan) 57 | original_begin_index = original_begin_index + int((medianSpan - 1)/2) 58 | print("> len(cutOff_1)", len(cutOff_1)) 59 | print(">> original_begin_index",original_begin_index) 60 | # cutOff = smoothfilter(cutOff), Window 61 | cutOff_2 = average_filter_with_empty_7col(cutOff_1 , window_size) 62 | original_begin_index = original_begin_index + int((window_size - 1)/2) 63 | print("> len(cutOff_2)", len(cutOff_1)) 64 | print(">> original_begin_index",original_begin_index,'\n') 65 | del cutOff_1 66 | 67 | ground = compare_2_layers_7col(ground, cutOff_2, 1)[0] 68 | del cutOff_2 69 | 70 | 71 | print("结束 initially ground photons 提取") 72 | print("len_ground",len(ground),'\n') 73 | 74 | print('-------- Finding lowerbound -----------------------') 75 | 76 | # 存储 77 | with open('./14_ground.txt','w') as out: 78 | csv_out=csv.writer(out) 79 | # csv_out.writerow(['X','Y']) 80 | for row in ground: 81 | csv_out.writerow(row) 82 | 83 | # ------------------------ 4.2 lowerbound 
---------------------------------- 84 | # 函数输入格式 85 | new_ground = ground 86 | original_begin_index = 0 87 | # Median 88 | lowerbound = median_filter_with_empty_7col(new_ground, medianSpan * 3) 89 | original_begin_index = original_begin_index + int((medianSpan * 3 - 1)/2) 90 | print(i,"> len(lowerbound)", len(lowerbound)) 91 | print(">> original_begin_index",original_begin_index) 92 | # Smooth 93 | middlebound = average_filter_with_empty_7col(lowerbound , window_size) 94 | original_begin_index = original_begin_index + int((window_size - 1)/2) 95 | print(i,"> len(middlebound)", len(middlebound)) 96 | print(">> original_begin_index",original_begin_index) 97 | # Smooth 98 | lowerbound_final = average_filter_with_offset_7col(lowerbound, window_size, -4) 99 | print(i,"> len(lowerbound)", len(lowerbound_final)) 100 | print(">> original_begin_index",original_begin_index) 101 | 102 | # 存储 103 | with open('./14_lowerbound.txt','w') as out: 104 | csv_out=csv.writer(out) 105 | # csv_out.writerow(['X','Y']) 106 | for row in lowerbound_final: 107 | csv_out.writerow(row) 108 | 109 | ''' 110 | a = [] 111 | b = [] 112 | for i in range(len(cutOff_2)): 113 | a.append(cutOff_2[i][2]) 114 | b.append(cutOff_2[i][3]) 115 | plt.plot(distan,dz,'b.',label='detrended points') 116 | plt.plot(b,a,'r.',label='cutOff_2') 117 | ''' 118 | -------------------------------------------------------------------------------- /output/1/8_De_trend_ref_DEM_limit.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Tue May 10 00:22:52 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | ''' 9 | ************ 3.4 定义一个 reference DEM limit ************ 10 | # ref_dem_limit 11 | # 使用 SRTM 90 m DEM, TBD = 120 m 12 | ''' 13 | import sys 14 | import numpy as np 15 | import csv 16 | from osgeo import gdal 17 | from gdalconst import * 18 | 19 | ''' 20 | ****** 提取在 DEM 上下 120 m 内的点 ******* 21 | ''' 22 | # 函数功能:elevation 最邻近插值 23 | def nearest_interp_elev(elev): 24 | new_elev = [] 25 | for i in range(len(elev)): 26 | if elev[i] != -100: 27 | new_elev.append(elev[i]) 28 | else: 29 | j_left = 0 30 | j_right = 0 31 | for j in range(1000): 32 | if elev[i-j_left] == -100: 33 | j_left = j_left + 1 34 | else: 35 | break 36 | if i len(elev)-1: 46 | j_right = -100 47 | break 48 | if j_left != -100 and j_right != -100: 49 | new_elev.append((elev[i-j_left]+elev[i+j_right])/2) 50 | elif j_left == -100 and j_right != -100: 51 | #print(i,j_right,len(new_elev)) 52 | new_elev.append(elev[i+j_right]) 53 | elif j_right == -100 and j_left != -100: 54 | #print(i,j_left,len(new_elev)) 55 | new_elev.append(elev[i-j_left]) 56 | 57 | if i % 1000 == 0: 58 | print(">",end="") 59 | 60 | return new_elev 61 | 62 | 63 | # 函数功能:point overlay raster, get elevation 64 | def in_DEM_threshold(tif_path, xy_ls): 65 | #获取注册类 66 | gdal.AllRegister() 67 | 68 | #打开栅格数据 69 | ds = gdal.Open(tif_path)#, GA_ReadOnly) 70 | if ds is None: 71 | print('Could not open image') 72 | sys.exit(1) 73 | 74 | #获取仿射变换信息 75 | transform = ds.GetGeoTransform() 76 | xOrigin = transform[0] 77 | yOrigin = transform[3] 78 | pixelWidth = transform[1] 79 | pixelHeight = transform[5] 80 | # 提取(X, Y)处的 pixel_value, 即 DEM 高程 81 | #DEM_height = [] 82 | #print("xOrigin,xOrigin,pixelWidth,pixelHeight",xOrigin,yOrigin,pixelWidth,pixelHeight) 83 | #print("range(len(xy_ls)),xy_ls[0][0:2],xy_ls[len(xy_ls)-1][0:2]") 84 | #print(range(len(xy_ls)),xy_ls[0][0:2],xy_ls[len(xy_ls)-1][0:2]) 85 | elev = [] 86 | for i in range(len(xy_ls)): 87 | x = 
xy_ls[i][2] 88 | y = xy_ls[i][3] 89 | if x != -100: 90 | # 获取 (X, Y) 所在的栅格的位置 91 | xOffset = int((x-xOrigin)/pixelWidth) 92 | yOffset = int((y-yOrigin)/pixelHeight) 93 | # 提取 pixel_velue 94 | data = ds.GetRasterBand(1).ReadAsArray(xOffset, yOffset,1,1) 95 | pixel_value = data[0,0] 96 | elev.append(pixel_value) 97 | ''' 98 | if abs(xy_ls[i][2] - pixel_value)<120: 99 | pInThreshold.append(xy_ls[i]) 100 | ''' 101 | else: 102 | elev.append(-100) 103 | print("已完成从 DEM 的 elevation 提取") 104 | interped_elev = nearest_interp_elev(elev) 105 | print("已完成 elevation 最邻近插值") 106 | pInThreshold = [] 107 | for i in range(len(interped_elev)): 108 | if abs(xy_ls[i][1] - interped_elev[i])<120: 109 | pInThreshold.append(xy_ls[i]) 110 | 111 | return pInThreshold 112 | 113 | # 读取 0_Asmooth 的所有点 X,Y,h 和 along-track_dist 114 | xy_ls = list(np.genfromtxt("./3_Asmooth_0.txt",delimiter = ",")) 115 | 116 | # 获取 DEM/srtm_61_04_4550 117 | pInThreshold = in_DEM_threshold("../srtm_61_04_4550.tif", xy_ls) 118 | print("xy_ls 长度:",len(xy_ls)) 119 | print("pInTreshold 长度:",len(pInThreshold)) 120 | 121 | # 存储 pInThreshold 122 | with open('./4_Asmooth_1.txt','w') as out: 123 | csv_out=csv.writer(out) 124 | # csv_out.writerow(['X','Y']) 125 | for row in pInThreshold: 126 | csv_out.writerow(row) 127 | 128 | -------------------------------------------------------------------------------- /output/1/3_hist_Gauss_fit.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mon May 2 09:39:02 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 | import matplotlib.pyplot as plt 10 | from scipy.optimize import curve_fit 11 | import matplotlib 12 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 13 | matplotlib.rcParams['axes.unicode_minus'] =False 14 | 15 | ''' 16 | *********** r(P=20)_poccurence 散点图的高斯拟合 ******************** 17 | *********** 定义 DRAGANN 的高斯拟合函数(自定义函数) *************** 18 | ''' 19 | def Gauss(x, Parameter1_amp, Parameter2_cent): 20 | # Parameter1_amp 为 拟合出的高斯曲线的 peak 强度(高度) 21 | # Parameter2_cent 为 拟合出的高斯曲线的 peak 中线(平均值μ) 22 | # Parameter3_std 为 Xdata list 的 Std 23 | Parameter3_std =np.std(x) 24 | y = Parameter1_amp * np.exp((-1 * ((x - Parameter2_cent)**2)) / 2 / Parameter3_std**2) 25 | return y 26 | 27 | ''' 28 | ********************** 平移纵坐标 ********************** 29 | ''' 30 | def substract_move(x): 31 | return x - 800 32 | ''' 33 | ********************** 求 y1、y2 交点 ********************** 34 | ''' 35 | def y1(x,arry): 36 | y1 = arry[0] * np.exp((-1 * ((x - arry[1])**2)) / 2 / arry[2]**2) 37 | return y1 38 | 39 | def y2(x,arry): 40 | y2 = arry[3] * np.exp((-1 * ((x - arry[4])**2)) / 2 / arry[5]**2) 41 | return y2 42 | 43 | def Y1Y2_intersect(arry_len,arry): 44 | x = list(range(0, arry_len, 1)) 45 | for i in x: 46 | if abs(y1(i,arry) - y2(i,arry)) < 0.01: 47 | break 48 | return i 49 | 50 | ''' 51 | *********** 读取数据,并排序 *********************** 52 | ''' 53 | id_pnum = np.genfromtxt("./r_poccurence.txt",delimiter = ",") 54 | arry_len = len(id_pnum) 55 | 56 | tuple_id_pnum = [tuple(x) for x in id_pnum.tolist()] 57 | 58 | # 在进行高斯曲线拟合前需对 Xdata & 对应的 Ydata 进行排序, 59 | # 否则会生成非常奇怪的拟合曲线 60 | sort_id_pnum = sorted(tuple_id_pnum) 61 | 62 | 63 | pnum_in_r = np.zeros(arry_len) 64 | occurence = np.zeros(arry_len) 65 | for i in range(arry_len): 66 | pnum_in_r[i] = sort_id_pnum[i][0] 67 | occurence[i] = sort_id_pnum[i][1] 68 | 69 | ''' 70 | *********** 手动分段,进行高斯拟合 *************** 71 | ------------ Noise Peak 拟合 ---------------- 72 | ''' 73 | 
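# NOTE: the curve_fit calls below rely on scipy's default starting values; if the fit
# does not converge on another track, an explicit initial guess usually helps.
# A minimal sketch (p0 = [peak height, peak centre], both taken from the data):
#     parameters, covariance = curve_fit(Gauss, xdata1, ydata1,
#                                        p0=[ydata1.max(), xdata1[np.argmax(ydata1)]])
# Likewise, Y1Y2_intersect stops at the first integer x where |y1 - y2| < 0.01, which can
# miss the crossing entirely; testing for a sign change of y1(x) - y2(x) is more robust.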
# 输出参数 74 | arry = np.zeros(7) 75 | 76 | fig=plt.figure(figsize=(10,6)) 77 | ax=fig.add_axes([0.1,0.2,0.8,0.7]) 78 | # 点统计图绘制 79 | #plt.xlabel("P = 20 时的搜索半径") 80 | plt.ylabel("搜索半径统计频次 (次)") 81 | plt.ylim(-20, 250) 82 | plt.plot(pnum_in_r, occurence, 'c.', label='ATL03 光子') 83 | 84 | # 分段 85 | xdata1 = pnum_in_r[450:770] 86 | ydata1 = occurence[450:770] 87 | plt.plot(xdata1, ydata1, 'b.', label='Noise 峰') 88 | 89 | # 对散点拟合曲线,获得参数fit_A,fit_B 90 | parameters, covariance = curve_fit(Gauss, xdata1, ydata1) 91 | 92 | fit_A1 = parameters[0] 93 | fit_B1 = parameters[1] 94 | arry[0] = round(fit_A1,2) 95 | arry[1] = round(fit_B1,2) 96 | arry[2] = round(np.std(xdata1),2) 97 | 98 | # 计算出参数后,画拟合曲线 99 | fit_y1 = Gauss(xdata1, fit_A1, fit_B1) 100 | # plt.plot(xdata1, fit_y1, 'r--', label='NoisePeak_fit') 101 | plt.plot(xdata1, fit_y1, 'r--', label='Noise 峰拟合') 102 | plt.legend() 103 | 104 | # 插入公式 text 105 | plt.text(300, 200,r"occurences = $ae^{\frac{-(x-b)^2}{2c^2}}$",fontsize=16) 106 | #plt.text(0, 200, str(round(fit_A1,2)) + r'$\Gamma(z) = \int_0^\infty t^{z-1}e^{-t}dt\,.$', fontsize=10) 107 | ''' 108 | -------------- Signal Peak 拟合 -------------- 109 | ''' 110 | 111 | # 分段 112 | xdata2 = pnum_in_r[800:950] 113 | xdata2_move = list(map(substract_move, pnum_in_r[800:950])) 114 | ydata2 = occurence[800:950] 115 | plt.plot(xdata2, ydata2, 'g.', label='Signal峰') 116 | 117 | # 对散点拟合曲线,获得参数fit_A,fit_B 118 | parameters, covariance = curve_fit(Gauss, xdata2_move, ydata2) 119 | 120 | fit_A2 = parameters[0] 121 | fit_B2 = parameters[1] 122 | arry[3] = round(fit_A2,2) 123 | arry[4] = round(fit_B2,2) 124 | arry[5] = round(np.std(xdata2),2) 125 | 126 | # 计算出参数后,画拟合曲线 127 | fit_y2 = Gauss(xdata2_move, fit_A2, fit_B2) 128 | plt.plot(xdata2, fit_y2, 'r-', label='Signal 峰拟合') 129 | plt.legend() 130 | 131 | threshold = Y1Y2_intersect(arry_len,arry) 132 | arry[6] = (threshold) 133 | # 插入公式 text 134 | plt.xlabel("搜索邻域中的光子数量 (个)"+'\n\n其中 Noise 峰拟合系数 a ='+ str(round(fit_A1,2)) + " b =" + str(round(fit_B1,2)) + " c =" + str(round(np.std(xdata1),2))+'\n其中最高 Signal 峰拟合系数 a ='+ str(round(fit_A2,2)) + " b =" + str(round(fit_B2,2)) + " c =" + str(round(np.std(xdata2),2))) 135 | 136 | plt.savefig('Gauss_fit.png') 137 | np.savetxt('parameters.txt', arry) 138 | -------------------------------------------------------------------------------- /output/1/23_initial_ground_estimate_refine_ground.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mon May 23 19:26:10 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | 9 | import numpy as np 10 | import matplotlib.pyplot as plt 11 | import csv 12 | from filter_function import median_filter_with_empty_7col ,average_filter_with_empty_7col, average_filter_with_offset_7col 13 | from compare_height_function import compare_2_layers_7col 14 | import matplotlib 15 | matplotlib.rcParams['font.sans-serif'] = ['SimHei'] 16 | matplotlib.rcParams['axes.unicode_minus'] =False 17 | import scipy.signal 18 | 19 | 20 | # --------------------- 参数 ------------------------------------------------- 21 | # 读取 window_size 22 | window_size = int(list(np.genfromtxt("./2_Window.txt",delimiter = ","))[1]) 23 | 24 | # medianSpan 必须为奇数,因为滤算子波需要奇数长度 25 | if window_size % 2 == 0: 26 | window_size = window_size + 1 27 | 28 | medianSpan = int(window_size * 2 / 3) 29 | 30 | # medianSpan 必须为奇数,因为滤算子波需要奇数长度 31 | if medianSpan % 2 == 0: 32 | medianSpan = medianSpan + 1 33 | 34 | # --------------------- 5.1 cutOff 
------------------------------------------------- 35 | # 读取 4_potential_ground 的所有点 XYH 和 along-track_dist 36 | ground = np.genfromtxt("./17_ground.txt",delimiter = ",") 37 | len_ground = len(ground) 38 | 39 | print('-------- Refine ground points -----------------------') 40 | print("len_ground",len_ground) 41 | print("medianSpan",medianSpan) 42 | 43 | # 进行 2 次 中值滤波 + 均值滤波 44 | original_begin_index = 0 45 | for i in range(2): 46 | print(i+1,"轮") 47 | # new_ground = extract_dist_ph(ground) 48 | # cutOff = medianfilter(ground), medianSpan 49 | cutOff_1 = median_filter_with_empty_7col(ground,medianSpan) 50 | original_begin_index = original_begin_index + int((medianSpan - 1)/2) 51 | print("> len(cutOff_1)", len(cutOff_1)) 52 | print(">> original_begin_index",original_begin_index) 53 | # cutOff = smoothfilter(cutOff), Window 54 | cutOff_2 = average_filter_with_empty_7col(cutOff_1 , window_size) 55 | original_begin_index = original_begin_index + int((window_size - 1)/2) 56 | print("> len(cutOff_2)", len(cutOff_2)) 57 | print(">> original_begin_index",original_begin_index,'\n') 58 | del cutOff_1 59 | ground = compare_2_layers_7col(ground, cutOff_2, 1)[0] 60 | del cutOff_2 61 | 62 | print("结束 initially ground photons 提取") 63 | print("len_ground",len(ground),'\n') 64 | 65 | # 存储 66 | with open('./18_refined_ground.txt','w') as out: 67 | csv_out=csv.writer(out) 68 | # csv_out.writerow(['X','Y']) 69 | for row in ground: 70 | csv_out.writerow(row) 71 | print('-------- Finished Refine -----------------------\n\n') 72 | 73 | ground = np.genfromtxt("./18_refined_ground.txt",delimiter = ",") 74 | def Savizky_Golay_smooth(original_surface,SmoothSize): 75 | print("len(original_surface)",len(original_surface)) 76 | height = [] 77 | along_dist = [] 78 | 79 | for idx in range(len(original_surface)): 80 | height.append(original_surface[idx][2]) 81 | along_dist.append(original_surface[idx][3]) 82 | 83 | print("> len(along_dist)",len(along_dist)) 84 | print("> len(height)",len(height)) 85 | 86 | new_height = scipy.signal.savgol_filter(height,window_length=SmoothSize,polyorder=3) 87 | 88 | smoothed_surface = [] 89 | for i in range(len(new_height)): 90 | smoothed_surface.append((along_dist[i],new_height[i])) 91 | 92 | print(">> len(smoothed_surface)",len(smoothed_surface)) 93 | 94 | return smoothed_surface 95 | 96 | print('-------- Savizky-Golay Smooth -----------------------') 97 | # 函数输入格式 98 | refined_ground = ground 99 | original_begin_index = 0 100 | # Median 101 | refined_ground = median_filter_with_empty_7col(refined_ground, medianSpan) 102 | original_begin_index = original_begin_index + int((medianSpan - 1)/2) 103 | print("> len(refined_ground)", len(refined_ground)) 104 | print(">> original_begin_index",original_begin_index) 105 | 106 | # Savizky-Golay Smooth 107 | ground_final = Savizky_Golay_smooth(refined_ground, window_size) 108 | print("> len(ground_final)", len(ground_final)) 109 | print(">> original_begin_index",original_begin_index) 110 | 111 | 112 | # print(ground_final[0:100]) 113 | 114 | # 存储 115 | with open('./18_ground_final.txt','w') as out: 116 | csv_out=csv.writer(out) 117 | # csv_out.writerow(['X','Y']) 118 | for row in ground_final: 119 | csv_out.writerow(row) 120 | -------------------------------------------------------------------------------- /output/1/interp_function.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sun May 22 09:53:13 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import numpy as np 9 
| import pandas as pd 10 | import scipy 11 | from scipy import interpolate 12 | 13 | 14 | ''' 15 | *******存储******** 16 | ''' 17 | def save_df(coord_interp,path): 18 | zip_coord_interp = list(map(list, zip(*coord_interp))) 19 | name=['along_track_dist','height_interp'] 20 | #数据有两列 21 | df_coord_interp = pd.DataFrame(data = zip_coord_interp,columns = name) 22 | 23 | df_coord_interp.to_csv(path,sep=',', header=False, index=False) 24 | return True 25 | 26 | def save_df_4col(coord_interp,path): 27 | zip_coord_interp = list(map(list, zip(*coord_interp))) 28 | name=['X','Y','along_track_dist','height_interp'] 29 | #数据有四列 30 | df_coord_interp = pd.DataFrame(data = zip_coord_interp,columns = name) 31 | 32 | df_coord_interp.to_csv(path,sep=',', header=False, index=False) 33 | return True 34 | 35 | def interp_2D_XYH(Asmooth_len,SmoothSize,height_original,dist_original): 36 | dist_interp = [] 37 | height_interp = [] 38 | # 将全长拆开为 2 * window_size m 的小段 39 | segment_num = round(Asmooth_len / SmoothSize) 40 | segment_last_len = Asmooth_len % SmoothSize 41 | 42 | 43 | tck = list(range(1,segment_num + 1)) 44 | u = list(range(1,segment_num + 1)) 45 | X_zero = [0 for i in range(len(height_original))] 46 | 47 | try: 48 | if Asmooth_len != SmoothSize: 49 | # 整段部分 50 | for i in range(1,segment_num): 51 | # coord_original 是 XYH 三维数组 52 | coord_original = [X_zero[SmoothSize*(i-1):SmoothSize*i],dist_original[SmoothSize*(i-1):SmoothSize*i],height_original[SmoothSize*(i-1):SmoothSize*i]] 53 | #now we get all the knots and info about the interpolated spline 54 | tck[i], u[i]= interpolate.splprep(coord_original) 55 | #here we generate the new interpolated dataset, 56 | #increase the resolution by increasing the spacing, 500 in this example 57 | new = interpolate.splev(np.linspace(0,1,SmoothSize), tck[i]) 58 | dist_interp.extend(list(new[1])) 59 | height_interp.extend(list(new[2])) 60 | #print("i - len(X_interp)",i,len(X_interp)) 61 | print("已完成第",i,"段,范围",SmoothSize*(i-1),SmoothSize*i) 62 | # 小尾巴部分 63 | 64 | coord_original = [X_zero[Asmooth_len - segment_last_len:Asmooth_len],dist_original[Asmooth_len - segment_last_len:Asmooth_len],height_original[Asmooth_len - segment_last_len:Asmooth_len]] 65 | tck_final, u_final= interpolate.splprep(coord_original) 66 | #here we generate the new interpolated dataset, 67 | #increase the resolution by increasing the spacing, 500 in this example 68 | new = interpolate.splev(np.linspace(0,1,segment_last_len), tck_final) 69 | dist_interp.extend(list(new[1])) 70 | height_interp.extend(list(new[2])) 71 | print("已完成小尾巴部分,范围",Asmooth_len - segment_last_len,Asmooth_len) 72 | 73 | # SmoothSize=len(X_original) 74 | else: 75 | coord_original = [X_zero,dist_original,height_original] 76 | tck_final, u_final= interpolate.splprep(coord_original) 77 | #here we generate the new interpolated dataset, 78 | #increase the resolution by increasing the spacing, 500 in this example 79 | new = interpolate.splev(np.linspace(0,1,Asmooth_len), tck_final) 80 | #print(new) 81 | dist_interp.extend(list(new[1])) 82 | height_interp.extend(list(new[2])) 83 | print("已完成 pchip,范围",'0',Asmooth_len) 84 | except Exception as e: 85 | print('Python message: %s\n' % e) 86 | ''' 87 | print("signal 长度:", len(signal)) 88 | print("X_interp 长度:",len(height_interp)) 89 | print("*****************************") 90 | print("signal 前10个记录:") 91 | print(signal[0:10]) 92 | ''' 93 | print("-----------------------------") 94 | print("X_interp 前10个记录:") 95 | print(height_interp[0:10]) 96 | 97 | coord_interp = [dist_interp,height_interp] 
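# NOTE: despite the "pchip" wording in the comments and progress messages, splprep/splev
# above fit a parametric B-spline, not a shape-preserving PCHIP. If true PCHIP
# interpolation is wanted, a minimal sketch (assumes dist_original is strictly
# increasing, which PchipInterpolator requires):
#     pchip = interpolate.PchipInterpolator(dist_original, height_original)
#     new_dist = np.linspace(dist_original[0], dist_original[-1], Asmooth_len)
#     coord_interp = [list(new_dist), list(pchip(new_dist))]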
98 | #save_df(coord_interp) 99 | return coord_interp 100 | 101 | 102 | 103 | 104 | def interp_linear_dist_ph(Asmooth_len,height_original,dist_original): 105 | dist_interp = [] 106 | height_interp = [] 107 | #coord_original = [X_zero,dist_original,height_original] 108 | #tck_final, u_final= interpolate.splprep(coord_original) 109 | 110 | f = interpolate.interp1d(x=dist_original,y=height_original,kind='linear') 111 | dist_interp = np.linspace(start=dist_original[0],stop=dist_original[-1],num=Asmooth_len+1) 112 | height_interp = f(dist_interp) 113 | #interpolate.linear(, tck_final) 114 | #dist_interp.extend(list(new[1])) 115 | #height_interp.extend(list(new[2])) 116 | #height_interp = new 117 | print("已完成 linear 插值,范围",'0',Asmooth_len) 118 | 119 | print("-----------------------------") 120 | print("X_interp 前10个记录:") 121 | print(height_interp[0:10]) 122 | coord_interp = [dist_interp,height_interp] 123 | #save_df(coord_interp) 124 | return coord_interp 125 | 126 | 127 | def interp_3D_XYH(Asmooth_len,SmoothSize,X_original,Y_original,height_original,dist_original): 128 | X_interp = [] 129 | Y_interp = [] 130 | height_interp = [] 131 | # 将全长拆开为 2 * window_size m 的小段 132 | segment_num = round(Asmooth_len / SmoothSize) 133 | segment_last_len = Asmooth_len % SmoothSize 134 | 135 | tck = list(range(1,segment_num + 1)) 136 | u = list(range(1,segment_num + 1)) 137 | 138 | if Asmooth_len != SmoothSize: 139 | # 整段部分 140 | for i in range(1,segment_num): 141 | # coord_original 是 XYH 三维数组 142 | coord_original = [list(X_original)[SmoothSize*(i-1):SmoothSize*i],list(Y_original)[SmoothSize*(i-1):SmoothSize*i],list(height_original)[SmoothSize*(i-1):SmoothSize*i]] 143 | #now we get all the knots and info about the interpolated spline 144 | tck[i], u[i]= interpolate.splprep(coord_original) 145 | #here we generate the new interpolated dataset, 146 | #increase the resolution by increasing the spacing, 500 in this example 147 | new = interpolate.splev(np.linspace(0,1,SmoothSize), tck[i]) 148 | X_interp.extend(list(new[0])) 149 | Y_interp.extend(list(new[1])) 150 | height_interp.extend(list(new[2])) 151 | #print("i - len(X_interp)",i,len(X_interp)) 152 | print("已完成第",i,"段,范围",SmoothSize*(i-1),SmoothSize*i) 153 | # 小尾巴部分 154 | 155 | coord_original = [list(X_original)[Asmooth_len - segment_last_len:Asmooth_len],list(Y_original)[Asmooth_len - segment_last_len:Asmooth_len],list(height_original)[Asmooth_len - segment_last_len:Asmooth_len]] 156 | tck_final, u_final= interpolate.splprep(coord_original) 157 | #here we generate the new interpolated dataset, 158 | #increase the resolution by increasing the spacing, 500 in this example 159 | new = interpolate.splev(np.linspace(0,1,segment_last_len), tck_final) 160 | X_interp.extend(list(new[0])) 161 | Y_interp.extend(list(new[1])) 162 | height_interp.extend(list(new[2])) 163 | print("已完成小尾巴部分,范围",Asmooth_len - segment_last_len,Asmooth_len) 164 | 165 | # SmoothSize=len(X_original) 166 | else: 167 | coord_original = [list(X_original),list(Y_original),list(height_original)] 168 | tck_final, u_final= interpolate.splprep(coord_original) 169 | #here we generate the new interpolated dataset, 170 | #increase the resolution by increasing the spacing, 500 in this example 171 | new = interpolate.splev(np.linspace(0,1,Asmooth_len), tck_final) 172 | X_interp.extend(list(new[0])) 173 | Y_interp.extend(list(new[1])) 174 | height_interp.extend(list(new[2])) 175 | print("已完成 pchip,范围",'0',Asmooth_len) 176 | 177 | coord_interp = [X_interp,Y_interp,height_interp,list(dist_original)] 178 | 
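# NOTE: usage sketch for interp_linear_dist_ph defined above (synthetic numbers, not
# from the repo); the first argument is the number of 1 m resampling steps, and the
# function returns [resampled along-track distance, linearly interpolated height]:
#     dist = [0.0, 10.0, 20.0, 35.0]        # along-track distance (m)
#     h = [300.0, 301.5, 299.8, 302.1]      # lowerbound heights (m)
#     coord = interp_linear_dist_ph(int(dist[-1] - dist[0]), h, dist)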
#save_df(coord_interp) 179 | return coord_interp -------------------------------------------------------------------------------- /1_extract_and_norm_t_h_3rdd=0.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Apr 15 19:19:31 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | import h5py 9 | import os 10 | import numpy as np 11 | 12 | min_elem = 0 13 | max_elem = 0 14 | 15 | # 将一个 segement 的点 delta_time 平均分成间隔为 spacing_time 的 list 16 | def av_spacing_t(delta_time_ls): 17 | # 创建一个 list 用于存每个点的新的 spaced 的时间 18 | av_delta_time_ls = [] 19 | 20 | # 计算 average spacing time 21 | point_num = len(delta_time_ls) 22 | begin_deltatime = delta_time_ls[0] 23 | end_deltatime = delta_time_ls[point_num - 1] 24 | spacing_time = (end_deltatime - begin_deltatime) / (point_num - 1) 25 | 26 | # 将新的 spaced 的时间存进新数组 27 | for i in range(0, point_num): 28 | av_delta_time_ls.append(begin_deltatime + spacing_time * i) 29 | return av_delta_time_ls 30 | 31 | # 对每个 element 做 differential 32 | def func(x): 33 | try: 34 | elem_norm = (x - min_elem)/(max_elem - min_elem) 35 | except Exception as e: 36 | print('Python message: %s\n' % e) 37 | return elem_norm 38 | 39 | # 对数组中所有元素做 differential 40 | def differential(ls): 41 | global min_elem 42 | global max_elem 43 | try: 44 | min_elem = min(ls) 45 | max_elem = max(ls) 46 | except Exception as e: 47 | print('Python message: %s\n' % e) 48 | return list(map(func,ls)) 49 | 50 | # 以 latitude 为 bound 裁切出梨树县区域的 ATL03数据 51 | def clipATL03_heights(in_file03, label): 52 | dataOut = [] 53 | with h5py.File(in_file03, 'r') as f: 54 | dsname=''.join([label, "/heights/lat_ph"]) 55 | dataOut = np.array(f[dsname]) 56 | c_edge = [0,0] 57 | # latitude edge 58 | for i in range(0, len(dataOut)): 59 | if dataOut[i] > 43.165 and dataOut[i] < 43.17: 60 | c_edge[0] = i 61 | continue 62 | elif dataOut[i] > 43.785 and dataOut[i] < 43.79: 63 | c_edge[1] = i 64 | continue 65 | c_edge.sort() 66 | print("clip取前后索引",c_edge) 67 | return c_edge 68 | 69 | # 读取 ATL03.h5 文件 70 | def readAtl08H5(in_file03, fieldName, label, c_edge): 71 | # Initialize output 72 | dataOut = [] 73 | dataOut_slice = [] 74 | if not os.path.isfile(in_file03): 75 | print('ATL03 file does not exist') 76 | try: 77 | with h5py.File(in_file03, 'r') as f: 78 | dsname=''.join([label, fieldName]) 79 | if dsname in f: 80 | dataOut = np.array(f[dsname]) 81 | dataOut_slice = dataOut[c_edge[0]:c_edge[1]] 82 | # print("True") 83 | else: 84 | dataOut_slice = [] 85 | print("False") 86 | except Exception as e: 87 | print('Python message: %s\n' % e) 88 | #print(len(dataOut_slice)) 89 | return dataOut_slice 90 | 91 | def readAtl08H5_(in_file03, fieldName, label): 92 | # Initialize output 93 | dataOut = [] 94 | if not os.path.isfile(in_file03): 95 | print('ATL03 file does not exist') 96 | try: 97 | with h5py.File(in_file03, 'r') as f: 98 | dsname=''.join([label, fieldName]) 99 | if dsname in f: 100 | dataOut = np.array(f[dsname]) 101 | # print("True") 102 | else: 103 | dataOut = [] 104 | print("False") 105 | except Exception as e: 106 | print('Python message: %s\n' % e) 107 | #print(len(dataOut_slice)) 108 | return dataOut 109 | 110 | # 读取所有属性信息并输出为1个txt 111 | def out_attribute(file_path): 112 | #label_ls = ["gt1l","gt1r","gt2l","gt2r","gt3l","gt3r"] 113 | label_ls = ["gt1l","gt2l","gt3l"] 114 | date = file_path[6:14] 115 | try: 116 | for i in label_ls: 117 | # 提示信息 118 | print(file_path[6:14],i) 119 | 120 | # latitude 为裁切边界 121 | c_edge = clipATL03_heights(file_path, 
i) 122 | 123 | # 提取裁切边界内的 ATL03 属性数据:latitude、longitude、height、deltatime 124 | lat_ph = list(readAtl08H5_(file_path, "/heights/lat_ph", i))[c_edge[0]:c_edge[1]] 125 | lon_ph = list(readAtl08H5_(file_path, "/heights/lon_ph", i))[c_edge[0]:c_edge[1]] 126 | h_ph = list(readAtl08H5_(file_path, "/heights/h_ph", i))[c_edge[0]:c_edge[1]] 127 | # GPS elapsed time 128 | delta_time = list(readAtl08H5_(file_path, "/heights/delta_time", i))[c_edge[0]:c_edge[1]] 129 | # photon channel confidence 130 | #arry = readAtl08H5_(file_path, "/heights/signal_conf_ph", i) 131 | signal_conf_ph = list(readAtl08H5_(file_path, "/heights/signal_conf_ph", i))[c_edge[0]:c_edge[1]] 132 | #list(arry)[c_edge[0]:c_edge[1]] 133 | # along-track distance 134 | dist_ph_along = list(readAtl08H5_(file_path, "/heights/dist_ph_along", i)) 135 | # cumulative (ralate to 1st photon in research area) along-track distance 136 | final_along_track_dist = [] 137 | # segment_dist_x 是从这条轨道最开始点到该segment的累计距离,与delta_time相关 138 | # segment_dist_x = list(readAtl08H5_(file_path, "/geolocation/segment_dist_x", i)) 139 | # 给定segment中第一个光子的索引 140 | ph_index_beg = list(readAtl08H5_(file_path, "/geolocation/ph_index_beg", i)) 141 | # 在给定segment中的光子数量 142 | segment_ph_cnt = list(readAtl08H5_(file_path, "/geolocation/segment_ph_cnt", i)) 143 | # 给定segment的长度 144 | segment_length = list(readAtl08H5_(file_path, "/geolocation/segment_length", i)) 145 | ''' 146 | sum_pnum = 0 147 | for m in range(len(segment_ph_cnt)): 148 | sum_pnum = sum_pnum + segment_ph_cnt[m] 149 | print("定segment中的光子数量之和:",sum_pnum) 150 | ''' 151 | # 计算每个光子的累计沿轨长度 152 | k = 0 153 | cu_segment_length = 0 154 | final_along_track_dist = [] 155 | for m in range(len(segment_ph_cnt)): 156 | if(segment_ph_cnt[m]==0): 157 | cu_segment_length = cu_segment_length + segment_length[m] 158 | else: 159 | try: 160 | for j in range(segment_ph_cnt[m]): 161 | k = m + j 162 | #final_along_track_dist.append((ph_index_beg[m]+j,segment_ph_cnt[m],dist_ph_along[k],cu_segment_length + dist_ph_along[k])) 163 | final_along_track_dist.append(cu_segment_length + dist_ph_along[k]) 164 | cu_segment_length = cu_segment_length + segment_length[m] 165 | except: 166 | break 167 | cut_final_along_track_dist = final_along_track_dist[c_edge[0]:c_edge[1]] 168 | ''' 169 | print("length:h_ph",len(h_ph)) 170 | print("length:segment_ph_cnt",len(segment_ph_cnt)) 171 | # print("length:along_track_dist",len(along_track_dist)) 172 | print("length:final_along_track_dist",len(final_along_track_dist)) 173 | print("length:cut_final_along_track_dist",len(cut_final_along_track_dist)) 174 | ''' 175 | 176 | # 存储归一化 (relative_average_spacing_delta_time)-(height) 二维坐标 177 | # 将用于 DRAGANN 滤波获得 signal photons 178 | d_delta_time = differential(delta_time) 179 | d_h_ph = differential(h_ph) 180 | # 由于 3 对 bins 是同时扫描的,因此连接 6 个 bins 为 1 个 list 不适用 181 | arr_0 = np.zeros(len(d_delta_time)) 182 | 183 | # 输出未根据 photon signal confidence 筛选的光子 184 | # output(date,i,"coord_attribute",lon_ph,lat_ph,h_ph,signal_conf_ph,dist_ph_along) 185 | output(date,i,"coord_attribute",lon_ph,lat_ph,h_ph,cut_final_along_track_dist) 186 | output(date,i,"d_time_h",d_delta_time,d_h_ph,arr_0,arr_0) 187 | 188 | ''' 189 | # 记下 photon signal confidence 在 land 不等于 3-4 光子的 Id 190 | 191 | signal_conf_ph (photon signal confidence) 为 5xN 的数组 192 | 5 列分别表示信号点从下述表面类型(land, ocean, sea ice, land ice and inland water)获得的可能性 193 | 需要提取的是 land 地表类型 flag = 3 和 4 的光子。 194 | 作为第二遍 DRAGANN 滤波的其中一个输入数据集(另一个输入是第一遍 GRAGANN 滤波结果集) 195 | ''' 196 | 197 | Id_del_by_config = [] 198 | for j in 
range(len(signal_conf_ph)): 199 | if signal_conf_ph[j][0]==3 or signal_conf_ph[j][0]==4: 200 | Id_del_by_config.append(j) 201 | # print(Id_del_by_config) 202 | # 选出 land 分类等于 3-4 的光子 203 | lat_3and4 = [] 204 | lon_3and4 = [] 205 | h_3and4 = [] 206 | # signal_conf_3and4 = [] 207 | dist_3and4_along = [] 208 | for k in Id_del_by_config: 209 | lat_3and4.append(lat_ph[k]) 210 | lon_3and4.append(lon_ph[k]) 211 | h_3and4.append(h_ph[k]) 212 | # signal_conf_3and4.append(signal_conf_ph[k]) 213 | dist_3and4_along.append(cut_final_along_track_dist[k]) 214 | # 输出 photon signal confidence 筛选后的光子 215 | # output(date,i,"coord_attribute_3and4",lon_3and4,lat_3and4,h_3and4,signal_conf_3and4,dist_3and4_along) 216 | output(date,i,"coord_attribute_3and4",lon_3and4,lat_3and4,h_3and4,dist_3and4_along) 217 | 218 | except Exception as e: 219 | print('Python message: %s\n' % e) 220 | return 221 | 222 | 223 | # 所有label输出为一个txt 224 | def output(date,label,attribute_name,attribute1 = [],attribute2 = [],attribute3 = [],attribute4 = []): 225 | ls = [] 226 | string = '\n' 227 | for i in range(0,len(attribute1)): 228 | str1 = "" 229 | str1 = str(attribute1[i])+","+str(attribute2[i])+","+str(attribute3[i])+","+str(attribute4[i]) 230 | ls.append(str1) 231 | f=open("./output/"+date+"_"+label+"_"+attribute_name+".txt","w") 232 | f.write(string.join(ls)) 233 | f.close() 234 | return 235 | 236 | 237 | path = './' 238 | dirs = os.listdir(path) 239 | for dir in dirs: 240 | if dir[-2:] == "h5": 241 | out_attribute(dir) -------------------------------------------------------------------------------- /output/1/compare_height_function.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sun May 22 07:21:07 2022 4 | 5 | @author: longj 6 | """ 7 | 8 | 9 | 10 | # 函数功能,给 signal 数据 按 int_dist 切片 11 | def slice_unique_int_dist(signal): 12 | len_signal = len(signal) 13 | signal_slice = [] 14 | i = 0 15 | while i < len_signal-1: 16 | ls_temp=[] 17 | # unique_hp = signal[i][2] 18 | ls_temp.append(signal[i]) 19 | unique_int_dist=int(signal[i][3]) 20 | if int(signal[i][3]-signal[i+1][3])==0: 21 | #print("_",end=", ") 22 | ls_temp.append(signal[i+1]) 23 | #print(">> ",ls_temp) 24 | signal_slice.append([unique_int_dist,ls_temp]) 25 | i = i + 2 26 | continue 27 | signal_slice.append([unique_int_dist,ls_temp]) 28 | i = i + 1 29 | #print("**************************************") 30 | 31 | #print("len_signal_slice",len(signal_slice)) 32 | #print(signal_slice) 33 | return signal_slice 34 | 35 | 36 | # *********************** 继承 ********************** 37 | # 函数功能,给 signal 数据 按 int_dist 切片 38 | def slice_unique_int_dist_2col(signal): 39 | len_signal = len(signal) 40 | signal_slice = [] 41 | i = 0 42 | while i < len_signal-1: 43 | ls_temp=[] 44 | # unique_hp = signal[i][2] 45 | ls_temp.append(signal[i]) 46 | unique_int_dist=int(signal[i][0]) 47 | if int(signal[i][0]-signal[i+1][0])==0: 48 | #print("_",end=", ") 49 | ls_temp.append(signal[i+1]) 50 | #print(">> ",ls_temp) 51 | signal_slice.append([unique_int_dist,ls_temp]) 52 | i = i + 2 53 | continue 54 | signal_slice.append([unique_int_dist,ls_temp]) 55 | i = i + 1 56 | #print("**************************************") 57 | 58 | #print("len_signal_slice",len(signal_slice)) 59 | #print(signal_slice) 60 | return signal_slice 61 | 62 | 63 | # *********************** 继承 ********************** 64 | 65 | 66 | 67 | # 函数功能,对每片 int_dist, 与目标数据集比较 height 值 68 | def compare_2_layers(signal, Asmooth_surface, delta): 69 | signal_slice = 
slice_unique_int_dist(signal) 70 | # 获取与 interp_Asmooth 重叠段 signal 光子 71 | result_set = [] 72 | i = 0 73 | try: 74 | # 对每个 signal dist slice 去找对应的 Asmooth dist,比较高程 75 | for unique_slice in signal_slice: 76 | unique_int_dist = unique_slice[0] 77 | # 找 Asmooth 对应的 dist 78 | while Asmooth_surface[i][0] len(Asmooth_surface)-2: 133 | break 134 | for j in range(len(unique_slice[1])): 135 | # 比较 height 136 | if int(unique_slice[1][j][2]) < float(Asmooth_surface[i][2]) + delta: 137 | result_set.append(unique_slice[1][j]) 138 | else: 139 | inverse_result_set.append(unique_slice[1][j]) 140 | if i > len(Asmooth_surface)-2: 141 | break 142 | return [result_set,inverse_result_set] 143 | 144 | def compare_2_layers_7col_(signal, Asmooth_surface, delta): 145 | signal_slice = slice_unique_int_dist(signal) 146 | print('len_signal_slice',len(signal_slice)) 147 | # 获取与 interp_Asmooth 重叠段 signal 光子 148 | result_set = [] 149 | inverse_result_set = [] 150 | i = 0 151 | # 对每个 signal dist slice 去找对应的 Asmooth dist,比较高程 152 | for unique_slice in signal_slice: 153 | unique_int_dist = unique_slice[0] 154 | # 找 Asmooth 对应的 dist 155 | while Asmooth_surface[i][0] len(Asmooth_surface)-2: 158 | break 159 | for j in range(len(unique_slice[1])): 160 | # 比较 height 161 | if int(unique_slice[1][j][2]) < float(Asmooth_surface[i][1]) + delta: 162 | result_set.append(unique_slice[1][j]) 163 | else: 164 | inverse_result_set.append(unique_slice[1][j]) 165 | return [result_set,inverse_result_set] 166 | 167 | def compare_2_layers_2col(signal, Asmooth_surface, delta): 168 | signal_slice = slice_unique_int_dist_2col(signal) 169 | # 获取与 interp_Asmooth 重叠段 signal 光子 170 | result_set = [] 171 | inverse_result_set = [] 172 | i = 0 173 | try: 174 | # 对每个 signal dist slice 去找对应的 Asmooth dist,比较高程 175 | for unique_slice in signal_slice: 176 | unique_int_dist = unique_slice[0] 177 | # 找 Asmooth 对应的 dist 178 | while Asmooth_surface[i][0] len(along_dist)",len(along_dist)) 23 | print("> len(height)",len(height)) 24 | 25 | 26 | surface_len = len(height) 27 | print("surface_len", surface_len) 28 | # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height 29 | new_surface = [] 30 | 31 | # 设 surface_len = 300 m 32 | # surface_len = 1000 33 | 34 | original_begin_index = int((SmoothSize - 1)/2) 35 | # i 是 filtered 的 along_dist 的 index 36 | result_theorical_len = surface_len-(SmoothSize-1) 37 | for i in range(result_theorical_len): 38 | median_filtered = 0 39 | # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index 40 | overlay_h = [] 41 | for j in range(SmoothSize): 42 | if height[i+j] != -100: 43 | overlay_h.append(height[i+j]) 44 | len_overlay_h = len(overlay_h) 45 | # 考虑 pattern 对应的段全为【无值】,设置为异常值 46 | if len_overlay_h == 0: 47 | median_filtered = -100 48 | # 考虑 pattern 对应段值为【偶数】,中位数是 sorted 后中间两个数的平均值 49 | elif len_overlay_h%2 == 0: 50 | sorted_overlay_h = sorted(overlay_h) 51 | median_filtered = int(0.5 * (sorted_overlay_h[int(len_overlay_h/2-1)]+sorted_overlay_h[int(len_overlay_h/2)])) 52 | else: 53 | median_idx = int((len_overlay_h-1)/2) 54 | median_filtered = round(sorted(overlay_h)[median_idx],3) 55 | new_surface.append((along_dist[original_begin_index+i],median_filtered)) 56 | #print((along_dist[original_begin_index+i],median_filtered)) 57 | 58 | 59 | #original_end_index = surface_len - 1 - original_begin_index 60 | # new_along_dist = along_dist[original_begin_index:original_end_index] 61 | 62 | print(">> len(new_surface)",len(new_surface)) 63 | 64 | # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface) 65 | with 
open('./latest_median_surface.txt','w') as out: 66 | csv_out=csv.writer(out) 67 | # csv_out.writerow(['X','Y']) 68 | for row in new_surface: 69 | csv_out.writerow(row) 70 | 71 | # 提取出非异常值的 photons 72 | new_surface_sub = [] 73 | for i in new_surface: 74 | if i[1] != -100: 75 | new_surface_sub.append(i) 76 | 77 | print(">>> len(new_surface_sub)",len(new_surface_sub)) 78 | 79 | return new_surface_sub 80 | 81 | # 适合没有空缺 且 1个 unit 中没有重复点的普通情况 82 | def average_filter_good(original_surface,SmoothSize): 83 | print("len(original_surface)",len(original_surface)) 84 | height = [] 85 | along_dist = [] 86 | 87 | for idx in range(len(original_surface)): 88 | height.append(original_surface[idx][1]) 89 | along_dist.append(original_surface[idx][0]) 90 | 91 | print("> len(along_dist)",len(along_dist)) 92 | print("> len(height)",len(height)) 93 | 94 | 95 | surface_len = len(height) 96 | print("surface_len", surface_len) 97 | # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height 98 | new_surface = [] 99 | 100 | # 设 surface_len = 300 m 101 | # surface_len = 1000 102 | 103 | original_begin_index = int((SmoothSize - 1)/2) 104 | # i 是 filtered 的 along_dist 的 index 105 | result_theorical_len = surface_len-(SmoothSize-1) 106 | for i in range(result_theorical_len): 107 | average_filtered = 0 108 | # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index 109 | overlay_h = [] 110 | for j in range(SmoothSize): 111 | if height[i+j] != -100: 112 | overlay_h.append(height[i+j]) 113 | len_overlay_h = len(overlay_h) 114 | # 考虑 pattern 对应的段全为【无值】,设置为异常值 115 | if len_overlay_h == 0: 116 | average_filtered = -100 117 | # 求 pattern 平均值 118 | else: 119 | average_filtered = np.mean(overlay_h) 120 | new_surface.append((along_dist[original_begin_index+i],average_filtered)) 121 | 122 | 123 | #original_end_index = surface_len - 1 - original_begin_index 124 | # new_along_dist = along_dist[original_begin_index:original_end_index] 125 | 126 | print(">> len(new_surface)",len(new_surface)) 127 | 128 | # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface) 129 | with open('./latest_median_surface.txt','w') as out: 130 | csv_out=csv.writer(out) 131 | # csv_out.writerow(['X','Y']) 132 | for row in new_surface: 133 | csv_out.writerow(row) 134 | 135 | # 提取出非异常值的 photons 136 | new_surface_sub = [] 137 | for i in new_surface: 138 | if i[1] != -100: 139 | new_surface_sub.append(i) 140 | 141 | print(">>> len(new_surface_sub)",len(new_surface_sub)) 142 | 143 | return new_surface_sub 144 | 145 | # **************************** 继承 ********************************************** 146 | # 适用于 1个 1m unit 中可能存在多个光子的情况 147 | def median_filter(original_surface,SmoothSize): 148 | # 输入 list 长度为 len_original_surface; 输出更长,为 surface_len = (max_dist - min_dist) 149 | print("len(original_surface)",len(original_surface)) 150 | # surface_len = (max_dist - min_dist) 151 | surface_len = int(original_surface[len(original_surface)-1][0] - original_surface[0][0]) 152 | print("surface_len", surface_len,"= int(",original_surface[len(original_surface)-1][0],"-",original_surface[0][0],")") 153 | height = [] 154 | along_dist = [] 155 | # 遍历 min_dist ~ max_dist, 将有 height 值的 delts_dist 对应 value 设为 height;将无 height 对应的 value 设为 -100 156 | # 完成以下循环过程后 height 和 dist 长度都应为 (max_dist - min_dist) 157 | idx = 0 158 | # 判断 along_dist 对应这段有无 height 159 | for i in range(surface_len): 160 | # 当 idx 超出 original_surface 长度 161 | if idx > len(original_surface)-1: 162 | height.append(-100) 163 | # 存入 along_dist 164 | along_dist.append(original_surface[0][0]+i) 165 | continue 166 | 167 | # 
研究的该 1 m unit 内有光子 168 | if int(original_surface[idx][0]) == int(original_surface[0][0]+i): 169 | # 有 2 种情况 ① 1 m unit 内只有唯一一个光子;② 1 m unit 内有 多 个相同的光子 170 | # ② 判断 1 m unit 内有 是否有 多 个相同的光子.初始假定 否 171 | #judge = False 172 | # 研究的 1 m unit 中相同光子的个数, num 已为 1(至少有1个) 173 | num = 1 174 | # 假设 1m unit 内至多只有 6 个光子。我需要判断在 研究区 im 内 idx~idx+6 范围内是否还有j个光子(num=j+1) 175 | for j in range(1,6): 176 | if idx < len(original_surface)-j and int(original_surface[idx+j][0]) == int(original_surface[0][0]+i): 177 | # 178 | # print(idx,i,int(original_surface[idx][0]),original_surface[idx][1]) 179 | # 更新相同的光子num 180 | num = j+1 181 | # 是 有多个相同的光子 182 | #judge = True 183 | # 是 有多个相同的光子 184 | #if judge is True: 185 | height.append(original_surface[idx][1]) 186 | idx = idx + num 187 | # 存入 along_dist 188 | along_dist.append(original_surface[0][0]+i) 189 | continue 190 | ''' 191 | else: 192 | # ① 1 m unit 内只有唯一一个光子; 193 | height.append(original_surface[idx][1]) 194 | # print(idx,i,int(original_surface[idx][0]),original_surface[idx][1]) 195 | idx = idx + num 196 | # 存入 along_dist 197 | along_dist.append(original_surface[0][0]+i) 198 | continue 199 | ''' 200 | # 不属于上述 2 种情况 201 | height.append(-100) 202 | # 存入 along_dist 203 | along_dist.append(original_surface[0][0]+i) 204 | 205 | 206 | print("> len(along_dist)",len(along_dist)) 207 | print("> len(height)",len(height)) 208 | 209 | # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height 210 | new_surface = [] 211 | 212 | # 设 surface_len = 300 m 213 | # surface_len = 1000 214 | 215 | original_begin_index = int((SmoothSize - 1)/2) 216 | # i 是 filtered 的 along_dist 的 index 217 | result_theorical_len = surface_len-(SmoothSize-1) 218 | for i in range(result_theorical_len): 219 | median_filtered = 0 220 | # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index 221 | overlay_h = [] 222 | for j in range(SmoothSize): 223 | if height[i+j] != -100: 224 | overlay_h.append(height[i+j]) 225 | len_overlay_h = len(overlay_h) 226 | # 考虑 pattern 对应的段全为【无值】,设置为异常值 227 | if len_overlay_h == 0: 228 | median_filtered = -100 229 | # 考虑 pattern 对应段值为【偶数】,中位数是 sorted 后中间两个数的平均值 230 | elif len_overlay_h%2 == 0: 231 | sorted_overlay_h = sorted(overlay_h) 232 | median_filtered = int(0.5 * (sorted_overlay_h[int(len_overlay_h/2-1)]+sorted_overlay_h[int(len_overlay_h/2)])) 233 | else: 234 | median_idx = int((len_overlay_h-1)/2) 235 | median_filtered = round(sorted(overlay_h)[median_idx],3) 236 | new_surface.append((along_dist[original_begin_index+i],median_filtered)) 237 | #print((along_dist[original_begin_index+i],median_filtered)) 238 | 239 | 240 | #original_end_index = surface_len - 1 - original_begin_index 241 | # new_along_dist = along_dist[original_begin_index:original_end_index] 242 | 243 | print(">> len(new_surface)",len(new_surface)) 244 | ''' 245 | # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface) 246 | with open('./latest_median_surface.txt','w') as out: 247 | csv_out=csv.writer(out) 248 | # csv_out.writerow(['X','Y']) 249 | for row in new_surface: 250 | csv_out.writerow(row) 251 | ''' 252 | # 提取出非异常值的 photons 253 | new_surface_sub = [] 254 | for i in new_surface: 255 | if i[1] != -100: 256 | new_surface_sub.append(i) 257 | 258 | print(">>> len(new_surface_sub)",len(new_surface_sub)) 259 | 260 | return new_surface_sub 261 | 262 | 263 | def average_filter(original_surface,SmoothSize): 264 | # 输入 list 长度为 len_original_surface; 输出更长,为 surface_len = (max_dist - min_dist) 265 | print("len(original_surface)",len(original_surface)) 266 | # surface_len = (max_dist - min_dist) 267 | 
surface_len = int(original_surface[len(original_surface)-1][0] - original_surface[0][0]) 268 | print("surface_len", surface_len,"= int(",original_surface[len(original_surface)-1][0],"-",original_surface[0][0],")") 269 | height = [] 270 | along_dist = [] 271 | # 遍历 min_dist ~ max_dist, 将有 height 值的 delts_dist 对应 value 设为 height;将无 height 对应的 value 设为 -100 272 | # 完成以下循环过程后 height 和 dist 长度都应为 (max_dist - min_dist) 273 | idx = 0 274 | # 判断 along_dist 对应这段有无 height 275 | for i in range(surface_len): 276 | # 当 idx 超出 original_surface 长度 277 | if idx > len(original_surface)-1: 278 | height.append(-100) 279 | # 存入 along_dist 280 | along_dist.append(original_surface[0][0]+i) 281 | continue 282 | 283 | # 研究的该 1 m unit 内有光子 284 | if int(original_surface[idx][0]) == int(original_surface[0][0]+i): 285 | # 有 2 种情况 ① 1 m unit 内只有唯一一个光子;② 1 m unit 内有 多 个相同的光子 286 | # ② 判断 1 m unit 内有 是否有 多 个相同的光子.初始假定 否 287 | #judge = False 288 | # 研究的 1 m unit 中相同光子的个数, num 已为 1(至少有1个) 289 | num = 1 290 | # 假设 1m unit 内至多只有 6 个光子。我需要判断在 研究区 im 内 idx~idx+6 范围内是否还有j个光子(num=j+1) 291 | for j in range(1,6): 292 | if idx < len(original_surface)-j and int(original_surface[idx+j][0]) == int(original_surface[0][0]+i): 293 | # 294 | # print(idx,i,int(original_surface[idx][0]),original_surface[idx][1]) 295 | # 更新相同的光子num 296 | num = j+1 297 | # 是 有多个相同的光子 298 | #judge = True 299 | # 是 有多个相同的光子 300 | #if judge is True: 301 | height.append(original_surface[idx][1]) 302 | idx = idx + num 303 | # 存入 along_dist 304 | along_dist.append(original_surface[0][0]+i) 305 | continue 306 | ''' 307 | else: 308 | # ① 1 m unit 内只有唯一一个光子; 309 | height.append(original_surface[idx][1]) 310 | # print(idx,i,int(original_surface[idx][0]),original_surface[idx][1]) 311 | idx = idx + num 312 | # 存入 along_dist 313 | along_dist.append(original_surface[0][0]+i) 314 | continue 315 | ''' 316 | # 不属于上述 2 种情况 317 | height.append(-100) 318 | # 存入 along_dist 319 | along_dist.append(original_surface[0][0]+i) 320 | 321 | 322 | print("> len(along_dist)",len(along_dist)) 323 | print("> len(height)",len(height)) 324 | 325 | # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height 326 | new_surface = [] 327 | 328 | # 设 surface_len = 300 m 329 | # surface_len = 300 330 | 331 | original_begin_index = int((SmoothSize - 1)/2) 332 | # i 是 filtered 的 along_dist 的 index 333 | result_theorical_len = surface_len-(SmoothSize-1) 334 | for i in range(result_theorical_len): 335 | average_filtered = 0 336 | # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index 337 | overlay_h = [] 338 | for j in range(SmoothSize): 339 | if height[i+j] != -100: 340 | overlay_h.append(height[i+j]) 341 | len_overlay_h = len(overlay_h) 342 | # 考虑 pattern 对应的段全为【无值】,设置为异常值 343 | if len_overlay_h == 0: 344 | average_filtered = -100 345 | # 求 pattern 平均值 346 | else: 347 | average_filtered = np.mean(overlay_h) 348 | new_surface.append((along_dist[original_begin_index+i],average_filtered)) 349 | 350 | 351 | #original_end_index = surface_len - 1 - original_begin_index 352 | # new_along_dist = along_dist[original_begin_index:original_end_index] 353 | 354 | print(">> len(new_surface)",len(new_surface)) 355 | # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface) 356 | ''' 357 | with open('./latest_average_surface.txt','w') as out: 358 | csv_out=csv.writer(out) 359 | # csv_out.writerow(['X','Y']) 360 | for row in new_surface: 361 | csv_out.writerow(row) 362 | ''' 363 | # 提取出非异常值的 photons 364 | new_surface_sub = [] 365 | for i in new_surface: 366 | if i[1] != -100: 367 | new_surface_sub.append(i) 368 | 369 | 
print(">>> len(new_surface_sub)",len(new_surface_sub)) 370 | 371 | return new_surface_sub 372 | 373 | 374 | def median_filter_with_XY(original_surface,SmoothSize,XY): 375 | # 输入 list 长度为 len_original_surface; 输出更长,为 surface_len = (max_dist - min_dist) 376 | print("len(original_surface)",len(original_surface)) 377 | # surface_len = (max_dist - min_dist) 378 | surface_len = int(original_surface[len(original_surface)-1][0] - original_surface[0][0]) 379 | print("surface_len", surface_len,"= int(",original_surface[len(original_surface)-1][0],"-",original_surface[0][0],")") 380 | height = [] 381 | along_dist = [] 382 | remain_XY = [] 383 | # 遍历 min_dist ~ max_dist, 将有 height 值的 delts_dist 对应 value 设为 height;将无 height 对应的 value 设为 -100 384 | # 完成以下循环过程后 height 和 dist 长度都应为 (max_dist - min_dist) 385 | idx = 0 386 | # 判断 along_dist 对应这段有无 height 387 | for i in range(surface_len): 388 | # 当 idx 超出 original_surface 长度 389 | if idx > len(original_surface)-1: 390 | break 391 | 392 | # 研究的该 1 m unit 内有光子 393 | if int(original_surface[idx][0]) == int(original_surface[0][0]+i): 394 | # 有 2 种情况 ① 1 m unit 内只有唯一一个光子;② 1 m unit 内有 多 个相同的光子 395 | # ② 判断 1 m unit 内有 是否有 多 个相同的光子.初始假定 否 396 | #judge = False 397 | # 研究的 1 m unit 中相同光子的个数, num 已为 1(至少有1个) 398 | num = 1 399 | # 假设 1m unit 内至多只有 6 个光子。我需要判断在 研究区 im 内 idx~idx+6 范围内是否还有j个光子(num=j+1) 400 | for j in range(1,6): 401 | if idx < len(original_surface)-j and int(original_surface[idx+j][0]) == int(original_surface[0][0]+i): 402 | # 403 | # print(idx,i,int(original_surface[idx][0]),original_surface[idx][1]) 404 | # 更新相同的光子num 405 | num = j+1 406 | # 是 有多个相同的光子 407 | #judge = True 408 | # 是 有多个相同的光子 409 | #if judge is True: 410 | height.append(original_surface[idx][1]) 411 | remain_XY.append(XY[idx]) 412 | idx = idx + num 413 | # 存入 along_dist 414 | along_dist.append(original_surface[0][0]+i) 415 | continue 416 | ''' 417 | else: 418 | # ① 1 m unit 内只有唯一一个光子; 419 | height.append(original_surface[idx][1]) 420 | # print(idx,i,int(original_surface[idx][0]),original_surface[idx][1]) 421 | idx = idx + num 422 | # 存入 along_dist 423 | along_dist.append(original_surface[0][0]+i) 424 | continue 425 | ''' 426 | # 不属于上述 2 种情况 427 | height.append(-100) 428 | remain_XY.append((-100,-100)) 429 | # 存入 along_dist 430 | along_dist.append(original_surface[0][0]+i) 431 | 432 | 433 | print("> len(along_dist)",len(along_dist)) 434 | print("> len(height)",len(height)) 435 | print("> len(remain_XY)",len(remain_XY)) 436 | 437 | # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height 438 | new_surface = [] 439 | 440 | # 设 surface_len = 300 m 441 | # surface_len = 1000 442 | 443 | original_begin_index = int((SmoothSize - 1)/2) 444 | # i 是 filtered 的 along_dist 的 index 445 | result_theorical_len = surface_len-(SmoothSize-1) 446 | for i in range(result_theorical_len): 447 | median_filtered = 0 448 | # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index 449 | overlay_h = [] 450 | for j in range(SmoothSize): 451 | if height[i+j] != -100: 452 | overlay_h.append(height[i+j]) 453 | len_overlay_h = len(overlay_h) 454 | # 考虑 pattern 对应的段全为【无值】,设置为异常值 455 | if len_overlay_h == 0: 456 | median_filtered = -100 457 | # 考虑 pattern 对应段值为【偶数】,中位数是 sorted 后中间两个数的平均值 458 | elif len_overlay_h%2 == 0: 459 | sorted_overlay_h = sorted(overlay_h) 460 | median_filtered = int(0.5 * (sorted_overlay_h[int(len_overlay_h/2-1)]+sorted_overlay_h[int(len_overlay_h/2)])) 461 | else: 462 | median_idx = int((len_overlay_h-1)/2) 463 | median_filtered = round(sorted(overlay_h)[median_idx],3) 464 | 
        new_surface.append((along_dist[original_begin_index+i],median_filtered,remain_XY[original_begin_index+i][0],remain_XY[original_begin_index+i][1]))
465 |         #print((along_dist[original_begin_index+i],median_filtered))
466 | 
467 | 
468 |     #original_end_index = surface_len - 1 - original_begin_index
469 |     # new_along_dist = along_dist[original_begin_index:original_end_index]
470 | 
471 |     print(" >> len(new_surface)",len(new_surface))
472 |     '''
473 |     # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface)
474 |     with open('./output/new_surface.txt','w') as out:
475 |         csv_out=csv.writer(out)
476 |         # csv_out.writerow(['X','Y'])
477 |         for row in new_surface:
478 |             csv_out.writerow(row)
479 |     '''
480 |     # 提取出非异常值的 photons
481 |     new_surface_sub = []
482 |     for i in new_surface:
483 |         if i[1] != -100:
484 |             new_surface_sub.append(i)
485 | 
486 |     print(" >>> len(new_surface_sub)",len(new_surface_sub))
487 | 
488 |     return new_surface_sub
489 | 
490 | def average_filter_with_empty(original_surface,SmoothSize):
491 |     # 输入 list 长度为 len_original_surface; 输出更长,为 surface_len = (max_dist - min_dist)
492 |     print("len(original_surface)",len(original_surface))
493 |     #dist_max = original_surface[len(original_surface)-1][0]
494 | 
495 |     along_dist = []
496 |     height = []
497 |     dist = original_surface[0][0]
498 |     for idx in range(len(original_surface)):
499 |         unique_int_dist = original_surface[idx][0]
500 |         #unique_int_dist = int(original_surface[idx][0])
501 |         # 找 Asmooth 对应的 dist
502 |         while dist < unique_int_dist:
503 |             along_dist.append(dist)
504 |             height.append(-100)
505 |             dist = dist + 1
506 |         along_dist.append(unique_int_dist)
507 |         height.append(original_surface[idx][1])
508 |         dist = unique_int_dist + 1
509 | 
510 |     print("> len(along_dist)",len(along_dist))
511 |     print("> len(height)",len(height))
512 | 
513 |     surface_len = len(height)
514 |     print("surface_len", surface_len)
515 | 
516 |     # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height
517 |     new_surface = []
518 | 
519 |     # 设 surface_len = 300 m
520 |     # surface_len = 300
521 | 
522 |     original_begin_index = int((SmoothSize - 1)/2)
523 |     # i 是 filtered 的 along_dist 的 index
524 |     result_theorical_len = surface_len-(SmoothSize-1)
525 |     for i in range(result_theorical_len):
526 |         average_filtered = 0
527 |         # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index
528 |         overlay_h = []
529 |         for j in range(SmoothSize):
530 |             if height[i+j] != -100:
531 |                 overlay_h.append(height[i+j])
532 |         len_overlay_h = len(overlay_h)
533 |         # 考虑 pattern 对应的段全为【无值】,设置为异常值
534 |         if len_overlay_h == 0:
535 |             average_filtered = -100
536 |         # 求 pattern 平均值
537 |         else:
538 |             average_filtered = np.mean(overlay_h)
539 |         new_surface.append((along_dist[original_begin_index+i],average_filtered))
540 | 
541 | 
542 |     #original_end_index = surface_len - 1 - original_begin_index
543 |     # new_along_dist = along_dist[original_begin_index:original_end_index]
544 | 
545 |     print(">> len(new_surface)",len(new_surface))
546 | 
547 |     # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface)
548 |     with open('./latest_median_surface.txt','w') as out:
549 |         csv_out=csv.writer(out)
550 |         # csv_out.writerow(['X','Y'])
551 |         for row in new_surface:
552 |             csv_out.writerow(row)
553 | 
554 |     # 提取出非异常值的 photons
555 |     new_surface_sub = []
556 |     for i in new_surface:
557 |         if i[1] != -100:
558 |             new_surface_sub.append(i)
559 | 
560 |     print(">>> len(new_surface_sub)",len(new_surface_sub))
561 | 
562 |     return new_surface_sub
563 | 
564 | def median_filter_with_empty(original_surface,SmoothSize):
565 |     # 输入 list 长度为 len_original_surface; 输出更长,为 surface_len = (max_dist - min_dist)
566 |     print("len(original_surface)",len(original_surface))
567 |     #dist_max = original_surface[len(original_surface)-1][0]
568 | 
569 |     along_dist = []
570 |     height = []
571 |     dist = original_surface[0][0]
572 |     for idx in range(len(original_surface)):
573 |         unique_int_dist = original_surface[idx][0]
574 |         #unique_int_dist = int(original_surface[idx][0])
575 |         # 找 Asmooth 对应的 dist
576 |         while dist < unique_int_dist:
577 |             along_dist.append(dist)
578 |             height.append(-100)
579 |             dist = dist + 1
580 |         along_dist.append(unique_int_dist)
581 |         height.append(original_surface[idx][1])
582 |         dist = unique_int_dist + 1
583 | 
584 |     print("> len(along_dist)",len(along_dist))
585 |     print("> len(height)",len(height))
586 | 
587 |     surface_len = len(height)
588 |     print("surface_len", surface_len)
589 | 
590 |     # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height
591 |     new_surface = []
592 | 
593 |     # 设 surface_len = 300 m
594 |     # surface_len = 300
595 | 
596 |     original_begin_index = int((SmoothSize - 1)/2)
597 |     # i 是 filtered 的 along_dist 的 index
598 |     result_theorical_len = surface_len-(SmoothSize-1)
599 |     for i in range(result_theorical_len):
600 |         median_filtered = 0
601 |         # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index
602 |         overlay_h = []
603 |         for j in range(SmoothSize):
604 |             if height[i+j] != -100:
605 |                 overlay_h.append(height[i+j])
606 |         len_overlay_h = len(overlay_h)
607 |         # 考虑 pattern 对应的段全为【无值】,设置为异常值
608 |         if len_overlay_h == 0:
609 |             median_filtered = -100
610 |         # 考虑 pattern 对应段值为【偶数】,中位数是 sorted 后中间两个数的平均值
611 |         elif len_overlay_h%2 == 0:
612 |             sorted_overlay_h = sorted(overlay_h)
613 |             median_filtered = int(0.5 * (sorted_overlay_h[int(len_overlay_h/2-1)]+sorted_overlay_h[int(len_overlay_h/2)]))
614 |         else:
615 |             median_idx = int((len_overlay_h-1)/2)
616 |             median_filtered = round(sorted(overlay_h)[median_idx],3)
617 |         new_surface.append((along_dist[original_begin_index+i],median_filtered))
618 | 
619 | 
620 |     #original_end_index = surface_len - 1 - original_begin_index
621 |     # new_along_dist = along_dist[original_begin_index:original_end_index]
622 | 
623 |     print(">> len(new_surface)",len(new_surface))
624 | 
625 |     # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface)
626 |     with open('./latest_median_surface.txt','w') as out:
627 |         csv_out=csv.writer(out)
628 |         # csv_out.writerow(['X','Y'])
629 |         for row in new_surface:
630 |             csv_out.writerow(row)
631 | 
632 |     # 提取出非异常值的 photons
633 |     new_surface_sub = []
634 |     for i in new_surface:
635 |         if i[1] != -100:
636 |             new_surface_sub.append(i)
637 | 
638 |     print(">>> len(new_surface_sub)",len(new_surface_sub))
639 | 
640 |     return new_surface_sub
641 | 
642 | 
643 | # 适合没有空缺 且 1个 unit 中没有重复点的普通情况
644 | def average_filter_with_offset(original_surface,SmoothSize,offset):
645 |     print("len(original_surface)",len(original_surface))
646 |     height = []
647 |     along_dist = []
648 | 
649 |     for idx in range(len(original_surface)):
650 |         height.append(original_surface[idx][1])
651 |         along_dist.append(original_surface[idx][0])
652 | 
653 |     print("> len(along_dist)",len(along_dist))
654 |     print("> len(height)",len(height))
655 | 
656 | 
657 |     surface_len = len(height)
658 |     print("surface_len", surface_len)
659 |     # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height
660 |     new_surface = []
661 | 
662 |     # 设 surface_len = 300 m
663 |     # surface_len = 1000
664 | 
665 |     original_begin_index = int((SmoothSize - 1)/2)
666 |     # i 是 filtered 的 along_dist 的 index
667 |     result_theorical_len = surface_len-(SmoothSize-1)
668 |     for i in range(result_theorical_len):
669 |         average_filtered = 0
670 |         # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index
671 |         overlay_h = []
672 |         for j in range(SmoothSize):
673 |             if height[i+j] != -100:
674 |                 overlay_h.append(height[i+j])
675 |         len_overlay_h = len(overlay_h)
676 |         # 考虑 pattern 对应的段全为【无值】,设置为异常值
677 |         if len_overlay_h == 0:
678 |             average_filtered = -100
679 |         # 求 pattern 平均值
680 |         else:
681 |             average_filtered = np.mean(overlay_h)+offset
682 |         new_surface.append((along_dist[original_begin_index+i],average_filtered))
683 | 
684 | 
685 |     #original_end_index = surface_len - 1 - original_begin_index
686 |     # new_along_dist = along_dist[original_begin_index:original_end_index]
687 | 
688 |     print(">> len(new_surface)",len(new_surface))
689 | 
690 |     # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface)
691 |     with open('./latest_median_surface.txt','w') as out:
692 |         csv_out=csv.writer(out)
693 |         # csv_out.writerow(['X','Y'])
694 |         for row in new_surface:
695 |             csv_out.writerow(row)
696 | 
697 |     # 提取出非异常值的 photons
698 |     new_surface_sub = []
699 |     for i in new_surface:
700 |         if i[1] != -100:
701 |             new_surface_sub.append(i)
702 | 
703 |     print(">>> len(new_surface_sub)",len(new_surface_sub))
704 | 
705 |     return new_surface_sub
706 | 
707 | '''
708 | # ********************* 继承 **********************
709 | '''
710 | def average_filter_with_empty_7col(original_surface,SmoothSize):
711 |     # 输入 list 长度为 len_original_surface; 输出更长,为 surface_len = (max_dist - min_dist)
712 |     print("len(original_surface)",len(original_surface))
713 |     #dist_max = original_surface[len(original_surface)-1][0]
714 | 
715 |     along_dist = []
716 |     height = []
717 |     attr = []
718 |     dist = original_surface[0][3]
719 |     for idx in range(len(original_surface)):
720 |         unique_int_dist = original_surface[idx][3]
721 |         #unique_int_dist = int(original_surface[idx][0])
722 |         # 找 Asmooth 对应的 dist
723 |         while dist < unique_int_dist:
724 |             along_dist.append(dist)
725 |             height.append(-100)
726 |             attr.append((-100,-100,-100,-100,-100))
727 |             dist = dist + 1
728 |         along_dist.append(unique_int_dist)
729 |         height.append(original_surface[idx][2])
730 |         attr.append((original_surface[idx][0],original_surface[idx][1],original_surface[idx][4],original_surface[idx][5],original_surface[idx][6]))
731 |         dist = unique_int_dist + 1
732 | 
733 |     print("> len(along_dist)",len(along_dist))
734 |     print("> len(height)",len(height))
735 | 
736 |     surface_len = len(height)
737 |     print("surface_len", surface_len)
738 | 
739 |     # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height
740 |     new_surface = []
741 | 
742 |     # 设 surface_len = 300 m
743 |     # surface_len = 300
744 | 
745 |     original_begin_index = int((SmoothSize - 1)/2)
746 |     # i 是 filtered 的 along_dist 的 index
747 |     result_theorical_len = surface_len-(SmoothSize-1)
748 |     for i in range(result_theorical_len):
749 |         average_filtered = 0
750 |         # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index
751 |         overlay_h = []
752 |         for j in range(SmoothSize):
753 |             if height[i+j] != -100:
754 |                 overlay_h.append(height[i+j])
755 |         len_overlay_h = len(overlay_h)
756 |         # 考虑 pattern 对应的段全为【无值】,设置为异常值
757 |         if len_overlay_h == 0:
758 |             average_filtered = -100
759 |         # 求 pattern 平均值
760 |         else:
761 |             average_filtered = np.mean(overlay_h)
762 |         new_surface.append((attr[original_begin_index+i][0],attr[original_begin_index+i][1],average_filtered,along_dist[original_begin_index+i],attr[original_begin_index+i][2],attr[original_begin_index+i][3]))
763 | 
764 | 
765 |     #original_end_index = surface_len - 1 - original_begin_index
766 |     # new_along_dist = along_dist[original_begin_index:original_end_index]
767 | 
768 |     print(">> len(new_surface)",len(new_surface))
769 |     '''
770 |     # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface)
771 |     with open('./latest_median_surface.txt','w') as out:
772 |         csv_out=csv.writer(out)
773 |         # csv_out.writerow(['X','Y'])
774 |         for row in new_surface:
775 |             csv_out.writerow(row)
776 |     '''
777 |     # 提取出非异常值的 photons
778 |     new_surface_sub = []
779 |     for i in new_surface:
780 |         if i[2] != -100:
781 |             new_surface_sub.append(i)
782 | 
783 |     print(">>> len(new_surface_sub)",len(new_surface_sub))
784 | 
785 |     return new_surface_sub
786 | 
787 | def median_filter_with_empty_7col(original_surface,SmoothSize):
788 |     # 输入 list 长度为 len_original_surface; 输出更长,为 surface_len = (max_dist - min_dist)
789 |     print("len(original_surface)",len(original_surface))
790 |     #dist_max = original_surface[len(original_surface)-1][0]
791 | 
792 |     along_dist = []
793 |     height = []
794 |     attr = []
795 |     dist = original_surface[0][3]
796 |     for idx in range(len(original_surface)):
797 |         unique_int_dist = original_surface[idx][3]
798 |         #unique_int_dist = int(original_surface[idx][0])
799 |         # 找 Asmooth 对应的 dist
800 |         while dist < unique_int_dist:
801 |             along_dist.append(dist)
802 |             height.append(-100)
803 |             attr.append((-100,-100,-100,-100,-100))
804 |             dist = dist + 1
805 |         along_dist.append(unique_int_dist)
806 |         height.append(original_surface[idx][2])
807 |         attr.append((original_surface[idx][0],original_surface[idx][1],original_surface[idx][4],original_surface[idx][5],original_surface[idx][6]))
808 |         dist = unique_int_dist + 1
809 | 
810 |     print("> len(along_dist)",len(along_dist))
811 |     print("> len(height)",len(height))
812 | 
813 |     surface_len = len(height)
814 |     print("surface_len", surface_len)
815 | 
816 |     # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height
817 |     new_surface = []
818 | 
819 |     # 设 surface_len = 300 m
820 |     # surface_len = 300
821 | 
822 |     original_begin_index = int((SmoothSize - 1)/2)
823 |     # i 是 filtered 的 along_dist 的 index
824 |     result_theorical_len = surface_len-(SmoothSize-1)
825 |     for i in range(result_theorical_len):
826 |         median_filtered = 0
827 |         # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index
828 |         overlay_h = []
829 |         for j in range(SmoothSize):
830 |             if height[i+j] != -100:
831 |                 overlay_h.append(height[i+j])
832 |         len_overlay_h = len(overlay_h)
833 |         # 考虑 pattern 对应的段全为【无值】,设置为异常值
834 |         if len_overlay_h == 0:
835 |             median_filtered = -100
836 |         # 考虑 pattern 对应段值为【偶数】,中位数是 sorted 后中间两个数的平均值
837 |         elif len_overlay_h%2 == 0:
838 |             sorted_overlay_h = sorted(overlay_h)
839 |             median_filtered = int(0.5 * (sorted_overlay_h[int(len_overlay_h/2-1)]+sorted_overlay_h[int(len_overlay_h/2)]))
840 |         else:
841 |             median_idx = int((len_overlay_h-1)/2)
842 |             median_filtered = round(sorted(overlay_h)[median_idx],3)
843 |         new_surface.append((attr[original_begin_index+i][0],attr[original_begin_index+i][1],median_filtered,along_dist[original_begin_index+i],attr[original_begin_index+i][2],attr[original_begin_index+i][3],attr[original_begin_index+i][4]))
844 | 
845 | 
846 |     #original_end_index = surface_len - 1 - original_begin_index
847 |     # new_along_dist = along_dist[original_begin_index:original_end_index]
848 | 
849 |     print(">> len(new_surface)",len(new_surface))
850 |     '''
851 |     # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface)
852 |     with open('./latest_median_surface.txt','w') as out:
853 |         csv_out=csv.writer(out)
854 |         # csv_out.writerow(['X','Y'])
855 |         for row in new_surface:
856 |             csv_out.writerow(row)
857 |     '''
858 |     # 提取出非异常值的 photons
859 |     new_surface_sub = []
860 |     for i in new_surface:
861 |         if i[2] != -100:
862 |             new_surface_sub.append(i)
863 | 
864 |     print(">>> len(new_surface_sub)",len(new_surface_sub))
865 | 
866 |     return new_surface_sub
867 | 
868 | 
869 | 
870 | # 适合没有空缺 且 1个 unit 中没有重复点的普通情况
871 | def average_filter_with_offset_7col(original_surface,SmoothSize,offset):
872 |     print("len(original_surface)",len(original_surface))
873 |     height = []
874 |     along_dist = []
875 | 
876 |     for idx in range(len(original_surface)):
877 |         height.append(original_surface[idx][2])
878 |         along_dist.append(original_surface[idx][3])
879 | 
880 |     print("> len(along_dist)",len(along_dist))
881 |     print("> len(height)",len(height))
882 | 
883 | 
884 |     surface_len = len(height)
885 |     print("surface_len", surface_len)
886 |     # 滤波的方向为 along_dist,而每个 unique_dist 对应的 value 值为 height
887 |     new_surface = []
888 | 
889 |     # 设 surface_len = 300 m
890 |     # surface_len = 1000
891 | 
892 |     original_begin_index = int((SmoothSize - 1)/2)
893 |     # i 是 filtered 的 along_dist 的 index
894 |     result_theorical_len = surface_len-(SmoothSize-1)
895 |     for i in range(result_theorical_len):
896 |         average_filtered = 0
897 |         # [i:i+SmoothSize] 为 算子 与 Asmooth_original surface 重叠的部分的 index
898 |         overlay_h = []
899 |         for j in range(SmoothSize):
900 |             if height[i+j] != -100:
901 |                 overlay_h.append(height[i+j])
902 |         len_overlay_h = len(overlay_h)
903 |         # 考虑 pattern 对应的段全为【无值】,设置为异常值
904 |         if len_overlay_h == 0:
905 |             average_filtered = -100
906 |         # 求 pattern 平均值
907 |         else:
908 |             average_filtered = np.mean(overlay_h)+offset
909 |         new_surface.append((original_surface[original_begin_index+i][0],original_surface[original_begin_index+i][1],average_filtered,along_dist[original_begin_index+i],original_surface[original_begin_index+i][4],original_surface[original_begin_index+i][5],original_surface[original_begin_index+i][6]))
910 | 
911 | 
912 |     #original_end_index = surface_len - 1 - original_begin_index
913 |     # new_along_dist = along_dist[original_begin_index:original_end_index]
914 | 
915 |     print(">> len(new_surface)",len(new_surface))
916 | 
917 |     # 存储 0_Asmooth (由 interp_A 进行 1 次中值滤波获得的 surface)
918 |     with open('./latest_median_surface.txt','w') as out:
919 |         csv_out=csv.writer(out)
920 |         # csv_out.writerow(['X','Y'])
921 |         for row in new_surface:
922 |             csv_out.writerow(row)
923 | 
924 |     # 提取出非异常值的 photons
925 |     new_surface_sub = []
926 |     for i in new_surface:
927 |         if i[2] != -100:
928 |             new_surface_sub.append(i)
929 | 
930 |     print(">>> len(new_surface_sub)",len(new_surface_sub))
931 | 
932 |     return new_surface_sub
--------------------------------------------------------------------------------
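Usage note (illustrative; not part of the repository): the *_with_empty filters in filter_function.py take a list of (along-track distance, height) records sorted by distance, pad any missing 1 m units with the sentinel value -100 so that a window always spans a fixed ground length, and return only the units whose filtered height is not -100. The sketch below shows one way median_filter_with_empty and average_filter_with_empty might be chained for repeated median-plus-average smoothing; the driver file name, input file name, window size, and number of passes are assumptions made for illustration, not values taken from the repository's scripts.

# example_smoothing.py -- hypothetical driver, assumed to sit in the same folder as filter_function.py
import csv
import numpy as np
from filter_function import median_filter_with_empty, average_filter_with_empty

# Assumed input: comma-separated (along-track distance [m], height [m]) pairs, sorted by distance.
surface = [tuple(row) for row in np.genfromtxt("./example_surface.txt", delimiter=",")]

SmoothSize = 11          # odd smoothing window, in 1 m units (illustrative value)
for _ in range(10):      # the pass count here is an illustrative assumption
    # A median pass suppresses isolated spikes; the following average pass smooths the result.
    surface = median_filter_with_empty(surface, SmoothSize)
    surface = average_filter_with_empty(surface, SmoothSize)

with open("./output/example_smoothed_surface.txt", "w") as out:
    csv.writer(out).writerows(surface)

Note that each pass trims (SmoothSize - 1) units from the ends of the profile (result_theorical_len = surface_len - (SmoothSize - 1)), so the smoothed surface is shorter than the input.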