├── psnr_y.py
├── p2point.py
├── p2plane.py
└── README.md

/psnr_y.py:
--------------------------------------------------------------------------------
import open3d as o3d
import numpy as np
import math


def pc_normalize(pc):
    # center the cloud and scale it to fit inside the unit sphere
    centroid = np.mean(pc, axis=0)
    pc = pc - centroid
    m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
    pc = pc / m
    return pc


def RGB2YUV(rgb):
    # full-range RGB -> YCbCr (JPEG/BT.601 coefficients); input and output are Nx3 arrays
    m = np.array([[0.29900, -0.16874,  0.50000],
                  [0.58700, -0.33126, -0.41869],
                  [0.11400,  0.50000, -0.08131]])
    yuv = np.dot(rgb, m)
    yuv[:, 1:] += 128.0
    return yuv


def match_point_compute_mse(src, tar, src_yuv, tar_yuv):
    # Calculates the one-way matching distortion from src -> tar.
    # Build a KD-tree on the target cloud.
    tar_pc = o3d.geometry.PointCloud()
    tar_pc.points = o3d.utility.Vector3dVector(tar)
    kdtree = o3d.geometry.KDTreeFlann(tar_pc)
    # Match points and calculate the luminance error.
    point_size_src = src.shape[0]
    y_list = np.zeros(point_size_src)
    for i in range(point_size_src):
        [_, idx, dis] = kdtree.search_knn_vector_3d(src[i], 1)
        y_list[i] = (src_yuv[i][0] - tar_yuv[idx[0]][0])**2  # squared Y difference to the closest point
    return np.sum(y_list) / point_size_src


def psnr_y(ref_name, dis_name):
    ref = o3d.io.read_point_cloud(ref_name)
    dis = o3d.io.read_point_cloud(dis_name)
    ref_points = pc_normalize(np.array(ref.points))
    dis_points = pc_normalize(np.array(dis.points))
    ref_yuv = RGB2YUV(np.array(ref.colors) * 255)
    dis_yuv = RGB2YUV(np.array(dis.colors) * 255)
    mse = match_point_compute_mse(ref_points, dis_points, ref_yuv, dis_yuv)
    return 10 * math.log10(255.0**2 / mse)


if __name__ == '__main__':
    ref_name = 'hhi.ply'
    dis_name = 'hhi_0.ply'
    print(psnr_y(ref_name, dis_name))
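
# --- Editorial addition (not part of the original script): a minimal sketch of a
# --- symmetric PSNR-Y that evaluates both matching directions and keeps the larger
# --- MSE (i.e. the lower PSNR), in the spirit of the symmetric "max" error used by
# --- the MPEG PCC evaluation criteria.
def psnr_y_symmetric(ref_name, dis_name):
    ref = o3d.io.read_point_cloud(ref_name)
    dis = o3d.io.read_point_cloud(dis_name)
    ref_points = pc_normalize(np.array(ref.points))
    dis_points = pc_normalize(np.array(dis.points))
    ref_yuv = RGB2YUV(np.array(ref.colors) * 255)
    dis_yuv = RGB2YUV(np.array(dis.colors) * 255)
    mse = max(match_point_compute_mse(ref_points, dis_points, ref_yuv, dis_yuv),
              match_point_compute_mse(dis_points, ref_points, dis_yuv, ref_yuv))
    return 10 * math.log10(255.0**2 / mse)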
--------------------------------------------------------------------------------
/p2point.py:
--------------------------------------------------------------------------------
import open3d as o3d
import numpy as np


def pc_normalize(pc):
    # center the cloud and scale it to fit inside the unit sphere
    centroid = np.mean(pc, axis=0)
    pc = pc - centroid
    m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
    pc = pc / m
    return pc


def d_rms(src, tar):
    # Calculates the one-way matching distortion from src -> tar.
    # Build a KD-tree on the target cloud.
    tar_pc = o3d.geometry.PointCloud()
    tar_pc.points = o3d.utility.Vector3dVector(tar)
    kdtree = o3d.geometry.KDTreeFlann(tar_pc)
    # Match points and calculate distances.
    point_size_src = src.shape[0]
    distance_list = np.zeros(point_size_src)
    for i in range(point_size_src):
        [_, idx, dis] = kdtree.search_knn_vector_3d(src[i], 1)
        distance_list[i] = dis[0]  # squared distance to the closest point
    return (distance_list.sum() / point_size_src)**0.5


def d_hausdorf(src, tar):
    # Calculates the one-way Hausdorff distortion from src -> tar.
    # Build a KD-tree on the target cloud.
    tar_pc = o3d.geometry.PointCloud()
    tar_pc.points = o3d.utility.Vector3dVector(tar)
    kdtree = o3d.geometry.KDTreeFlann(tar_pc)
    # Match points and calculate distances.
    point_size_src = src.shape[0]
    distance_list = np.zeros(point_size_src)
    for i in range(point_size_src):
        [_, idx, dis] = kdtree.search_knn_vector_3d(src[i], 1)
        distance_list[i] = dis[0]  # squared distance to the closest point
    return np.max(distance_list)**0.5


def d_symmetric_rms(pc1, pc2, mode='max'):
    # Symmetric p2point error, see "Evaluation criteria for PCC (Point Cloud Compression)".
    if mode == 'max':
        return max(d_rms(pc1, pc2), d_rms(pc2, pc1))
    else:
        # Averaged variant, see the "Dynamic Polygon Clouds: Representation and Compression for VR/AR" paper.
        return (d_rms(pc1, pc2) + d_rms(pc2, pc1)) / 2


def d_symmetric_hausdorf(pc1, pc2):
    # Symmetric Hausdorff p2point error, see "Evaluation criteria for PCC (Point Cloud Compression)".
    return max(d_hausdorf(pc1, pc2), d_hausdorf(pc2, pc1))


# MSE P2POINT
def p2point(ref_name, dis_name):
    ref = o3d.io.read_point_cloud(ref_name)
    dis = o3d.io.read_point_cloud(dis_name)
    ref_points = pc_normalize(np.array(ref.points))
    dis_points = pc_normalize(np.array(dis.points))
    return d_symmetric_rms(ref_points, dis_points)


# HAUSDORFF P2POINT
def p2point_hausdorf(ref_name, dis_name):
    ref = o3d.io.read_point_cloud(ref_name)
    dis = o3d.io.read_point_cloud(dis_name)
    ref_points = pc_normalize(np.array(ref.points))
    dis_points = pc_normalize(np.array(dis.points))
    return d_symmetric_hausdorf(ref_points, dis_points)
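
# --- Editorial addition (not part of the original file): a possible vectorized
# --- alternative to the per-point Python loop above, using Open3D's built-in
# --- nearest-neighbour distances (compute_point_cloud_distance returns Euclidean,
# --- i.e. non-squared, distances).
def d_rms_vectorized(src, tar):
    src_pc = o3d.geometry.PointCloud()
    src_pc.points = o3d.utility.Vector3dVector(src)
    tar_pc = o3d.geometry.PointCloud()
    tar_pc.points = o3d.utility.Vector3dVector(tar)
    d = np.asarray(src_pc.compute_point_cloud_distance(tar_pc))
    return np.sqrt(np.mean(d**2))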
--------------------------------------------------------------------------------
/p2plane.py:
--------------------------------------------------------------------------------
import open3d as o3d
import numpy as np


def pc_normalize(pc):
    # center the cloud and scale it to fit inside the unit sphere
    centroid = np.mean(pc, axis=0)
    pc = pc - centroid
    m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
    pc = pc / m
    return pc


def estimate_normal(pcd):
    # estimate normals if the point cloud does not carry normals
    pcd.estimate_normals(
        search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=10000, max_nn=30))
    normals = np.asarray(pcd.normals)
    return normals


def match_point_compute_error(src, tar, normals):
    # Calculates the one-way matching distortion from src -> tar.
    # Build a KD-tree on the target cloud.
    tar_pc = o3d.geometry.PointCloud()
    tar_pc.points = o3d.utility.Vector3dVector(tar)
    kdtree = o3d.geometry.KDTreeFlann(tar_pc)
    # Match points.
    point_size_src = src.shape[0]
    error_list = np.zeros(point_size_src)
    for i in range(point_size_src):
        [_, idx, dis] = kdtree.search_knn_vector_3d(src[i], 1)
        normal = normals[i]                  # normal of the src point
        error_vector = tar[idx[0]] - src[i]  # error vector to the matched point
        error_list[i] = np.dot(normal, error_vector)**2  # squared projection onto the normal
    return error_list.sum() / point_size_src


def match_point_compute_error_hausdorf(src, tar, normals):
    # Calculates the one-way Hausdorff distortion from src -> tar.
    # Build a KD-tree on the target cloud.
    tar_pc = o3d.geometry.PointCloud()
    tar_pc.points = o3d.utility.Vector3dVector(tar)
    kdtree = o3d.geometry.KDTreeFlann(tar_pc)
    # Match points.
    point_size_src = src.shape[0]
    error_list = np.zeros(point_size_src)
    for i in range(point_size_src):
        [_, idx, dis] = kdtree.search_knn_vector_3d(src[i], 1)
        normal = normals[i]                  # normal of the src point
        error_vector = tar[idx[0]] - src[i]  # error vector to the matched point
        error_list[i] = np.dot(normal, error_vector)**2  # squared projection onto the normal
    return np.max(error_list)**0.5


# MSE P2PLANE
def p2plane(ref_name, dis_name, no_normals=True):
    # Set no_normals=False if the reference .ply already stores normals.
    ref = o3d.io.read_point_cloud(ref_name)
    dis = o3d.io.read_point_cloud(dis_name)
    if no_normals:
        ref_normals = estimate_normal(ref)
    else:
        ref_normals = np.array(ref.normals)
    ref_points = pc_normalize(np.array(ref.points))
    dis_points = pc_normalize(np.array(dis.points))
    return match_point_compute_error(ref_points, dis_points, ref_normals)


# HAUSDORFF P2PLANE
def p2plane_hausdorf(ref_name, dis_name, no_normals=True):
    # Set no_normals=False if the reference .ply already stores normals.
    ref = o3d.io.read_point_cloud(ref_name)
    dis = o3d.io.read_point_cloud(dis_name)
    if no_normals:
        ref_normals = estimate_normal(ref)
    else:
        ref_normals = np.array(ref.normals)
    ref_points = pc_normalize(np.array(ref.points))
    dis_points = pc_normalize(np.array(dis.points))
    return match_point_compute_error_hausdorf(ref_points, dis_points, ref_normals)


# ref_name = 'hhi.ply'
# dis_name = 'hhi_0.ply'
# print(p2plane(ref_name, dis_name))
# print(p2plane_hausdorf(ref_name, dis_name))
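
# --- Editorial addition (not part of the original script): a minimal sketch of a
# --- symmetric point-to-plane error that takes the max of the two one-way MSEs,
# --- as in the MPEG PCC evaluation criteria; normals are estimated for both clouds.
def p2plane_symmetric(ref_name, dis_name):
    ref = o3d.io.read_point_cloud(ref_name)
    dis = o3d.io.read_point_cloud(dis_name)
    ref_normals = estimate_normal(ref)
    dis_normals = estimate_normal(dis)
    ref_points = pc_normalize(np.array(ref.points))
    dis_points = pc_normalize(np.array(dis.points))
    return max(match_point_compute_error(ref_points, dis_points, ref_normals),
               match_point_compute_error(dis_points, ref_points, dis_normals))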
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# :point_right: 3DQA Databases (update)

If you want to add PCQA papers and code to this list, feel free to open a pull request.

We are happy to see your contribution!


## Overview of the databases

| Database | Format | Attributes | Rated Models |
|----------|--------|------------|--------------|
| M-PCCD | Point cloud | Colored | 232 |
| IRPC | Point cloud | Colorless & Colored | 54 & 54 |
| WPC | Point cloud | Colored | 740 |
| WPC2.0 | Point cloud | Colored | 400 |
| WPC3.0 | Point cloud | Colored | 350 |
| ICIP2020 | Point cloud | Colored | 96 |
| SJTU-PCQA | Point cloud | Colored | 378 |
| SIAT-PCQD | Point cloud | Colored | 340 |
| LS-PCQA | Point cloud | Colored | 1,080 |
| BASICS | Point cloud | Colored | 1,494 |
| CMDM | Mesh | Colored | 480 |
| TMQA | Mesh | Textured | 3,000 |
| Geo-Metric | Mesh | Geometry Faces | 2,450 |
| DHHQA | Mesh | Textured human heads | 1,540 |
| DDH-QA | FBX/MP4 | Dynamic Digital Humans | 800 |
| SJTU-H3D | Mesh | Full-body Digital Humans | 1,120 |

## PCQA databases

| # | Database Name | Title & Link | Database Link |
|---|---------------|--------------|---------------|
| 1 | SJTU-PCQA | [Predicting the Perceptual Quality of Point Cloud: A 3D-to-2D Projection-Based Exploration](https://ieeexplore.ieee.org/abstract/document/9238424) | [Link](https://smt.sjtu.edu.cn/database/) |
| 2 | WPC | [Perceptual Quality Assessment of Colored 3D Point Clouds](https://ieeexplore.ieee.org/document/9756929) | [Link](https://github.com/qdushl/Waterloo-Point-Cloud-Database) |
| 3 | LS-PCQA | [Point Cloud Quality Assessment: Dataset Construction and Learning-based No-Reference Approach](https://arxiv.org/pdf/2012.11895.pdf) | [Link](https://smt.sjtu.edu.cn/database/) |
| 4 | WPC2.0 (Compression) | [Reduced Reference Perceptual Quality Model with Application to Rate Control for Video-based Point Cloud Compression](https://ieeexplore.ieee.org/document/9490512) | [Link](https://github.com/qdushl/Waterloo-Point-Cloud-Database-2.0) |
| 5 | WPC3.0 (Compression) | [No-reference Bitstream-layer Model for Perceptual Quality Assessment of V-PCC Encoded Point Clouds](https://ieeexplore.ieee.org/document/9782549) | [Link](https://github.com/qdushl/Waterloo-Point-Cloud-Database-3.0) |
| 6 | CPCD2.0 (Compression & Noise) | [TGP-PCQA: Texture and geometry projection based quality assessment for colored point clouds](https://www.sciencedirect.com/science/article/pii/S1047320322000128) | [Link](https://github.com/cherry0415/CPCD2.0) |
| 7 | ICIP2020 | [Quality Evaluation Of Static Point Clouds Encoded Using MPEG Codecs](https://ieeexplore.ieee.org/abstract/document/9191308) | - |
| 8 | M-PCCD | [A comprehensive study of the rate-distortion performance in MPEG point cloud compression](https://www.nowpublishers.com/article/Details/SIP-132) | - |
| 9 | IRPC | [Point Cloud Rendering after Coding: Impacts on Subjective and Objective Quality](https://ieeexplore.ieee.org/abstract/document/9257015/) | - |
| 10 | SIAT-PCQD | [Subjective Quality Database and Objective Study of Compressed Point Clouds With 6DoF Head-Mounted Display](https://ieeexplore.ieee.org/abstract/document/9502695) | [Link](https://dx.doi.org/10.21227/ad8d-7r28) |
| 11 | vsenseVVDB (Volumetric Video Quality Database #1) | [Subjective and Objective Quality Assessment for Volumetric Video Compression](https://v-sense.scss.tcd.ie/research/6dof/quality-assessment-for-fvv-compression/) | [Link](https://v-sense.scss.tcd.ie/research/6dof/quality-assessment-for-fvv-compression/) |
| 12 | vsenseVVDB2 (Volumetric Video Quality Database #2) | [Textured mesh vs coloured point cloud: A subjective study for volumetric video compression](https://ieeexplore.ieee.org/abstract/document/9123137/) | [Link](https://v-sense.scss.tcd.ie/research/6dof/quality-assessment-for-fvv-compression/) |
| 13 | BASICS | [BASICS: Broad quality Assessment of Static point clouds In Compression Scenarios](https://arxiv.org/pdf/2302.04796.pdf) | - |

## MQA (mesh quality assessment) databases

| # | Database Name | Title & Link | Database Link |
|---|---------------|--------------|---------------|
| 1 | CMDM | [Visual Quality of 3D Meshes With Diffuse Colors in Virtual Reality: Subjective and Objective Evaluation](https://ieeexplore.ieee.org/abstract/document/9252120) | [Link](https://yananehme.github.io) |
| 2 | TMQA | [Textured Mesh Quality Assessment: Large-Scale Dataset and Deep Learning-based Quality Metric](https://yananehme.github.io/publications/2022-ACM-TOG) | [Link](https://yananehme.github.io/publications/2022-ACM-TOG) |
| 3 | - | [Geo-Metric: A Perceptual Dataset of Distortions on Faces](https://dl.acm.org/doi/abs/10.1145/3550454.3555475) | [Link](https://github.com/facebookresearch/Geo-metric) |
| 4 | SJTU-TMQA | [SJTU-TMQA: A quality assessment database for static mesh with texture map](https://arxiv.org/abs/2309.15675) | [Link](https://ccccby.github.io/) |


## Digital human quality assessment databases

| # | Database Name | Title & Link | Database Link |
|---|---------------|--------------|---------------|
| 1 | DHHQA | [Perceptual Quality Assessment for Digital Human Heads](https://arxiv.org/abs/2209.09489) | [Link](https://github.com/zzc-1998/DHHQA) |
| 2 | DDH-QA | [DDH-QA: A Dynamic Digital Humans Quality Assessment Database](https://arxiv.org/pdf/2212.12734.pdf) | [Link](https://github.com/zzc-1998/DDH-QA) |
| 3 | SJTU-H3D | [Advancing Zero-Shot Digital Human Quality Assessment through Text-Prompted Evaluation](https://arxiv.org/abs/2307.02808) | [Link](https://github.com/zzc-1998/SJTU-H3D) |


# :point_right: 3DQA methods

## Basic FR-PCQA

Basic full-reference quality assessment metrics implemented in Python.

The p2point, p2plane, and PSNR_Y metrics are implemented in [Python](https://github.com/zzc-1998/Point-cloud-quality-assessment/) (`p2point.py`, `p2plane.py`, `psnr_y.py`).
The original algorithms come from ["Evaluation criteria for PCC (Point Cloud Compression)"](https://mpeg.chiariglione.org/standards/mpeg-i/point-cloud-compression/evaluation-criteria-pcc), ["Dynamic Polygon Clouds: Representation and Compression for VR/AR"](https://www.cambridge.org/core/journals/apsipa-transactions-on-signal-and-information-processing/article/dynamic-polygon-clouds-representation-and-compression-for-vrar/A83EFCDBEF825DA5DC2A08308B6E21BE), and ["Geometric Distortion Metrics for Point Cloud Compression"](https://ieeexplore.ieee.org/document/8296925).
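
As a rough illustration, the metrics can be called directly on a reference / distorted pair of colored `.ply` files. The snippet below is a minimal usage sketch, assuming two placeholder files `ref.ply` and `dis.ply` exist in the working directory and that `open3d` and `numpy` are installed:

```python
from p2point import p2point, p2point_hausdorf
from p2plane import p2plane, p2plane_hausdorf
from psnr_y import psnr_y

ref, dis = 'ref.ply', 'dis.ply'  # placeholder paths
print('p2point (symmetric RMS):', p2point(ref, dis))
print('p2point (Hausdorff):', p2point_hausdorf(ref, dis))
print('p2plane (MSE):', p2plane(ref, dis))  # normals are estimated from the reference
print('PSNR-Y (dB):', psnr_y(ref, dis))
```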
## FR-PCQA metrics

| # | Metric Name | Title & Link | Code Link |
|---|-------------|--------------|-----------|
| 1 | PointSSIM | ["Towards a Point Cloud Structural Similarity Metric"](https://ieeexplore.ieee.org/abstract/document/9106005) | [Code](https://github.com/mmspg/pointssim) |
| 2 | GraphSIM | ["Inferring Point Cloud Quality via Graph Similarity"](http://arxiv.org/abs/2006.00497) | [Code](https://github.com/NJUVISION/GraphSIM) |
| 3 | PCQM | ["PCQM: A Full-Reference Quality Metric for Colored 3D Point Clouds"](https://ieeexplore.ieee.org/document/9123147) | [Code](https://github.com/MEPP-team/PCQM) |


## RR-PCQA metrics

| # | Metric Name | Title & Link | Code Link |
|---|-------------|--------------|-----------|
| 1 | PCMrr | ["A Reduced Reference Metric for Visual Quality Evaluation of Point Cloud Contents"](https://ieeexplore.ieee.org/abstract/document/9198142) | [Code](https://github.com/cwi-dis/PCM_RR) |
| 2 | - | ["Reduced Reference Quality Assessment for Point Cloud Compression"](https://arxiv.org/pdf/2301.01009.pdf) | - |
| 3 | - | ["Reduced-Reference Quality Assessment of Point Clouds via Content-Oriented Saliency Projection"](https://arxiv.org/abs/2301.07681) | [Code](https://github.com/weizhou-geek/RR-CAP) |
| 4 | - | ["Support Vector Regression-based Reduced-Reference Perceptual Quality Model for Compressed Point Clouds"](https://ieeexplore.ieee.org/abstract/document/10375131) | - |


## NR-PCQA metrics

| # | Metric Name | Title & Link | Code Link |
|---|-------------|--------------|-----------|
| 1 | 3D-NSS | ["No-Reference Quality Assessment for 3D Colored Point Cloud and Mesh Models"](https://ieeexplore.ieee.org/document/9810024) [[Arxiv]](https://arxiv.org/abs/2107.02041) | [Code](https://github.com/zzc-1998/NR-3DQA) |
| 2 | ResSCNN | ["Point Cloud Quality Assessment: Dataset Construction and Learning-based No-Reference Approach"](https://arxiv.org/pdf/2012.11895.pdf) | [Code](https://github.com/lyp22/ResSCNN) |
| 3 | IT-PCQA | ["No-Reference Point Cloud Quality Assessment via Domain Adaptation"](https://openaccess.thecvf.com/content/CVPR2022/papers/Yang_No-Reference_Point_Cloud_Quality_Assessment_via_Domain_Adaptation_CVPR_2022_paper.pdf) | [Code](https://github.com/lyp22/IT-PCQA) |
| 4 | 3D-CNN-PCQA | ["A No-reference Quality Assessment Metric for Point Cloud Based on Captured Video Sequences"](https://arxiv.org/abs/2206.05054) | - |
| 5 | VQA-PC | ["Evaluating Point Cloud from Moving Camera Videos: A No-Reference Metric"](https://arxiv.org/abs/2208.14085) | [Code](https://github.com/zzc-1998/VQA_PC) |
| 6 | - | ["Blind Quality Assessment of 3D Dense Point Clouds with Structure Guided Resampling"](https://arxiv.org/abs/2208.14603) | - |
| 7 | MM-PCQA | ["MM-PCQA: Multi-Modal Learning for No-reference Point Cloud Quality Assessment"](https://arxiv.org/abs/2209.00244) | [Code](https://github.com/zzc-1998/MM-PCQA) |
| 8 | - | ["V-PCC Projection Based Blind Point Cloud Quality Assessment for Compression Distortion"](https://ieeexplore.ieee.org/document/9881542) | - |
| 9 | - | ["GPA-Net: No-Reference Point Cloud Quality Assessment with Multi-task Graph Convolutional Network"](https://arxiv.org/abs/2210.16478) | [Code](https://github.com/Slowhander/GPA-Net) |
| 10 | - | ["PQA-Net: Deep No Reference Point Cloud Quality Assessment via Multi-View Projection"](https://ieeexplore.ieee.org/document/9496633) | [Code](https://github.com/qdushl/PQA-Net) |
| 11 | - | ["Progressive Knowledge Transfer Based on Human Visual Perception Mechanism for Perceptual Quality Assessment of Point Clouds"](https://arxiv.org/abs/2211.16646) | - |
| 12 | - | ["Bitstream-based Perceptual Quality Assessment of Compressed 3D Point Clouds"](https://ieeexplore.ieee.org/abstract/document/10061856) | - |
| 13 | - | ["GMS-3DQA: Projection-based Grid Mini-patch Sampling for 3D Model Quality Assessment"](https://arxiv.org/pdf/2306.05658.pdf) | [Code](https://github.com/zzc-1998/GMS-3DQA) |
| 14 | - | ["Once-Training-All-Fine: No-Reference Point Cloud Quality Assessment via Domain-relevance Degradation Description"](https://arxiv.org/abs/2307.01567) | - |
| 15 | - | ["Pseudo-Reference Point Cloud Quality Measurement Based on Joint 2-D and 3-D Distortion Description"](https://ieeexplore.ieee.org/abstract/document/10167694) | - |
| 16 | - | ["pmBQA: Projection-based Blind Point Cloud Quality Assessment via Multimodal Learning"](https://dl.acm.org/doi/abs/10.1145/3581783.3611998) | - |
| 17 | - | ["Non-Local Geometry and Color Gradient Aggregation Graph Model for No-Reference Point Cloud Quality Assessment"](https://dl.acm.org/doi/abs/10.1145/3581783.3612169) | - |
| 18 | - | ["Simple Baselines for Projection-based Full-reference and No-reference Point Cloud Quality Assessment"](https://arxiv.org/abs/2310.17147) | - |
| 19 | - | ["Plain-PCQA: No-Reference Point Cloud Quality Assessment by Analysis of Plain Visual and Geometrical Components"](https://ieeexplore.ieee.org/document/10381826) | - |
| 20 | - | ["Zoom to Perceive Better: No-reference Point Cloud Quality Assessment via Exploring Effective Multiscale Feature"](https://ieeexplore.ieee.org/document/10422856) | [Code](https://openi.pcl.ac.cn/OpenPointCloud/MOD-PCQA) |
| 21 | - | ["PAME: Self-Supervised Masked Autoencoder for No-Reference Point Cloud Quality Assessment"](https://arxiv.org/pdf/2403.10061.pdf) | - |
| 22 | - | ["Contrastive Pre-Training with Multi-View Fusion for No-Reference Point Cloud Quality Assessment"](https://arxiv.org/pdf/2403.10066.pdf) | - |
| 23 | - | ["MFT-PCQA: Multi-Modal Fusion Transformer for No-Reference Point Cloud Quality Assessment"](https://ieeexplore.ieee.org/abstract/document/10445736) | - |
| 24 | - | ["Rating-Augmented No-Reference Point Cloud Quality Assessment Using Multi-Task Learning"](https://ieeexplore.ieee.org/abstract/document/10448511) | - |
| 25 | - | ["3DTA: No-Reference 3D Point Cloud Quality Assessment with Twin Attention"](https://ieeexplore.ieee.org/abstract/document/10542438) | [Code](https://github.com/philox12358/3DTA-PCQA) |
| 26 | - | ["Compressed Point Cloud Quality Index by Combining Global Appearance and Local Details"](https://dl.acm.org/doi/abs/10.1145/3672567) | - |
| 27 | - | ["Asynchronous Feedback Network for Perceptual Point Cloud Quality Assessment"](https://arxiv.org/pdf/2407.09806) | [Code](https://github.com/zhangyujie-1998/AFNet) |
| 28 | - | ["TCDM: Transformational Complexity Based Distortion Metric for Perceptual Point Cloud Quality Assessment"](https://ieeexplore.ieee.org/abstract/document/10337742) | [Code](https://github.com/zyj1318053/TCDM) |
| 29 | LMM-PCQA (ACM MM Best Paper Nomination) | ["LMM-PCQA: Assisting Point Cloud Quality Assessment with LMM"](https://arxiv.org/abs/2404.18203) | [Code](https://github.com/Q-Future/LMM-PCQA) |
| 30 | - | ["LLM-guided Cross-Modal Point Cloud Quality Assessment: A Graph Learning Approach"](https://ieeexplore.ieee.org/abstract/document/10660545) | - |
| 31 | - | ["Visual-Saliency Guided Multi-modal Learning for No Reference Point Cloud Quality Assessment"](https://dl.acm.org/doi/abs/10.1145/3689093.3689183) | - |
| 32 | - | ["Perceptual Quality Assessment of Trisoup-Lifting Encoded 3D Point Clouds"](https://arxiv.org/abs/2410.06689) | - |
| 33 | - | ["No-Reference Point Cloud Quality Assessment Through Structure Sampling and Clustering Based on Graph"](https://ieeexplore.ieee.org/abstract/document/10737898/) | - |
| 34 | - | ["No-reference point cloud quality assessment via graph convolutional network"](https://orca.cardiff.ac.uk/id/eprint/172907/) | - |
| 35 | - | ["CLIP-PCQA: Exploring Subjective-Aligned Vision-Language Modeling for Point Cloud Quality Assessment"](https://arxiv.org/abs/2501.10071) | - |
| 36 | - | ["Information Exploration of Projected Views for Point Cloud Quality Measurement"](https://ieeexplore.ieee.org/abstract/document/10841467) | - |
| 37 | - | ["CMDC-PCQA: No-Reference Point Cloud Quality Assessment via a Cross-Modal Deep-Coupling Framework"](https://ieeexplore.ieee.org/abstract/document/10884925) | - |
| 38 | - | ["No-reference geometry quality assessment for colorless point clouds via list-wise rank learning"](https://www.sciencedirect.com/science/article/abs/pii/S0097849325000159) | - |
| 39 | - | ["Dynamic Hypergraph Convolutional Network for No-Reference Point Cloud Quality Assessment"](https://ieeexplore.ieee.org/abstract/document/10549980) | [Code](https://github.com/chenwuwq/DHCN) |
| 40 | - | ["DQP-PCQA: Deep Quantization Parameters Bring New Insight to Point Cloud Quality Assessment"](https://ieeexplore.ieee.org/abstract/document/11078383/) | - |
| 41 | - | ["Perception-Weighted Multi-View Point Cloud Quality Assessment with Saliency-Guided Coverage Analysis"](https://ieeexplore.ieee.org/abstract/document/11081825) | - |
| 42 | - | ["MPV-PCQA: multimodal no-reference point cloud quality assessment via point cloud and captured dynamic video"](https://link.springer.com/article/10.1007/s00530-025-01887-2) | - |
| 43 | - | ["COPP-Net: No-Reference Point Cloud Quality Assessment via Weighted Patch Quality Prediction"](https://ieeexplore.ieee.org/abstract/document/11131459/) | - |
| 44 | - | ["BMPCQA: Bioinspired Metaverse Point Cloud Quality Assessment Based on Large Multimodal Models"](https://advanced.onlinelibrary.wiley.com/doi/abs/10.1002/aisy.202500504) | [Code](https://github.com/IntMeGroup/BMPCQA) |

## Mesh QA metrics

1. "Surface-Sampling Based Objective Quality Assessment Metrics for Meshes" [[ICASSP]](https://ieeexplore.ieee.org/document/10096048)


# Contact Information

:sunglasses: If you want to contribute, include your work, or simply start a discussion, feel free to e-mail me at zzc1998@sjtu.edu.cn :sunglasses:

:sparkling_heart: If you find this collection helpful, please star this project! Thank you! :sparkling_heart:
--------------------------------------------------------------------------------