├── LICENSE ├── README.md ├── background └── new │ ├── 0070106734-000000000137219850_2.jpg │ ├── 021020.gallery.WORK.1537169783-1.jpg │ ├── 100832t21c0jps99zznn16.jpg │ ├── 1484722000669.jpg │ ├── 1499406617573_000.jpg │ ├── 20150425141154_18023.jpg │ ├── 20170606154305OjfqsVeuAJt4835Z.jpg │ ├── 20180301170407_51589.jpg │ ├── 20180415234959_8.jpg │ ├── 258479-150P310261526.jpg │ ├── 38708109-ひび割れ、乾燥した荒野地面テクスチャ.jpg │ ├── 4375-11018741.jpg │ ├── 4375-11018802.jpg │ ├── 4375-9685046.jpg │ ├── 4380.jpg_wh1200.jpg │ ├── 5514fb2764e7e.jpg │ └── 8205b5bf-d408-4aea-92e7-44247b40e356.jpg ├── dataset.jpg ├── perturbed_images_generation_multiProcess.py ├── perturbed_images_generation_multiProcess_addition1.py ├── perturbed_images_generation_multiProcess_addition2.py ├── perturbed_images_generation_multiProcess_addition3.py ├── scan └── new │ ├── 1.jpg │ ├── 390bf8b615242318906029b8a6c71ad4.png │ ├── 3d-paper-snowflake-template-24-paper-cutting-templates-pdf-doc-psd-vector-eps-fur-3d-paper-snowflake-template.jpeg │ ├── 660911ENT039.jpg │ └── Battle-Creek-Grand-Opening-Flyer-1.jpg └── utils.py /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Synthesize Distorted Image and Its Control Points 2 |
We uniformly sample a set of reference points (b) on a scanned document image (a), and then perform geometric deformation on them to obtain the distorted image (c) and its control points (d). The synthetic data (e) consist of the distorted image, the reference points, and the control points.
6 | 7 |python >=3.7
22 |opencv-python
23 |scipy
24 |math (part of the Python standard library — no installation needed)
25 |pickle (part of the Python standard library — no installation needed)
26 | -------------------------------------------------------------------------------- /background/new/0070106734-000000000137219850_2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/0070106734-000000000137219850_2.jpg -------------------------------------------------------------------------------- /background/new/021020.gallery.WORK.1537169783-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/021020.gallery.WORK.1537169783-1.jpg -------------------------------------------------------------------------------- /background/new/100832t21c0jps99zznn16.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/100832t21c0jps99zznn16.jpg -------------------------------------------------------------------------------- /background/new/1484722000669.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/1484722000669.jpg -------------------------------------------------------------------------------- /background/new/1499406617573_000.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/1499406617573_000.jpg -------------------------------------------------------------------------------- 
/background/new/20150425141154_18023.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/20150425141154_18023.jpg -------------------------------------------------------------------------------- /background/new/20170606154305OjfqsVeuAJt4835Z.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/20170606154305OjfqsVeuAJt4835Z.jpg -------------------------------------------------------------------------------- /background/new/20180301170407_51589.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/20180301170407_51589.jpg -------------------------------------------------------------------------------- /background/new/20180415234959_8.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/20180415234959_8.jpg -------------------------------------------------------------------------------- /background/new/258479-150P310261526.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/258479-150P310261526.jpg -------------------------------------------------------------------------------- /background/new/38708109-ひび割れ、乾燥した荒野地面テクスチャ.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/38708109-ひび割れ、乾燥した荒野地面テクスチャ.jpg -------------------------------------------------------------------------------- /background/new/4375-11018741.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/4375-11018741.jpg -------------------------------------------------------------------------------- /background/new/4375-11018802.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/4375-11018802.jpg -------------------------------------------------------------------------------- /background/new/4375-9685046.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/4375-9685046.jpg -------------------------------------------------------------------------------- /background/new/4380.jpg_wh1200.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/4380.jpg_wh1200.jpg -------------------------------------------------------------------------------- /background/new/5514fb2764e7e.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/5514fb2764e7e.jpg -------------------------------------------------------------------------------- /background/new/8205b5bf-d408-4aea-92e7-44247b40e356.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/background/new/8205b5bf-d408-4aea-92e7-44247b40e356.jpg -------------------------------------------------------------------------------- /dataset.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/dataset.jpg -------------------------------------------------------------------------------- /perturbed_images_generation_multiProcess.py: -------------------------------------------------------------------------------- 1 | ''' 2 | GuoWang xie 3 | set up :2020-1-9 4 | intergrate img and label into one file 5 | 6 | -- fiducial1024_v1 7 | ''' 8 | 9 | 10 | import argparse 11 | import sys, os 12 | import pickle 13 | import random 14 | import collections 15 | import json 16 | import numpy as np 17 | import scipy.io as io 18 | import scipy.misc as m 19 | import matplotlib.pyplot as plt 20 | import glob 21 | import math 22 | import time 23 | 24 | import threading 25 | import multiprocessing as mp 26 | from multiprocessing import Pool 27 | import re 28 | import cv2 29 | # sys.path.append('/lustre/home/gwxie/hope/project/dewarp/datasets/') # /lustre/home/gwxie/program/project/unwarp/perturbed_imgaes/GAN 30 | import utils 31 | 32 | def getDatasets(dir): 33 | return os.listdir(dir) 34 | 35 | class perturbed(utils.BasePerturbed): 36 | def __init__(self, path, bg_path, save_path, save_suffix): 37 | 38 | self.path = 
path 39 | self.bg_path = bg_path 40 | self.save_path = save_path 41 | self.save_suffix = save_suffix 42 | def save_img(self, m, n, fold_curve='fold', repeat_time=4, fiducial_points = 16, relativeShift_position='relativeShift_v2'): 43 | 44 | 45 | origin_img = cv2.imread(self.path, flags=cv2.IMREAD_COLOR) 46 | 47 | save_img_shape = [512*2, 480*2] # 320 48 | # reduce_value = np.random.choice([2**4, 2**5, 2**6, 2**7, 2**8], p=[0.01, 0.1, 0.4, 0.39, 0.1]) 49 | reduce_value = np.random.choice([2*2, 4*2, 8*2, 16*2, 24*2, 32*2, 40*2, 48*2], p=[0.02, 0.18, 0.2, 0.3, 0.1, 0.1, 0.08, 0.02]) 50 | # reduce_value = np.random.choice([8*2, 16*2, 24*2, 32*2, 40*2, 48*2], p=[0.01, 0.02, 0.2, 0.4, 0.19, 0.18]) 51 | # reduce_value = np.random.choice([16, 24, 32, 40, 48, 64], p=[0.01, 0.1, 0.2, 0.4, 0.2, 0.09]) 52 | base_img_shrink = save_img_shape[0] - reduce_value 53 | 54 | # enlarge_img_shrink = [1024, 768] 55 | # enlarge_img_shrink = [896, 672] # 420 56 | enlarge_img_shrink = [512*4, 480*4] # 420 57 | # enlarge_img_shrink = [896*2, 768*2] # 420 58 | # enlarge_img_shrink = [896, 768] # 420 59 | # enlarge_img_shrink = [768, 576] # 420 60 | # enlarge_img_shrink = [640, 480] # 420 61 | 62 | '''''' 63 | im_lr = origin_img.shape[0] 64 | im_ud = origin_img.shape[1] 65 | 66 | reduce_value_v2 = np.random.choice([2*2, 4*2, 8*2, 16*2, 24*2, 28*2, 32*2, 48*2], p=[0.02, 0.18, 0.2, 0.2, 0.1, 0.1, 0.1, 0.1]) 67 | # reduce_value_v2 = np.random.choice([16, 24, 28, 32, 48, 64], p=[0.01, 0.1, 0.2, 0.3, 0.25, 0.14]) 68 | 69 | if im_lr > im_ud: 70 | im_ud = min(int(im_ud / im_lr * base_img_shrink), save_img_shape[1] - reduce_value_v2) 71 | im_lr = save_img_shape[0] - reduce_value 72 | else: 73 | base_img_shrink = save_img_shape[1] - reduce_value 74 | im_lr = min(int(im_lr / im_ud * base_img_shrink), save_img_shape[0] - reduce_value_v2) 75 | im_ud = base_img_shrink 76 | 77 | if round(im_lr / im_ud, 2) < 0.5 or round(im_ud / im_lr, 2) < 0.5: 78 | repeat_time = min(repeat_time, 8) 79 | 80 | edge_padding = 
3 81 | im_lr -= im_lr % (fiducial_points-1) - (2*edge_padding) # im_lr % (fiducial_points-1) - 1 82 | im_ud -= im_ud % (fiducial_points-1) - (2*edge_padding) # im_ud % (fiducial_points-1) - 1 83 | im_hight = np.linspace(edge_padding, im_lr - edge_padding, fiducial_points, dtype=np.int64) 84 | im_wide = np.linspace(edge_padding, im_ud - edge_padding, fiducial_points, dtype=np.int64) 85 | # im_lr -= im_lr % (fiducial_points-1) - (1+2*edge_padding) # im_lr % (fiducial_points-1) - 1 86 | # im_ud -= im_ud % (fiducial_points-1) - (1+2*edge_padding) # im_ud % (fiducial_points-1) - 1 87 | # im_hight = np.linspace(edge_padding, im_lr - (1+edge_padding), fiducial_points, dtype=np.int64) 88 | # im_wide = np.linspace(edge_padding, im_ud - (1+edge_padding), fiducial_points, dtype=np.int64) 89 | im_x, im_y = np.meshgrid(im_hight, im_wide) 90 | segment_x = (im_lr) // (fiducial_points-1) 91 | segment_y = (im_ud) // (fiducial_points-1) 92 | 93 | # plt.plot(im_x, im_y, 94 | # color='limegreen', 95 | # marker='.', 96 | # linestyle='') 97 | # plt.grid(True) 98 | # plt.show() 99 | self.origin_img = cv2.resize(origin_img, (im_ud, im_lr), interpolation=cv2.INTER_CUBIC) 100 | 101 | perturbed_bg_ = getDatasets(self.bg_path) 102 | perturbed_bg_img_ = self.bg_path+random.choice(perturbed_bg_) 103 | perturbed_bg_img = cv2.imread(perturbed_bg_img_, flags=cv2.IMREAD_COLOR) 104 | 105 | mesh_shape = self.origin_img.shape[:2] 106 | 107 | self.synthesis_perturbed_img = np.full((enlarge_img_shrink[0], enlarge_img_shrink[1], 3), 256, dtype=np.float32)#np.zeros_like(perturbed_bg_img) 108 | # self.synthesis_perturbed_img = np.full((enlarge_img_shrink[0], enlarge_img_shrink[1], 3), 0, dtype=np.int16)#np.zeros_like(perturbed_bg_img) 109 | self.new_shape = self.synthesis_perturbed_img.shape[:2] 110 | perturbed_bg_img = cv2.resize(perturbed_bg_img, (save_img_shape[1], save_img_shape[0]), cv2.INPAINT_TELEA) 111 | 112 | origin_pixel_position = np.argwhere(np.zeros(mesh_shape, dtype=np.uint32) == 
0).reshape(mesh_shape[0], mesh_shape[1], 2) 113 | pixel_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(self.new_shape[0], self.new_shape[1], 2) 114 | self.perturbed_xy_ = np.zeros((self.new_shape[0], self.new_shape[1], 2)) 115 | # self.perturbed_xy_ = pixel_position.copy().astype(np.float32) 116 | # fiducial_points_grid = origin_pixel_position[im_x, im_y] 117 | 118 | self.synthesis_perturbed_label = np.zeros((self.new_shape[0], self.new_shape[1], 2)) 119 | x_min, y_min, x_max, y_max = self.adjust_position_v2(0, 0, mesh_shape[0], mesh_shape[1], save_img_shape) 120 | origin_pixel_position += [x_min, y_min] 121 | 122 | x_min, y_min, x_max, y_max = self.adjust_position(0, 0, mesh_shape[0], mesh_shape[1]) 123 | x_shift = random.randint(-enlarge_img_shrink[0]//16, enlarge_img_shrink[0]//16) 124 | y_shift = random.randint(-enlarge_img_shrink[1]//16, enlarge_img_shrink[1]//16) 125 | x_min += x_shift 126 | x_max += x_shift 127 | y_min += y_shift 128 | y_max += y_shift 129 | 130 | '''im_x,y''' 131 | im_x += x_min 132 | im_y += y_min 133 | 134 | self.synthesis_perturbed_img[x_min:x_max, y_min:y_max] = self.origin_img 135 | self.synthesis_perturbed_label[x_min:x_max, y_min:y_max] = origin_pixel_position 136 | 137 | synthesis_perturbed_img_map = self.synthesis_perturbed_img.copy() 138 | synthesis_perturbed_label_map = self.synthesis_perturbed_label.copy() 139 | 140 | foreORbackground_label = np.full((mesh_shape), 1, dtype=np.int16) 141 | foreORbackground_label_map = np.full((self.new_shape), 0, dtype=np.int16) 142 | foreORbackground_label_map[x_min:x_max, y_min:y_max] = foreORbackground_label 143 | 144 | # synthesis_perturbed_img_map = self.pad(self.synthesis_perturbed_img.copy(), x_min, y_min, x_max, y_max) 145 | # synthesis_perturbed_label_map = self.pad(synthesis_perturbed_label_map, x_min, y_min, x_max, y_max) 146 | '''*****************************************************************''' 147 | is_normalizationFun_mixture = self.is_perform(0.2, 
0.8) 148 | # if not is_normalizationFun_mixture: 149 | normalizationFun_0_1 = False 150 | # normalizationFun_0_1 = self.is_perform(0.5, 0.5) 151 | 152 | if fold_curve == 'fold': 153 | fold_curve_random = True 154 | # is_normalizationFun_mixture = False 155 | normalizationFun_0_1 = self.is_perform(0.2, 0.8) 156 | if is_normalizationFun_mixture: 157 | alpha_perturbed = random.randint(80, 120) / 100 158 | else: 159 | if normalizationFun_0_1 and repeat_time < 8: 160 | alpha_perturbed = random.randint(50, 70) / 100 161 | else: 162 | alpha_perturbed = random.randint(70, 130) / 100 163 | else: 164 | fold_curve_random = self.is_perform(0.1, 0.9) # False # self.is_perform(0.01, 0.99) 165 | alpha_perturbed = random.randint(80, 160) / 100 166 | # is_normalizationFun_mixture = False # self.is_perform(0.01, 0.99) 167 | synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 256) 168 | # synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 0, dtype=np.int16) 169 | synthesis_perturbed_label = np.zeros_like(self.synthesis_perturbed_label) 170 | 171 | alpha_perturbed_change = self.is_perform(0.5, 0.5) 172 | p_pp_choice = self.is_perform(0.8, 0.2) if fold_curve == 'fold' else self.is_perform(0.1, 0.9) 173 | for repeat_i in range(repeat_time): 174 | 175 | if alpha_perturbed_change: 176 | if fold_curve == 'fold': 177 | if is_normalizationFun_mixture: 178 | alpha_perturbed = random.randint(80, 120) / 100 179 | else: 180 | if normalizationFun_0_1 and repeat_time < 8: 181 | alpha_perturbed = random.randint(50, 70) / 100 182 | else: 183 | alpha_perturbed = random.randint(70, 130) / 100 184 | else: 185 | alpha_perturbed = random.randint(80, 160) / 100 186 | '''''' 187 | linspace_x = [0, (self.new_shape[0] - im_lr) // 2 - 1, 188 | self.new_shape[0] - (self.new_shape[0] - im_lr) // 2 - 1, self.new_shape[0] - 1] 189 | linspace_y = [0, (self.new_shape[1] - im_ud) // 2 - 1, 190 | self.new_shape[1] - (self.new_shape[1] - im_ud) // 2 - 1, self.new_shape[1] - 1] 191 | 
linspace_x_seq = [1, 2, 3] 192 | linspace_y_seq = [1, 2, 3] 193 | r_x = random.choice(linspace_x_seq) 194 | r_y = random.choice(linspace_y_seq) 195 | perturbed_p = np.array( 196 | [random.randint(linspace_x[r_x-1] * 10, linspace_x[r_x] * 10), 197 | random.randint(linspace_y[r_y-1] * 10, linspace_y[r_y] * 10)])/10 198 | if ((r_x == 1 or r_x == 3) and (r_y == 1 or r_y == 3)) and p_pp_choice: 199 | linspace_x_seq.remove(r_x) 200 | linspace_y_seq.remove(r_y) 201 | r_x = random.choice(linspace_x_seq) 202 | r_y = random.choice(linspace_y_seq) 203 | perturbed_pp = np.array( 204 | [random.randint(linspace_x[r_x-1] * 10, linspace_x[r_x] * 10), 205 | random.randint(linspace_y[r_y-1] * 10, linspace_y[r_y] * 10)])/10 206 | # perturbed_p, perturbed_pp = np.array( 207 | # [random.randint(0, self.new_shape[0] * 10) / 10, 208 | # random.randint(0, self.new_shape[1] * 10) / 10]) \ 209 | # , np.array([random.randint(0, self.new_shape[0] * 10) / 10, 210 | # random.randint(0, self.new_shape[1] * 10) / 10]) 211 | # perturbed_p, perturbed_pp = np.array( 212 | # [random.randint((self.new_shape[0]-im_lr)//2*10, (self.new_shape[0]-(self.new_shape[0]-im_lr)//2) * 10) / 10, 213 | # random.randint((self.new_shape[1]-im_ud)//2*10, (self.new_shape[1]-(self.new_shape[1]-im_ud)//2) * 10) / 10]) \ 214 | # , np.array([random.randint((self.new_shape[0]-im_lr)//2*10, (self.new_shape[0]-(self.new_shape[0]-im_lr)//2) * 10) / 10, 215 | # random.randint((self.new_shape[1]-im_ud)//2*10, (self.new_shape[1]-(self.new_shape[1]-im_ud)//2) * 10) / 10]) 216 | '''''' 217 | 218 | perturbed_vp = perturbed_pp - perturbed_p 219 | perturbed_vp_norm = np.linalg.norm(perturbed_vp) 220 | 221 | perturbed_distance_vertex_and_line = np.dot((perturbed_p - pixel_position), perturbed_vp) / perturbed_vp_norm 222 | '''''' 223 | # perturbed_v = np.array([random.randint(-3000, 3000) / 100, random.randint(-3000, 3000) / 100]) 224 | # perturbed_v = np.array([random.randint(-4000, 4000) / 100, random.randint(-4000, 4000) / 100]) 225 
| if fold_curve == 'fold' and self.is_perform(0.6, 0.4): # self.is_perform(0.3, 0.7): 226 | # perturbed_v = np.array([random.randint(-9000, 9000) / 100, random.randint(-9000, 9000) / 100]) 227 | perturbed_v = np.array([random.randint(-10000, 10000) / 100, random.randint(-10000, 10000) / 100]) 228 | # perturbed_v = np.array([random.randint(-11000, 11000) / 100, random.randint(-11000, 11000) / 100]) 229 | else: 230 | # perturbed_v = np.array([random.randint(-9000, 9000) / 100, random.randint(-9000, 9000) / 100]) 231 | # perturbed_v = np.array([random.randint(-16000, 16000) / 100, random.randint(-16000, 16000) / 100]) 232 | perturbed_v = np.array([random.randint(-8000, 8000) / 100, random.randint(-8000, 8000) / 100]) 233 | # perturbed_v = np.array([random.randint(-3500, 3500) / 100, random.randint(-3500, 3500) / 100]) 234 | # perturbed_v = np.array([random.randint(-600, 600) / 10, random.randint(-600, 600) / 10]) 235 | '''''' 236 | if fold_curve == 'fold': 237 | if is_normalizationFun_mixture: 238 | if self.is_perform(0.5, 0.5): 239 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 240 | else: 241 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 2)) 242 | else: 243 | if normalizationFun_0_1: 244 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), 2) 245 | else: 246 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 247 | 248 | else: 249 | if is_normalizationFun_mixture: 250 | if self.is_perform(0.5, 0.5): 251 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 252 | else: 253 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 2)) 254 | else: 255 | if normalizationFun_0_1: 256 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), 2) 257 | else: 258 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 259 | '''''' 260 | if 
fold_curve_random: 261 | # omega_perturbed = (alpha_perturbed+0.2) / (perturbed_d + alpha_perturbed) 262 | # omega_perturbed = alpha_perturbed**perturbed_d 263 | omega_perturbed = alpha_perturbed / (perturbed_d + alpha_perturbed) 264 | else: 265 | omega_perturbed = 1 - perturbed_d ** alpha_perturbed 266 | 267 | '''shadow''' 268 | if self.is_perform(0.6, 0.4): 269 | synthesis_perturbed_img_map[x_min:x_max, y_min:y_max] = np.minimum(np.maximum(synthesis_perturbed_img_map[x_min:x_max, y_min:y_max] - np.int16(np.round(omega_perturbed[x_min:x_max, y_min:y_max].repeat(3).reshape(x_max-x_min, y_max-y_min, 3) * abs(np.linalg.norm(perturbed_v//2))*np.array([0.4-random.random()*0.1, 0.4-random.random()*0.1, 0.4-random.random()*0.1]))), 0), 255) 270 | '''''' 271 | 272 | if relativeShift_position in ['position', 'relativeShift_v2']: 273 | self.perturbed_xy_ += np.array([omega_perturbed * perturbed_v[0], omega_perturbed * perturbed_v[1]]).transpose(1, 2, 0) 274 | else: 275 | print('relativeShift_position error') 276 | exit() 277 | 278 | ''' 279 | flat_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape( 280 | self.new_shape[0] * self.new_shape[1], 2) 281 | vtx, wts = self.interp_weights(self.perturbed_xy_.reshape(self.new_shape[0] * self.new_shape[1], 2), flat_position) 282 | wts_sum = np.abs(wts).sum(-1) 283 | 284 | # flat_img.reshape(flat_shape[0] * flat_shape[1], 3)[:] = interpolate(pixel, vtx, wts) 285 | wts = wts[wts_sum <= 1, :] 286 | vtx = vtx[wts_sum <= 1, :] 287 | synthesis_perturbed_img.reshape(self.new_shape[0] * self.new_shape[1], 3)[wts_sum <= 1, 288 | :] = self.interpolate(synthesis_perturbed_img_map.reshape(self.new_shape[0] * self.new_shape[1], 3), vtx, wts) 289 | 290 | synthesis_perturbed_label.reshape(self.new_shape[0] * self.new_shape[1], 2)[wts_sum <= 1, 291 | :] = self.interpolate(synthesis_perturbed_label_map.reshape(self.new_shape[0] * self.new_shape[1], 2), vtx, wts) 292 | 293 | foreORbackground_label = np.zeros(self.new_shape) 
294 | foreORbackground_label.reshape(self.new_shape[0] * self.new_shape[1], 1)[wts_sum <= 1, :] = self.interpolate(foreORbackground_label_map.reshape(self.new_shape[0] * self.new_shape[1], 1), vtx, wts) 295 | foreORbackground_label[foreORbackground_label < 0.99] = 0 296 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 297 | 298 | # synthesis_perturbed_img = np.around(synthesis_perturbed_img).astype(np.uint8) 299 | synthesis_perturbed_label[:, :, 0] *= foreORbackground_label 300 | synthesis_perturbed_label[:, :, 1] *= foreORbackground_label 301 | synthesis_perturbed_img[:, :, 0] *= foreORbackground_label 302 | synthesis_perturbed_img[:, :, 1] *= foreORbackground_label 303 | synthesis_perturbed_img[:, :, 2] *= foreORbackground_label 304 | 305 | self.synthesis_perturbed_img = synthesis_perturbed_img 306 | self.synthesis_perturbed_label = synthesis_perturbed_label 307 | ''' 308 | 309 | '''perspective''' 310 | 311 | perspective_shreshold = random.randint(26, 36)*10 # 280 312 | x_min_per, y_min_per, x_max_per, y_max_per = self.adjust_position(perspective_shreshold, perspective_shreshold, self.new_shape[0]-perspective_shreshold, self.new_shape[1]-perspective_shreshold) 313 | pts1 = np.float32([[x_min_per, y_min_per], [x_max_per, y_min_per], [x_min_per, y_max_per], [x_max_per, y_max_per]]) 314 | e_1_ = x_max_per - x_min_per 315 | e_2_ = y_max_per - y_min_per 316 | e_3_ = e_2_ 317 | e_4_ = e_1_ 318 | perspective_shreshold_h = e_1_*0.02 319 | perspective_shreshold_w = e_2_*0.02 320 | a_min_, a_max_ = 70, 110 321 | # if self.is_perform(1, 0): 322 | if fold_curve == 'curve' and self.is_perform(0.5, 0.5): 323 | if self.is_perform(0.5, 0.5): 324 | while True: 325 | pts2 = np.around( 326 | np.float32([[x_min_per - (random.random()) * perspective_shreshold, y_min_per + (random.random()) * perspective_shreshold], 327 | [x_max_per - (random.random()) * perspective_shreshold, y_min_per - (random.random()) * perspective_shreshold], 328 | [x_min_per + (random.random()) * 
perspective_shreshold, y_max_per + (random.random()) * perspective_shreshold], 329 | [x_max_per + (random.random()) * perspective_shreshold, y_max_per - (random.random()) * perspective_shreshold]])) # right 330 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 331 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 332 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 333 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 334 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 335 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 336 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 337 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 338 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 339 | break 340 | else: 341 | while True: 342 | pts2 = np.around( 343 | np.float32([[x_min_per + (random.random()) * perspective_shreshold, y_min_per - (random.random()) * perspective_shreshold], 344 | [x_max_per + (random.random()) * perspective_shreshold, y_min_per + (random.random()) * perspective_shreshold], 345 | [x_min_per - (random.random()) * perspective_shreshold, y_max_per - (random.random()) * perspective_shreshold], 346 | [x_max_per - (random.random()) * perspective_shreshold, y_max_per + (random.random()) * perspective_shreshold]])) 347 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 348 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 349 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 350 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 351 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 352 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - 
perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 353 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 354 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 355 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 356 | break 357 | 358 | else: 359 | while True: 360 | pts2 = np.around(np.float32([[x_min_per+(random.random()-0.5)*perspective_shreshold, y_min_per+(random.random()-0.5)*perspective_shreshold], 361 | [x_max_per+(random.random()-0.5)*perspective_shreshold, y_min_per+(random.random()-0.5)*perspective_shreshold], 362 | [x_min_per+(random.random()-0.5)*perspective_shreshold, y_max_per+(random.random()-0.5)*perspective_shreshold], 363 | [x_max_per+(random.random()-0.5)*perspective_shreshold, y_max_per+(random.random()-0.5)*perspective_shreshold]])) 364 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 365 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 366 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 367 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 368 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 369 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 370 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 371 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 372 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 373 | break 374 | 375 | M = cv2.getPerspectiveTransform(pts1, pts2) 376 | one = np.ones((self.new_shape[0], self.new_shape[1], 1), dtype=np.int16) 377 | matr = np.dstack((pixel_position, one)) 378 | new = np.dot(M, matr.reshape(-1, 3).T).T.reshape(self.new_shape[0], 
self.new_shape[1], 3) 379 | x = new[:, :, 0]/new[:, :, 2] 380 | y = new[:, :, 1]/new[:, :, 2] 381 | perturbed_xy_ = np.dstack((x, y)) 382 | # perturbed_xy_round_int = np.around(cv2.bilateralFilter(perturbed_xy_round_int, 9, 75, 75)) 383 | # perturbed_xy_round_int = np.around(cv2.blur(perturbed_xy_, (17, 17))) 384 | # perturbed_xy_round_int = cv2.blur(perturbed_xy_round_int, (17, 17)) 385 | # perturbed_xy_round_int = cv2.GaussianBlur(perturbed_xy_round_int, (7, 7), 0) 386 | perturbed_xy_ = perturbed_xy_-np.min(perturbed_xy_.T.reshape(2, -1), 1) 387 | # perturbed_xy_round_int = np.around(perturbed_xy_round_int-np.min(perturbed_xy_round_int.T.reshape(2, -1), 1)).astype(np.int16) 388 | 389 | self.perturbed_xy_ += perturbed_xy_ 390 | 391 | '''perspective end''' 392 | 393 | '''to img''' 394 | flat_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape( 395 | self.new_shape[0] * self.new_shape[1], 2) 396 | # self.perturbed_xy_ = cv2.blur(self.perturbed_xy_, (7, 7)) 397 | self.perturbed_xy_ = cv2.GaussianBlur(self.perturbed_xy_, (7, 7), 0) 398 | 399 | '''get fiducial points''' 400 | fiducial_points_coordinate = self.perturbed_xy_[im_x, im_y] 401 | 402 | 403 | vtx, wts = self.interp_weights(self.perturbed_xy_.reshape(self.new_shape[0] * self.new_shape[1], 2), flat_position) 404 | wts_sum = np.abs(wts).sum(-1) 405 | 406 | # flat_img.reshape(flat_shape[0] * flat_shape[1], 3)[:] = interpolate(pixel, vtx, wts) 407 | wts = wts[wts_sum <= 1, :] 408 | vtx = vtx[wts_sum <= 1, :] 409 | synthesis_perturbed_img.reshape(self.new_shape[0] * self.new_shape[1], 3)[wts_sum <= 1, 410 | :] = self.interpolate(synthesis_perturbed_img_map.reshape(self.new_shape[0] * self.new_shape[1], 3), vtx, wts) 411 | 412 | synthesis_perturbed_label.reshape(self.new_shape[0] * self.new_shape[1], 2)[wts_sum <= 1, 413 | :] = self.interpolate(synthesis_perturbed_label_map.reshape(self.new_shape[0] * self.new_shape[1], 2), vtx, wts) 414 | 415 | foreORbackground_label = 
np.zeros(self.new_shape) 416 | foreORbackground_label.reshape(self.new_shape[0] * self.new_shape[1], 1)[wts_sum <= 1, :] = self.interpolate(foreORbackground_label_map.reshape(self.new_shape[0] * self.new_shape[1], 1), vtx, wts) 417 | foreORbackground_label[foreORbackground_label < 0.99] = 0 418 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 419 | 420 | self.synthesis_perturbed_img = synthesis_perturbed_img 421 | self.synthesis_perturbed_label = synthesis_perturbed_label 422 | self.foreORbackground_label = foreORbackground_label 423 | 424 | '''draw fiducial points 425 | stepSize = 0 426 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_img.copy() 427 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1,2): 428 | cv2.circle(fiducial_points_synthesis_perturbed_img, (l[1] + math.ceil(stepSize / 2), l[0] + math.ceil(stepSize / 2)), 5, (0, 0, 255), -1) 429 | cv2.imwrite('/lustre/home/gwxie/program/project/unwarp/unwarp_perturbed/TPS/img/cv_TPS_large.jpg', fiducial_points_synthesis_perturbed_img) 430 | ''' 431 | 432 | '''clip''' 433 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = -1, -1, self.new_shape[0], self.new_shape[1] 434 | for x in range(self.new_shape[0] // 2, perturbed_x_max): 435 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and perturbed_x_max - 1 > x: 436 | perturbed_x_max = x 437 | break 438 | for x in range(self.new_shape[0] // 2, perturbed_x_min, -1): 439 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and x > 0: 440 | perturbed_x_min = x 441 | break 442 | for y in range(self.new_shape[1] // 2, perturbed_y_max): 443 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and perturbed_y_max - 1 > y: 444 | perturbed_y_max = y 445 | break 446 | for y in range(self.new_shape[1] // 2, perturbed_y_min, -1): 447 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and y > 0: 448 | perturbed_y_min = y 
449 | break 450 | 451 | if perturbed_x_min == 0 or perturbed_x_max == self.new_shape[0] or perturbed_y_min == self.new_shape[1] or perturbed_y_max == self.new_shape[1]: 452 | raise Exception('clip error') 453 | 454 | if perturbed_x_max - perturbed_x_min < im_lr//2 or perturbed_y_max - perturbed_y_min < im_ud//2: 455 | raise Exception('clip error') 456 | 457 | 458 | perfix_ = self.save_suffix+'_'+str(m)+'_'+str(n) 459 | is_shrink = False 460 | if perturbed_x_max - perturbed_x_min > save_img_shape[0] or perturbed_y_max - perturbed_y_min > save_img_shape[1]: 461 | is_shrink = True 462 | synthesis_perturbed_img = cv2.resize(self.synthesis_perturbed_img[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 463 | synthesis_perturbed_label = cv2.resize(self.synthesis_perturbed_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 464 | foreORbackground_label = cv2.resize(self.foreORbackground_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 465 | foreORbackground_label[foreORbackground_label < 0.99] = 0 466 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 467 | '''shrink fiducial points''' 468 | center_x_l, center_y_l = perturbed_x_min + (perturbed_x_max - perturbed_x_min) // 2, perturbed_y_min + (perturbed_y_max - perturbed_y_min) // 2 469 | fiducial_points_coordinate_copy = fiducial_points_coordinate.copy() 470 | shrink_x = im_lr/(perturbed_x_max - perturbed_x_min) 471 | shrink_y = im_ud/(perturbed_y_max - perturbed_y_min) 472 | fiducial_points_coordinate *= [shrink_x, shrink_y] 473 | center_x_l *= shrink_x 474 | center_y_l *= shrink_y 475 | # fiducial_points_coordinate[1:, 1:] *= [shrink_x, shrink_y] 476 | # fiducial_points_coordinate[1:, :1, 0] *= shrink_x 477 | # fiducial_points_coordinate[:1, 1:, 1] *= shrink_y 478 | # 
perturbed_x_min_copy, perturbed_y_min_copy, perturbed_x_max_copy, perturbed_y_max_copy = perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max 479 | 480 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = self.adjust_position_v2(0, 0, im_lr, im_ud, self.new_shape) 481 | 482 | self.synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 256) 483 | self.synthesis_perturbed_label = np.zeros_like(self.synthesis_perturbed_label) 484 | self.foreORbackground_label = np.zeros_like(self.foreORbackground_label) 485 | self.synthesis_perturbed_img[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_img 486 | self.synthesis_perturbed_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_label 487 | self.foreORbackground_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max] = foreORbackground_label 488 | 489 | 490 | center_x, center_y = perturbed_x_min + (perturbed_x_max - perturbed_x_min) // 2, perturbed_y_min + (perturbed_y_max - perturbed_y_min) // 2 491 | if is_shrink: 492 | fiducial_points_coordinate += [center_x-center_x_l, center_y-center_y_l] 493 | 494 | '''draw fiducial points 495 | stepSize = 0 496 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_img.copy() 497 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1, 2): 498 | cv2.circle(fiducial_points_synthesis_perturbed_img, 499 | (l[1] + math.ceil(stepSize / 2), l[0] + math.ceil(stepSize / 2)), 5, (0, 0, 255), -1) 500 | cv2.imwrite('/lustre/home/gwxie/program/project/unwarp/unwarp_perturbed/TPS/img/cv_TPS_small.jpg',fiducial_points_synthesis_perturbed_img) 501 | ''' 502 | self.new_shape = save_img_shape 503 | self.synthesis_perturbed_img = self.synthesis_perturbed_img[ 504 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 505 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2, 506 | :].copy() 507 | 
self.synthesis_perturbed_label = self.synthesis_perturbed_label[ 508 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 509 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2, 510 | :].copy() 511 | self.foreORbackground_label = self.foreORbackground_label[ 512 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 513 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2].copy() 514 | 515 | 516 | perturbed_x_ = max(self.new_shape[0] - (perturbed_x_max - perturbed_x_min), 0) 517 | perturbed_x_min = perturbed_x_ // 2 518 | perturbed_x_max = self.new_shape[0] - perturbed_x_ // 2 if perturbed_x_%2 == 0 else self.new_shape[0] - (perturbed_x_ // 2 + 1) 519 | 520 | perturbed_y_ = max(self.new_shape[1] - (perturbed_y_max - perturbed_y_min), 0) 521 | perturbed_y_min = perturbed_y_ // 2 522 | perturbed_y_max = self.new_shape[1] - perturbed_y_ // 2 if perturbed_y_%2 == 0 else self.new_shape[1] - (perturbed_y_ // 2 + 1) 523 | 524 | '''clip 525 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = -1, -1, self.new_shape[0], self.new_shape[1] 526 | for x in range(self.new_shape[0] // 2, perturbed_x_max): 527 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and perturbed_x_max - 1 > x: 528 | perturbed_x_max = x 529 | break 530 | for x in range(self.new_shape[0] // 2, perturbed_x_min, -1): 531 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and x > 0: 532 | perturbed_x_min = x 533 | break 534 | for y in range(self.new_shape[1] // 2, perturbed_y_max): 535 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and perturbed_y_max - 1 > y: 536 | perturbed_y_max = y 537 | break 538 | for y in range(self.new_shape[1] // 2, perturbed_y_min, -1): 539 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and y > 0: 540 | perturbed_y_min = y 541 | break 542 | 543 | 544 | center_x, center_y = 
perturbed_x_min+(perturbed_x_max - perturbed_x_min)//2, perturbed_y_min+(perturbed_y_max - perturbed_y_min)//2 545 | 546 | perfix_ = self.save_suffix+'_'+str(m)+'_'+str(n) 547 | 548 | self.new_shape = save_img_shape 549 | 550 | perturbed_x_ = max(self.new_shape[0] - (perturbed_x_max - perturbed_x_min), 0) 551 | perturbed_x_min = perturbed_x_ // 2 552 | perturbed_x_max = self.new_shape[0] - perturbed_x_ // 2 if perturbed_x_%2 == 0 else self.new_shape[0] - (perturbed_x_ // 2 + 1) 553 | 554 | perturbed_y_ = max(self.new_shape[1] - (perturbed_y_max - perturbed_y_min), 0) 555 | perturbed_y_min = perturbed_y_ // 2 556 | perturbed_y_max = self.new_shape[1] - perturbed_y_ // 2 if perturbed_y_%2 == 0 else self.new_shape[1] - (perturbed_y_ // 2 + 1) 557 | 558 | self.synthesis_perturbed_img = self.synthesis_perturbed_img[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2, :].copy() 559 | self.synthesis_perturbed_label = self.synthesis_perturbed_label[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2, :].copy() 560 | self.foreORbackground_label = self.foreORbackground_label[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2].copy() 561 | 562 | ''' 563 | 564 | 565 | 566 | '''save''' 567 | pixel_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(self.new_shape[0], self.new_shape[1], 2) 568 | 569 | if relativeShift_position == 'relativeShift_v2': 570 | self.synthesis_perturbed_label -= pixel_position 571 | fiducial_points_coordinate -= [center_x - self.new_shape[0] // 2, center_y - self.new_shape[1] // 2] 572 | 573 | self.synthesis_perturbed_label[:, :, 0] *= self.foreORbackground_label 574 | self.synthesis_perturbed_label[:, :, 1] *= self.foreORbackground_label 575 | self.synthesis_perturbed_img[:, :, 0] *= self.foreORbackground_label 576 | 
self.synthesis_perturbed_img[:, :, 1] *= self.foreORbackground_label 577 | self.synthesis_perturbed_img[:, :, 2] *= self.foreORbackground_label 578 | 579 | ''' 580 | synthesis_perturbed_img_filter = self.synthesis_perturbed_img.copy() 581 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 582 | # if self.is_perform(0.9, 0.1) or repeat_time > 5: 583 | # # if self.is_perform(0.1, 0.9) and repeat_time > 9: 584 | # # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (7, 7), 0) 585 | # # else: 586 | # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (5, 5), 0) 587 | # else: 588 | # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 589 | self.synthesis_perturbed_img[self.foreORbackground_label == 1] = synthesis_perturbed_img_filter[self.foreORbackground_label == 1] 590 | ''' 591 | 592 | ''' 593 | perturbed_bg_img = perturbed_bg_img.astype(np.float32) 594 | perturbed_bg_img[:, :, 0] *= 1 - self.foreORbackground_label 595 | perturbed_bg_img[:, :, 1] *= 1 - self.foreORbackground_label 596 | perturbed_bg_img[:, :, 2] *= 1 - self.foreORbackground_label 597 | self.synthesis_perturbed_img += perturbed_bg_img 598 | HSV 599 | perturbed_bg_img = perturbed_bg_img.astype(np.float32) 600 | if self.is_perform(0.1, 0.9): 601 | if self.is_perform(0.2, 0.8): 602 | synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy() 603 | synthesis_perturbed_img_clip_HSV = cv2.cvtColor(synthesis_perturbed_img_clip_HSV, cv2.COLOR_RGB2HSV) 604 | H_, S_, V_ = (random.random()-0.2)*20, (random.random()-0.2)/8, (random.random()-0.2)*20 605 | synthesis_perturbed_img_clip_HSV[:, :, 0], synthesis_perturbed_img_clip_HSV[:, :, 1], synthesis_perturbed_img_clip_HSV[:, :, 2] = synthesis_perturbed_img_clip_HSV[:, :, 0]-H_, synthesis_perturbed_img_clip_HSV[:, :, 1]-S_, synthesis_perturbed_img_clip_HSV[:, :, 2]-V_ 606 | synthesis_perturbed_img_clip_HSV 
= cv2.cvtColor(synthesis_perturbed_img_clip_HSV, cv2.COLOR_HSV2RGB) 607 | perturbed_bg_img[:, :, 0] *= 1-self.foreORbackground_label 608 | perturbed_bg_img[:, :, 1] *= 1-self.foreORbackground_label 609 | perturbed_bg_img[:, :, 2] *= 1-self.foreORbackground_label 610 | 611 | synthesis_perturbed_img_clip_HSV += perturbed_bg_img 612 | self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV 613 | else: 614 | perturbed_bg_img_HSV = perturbed_bg_img 615 | perturbed_bg_img_HSV = cv2.cvtColor(perturbed_bg_img_HSV, cv2.COLOR_RGB2HSV) 616 | H_, S_, V_ = (random.random()-0.5)*20, (random.random()-0.5)/8, (random.random()-0.2)*20 617 | perturbed_bg_img_HSV[:, :, 0], perturbed_bg_img_HSV[:, :, 1], perturbed_bg_img_HSV[:, :, 2] = perturbed_bg_img_HSV[:, :, 0]-H_, perturbed_bg_img_HSV[:, :, 1]-S_, perturbed_bg_img_HSV[:, :, 2]-V_ 618 | perturbed_bg_img_HSV = cv2.cvtColor(perturbed_bg_img_HSV, cv2.COLOR_HSV2RGB) 619 | perturbed_bg_img_HSV[:, :, 0] *= 1-self.foreORbackground_label 620 | perturbed_bg_img_HSV[:, :, 1] *= 1-self.foreORbackground_label 621 | perturbed_bg_img_HSV[:, :, 2] *= 1-self.foreORbackground_label 622 | 623 | self.synthesis_perturbed_img += perturbed_bg_img_HSV 624 | # self.synthesis_perturbed_img[np.sum(self.synthesis_perturbed_img, 2) == 771] = perturbed_bg_img_HSV[np.sum(self.synthesis_perturbed_img, 2) == 771] 625 | 626 | else: 627 | synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy() 628 | perturbed_bg_img[:, :, 0] *= 1 - self.foreORbackground_label 629 | perturbed_bg_img[:, :, 1] *= 1 - self.foreORbackground_label 630 | perturbed_bg_img[:, :, 2] *= 1 - self.foreORbackground_label 631 | 632 | synthesis_perturbed_img_clip_HSV += perturbed_bg_img 633 | 634 | # synthesis_perturbed_img_clip_HSV[np.sum(self.synthesis_perturbed_img, 2) == 771] = perturbed_bg_img[np.sum(self.synthesis_perturbed_img, 2) == 771] 635 | synthesis_perturbed_img_clip_HSV = cv2.cvtColor(synthesis_perturbed_img_clip_HSV, cv2.COLOR_RGB2HSV) 636 | H_, S_, V_ = 
(random.random()-0.5)*20, (random.random()-0.5)/10, (random.random()-0.4)*20 637 | synthesis_perturbed_img_clip_HSV[:, :, 0], synthesis_perturbed_img_clip_HSV[:, :, 1], synthesis_perturbed_img_clip_HSV[:, :, 2] = synthesis_perturbed_img_clip_HSV[:, :, 0]-H_, synthesis_perturbed_img_clip_HSV[:, :, 1]-S_, synthesis_perturbed_img_clip_HSV[:, :, 2]-V_ 638 | synthesis_perturbed_img_clip_HSV = cv2.cvtColor(synthesis_perturbed_img_clip_HSV, cv2.COLOR_HSV2RGB) 639 | self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV 640 | ''' 641 | '''HSV_v2''' 642 | perturbed_bg_img = perturbed_bg_img.astype(np.float32) 643 | # if self.is_perform(1, 0): 644 | # if self.is_perform(1, 0): 645 | if self.is_perform(0.1, 0.9): 646 | if self.is_perform(0.2, 0.8): 647 | synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy() 648 | 649 | synthesis_perturbed_img_clip_HSV = self.HSV_v1(synthesis_perturbed_img_clip_HSV) 650 | 651 | perturbed_bg_img[:, :, 0] *= 1-self.foreORbackground_label 652 | perturbed_bg_img[:, :, 1] *= 1-self.foreORbackground_label 653 | perturbed_bg_img[:, :, 2] *= 1-self.foreORbackground_label 654 | 655 | synthesis_perturbed_img_clip_HSV += perturbed_bg_img 656 | self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV 657 | else: 658 | perturbed_bg_img_HSV = perturbed_bg_img 659 | perturbed_bg_img_HSV = self.HSV_v1(perturbed_bg_img_HSV) 660 | 661 | perturbed_bg_img_HSV[:, :, 0] *= 1-self.foreORbackground_label 662 | perturbed_bg_img_HSV[:, :, 1] *= 1-self.foreORbackground_label 663 | perturbed_bg_img_HSV[:, :, 2] *= 1-self.foreORbackground_label 664 | 665 | self.synthesis_perturbed_img += perturbed_bg_img_HSV 666 | # self.synthesis_perturbed_img[np.sum(self.synthesis_perturbed_img, 2) == 771] = perturbed_bg_img_HSV[np.sum(self.synthesis_perturbed_img, 2) == 771] 667 | 668 | else: 669 | synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy() 670 | perturbed_bg_img[:, :, 0] *= 1 - self.foreORbackground_label 671 | 
perturbed_bg_img[:, :, 1] *= 1 - self.foreORbackground_label 672 | perturbed_bg_img[:, :, 2] *= 1 - self.foreORbackground_label 673 | 674 | synthesis_perturbed_img_clip_HSV += perturbed_bg_img 675 | 676 | synthesis_perturbed_img_clip_HSV = self.HSV_v1(synthesis_perturbed_img_clip_HSV) 677 | 678 | self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV 679 | 680 | '''''' 681 | # cv2.imwrite(self.save_path+'clip/'+perfix_+'_'+fold_curve+str(perturbed_time)+'-'+str(repeat_time)+'.png', synthesis_perturbed_img_clip) 682 | 683 | self.synthesis_perturbed_img[self.synthesis_perturbed_img < 0] = 0 684 | self.synthesis_perturbed_img[self.synthesis_perturbed_img > 255] = 255 685 | self.synthesis_perturbed_img = np.around(self.synthesis_perturbed_img).astype(np.uint8) 686 | label = np.zeros_like(self.synthesis_perturbed_img, dtype=np.float32) 687 | label[:, :, :2] = self.synthesis_perturbed_label 688 | label[:, :, 2] = self.foreORbackground_label 689 | 690 | # grey = np.around(self.synthesis_perturbed_img[:, :, 0] * 0.2989 + self.synthesis_perturbed_img[:, :, 1] * 0.5870 + self.synthesis_perturbed_img[:, :, 0] * 0.1140).astype(np.int16) 691 | # synthesis_perturbed_grey = np.concatenate((grey.reshape(self.new_shape[0], self.new_shape[1], 1), label), axis=2) 692 | synthesis_perturbed_color = np.concatenate((self.synthesis_perturbed_img, label), axis=2) 693 | 694 | self.synthesis_perturbed_color = np.zeros_like(synthesis_perturbed_color, dtype=np.float32) 695 | # self.synthesis_perturbed_grey = np.zeros_like(synthesis_perturbed_grey, dtype=np.float32) 696 | reduce_value_x = int(round(min((random.random() / 2) * (self.new_shape[0] - (perturbed_x_max - perturbed_x_min)), min(reduce_value, reduce_value_v2)))) 697 | reduce_value_y = int(round(min((random.random() / 2) * (self.new_shape[1] - (perturbed_y_max - perturbed_y_min)), min(reduce_value, reduce_value_v2)))) 698 | perturbed_x_min = max(perturbed_x_min - reduce_value_x, 0) 699 | perturbed_x_max = min(perturbed_x_max + 
reduce_value_x, self.new_shape[0]) 700 | perturbed_y_min = max(perturbed_y_min - reduce_value_y, 0) 701 | perturbed_y_max = min(perturbed_y_max + reduce_value_y, self.new_shape[1]) 702 | 703 | if im_lr >= im_ud: 704 | self.synthesis_perturbed_color[:, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_color[:, perturbed_y_min:perturbed_y_max, :] 705 | # self.synthesis_perturbed_grey[:, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_grey[:, perturbed_y_min:perturbed_y_max, :] 706 | else: 707 | self.synthesis_perturbed_color[perturbed_x_min:perturbed_x_max, :, :] = synthesis_perturbed_color[perturbed_x_min:perturbed_x_max, :, :] 708 | # self.synthesis_perturbed_grey[perturbed_x_min:perturbed_x_max, :, :] = synthesis_perturbed_grey[perturbed_x_min:perturbed_x_max, :, :] 709 | 710 | '''blur''' 711 | if self.is_perform(0.1, 0.9): 712 | synthesis_perturbed_img_filter = self.synthesis_perturbed_color[:, :, :3].copy() 713 | if self.is_perform(0.1, 0.9): 714 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (5, 5), 0) 715 | else: 716 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 717 | if self.is_perform(0.5, 0.5): 718 | self.synthesis_perturbed_color[:, :, :3][self.synthesis_perturbed_color[:, :, 5] == 1] = synthesis_perturbed_img_filter[self.synthesis_perturbed_color[:, :, 5] == 1] 719 | else: 720 | self.synthesis_perturbed_color[:, :, :3] = synthesis_perturbed_img_filter 721 | 722 | fiducial_points_coordinate = fiducial_points_coordinate[:, :, ::-1] 723 | '''draw fiducial points''' 724 | stepSize = 0 725 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_color[:, :, :3].copy() 726 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1, 2): 727 | cv2.circle(fiducial_points_synthesis_perturbed_img, (l[0] + math.ceil(stepSize / 2), l[1] + math.ceil(stepSize / 2)), 2, (0, 0, 255), -1) 728 | cv2.imwrite(self.save_path + 'fiducial_points/' + perfix_ 
+ '_' + fold_curve + '.png', fiducial_points_synthesis_perturbed_img) 729 | 730 | cv2.imwrite(self.save_path + 'png/' + perfix_ + '_' + fold_curve + '.png', self.synthesis_perturbed_color[:, :, :3]) 731 | 732 | '''forward-begin''' 733 | self.forward_mapping = np.full((save_img_shape[0], save_img_shape[1], 2), 0, dtype=np.float32) 734 | forward_mapping = np.full((save_img_shape[0], save_img_shape[1], 2), 0, dtype=np.float32) 735 | forward_position = (self.synthesis_perturbed_color[:, :, 3:5] + pixel_position)[self.synthesis_perturbed_color[:, :, 5] != 0, :] 736 | flat_position = np.argwhere(np.zeros(save_img_shape, dtype=np.uint32) == 0) 737 | vtx, wts = self.interp_weights(forward_position, flat_position) 738 | wts_sum = np.abs(wts).sum(-1) 739 | wts = wts[wts_sum <= 1, :] 740 | vtx = vtx[wts_sum <= 1, :] 741 | flat_position_forward = flat_position.reshape(save_img_shape[0], save_img_shape[1], 2)[self.synthesis_perturbed_color[:, :, 5] != 0, :] 742 | forward_mapping.reshape(save_img_shape[0] * save_img_shape[1], 2)[wts_sum <= 1, :] = self.interpolate(flat_position_forward, vtx, wts) 743 | forward_mapping = forward_mapping.reshape(save_img_shape[0], save_img_shape[1], 2) 744 | 745 | mapping_x_min_, mapping_y_min_, mapping_x_max_, mapping_y_max_ = self.adjust_position_v2(0, 0, im_lr, im_ud, self.new_shape) 746 | shreshold_zoom_out = 2 747 | mapping_x_min = mapping_x_min_ + shreshold_zoom_out 748 | mapping_y_min = mapping_y_min_ + shreshold_zoom_out 749 | mapping_x_max = mapping_x_max_ - shreshold_zoom_out 750 | mapping_y_max = mapping_y_max_ - shreshold_zoom_out 751 | self.forward_mapping[mapping_x_min:mapping_x_max, mapping_y_min:mapping_y_max] = forward_mapping[mapping_x_min:mapping_x_max, mapping_y_min:mapping_y_max] 752 | self.scan_img = np.full((save_img_shape[0], save_img_shape[1], 3), 0, dtype=np.float32) 753 | self.scan_img[mapping_x_min_:mapping_x_max_, mapping_y_min_:mapping_y_max_] = self.origin_img 754 | self.origin_img = self.scan_img 755 | # flat_img = 
np.full((save_img_shape[0], save_img_shape[1], 3), 0, dtype=np.float32) 756 | # cv2.remap(self.synthesis_perturbed_color[:, :, :3], self.forward_mapping[:, :, 1], self.forward_mapping[:, :, 0], cv2.INTER_LINEAR, flat_img) 757 | # cv2.imwrite(self.save_path + 'outputs/1.jpg', flat_img) 758 | '''forward-end''' 759 | 760 | '''image and label 761 | synthesis_perturbed_data = { 762 | 'fiducial_points': fiducial_points_coordinate, 763 | 'segment': np.array((segment_x, segment_y)) 764 | } 765 | cv2.imwrite(self.save_path + 'png/' + perfix_ + '_' + fold_curve + '.png', self.synthesis_perturbed_color[:, :, :3]) 766 | ''' 767 | '''or''' 768 | synthesis_perturbed_data = { 769 | 'image':self.synthesis_perturbed_color[:, :, :3], 770 | 'fiducial_points': fiducial_points_coordinate, 771 | 'segment': np.array((segment_x, segment_y)) 772 | } 773 | 774 | with open(self.save_path+'color/'+perfix_+'_'+fold_curve+'.gw', 'wb') as f: 775 | pickle_perturbed_data = pickle.dumps(synthesis_perturbed_data) 776 | f.write(pickle_perturbed_data) 777 | # with open(self.save_path+'grey/'+perfix_+'_'+fold_curve+'.gw', 'wb') as f: 778 | # pickle_perturbed_data = pickle.dumps(self.synthesis_perturbed_grey) 779 | # f.write(pickle_perturbed_data) 780 | # cv2.imwrite(self.save_path+'grey_im/'+perfix_+'_'+fold_curve+'.png', self.synthesis_perturbed_color[:, :, :1]) 781 | 782 | 783 | # cv2.imwrite(self.save_path + 'scan/' + self.save_suffix + '_' + str(m) + '.png', self.origin_img) 784 | trian_t = time.time() - begin_train 785 | mm, ss = divmod(trian_t, 60) 786 | hh, mm = divmod(mm, 60) 787 | print(str(m)+'_'+str(n)+'_'+fold_curve+' '+str(repeat_time)+" Time : %02d:%02d:%02d\n" % (hh, mm, ss)) 788 | 789 | 790 | def multiThread(m, n, img_path_, bg_path_, save_path, save_suffix): 791 | saveFold = perturbed(img_path_, bg_path_, save_path, save_suffix) 792 | saveCurve = perturbed(img_path_, bg_path_, save_path, save_suffix) 793 | 794 | repeat_time = min(max(round(np.random.normal(10, 3)), 5), 16) 795 | fold = 
def xgw(args):
	"""Drive the dataset generation: schedule fold/curve synthesis jobs.

	For every source image (under ``args.path``) and every repetition index
	(``args.sys_num``), pair it with a random background directory and submit
	one 'fold' and one 'curve' ``save_img`` job to a 2-worker process pool.

	Args:
		args: argparse namespace with ``path``, ``bg_path``, ``output_path``
			and ``sys_num`` attributes.

	Side effects: creates output sub-directories under the save path and
	spawns worker processes that write the synthesised files.
	"""
	path = args.path
	bg_path = args.bg_path
	if args.output_path is None:
		save_path = '/lustre/home/gwxie/data/unwarp_new/train/general1024/general1024_v1/'
	else:
		save_path = args.output_path

	# Create every output sub-directory up front. exist_ok=True replaces the
	# original check-then-create (os.path.exists + os.makedirs) pattern,
	# which is racy when several generator scripts start concurrently.
	for sub_dir in ('color/', 'fiducial_points/', 'png/', 'scan/', 'outputs/'):
		os.makedirs(save_path + sub_dir, exist_ok=True)

	# Second-to-last path component names this dataset batch,
	# e.g. './scan/new/' -> 'new'.
	save_suffix = args.path.split('/')[-2]

	all_img_path = getDatasets(path)
	all_bgImg_path = getDatasets(bg_path)
	global begin_train
	begin_train = time.time()
	fiducial_points = 61  # 31
	process_pool = Pool(2)
	for m, img_path in enumerate(all_img_path):
		for n in range(args.sys_num):
			img_path_ = path + img_path
			bg_path_ = bg_path + random.choice(all_bgImg_path) + '/'

			# Retry scheduling this image pair up to 10 times; apply_async
			# itself rarely raises, so this mainly guards worker construction.
			for m_n in range(10):
				try:
					saveFold = perturbed(img_path_, bg_path_, save_path, save_suffix)
					saveCurve = perturbed(img_path_, bg_path_, save_path, save_suffix)

					# Fold passes: normal(12, 4) clamped to [1, 18].
					repeat_time = min(max(round(np.random.normal(12, 4)), 1), 18)
					process_pool.apply_async(func=saveFold.save_img, args=(m, n, 'fold', repeat_time, fiducial_points, 'relativeShift_v2'))

					# Curve passes: normal(8, 4) clamped to [1, 13].
					repeat_time = min(max(round(np.random.normal(8, 4)), 1), 13)
					process_pool.apply_async(func=saveCurve.save_img, args=(m, n, 'curve', repeat_time, fiducial_points, 'relativeShift_v2'))
				except BaseException as err:
					# Deliberate best-effort: log and retry rather than abort
					# the whole generation run.
					print(err)
					continue
				break

	process_pool.close()
	process_pool.join()
if __name__ == '__main__':
	# Command-line front end: collect hyper-parameters, then hand off to xgw.
	arg_parser = argparse.ArgumentParser(description='Hyperparams')
	arg_parser.add_argument('--path', default='./scan/new/', type=str,
							help='the path of origin img.')
	arg_parser.add_argument('--bg_path', default='./background/', type=str,
							help='the path of bg img.')
	arg_parser.add_argument('--output_path', default='./output/', type=str,
							help='the path of origin img.')
	# parser.set_defaults(output_path='test')
	arg_parser.add_argument('--count_from', '-p', default=0, type=int,
							metavar='N', help='print frequency (default: 10)')  # print frequency
	arg_parser.add_argument('--repeat_T', default=0, type=int)
	arg_parser.add_argument('--sys_num', default=6, type=int)

	xgw(arg_parser.parse_args())
def getDatasets(dir_path):
	"""Return the names of all entries (files and sub-directories) in *dir_path*.

	Thin wrapper over os.listdir, so the ordering is OS-dependent. The
	parameter was renamed from ``dir`` to avoid shadowing the builtin;
	all call sites pass it positionally, so callers are unaffected.
	"""
	return os.listdir(dir_path)
p=[0.01, 0.1, 0.2, 0.3, 0.25, 0.14]) 66 | 67 | if im_lr > im_ud: 68 | im_ud = min(int(im_ud / im_lr * base_img_shrink), save_img_shape[1] - reduce_value_v2) 69 | im_lr = save_img_shape[0] - reduce_value 70 | else: 71 | base_img_shrink = save_img_shape[1] - reduce_value 72 | im_lr = min(int(im_lr / im_ud * base_img_shrink), save_img_shape[0] - reduce_value_v2) 73 | im_ud = base_img_shrink 74 | if round(im_lr / im_ud, 2) < 0.5 or round(im_ud / im_lr, 2) < 0.5: 75 | repeat_time = min(repeat_time, 8) 76 | 77 | edge_padding = 3 78 | im_lr -= im_lr % (fiducial_points-1) - (2*edge_padding) # im_lr % (fiducial_points-1) - 1 79 | im_ud -= im_ud % (fiducial_points-1) - (2*edge_padding) # im_ud % (fiducial_points-1) - 1 80 | im_hight = np.linspace(edge_padding, im_lr - edge_padding, fiducial_points, dtype=np.int64) 81 | im_wide = np.linspace(edge_padding, im_ud - edge_padding, fiducial_points, dtype=np.int64) 82 | # im_lr -= im_lr % (fiducial_points-1) - (1+2*edge_padding) # im_lr % (fiducial_points-1) - 1 83 | # im_ud -= im_ud % (fiducial_points-1) - (1+2*edge_padding) # im_ud % (fiducial_points-1) - 1 84 | # im_hight = np.linspace(edge_padding, im_lr - (1+edge_padding), fiducial_points, dtype=np.int64) 85 | # im_wide = np.linspace(edge_padding, im_ud - (1+edge_padding), fiducial_points, dtype=np.int64) 86 | im_x, im_y = np.meshgrid(im_hight, im_wide) 87 | segment_x = (im_lr) // (fiducial_points-1) 88 | segment_y = (im_ud) // (fiducial_points-1) 89 | 90 | # plt.plot(im_x, im_y, 91 | # color='limegreen', 92 | # marker='.', 93 | # linestyle='') 94 | # plt.grid(True) 95 | # plt.show() 96 | self.origin_img = cv2.resize(origin_img, (im_ud, im_lr), interpolation=cv2.INTER_CUBIC) 97 | 98 | perturbed_bg_ = getDatasets(self.bg_path) 99 | perturbed_bg_img_ = self.bg_path+random.choice(perturbed_bg_) 100 | perturbed_bg_img = cv2.imread(perturbed_bg_img_, flags=cv2.IMREAD_COLOR) 101 | 102 | mesh_shape = self.origin_img.shape[:2] 103 | 104 | self.synthesis_perturbed_img = 
np.full((enlarge_img_shrink[0], enlarge_img_shrink[1], 3), 256, dtype=np.float32)#np.zeros_like(perturbed_bg_img) 105 | # self.synthesis_perturbed_img = np.full((enlarge_img_shrink[0], enlarge_img_shrink[1], 3), 0, dtype=np.int16)#np.zeros_like(perturbed_bg_img) 106 | self.new_shape = self.synthesis_perturbed_img.shape[:2] 107 | perturbed_bg_img = cv2.resize(perturbed_bg_img, (save_img_shape[1], save_img_shape[0]), cv2.INPAINT_TELEA) 108 | 109 | origin_pixel_position = np.argwhere(np.zeros(mesh_shape, dtype=np.uint32) == 0).reshape(mesh_shape[0], mesh_shape[1], 2) 110 | pixel_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(self.new_shape[0], self.new_shape[1], 2) 111 | self.perturbed_xy_ = np.zeros((self.new_shape[0], self.new_shape[1], 2)) 112 | # self.perturbed_xy_ = pixel_position.copy().astype(np.float32) 113 | 114 | self.synthesis_perturbed_label = np.zeros((self.new_shape[0], self.new_shape[1], 2)) 115 | x_min, y_min, x_max, y_max = self.adjust_position_v2(0, 0, mesh_shape[0], mesh_shape[1], save_img_shape) 116 | origin_pixel_position += [x_min, y_min] 117 | 118 | x_min, y_min, x_max, y_max = self.adjust_position(0, 0, mesh_shape[0], mesh_shape[1]) 119 | x_shift = random.randint(-enlarge_img_shrink[0]//16, enlarge_img_shrink[0]//16) 120 | y_shift = random.randint(-enlarge_img_shrink[1]//16, enlarge_img_shrink[1]//16) 121 | x_min += x_shift 122 | x_max += x_shift 123 | y_min += y_shift 124 | y_max += y_shift 125 | 126 | '''im_x,y''' 127 | im_x += x_min 128 | im_y += y_min 129 | 130 | self.synthesis_perturbed_img[x_min:x_max, y_min:y_max] = self.origin_img 131 | self.synthesis_perturbed_label[x_min:x_max, y_min:y_max] = origin_pixel_position 132 | 133 | synthesis_perturbed_img_map = self.synthesis_perturbed_img.copy() 134 | synthesis_perturbed_label_map = self.synthesis_perturbed_label.copy() 135 | 136 | foreORbackground_label = np.full((mesh_shape), 1, dtype=np.int16) 137 | foreORbackground_label_map = np.full((self.new_shape), 
0, dtype=np.int16) 138 | foreORbackground_label_map[x_min:x_max, y_min:y_max] = foreORbackground_label 139 | 140 | # synthesis_perturbed_img_map = self.pad(self.synthesis_perturbed_img.copy(), x_min, y_min, x_max, y_max) 141 | # synthesis_perturbed_label_map = self.pad(synthesis_perturbed_label_map, x_min, y_min, x_max, y_max) 142 | '''*****************************************************************''' 143 | 144 | normalizationFun_0_1 = self.is_perform(0.2, 0.8) 145 | if fold_curve == 'fold': 146 | fold_curve_random = True 147 | is_normalizationFun_mixture = False 148 | if normalizationFun_0_1: 149 | alpha_perturbed = random.randint(60, 120) / 100 150 | else: 151 | alpha_perturbed = random.randint(70, 120) / 100 152 | else: 153 | fold_curve_random = False # self.is_perform(0.01, 0.99) 154 | is_normalizationFun_mixture = False # self.is_perform(0.01, 0.99) 155 | if normalizationFun_0_1: 156 | alpha_perturbed = random.randint(70, 140) / 100 157 | else: 158 | alpha_perturbed = random.randint(80, 120) / 100 159 | 160 | synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 256) 161 | # synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 0, dtype=np.int16) 162 | synthesis_perturbed_label = np.zeros_like(self.synthesis_perturbed_label) 163 | 164 | p_pp_choice = self.is_perform(0.8, 0.2) if fold_curve == 'fold' else self.is_perform(0.1, 0.9) 165 | for repeat_i in range(repeat_time): 166 | 167 | '''''' 168 | linspace_x = [0, (self.new_shape[0] - im_lr) // 2 - 1, 169 | self.new_shape[0] - (self.new_shape[0] - im_lr) // 2 - 1, self.new_shape[0] - 1] 170 | linspace_y = [0, (self.new_shape[1] - im_ud) // 2 - 1, 171 | self.new_shape[1] - (self.new_shape[1] - im_ud) // 2 - 1, self.new_shape[1] - 1] 172 | linspace_x_seq = [1, 2, 3] 173 | linspace_y_seq = [1, 2, 3] 174 | r_x = random.choice(linspace_x_seq) 175 | r_y = random.choice(linspace_y_seq) 176 | perturbed_p = np.array( 177 | [random.randint(linspace_x[r_x-1] * 10, linspace_x[r_x] * 10), 178 
| random.randint(linspace_y[r_y-1] * 10, linspace_y[r_y] * 10)])/10 179 | if ((r_x == 1 or r_x == 3) and (r_y == 1 or r_y == 3)) and p_pp_choice: 180 | linspace_x_seq.remove(r_x) 181 | linspace_y_seq.remove(r_y) 182 | r_x = random.choice(linspace_x_seq) 183 | r_y = random.choice(linspace_y_seq) 184 | perturbed_pp = np.array( 185 | [random.randint(linspace_x[r_x-1] * 10, linspace_x[r_x] * 10), 186 | random.randint(linspace_y[r_y-1] * 10, linspace_y[r_y] * 10)])/10 187 | # perturbed_p, perturbed_pp = np.array( 188 | # [random.randint(0, self.new_shape[0] * 10) / 10, 189 | # random.randint(0, self.new_shape[1] * 10) / 10]) \ 190 | # , np.array([random.randint(0, self.new_shape[0] * 10) / 10, 191 | # random.randint(0, self.new_shape[1] * 10) / 10]) 192 | # perturbed_p, perturbed_pp = np.array( 193 | # [random.randint((self.new_shape[0]-im_lr)//2*10, (self.new_shape[0]-(self.new_shape[0]-im_lr)//2) * 10) / 10, 194 | # random.randint((self.new_shape[1]-im_ud)//2*10, (self.new_shape[1]-(self.new_shape[1]-im_ud)//2) * 10) / 10]) \ 195 | # , np.array([random.randint((self.new_shape[0]-im_lr)//2*10, (self.new_shape[0]-(self.new_shape[0]-im_lr)//2) * 10) / 10, 196 | # random.randint((self.new_shape[1]-im_ud)//2*10, (self.new_shape[1]-(self.new_shape[1]-im_ud)//2) * 10) / 10]) 197 | '''''' 198 | 199 | perturbed_vp = perturbed_pp - perturbed_p 200 | perturbed_vp_norm = np.linalg.norm(perturbed_vp) 201 | 202 | perturbed_distance_vertex_and_line = np.dot((perturbed_p - pixel_position), perturbed_vp) / perturbed_vp_norm 203 | '''''' 204 | # perturbed_v = np.array([random.randint(-3000, 3000) / 100, random.randint(-3000, 3000) / 100]) 205 | perturbed_v = np.array([random.randint(-8000, 8000) / 100, random.randint(-8000, 8000) / 100]) 206 | # perturbed_v = np.array([random.randint(-11000, 11000) / 100, random.randint(-11000, 11000) / 100]) 207 | '''''' 208 | if fold_curve == 'fold': 209 | if is_normalizationFun_mixture: 210 | if self.is_perform(0.5, 0.5): 211 | perturbed_d = 
np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 212 | else: 213 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 2)) 214 | else: 215 | if normalizationFun_0_1: 216 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), 2) 217 | else: 218 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 219 | 220 | else: 221 | if is_normalizationFun_mixture: 222 | if self.is_perform(0.5, 0.5): 223 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 224 | else: 225 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 2)) 226 | else: 227 | if normalizationFun_0_1: 228 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), 2) 229 | else: 230 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 231 | '''''' 232 | if fold_curve_random: 233 | # omega_perturbed = (alpha_perturbed+0.2) / (perturbed_d + alpha_perturbed) 234 | # omega_perturbed = alpha_perturbed**perturbed_d 235 | omega_perturbed = alpha_perturbed / (perturbed_d + alpha_perturbed) 236 | else: 237 | omega_perturbed = 1 - perturbed_d ** alpha_perturbed 238 | 239 | '''shadow''' 240 | if self.is_perform(0.4, 0.6): 241 | synthesis_perturbed_img_map[x_min:x_max, y_min:y_max] = np.minimum(np.maximum(synthesis_perturbed_img_map[x_min:x_max, y_min:y_max] - np.int16(np.round(omega_perturbed[x_min:x_max, y_min:y_max].repeat(3).reshape(x_max-x_min, y_max-y_min, 3) * abs(np.linalg.norm(perturbed_v//2))*np.array([0.4-random.random()*0.1, 0.4-random.random()*0.1, 0.4-random.random()*0.1]))), 0), 255) 242 | '''''' 243 | 244 | if relativeShift_position in ['position', 'relativeShift_v2']: 245 | self.perturbed_xy_ += np.array([omega_perturbed * perturbed_v[0], omega_perturbed * perturbed_v[1]]).transpose(1, 2, 0) 246 | else: 247 | print('relativeShift_position error') 248 | exit() 249 | 250 | 251 | '''perspective''' 252 | 253 
| perspective_shreshold = random.randint(18, 52)*10 # 280 254 | x_min_per, y_min_per, x_max_per, y_max_per = self.adjust_position(perspective_shreshold, perspective_shreshold, self.new_shape[0]-perspective_shreshold, self.new_shape[1]-perspective_shreshold) 255 | pts1 = np.float32([[x_min_per, y_min_per], [x_max_per, y_min_per], [x_min_per, y_max_per], [x_max_per, y_max_per]]) 256 | e_1_ = x_max_per - x_min_per 257 | e_2_ = y_max_per - y_min_per 258 | e_3_ = e_2_ 259 | e_4_ = e_1_ 260 | perspective_shreshold_h = e_1_*0.02 261 | perspective_shreshold_w = e_2_*0.02 262 | a_min_, a_max_ = 70, 110 263 | # if self.is_perform(1, 0): 264 | if fold_curve == 'curve' and self.is_perform(0.2, 0.8): 265 | if self.is_perform(0.5, 0.5): 266 | while True: 267 | pts2 = np.around( 268 | np.float32([[x_min_per - (random.random()) * perspective_shreshold, y_min_per + (random.random()) * perspective_shreshold], 269 | [x_max_per - (random.random()) * perspective_shreshold, y_min_per - (random.random()) * perspective_shreshold], 270 | [x_min_per + (random.random()) * perspective_shreshold, y_max_per + (random.random()) * perspective_shreshold], 271 | [x_max_per + (random.random()) * perspective_shreshold, y_max_per - (random.random()) * perspective_shreshold]])) # right 272 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 273 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 274 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 275 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 276 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 277 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 278 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 279 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 280 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) 
or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 281 | break 282 | else: 283 | while True: 284 | pts2 = np.around( 285 | np.float32([[x_min_per + (random.random()) * perspective_shreshold, y_min_per - (random.random()) * perspective_shreshold], 286 | [x_max_per + (random.random()) * perspective_shreshold, y_min_per + (random.random()) * perspective_shreshold], 287 | [x_min_per - (random.random()) * perspective_shreshold, y_max_per - (random.random()) * perspective_shreshold], 288 | [x_max_per - (random.random()) * perspective_shreshold, y_max_per + (random.random()) * perspective_shreshold]])) 289 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 290 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 291 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 292 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 293 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 294 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 295 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 296 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 297 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 298 | break 299 | 300 | else: 301 | while True: 302 | pts2 = np.around(np.float32([[x_min_per+(random.random()-0.5)*perspective_shreshold, y_min_per+(random.random()-0.5)*perspective_shreshold], 303 | [x_max_per+(random.random()-0.5)*perspective_shreshold, y_min_per+(random.random()-0.5)*perspective_shreshold], 304 | [x_min_per+(random.random()-0.5)*perspective_shreshold, y_max_per+(random.random()-0.5)*perspective_shreshold], 305 | [x_max_per+(random.random()-0.5)*perspective_shreshold, y_max_per+(random.random()-0.5)*perspective_shreshold]])) 306 | e_1 = 
np.linalg.norm(pts2[0]-pts2[1]) 307 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 308 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 309 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 310 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 311 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 312 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 313 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 314 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 315 | break 316 | 317 | M = cv2.getPerspectiveTransform(pts1, pts2) 318 | one = np.ones((self.new_shape[0], self.new_shape[1], 1), dtype=np.int16) 319 | matr = np.dstack((pixel_position, one)) 320 | new = np.dot(M, matr.reshape(-1, 3).T).T.reshape(self.new_shape[0], self.new_shape[1], 3) 321 | x = new[:, :, 0]/new[:, :, 2] 322 | y = new[:, :, 1]/new[:, :, 2] 323 | perturbed_xy_ = np.dstack((x, y)) 324 | # perturbed_xy_round_int = np.around(cv2.bilateralFilter(perturbed_xy_round_int, 9, 75, 75)) 325 | # perturbed_xy_round_int = np.around(cv2.blur(perturbed_xy_, (17, 17))) 326 | # perturbed_xy_round_int = cv2.blur(perturbed_xy_round_int, (17, 17)) 327 | # perturbed_xy_round_int = cv2.GaussianBlur(perturbed_xy_round_int, (7, 7), 0) 328 | perturbed_xy_ = perturbed_xy_-np.min(perturbed_xy_.T.reshape(2, -1), 1) 329 | # perturbed_xy_round_int = np.around(perturbed_xy_round_int-np.min(perturbed_xy_round_int.T.reshape(2, -1), 1)).astype(np.int16) 330 | 331 | self.perturbed_xy_ += perturbed_xy_ 332 | 333 | '''perspective end''' 334 | 335 | '''to img''' 336 | flat_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape( 337 | self.new_shape[0] * self.new_shape[1], 2) 338 | # 
self.perturbed_xy_ = cv2.blur(self.perturbed_xy_, (7, 7)) 339 | self.perturbed_xy_ = cv2.GaussianBlur(self.perturbed_xy_, (7, 7), 0) 340 | 341 | '''get fiducial points''' 342 | fiducial_points_coordinate = self.perturbed_xy_[im_x, im_y] 343 | 344 | 345 | vtx, wts = self.interp_weights(self.perturbed_xy_.reshape(self.new_shape[0] * self.new_shape[1], 2), flat_position) 346 | wts_sum = np.abs(wts).sum(-1) 347 | 348 | # flat_img.reshape(flat_shape[0] * flat_shape[1], 3)[:] = interpolate(pixel, vtx, wts) 349 | wts = wts[wts_sum <= 1, :] 350 | vtx = vtx[wts_sum <= 1, :] 351 | synthesis_perturbed_img.reshape(self.new_shape[0] * self.new_shape[1], 3)[wts_sum <= 1, 352 | :] = self.interpolate(synthesis_perturbed_img_map.reshape(self.new_shape[0] * self.new_shape[1], 3), vtx, wts) 353 | 354 | synthesis_perturbed_label.reshape(self.new_shape[0] * self.new_shape[1], 2)[wts_sum <= 1, 355 | :] = self.interpolate(synthesis_perturbed_label_map.reshape(self.new_shape[0] * self.new_shape[1], 2), vtx, wts) 356 | 357 | foreORbackground_label = np.zeros(self.new_shape) 358 | foreORbackground_label.reshape(self.new_shape[0] * self.new_shape[1], 1)[wts_sum <= 1, :] = self.interpolate(foreORbackground_label_map.reshape(self.new_shape[0] * self.new_shape[1], 1), vtx, wts) 359 | foreORbackground_label[foreORbackground_label < 0.99] = 0 360 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 361 | 362 | self.synthesis_perturbed_img = synthesis_perturbed_img 363 | self.synthesis_perturbed_label = synthesis_perturbed_label 364 | self.foreORbackground_label = foreORbackground_label 365 | 366 | '''draw fiducial points 367 | stepSize = 0 368 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_img.copy() 369 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1,2): 370 | cv2.circle(fiducial_points_synthesis_perturbed_img, (l[1] + math.ceil(stepSize / 2), l[0] + math.ceil(stepSize / 2)), 5, (0, 0, 255), -1) 371 | 
cv2.imwrite('/lustre/home/gwxie/program/project/unwarp/unwarp_perturbed/TPS/img/cv_TPS_large.jpg', fiducial_points_synthesis_perturbed_img) 372 | ''' 373 | 374 | 375 | '''clip''' 376 | 377 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = -1, -1, self.new_shape[0], self.new_shape[1] 378 | for x in range(self.new_shape[0] // 2, perturbed_x_max): 379 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and perturbed_x_max - 1 > x: 380 | perturbed_x_max = x 381 | break 382 | for x in range(self.new_shape[0] // 2, perturbed_x_min, -1): 383 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and x > 0: 384 | perturbed_x_min = x 385 | break 386 | for y in range(self.new_shape[1] // 2, perturbed_y_max): 387 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and perturbed_y_max - 1 > y: 388 | perturbed_y_max = y 389 | break 390 | for y in range(self.new_shape[1] // 2, perturbed_y_min, -1): 391 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and y > 0: 392 | perturbed_y_min = y 393 | break 394 | 395 | if perturbed_x_min == 0 or perturbed_x_max == self.new_shape[0] or perturbed_y_min == self.new_shape[1] or perturbed_y_max == self.new_shape[1]: 396 | raise Exception('clip error') 397 | 398 | if perturbed_x_max - perturbed_x_min < im_lr//2 or perturbed_y_max - perturbed_y_min < im_ud//2: 399 | raise Exception('clip error') 400 | 401 | 402 | perfix_ = self.save_suffix+'_'+str(m)+'_'+str(n) 403 | is_shrink = False 404 | if perturbed_x_max - perturbed_x_min > save_img_shape[0] or perturbed_y_max - perturbed_y_min > save_img_shape[1]: 405 | is_shrink = True 406 | synthesis_perturbed_img = cv2.resize(self.synthesis_perturbed_img[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 407 | synthesis_perturbed_label = cv2.resize(self.synthesis_perturbed_label[perturbed_x_min:perturbed_x_max, 
perturbed_y_min:perturbed_y_max, :].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 408 | foreORbackground_label = cv2.resize(self.foreORbackground_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 409 | foreORbackground_label[foreORbackground_label < 0.99] = 0 410 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 411 | '''shrink fiducial points''' 412 | center_x_l, center_y_l = perturbed_x_min + (perturbed_x_max - perturbed_x_min) // 2, perturbed_y_min + (perturbed_y_max - perturbed_y_min) // 2 413 | fiducial_points_coordinate_copy = fiducial_points_coordinate.copy() 414 | shrink_x = im_lr/(perturbed_x_max - perturbed_x_min) 415 | shrink_y = im_ud/(perturbed_y_max - perturbed_y_min) 416 | fiducial_points_coordinate *= [shrink_x, shrink_y] 417 | center_x_l *= shrink_x 418 | center_y_l *= shrink_y 419 | # fiducial_points_coordinate[1:, 1:] *= [shrink_x, shrink_y] 420 | # fiducial_points_coordinate[1:, :1, 0] *= shrink_x 421 | # fiducial_points_coordinate[:1, 1:, 1] *= shrink_y 422 | # perturbed_x_min_copy, perturbed_y_min_copy, perturbed_x_max_copy, perturbed_y_max_copy = perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max 423 | 424 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = self.adjust_position_v2(0, 0, im_lr, im_ud, self.new_shape) 425 | 426 | self.synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 256) 427 | self.synthesis_perturbed_label = np.zeros_like(self.synthesis_perturbed_label) 428 | self.foreORbackground_label = np.zeros_like(self.foreORbackground_label) 429 | self.synthesis_perturbed_img[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_img 430 | self.synthesis_perturbed_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_label 431 | self.foreORbackground_label[perturbed_x_min:perturbed_x_max, 
perturbed_y_min:perturbed_y_max] = foreORbackground_label 432 | 433 | center_x, center_y = perturbed_x_min + (perturbed_x_max - perturbed_x_min) // 2, perturbed_y_min + (perturbed_y_max - perturbed_y_min) // 2 434 | if is_shrink: 435 | fiducial_points_coordinate += [center_x-center_x_l, center_y-center_y_l] 436 | 437 | '''draw fiducial points 438 | stepSize = 0 439 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_img.copy() 440 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1, 2): 441 | cv2.circle(fiducial_points_synthesis_perturbed_img, 442 | (l[1] + math.ceil(stepSize / 2), l[0] + math.ceil(stepSize / 2)), 5, (0, 0, 255), -1) 443 | cv2.imwrite('/lustre/home/gwxie/program/project/unwarp/unwarp_perturbed/TPS/img/cv_TPS_small.jpg',fiducial_points_synthesis_perturbed_img) 444 | ''' 445 | self.new_shape = save_img_shape 446 | self.synthesis_perturbed_img = self.synthesis_perturbed_img[ 447 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 448 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2, 449 | :].copy() 450 | self.synthesis_perturbed_label = self.synthesis_perturbed_label[ 451 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 452 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2, 453 | :].copy() 454 | self.foreORbackground_label = self.foreORbackground_label[ 455 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 456 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2].copy() 457 | 458 | 459 | perturbed_x_ = max(self.new_shape[0] - (perturbed_x_max - perturbed_x_min), 0) 460 | perturbed_x_min = perturbed_x_ // 2 461 | perturbed_x_max = self.new_shape[0] - perturbed_x_ // 2 if perturbed_x_%2 == 0 else self.new_shape[0] - (perturbed_x_ // 2 + 1) 462 | 463 | perturbed_y_ = max(self.new_shape[1] - (perturbed_y_max - perturbed_y_min), 0) 464 | perturbed_y_min = perturbed_y_ // 2 465 | perturbed_y_max = 
self.new_shape[1] - perturbed_y_ // 2 if perturbed_y_%2 == 0 else self.new_shape[1] - (perturbed_y_ // 2 + 1) 466 | 467 | '''clip 468 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = -1, -1, self.new_shape[0], self.new_shape[1] 469 | for x in range(self.new_shape[0] // 2, perturbed_x_max): 470 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and perturbed_x_max - 1 > x: 471 | perturbed_x_max = x 472 | break 473 | for x in range(self.new_shape[0] // 2, perturbed_x_min, -1): 474 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and x > 0: 475 | perturbed_x_min = x 476 | break 477 | for y in range(self.new_shape[1] // 2, perturbed_y_max): 478 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and perturbed_y_max - 1 > y: 479 | perturbed_y_max = y 480 | break 481 | for y in range(self.new_shape[1] // 2, perturbed_y_min, -1): 482 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and y > 0: 483 | perturbed_y_min = y 484 | break 485 | 486 | 487 | center_x, center_y = perturbed_x_min+(perturbed_x_max - perturbed_x_min)//2, perturbed_y_min+(perturbed_y_max - perturbed_y_min)//2 488 | 489 | perfix_ = self.save_suffix+'_'+str(m)+'_'+str(n) 490 | 491 | self.new_shape = save_img_shape 492 | 493 | perturbed_x_ = max(self.new_shape[0] - (perturbed_x_max - perturbed_x_min), 0) 494 | perturbed_x_min = perturbed_x_ // 2 495 | perturbed_x_max = self.new_shape[0] - perturbed_x_ // 2 if perturbed_x_%2 == 0 else self.new_shape[0] - (perturbed_x_ // 2 + 1) 496 | 497 | perturbed_y_ = max(self.new_shape[1] - (perturbed_y_max - perturbed_y_min), 0) 498 | perturbed_y_min = perturbed_y_ // 2 499 | perturbed_y_max = self.new_shape[1] - perturbed_y_ // 2 if perturbed_y_%2 == 0 else self.new_shape[1] - (perturbed_y_ // 2 + 1) 500 | 501 | self.synthesis_perturbed_img = self.synthesis_perturbed_img[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, 
center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2, :].copy() 502 | self.synthesis_perturbed_label = self.synthesis_perturbed_label[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2, :].copy() 503 | self.foreORbackground_label = self.foreORbackground_label[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2].copy() 504 | 505 | ''' 506 | 507 | 508 | 509 | '''save''' 510 | pixel_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(self.new_shape[0], self.new_shape[1], 2) 511 | 512 | if relativeShift_position == 'relativeShift_v2': 513 | self.synthesis_perturbed_label -= pixel_position 514 | fiducial_points_coordinate -= [center_x - self.new_shape[0] // 2, center_y - self.new_shape[1] // 2] 515 | 516 | self.synthesis_perturbed_label[:, :, 0] *= self.foreORbackground_label 517 | self.synthesis_perturbed_label[:, :, 1] *= self.foreORbackground_label 518 | self.synthesis_perturbed_img[:, :, 0] *= self.foreORbackground_label 519 | self.synthesis_perturbed_img[:, :, 1] *= self.foreORbackground_label 520 | self.synthesis_perturbed_img[:, :, 2] *= self.foreORbackground_label 521 | 522 | 523 | ''' 524 | synthesis_perturbed_img_filter = self.synthesis_perturbed_img.copy() 525 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 526 | # if self.is_perform(0.9, 0.1) or repeat_time > 5: 527 | # # if self.is_perform(0.1, 0.9) and repeat_time > 9: 528 | # # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (7, 7), 0) 529 | # # else: 530 | # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (5, 5), 0) 531 | # else: 532 | # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 533 | self.synthesis_perturbed_img[self.foreORbackground_label == 1] = 
synthesis_perturbed_img_filter[self.foreORbackground_label == 1] 534 | ''' 535 | '''HSV_v2''' 536 | perturbed_bg_img = perturbed_bg_img.astype(np.float32) 537 | # if self.is_perform(1, 0): 538 | # if self.is_perform(1, 0): 539 | if self.is_perform(0.1, 0.9): 540 | if self.is_perform(0.2, 0.8): 541 | synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy() 542 | 543 | synthesis_perturbed_img_clip_HSV = self.HSV_v1(synthesis_perturbed_img_clip_HSV) 544 | 545 | perturbed_bg_img[:, :, 0] *= 1-self.foreORbackground_label 546 | perturbed_bg_img[:, :, 1] *= 1-self.foreORbackground_label 547 | perturbed_bg_img[:, :, 2] *= 1-self.foreORbackground_label 548 | 549 | synthesis_perturbed_img_clip_HSV += perturbed_bg_img 550 | self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV 551 | else: 552 | perturbed_bg_img_HSV = perturbed_bg_img 553 | perturbed_bg_img_HSV = self.HSV_v1(perturbed_bg_img_HSV) 554 | 555 | perturbed_bg_img_HSV[:, :, 0] *= 1-self.foreORbackground_label 556 | perturbed_bg_img_HSV[:, :, 1] *= 1-self.foreORbackground_label 557 | perturbed_bg_img_HSV[:, :, 2] *= 1-self.foreORbackground_label 558 | 559 | self.synthesis_perturbed_img += perturbed_bg_img_HSV 560 | # self.synthesis_perturbed_img[np.sum(self.synthesis_perturbed_img, 2) == 771] = perturbed_bg_img_HSV[np.sum(self.synthesis_perturbed_img, 2) == 771] 561 | 562 | else: 563 | synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy() 564 | perturbed_bg_img[:, :, 0] *= 1 - self.foreORbackground_label 565 | perturbed_bg_img[:, :, 1] *= 1 - self.foreORbackground_label 566 | perturbed_bg_img[:, :, 2] *= 1 - self.foreORbackground_label 567 | 568 | synthesis_perturbed_img_clip_HSV += perturbed_bg_img 569 | 570 | synthesis_perturbed_img_clip_HSV = self.HSV_v1(synthesis_perturbed_img_clip_HSV) 571 | 572 | self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV 573 | 574 | '''''' 575 | # 
cv2.imwrite(self.save_path+'clip/'+perfix_+'_'+fold_curve+str(perturbed_time)+'-'+str(repeat_time)+'.png', synthesis_perturbed_img_clip) 576 | 577 | self.synthesis_perturbed_img[self.synthesis_perturbed_img < 0] = 0 578 | self.synthesis_perturbed_img[self.synthesis_perturbed_img > 255] = 255 579 | self.synthesis_perturbed_img = np.around(self.synthesis_perturbed_img).astype(np.uint8) 580 | label = np.zeros_like(self.synthesis_perturbed_img, dtype=np.float32) 581 | label[:, :, :2] = self.synthesis_perturbed_label 582 | label[:, :, 2] = self.foreORbackground_label 583 | 584 | # grey = np.around(self.synthesis_perturbed_img[:, :, 0] * 0.2989 + self.synthesis_perturbed_img[:, :, 1] * 0.5870 + self.synthesis_perturbed_img[:, :, 0] * 0.1140).astype(np.int16) 585 | # synthesis_perturbed_grey = np.concatenate((grey.reshape(self.new_shape[0], self.new_shape[1], 1), label), axis=2) 586 | synthesis_perturbed_color = np.concatenate((self.synthesis_perturbed_img, label), axis=2) 587 | 588 | self.synthesis_perturbed_color = np.zeros_like(synthesis_perturbed_color, dtype=np.float32) 589 | # self.synthesis_perturbed_grey = np.zeros_like(synthesis_perturbed_grey, dtype=np.float32) 590 | reduce_value_x = int(round(min((random.random() / 2) * (self.new_shape[0] - (perturbed_x_max - perturbed_x_min)), min(reduce_value, reduce_value_v2)))) 591 | reduce_value_y = int(round(min((random.random() / 2) * (self.new_shape[1] - (perturbed_y_max - perturbed_y_min)), min(reduce_value, reduce_value_v2)))) 592 | perturbed_x_min = max(perturbed_x_min - reduce_value_x, 0) 593 | perturbed_x_max = min(perturbed_x_max + reduce_value_x, self.new_shape[0]) 594 | perturbed_y_min = max(perturbed_y_min - reduce_value_y, 0) 595 | perturbed_y_max = min(perturbed_y_max + reduce_value_y, self.new_shape[1]) 596 | 597 | if im_lr >= im_ud: 598 | self.synthesis_perturbed_color[:, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_color[:, perturbed_y_min:perturbed_y_max, :] 599 | # 
self.synthesis_perturbed_grey[:, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_grey[:, perturbed_y_min:perturbed_y_max, :] 600 | else: 601 | self.synthesis_perturbed_color[perturbed_x_min:perturbed_x_max, :, :] = synthesis_perturbed_color[perturbed_x_min:perturbed_x_max, :, :] 602 | # self.synthesis_perturbed_grey[perturbed_x_min:perturbed_x_max, :, :] = synthesis_perturbed_grey[perturbed_x_min:perturbed_x_max, :, :] 603 | 604 | '''blur''' 605 | if self.is_perform(0.2, 0.8): 606 | synthesis_perturbed_img_filter = self.synthesis_perturbed_color[:, :, :3].copy() 607 | if self.is_perform(0.1, 0.9): 608 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (5, 5), 0) 609 | else: 610 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 611 | if self.is_perform(0.5, 0.5): 612 | self.synthesis_perturbed_color[:, :, :3][self.synthesis_perturbed_color[:, :, 5] == 1] = synthesis_perturbed_img_filter[self.synthesis_perturbed_color[:, :, 5] == 1] 613 | else: 614 | self.synthesis_perturbed_color[:, :, :3] = synthesis_perturbed_img_filter 615 | 616 | fiducial_points_coordinate = fiducial_points_coordinate[:, :, ::-1] 617 | '''draw fiducial points''' 618 | stepSize = 0 619 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_color[:, :, :3].copy() 620 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1, 2): 621 | cv2.circle(fiducial_points_synthesis_perturbed_img, (l[0] + math.ceil(stepSize / 2), l[1] + math.ceil(stepSize / 2)), 2, (0, 0, 255), -1) 622 | cv2.imwrite(self.save_path + 'fiducial_points/' + perfix_ + '_' + fold_curve + '.png', fiducial_points_synthesis_perturbed_img) 623 | 624 | cv2.imwrite(self.save_path + 'png/' + perfix_ + '_' + fold_curve + '.png', self.synthesis_perturbed_color[:, :, :3]) 625 | 626 | '''forward-begin''' 627 | self.forward_mapping = np.full((save_img_shape[0], save_img_shape[1], 2), 0, dtype=np.float32) 628 | forward_mapping = 
np.full((save_img_shape[0], save_img_shape[1], 2), 0, dtype=np.float32) 629 | forward_position = (self.synthesis_perturbed_color[:, :, 3:5] + pixel_position)[self.synthesis_perturbed_color[:, :, 5] != 0, :] 630 | flat_position = np.argwhere(np.zeros(save_img_shape, dtype=np.uint32) == 0) 631 | vtx, wts = self.interp_weights(forward_position, flat_position) 632 | wts_sum = np.abs(wts).sum(-1) 633 | wts = wts[wts_sum <= 1, :] 634 | vtx = vtx[wts_sum <= 1, :] 635 | flat_position_forward = flat_position.reshape(save_img_shape[0], save_img_shape[1], 2)[self.synthesis_perturbed_color[:, :, 5] != 0, :] 636 | forward_mapping.reshape(save_img_shape[0] * save_img_shape[1], 2)[wts_sum <= 1, :] = self.interpolate(flat_position_forward, vtx, wts) 637 | forward_mapping = forward_mapping.reshape(save_img_shape[0], save_img_shape[1], 2) 638 | 639 | mapping_x_min_, mapping_y_min_, mapping_x_max_, mapping_y_max_ = self.adjust_position_v2(0, 0, im_lr, im_ud, self.new_shape) 640 | shreshold_zoom_out = 2 641 | mapping_x_min = mapping_x_min_ + shreshold_zoom_out 642 | mapping_y_min = mapping_y_min_ + shreshold_zoom_out 643 | mapping_x_max = mapping_x_max_ - shreshold_zoom_out 644 | mapping_y_max = mapping_y_max_ - shreshold_zoom_out 645 | self.forward_mapping[mapping_x_min:mapping_x_max, mapping_y_min:mapping_y_max] = forward_mapping[mapping_x_min:mapping_x_max, mapping_y_min:mapping_y_max] 646 | self.scan_img = np.full((save_img_shape[0], save_img_shape[1], 3), 0, dtype=np.float32) 647 | self.scan_img[mapping_x_min_:mapping_x_max_, mapping_y_min_:mapping_y_max_] = self.origin_img 648 | self.origin_img = self.scan_img 649 | # flat_img = np.full((save_img_shape[0], save_img_shape[1], 3), 0, dtype=np.float32) 650 | # cv2.remap(self.synthesis_perturbed_color[:, :, :3], self.forward_mapping[:, :, 1], self.forward_mapping[:, :, 0], cv2.INTER_LINEAR, flat_img) 651 | # cv2.imwrite(self.save_path + 'outputs/1.jpg', flat_img) 652 | '''forward-end''' 653 | 654 | '''image and label 655 | 
synthesis_perturbed_data = { 656 | 'fiducial_points': fiducial_points_coordinate, 657 | 'segment': np.array((segment_x, segment_y)) 658 | } 659 | cv2.imwrite(self.save_path + 'png/' + perfix_ + '_' + fold_curve + '.png', self.synthesis_perturbed_color[:, :, :3]) 660 | ''' 661 | '''or''' 662 | synthesis_perturbed_data = { 663 | 'image':self.synthesis_perturbed_color[:, :, :3], 664 | 'fiducial_points': fiducial_points_coordinate, 665 | 'segment': np.array((segment_x, segment_y)) 666 | } 667 | 668 | with open(self.save_path+'color/'+perfix_+'_'+fold_curve+'.gw', 'wb') as f: 669 | pickle_perturbed_data = pickle.dumps(synthesis_perturbed_data) 670 | f.write(pickle_perturbed_data) 671 | # with open(self.save_path+'grey/'+perfix_+'_'+fold_curve+'.gw', 'wb') as f: 672 | # pickle_perturbed_data = pickle.dumps(self.synthesis_perturbed_grey) 673 | # f.write(pickle_perturbed_data) 674 | # cv2.imwrite(self.save_path+'grey_im/'+perfix_+'_'+fold_curve+'.png', self.synthesis_perturbed_color[:, :, :1]) 675 | 676 | 677 | # cv2.imwrite(self.save_path + 'scan/' + self.save_suffix + '_' + str(m) + '.png', self.origin_img) 678 | trian_t = time.time() - begin_train 679 | mm, ss = divmod(trian_t, 60) 680 | hh, mm = divmod(mm, 60) 681 | print(str(m)+'_'+str(n)+'_'+fold_curve+' '+str(repeat_time)+" Time : %02d:%02d:%02d\n" % (hh, mm, ss)) 682 | 683 | 684 | def multiThread(m, n, img_path_, bg_path_, save_path, save_suffix): 685 | saveFold = perturbed(img_path_, bg_path_, save_path, save_suffix) 686 | saveCurve = perturbed(img_path_, bg_path_, save_path, save_suffix) 687 | 688 | repeat_time = min(max(round(np.random.normal(10, 3)), 5), 16) 689 | fold = threading.Thread(target=saveFold.save_img, args=(m, n, 'fold', repeat_time, 'relativeShift_v2'), name='fold') 690 | curve = threading.Thread(target=saveCurve.save_img, args=(m, n, 'curve', repeat_time, 'relativeShift_v2'), name='curve') 691 | 692 | fold.start() 693 | curve.start() 694 | curve.join() 695 | fold.join() 696 | def xgw(args): 697 | 
path = args.path 698 | bg_path = args.bg_path 699 | if args.output_path is None: 700 | save_path = '/lustre/home/gwxie/data/unwarp_new/train/fiducial1024/fiducial1024_v2_a1/' 701 | else: 702 | save_path = args.output_path 703 | 704 | 705 | # if not os.path.exists(save_path + 'grey/'): 706 | # os.makedirs(save_path + 'grey/') 707 | if not os.path.exists(save_path + 'color/'): 708 | os.makedirs(save_path + 'color/') 709 | 710 | if not os.path.exists(save_path + 'fiducial_points/'): 711 | os.makedirs(save_path + 'fiducial_points/') 712 | 713 | if not os.path.exists(save_path + 'png/'): 714 | os.makedirs(save_path + 'png/') 715 | 716 | # if not os.path.exists(save_path + 'scan/'): 717 | # os.makedirs(save_path + 'scan/') 718 | 719 | if not os.path.exists(save_path + 'outputs/'): 720 | os.makedirs(save_path + 'outputs/') 721 | 722 | save_suffix = str.split(args.path, '/')[-2] 723 | 724 | all_img_path = getDatasets(path) 725 | all_bgImg_path = getDatasets(bg_path) 726 | global begin_train 727 | begin_train = time.time() 728 | fiducial_points = 61 # 31 729 | process_pool = Pool(2) 730 | for m, img_path in enumerate(all_img_path): 731 | for n in range(args.sys_num, args.sys_num_e): 732 | img_path_ = path+img_path 733 | bg_path_ = bg_path+random.choice(all_bgImg_path)+'/' 734 | 735 | for m_n in range(10): 736 | try: 737 | saveFold = perturbed(img_path_, bg_path_, save_path, save_suffix) 738 | saveCurve = perturbed(img_path_, bg_path_, save_path, save_suffix) 739 | 740 | # repeat_time = min(max(round(np.random.normal(12, 4)), 1), 20) 741 | repeat_time = min(max(round(np.random.normal(8, 4)), 1), 16) # random.randint(1, 2) # min(max(round(np.random.normal(8, 4)), 1), 12) 742 | # repeat_time = 0 743 | process_pool.apply_async(func=saveFold.save_img, args=(m, n, 'fold', repeat_time, fiducial_points, 'relativeShift_v2')) 744 | 745 | # repeat_time = min(max(round(np.random.normal(10, 4)), 1), 18) 746 | repeat_time = min(max(round(np.random.normal(4, 2)), 1), 10) 747 | # 
repeat_time = 0 748 | process_pool.apply_async(func=saveCurve.save_img, args=(m, n, 'curve', repeat_time, fiducial_points, 'relativeShift_v2')) 749 | 750 | except BaseException as err: 751 | print(err) 752 | continue 753 | break 754 | 755 | process_pool.close() 756 | process_pool.join() 757 | 758 | if __name__ == '__main__': 759 | 760 | parser = argparse.ArgumentParser(description='Hyperparams') 761 | parser.add_argument('--path', 762 | default='./scan/new/', type=str, 763 | help='the path of origin img.') 764 | parser.add_argument('--bg_path', 765 | default='./background/', type=str, 766 | help='the path of bg img.') 767 | 768 | parser.add_argument('--output_path', 769 | default='./output/', type=str, 770 | help='the path of origin img.') 771 | # parser.set_defaults(output_path='test') 772 | parser.add_argument('--count_from', '-p', default=0, type=int, 773 | metavar='N', help='print frequency (default: 10)') # print frequency 774 | 775 | parser.add_argument('--repeat_T', default=0, type=int) 776 | 777 | parser.add_argument('--sys_num', default=6, type=int) 778 | parser.add_argument('--sys_num_e', default=11, type=int) 779 | 780 | args = parser.parse_args() 781 | xgw(args) 782 | -------------------------------------------------------------------------------- /perturbed_images_generation_multiProcess_addition2.py: -------------------------------------------------------------------------------- 1 | ''' 2 | GuoWang xie 3 | set up :2020-1-9 4 | intergrate img and label into one file 5 | 6 | -- data1024_v1 7 | ''' 8 | 9 | 10 | import argparse 11 | import sys, os 12 | import pickle 13 | import random 14 | import collections 15 | import json 16 | import numpy as np 17 | import scipy.io as io 18 | import scipy.misc as m 19 | import matplotlib.pyplot as plt 20 | import glob 21 | import math 22 | import time 23 | 24 | import threading 25 | import multiprocessing as mp 26 | from multiprocessing import Pool 27 | import re 28 | import cv2 29 | # 
sys.path.append('/lustre/home/gwxie/hope/project/dewarp/datasets/') # /lustre/home/gwxie/program/project/unwarp/perturbed_imgaes/GAN 30 | import utils 31 | 32 | def getDatasets(dir): 33 | return os.listdir(dir) 34 | 35 | class perturbed(utils.BasePerturbed): 36 | def __init__(self, path, bg_path, save_path, save_suffix): 37 | 38 | self.path = path 39 | self.bg_path = bg_path 40 | self.save_path = save_path 41 | self.save_suffix = save_suffix 42 | def save_img(self, m, n, fold_curve='fold', repeat_time=4, fiducial_points = 16, relativeShift_position='relativeShift_v2'): 43 | origin_img = cv2.imread(self.path, flags=cv2.IMREAD_COLOR) 44 | 45 | save_img_shape = [512*2, 480*2] # 320 46 | # reduce_value = np.random.choice([2**4, 2**5, 2**6, 2**7, 2**8], p=[0.01, 0.1, 0.4, 0.39, 0.1]) 47 | reduce_value = np.random.choice([2*2, 4*2, 8*2, 16*2, 24*2, 32*2, 40*2], p=[0.02, 0.1, 0.2, 0.3, 0.2, 0.1, 0.08]) 48 | # reduce_value = np.random.choice([8*2, 16*2, 24*2, 32*2, 40*2, 48*2], p=[0.1, 0.2, 0.4, 0.1, 0.1, 0.1]) 49 | # reduce_value = np.random.choice([8*2, 16*2, 24*2, 32*2, 40*2, 48*2], p=[0.01, 0.02, 0.2, 0.4, 0.19, 0.18]) 50 | # reduce_value = np.random.choice([16, 24, 32, 40, 48, 64], p=[0.01, 0.1, 0.2, 0.4, 0.2, 0.09]) 51 | base_img_shrink = save_img_shape[0] - reduce_value 52 | 53 | # enlarge_img_shrink = [1024, 768] 54 | # enlarge_img_shrink = [896, 672] # 420 55 | enlarge_img_shrink = [512*4, 480*4] # 420 56 | # enlarge_img_shrink = [896*2, 768*2] # 420 57 | # enlarge_img_shrink = [896, 768] # 420 58 | # enlarge_img_shrink = [768, 576] # 420 59 | # enlarge_img_shrink = [640, 480] # 420 60 | 61 | '''''' 62 | im_lr = origin_img.shape[0] 63 | im_ud = origin_img.shape[1] 64 | 65 | reduce_value_v2 = max(np.random.choice(range(0, reduce_value, 2)), 4) 66 | 67 | if im_lr > im_ud: 68 | im_ud = min(int(im_ud / im_lr * base_img_shrink), save_img_shape[1] - reduce_value_v2) 69 | im_lr = save_img_shape[0] - reduce_value 70 | else: 71 | base_img_shrink = save_img_shape[1] - 
reduce_value 72 | im_lr = min(int(im_lr / im_ud * base_img_shrink), save_img_shape[0] - reduce_value_v2) 73 | im_ud = base_img_shrink 74 | 75 | if round(im_lr / im_ud, 2) < 0.5 or round(im_ud / im_lr, 2) < 0.5: 76 | repeat_time = min(repeat_time, 8) 77 | 78 | edge_padding = 3 79 | im_lr -= im_lr % (fiducial_points-1) - (2*edge_padding) # im_lr % (fiducial_points-1) - 1 80 | im_ud -= im_ud % (fiducial_points-1) - (2*edge_padding) # im_ud % (fiducial_points-1) - 1 81 | im_hight = np.linspace(edge_padding, im_lr - edge_padding, fiducial_points, dtype=np.int64) 82 | im_wide = np.linspace(edge_padding, im_ud - edge_padding, fiducial_points, dtype=np.int64) 83 | # im_lr -= im_lr % (fiducial_points-1) - (1+2*edge_padding) # im_lr % (fiducial_points-1) - 1 84 | # im_ud -= im_ud % (fiducial_points-1) - (1+2*edge_padding) # im_ud % (fiducial_points-1) - 1 85 | # im_hight = np.linspace(edge_padding, im_lr - (1+edge_padding), fiducial_points, dtype=np.int64) 86 | # im_wide = np.linspace(edge_padding, im_ud - (1+edge_padding), fiducial_points, dtype=np.int64) 87 | im_x, im_y = np.meshgrid(im_hight, im_wide) 88 | segment_x = (im_lr) // (fiducial_points-1) 89 | segment_y = (im_ud) // (fiducial_points-1) 90 | 91 | # plt.plot(im_x, im_y, 92 | # color='limegreen', 93 | # marker='.', 94 | # linestyle='') 95 | # plt.grid(True) 96 | # plt.show() 97 | 98 | self.origin_img = cv2.resize(origin_img, (im_ud, im_lr), interpolation=cv2.INTER_CUBIC) 99 | 100 | perturbed_bg_ = getDatasets(self.bg_path) 101 | perturbed_bg_img_ = self.bg_path+random.choice(perturbed_bg_) 102 | perturbed_bg_img = cv2.imread(perturbed_bg_img_, flags=cv2.IMREAD_COLOR) 103 | 104 | mesh_shape = self.origin_img.shape[:2] 105 | 106 | self.synthesis_perturbed_img = np.full((enlarge_img_shrink[0], enlarge_img_shrink[1], 3), 256, dtype=np.float32)#np.zeros_like(perturbed_bg_img) 107 | # self.synthesis_perturbed_img = np.full((enlarge_img_shrink[0], enlarge_img_shrink[1], 3), 0, 
dtype=np.int16)#np.zeros_like(perturbed_bg_img) 108 | self.new_shape = self.synthesis_perturbed_img.shape[:2] 109 | perturbed_bg_img = cv2.resize(perturbed_bg_img, (save_img_shape[1], save_img_shape[0]), cv2.INPAINT_TELEA) 110 | 111 | origin_pixel_position = np.argwhere(np.zeros(mesh_shape, dtype=np.uint32) == 0).reshape(mesh_shape[0], mesh_shape[1], 2) 112 | pixel_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(self.new_shape[0], self.new_shape[1], 2) 113 | self.perturbed_xy_ = np.zeros((self.new_shape[0], self.new_shape[1], 2)) 114 | # self.perturbed_xy_ = pixel_position.copy().astype(np.float32) 115 | 116 | self.synthesis_perturbed_label = np.zeros((self.new_shape[0], self.new_shape[1], 2)) 117 | x_min, y_min, x_max, y_max = self.adjust_position_v2(0, 0, mesh_shape[0], mesh_shape[1], save_img_shape) 118 | origin_pixel_position += [x_min, y_min] 119 | 120 | x_min, y_min, x_max, y_max = self.adjust_position(0, 0, mesh_shape[0], mesh_shape[1]) 121 | x_shift = random.randint(-enlarge_img_shrink[0]//16, enlarge_img_shrink[0]//16) 122 | y_shift = random.randint(-enlarge_img_shrink[1]//16, enlarge_img_shrink[1]//16) 123 | x_min += x_shift 124 | x_max += x_shift 125 | y_min += y_shift 126 | y_max += y_shift 127 | 128 | '''im_x,y''' 129 | im_x += x_min 130 | im_y += y_min 131 | 132 | self.synthesis_perturbed_img[x_min:x_max, y_min:y_max] = self.origin_img 133 | self.synthesis_perturbed_label[x_min:x_max, y_min:y_max] = origin_pixel_position 134 | 135 | synthesis_perturbed_img_map = self.synthesis_perturbed_img.copy() 136 | synthesis_perturbed_label_map = self.synthesis_perturbed_label.copy() 137 | 138 | foreORbackground_label = np.full((mesh_shape), 1, dtype=np.int16) 139 | foreORbackground_label_map = np.full((self.new_shape), 0, dtype=np.int16) 140 | foreORbackground_label_map[x_min:x_max, y_min:y_max] = foreORbackground_label 141 | 142 | # synthesis_perturbed_img_map = self.pad(self.synthesis_perturbed_img.copy(), x_min, y_min, x_max, 
y_max) 143 | # synthesis_perturbed_label_map = self.pad(synthesis_perturbed_label_map, x_min, y_min, x_max, y_max) 144 | '''*****************************************************************''' 145 | is_normalizationFun_mixture = self.is_perform(0.2, 0.8) 146 | normalizationFun_0_1 = self.is_perform(0.8, 0.2) 147 | 148 | 149 | synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 256) 150 | # synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 0, dtype=np.int16) 151 | synthesis_perturbed_label = np.zeros_like(self.synthesis_perturbed_label) 152 | 153 | p_pp_choice = self.is_perform(0.8, 0.2) if fold_curve == 'fold' else self.is_perform(0.1, 0.9) 154 | for repeat_i in range(repeat_time): 155 | 156 | if fold_curve == 'fold': 157 | fold_curve_random = self.is_perform(0.9, 0.1) 158 | if fold_curve_random: 159 | alpha_perturbed = random.randint(70, 140) / 100 160 | else: 161 | alpha_perturbed = random.randint(80, 130) / 100 162 | else: 163 | fold_curve_random = self.is_perform(0.5, 0.5) 164 | alpha_perturbed = random.randint(80, 130) / 100 165 | 166 | '''''' 167 | linspace_x = [0, (self.new_shape[0] - im_lr) // 2 - 1, 168 | self.new_shape[0] - (self.new_shape[0] - im_lr) // 2 - 1, self.new_shape[0] - 1] 169 | linspace_y = [0, (self.new_shape[1] - im_ud) // 2 - 1, 170 | self.new_shape[1] - (self.new_shape[1] - im_ud) // 2 - 1, self.new_shape[1] - 1] 171 | linspace_x_seq = [1, 2, 3] 172 | linspace_y_seq = [1, 2, 3] 173 | r_x = random.choice(linspace_x_seq) 174 | r_y = random.choice(linspace_y_seq) 175 | perturbed_p = np.array( 176 | [random.randint(linspace_x[r_x-1] * 10, linspace_x[r_x] * 10), 177 | random.randint(linspace_y[r_y-1] * 10, linspace_y[r_y] * 10)])/10 178 | if ((r_x == 1 or r_x == 3) and (r_y == 1 or r_y == 3)) and p_pp_choice: 179 | linspace_x_seq.remove(r_x) 180 | linspace_y_seq.remove(r_y) 181 | r_x = random.choice(linspace_x_seq) 182 | r_y = random.choice(linspace_y_seq) 183 | perturbed_pp = np.array( 184 | 
[random.randint(linspace_x[r_x-1] * 10, linspace_x[r_x] * 10), 185 | random.randint(linspace_y[r_y-1] * 10, linspace_y[r_y] * 10)])/10 186 | # perturbed_p, perturbed_pp = np.array( 187 | # [random.randint(0, self.new_shape[0] * 10) / 10, 188 | # random.randint(0, self.new_shape[1] * 10) / 10]) \ 189 | # , np.array([random.randint(0, self.new_shape[0] * 10) / 10, 190 | # random.randint(0, self.new_shape[1] * 10) / 10]) 191 | # perturbed_p, perturbed_pp = np.array( 192 | # [random.randint((self.new_shape[0]-im_lr)//2*10, (self.new_shape[0]-(self.new_shape[0]-im_lr)//2) * 10) / 10, 193 | # random.randint((self.new_shape[1]-im_ud)//2*10, (self.new_shape[1]-(self.new_shape[1]-im_ud)//2) * 10) / 10]) \ 194 | # , np.array([random.randint((self.new_shape[0]-im_lr)//2*10, (self.new_shape[0]-(self.new_shape[0]-im_lr)//2) * 10) / 10, 195 | # random.randint((self.new_shape[1]-im_ud)//2*10, (self.new_shape[1]-(self.new_shape[1]-im_ud)//2) * 10) / 10]) 196 | '''''' 197 | 198 | perturbed_vp = perturbed_pp - perturbed_p 199 | perturbed_vp_norm = np.linalg.norm(perturbed_vp) 200 | 201 | perturbed_distance_vertex_and_line = np.dot((perturbed_p - pixel_position), perturbed_vp) / perturbed_vp_norm 202 | '''''' 203 | if fold_curve == 'fold' and self.is_perform(0.5, 0.5) and fold_curve_random: 204 | perturbed_v = np.array([random.randint(-10000, 10000) / 100, random.randint(-10000, 10000) / 100]) 205 | else: 206 | if self.is_perform(0.8, 0.2): 207 | perturbed_v = np.array([random.randint(-8000, 8000) / 100, random.randint(-8000, 8000) / 100]) 208 | else: 209 | perturbed_v = np.array([random.randint(-6000, 6000) / 100, random.randint(-6000, 6000) / 100]) 210 | '''''' 211 | if fold_curve == 'fold': 212 | if is_normalizationFun_mixture: 213 | if self.is_perform(0.5, 0.5): 214 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 215 | else: 216 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 4)) 217 | else: 218 | if 
normalizationFun_0_1: 219 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 4)) 220 | else: 221 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 222 | 223 | else: 224 | if is_normalizationFun_mixture: 225 | if self.is_perform(0.5, 0.5): 226 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 227 | else: 228 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 4)) 229 | else: 230 | if normalizationFun_0_1: 231 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 4)) 232 | else: 233 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 234 | '''''' 235 | if fold_curve_random: 236 | # omega_perturbed = (alpha_perturbed+0.2) / (perturbed_d + alpha_perturbed) 237 | # omega_perturbed = alpha_perturbed**perturbed_d 238 | omega_perturbed = alpha_perturbed / (perturbed_d + alpha_perturbed) 239 | else: 240 | omega_perturbed = 1 - perturbed_d ** alpha_perturbed 241 | 242 | '''shadow''' 243 | if self.is_perform(0.5, 0.5): 244 | synthesis_perturbed_img_map[x_min:x_max, y_min:y_max] = np.minimum(np.maximum(synthesis_perturbed_img_map[x_min:x_max, y_min:y_max] - np.int16(np.round(omega_perturbed[x_min:x_max, y_min:y_max].repeat(3).reshape(x_max-x_min, y_max-y_min, 3) * abs(np.linalg.norm(perturbed_v//2))*np.array([0.4-random.random()*0.1, 0.4-random.random()*0.1, 0.4-random.random()*0.1]))), 0), 255) 245 | '''''' 246 | 247 | if relativeShift_position in ['position', 'relativeShift_v2']: 248 | self.perturbed_xy_ += np.array([omega_perturbed * perturbed_v[0], omega_perturbed * perturbed_v[1]]).transpose(1, 2, 0) 249 | else: 250 | print('relativeShift_position error') 251 | exit() 252 | 253 | ''' 254 | flat_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape( 255 | self.new_shape[0] * self.new_shape[1], 2) 256 | vtx, wts = 
self.interp_weights(self.perturbed_xy_.reshape(self.new_shape[0] * self.new_shape[1], 2), flat_position) 257 | wts_sum = np.abs(wts).sum(-1) 258 | 259 | # flat_img.reshape(flat_shape[0] * flat_shape[1], 3)[:] = interpolate(pixel, vtx, wts) 260 | wts = wts[wts_sum <= 1, :] 261 | vtx = vtx[wts_sum <= 1, :] 262 | synthesis_perturbed_img.reshape(self.new_shape[0] * self.new_shape[1], 3)[wts_sum <= 1, 263 | :] = self.interpolate(synthesis_perturbed_img_map.reshape(self.new_shape[0] * self.new_shape[1], 3), vtx, wts) 264 | 265 | synthesis_perturbed_label.reshape(self.new_shape[0] * self.new_shape[1], 2)[wts_sum <= 1, 266 | :] = self.interpolate(synthesis_perturbed_label_map.reshape(self.new_shape[0] * self.new_shape[1], 2), vtx, wts) 267 | 268 | foreORbackground_label = np.zeros(self.new_shape) 269 | foreORbackground_label.reshape(self.new_shape[0] * self.new_shape[1], 1)[wts_sum <= 1, :] = self.interpolate(foreORbackground_label_map.reshape(self.new_shape[0] * self.new_shape[1], 1), vtx, wts) 270 | foreORbackground_label[foreORbackground_label < 0.99] = 0 271 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 272 | 273 | # synthesis_perturbed_img = np.around(synthesis_perturbed_img).astype(np.uint8) 274 | synthesis_perturbed_label[:, :, 0] *= foreORbackground_label 275 | synthesis_perturbed_label[:, :, 1] *= foreORbackground_label 276 | synthesis_perturbed_img[:, :, 0] *= foreORbackground_label 277 | synthesis_perturbed_img[:, :, 1] *= foreORbackground_label 278 | synthesis_perturbed_img[:, :, 2] *= foreORbackground_label 279 | 280 | self.synthesis_perturbed_img = synthesis_perturbed_img 281 | self.synthesis_perturbed_label = synthesis_perturbed_label 282 | ''' 283 | 284 | '''perspective''' 285 | 286 | perspective_shreshold = random.randint(24, 40)*10 # 280 287 | x_min_per, y_min_per, x_max_per, y_max_per = self.adjust_position(perspective_shreshold, perspective_shreshold, self.new_shape[0]-perspective_shreshold, self.new_shape[1]-perspective_shreshold) 288 | 
pts1 = np.float32([[x_min_per, y_min_per], [x_max_per, y_min_per], [x_min_per, y_max_per], [x_max_per, y_max_per]]) 289 | e_1_ = x_max_per - x_min_per 290 | e_2_ = y_max_per - y_min_per 291 | e_3_ = e_2_ 292 | e_4_ = e_1_ 293 | perspective_shreshold_h = e_1_*0.02 294 | perspective_shreshold_w = e_2_*0.02 295 | a_min_, a_max_ = 70, 100 296 | 297 | if fold_curve == 'curve' and self.is_perform(0.2, 0.8): 298 | if self.is_perform(0.5, 0.5): 299 | while True: 300 | pts2 = np.around( 301 | np.float32([[x_min_per - (random.random()) * perspective_shreshold, y_min_per + (random.random()) * perspective_shreshold], 302 | [x_max_per - (random.random()) * perspective_shreshold, y_min_per - (random.random()) * perspective_shreshold], 303 | [x_min_per + (random.random()) * perspective_shreshold, y_max_per + (random.random()) * perspective_shreshold], 304 | [x_max_per + (random.random()) * perspective_shreshold, y_max_per - (random.random()) * perspective_shreshold]])) # right 305 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 306 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 307 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 308 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 309 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 310 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 311 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 312 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 313 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or ( 314 | a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 315 | break 316 | 317 | else: 318 | while True: 319 | pts2 = np.around( 320 | np.float32([[x_min_per + (random.random()) * perspective_shreshold, y_min_per - (random.random()) * perspective_shreshold], 321 | 
[x_max_per + (random.random()) * perspective_shreshold, y_min_per + (random.random()) * perspective_shreshold], 322 | [x_min_per - (random.random()) * perspective_shreshold, y_max_per - (random.random()) * perspective_shreshold], 323 | [x_max_per - (random.random()) * perspective_shreshold, y_max_per + (random.random()) * perspective_shreshold]])) 324 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 325 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 326 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 327 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 328 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 329 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 330 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 331 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 332 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or ( 333 | a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 334 | break 335 | 336 | else: 337 | while True: 338 | pts2 = np.around(np.float32([[x_min_per+(random.random()-0.5)*perspective_shreshold, y_min_per+(random.random()-0.5)*perspective_shreshold], 339 | [x_max_per+(random.random()-0.5)*perspective_shreshold, y_min_per+(random.random()-0.5)*perspective_shreshold], 340 | [x_min_per+(random.random()-0.5)*perspective_shreshold, y_max_per+(random.random()-0.5)*perspective_shreshold], 341 | [x_max_per+(random.random()-0.5)*perspective_shreshold, y_max_per+(random.random()-0.5)*perspective_shreshold]])) 342 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 343 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 344 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 345 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 346 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and 
e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 347 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 348 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 349 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 350 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 351 | break 352 | 353 | 354 | M = cv2.getPerspectiveTransform(pts1, pts2) 355 | one = np.ones((self.new_shape[0], self.new_shape[1], 1), dtype=np.int16) 356 | matr = np.dstack((pixel_position, one)) 357 | new = np.dot(M, matr.reshape(-1, 3).T).T.reshape(self.new_shape[0], self.new_shape[1], 3) 358 | x = new[:, :, 0]/new[:, :, 2] 359 | y = new[:, :, 1]/new[:, :, 2] 360 | perturbed_xy_ = np.dstack((x, y)) 361 | # perturbed_xy_round_int = np.around(cv2.bilateralFilter(perturbed_xy_round_int, 9, 75, 75)) 362 | # perturbed_xy_round_int = np.around(cv2.blur(perturbed_xy_, (17, 17))) 363 | # perturbed_xy_round_int = cv2.blur(perturbed_xy_round_int, (17, 17)) 364 | # perturbed_xy_round_int = cv2.GaussianBlur(perturbed_xy_round_int, (7, 7), 0) 365 | perturbed_xy_ = perturbed_xy_-np.min(perturbed_xy_.T.reshape(2, -1), 1) 366 | # perturbed_xy_round_int = np.around(perturbed_xy_round_int-np.min(perturbed_xy_round_int.T.reshape(2, -1), 1)).astype(np.int16) 367 | 368 | self.perturbed_xy_ += perturbed_xy_ 369 | 370 | '''perspective end''' 371 | 372 | '''to img''' 373 | flat_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape( 374 | self.new_shape[0] * self.new_shape[1], 2) 375 | # self.perturbed_xy_ = cv2.blur(self.perturbed_xy_, (7, 7)) 376 | self.perturbed_xy_ = cv2.GaussianBlur(self.perturbed_xy_, (7, 7), 0) 377 | 378 | '''get fiducial points''' 379 | fiducial_points_coordinate = self.perturbed_xy_[im_x, im_y] 380 | 381 | 
382 | vtx, wts = self.interp_weights(self.perturbed_xy_.reshape(self.new_shape[0] * self.new_shape[1], 2), flat_position) 383 | wts_sum = np.abs(wts).sum(-1) 384 | 385 | # flat_img.reshape(flat_shape[0] * flat_shape[1], 3)[:] = interpolate(pixel, vtx, wts) 386 | wts = wts[wts_sum <= 1, :] 387 | vtx = vtx[wts_sum <= 1, :] 388 | synthesis_perturbed_img.reshape(self.new_shape[0] * self.new_shape[1], 3)[wts_sum <= 1, 389 | :] = self.interpolate(synthesis_perturbed_img_map.reshape(self.new_shape[0] * self.new_shape[1], 3), vtx, wts) 390 | 391 | synthesis_perturbed_label.reshape(self.new_shape[0] * self.new_shape[1], 2)[wts_sum <= 1, 392 | :] = self.interpolate(synthesis_perturbed_label_map.reshape(self.new_shape[0] * self.new_shape[1], 2), vtx, wts) 393 | 394 | foreORbackground_label = np.zeros(self.new_shape) 395 | foreORbackground_label.reshape(self.new_shape[0] * self.new_shape[1], 1)[wts_sum <= 1, :] = self.interpolate(foreORbackground_label_map.reshape(self.new_shape[0] * self.new_shape[1], 1), vtx, wts) 396 | foreORbackground_label[foreORbackground_label < 0.99] = 0 397 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 398 | 399 | self.synthesis_perturbed_img = synthesis_perturbed_img 400 | self.synthesis_perturbed_label = synthesis_perturbed_label 401 | self.foreORbackground_label = foreORbackground_label 402 | 403 | '''draw fiducial points 404 | stepSize = 0 405 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_img.copy() 406 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1,2): 407 | cv2.circle(fiducial_points_synthesis_perturbed_img, (l[1] + math.ceil(stepSize / 2), l[0] + math.ceil(stepSize / 2)), 5, (0, 0, 255), -1) 408 | cv2.imwrite('/lustre/home/gwxie/program/project/unwarp/unwarp_perturbed/TPS/img/cv_TPS_large.jpg', fiducial_points_synthesis_perturbed_img) 409 | ''' 410 | 411 | '''clip''' 412 | 413 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = -1, -1, self.new_shape[0], 
self.new_shape[1] 414 | for x in range(self.new_shape[0] // 2, perturbed_x_max): 415 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and perturbed_x_max - 1 > x: 416 | perturbed_x_max = x 417 | break 418 | for x in range(self.new_shape[0] // 2, perturbed_x_min, -1): 419 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and x > 0: 420 | perturbed_x_min = x 421 | break 422 | for y in range(self.new_shape[1] // 2, perturbed_y_max): 423 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and perturbed_y_max - 1 > y: 424 | perturbed_y_max = y 425 | break 426 | for y in range(self.new_shape[1] // 2, perturbed_y_min, -1): 427 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and y > 0: 428 | perturbed_y_min = y 429 | break 430 | 431 | if perturbed_x_min == 0 or perturbed_x_max == self.new_shape[0] or perturbed_y_min == self.new_shape[1] or perturbed_y_max == self.new_shape[1]: 432 | raise Exception('clip error') 433 | 434 | if perturbed_x_max - perturbed_x_min < im_lr//2 or perturbed_y_max - perturbed_y_min < im_ud//2: 435 | raise Exception('clip error') 436 | 437 | 438 | perfix_ = self.save_suffix+'_'+str(m)+'_'+str(n) 439 | is_shrink = False 440 | if perturbed_x_max - perturbed_x_min > save_img_shape[0] or perturbed_y_max - perturbed_y_min > save_img_shape[1]: 441 | is_shrink = True 442 | synthesis_perturbed_img = cv2.resize(self.synthesis_perturbed_img[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 443 | synthesis_perturbed_label = cv2.resize(self.synthesis_perturbed_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 444 | foreORbackground_label = cv2.resize(self.foreORbackground_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 445 | 
foreORbackground_label[foreORbackground_label < 0.99] = 0 446 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 447 | '''shrink fiducial points''' 448 | center_x_l, center_y_l = perturbed_x_min + (perturbed_x_max - perturbed_x_min) // 2, perturbed_y_min + (perturbed_y_max - perturbed_y_min) // 2 449 | fiducial_points_coordinate_copy = fiducial_points_coordinate.copy() 450 | shrink_x = im_lr/(perturbed_x_max - perturbed_x_min) 451 | shrink_y = im_ud/(perturbed_y_max - perturbed_y_min) 452 | fiducial_points_coordinate *= [shrink_x, shrink_y] 453 | center_x_l *= shrink_x 454 | center_y_l *= shrink_y 455 | # fiducial_points_coordinate[1:, 1:] *= [shrink_x, shrink_y] 456 | # fiducial_points_coordinate[1:, :1, 0] *= shrink_x 457 | # fiducial_points_coordinate[:1, 1:, 1] *= shrink_y 458 | # perturbed_x_min_copy, perturbed_y_min_copy, perturbed_x_max_copy, perturbed_y_max_copy = perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max 459 | 460 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = self.adjust_position_v2(0, 0, im_lr, im_ud, self.new_shape) 461 | 462 | self.synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 256) 463 | self.synthesis_perturbed_label = np.zeros_like(self.synthesis_perturbed_label) 464 | self.foreORbackground_label = np.zeros_like(self.foreORbackground_label) 465 | self.synthesis_perturbed_img[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_img 466 | self.synthesis_perturbed_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_label 467 | self.foreORbackground_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max] = foreORbackground_label 468 | 469 | center_x, center_y = perturbed_x_min + (perturbed_x_max - perturbed_x_min) // 2, perturbed_y_min + (perturbed_y_max - perturbed_y_min) // 2 470 | if is_shrink: 471 | fiducial_points_coordinate += [center_x-center_x_l, center_y-center_y_l] 
472 | 473 | '''draw fiducial points 474 | stepSize = 0 475 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_img.copy() 476 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1, 2): 477 | cv2.circle(fiducial_points_synthesis_perturbed_img, 478 | (l[1] + math.ceil(stepSize / 2), l[0] + math.ceil(stepSize / 2)), 5, (0, 0, 255), -1) 479 | cv2.imwrite('/lustre/home/gwxie/program/project/unwarp/unwarp_perturbed/TPS/img/cv_TPS_small.jpg',fiducial_points_synthesis_perturbed_img) 480 | ''' 481 | self.new_shape = save_img_shape 482 | self.synthesis_perturbed_img = self.synthesis_perturbed_img[ 483 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 484 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2, 485 | :].copy() 486 | self.synthesis_perturbed_label = self.synthesis_perturbed_label[ 487 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 488 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2, 489 | :].copy() 490 | self.foreORbackground_label = self.foreORbackground_label[ 491 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 492 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2].copy() 493 | 494 | 495 | perturbed_x_ = max(self.new_shape[0] - (perturbed_x_max - perturbed_x_min), 0) 496 | perturbed_x_min = perturbed_x_ // 2 497 | perturbed_x_max = self.new_shape[0] - perturbed_x_ // 2 if perturbed_x_%2 == 0 else self.new_shape[0] - (perturbed_x_ // 2 + 1) 498 | 499 | perturbed_y_ = max(self.new_shape[1] - (perturbed_y_max - perturbed_y_min), 0) 500 | perturbed_y_min = perturbed_y_ // 2 501 | perturbed_y_max = self.new_shape[1] - perturbed_y_ // 2 if perturbed_y_%2 == 0 else self.new_shape[1] - (perturbed_y_ // 2 + 1) 502 | 503 | '''clip 504 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = -1, -1, self.new_shape[0], self.new_shape[1] 505 | for x in range(self.new_shape[0] // 2, perturbed_x_max): 506 | 
if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and perturbed_x_max - 1 > x: 507 | perturbed_x_max = x 508 | break 509 | for x in range(self.new_shape[0] // 2, perturbed_x_min, -1): 510 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and x > 0: 511 | perturbed_x_min = x 512 | break 513 | for y in range(self.new_shape[1] // 2, perturbed_y_max): 514 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and perturbed_y_max - 1 > y: 515 | perturbed_y_max = y 516 | break 517 | for y in range(self.new_shape[1] // 2, perturbed_y_min, -1): 518 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and y > 0: 519 | perturbed_y_min = y 520 | break 521 | 522 | 523 | center_x, center_y = perturbed_x_min+(perturbed_x_max - perturbed_x_min)//2, perturbed_y_min+(perturbed_y_max - perturbed_y_min)//2 524 | 525 | perfix_ = self.save_suffix+'_'+str(m)+'_'+str(n) 526 | 527 | self.new_shape = save_img_shape 528 | 529 | perturbed_x_ = max(self.new_shape[0] - (perturbed_x_max - perturbed_x_min), 0) 530 | perturbed_x_min = perturbed_x_ // 2 531 | perturbed_x_max = self.new_shape[0] - perturbed_x_ // 2 if perturbed_x_%2 == 0 else self.new_shape[0] - (perturbed_x_ // 2 + 1) 532 | 533 | perturbed_y_ = max(self.new_shape[1] - (perturbed_y_max - perturbed_y_min), 0) 534 | perturbed_y_min = perturbed_y_ // 2 535 | perturbed_y_max = self.new_shape[1] - perturbed_y_ // 2 if perturbed_y_%2 == 0 else self.new_shape[1] - (perturbed_y_ // 2 + 1) 536 | 537 | self.synthesis_perturbed_img = self.synthesis_perturbed_img[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2, :].copy() 538 | self.synthesis_perturbed_label = self.synthesis_perturbed_label[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2, :].copy() 539 | self.foreORbackground_label = 
self.foreORbackground_label[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2].copy() 540 | 541 | ''' 542 | 543 | 544 | 545 | '''save''' 546 | pixel_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(self.new_shape[0], self.new_shape[1], 2) 547 | 548 | if relativeShift_position == 'relativeShift_v2': 549 | self.synthesis_perturbed_label -= pixel_position 550 | fiducial_points_coordinate -= [center_x - self.new_shape[0] // 2, center_y - self.new_shape[1] // 2] 551 | 552 | self.synthesis_perturbed_label[:, :, 0] *= self.foreORbackground_label 553 | self.synthesis_perturbed_label[:, :, 1] *= self.foreORbackground_label 554 | self.synthesis_perturbed_img[:, :, 0] *= self.foreORbackground_label 555 | self.synthesis_perturbed_img[:, :, 1] *= self.foreORbackground_label 556 | self.synthesis_perturbed_img[:, :, 2] *= self.foreORbackground_label 557 | 558 | 559 | ''' 560 | synthesis_perturbed_img_filter = self.synthesis_perturbed_img.copy() 561 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 562 | # if self.is_perform(0.9, 0.1) or repeat_time > 5: 563 | # # if self.is_perform(0.1, 0.9) and repeat_time > 9: 564 | # # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (7, 7), 0) 565 | # # else: 566 | # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (5, 5), 0) 567 | # else: 568 | # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 569 | self.synthesis_perturbed_img[self.foreORbackground_label == 1] = synthesis_perturbed_img_filter[self.foreORbackground_label == 1] 570 | ''' 571 | 572 | '''HSV_v2''' 573 | perturbed_bg_img = perturbed_bg_img.astype(np.float32) 574 | # if self.is_perform(1, 0): 575 | # if self.is_perform(1, 0): 576 | if self.is_perform(0.1, 0.9): 577 | if self.is_perform(0.2, 0.8): 578 | 
synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy() 579 | 580 | synthesis_perturbed_img_clip_HSV = self.HSV_v1(synthesis_perturbed_img_clip_HSV) 581 | 582 | perturbed_bg_img[:, :, 0] *= 1-self.foreORbackground_label 583 | perturbed_bg_img[:, :, 1] *= 1-self.foreORbackground_label 584 | perturbed_bg_img[:, :, 2] *= 1-self.foreORbackground_label 585 | 586 | synthesis_perturbed_img_clip_HSV += perturbed_bg_img 587 | self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV 588 | else: 589 | perturbed_bg_img_HSV = perturbed_bg_img 590 | perturbed_bg_img_HSV = self.HSV_v1(perturbed_bg_img_HSV) 591 | 592 | perturbed_bg_img_HSV[:, :, 0] *= 1-self.foreORbackground_label 593 | perturbed_bg_img_HSV[:, :, 1] *= 1-self.foreORbackground_label 594 | perturbed_bg_img_HSV[:, :, 2] *= 1-self.foreORbackground_label 595 | 596 | self.synthesis_perturbed_img += perturbed_bg_img_HSV 597 | # self.synthesis_perturbed_img[np.sum(self.synthesis_perturbed_img, 2) == 771] = perturbed_bg_img_HSV[np.sum(self.synthesis_perturbed_img, 2) == 771] 598 | 599 | else: 600 | synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy() 601 | perturbed_bg_img[:, :, 0] *= 1 - self.foreORbackground_label 602 | perturbed_bg_img[:, :, 1] *= 1 - self.foreORbackground_label 603 | perturbed_bg_img[:, :, 2] *= 1 - self.foreORbackground_label 604 | 605 | synthesis_perturbed_img_clip_HSV += perturbed_bg_img 606 | 607 | synthesis_perturbed_img_clip_HSV = self.HSV_v1(synthesis_perturbed_img_clip_HSV) 608 | 609 | self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV 610 | 611 | '''''' 612 | # cv2.imwrite(self.save_path+'clip/'+perfix_+'_'+fold_curve+str(perturbed_time)+'-'+str(repeat_time)+'.png', synthesis_perturbed_img_clip) 613 | 614 | self.synthesis_perturbed_img[self.synthesis_perturbed_img < 0] = 0 615 | self.synthesis_perturbed_img[self.synthesis_perturbed_img > 255] = 255 616 | self.synthesis_perturbed_img = np.around(self.synthesis_perturbed_img).astype(np.uint8) 
617 | label = np.zeros_like(self.synthesis_perturbed_img, dtype=np.float32) 618 | label[:, :, :2] = self.synthesis_perturbed_label 619 | label[:, :, 2] = self.foreORbackground_label 620 | 621 | # grey = np.around(self.synthesis_perturbed_img[:, :, 0] * 0.2989 + self.synthesis_perturbed_img[:, :, 1] * 0.5870 + self.synthesis_perturbed_img[:, :, 0] * 0.1140).astype(np.int16) 622 | # synthesis_perturbed_grey = np.concatenate((grey.reshape(self.new_shape[0], self.new_shape[1], 1), label), axis=2) 623 | synthesis_perturbed_color = np.concatenate((self.synthesis_perturbed_img, label), axis=2) 624 | 625 | self.synthesis_perturbed_color = np.zeros_like(synthesis_perturbed_color, dtype=np.float32) 626 | # self.synthesis_perturbed_grey = np.zeros_like(synthesis_perturbed_grey, dtype=np.float32) 627 | reduce_value_x = int(round(min((random.random() / 2) * (self.new_shape[0] - (perturbed_x_max - perturbed_x_min)), min(reduce_value, reduce_value_v2)))) 628 | reduce_value_y = int(round(min((random.random() / 2) * (self.new_shape[1] - (perturbed_y_max - perturbed_y_min)), min(reduce_value, reduce_value_v2)))) 629 | perturbed_x_min = max(perturbed_x_min - reduce_value_x, 0) 630 | perturbed_x_max = min(perturbed_x_max + reduce_value_x, self.new_shape[0]) 631 | perturbed_y_min = max(perturbed_y_min - reduce_value_y, 0) 632 | perturbed_y_max = min(perturbed_y_max + reduce_value_y, self.new_shape[1]) 633 | 634 | if im_lr >= im_ud: 635 | self.synthesis_perturbed_color[:, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_color[:, perturbed_y_min:perturbed_y_max, :] 636 | # self.synthesis_perturbed_grey[:, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_grey[:, perturbed_y_min:perturbed_y_max, :] 637 | else: 638 | self.synthesis_perturbed_color[perturbed_x_min:perturbed_x_max, :, :] = synthesis_perturbed_color[perturbed_x_min:perturbed_x_max, :, :] 639 | # self.synthesis_perturbed_grey[perturbed_x_min:perturbed_x_max, :, :] = 
synthesis_perturbed_grey[perturbed_x_min:perturbed_x_max, :, :] 640 | '''blur''' 641 | if self.is_perform(0.3, 0.7): 642 | synthesis_perturbed_img_filter = self.synthesis_perturbed_color[:, :, :3].copy() 643 | if self.is_perform(0.2, 0.8): 644 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (5, 5), 0) 645 | else: 646 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 647 | if self.is_perform(0.5, 0.5): 648 | self.synthesis_perturbed_color[:, :, :3][self.synthesis_perturbed_color[:, :, 5] == 1] = synthesis_perturbed_img_filter[self.synthesis_perturbed_color[:, :, 5] == 1] 649 | else: 650 | self.synthesis_perturbed_color[:, :, :3] = synthesis_perturbed_img_filter 651 | 652 | fiducial_points_coordinate = fiducial_points_coordinate[:, :, ::-1] 653 | '''draw fiducial points''' 654 | stepSize = 0 655 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_color[:, :, :3].copy() 656 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1, 2): 657 | cv2.circle(fiducial_points_synthesis_perturbed_img, (l[0] + math.ceil(stepSize / 2), l[1] + math.ceil(stepSize / 2)), 2, (0, 0, 255), -1) 658 | cv2.imwrite(self.save_path + 'fiducial_points/' + perfix_ + '_' + fold_curve + '.png', fiducial_points_synthesis_perturbed_img) 659 | 660 | cv2.imwrite(self.save_path + 'png/' + perfix_ + '_' + fold_curve + '.png', self.synthesis_perturbed_color[:, :, :3]) 661 | 662 | '''forward-begin''' 663 | self.forward_mapping = np.full((save_img_shape[0], save_img_shape[1], 2), 0, dtype=np.float32) 664 | forward_mapping = np.full((save_img_shape[0], save_img_shape[1], 2), 0, dtype=np.float32) 665 | forward_position = (self.synthesis_perturbed_color[:, :, 3:5] + pixel_position)[self.synthesis_perturbed_color[:, :, 5] != 0, :] 666 | flat_position = np.argwhere(np.zeros(save_img_shape, dtype=np.uint32) == 0) 667 | vtx, wts = self.interp_weights(forward_position, flat_position) 668 | wts_sum = 
np.abs(wts).sum(-1) 669 | wts = wts[wts_sum <= 1, :] 670 | vtx = vtx[wts_sum <= 1, :] 671 | flat_position_forward = flat_position.reshape(save_img_shape[0], save_img_shape[1], 2)[self.synthesis_perturbed_color[:, :, 5] != 0, :] 672 | forward_mapping.reshape(save_img_shape[0] * save_img_shape[1], 2)[wts_sum <= 1, :] = self.interpolate(flat_position_forward, vtx, wts) 673 | forward_mapping = forward_mapping.reshape(save_img_shape[0], save_img_shape[1], 2) 674 | 675 | mapping_x_min_, mapping_y_min_, mapping_x_max_, mapping_y_max_ = self.adjust_position_v2(0, 0, im_lr, im_ud, self.new_shape) 676 | shreshold_zoom_out = 2 677 | mapping_x_min = mapping_x_min_ + shreshold_zoom_out 678 | mapping_y_min = mapping_y_min_ + shreshold_zoom_out 679 | mapping_x_max = mapping_x_max_ - shreshold_zoom_out 680 | mapping_y_max = mapping_y_max_ - shreshold_zoom_out 681 | self.forward_mapping[mapping_x_min:mapping_x_max, mapping_y_min:mapping_y_max] = forward_mapping[mapping_x_min:mapping_x_max, mapping_y_min:mapping_y_max] 682 | self.scan_img = np.full((save_img_shape[0], save_img_shape[1], 3), 0, dtype=np.float32) 683 | self.scan_img[mapping_x_min_:mapping_x_max_, mapping_y_min_:mapping_y_max_] = self.origin_img 684 | self.origin_img = self.scan_img 685 | # flat_img = np.full((save_img_shape[0], save_img_shape[1], 3), 0, dtype=np.float32) 686 | # cv2.remap(self.synthesis_perturbed_color[:, :, :3], self.forward_mapping[:, :, 1], self.forward_mapping[:, :, 0], cv2.INTER_LINEAR, flat_img) 687 | # cv2.imwrite(self.save_path + 'outputs/1.jpg', flat_img) 688 | '''forward-end''' 689 | 690 | '''image and label 691 | synthesis_perturbed_data = { 692 | 'fiducial_points': fiducial_points_coordinate, 693 | 'segment': np.array((segment_x, segment_y)) 694 | } 695 | cv2.imwrite(self.save_path + 'png/' + perfix_ + '_' + fold_curve + '.png', self.synthesis_perturbed_color[:, :, :3]) 696 | ''' 697 | '''or''' 698 | synthesis_perturbed_data = { 699 | 'image':self.synthesis_perturbed_color[:, :, :3], 700 | 
'fiducial_points': fiducial_points_coordinate, 701 | 'segment': np.array((segment_x, segment_y)) 702 | } 703 | 704 | with open(self.save_path+'color/'+perfix_+'_'+fold_curve+'.gw', 'wb') as f: 705 | pickle_perturbed_data = pickle.dumps(synthesis_perturbed_data) 706 | f.write(pickle_perturbed_data) 707 | # with open(self.save_path+'grey/'+perfix_+'_'+fold_curve+'.gw', 'wb') as f: 708 | # pickle_perturbed_data = pickle.dumps(self.synthesis_perturbed_grey) 709 | # f.write(pickle_perturbed_data) 710 | # cv2.imwrite(self.save_path+'grey_im/'+perfix_+'_'+fold_curve+'.png', self.synthesis_perturbed_color[:, :, :1]) 711 | 712 | 713 | # cv2.imwrite(self.save_path + 'scan/' + self.save_suffix + '_' + str(m) + '.png', self.origin_img) 714 | trian_t = time.time() - begin_train 715 | mm, ss = divmod(trian_t, 60) 716 | hh, mm = divmod(mm, 60) 717 | print(str(m)+'_'+str(n)+'_'+fold_curve+' '+str(repeat_time)+" Time : %02d:%02d:%02d\n" % (hh, mm, ss)) 718 | 719 | 720 | def multiThread(m, n, img_path_, bg_path_, save_path, save_suffix): 721 | saveFold = perturbed(img_path_, bg_path_, save_path, save_suffix) 722 | saveCurve = perturbed(img_path_, bg_path_, save_path, save_suffix) 723 | 724 | repeat_time = min(max(round(np.random.normal(10, 3)), 5), 16) 725 | fold = threading.Thread(target=saveFold.save_img, args=(m, n, 'fold', repeat_time, 'relativeShift_v2'), name='fold') 726 | curve = threading.Thread(target=saveCurve.save_img, args=(m, n, 'curve', repeat_time, 'relativeShift_v2'), name='curve') 727 | 728 | fold.start() 729 | curve.start() 730 | curve.join() 731 | fold.join() 732 | def xgw(args): 733 | path = args.path 734 | bg_path = args.bg_path 735 | if args.output_path is None: 736 | save_path = '/lustre/home/gwxie/data/unwarp_new/train/fiducial1024/fiducial1024_v2_a2/' 737 | else: 738 | save_path = args.output_path 739 | 740 | 741 | # if not os.path.exists(save_path + 'grey/'): 742 | # os.makedirs(save_path + 'grey/') 743 | if not os.path.exists(save_path + 'color/'): 744 | 
os.makedirs(save_path + 'color/') 745 | 746 | if not os.path.exists(save_path + 'fiducial_points/'): 747 | os.makedirs(save_path + 'fiducial_points/') 748 | 749 | if not os.path.exists(save_path + 'png/'): 750 | os.makedirs(save_path + 'png/') 751 | 752 | # if not os.path.exists(save_path + 'scan/'): 753 | # os.makedirs(save_path + 'scan/') 754 | 755 | if not os.path.exists(save_path + 'outputs/'): 756 | os.makedirs(save_path + 'outputs/') 757 | 758 | save_suffix = str.split(args.path, '/')[-2] 759 | 760 | all_img_path = getDatasets(path) 761 | all_bgImg_path = getDatasets(bg_path) 762 | global begin_train 763 | begin_train = time.time() 764 | # img_path_ = '/lustre/home/gwxie/data/unwarp_new/train/origin_datasets/DL01/b04011301.bmp' 765 | # img_path_ = '/lustre/home/gwxie/data/unwarp_new/train/origin_datasets/validate/FGHJOP.jpg' 766 | # img_path_ = '/lustre/home/gwxie/data/unwarp_new/train/origin_datasets/Maurdor/F9/GUZFUD.jpg' 767 | # bg_path_ = bg_path + random.choice(all_bgImg_path) + '/' 768 | # save_perturbed = perturbed(img_path_, bg_path_, save_path, save_suffix) 769 | # save_perturbed.save_img(0, 400, 'curve',1, relativeShift_position='relativeShift_v2') 770 | fiducial_points = 61 # 31 771 | process_pool = Pool(2) 772 | for m, img_path in enumerate(all_img_path): 773 | for n in range(args.sys_num, args.sys_num_e): 774 | img_path_ = path+img_path 775 | bg_path_ = bg_path+random.choice(all_bgImg_path)+'/' 776 | 777 | for m_n in range(10): 778 | try: 779 | saveFold = perturbed(img_path_, bg_path_, save_path, save_suffix) 780 | saveCurve = perturbed(img_path_, bg_path_, save_path, save_suffix) 781 | 782 | repeat_time = min(max(round(np.random.normal(14, 2)), 1), 20) 783 | # repeat_time = min(max(round(np.random.normal(8, 4)), 1), 12) # random.randint(1, 2) # min(max(round(np.random.normal(8, 4)), 1), 12) 784 | process_pool.apply_async(func=saveFold.save_img, args=(m, n, 'fold', repeat_time, fiducial_points, 'relativeShift_v2')) 785 | 786 | repeat_time = 
min(max(round(np.random.normal(10, 4)), 1), 14) 787 | # repeat_time = min(max(round(np.random.normal(6, 4)), 1), 10) 788 | process_pool.apply_async(func=saveCurve.save_img, args=(m, n, 'curve', repeat_time, fiducial_points, 'relativeShift_v2')) 789 | 790 | except BaseException as err: 791 | print(err) 792 | continue 793 | break 794 | # print('end') 795 | 796 | process_pool.close() 797 | process_pool.join() 798 | 799 | if __name__ == '__main__': 800 | 801 | parser = argparse.ArgumentParser(description='Hyperparams') 802 | parser.add_argument('--path', 803 | default='./scan/new/', type=str, 804 | help='the path of origin img.') 805 | parser.add_argument('--bg_path', 806 | default='./background/', type=str, 807 | help='the path of bg img.') 808 | 809 | parser.add_argument('--output_path', 810 | default='./output/', type=str, 811 | help='the path of origin img.') 812 | # parser.set_defaults(output_path='test') 813 | parser.add_argument('--count_from', '-p', default=0, type=int, 814 | metavar='N', help='print frequency (default: 10)') # print frequency 815 | 816 | parser.add_argument('--repeat_T', default=0, type=int) 817 | 818 | parser.add_argument('--sys_num', default=11, type=int) 819 | parser.add_argument('--sys_num_e', default=17, type=int) 820 | 821 | args = parser.parse_args() 822 | xgw(args) 823 | -------------------------------------------------------------------------------- /perturbed_images_generation_multiProcess_addition3.py: -------------------------------------------------------------------------------- 1 | ''' 2 | GuoWang xie 3 | set up :2020-1-9 4 | intergrate img and label into one file 5 | 6 | -- data1024_v1 7 | ''' 8 | 9 | 10 | import argparse 11 | import sys, os 12 | import pickle 13 | import random 14 | import collections 15 | import json 16 | import numpy as np 17 | import scipy.io as io 18 | import scipy.misc as m 19 | import matplotlib.pyplot as plt 20 | import glob 21 | import math 22 | import time 23 | 24 | import threading 25 | import 
multiprocessing as mp 26 | from multiprocessing import Pool 27 | import re 28 | import cv2 29 | # sys.path.append('/lustre/home/gwxie/hope/project/dewarp/datasets/') # /lustre/home/gwxie/program/project/unwarp/perturbed_imgaes/GAN 30 | import utils 31 | 32 | def getDatasets(dir): 33 | return os.listdir(dir) 34 | 35 | class perturbed(utils.BasePerturbed): 36 | def __init__(self, path, bg_path, save_path, save_suffix): 37 | 38 | self.path = path 39 | self.bg_path = bg_path 40 | self.save_path = save_path 41 | self.save_suffix = save_suffix 42 | def save_img(self, m, n, fold_curve='fold', repeat_time=4, fiducial_points = 16, relativeShift_position='relativeShift_v2'): 43 | origin_img = cv2.imread(self.path, flags=cv2.IMREAD_COLOR) 44 | 45 | save_img_shape = [512*2, 480*2] # 320 46 | # reduce_value = np.random.choice([2**4, 2**5, 2**6, 2**7, 2**8], p=[0.01, 0.1, 0.4, 0.39, 0.1]) 47 | reduce_value = np.random.choice([2*2, 4*2, 8*2, 16*2, 24*2, 32*2, 40*2, 48*2], p=[0.02, 0.08, 0.2, 0.3, 0.1, 0.1, 0.1, 0.1]) 48 | # reduce_value = np.random.choice([8*2, 16*2, 24*2, 32*2, 40*2, 48*2], p=[0.1, 0.2, 0.4, 0.1, 0.1, 0.1]) 49 | # reduce_value = np.random.choice([8*2, 16*2, 24*2, 32*2, 40*2, 48*2], p=[0.01, 0.02, 0.2, 0.4, 0.19, 0.18]) 50 | # reduce_value = np.random.choice([16, 24, 32, 40, 48, 64], p=[0.01, 0.1, 0.2, 0.4, 0.2, 0.09]) 51 | base_img_shrink = save_img_shape[0] - reduce_value 52 | 53 | # enlarge_img_shrink = [1024, 768] 54 | # enlarge_img_shrink = [896, 672] # 420 55 | enlarge_img_shrink = [512*4, 480*4] # 420 56 | # enlarge_img_shrink = [896*2, 768*2] # 420 57 | # enlarge_img_shrink = [896, 768] # 420 58 | # enlarge_img_shrink = [768, 576] # 420 59 | # enlarge_img_shrink = [640, 480] # 420 60 | 61 | '''''' 62 | im_lr = origin_img.shape[0] 63 | im_ud = origin_img.shape[1] 64 | 65 | reduce_value_v2 = max(np.random.choice(range(0, reduce_value, 2)), 4) 66 | # reduce_value_v2 = np.random.choice([4*2, 8*2, 16*2, 24*2, 28*2, 32*2, 48*2, 64*2], p=[0.1, 0.1, 0.2, 0.2, 
0.2, 0.1, 0.08, 0.02]) 67 | # reduce_value_v2 = np.random.choice([16, 24, 28, 32, 48, 64], p=[0.01, 0.1, 0.2, 0.3, 0.25, 0.14]) 68 | 69 | if im_lr > im_ud: 70 | im_ud = min(int(im_ud / im_lr * base_img_shrink), save_img_shape[1] - reduce_value_v2) 71 | im_lr = save_img_shape[0] - reduce_value 72 | else: 73 | base_img_shrink = save_img_shape[1] - reduce_value 74 | im_lr = min(int(im_lr / im_ud * base_img_shrink), save_img_shape[0] - reduce_value_v2) 75 | im_ud = base_img_shrink 76 | 77 | if round(im_lr / im_ud, 2) < 0.5 or round(im_ud / im_lr, 2) < 0.5: 78 | repeat_time = min(repeat_time, 8) 79 | 80 | edge_padding = 3 81 | im_lr -= im_lr % (fiducial_points-1) - (2*edge_padding) # im_lr % (fiducial_points-1) - 1 82 | im_ud -= im_ud % (fiducial_points-1) - (2*edge_padding) # im_ud % (fiducial_points-1) - 1 83 | im_hight = np.linspace(edge_padding, im_lr - edge_padding, fiducial_points, dtype=np.int64) 84 | im_wide = np.linspace(edge_padding, im_ud - edge_padding, fiducial_points, dtype=np.int64) 85 | # im_lr -= im_lr % (fiducial_points-1) - (1+2*edge_padding) # im_lr % (fiducial_points-1) - 1 86 | # im_ud -= im_ud % (fiducial_points-1) - (1+2*edge_padding) # im_ud % (fiducial_points-1) - 1 87 | # im_hight = np.linspace(edge_padding, im_lr - (1+edge_padding), fiducial_points, dtype=np.int64) 88 | # im_wide = np.linspace(edge_padding, im_ud - (1+edge_padding), fiducial_points, dtype=np.int64) 89 | im_x, im_y = np.meshgrid(im_hight, im_wide) 90 | segment_x = (im_lr) // (fiducial_points-1) 91 | segment_y = (im_ud) // (fiducial_points-1) 92 | 93 | # plt.plot(im_x, im_y, 94 | # color='limegreen', 95 | # marker='.', 96 | # linestyle='') 97 | # plt.grid(True) 98 | # plt.show() 99 | self.origin_img = cv2.resize(origin_img, (im_ud, im_lr), interpolation=cv2.INTER_CUBIC) 100 | 101 | perturbed_bg_ = getDatasets(self.bg_path) 102 | perturbed_bg_img_ = self.bg_path+random.choice(perturbed_bg_) 103 | perturbed_bg_img = cv2.imread(perturbed_bg_img_, flags=cv2.IMREAD_COLOR) 104 | 105 
| mesh_shape = self.origin_img.shape[:2] 106 | 107 | self.synthesis_perturbed_img = np.full((enlarge_img_shrink[0], enlarge_img_shrink[1], 3), 256, dtype=np.float32)#np.zeros_like(perturbed_bg_img) 108 | # self.synthesis_perturbed_img = np.full((enlarge_img_shrink[0], enlarge_img_shrink[1], 3), 0, dtype=np.int16)#np.zeros_like(perturbed_bg_img) 109 | self.new_shape = self.synthesis_perturbed_img.shape[:2] 110 | perturbed_bg_img = cv2.resize(perturbed_bg_img, (save_img_shape[1], save_img_shape[0]), cv2.INPAINT_TELEA) 111 | 112 | origin_pixel_position = np.argwhere(np.zeros(mesh_shape, dtype=np.uint32) == 0).reshape(mesh_shape[0], mesh_shape[1], 2) 113 | pixel_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(self.new_shape[0], self.new_shape[1], 2) 114 | self.perturbed_xy_ = np.zeros((self.new_shape[0], self.new_shape[1], 2)) 115 | # self.perturbed_xy_ = pixel_position.copy().astype(np.float32) 116 | 117 | self.synthesis_perturbed_label = np.zeros((self.new_shape[0], self.new_shape[1], 2)) 118 | x_min, y_min, x_max, y_max = self.adjust_position_v2(0, 0, mesh_shape[0], mesh_shape[1], save_img_shape) 119 | origin_pixel_position += [x_min, y_min] 120 | 121 | x_min, y_min, x_max, y_max = self.adjust_position(0, 0, mesh_shape[0], mesh_shape[1]) 122 | x_shift = random.randint(-enlarge_img_shrink[0]//16, enlarge_img_shrink[0]//16) 123 | y_shift = random.randint(-enlarge_img_shrink[1]//16, enlarge_img_shrink[1]//16) 124 | x_min += x_shift 125 | x_max += x_shift 126 | y_min += y_shift 127 | y_max += y_shift 128 | 129 | '''im_x,y''' 130 | im_x += x_min 131 | im_y += y_min 132 | 133 | self.synthesis_perturbed_img[x_min:x_max, y_min:y_max] = self.origin_img 134 | self.synthesis_perturbed_label[x_min:x_max, y_min:y_max] = origin_pixel_position 135 | 136 | synthesis_perturbed_img_map = self.synthesis_perturbed_img.copy() 137 | synthesis_perturbed_label_map = self.synthesis_perturbed_label.copy() 138 | 139 | foreORbackground_label = 
np.full((mesh_shape), 1, dtype=np.int16) 140 | foreORbackground_label_map = np.full((self.new_shape), 0, dtype=np.int16) 141 | foreORbackground_label_map[x_min:x_max, y_min:y_max] = foreORbackground_label 142 | 143 | # synthesis_perturbed_img_map = self.pad(self.synthesis_perturbed_img.copy(), x_min, y_min, x_max, y_max) 144 | # synthesis_perturbed_label_map = self.pad(synthesis_perturbed_label_map, x_min, y_min, x_max, y_max) 145 | '''*****************************************************************''' 146 | is_normalizationFun_mixture = self.is_perform(0.1, 0.9) 147 | normalizationFun_0_1 = self.is_perform(0.5, 0.5) 148 | 149 | 150 | synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 256) 151 | # synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 0, dtype=np.int16) 152 | synthesis_perturbed_label = np.zeros_like(self.synthesis_perturbed_label) 153 | 154 | p_pp_choice = self.is_perform(0.8, 0.2) if fold_curve == 'fold' else self.is_perform(0.1, 0.9) 155 | for repeat_i in range(repeat_time): 156 | 157 | if fold_curve == 'fold': 158 | fold_curve_random = self.is_perform(1, 0) 159 | alpha_perturbed = random.randint(70, 130) / 100 160 | else: 161 | fold_curve_random = self.is_perform(0, 1) 162 | alpha_perturbed = random.randint(80, 130) / 100 163 | 164 | '''''' 165 | linspace_x = [0, (self.new_shape[0] - im_lr) // 2 - 1, 166 | self.new_shape[0] - (self.new_shape[0] - im_lr) // 2 - 1, self.new_shape[0] - 1] 167 | linspace_y = [0, (self.new_shape[1] - im_ud) // 2 - 1, 168 | self.new_shape[1] - (self.new_shape[1] - im_ud) // 2 - 1, self.new_shape[1] - 1] 169 | linspace_x_seq = [1, 2, 3] 170 | linspace_y_seq = [1, 2, 3] 171 | r_x = random.choice(linspace_x_seq) 172 | r_y = random.choice(linspace_y_seq) 173 | perturbed_p = np.array( 174 | [random.randint(linspace_x[r_x-1] * 10, linspace_x[r_x] * 10), 175 | random.randint(linspace_y[r_y-1] * 10, linspace_y[r_y] * 10)])/10 176 | if ((r_x == 1 or r_x == 3) and (r_y == 1 or r_y == 3)) and 
p_pp_choice: 177 | linspace_x_seq.remove(r_x) 178 | linspace_y_seq.remove(r_y) 179 | r_x = random.choice(linspace_x_seq) 180 | r_y = random.choice(linspace_y_seq) 181 | perturbed_pp = np.array( 182 | [random.randint(linspace_x[r_x-1] * 10, linspace_x[r_x] * 10), 183 | random.randint(linspace_y[r_y-1] * 10, linspace_y[r_y] * 10)])/10 184 | # perturbed_p, perturbed_pp = np.array( 185 | # [random.randint(0, self.new_shape[0] * 10) / 10, 186 | # random.randint(0, self.new_shape[1] * 10) / 10]) \ 187 | # , np.array([random.randint(0, self.new_shape[0] * 10) / 10, 188 | # random.randint(0, self.new_shape[1] * 10) / 10]) 189 | # perturbed_p, perturbed_pp = np.array( 190 | # [random.randint((self.new_shape[0]-im_lr)//2*10, (self.new_shape[0]-(self.new_shape[0]-im_lr)//2) * 10) / 10, 191 | # random.randint((self.new_shape[1]-im_ud)//2*10, (self.new_shape[1]-(self.new_shape[1]-im_ud)//2) * 10) / 10]) \ 192 | # , np.array([random.randint((self.new_shape[0]-im_lr)//2*10, (self.new_shape[0]-(self.new_shape[0]-im_lr)//2) * 10) / 10, 193 | # random.randint((self.new_shape[1]-im_ud)//2*10, (self.new_shape[1]-(self.new_shape[1]-im_ud)//2) * 10) / 10]) 194 | '''''' 195 | 196 | perturbed_vp = perturbed_pp - perturbed_p 197 | perturbed_vp_norm = np.linalg.norm(perturbed_vp) 198 | 199 | perturbed_distance_vertex_and_line = np.dot((perturbed_p - pixel_position), perturbed_vp) / perturbed_vp_norm 200 | '''''' 201 | if fold_curve == 'fold' and self.is_perform(0.5, 0.5): 202 | perturbed_v = np.array([random.randint(-9000, 9000) / 100, random.randint(-9000, 9000) / 100]) 203 | else: 204 | if self.is_perform(0.5, 0.5): 205 | perturbed_v = np.array([random.randint(-8000, 8000) / 100, random.randint(-8000, 8000) / 100]) 206 | else: 207 | perturbed_v = np.array([random.randint(-6000, 6000) / 100, random.randint(-6000, 6000) / 100]) 208 | '''''' 209 | if fold_curve == 'fold': 210 | if is_normalizationFun_mixture: 211 | if self.is_perform(0.5, 0.5): 212 | perturbed_d = 
np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 213 | else: 214 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 4)) 215 | else: 216 | if normalizationFun_0_1: 217 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 4)) 218 | else: 219 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 220 | 221 | else: 222 | if is_normalizationFun_mixture: 223 | if self.is_perform(0.5, 0.5): 224 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 225 | else: 226 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 4)) 227 | else: 228 | if normalizationFun_0_1: 229 | perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 4)) 230 | else: 231 | perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line)) 232 | '''''' 233 | if fold_curve_random: 234 | # omega_perturbed = (alpha_perturbed+0.2) / (perturbed_d + alpha_perturbed) 235 | # omega_perturbed = alpha_perturbed**perturbed_d 236 | omega_perturbed = alpha_perturbed / (perturbed_d + alpha_perturbed) 237 | else: 238 | omega_perturbed = 1 - perturbed_d ** alpha_perturbed 239 | 240 | '''shadow''' 241 | if self.is_perform(0.4, 0.6): 242 | synthesis_perturbed_img_map[x_min:x_max, y_min:y_max] = np.minimum(np.maximum(synthesis_perturbed_img_map[x_min:x_max, y_min:y_max] - np.int16(np.round(omega_perturbed[x_min:x_max, y_min:y_max].repeat(3).reshape(x_max-x_min, y_max-y_min, 3) * abs(np.linalg.norm(perturbed_v//2))*np.array([0.4-random.random()*0.1, 0.4-random.random()*0.1, 0.4-random.random()*0.1]))), 0), 255) 243 | '''''' 244 | 245 | if relativeShift_position in ['position', 'relativeShift_v2']: 246 | self.perturbed_xy_ += np.array([omega_perturbed * perturbed_v[0], omega_perturbed * perturbed_v[1]]).transpose(1, 2, 0) 247 | else: 248 | print('relativeShift_position error') 249 | exit() 250 | 
251 | ''' 252 | flat_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape( 253 | self.new_shape[0] * self.new_shape[1], 2) 254 | vtx, wts = self.interp_weights(self.perturbed_xy_.reshape(self.new_shape[0] * self.new_shape[1], 2), flat_position) 255 | wts_sum = np.abs(wts).sum(-1) 256 | 257 | # flat_img.reshape(flat_shape[0] * flat_shape[1], 3)[:] = interpolate(pixel, vtx, wts) 258 | wts = wts[wts_sum <= 1, :] 259 | vtx = vtx[wts_sum <= 1, :] 260 | synthesis_perturbed_img.reshape(self.new_shape[0] * self.new_shape[1], 3)[wts_sum <= 1, 261 | :] = self.interpolate(synthesis_perturbed_img_map.reshape(self.new_shape[0] * self.new_shape[1], 3), vtx, wts) 262 | 263 | synthesis_perturbed_label.reshape(self.new_shape[0] * self.new_shape[1], 2)[wts_sum <= 1, 264 | :] = self.interpolate(synthesis_perturbed_label_map.reshape(self.new_shape[0] * self.new_shape[1], 2), vtx, wts) 265 | 266 | foreORbackground_label = np.zeros(self.new_shape) 267 | foreORbackground_label.reshape(self.new_shape[0] * self.new_shape[1], 1)[wts_sum <= 1, :] = self.interpolate(foreORbackground_label_map.reshape(self.new_shape[0] * self.new_shape[1], 1), vtx, wts) 268 | foreORbackground_label[foreORbackground_label < 0.99] = 0 269 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 270 | 271 | # synthesis_perturbed_img = np.around(synthesis_perturbed_img).astype(np.uint8) 272 | synthesis_perturbed_label[:, :, 0] *= foreORbackground_label 273 | synthesis_perturbed_label[:, :, 1] *= foreORbackground_label 274 | synthesis_perturbed_img[:, :, 0] *= foreORbackground_label 275 | synthesis_perturbed_img[:, :, 1] *= foreORbackground_label 276 | synthesis_perturbed_img[:, :, 2] *= foreORbackground_label 277 | 278 | self.synthesis_perturbed_img = synthesis_perturbed_img 279 | self.synthesis_perturbed_label = synthesis_perturbed_label 280 | ''' 281 | 282 | '''perspective''' 283 | 284 | perspective_shreshold = random.randint(24, 40)*10 # 280 285 | x_min_per, y_min_per, x_max_per, 
y_max_per = self.adjust_position(perspective_shreshold, perspective_shreshold, self.new_shape[0]-perspective_shreshold, self.new_shape[1]-perspective_shreshold) 286 | pts1 = np.float32([[x_min_per, y_min_per], [x_max_per, y_min_per], [x_min_per, y_max_per], [x_max_per, y_max_per]]) 287 | e_1_ = x_max_per - x_min_per 288 | e_2_ = y_max_per - y_min_per 289 | e_3_ = e_2_ 290 | e_4_ = e_1_ 291 | perspective_shreshold_h = e_1_*0.02 292 | perspective_shreshold_w = e_2_*0.02 293 | a_min_, a_max_ = 80, 100 294 | 295 | if fold_curve == 'curve' and self.is_perform(0.2, 0.8): 296 | if self.is_perform(0.5, 0.5): 297 | while True: 298 | pts2 = np.around( 299 | np.float32([[x_min_per - (random.random()) * perspective_shreshold, y_min_per + (random.random()) * perspective_shreshold], 300 | [x_max_per - (random.random()) * perspective_shreshold, y_min_per - (random.random()) * perspective_shreshold], 301 | [x_min_per + (random.random()) * perspective_shreshold, y_max_per + (random.random()) * perspective_shreshold], 302 | [x_max_per + (random.random()) * perspective_shreshold, y_max_per - (random.random()) * perspective_shreshold]])) # right 303 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 304 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 305 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 306 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 307 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 308 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 309 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 310 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 311 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or ( 312 | a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 313 | break 314 | 315 | else: 316 | while True: 
317 | pts2 = np.around( 318 | np.float32([[x_min_per + (random.random()) * perspective_shreshold, y_min_per - (random.random()) * perspective_shreshold], 319 | [x_max_per + (random.random()) * perspective_shreshold, y_min_per + (random.random()) * perspective_shreshold], 320 | [x_min_per - (random.random()) * perspective_shreshold, y_max_per - (random.random()) * perspective_shreshold], 321 | [x_max_per - (random.random()) * perspective_shreshold, y_max_per + (random.random()) * perspective_shreshold]])) 322 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 323 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 324 | e_3 = np.linalg.norm(pts2[1]-pts2[3]) 325 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 326 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 327 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 328 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 329 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 330 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or ( 331 | a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 332 | break 333 | 334 | else: 335 | while True: 336 | pts2 = np.around(np.float32([[x_min_per+(random.random()-0.5)*perspective_shreshold, y_min_per+(random.random()-0.5)*perspective_shreshold], 337 | [x_max_per+(random.random()-0.5)*perspective_shreshold, y_min_per+(random.random()-0.5)*perspective_shreshold], 338 | [x_min_per+(random.random()-0.5)*perspective_shreshold, y_max_per+(random.random()-0.5)*perspective_shreshold], 339 | [x_max_per+(random.random()-0.5)*perspective_shreshold, y_max_per+(random.random()-0.5)*perspective_shreshold]])) 340 | e_1 = np.linalg.norm(pts2[0]-pts2[1]) 341 | e_2 = np.linalg.norm(pts2[0]-pts2[2]) 342 | e_3 = 
np.linalg.norm(pts2[1]-pts2[3]) 343 | e_4 = np.linalg.norm(pts2[2]-pts2[3]) 344 | if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \ 345 | e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \ 346 | abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w: 347 | a0_, a1_, a2_, a3_ = self.get_angle_4(pts2) 348 | if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_): 349 | break 350 | 351 | 352 | M = cv2.getPerspectiveTransform(pts1, pts2) 353 | one = np.ones((self.new_shape[0], self.new_shape[1], 1), dtype=np.int16) 354 | matr = np.dstack((pixel_position, one)) 355 | new = np.dot(M, matr.reshape(-1, 3).T).T.reshape(self.new_shape[0], self.new_shape[1], 3) 356 | x = new[:, :, 0]/new[:, :, 2] 357 | y = new[:, :, 1]/new[:, :, 2] 358 | perturbed_xy_ = np.dstack((x, y)) 359 | # perturbed_xy_round_int = np.around(cv2.bilateralFilter(perturbed_xy_round_int, 9, 75, 75)) 360 | # perturbed_xy_round_int = np.around(cv2.blur(perturbed_xy_, (17, 17))) 361 | # perturbed_xy_round_int = cv2.blur(perturbed_xy_round_int, (17, 17)) 362 | # perturbed_xy_round_int = cv2.GaussianBlur(perturbed_xy_round_int, (7, 7), 0) 363 | perturbed_xy_ = perturbed_xy_-np.min(perturbed_xy_.T.reshape(2, -1), 1) 364 | # perturbed_xy_round_int = np.around(perturbed_xy_round_int-np.min(perturbed_xy_round_int.T.reshape(2, -1), 1)).astype(np.int16) 365 | 366 | self.perturbed_xy_ += perturbed_xy_ 367 | 368 | '''perspective end''' 369 | 370 | '''to img''' 371 | flat_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape( 372 | self.new_shape[0] * self.new_shape[1], 2) 373 | # self.perturbed_xy_ = cv2.blur(self.perturbed_xy_, (7, 7)) 374 | self.perturbed_xy_ = 
cv2.GaussianBlur(self.perturbed_xy_, (7, 7), 0) 375 | 376 | '''get fiducial points''' 377 | fiducial_points_coordinate = self.perturbed_xy_[im_x, im_y] 378 | 379 | vtx, wts = self.interp_weights(self.perturbed_xy_.reshape(self.new_shape[0] * self.new_shape[1], 2), flat_position) 380 | wts_sum = np.abs(wts).sum(-1) 381 | 382 | # flat_img.reshape(flat_shape[0] * flat_shape[1], 3)[:] = interpolate(pixel, vtx, wts) 383 | wts = wts[wts_sum <= 1, :] 384 | vtx = vtx[wts_sum <= 1, :] 385 | synthesis_perturbed_img.reshape(self.new_shape[0] * self.new_shape[1], 3)[wts_sum <= 1, 386 | :] = self.interpolate(synthesis_perturbed_img_map.reshape(self.new_shape[0] * self.new_shape[1], 3), vtx, wts) 387 | 388 | synthesis_perturbed_label.reshape(self.new_shape[0] * self.new_shape[1], 2)[wts_sum <= 1, 389 | :] = self.interpolate(synthesis_perturbed_label_map.reshape(self.new_shape[0] * self.new_shape[1], 2), vtx, wts) 390 | 391 | foreORbackground_label = np.zeros(self.new_shape) 392 | foreORbackground_label.reshape(self.new_shape[0] * self.new_shape[1], 1)[wts_sum <= 1, :] = self.interpolate(foreORbackground_label_map.reshape(self.new_shape[0] * self.new_shape[1], 1), vtx, wts) 393 | foreORbackground_label[foreORbackground_label < 0.99] = 0 394 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 395 | 396 | self.synthesis_perturbed_img = synthesis_perturbed_img 397 | self.synthesis_perturbed_label = synthesis_perturbed_label 398 | self.foreORbackground_label = foreORbackground_label 399 | 400 | '''draw fiducial points 401 | stepSize = 0 402 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_img.copy() 403 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1,2): 404 | cv2.circle(fiducial_points_synthesis_perturbed_img, (l[1] + math.ceil(stepSize / 2), l[0] + math.ceil(stepSize / 2)), 5, (0, 0, 255), -1) 405 | cv2.imwrite('/lustre/home/gwxie/program/project/unwarp/unwarp_perturbed/TPS/img/cv_TPS_large.jpg', 
fiducial_points_synthesis_perturbed_img) 406 | ''' 407 | 408 | 409 | '''clip''' 410 | 411 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = -1, -1, self.new_shape[0], self.new_shape[1] 412 | for x in range(self.new_shape[0] // 2, perturbed_x_max): 413 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and perturbed_x_max - 1 > x: 414 | perturbed_x_max = x 415 | break 416 | for x in range(self.new_shape[0] // 2, perturbed_x_min, -1): 417 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and x > 0: 418 | perturbed_x_min = x 419 | break 420 | for y in range(self.new_shape[1] // 2, perturbed_y_max): 421 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and perturbed_y_max - 1 > y: 422 | perturbed_y_max = y 423 | break 424 | for y in range(self.new_shape[1] // 2, perturbed_y_min, -1): 425 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and y > 0: 426 | perturbed_y_min = y 427 | break 428 | 429 | if perturbed_x_min == 0 or perturbed_x_max == self.new_shape[0] or perturbed_y_min == self.new_shape[1] or perturbed_y_max == self.new_shape[1]: 430 | raise Exception('clip error') 431 | 432 | if perturbed_x_max - perturbed_x_min < im_lr//2 or perturbed_y_max - perturbed_y_min < im_ud//2: 433 | raise Exception('clip error') 434 | 435 | 436 | perfix_ = self.save_suffix+'_'+str(m)+'_'+str(n) 437 | is_shrink = False 438 | if perturbed_x_max - perturbed_x_min > save_img_shape[0] or perturbed_y_max - perturbed_y_min > save_img_shape[1]: 439 | is_shrink = True 440 | synthesis_perturbed_img = cv2.resize(self.synthesis_perturbed_img[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 441 | synthesis_perturbed_label = cv2.resize(self.synthesis_perturbed_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 442 | 
foreORbackground_label = cv2.resize(self.foreORbackground_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR) 443 | foreORbackground_label[foreORbackground_label < 0.99] = 0 444 | foreORbackground_label[foreORbackground_label >= 0.99] = 1 445 | '''shrink fiducial points''' 446 | center_x_l, center_y_l = perturbed_x_min + (perturbed_x_max - perturbed_x_min) // 2, perturbed_y_min + (perturbed_y_max - perturbed_y_min) // 2 447 | fiducial_points_coordinate_copy = fiducial_points_coordinate.copy() 448 | shrink_x = im_lr/(perturbed_x_max - perturbed_x_min) 449 | shrink_y = im_ud/(perturbed_y_max - perturbed_y_min) 450 | fiducial_points_coordinate *= [shrink_x, shrink_y] 451 | center_x_l *= shrink_x 452 | center_y_l *= shrink_y 453 | # fiducial_points_coordinate[1:, 1:] *= [shrink_x, shrink_y] 454 | # fiducial_points_coordinate[1:, :1, 0] *= shrink_x 455 | # fiducial_points_coordinate[:1, 1:, 1] *= shrink_y 456 | # perturbed_x_min_copy, perturbed_y_min_copy, perturbed_x_max_copy, perturbed_y_max_copy = perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max 457 | 458 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = self.adjust_position_v2(0, 0, im_lr, im_ud, self.new_shape) 459 | 460 | self.synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 256) 461 | self.synthesis_perturbed_label = np.zeros_like(self.synthesis_perturbed_label) 462 | self.foreORbackground_label = np.zeros_like(self.foreORbackground_label) 463 | self.synthesis_perturbed_img[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_img 464 | self.synthesis_perturbed_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_label 465 | self.foreORbackground_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max] = foreORbackground_label 466 | 467 | center_x, center_y = perturbed_x_min + 
(perturbed_x_max - perturbed_x_min) // 2, perturbed_y_min + (perturbed_y_max - perturbed_y_min) // 2 468 | if is_shrink: 469 | fiducial_points_coordinate += [center_x-center_x_l, center_y-center_y_l] 470 | 471 | '''draw fiducial points 472 | stepSize = 0 473 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_img.copy() 474 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1, 2): 475 | cv2.circle(fiducial_points_synthesis_perturbed_img, 476 | (l[1] + math.ceil(stepSize / 2), l[0] + math.ceil(stepSize / 2)), 5, (0, 0, 255), -1) 477 | cv2.imwrite('/lustre/home/gwxie/program/project/unwarp/unwarp_perturbed/TPS/img/cv_TPS_small.jpg',fiducial_points_synthesis_perturbed_img) 478 | ''' 479 | self.new_shape = save_img_shape 480 | self.synthesis_perturbed_img = self.synthesis_perturbed_img[ 481 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 482 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2, 483 | :].copy() 484 | self.synthesis_perturbed_label = self.synthesis_perturbed_label[ 485 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 486 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2, 487 | :].copy() 488 | self.foreORbackground_label = self.foreORbackground_label[ 489 | center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2, 490 | center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2].copy() 491 | 492 | 493 | perturbed_x_ = max(self.new_shape[0] - (perturbed_x_max - perturbed_x_min), 0) 494 | perturbed_x_min = perturbed_x_ // 2 495 | perturbed_x_max = self.new_shape[0] - perturbed_x_ // 2 if perturbed_x_%2 == 0 else self.new_shape[0] - (perturbed_x_ // 2 + 1) 496 | 497 | perturbed_y_ = max(self.new_shape[1] - (perturbed_y_max - perturbed_y_min), 0) 498 | perturbed_y_min = perturbed_y_ // 2 499 | perturbed_y_max = self.new_shape[1] - perturbed_y_ // 2 if perturbed_y_%2 == 0 else self.new_shape[1] - (perturbed_y_ // 2 + 1) 500 | 501 
| '''clip 502 | perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = -1, -1, self.new_shape[0], self.new_shape[1] 503 | for x in range(self.new_shape[0] // 2, perturbed_x_max): 504 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and perturbed_x_max - 1 > x: 505 | perturbed_x_max = x 506 | break 507 | for x in range(self.new_shape[0] // 2, perturbed_x_min, -1): 508 | if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and x > 0: 509 | perturbed_x_min = x 510 | break 511 | for y in range(self.new_shape[1] // 2, perturbed_y_max): 512 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and perturbed_y_max - 1 > y: 513 | perturbed_y_max = y 514 | break 515 | for y in range(self.new_shape[1] // 2, perturbed_y_min, -1): 516 | if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and y > 0: 517 | perturbed_y_min = y 518 | break 519 | 520 | 521 | center_x, center_y = perturbed_x_min+(perturbed_x_max - perturbed_x_min)//2, perturbed_y_min+(perturbed_y_max - perturbed_y_min)//2 522 | 523 | perfix_ = self.save_suffix+'_'+str(m)+'_'+str(n) 524 | 525 | self.new_shape = save_img_shape 526 | 527 | perturbed_x_ = max(self.new_shape[0] - (perturbed_x_max - perturbed_x_min), 0) 528 | perturbed_x_min = perturbed_x_ // 2 529 | perturbed_x_max = self.new_shape[0] - perturbed_x_ // 2 if perturbed_x_%2 == 0 else self.new_shape[0] - (perturbed_x_ // 2 + 1) 530 | 531 | perturbed_y_ = max(self.new_shape[1] - (perturbed_y_max - perturbed_y_min), 0) 532 | perturbed_y_min = perturbed_y_ // 2 533 | perturbed_y_max = self.new_shape[1] - perturbed_y_ // 2 if perturbed_y_%2 == 0 else self.new_shape[1] - (perturbed_y_ // 2 + 1) 534 | 535 | self.synthesis_perturbed_img = self.synthesis_perturbed_img[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2, :].copy() 536 | self.synthesis_perturbed_label = 
self.synthesis_perturbed_label[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2, :].copy() 537 | self.foreORbackground_label = self.foreORbackground_label[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2].copy() 538 | 539 | ''' 540 | 541 | 542 | 543 | '''save''' 544 | pixel_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(self.new_shape[0], self.new_shape[1], 2) 545 | 546 | if relativeShift_position == 'relativeShift_v2': 547 | self.synthesis_perturbed_label -= pixel_position 548 | fiducial_points_coordinate -= [center_x - self.new_shape[0] // 2, center_y - self.new_shape[1] // 2] 549 | 550 | self.synthesis_perturbed_label[:, :, 0] *= self.foreORbackground_label 551 | self.synthesis_perturbed_label[:, :, 1] *= self.foreORbackground_label 552 | self.synthesis_perturbed_img[:, :, 0] *= self.foreORbackground_label 553 | self.synthesis_perturbed_img[:, :, 1] *= self.foreORbackground_label 554 | self.synthesis_perturbed_img[:, :, 2] *= self.foreORbackground_label 555 | 556 | 557 | ''' 558 | synthesis_perturbed_img_filter = self.synthesis_perturbed_img.copy() 559 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 560 | # if self.is_perform(0.9, 0.1) or repeat_time > 5: 561 | # # if self.is_perform(0.1, 0.9) and repeat_time > 9: 562 | # # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (7, 7), 0) 563 | # # else: 564 | # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (5, 5), 0) 565 | # else: 566 | # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 567 | self.synthesis_perturbed_img[self.foreORbackground_label == 1] = synthesis_perturbed_img_filter[self.foreORbackground_label == 1] 568 | ''' 569 | '''HSV_v2''' 570 | perturbed_bg_img = 
perturbed_bg_img.astype(np.float32) 571 | # if self.is_perform(1, 0): 572 | # if self.is_perform(1, 0): 573 | if self.is_perform(0.1, 0.9): 574 | if self.is_perform(0.2, 0.8): 575 | synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy() 576 | 577 | synthesis_perturbed_img_clip_HSV = self.HSV_v1(synthesis_perturbed_img_clip_HSV) 578 | 579 | perturbed_bg_img[:, :, 0] *= 1-self.foreORbackground_label 580 | perturbed_bg_img[:, :, 1] *= 1-self.foreORbackground_label 581 | perturbed_bg_img[:, :, 2] *= 1-self.foreORbackground_label 582 | 583 | synthesis_perturbed_img_clip_HSV += perturbed_bg_img 584 | self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV 585 | else: 586 | perturbed_bg_img_HSV = perturbed_bg_img 587 | perturbed_bg_img_HSV = self.HSV_v1(perturbed_bg_img_HSV) 588 | 589 | perturbed_bg_img_HSV[:, :, 0] *= 1-self.foreORbackground_label 590 | perturbed_bg_img_HSV[:, :, 1] *= 1-self.foreORbackground_label 591 | perturbed_bg_img_HSV[:, :, 2] *= 1-self.foreORbackground_label 592 | 593 | self.synthesis_perturbed_img += perturbed_bg_img_HSV 594 | # self.synthesis_perturbed_img[np.sum(self.synthesis_perturbed_img, 2) == 771] = perturbed_bg_img_HSV[np.sum(self.synthesis_perturbed_img, 2) == 771] 595 | 596 | else: 597 | synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy() 598 | perturbed_bg_img[:, :, 0] *= 1 - self.foreORbackground_label 599 | perturbed_bg_img[:, :, 1] *= 1 - self.foreORbackground_label 600 | perturbed_bg_img[:, :, 2] *= 1 - self.foreORbackground_label 601 | 602 | synthesis_perturbed_img_clip_HSV += perturbed_bg_img 603 | 604 | synthesis_perturbed_img_clip_HSV = self.HSV_v1(synthesis_perturbed_img_clip_HSV) 605 | 606 | self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV 607 | 608 | '''''' 609 | # cv2.imwrite(self.save_path+'clip/'+perfix_+'_'+fold_curve+str(perturbed_time)+'-'+str(repeat_time)+'.png', synthesis_perturbed_img_clip) 610 | 611 | 
self.synthesis_perturbed_img[self.synthesis_perturbed_img < 0] = 0 612 | self.synthesis_perturbed_img[self.synthesis_perturbed_img > 255] = 255 613 | self.synthesis_perturbed_img = np.around(self.synthesis_perturbed_img).astype(np.uint8) 614 | label = np.zeros_like(self.synthesis_perturbed_img, dtype=np.float32) 615 | label[:, :, :2] = self.synthesis_perturbed_label 616 | label[:, :, 2] = self.foreORbackground_label 617 | 618 | # grey = np.around(self.synthesis_perturbed_img[:, :, 0] * 0.2989 + self.synthesis_perturbed_img[:, :, 1] * 0.5870 + self.synthesis_perturbed_img[:, :, 0] * 0.1140).astype(np.int16) 619 | # synthesis_perturbed_grey = np.concatenate((grey.reshape(self.new_shape[0], self.new_shape[1], 1), label), axis=2) 620 | synthesis_perturbed_color = np.concatenate((self.synthesis_perturbed_img, label), axis=2) 621 | 622 | self.synthesis_perturbed_color = np.zeros_like(synthesis_perturbed_color, dtype=np.float32) 623 | # self.synthesis_perturbed_grey = np.zeros_like(synthesis_perturbed_grey, dtype=np.float32) 624 | reduce_value_x = int(round(min((random.random() / 2) * (self.new_shape[0] - (perturbed_x_max - perturbed_x_min)), min(reduce_value, reduce_value_v2)))) 625 | reduce_value_y = int(round(min((random.random() / 2) * (self.new_shape[1] - (perturbed_y_max - perturbed_y_min)), min(reduce_value, reduce_value_v2)))) 626 | perturbed_x_min = max(perturbed_x_min - reduce_value_x, 0) 627 | perturbed_x_max = min(perturbed_x_max + reduce_value_x, self.new_shape[0]) 628 | perturbed_y_min = max(perturbed_y_min - reduce_value_y, 0) 629 | perturbed_y_max = min(perturbed_y_max + reduce_value_y, self.new_shape[1]) 630 | 631 | if im_lr >= im_ud: 632 | self.synthesis_perturbed_color[:, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_color[:, perturbed_y_min:perturbed_y_max, :] 633 | # self.synthesis_perturbed_grey[:, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_grey[:, perturbed_y_min:perturbed_y_max, :] 634 | else: 635 | 
self.synthesis_perturbed_color[perturbed_x_min:perturbed_x_max, :, :] = synthesis_perturbed_color[perturbed_x_min:perturbed_x_max, :, :] 636 | # self.synthesis_perturbed_grey[perturbed_x_min:perturbed_x_max, :, :] = synthesis_perturbed_grey[perturbed_x_min:perturbed_x_max, :, :] 637 | '''blur''' 638 | if self.is_perform(1, 0): 639 | synthesis_perturbed_img_filter = self.synthesis_perturbed_color[:, :, :3].copy() 640 | if self.is_perform(0.2, 0.8): 641 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (5, 5), 0) 642 | else: 643 | synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0) 644 | if self.is_perform(0.5, 0.5): 645 | self.synthesis_perturbed_color[:, :, :3][self.synthesis_perturbed_color[:, :, 5] == 1] = synthesis_perturbed_img_filter[self.synthesis_perturbed_color[:, :, 5] == 1] 646 | else: 647 | self.synthesis_perturbed_color[:, :, :3] = synthesis_perturbed_img_filter 648 | 649 | fiducial_points_coordinate = fiducial_points_coordinate[:, :, ::-1] 650 | '''draw fiducial points''' 651 | stepSize = 0 652 | fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_color[:, :, :3].copy() 653 | for l in fiducial_points_coordinate.astype(np.int64).reshape(-1, 2): 654 | cv2.circle(fiducial_points_synthesis_perturbed_img, (l[0] + math.ceil(stepSize / 2), l[1] + math.ceil(stepSize / 2)), 2, (0, 0, 255), -1) 655 | cv2.imwrite(self.save_path + 'fiducial_points/' + perfix_ + '_' + fold_curve + '.png', fiducial_points_synthesis_perturbed_img) 656 | 657 | cv2.imwrite(self.save_path + 'png/' + perfix_ + '_' + fold_curve + '.png', self.synthesis_perturbed_color[:, :, :3]) 658 | 659 | '''forward-begin''' 660 | self.forward_mapping = np.full((save_img_shape[0], save_img_shape[1], 2), 0, dtype=np.float32) 661 | forward_mapping = np.full((save_img_shape[0], save_img_shape[1], 2), 0, dtype=np.float32) 662 | forward_position = (self.synthesis_perturbed_color[:, :, 3:5] + 
pixel_position)[self.synthesis_perturbed_color[:, :, 5] != 0, :] 663 | flat_position = np.argwhere(np.zeros(save_img_shape, dtype=np.uint32) == 0) 664 | vtx, wts = self.interp_weights(forward_position, flat_position) 665 | wts_sum = np.abs(wts).sum(-1) 666 | wts = wts[wts_sum <= 1, :] 667 | vtx = vtx[wts_sum <= 1, :] 668 | flat_position_forward = flat_position.reshape(save_img_shape[0], save_img_shape[1], 2)[self.synthesis_perturbed_color[:, :, 5] != 0, :] 669 | forward_mapping.reshape(save_img_shape[0] * save_img_shape[1], 2)[wts_sum <= 1, :] = self.interpolate(flat_position_forward, vtx, wts) 670 | forward_mapping = forward_mapping.reshape(save_img_shape[0], save_img_shape[1], 2) 671 | 672 | mapping_x_min_, mapping_y_min_, mapping_x_max_, mapping_y_max_ = self.adjust_position_v2(0, 0, im_lr, im_ud, self.new_shape) 673 | shreshold_zoom_out = 2 674 | mapping_x_min = mapping_x_min_ + shreshold_zoom_out 675 | mapping_y_min = mapping_y_min_ + shreshold_zoom_out 676 | mapping_x_max = mapping_x_max_ - shreshold_zoom_out 677 | mapping_y_max = mapping_y_max_ - shreshold_zoom_out 678 | self.forward_mapping[mapping_x_min:mapping_x_max, mapping_y_min:mapping_y_max] = forward_mapping[mapping_x_min:mapping_x_max, mapping_y_min:mapping_y_max] 679 | self.scan_img = np.full((save_img_shape[0], save_img_shape[1], 3), 0, dtype=np.float32) 680 | self.scan_img[mapping_x_min_:mapping_x_max_, mapping_y_min_:mapping_y_max_] = self.origin_img 681 | self.origin_img = self.scan_img 682 | # flat_img = np.full((save_img_shape[0], save_img_shape[1], 3), 0, dtype=np.float32) 683 | # cv2.remap(self.synthesis_perturbed_color[:, :, :3], self.forward_mapping[:, :, 1], self.forward_mapping[:, :, 0], cv2.INTER_LINEAR, flat_img) 684 | # cv2.imwrite(self.save_path + 'outputs/1.jpg', flat_img) 685 | '''forward-end''' 686 | 687 | '''image and label 688 | synthesis_perturbed_data = { 689 | 'fiducial_points': fiducial_points_coordinate, 690 | 'segment': np.array((segment_x, segment_y)) 691 | } 692 | 
cv2.imwrite(self.save_path + 'png/' + perfix_ + '_' + fold_curve + '.png', self.synthesis_perturbed_color[:, :, :3]) 693 | ''' 694 | '''or''' 695 | synthesis_perturbed_data = { 696 | 'image':self.synthesis_perturbed_color[:, :, :3], 697 | 'fiducial_points': fiducial_points_coordinate, 698 | 'segment': np.array((segment_x, segment_y)) 699 | } 700 | 701 | with open(self.save_path+'color/'+perfix_+'_'+fold_curve+'.gw', 'wb') as f: 702 | pickle_perturbed_data = pickle.dumps(synthesis_perturbed_data) 703 | f.write(pickle_perturbed_data) 704 | # with open(self.save_path+'grey/'+perfix_+'_'+fold_curve+'.gw', 'wb') as f: 705 | # pickle_perturbed_data = pickle.dumps(self.synthesis_perturbed_grey) 706 | # f.write(pickle_perturbed_data) 707 | # cv2.imwrite(self.save_path+'grey_im/'+perfix_+'_'+fold_curve+'.png', self.synthesis_perturbed_color[:, :, :1]) 708 | 709 | 710 | # cv2.imwrite(self.save_path + 'scan/' + self.save_suffix + '_' + str(m) + '.png', self.origin_img) 711 | trian_t = time.time() - begin_train 712 | mm, ss = divmod(trian_t, 60) 713 | hh, mm = divmod(mm, 60) 714 | print(str(m)+'_'+str(n)+'_'+fold_curve+' '+str(repeat_time)+" Time : %02d:%02d:%02d\n" % (hh, mm, ss)) 715 | 716 | 717 | def multiThread(m, n, img_path_, bg_path_, save_path, save_suffix): 718 | saveFold = perturbed(img_path_, bg_path_, save_path, save_suffix) 719 | saveCurve = perturbed(img_path_, bg_path_, save_path, save_suffix) 720 | 721 | repeat_time = min(max(round(np.random.normal(10, 3)), 5), 16) 722 | fold = threading.Thread(target=saveFold.save_img, args=(m, n, 'fold', repeat_time, 'relativeShift_v2'), name='fold') 723 | curve = threading.Thread(target=saveCurve.save_img, args=(m, n, 'curve', repeat_time, 'relativeShift_v2'), name='curve') 724 | 725 | fold.start() 726 | curve.start() 727 | curve.join() 728 | fold.join() 729 | def xgw(args): 730 | path = args.path 731 | bg_path = args.bg_path 732 | if args.output_path is None: 733 | save_path = 
'/lustre/home/gwxie/data/unwarp_new/train/fiducial1024/fiducial1024_v2_a3/' 734 | else: 735 | save_path = args.output_path 736 | 737 | 738 | # if not os.path.exists(save_path + 'grey/'): 739 | # os.makedirs(save_path + 'grey/') 740 | if not os.path.exists(save_path + 'color/'): 741 | os.makedirs(save_path + 'color/') 742 | 743 | if not os.path.exists(save_path + 'fiducial_points/'): 744 | os.makedirs(save_path + 'fiducial_points/') 745 | 746 | if not os.path.exists(save_path + 'png/'): 747 | os.makedirs(save_path + 'png/') 748 | 749 | # if not os.path.exists(save_path + 'scan/'): 750 | # os.makedirs(save_path + 'scan/') 751 | 752 | if not os.path.exists(save_path + 'outputs/'): 753 | os.makedirs(save_path + 'outputs/') 754 | 755 | save_suffix = str.split(args.path, '/')[-2] 756 | 757 | all_img_path = getDatasets(path) 758 | all_bgImg_path = getDatasets(bg_path) 759 | global begin_train 760 | begin_train = time.time() 761 | # img_path_ = '/lustre/home/gwxie/data/unwarp_new/train/origin_datasets/DL01/b04011301.bmp' 762 | # img_path_ = '/lustre/home/gwxie/data/unwarp_new/train/origin_datasets/validate/FGHJOP.jpg' 763 | # img_path_ = '/lustre/home/gwxie/data/unwarp_new/train/origin_datasets/Maurdor/F9/GUZFUD.jpg' 764 | # bg_path_ = bg_path + random.choice(all_bgImg_path) + '/' 765 | # save_perturbed = perturbed(img_path_, bg_path_, save_path, save_suffix) 766 | # save_perturbed.save_img(0, 400, 'curve',1, relativeShift_position='relativeShift_v2') 767 | fiducial_points = 61 # 31 768 | process_pool = Pool(2) 769 | for m, img_path in enumerate(all_img_path): 770 | for n in range(args.sys_num, args.sys_num_e): 771 | img_path_ = path+img_path 772 | bg_path_ = bg_path+random.choice(all_bgImg_path)+'/' 773 | 774 | for m_n in range(10): 775 | try: 776 | saveFold = perturbed(img_path_, bg_path_, save_path, save_suffix) 777 | saveCurve = perturbed(img_path_, bg_path_, save_path, save_suffix) 778 | 779 | repeat_time = min(max(round(np.random.normal(14, 4)), 1), 24) 780 | # 
repeat_time = min(max(round(np.random.normal(8, 4)), 1), 12) # random.randint(1, 2) # min(max(round(np.random.normal(8, 4)), 1), 12) 781 | process_pool.apply_async(func=saveFold.save_img, args=(m, n, 'fold', repeat_time, fiducial_points, 'relativeShift_v2')) 782 | 783 | repeat_time = min(max(round(np.random.normal(4, 2)), 1), 10) 784 | # repeat_time = min(max(round(np.random.normal(6, 4)), 1), 10) 785 | process_pool.apply_async(func=saveCurve.save_img, args=(m, n, 'curve', repeat_time, fiducial_points, 'relativeShift_v2')) 786 | 787 | except BaseException as err: 788 | print(err) 789 | continue 790 | break 791 | # print('end') 792 | 793 | process_pool.close() 794 | process_pool.join() 795 | 796 | if __name__ == '__main__': 797 | 798 | parser = argparse.ArgumentParser(description='Hyperparams') 799 | parser.add_argument('--path', 800 | default='./scan/new/', type=str, 801 | help='the path of origin img.') 802 | parser.add_argument('--bg_path', 803 | default='./background/', type=str, 804 | help='the path of bg img.') 805 | 806 | parser.add_argument('--output_path', 807 | default='./output/', type=str, 808 | help='the path of origin img.') 809 | # parser.set_defaults(output_path='test') 810 | parser.add_argument('--count_from', '-p', default=0, type=int, 811 | metavar='N', help='print frequency (default: 10)') # print frequency 812 | 813 | parser.add_argument('--repeat_T', default=0, type=int) 814 | 815 | parser.add_argument('--sys_num', default=17, type=int) 816 | parser.add_argument('--sys_num_e', default=22, type=int) 817 | 818 | args = parser.parse_args() 819 | xgw(args) 820 | -------------------------------------------------------------------------------- /scan/new/1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/scan/new/1.jpg 
-------------------------------------------------------------------------------- /scan/new/390bf8b615242318906029b8a6c71ad4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/scan/new/390bf8b615242318906029b8a6c71ad4.png -------------------------------------------------------------------------------- /scan/new/3d-paper-snowflake-template-24-paper-cutting-templates-pdf-doc-psd-vector-eps-fur-3d-paper-snowflake-template.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/scan/new/3d-paper-snowflake-template-24-paper-cutting-templates-pdf-doc-psd-vector-eps-fur-3d-paper-snowflake-template.jpeg -------------------------------------------------------------------------------- /scan/new/660911ENT039.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/scan/new/660911ENT039.jpg -------------------------------------------------------------------------------- /scan/new/Battle-Creek-Grand-Opening-Flyer-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gwxie/Synthesize-Distorted-Image-and-Its-Control-Points/04bf68ef0368b8360256ce730f82437146e575bd/scan/new/Battle-Creek-Grand-Opening-Flyer-1.jpg -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import numpy as np 3 | import random 4 | import scipy.spatial.qhull as qhull 5 | import math 6 | import cv2 7 | 8 | 
class BasePerturbed(object):
	"""Shared helpers for the synthetic document-warping generators.

	The `perturbed` classes in the generation scripts subclass this to get
	normalisation, pixel/coordinate interpolation, margin and position
	bookkeeping, border padding, and random HSV jitter.
	"""
	# d = np.abs(sk_normalize(d, norm='l2'))

	def get_normalize(self, d):
		"""Return *d* standardised to zero mean and unit variance."""
		E = np.mean(d)
		std = np.std(d)
		d = (d - E) / std
		# d = preprocessing.normalize(d, norm='l2')
		return d

	def get_0_1_d(self, d, new_max=1, new_min=0):
		"""Min-max rescale *d* into [new_min, new_max].

		NOTE(review): a constant input (d_max == d_min) divides by zero and
		yields NaN/inf, matching the historical behaviour — confirm callers
		never pass a flat array before adding a guard here.
		"""
		d_min = np.min(d)
		d_max = np.max(d)
		d = ((d - d_min) / (d_max - d_min)) * (new_max - new_min) + new_min
		return d

	def draw_distance_hotmap(self, distance_vertex_line):
		"""Debug helper: display *distance_vertex_line* as a heatmap."""
		plt.matshow(distance_vertex_line, cmap='autumn')
		plt.colorbar()
		plt.show()

	def get_pixel(self, p, origin_img):
		"""Read pixel p=(row, col) from *origin_img*.

		Returns the sentinel [257, 257, 257] when p is past the image bounds
		(257 cannot occur in 8-bit data). Negative indices wrap, as with any
		numpy indexing — unchanged from the original behaviour.
		"""
		try:
			return origin_img[p[0], p[1]]
		except IndexError:  # was a bare except; only out-of-range is expected
			# print('out !')
			return np.array([257, 257, 257])

	def nearest_neighbor_interpolation(self, xy, new_origin_img):
		"""Return (pixel, valid): valid is False when the pixel equals the
		all-256 'unfilled' marker used by the generators."""
		# xy = np.around(xy_).astype(np.int)
		origin_pixel = self.get_pixel([xy[0], xy[1]], new_origin_img)
		if (origin_pixel == 256).all():
			return origin_pixel, False
		return origin_pixel, True

	def bilinear_interpolation(self, xy_, new_origin_img):
		"""Return the four weighted corner contributions for the fractional
		coordinate *xy_*; the caller sums them to get the interpolated pixel."""
		xy_int = [int(xy_[0]), int(xy_[1])]
		xy_decimal = [round(xy_[0] - xy_int[0], 5), round(xy_[1] - xy_int[1], 5)]
		x0_y0 = (1 - xy_decimal[0]) * (1 - xy_decimal[1]) * self.get_pixel([xy_int[0], xy_int[1]], new_origin_img)
		x0_y1 = (1 - xy_decimal[0]) * (xy_decimal[1]) * self.get_pixel([xy_int[0], xy_int[1] + 1], new_origin_img)
		x1_y0 = (xy_decimal[0]) * (1 - xy_decimal[1]) * self.get_pixel([xy_int[0] + 1, xy_int[1]], new_origin_img)
		x1_y1 = (xy_decimal[0]) * (xy_decimal[1]) * self.get_pixel([xy_int[0] + 1, xy_int[1] + 1], new_origin_img)
		return x0_y0, x0_y1, x1_y0, x1_y1

	def get_coor(self, p, origin_label):
		"""Read the label coordinate at p=(row, col); [0, 0] marks out-of-range."""
		try:
			return origin_label[p[0], p[1]]
		except IndexError:  # was a bare except; only out-of-range is expected
			# print('out !')
			return np.array([0, 0])

	def bilinear_interpolation_coordinate_v4(self, xy_, new_origin_img):
		"""Bilinearly interpolate a coordinate map at fractional *xy_*.

		Corners whose x (resp. y) component is 0 are treated as unset and
		excluded; the remaining weights are renormalised per component.
		Returns np.array([x, y]) — 0 for a component with no valid corner.
		"""
		xy_int = [int(xy_[0]), int(xy_[1])]
		xy_decimal = [round(xy_[0] - xy_int[0], 5), round(xy_[1] - xy_int[1], 5)]
		x0, x1, x2, x3 = 0, 0, 0, 0
		y0, y1, y2, y3 = 0, 0, 0, 0
		x0_y0 = self.get_coor(np.array([xy_int[0], xy_int[1]]), new_origin_img)
		x0_y1 = self.get_coor(np.array([xy_int[0], xy_int[1] + 1]), new_origin_img)
		x1_y0 = self.get_coor(np.array([xy_int[0] + 1, xy_int[1]]), new_origin_img)
		x1_y1 = self.get_coor(np.array([xy_int[0] + 1, xy_int[1] + 1]), new_origin_img)

		# Row-direction weights, kept only where the corner's x is set.
		if x0_y0[0] != 0:
			x0 = (1 - xy_decimal[0])
		if x0_y1[0] != 0:
			x1 = (1 - xy_decimal[0])
		if x1_y0[0] != 0:
			x2 = (xy_decimal[0])
		if x1_y1[0] != 0:
			x3 = (xy_decimal[0])

		# Column-direction weights, kept only where the corner's y is set.
		if x0_y0[1] != 0:
			y0 = (1 - xy_decimal[1])
		if x0_y1[1] != 0:
			y1 = (xy_decimal[1])
		if x1_y0[1] != 0:
			y2 = (1 - xy_decimal[1])
		if x1_y1[1] != 0:
			y3 = (xy_decimal[1])

		x_ = x0 + x1 + x2 + x3
		if x_ == 0:
			x = 0
		else:
			x = x0 / x_ * x0_y0[0] + x1 / x_ * x0_y1[0] + x2 / x_ * x1_y0[0] + x3 / x_ * x1_y1[0]

		y_ = y0 + y1 + y2 + y3
		if y_ == 0:
			y = 0
		else:
			y = y0 / y_ * x0_y0[1] + y1 / y_ * x0_y1[1] + y2 / y_ * x1_y0[1] + y3 / y_ * x1_y1[1]

		return np.array([x, y])

	def is_perform(self, execution, inexecution):
		"""Bernoulli draw: True with probability execution/(execution+inexecution)."""
		return random.choices([True, False], weights=[execution, inexecution])[0]

	def get_margin_scale(self, min_, max_, clip_add_margin, new_shape):
		"""Split *clip_add_margin* into (subtract, plus) margins around
		[min_, max_] so the padded span stays inside [0, new_shape].

		Returns (-1, -1) when the margin is negative or cannot fit.
		Fix: the first branch previously used strict inequalities, so a
		padded span that exactly touched 0 or new_shape fell through every
		branch and was rejected; it is now accepted.
		"""
		if clip_add_margin < 0:
			# raise Exception('add margin error')
			return -1, -1
		if min_ - clip_add_margin // 2 >= 0 and max_ + clip_add_margin // 2 <= new_shape:
			# Fits symmetrically; an odd margin puts the extra pixel on the plus side.
			if clip_add_margin % 2 == 0:
				clip_subtract_margin, clip_plus_margin = clip_add_margin // 2, clip_add_margin // 2
			else:
				clip_subtract_margin, clip_plus_margin = clip_add_margin // 2, clip_add_margin // 2 + 1
		elif min_ - clip_add_margin // 2 < 0 and max_ + clip_add_margin // 2 <= new_shape:
			# Clamped at the low edge: take everything below min_, rest above.
			clip_subtract_margin = min_
			clip_plus_margin = clip_add_margin - clip_subtract_margin
		elif max_ + clip_add_margin // 2 > new_shape and min_ - clip_add_margin // 2 >= 0:
			# Clamped at the high edge: take everything above max_, rest below.
			clip_plus_margin = new_shape - max_
			clip_subtract_margin = clip_add_margin - clip_plus_margin
		else:
			# raise Exception('add margin error')
			return -1, -1
		return clip_subtract_margin, clip_plus_margin

	def adjust_position(self, x_min, y_min, x_max, y_max):
		"""Centre the patch inside self.new_shape (see adjust_position_v2)."""
		# Was a verbatim copy of adjust_position_v2 with new_shape=self.new_shape.
		return self.adjust_position_v2(x_min, y_min, x_max, y_max, self.new_shape)

	def adjust_position_v2(self, x_min, y_min, x_max, y_max, new_shape):
		"""Centre a (x_max-x_min) x (y_max-y_min) patch inside *new_shape*.

		Returns (x_min_new, y_min_new, x_max_new, y_max_new); when the slack
		on an axis is odd, the extra pixel of slack is left on the max side.
		"""
		if (new_shape[0] - (x_max - x_min)) % 2 == 0:
			f_g_0_0 = (new_shape[0] - (x_max - x_min)) // 2
			f_g_0_1 = f_g_0_0
		else:
			f_g_0_0 = (new_shape[0] - (x_max - x_min)) // 2
			f_g_0_1 = f_g_0_0 + 1

		if (new_shape[1] - (y_max - y_min)) % 2 == 0:
			f_g_1_0 = (new_shape[1] - (y_max - y_min)) // 2
			f_g_1_1 = f_g_1_0
		else:
			f_g_1_0 = (new_shape[1] - (y_max - y_min)) // 2
			f_g_1_1 = f_g_1_0 + 1

		# return f_g_0_0, f_g_0_1, f_g_1_0, f_g_1_1
		return f_g_0_0, f_g_1_0, new_shape[0] - f_g_0_1, new_shape[1] - f_g_1_1

	def adjust_border(self, x_min, y_min, x_max, y_max, x_min_new, y_min_new, x_max_new, y_max_new):
		"""Return the four margins by which the new box is smaller than the
		old one on each axis; an odd difference gives the second margin the
		extra pixel."""
		if ((x_max - x_min) - (x_max_new - x_min_new)) % 2 == 0:
			f_g_0_0 = ((x_max - x_min) - (x_max_new - x_min_new)) // 2
			f_g_0_1 = f_g_0_0
		else:
			f_g_0_0 = ((x_max - x_min) - (x_max_new - x_min_new)) // 2
			f_g_0_1 = f_g_0_0 + 1

		if ((y_max - y_min) - (y_max_new - y_min_new)) % 2 == 0:
			f_g_1_0 = ((y_max - y_min) - (y_max_new - y_min_new)) // 2
			f_g_1_1 = f_g_1_0
		else:
			f_g_1_0 = ((y_max - y_min) - (y_max_new - y_min_new)) // 2
			f_g_1_1 = f_g_1_0 + 1

		return f_g_0_0, f_g_0_1, f_g_1_0, f_g_1_1

	def interp_weights(self, xyz, uvw):
		"""Precompute Delaunay barycentric interpolation weights.

		Triangulates the scattered points *xyz* and, for each query point in
		*uvw*, returns the 3 vertex indices of its containing simplex and the
		3 barycentric weights (third weight = 1 - sum of the first two).
		"""
		tri = qhull.Delaunay(xyz)
		simplex = tri.find_simplex(uvw)
		vertices = np.take(tri.simplices, simplex, axis=0)
		# pixel_triangle = pixel[tri.simplices]
		temp = np.take(tri.transform, simplex, axis=0)
		delta = uvw - temp[:, 2]
		bary = np.einsum('njk,nk->nj', temp[:, :2, :], delta)
		return vertices, np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True)))

	def interpolate(self, values, vtx, wts):
		"""Apply weights from interp_weights: weighted sum of per-vertex values."""
		return np.einsum('njk,nj->nk', np.take(values, vtx, axis=0), wts)

	def pad(self, synthesis_perturbed_img_map, x_min, y_min, x_max, y_max):
		"""Replicate the border of region [x_min:x_max, y_min:y_max] one pixel
		outwards (edge rows, edge columns, then the four corners).

		Fix: the left/right column copies previously read from the
		destination column itself (a no-op self-assignment); they now
		replicate the y_min / y_max columns, mirroring the row copies.
		Modifies and returns *synthesis_perturbed_img_map*.
		"""
		synthesis_perturbed_img_map[x_min - 1, y_min:y_max] = synthesis_perturbed_img_map[x_min, y_min:y_max]
		synthesis_perturbed_img_map[x_max + 1, y_min:y_max] = synthesis_perturbed_img_map[x_max, y_min:y_max]
		synthesis_perturbed_img_map[x_min:x_max, y_min - 1] = synthesis_perturbed_img_map[x_min:x_max, y_min]
		synthesis_perturbed_img_map[x_min:x_max, y_max + 1] = synthesis_perturbed_img_map[x_min:x_max, y_max]
		synthesis_perturbed_img_map[x_min - 1, y_min - 1] = synthesis_perturbed_img_map[x_min, y_min]
		synthesis_perturbed_img_map[x_min - 1, y_max + 1] = synthesis_perturbed_img_map[x_min, y_max]
		synthesis_perturbed_img_map[x_max + 1, y_min - 1] = synthesis_perturbed_img_map[x_max, y_min]
		synthesis_perturbed_img_map[x_max + 1, y_max + 1] = synthesis_perturbed_img_map[x_max, y_max]

		return synthesis_perturbed_img_map

	def isSavePerturbed(self, synthesis_perturbed_img, new_shape):
		"""True when all four image borders are still pure background, i.e.
		every border pixel sums to 771 (= 257*3, the get_pixel sentinel),
		meaning the warped page stayed inside the canvas."""
		if np.sum(synthesis_perturbed_img[:, 0]) != 771 * new_shape[0] or np.sum(synthesis_perturbed_img[:, new_shape[1] - 1]) != 771 * new_shape[0] or \
				np.sum(synthesis_perturbed_img[0, :]) != 771 * new_shape[1] or np.sum(synthesis_perturbed_img[new_shape[0] - 1, :]) != 771 * new_shape[1]:
			# raise Exception('clip error')
			return False
		else:
			return True

	def get_angle(self, A, o, B):
		"""Angle A-o-B at vertex *o*, in degrees."""
		v1 = o - A
		v2 = o - B
		return np.arccos((v1 @ v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))) * 180 / np.pi

	def get_angle_4(self, pts):
		"""Interior angles at the four corners pts[0..3] of a quadrilateral."""
		a0_ = self.get_angle(pts[2], pts[0], pts[1])
		a1_ = self.get_angle(pts[0], pts[1], pts[3])
		a2_ = self.get_angle(pts[3], pts[2], pts[0])
		a3_ = self.get_angle(pts[1], pts[3], pts[2])
		return a0_, a1_, a2_, a3_

	def HSV_v1(self, synthesis_perturbed_img_clip_HSV):
		"""Randomly jitter hue and value of an RGB image and return it.

		20% of the time the hue shift is drawn from +/-180, otherwise from
		+/-20; value is shifted by up to +/-30 and clipped to [0, 255].
		(Hue wraps modulo 360 — float input gives OpenCV's [0, 360) hue range.)
		"""
		synthesis_perturbed_img_clip_HSV = cv2.cvtColor(synthesis_perturbed_img_clip_HSV, cv2.COLOR_RGB2HSV)
		img_h = synthesis_perturbed_img_clip_HSV[:, :, 0].copy()
		# img_s = synthesis_perturbed_img_clip_HSV[:, :, 1].copy()
		img_v = synthesis_perturbed_img_clip_HSV[:, :, 2].copy()

		if self.is_perform(0.2, 0.8):
			img_h = (img_h + (random.random() - 0.5) * 360) % 360
		else:
			img_h = (img_h + (random.random() - 0.5) * 40) % 360
		img_v = np.minimum(np.maximum(img_v + (random.random() - 0.5) * 60, 0), 255)

		synthesis_perturbed_img_clip_HSV[:, :, 0] = img_h
		# synthesis_perturbed_img_clip_HSV[:, :, 1] = img_s
		synthesis_perturbed_img_clip_HSV[:, :, 2] = img_v

		synthesis_perturbed_img_clip_HSV = cv2.cvtColor(synthesis_perturbed_img_clip_HSV, cv2.COLOR_HSV2RGB)

		return synthesis_perturbed_img_clip_HSV
--------------------------------------------------------------------------------