├── Add_colortoimg ├── add_color.m ├── add_color.py └── skimage-color │ ├── __init__.py │ ├── __init__.pyc │ ├── adapt_rgb.py │ ├── adapt_rgb.pyc │ ├── colorconv.py │ ├── colorconv.pyc │ ├── colorlabel.py │ ├── colorlabel.pyc │ ├── delta_e.py │ ├── delta_e.pyc │ ├── rgb_colors.py │ ├── rgb_colors.pyc │ └── tests │ ├── __init__.py │ ├── __init__.pyc │ ├── ciede2000_test_data.txt │ ├── data │ ├── lab_array_a_2.npy │ ├── lab_array_d50_10.npy │ ├── lab_array_d50_2.npy │ ├── lab_array_d55_10.npy │ ├── lab_array_d55_2.npy │ ├── lab_array_d65_10.npy │ ├── lab_array_d65_2.npy │ ├── lab_array_d75_10.npy │ ├── lab_array_d75_2.npy │ ├── lab_array_e_2.npy │ ├── luv_array_a_2.npy │ ├── luv_array_d50_10.npy │ ├── luv_array_d50_2.npy │ ├── luv_array_d55_10.npy │ ├── luv_array_d55_2.npy │ ├── luv_array_d65_10.npy │ ├── luv_array_d65_2.npy │ ├── luv_array_d75_10.npy │ ├── luv_array_d75_2.npy │ └── luv_array_e_2.npy │ ├── test_adapt_rgb.py │ ├── test_adapt_rgb.pyc │ ├── test_colorconv.py │ ├── test_colorconv.pyc │ ├── test_colorlabel.py │ ├── test_colorlabel.pyc │ ├── test_delta_e.py │ └── test_delta_e.pyc ├── Image_resize ├── scale_image.py ├── single_iamge.py └── whole_image.py ├── README.md ├── data_augement ├── dataAugment.py ├── jitering.py └── keras_dataAug.py └── post_process ├── 24png_8png.m └── image_channel.py /Add_colortoimg/add_color.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/add_color.m -------------------------------------------------------------------------------- /Add_colortoimg/add_color.py: -------------------------------------------------------------------------------- 1 | #!usr/bin/python 2 | # -*- coding:utf-8 -*- 3 | import PIL.Image 4 | import numpy as np 5 | from skimage import io,data,color 6 | import matplotlib.pyplot as plt 7 | 8 | img = PIL.Image.open('xxx.png') 9 | img = np.array(img) 10 | dst = color.label2rgb(img, bg_label=0, bg_color=(0, 0, 0)) 11 | io.imsave('xxx.png', dst) -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/__init__.py: -------------------------------------------------------------------------------- 1 | from .colorconv import (convert_colorspace, 2 | guess_spatial_dimensions, 3 | rgb2hsv, 4 | hsv2rgb, 5 | rgb2xyz, 6 | xyz2rgb, 7 | rgb2rgbcie, 8 | rgbcie2rgb, 9 | rgb2grey, 10 | rgb2gray, 11 | gray2rgb, 12 | xyz2lab, 13 | lab2xyz, 14 | lab2rgb, 15 | rgb2lab, 16 | xyz2luv, 17 | luv2xyz, 18 | luv2rgb, 19 | rgb2luv, 20 | rgb2hed, 21 | hed2rgb, 22 | lab2lch, 23 | lch2lab, 24 | separate_stains, 25 | combine_stains, 26 | rgb_from_hed, 27 | hed_from_rgb, 28 | rgb_from_hdx, 29 | hdx_from_rgb, 30 | rgb_from_fgx, 31 | fgx_from_rgb, 32 | rgb_from_bex, 33 | bex_from_rgb, 34 | rgb_from_rbd, 35 | rbd_from_rgb, 36 | rgb_from_gdx, 37 | gdx_from_rgb, 38 | rgb_from_hax, 39 | hax_from_rgb, 40 | rgb_from_bro, 41 | bro_from_rgb, 42 | rgb_from_bpx, 43 | bpx_from_rgb, 44 | rgb_from_ahx, 45 | ahx_from_rgb, 46 | rgb_from_hpx, 47 | hpx_from_rgb) 48 | 49 | from .colorlabel import color_dict, label2rgb 50 | 51 | from .delta_e import (deltaE_cie76, 52 | deltaE_ciede94, 53 | deltaE_ciede2000, 54 | deltaE_cmc, 55 | ) 56 | 57 | 58 | __all__ = ['convert_colorspace', 59 | 'guess_spatial_dimensions', 60 | 'rgb2hsv', 61 | 'hsv2rgb', 62 | 'rgb2xyz', 63 | 'xyz2rgb', 64 | 'rgb2rgbcie', 65 | 'rgbcie2rgb', 66 | 'rgb2grey', 67 | 'rgb2gray', 68 | 'gray2rgb', 69 | 'xyz2lab', 70 | 'lab2xyz', 71 | 
'lab2rgb', 72 | 'rgb2lab', 73 | 'rgb2hed', 74 | 'hed2rgb', 75 | 'lab2lch', 76 | 'lch2lab', 77 | 'separate_stains', 78 | 'combine_stains', 79 | 'rgb_from_hed', 80 | 'hed_from_rgb', 81 | 'rgb_from_hdx', 82 | 'hdx_from_rgb', 83 | 'rgb_from_fgx', 84 | 'fgx_from_rgb', 85 | 'rgb_from_bex', 86 | 'bex_from_rgb', 87 | 'rgb_from_rbd', 88 | 'rbd_from_rgb', 89 | 'rgb_from_gdx', 90 | 'gdx_from_rgb', 91 | 'rgb_from_hax', 92 | 'hax_from_rgb', 93 | 'rgb_from_bro', 94 | 'bro_from_rgb', 95 | 'rgb_from_bpx', 96 | 'bpx_from_rgb', 97 | 'rgb_from_ahx', 98 | 'ahx_from_rgb', 99 | 'rgb_from_hpx', 100 | 'hpx_from_rgb', 101 | 'color_dict', 102 | 'label2rgb', 103 | 'deltaE_cie76', 104 | 'deltaE_ciede94', 105 | 'deltaE_ciede2000', 106 | 'deltaE_cmc', 107 | ] 108 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/__init__.pyc -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/adapt_rgb.py: -------------------------------------------------------------------------------- 1 | import functools 2 | 3 | import numpy as np 4 | 5 | from .. import color 6 | from ..util.dtype import convert 7 | 8 | 9 | __all__ = ['adapt_rgb', 'hsv_value', 'each_channel'] 10 | 11 | 12 | def is_rgb_like(image): 13 | """Return True if the image *looks* like it's RGB. 14 | 15 | This function should not be public because it is only intended to be used 16 | for functions that don't accept volumes as input, since checking an image's 17 | shape is fragile. 18 | """ 19 | return (image.ndim == 3) and (image.shape[2] in (3, 4)) 20 | 21 | 22 | def adapt_rgb(apply_to_rgb): 23 | """Return decorator that adapts to RGB images to a gray-scale filter. 24 | 25 | This function is only intended to be used for functions that don't accept 26 | volumes as input, since checking an image's shape is fragile. 27 | 28 | Parameters 29 | ---------- 30 | apply_to_rgb : function 31 | Function that returns a filtered image from an image-filter and RGB 32 | image. This will only be called if the image is RGB-like. 33 | """ 34 | def decorator(image_filter): 35 | @functools.wraps(image_filter) 36 | def image_filter_adapted(image, *args, **kwargs): 37 | if is_rgb_like(image): 38 | return apply_to_rgb(image_filter, image, *args, **kwargs) 39 | else: 40 | return image_filter(image, *args, **kwargs) 41 | return image_filter_adapted 42 | return decorator 43 | 44 | 45 | def hsv_value(image_filter, image, *args, **kwargs): 46 | """Return color image by applying `image_filter` on HSV-value of `image`. 47 | 48 | Note that this function is intended for use with `adapt_rgb`. 49 | 50 | Parameters 51 | ---------- 52 | image_filter : function 53 | Function that filters a gray-scale image. 54 | image : array 55 | Input image. Note that RGBA images are treated as RGB. 56 | """ 57 | # Slice the first three channels so that we remove any alpha channels. 58 | hsv = color.rgb2hsv(image[:, :, :3]) 59 | value = hsv[:, :, 2].copy() 60 | value = image_filter(value, *args, **kwargs) 61 | hsv[:, :, 2] = convert(value, hsv.dtype) 62 | return color.hsv2rgb(hsv) 63 | 64 | 65 | def each_channel(image_filter, image, *args, **kwargs): 66 | """Return color image by applying `image_filter` on channels of `image`. 
67 | 68 | Note that this function is intended for use with `adapt_rgb`. 69 | 70 | Parameters 71 | ---------- 72 | image_filter : function 73 | Function that filters a gray-scale image. 74 | image : array 75 | Input image. 76 | """ 77 | c_new = [image_filter(c, *args, **kwargs) for c in image.T] 78 | return np.array(c_new).T 79 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/adapt_rgb.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/adapt_rgb.pyc -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/colorconv.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """Functions for converting between color spaces. 5 | 6 | The "central" color space in this module is RGB, more specifically the linear 7 | sRGB color space using D65 as a white-point [1]_. This represents a 8 | standard monitor (w/o gamma correction). For a good FAQ on color spaces see 9 | [2]_. 10 | 11 | The API consists of functions to convert to and from RGB as defined above, as 12 | well as a generic function to convert to and from any supported color space 13 | (which is done through RGB in most cases). 14 | 15 | 16 | Supported color spaces 17 | ---------------------- 18 | * RGB : Red Green Blue. 19 | Here the sRGB standard [1]_. 20 | * HSV : Hue, Saturation, Value. 21 | Uniquely defined when related to sRGB [3]_. 22 | * RGB CIE : Red Green Blue. 23 | The original RGB CIE standard from 1931 [4]_. Primary colors are 700 nm 24 | (red), 546.1 nm (blue) and 435.8 nm (green). 25 | * XYZ CIE : XYZ 26 | Derived from the RGB CIE color space. Chosen such that 27 | ``x == y == z == 1/3`` at the whitepoint, and all color matching 28 | functions are greater than zero everywhere. 29 | * LAB CIE : Lightness, a, b 30 | Colorspace derived from XYZ CIE that is intended to be more 31 | perceptually uniform 32 | * LUV CIE : Lightness, u, v 33 | Colorspace derived from XYZ CIE that is intended to be more 34 | perceptually uniform 35 | * LCH CIE : Lightness, Chroma, Hue 36 | Defined in terms of LAB CIE. C and H are the polar representation of 37 | a and b. The polar angle C is defined to be on ``(0, 2*pi)`` 38 | 39 | :author: Nicolas Pinto (rgb2hsv) 40 | :author: Ralf Gommers (hsv2rgb) 41 | :author: Travis Oliphant (XYZ and RGB CIE functions) 42 | :author: Matt Terry (lab2lch) 43 | 44 | :license: modified BSD 45 | 46 | References 47 | ---------- 48 | .. [1] Official specification of sRGB, IEC 61966-2-1:1999. 49 | .. [2] http://www.poynton.com/ColorFAQ.html 50 | .. [3] http://en.wikipedia.org/wiki/HSL_and_HSV 51 | .. [4] http://en.wikipedia.org/wiki/CIE_1931_color_space 52 | """ 53 | 54 | from __future__ import division 55 | 56 | from warnings import warn 57 | import numpy as np 58 | from scipy import linalg 59 | from ..util import dtype, dtype_limits 60 | 61 | 62 | def guess_spatial_dimensions(image): 63 | """Make an educated guess about whether an image has a channels dimension. 64 | 65 | Parameters 66 | ---------- 67 | image : ndarray 68 | The input image. 69 | 70 | Returns 71 | ------- 72 | spatial_dims : int or None 73 | The number of spatial dimensions of `image`. If ambiguous, the value 74 | is ``None``. 
75 | 76 | Raises 77 | ------ 78 | ValueError 79 | If the image array has less than two or more than four dimensions. 80 | """ 81 | if image.ndim == 2: 82 | return 2 83 | if image.ndim == 3 and image.shape[-1] != 3: 84 | return 3 85 | if image.ndim == 3 and image.shape[-1] == 3: 86 | return None 87 | if image.ndim == 4 and image.shape[-1] == 3: 88 | return 3 89 | else: 90 | raise ValueError("Expected 2D, 3D, or 4D array, got %iD." % image.ndim) 91 | 92 | 93 | def convert_colorspace(arr, fromspace, tospace): 94 | """Convert an image array to a new color space. 95 | 96 | Parameters 97 | ---------- 98 | arr : array_like 99 | The image to convert. 100 | fromspace : str 101 | The color space to convert from. Valid color space strings are 102 | ``['RGB', 'HSV', 'RGB CIE', 'XYZ']``. Value may also be specified as 103 | lower case. 104 | tospace : str 105 | The color space to convert to. Valid color space strings are 106 | ``['RGB', 'HSV', 'RGB CIE', 'XYZ']``. Value may also be specified as 107 | lower case. 108 | 109 | Returns 110 | ------- 111 | newarr : ndarray 112 | The converted image. 113 | 114 | Notes 115 | ----- 116 | Conversion occurs through the "central" RGB color space, i.e. conversion 117 | from XYZ to HSV is implemented as ``XYZ -> RGB -> HSV`` instead of 118 | directly. 119 | 120 | Examples 121 | -------- 122 | >>> from skimage import data 123 | >>> img = data.astronaut() 124 | >>> img_hsv = convert_colorspace(img, 'RGB', 'HSV') 125 | """ 126 | fromdict = {'RGB': lambda im: im, 'HSV': hsv2rgb, 'RGB CIE': rgbcie2rgb, 127 | 'XYZ': xyz2rgb} 128 | todict = {'RGB': lambda im: im, 'HSV': rgb2hsv, 'RGB CIE': rgb2rgbcie, 129 | 'XYZ': rgb2xyz} 130 | 131 | fromspace = fromspace.upper() 132 | tospace = tospace.upper() 133 | if fromspace not in fromdict.keys(): 134 | raise ValueError('fromspace needs to be one of %s' % fromdict.keys()) 135 | if tospace not in todict.keys(): 136 | raise ValueError('tospace needs to be one of %s' % todict.keys()) 137 | 138 | return todict[tospace](fromdict[fromspace](arr)) 139 | 140 | 141 | def _prepare_colorarray(arr): 142 | """Check the shape of the array and convert it to 143 | floating point representation. 144 | 145 | """ 146 | arr = np.asanyarray(arr) 147 | 148 | if arr.ndim not in [3, 4] or arr.shape[-1] != 3: 149 | msg = ("the input array must be have a shape == (.., ..,[ ..,] 3)), " + 150 | "got (" + (", ".join(map(str, arr.shape))) + ")") 151 | raise ValueError(msg) 152 | 153 | return dtype.img_as_float(arr) 154 | 155 | 156 | def rgb2hsv(rgb): 157 | """RGB to HSV color space conversion. 158 | 159 | Parameters 160 | ---------- 161 | rgb : array_like 162 | The image in RGB format, in a 3-D array of shape ``(.., .., 3)``. 163 | 164 | Returns 165 | ------- 166 | out : ndarray 167 | The image in HSV format, in a 3-D array of shape ``(.., .., 3)``. 168 | 169 | Raises 170 | ------ 171 | ValueError 172 | If `rgb` is not a 3-D array of shape ``(.., .., 3)``. 173 | 174 | Notes 175 | ----- 176 | The conversion assumes an input data range of [0, 1] for all 177 | color components. 178 | 179 | Conversion between RGB and HSV color spaces results in some loss of 180 | precision, due to integer arithmetic and rounding [1]_. 181 | 182 | References 183 | ---------- 184 | .. 
[1] http://en.wikipedia.org/wiki/HSL_and_HSV 185 | 186 | Examples 187 | -------- 188 | >>> from skimage import color 189 | >>> from skimage import data 190 | >>> img = data.astronaut() 191 | >>> img_hsv = color.rgb2hsv(img) 192 | """ 193 | arr = _prepare_colorarray(rgb) 194 | out = np.empty_like(arr) 195 | 196 | # -- V channel 197 | out_v = arr.max(-1) 198 | 199 | # -- S channel 200 | delta = arr.ptp(-1) 201 | # Ignore warning for zero divided by zero 202 | old_settings = np.seterr(invalid='ignore') 203 | out_s = delta / out_v 204 | out_s[delta == 0.] = 0. 205 | 206 | # -- H channel 207 | # red is max 208 | idx = (arr[:, :, 0] == out_v) 209 | out[idx, 0] = (arr[idx, 1] - arr[idx, 2]) / delta[idx] 210 | 211 | # green is max 212 | idx = (arr[:, :, 1] == out_v) 213 | out[idx, 0] = 2. + (arr[idx, 2] - arr[idx, 0]) / delta[idx] 214 | 215 | # blue is max 216 | idx = (arr[:, :, 2] == out_v) 217 | out[idx, 0] = 4. + (arr[idx, 0] - arr[idx, 1]) / delta[idx] 218 | out_h = (out[:, :, 0] / 6.) % 1. 219 | out_h[delta == 0.] = 0. 220 | 221 | np.seterr(**old_settings) 222 | 223 | # -- output 224 | out[:, :, 0] = out_h 225 | out[:, :, 1] = out_s 226 | out[:, :, 2] = out_v 227 | 228 | # remove NaN 229 | out[np.isnan(out)] = 0 230 | 231 | return out 232 | 233 | 234 | def hsv2rgb(hsv): 235 | """HSV to RGB color space conversion. 236 | 237 | Parameters 238 | ---------- 239 | hsv : array_like 240 | The image in HSV format, in a 3-D array of shape ``(.., .., 3)``. 241 | 242 | Returns 243 | ------- 244 | out : ndarray 245 | The image in RGB format, in a 3-D array of shape ``(.., .., 3)``. 246 | 247 | Raises 248 | ------ 249 | ValueError 250 | If `hsv` is not a 3-D array of shape ``(.., .., 3)``. 251 | 252 | Notes 253 | ----- 254 | The conversion assumes an input data range of ``[0, 1]`` for all 255 | color components. 256 | 257 | Conversion between RGB and HSV color spaces results in some loss of 258 | precision, due to integer arithmetic and rounding [1]_. 259 | 260 | References 261 | ---------- 262 | .. [1] http://en.wikipedia.org/wiki/HSL_and_HSV 263 | 264 | Examples 265 | -------- 266 | >>> from skimage import data 267 | >>> img = data.astronaut() 268 | >>> img_hsv = rgb2hsv(img) 269 | >>> img_rgb = hsv2rgb(img_hsv) 270 | """ 271 | arr = _prepare_colorarray(hsv) 272 | 273 | hi = np.floor(arr[:, :, 0] * 6) 274 | f = arr[:, :, 0] * 6 - hi 275 | p = arr[:, :, 2] * (1 - arr[:, :, 1]) 276 | q = arr[:, :, 2] * (1 - f * arr[:, :, 1]) 277 | t = arr[:, :, 2] * (1 - (1 - f) * arr[:, :, 1]) 278 | v = arr[:, :, 2] 279 | 280 | hi = np.dstack([hi, hi, hi]).astype(np.uint8) % 6 281 | out = np.choose(hi, [np.dstack((v, t, p)), 282 | np.dstack((q, v, p)), 283 | np.dstack((p, v, t)), 284 | np.dstack((p, q, v)), 285 | np.dstack((t, p, v)), 286 | np.dstack((v, p, q))]) 287 | 288 | return out 289 | 290 | 291 | # --------------------------------------------------------------- 292 | # Primaries for the coordinate systems 293 | # --------------------------------------------------------------- 294 | cie_primaries = np.array([700, 546.1, 435.8]) 295 | sb_primaries = np.array([1. / 155, 1. / 190, 1. 
/ 225]) * 1e5 296 | 297 | # --------------------------------------------------------------- 298 | # Matrices that define conversion between different color spaces 299 | # --------------------------------------------------------------- 300 | 301 | # From sRGB specification 302 | xyz_from_rgb = np.array([[0.412453, 0.357580, 0.180423], 303 | [0.212671, 0.715160, 0.072169], 304 | [0.019334, 0.119193, 0.950227]]) 305 | 306 | rgb_from_xyz = linalg.inv(xyz_from_rgb) 307 | 308 | # From http://en.wikipedia.org/wiki/CIE_1931_color_space 309 | # Note: Travis's code did not have the divide by 0.17697 310 | xyz_from_rgbcie = np.array([[0.49, 0.31, 0.20], 311 | [0.17697, 0.81240, 0.01063], 312 | [0.00, 0.01, 0.99]]) / 0.17697 313 | 314 | rgbcie_from_xyz = linalg.inv(xyz_from_rgbcie) 315 | 316 | # construct matrices to and from rgb: 317 | rgbcie_from_rgb = np.dot(rgbcie_from_xyz, xyz_from_rgb) 318 | rgb_from_rgbcie = np.dot(rgb_from_xyz, xyz_from_rgbcie) 319 | 320 | 321 | gray_from_rgb = np.array([[0.2125, 0.7154, 0.0721], 322 | [0, 0, 0], 323 | [0, 0, 0]]) 324 | 325 | # CIE LAB constants for Observer=2A, Illuminant=D65 326 | # NOTE: this is actually the XYZ values for the illuminant above. 327 | lab_ref_white = np.array([0.95047, 1., 1.08883]) 328 | 329 | # XYZ coordinates of the illuminants, scaled to [0, 1]. For each illuminant I 330 | # we have: 331 | # 332 | # illuminant[I][0] corresponds to the XYZ coordinates for the 2 degree 333 | # field of view. 334 | # 335 | # illuminant[I][1] corresponds to the XYZ coordinates for the 10 degree 336 | # field of view. 337 | # 338 | # The XYZ coordinates are calculated from [1], using the formula: 339 | # 340 | # X = x * ( Y / y ) 341 | # Y = Y 342 | # Z = ( 1 - x - y ) * ( Y / y ) 343 | # 344 | # where Y = 1. The only exception is the illuminant "D65" with aperture angle 345 | # 2, whose coordinates are copied from 'lab_ref_white' for 346 | # backward-compatibility reasons. 347 | # 348 | # References 349 | # ---------- 350 | # .. [1] http://en.wikipedia.org/wiki/Standard_illuminant 351 | 352 | illuminants = \ 353 | {"A": {'2': (1.098466069456375, 1, 0.3558228003436005), 354 | '10': (1.111420406956693, 1, 0.3519978321919493)}, 355 | "D50": {'2': (0.9642119944211994, 1, 0.8251882845188288), 356 | '10': (0.9672062750333777, 1, 0.8142801513128616)}, 357 | "D55": {'2': (0.956797052643698, 1, 0.9214805860173273), 358 | '10': (0.9579665682254781, 1, 0.9092525159847462)}, 359 | "D65": {'2': (0.95047, 1., 1.08883), # This was: `lab_ref_white` 360 | '10': (0.94809667673716, 1, 1.0730513595166162)}, 361 | "D75": {'2': (0.9497220898840717, 1, 1.226393520724154), 362 | '10': (0.9441713925645873, 1, 1.2064272211720228)}, 363 | "E": {'2': (1.0, 1.0, 1.0), 364 | '10': (1.0, 1.0, 1.0)}} 365 | 366 | 367 | def get_xyz_coords(illuminant, observer): 368 | """Get the XYZ coordinates of the given illuminant and observer [1]_. 369 | 370 | Parameters 371 | ---------- 372 | illuminant : {"A", "D50", "D55", "D65", "D75", "E"}, optional 373 | The name of the illuminant (the function is NOT case sensitive). 374 | observer : {"2", "10"}, optional 375 | The aperture angle of the observer. 376 | 377 | Returns 378 | ------- 379 | (x, y, z) : tuple 380 | A tuple with 3 elements containing the XYZ coordinates of the given 381 | illuminant. 382 | 383 | Raises 384 | ------ 385 | ValueError 386 | If either the illuminant or the observer angle are not supported or 387 | unknown. 388 | 389 | References 390 | ---------- 391 | .. 
[1] http://en.wikipedia.org/wiki/Standard_illuminant 392 | 393 | """ 394 | illuminant = illuminant.upper() 395 | try: 396 | return illuminants[illuminant][observer] 397 | except KeyError: 398 | raise ValueError("Unknown illuminant/observer combination\ 399 | (\'{0}\', \'{1}\')".format(illuminant, observer)) 400 | 401 | # Haematoxylin-Eosin-DAB colorspace 402 | # From original Ruifrok's paper: A. C. Ruifrok and D. A. Johnston, 403 | # "Quantification of histochemical staining by color deconvolution.," 404 | # Analytical and quantitative cytology and histology / the International 405 | # Academy of Cytology [and] American Society of Cytology, vol. 23, no. 4, 406 | # pp. 291-9, Aug. 2001. 407 | rgb_from_hed = np.array([[0.65, 0.70, 0.29], 408 | [0.07, 0.99, 0.11], 409 | [0.27, 0.57, 0.78]]) 410 | hed_from_rgb = linalg.inv(rgb_from_hed) 411 | 412 | # Following matrices are adapted form the Java code written by G.Landini. 413 | # The original code is available at: 414 | # http://www.dentistry.bham.ac.uk/landinig/software/cdeconv/cdeconv.html 415 | 416 | # Hematoxylin + DAB 417 | rgb_from_hdx = np.array([[0.650, 0.704, 0.286], 418 | [0.268, 0.570, 0.776], 419 | [0.0, 0.0, 0.0]]) 420 | rgb_from_hdx[2, :] = np.cross(rgb_from_hdx[0, :], rgb_from_hdx[1, :]) 421 | hdx_from_rgb = linalg.inv(rgb_from_hdx) 422 | 423 | # Feulgen + Light Green 424 | rgb_from_fgx = np.array([[0.46420921, 0.83008335, 0.30827187], 425 | [0.94705542, 0.25373821, 0.19650764], 426 | [0.0, 0.0, 0.0]]) 427 | rgb_from_fgx[2, :] = np.cross(rgb_from_fgx[0, :], rgb_from_fgx[1, :]) 428 | fgx_from_rgb = linalg.inv(rgb_from_fgx) 429 | 430 | # Giemsa: Methyl Blue + Eosin 431 | rgb_from_bex = np.array([[0.834750233, 0.513556283, 0.196330403], 432 | [0.092789, 0.954111, 0.283111], 433 | [0.0, 0.0, 0.0]]) 434 | rgb_from_bex[2, :] = np.cross(rgb_from_bex[0, :], rgb_from_bex[1, :]) 435 | bex_from_rgb = linalg.inv(rgb_from_bex) 436 | 437 | # FastRed + FastBlue + DAB 438 | rgb_from_rbd = np.array([[0.21393921, 0.85112669, 0.47794022], 439 | [0.74890292, 0.60624161, 0.26731082], 440 | [0.268, 0.570, 0.776]]) 441 | rbd_from_rgb = linalg.inv(rgb_from_rbd) 442 | 443 | # Methyl Green + DAB 444 | rgb_from_gdx = np.array([[0.98003, 0.144316, 0.133146], 445 | [0.268, 0.570, 0.776], 446 | [0.0, 0.0, 0.0]]) 447 | rgb_from_gdx[2, :] = np.cross(rgb_from_gdx[0, :], rgb_from_gdx[1, :]) 448 | gdx_from_rgb = linalg.inv(rgb_from_gdx) 449 | 450 | # Hematoxylin + AEC 451 | rgb_from_hax = np.array([[0.650, 0.704, 0.286], 452 | [0.2743, 0.6796, 0.6803], 453 | [0.0, 0.0, 0.0]]) 454 | rgb_from_hax[2, :] = np.cross(rgb_from_hax[0, :], rgb_from_hax[1, :]) 455 | hax_from_rgb = linalg.inv(rgb_from_hax) 456 | 457 | # Blue matrix Anilline Blue + Red matrix Azocarmine + Orange matrix Orange-G 458 | rgb_from_bro = np.array([[0.853033, 0.508733, 0.112656], 459 | [0.09289875, 0.8662008, 0.49098468], 460 | [0.10732849, 0.36765403, 0.9237484]]) 461 | bro_from_rgb = linalg.inv(rgb_from_bro) 462 | 463 | # Methyl Blue + Ponceau Fuchsin 464 | rgb_from_bpx = np.array([[0.7995107, 0.5913521, 0.10528667], 465 | [0.09997159, 0.73738605, 0.6680326], 466 | [0.0, 0.0, 0.0]]) 467 | rgb_from_bpx[2, :] = np.cross(rgb_from_bpx[0, :], rgb_from_bpx[1, :]) 468 | bpx_from_rgb = linalg.inv(rgb_from_bpx) 469 | 470 | # Alcian Blue + Hematoxylin 471 | rgb_from_ahx = np.array([[0.874622, 0.457711, 0.158256], 472 | [0.552556, 0.7544, 0.353744], 473 | [0.0, 0.0, 0.0]]) 474 | rgb_from_ahx[2, :] = np.cross(rgb_from_ahx[0, :], rgb_from_ahx[1, :]) 475 | ahx_from_rgb = linalg.inv(rgb_from_ahx) 476 | 477 | # 
Hematoxylin + PAS 478 | rgb_from_hpx = np.array([[0.644211, 0.716556, 0.266844], 479 | [0.175411, 0.972178, 0.154589], 480 | [0.0, 0.0, 0.0]]) 481 | rgb_from_hpx[2, :] = np.cross(rgb_from_hpx[0, :], rgb_from_hpx[1, :]) 482 | hpx_from_rgb = linalg.inv(rgb_from_hpx) 483 | 484 | # ------------------------------------------------------------- 485 | # The conversion functions that make use of the matrices above 486 | # ------------------------------------------------------------- 487 | 488 | 489 | def _convert(matrix, arr): 490 | """Do the color space conversion. 491 | 492 | Parameters 493 | ---------- 494 | matrix : array_like 495 | The 3x3 matrix to use. 496 | arr : array_like 497 | The input array. 498 | 499 | Returns 500 | ------- 501 | out : ndarray, dtype=float 502 | The converted array. 503 | """ 504 | arr = _prepare_colorarray(arr) 505 | arr = np.swapaxes(arr, 0, -1) 506 | oldshape = arr.shape 507 | arr = np.reshape(arr, (3, -1)) 508 | out = np.dot(matrix, arr) 509 | out.shape = oldshape 510 | out = np.swapaxes(out, -1, 0) 511 | 512 | return np.ascontiguousarray(out) 513 | 514 | 515 | def xyz2rgb(xyz): 516 | """XYZ to RGB color space conversion. 517 | 518 | Parameters 519 | ---------- 520 | xyz : array_like 521 | The image in XYZ format, in a 3-D array of shape ``(.., .., 3)``. 522 | 523 | Returns 524 | ------- 525 | out : ndarray 526 | The image in RGB format, in a 3-D array of shape ``(.., .., 3)``. 527 | 528 | Raises 529 | ------ 530 | ValueError 531 | If `xyz` is not a 3-D array of shape ``(.., .., 3)``. 532 | 533 | Notes 534 | ----- 535 | The CIE XYZ color space is derived from the CIE RGB color space. Note 536 | however that this function converts to sRGB. 537 | 538 | References 539 | ---------- 540 | .. [1] http://en.wikipedia.org/wiki/CIE_1931_color_space 541 | 542 | Examples 543 | -------- 544 | >>> from skimage import data 545 | >>> from skimage.color import rgb2xyz, xyz2rgb 546 | >>> img = data.astronaut() 547 | >>> img_xyz = rgb2xyz(img) 548 | >>> img_rgb = xyz2rgb(img_xyz) 549 | """ 550 | # Follow the algorithm from http://www.easyrgb.com/index.php 551 | # except we don't multiply/divide by 100 in the conversion 552 | arr = _convert(rgb_from_xyz, xyz) 553 | mask = arr > 0.0031308 554 | arr[mask] = 1.055 * np.power(arr[mask], 1 / 2.4) - 0.055 555 | arr[~mask] *= 12.92 556 | arr[arr < 0] = 0 557 | arr[arr > 1] = 1 558 | return arr 559 | 560 | 561 | def rgb2xyz(rgb): 562 | """RGB to XYZ color space conversion. 563 | 564 | Parameters 565 | ---------- 566 | rgb : array_like 567 | The image in RGB format, in a 3- or 4-D array of shape 568 | ``(.., ..,[ ..,] 3)``. 569 | 570 | Returns 571 | ------- 572 | out : ndarray 573 | The image in XYZ format, in a 3- or 4-D array of shape 574 | ``(.., ..,[ ..,] 3)``. 575 | 576 | Raises 577 | ------ 578 | ValueError 579 | If `rgb` is not a 3- or 4-D array of shape ``(.., ..,[ ..,] 3)``. 580 | 581 | Notes 582 | ----- 583 | The CIE XYZ color space is derived from the CIE RGB color space. Note 584 | however that this function converts from sRGB. 585 | 586 | References 587 | ---------- 588 | .. 
[1] http://en.wikipedia.org/wiki/CIE_1931_color_space 589 | 590 | Examples 591 | -------- 592 | >>> from skimage import data 593 | >>> img = data.astronaut() 594 | >>> img_xyz = rgb2xyz(img) 595 | """ 596 | # Follow the algorithm from http://www.easyrgb.com/index.php 597 | # except we don't multiply/divide by 100 in the conversion 598 | arr = _prepare_colorarray(rgb).copy() 599 | mask = arr > 0.04045 600 | arr[mask] = np.power((arr[mask] + 0.055) / 1.055, 2.4) 601 | arr[~mask] /= 12.92 602 | return _convert(xyz_from_rgb, arr) 603 | 604 | 605 | def rgb2rgbcie(rgb): 606 | """RGB to RGB CIE color space conversion. 607 | 608 | Parameters 609 | ---------- 610 | rgb : array_like 611 | The image in RGB format, in a 3-D array of shape ``(.., .., 3)``. 612 | 613 | Returns 614 | ------- 615 | out : ndarray 616 | The image in RGB CIE format, in a 3-D array of shape ``(.., .., 3)``. 617 | 618 | Raises 619 | ------ 620 | ValueError 621 | If `rgb` is not a 3-D array of shape ``(.., .., 3)``. 622 | 623 | References 624 | ---------- 625 | .. [1] http://en.wikipedia.org/wiki/CIE_1931_color_space 626 | 627 | Examples 628 | -------- 629 | >>> from skimage import data 630 | >>> from skimage.color import rgb2rgbcie 631 | >>> img = data.astronaut() 632 | >>> img_rgbcie = rgb2rgbcie(img) 633 | """ 634 | return _convert(rgbcie_from_rgb, rgb) 635 | 636 | 637 | def rgbcie2rgb(rgbcie): 638 | """RGB CIE to RGB color space conversion. 639 | 640 | Parameters 641 | ---------- 642 | rgbcie : array_like 643 | The image in RGB CIE format, in a 3-D array of shape ``(.., .., 3)``. 644 | 645 | Returns 646 | ------- 647 | out : ndarray 648 | The image in RGB format, in a 3-D array of shape ``(.., .., 3)``. 649 | 650 | Raises 651 | ------ 652 | ValueError 653 | If `rgbcie` is not a 3-D array of shape ``(.., .., 3)``. 654 | 655 | References 656 | ---------- 657 | .. [1] http://en.wikipedia.org/wiki/CIE_1931_color_space 658 | 659 | Examples 660 | -------- 661 | >>> from skimage import data 662 | >>> from skimage.color import rgb2rgbcie, rgbcie2rgb 663 | >>> img = data.astronaut() 664 | >>> img_rgbcie = rgb2rgbcie(img) 665 | >>> img_rgb = rgbcie2rgb(img_rgbcie) 666 | """ 667 | return _convert(rgb_from_rgbcie, rgbcie) 668 | 669 | 670 | def rgb2gray(rgb): 671 | """Compute luminance of an RGB image. 672 | 673 | Parameters 674 | ---------- 675 | rgb : array_like 676 | The image in RGB format, in a 3-D array of shape ``(.., .., 3)``, 677 | or in RGBA format with shape ``(.., .., 4)``. 678 | 679 | Returns 680 | ------- 681 | out : ndarray 682 | The luminance image, a 2-D array. 683 | 684 | Raises 685 | ------ 686 | ValueError 687 | If `rgb2gray` is not a 3-D array of shape ``(.., .., 3)`` or 688 | ``(.., .., 4)``. 689 | 690 | References 691 | ---------- 692 | .. [1] http://www.poynton.com/PDFs/ColorFAQ.pdf 693 | 694 | Notes 695 | ----- 696 | The weights used in this conversion are calibrated for contemporary 697 | CRT phosphors:: 698 | 699 | Y = 0.2125 R + 0.7154 G + 0.0721 B 700 | 701 | If there is an alpha channel present, it is ignored. 
702 | 703 | Examples 704 | -------- 705 | >>> from skimage.color import rgb2gray 706 | >>> from skimage import data 707 | >>> img = data.astronaut() 708 | >>> img_gray = rgb2gray(img) 709 | """ 710 | 711 | if rgb.ndim == 2: 712 | return np.ascontiguousarray(rgb) 713 | 714 | rgb = _prepare_colorarray(rgb[..., :3]) 715 | 716 | gray = 0.2125 * rgb[..., 0] 717 | gray[:] += 0.7154 * rgb[..., 1] 718 | gray[:] += 0.0721 * rgb[..., 2] 719 | 720 | return gray 721 | 722 | 723 | rgb2grey = rgb2gray 724 | 725 | 726 | def gray2rgb(image, alpha=None): 727 | """Create an RGB representation of a gray-level image. 728 | 729 | Parameters 730 | ---------- 731 | image : array_like 732 | Input image of shape ``(M, N [, P])``. 733 | alpha : bool, optional 734 | Ensure that the output image has an alpha layer. If None, 735 | alpha layers are passed through but not created. 736 | 737 | Returns 738 | ------- 739 | rgb : ndarray 740 | RGB image of shape ``(M, N, [, P], 3)``. 741 | 742 | Raises 743 | ------ 744 | ValueError 745 | If the input is not a 2- or 3-dimensional image. 746 | 747 | """ 748 | is_rgb = False 749 | is_alpha = False 750 | dims = np.squeeze(image).ndim 751 | 752 | if dims == 3: 753 | if image.shape[2] == 3: 754 | is_rgb = True 755 | elif image.shape[2] == 4: 756 | is_alpha = True 757 | is_rgb = True 758 | 759 | if is_rgb: 760 | if alpha == False: 761 | image = image[..., :3] 762 | 763 | elif alpha == True and not is_alpha: 764 | alpha_layer = (np.ones_like(image[..., 0, np.newaxis]) * 765 | dtype_limits(image)[1]) 766 | image = np.concatenate((image, alpha_layer), axis=2) 767 | 768 | return image 769 | 770 | elif image.ndim != 1 and dims in (1, 2, 3): 771 | image = image[..., np.newaxis] 772 | 773 | if alpha: 774 | alpha_layer = (np.ones_like(image) * dtype_limits(image)[1]) 775 | return np.concatenate(3 * (image,) + (alpha_layer,), axis=-1) 776 | else: 777 | return np.concatenate(3 * (image,), axis=-1) 778 | 779 | else: 780 | raise ValueError("Input image expected to be RGB, RGBA or gray.") 781 | 782 | 783 | def xyz2lab(xyz, illuminant="D65", observer="2"): 784 | """XYZ to CIE-LAB color space conversion. 785 | 786 | Parameters 787 | ---------- 788 | xyz : array_like 789 | The image in XYZ format, in a 3- or 4-D array of shape 790 | ``(.., ..,[ ..,] 3)``. 791 | illuminant : {"A", "D50", "D55", "D65", "D75", "E"}, optional 792 | The name of the illuminant (the function is NOT case sensitive). 793 | observer : {"2", "10"}, optional 794 | The aperture angle of the observer. 795 | 796 | Returns 797 | ------- 798 | out : ndarray 799 | The image in CIE-LAB format, in a 3- or 4-D array of shape 800 | ``(.., ..,[ ..,] 3)``. 801 | 802 | Raises 803 | ------ 804 | ValueError 805 | If `xyz` is not a 3-D array of shape ``(.., ..,[ ..,] 3)``. 806 | ValueError 807 | If either the illuminant or the observer angle is unsupported or 808 | unknown. 809 | 810 | Notes 811 | ----- 812 | By default Observer= 2A, Illuminant= D65. CIE XYZ tristimulus values 813 | x_ref=95.047, y_ref=100., z_ref=108.883. See function `get_xyz_coords` for 814 | a list of supported illuminants. 815 | 816 | References 817 | ---------- 818 | .. [1] http://www.easyrgb.com/index.php?X=MATH&H=07#text7 819 | .. 
[2] http://en.wikipedia.org/wiki/Lab_color_space 820 | 821 | Examples 822 | -------- 823 | >>> from skimage import data 824 | >>> from skimage.color import rgb2xyz, xyz2lab 825 | >>> img = data.astronaut() 826 | >>> img_xyz = rgb2xyz(img) 827 | >>> img_lab = xyz2lab(img_xyz) 828 | """ 829 | arr = _prepare_colorarray(xyz) 830 | 831 | xyz_ref_white = get_xyz_coords(illuminant, observer) 832 | 833 | # scale by CIE XYZ tristimulus values of the reference white point 834 | arr = arr / xyz_ref_white 835 | 836 | # Nonlinear distortion and linear transformation 837 | mask = arr > 0.008856 838 | arr[mask] = np.power(arr[mask], 1. / 3.) 839 | arr[~mask] = 7.787 * arr[~mask] + 16. / 116. 840 | 841 | x, y, z = arr[..., 0], arr[..., 1], arr[..., 2] 842 | 843 | # Vector scaling 844 | L = (116. * y) - 16. 845 | a = 500.0 * (x - y) 846 | b = 200.0 * (y - z) 847 | 848 | return np.concatenate([x[..., np.newaxis] for x in [L, a, b]], axis=-1) 849 | 850 | 851 | def lab2xyz(lab, illuminant="D65", observer="2"): 852 | """CIE-LAB to XYZcolor space conversion. 853 | 854 | Parameters 855 | ---------- 856 | lab : array_like 857 | The image in lab format, in a 3-D array of shape ``(.., .., 3)``. 858 | illuminant : {"A", "D50", "D55", "D65", "D75", "E"}, optional 859 | The name of the illuminant (the function is NOT case sensitive). 860 | observer : {"2", "10"}, optional 861 | The aperture angle of the observer. 862 | 863 | Returns 864 | ------- 865 | out : ndarray 866 | The image in XYZ format, in a 3-D array of shape ``(.., .., 3)``. 867 | 868 | Raises 869 | ------ 870 | ValueError 871 | If `lab` is not a 3-D array of shape ``(.., .., 3)``. 872 | ValueError 873 | If either the illuminant or the observer angle are not supported or 874 | unknown. 875 | UserWarning 876 | If any of the pixels are invalid (Z < 0). 877 | 878 | 879 | Notes 880 | ----- 881 | By default Observer= 2A, Illuminant= D65. CIE XYZ tristimulus values x_ref 882 | = 95.047, y_ref = 100., z_ref = 108.883. See function 'get_xyz_coords' for 883 | a list of supported illuminants. 884 | 885 | References 886 | ---------- 887 | .. [1] http://www.easyrgb.com/index.php?X=MATH&H=07#text7 888 | .. [2] http://en.wikipedia.org/wiki/Lab_color_space 889 | 890 | """ 891 | 892 | arr = _prepare_colorarray(lab).copy() 893 | 894 | L, a, b = arr[:, :, 0], arr[:, :, 1], arr[:, :, 2] 895 | y = (L + 16.) / 116. 896 | x = (a / 500.) + y 897 | z = y - (b / 200.) 898 | 899 | if np.any(z < 0): 900 | invalid = np.nonzero(z < 0) 901 | warn('Color data out of range: Z < 0 in %s pixels' % invalid[0].size) 902 | z[invalid] = 0 903 | 904 | out = np.dstack([x, y, z]) 905 | 906 | mask = out > 0.2068966 907 | out[mask] = np.power(out[mask], 3.) 908 | out[~mask] = (out[~mask] - 16.0 / 116.) / 7.787 909 | 910 | # rescale to the reference white (illuminant) 911 | xyz_ref_white = get_xyz_coords(illuminant, observer) 912 | out *= xyz_ref_white 913 | return out 914 | 915 | 916 | def rgb2lab(rgb): 917 | """RGB to lab color space conversion. 918 | 919 | Parameters 920 | ---------- 921 | rgb : array_like 922 | The image in RGB format, in a 3- or 4-D array of shape 923 | ``(.., ..,[ ..,] 3)``. 924 | 925 | Returns 926 | ------- 927 | out : ndarray 928 | The image in Lab format, in a 3- or 4-D array of shape 929 | ``(.., ..,[ ..,] 3)``. 930 | 931 | Raises 932 | ------ 933 | ValueError 934 | If `rgb` is not a 3- or 4-D array of shape ``(.., ..,[ ..,] 3)``. 935 | 936 | Notes 937 | ----- 938 | This function uses rgb2xyz and xyz2lab. 
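    Examples
    --------
    >>> from skimage import data
    >>> from skimage.color import rgb2lab
    >>> img = data.astronaut()
    >>> img_lab = rgb2lab(img)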
939 | """ 940 | return xyz2lab(rgb2xyz(rgb)) 941 | 942 | 943 | def lab2rgb(lab): 944 | """Lab to RGB color space conversion. 945 | 946 | Parameters 947 | ---------- 948 | lab : array_like 949 | The image in Lab format, in a 3-D array of shape ``(.., .., 3)``. 950 | 951 | Returns 952 | ------- 953 | out : ndarray 954 | The image in RGB format, in a 3-D array of shape ``(.., .., 3)``. 955 | 956 | Raises 957 | ------ 958 | ValueError 959 | If `lab` is not a 3-D array of shape ``(.., .., 3)``. 960 | 961 | Notes 962 | ----- 963 | This function uses lab2xyz and xyz2rgb. 964 | """ 965 | return xyz2rgb(lab2xyz(lab)) 966 | 967 | 968 | def xyz2luv(xyz, illuminant="D65", observer="2"): 969 | """XYZ to CIE-Luv color space conversion. 970 | 971 | Parameters 972 | ---------- 973 | xyz : (M, N, [P,] 3) array_like 974 | The 3 or 4 dimensional image in XYZ format. Final dimension denotes 975 | channels. 976 | illuminant : {"A", "D50", "D55", "D65", "D75", "E"}, optional 977 | The name of the illuminant (the function is NOT case sensitive). 978 | observer : {"2", "10"}, optional 979 | The aperture angle of the observer. 980 | 981 | Returns 982 | ------- 983 | out : (M, N, [P,] 3) ndarray 984 | The image in CIE-Luv format. Same dimensions as input. 985 | 986 | Raises 987 | ------ 988 | ValueError 989 | If `xyz` is not a 3-D or 4-D array of shape ``(M, N, [P,] 3)``. 990 | ValueError 991 | If either the illuminant or the observer angle are not supported or 992 | unknown. 993 | 994 | Notes 995 | ----- 996 | By default XYZ conversion weights use observer=2A. Reference whitepoint 997 | for D65 Illuminant, with XYZ tristimulus values of ``(95.047, 100., 998 | 108.883)``. See function 'get_xyz_coords' for a list of supported 999 | illuminants. 1000 | 1001 | References 1002 | ---------- 1003 | .. [1] http://www.easyrgb.com/index.php?X=MATH&H=16#text16 1004 | .. [2] http://en.wikipedia.org/wiki/CIELUV 1005 | 1006 | Examples 1007 | -------- 1008 | >>> from skimage import data 1009 | >>> from skimage.color import rgb2xyz, xyz2luv 1010 | >>> img = data.astronaut() 1011 | >>> img_xyz = rgb2xyz(img) 1012 | >>> img_luv = xyz2luv(img_xyz) 1013 | """ 1014 | arr = _prepare_colorarray(xyz) 1015 | 1016 | # extract channels 1017 | x, y, z = arr[..., 0], arr[..., 1], arr[..., 2] 1018 | 1019 | eps = np.finfo(np.float).eps 1020 | 1021 | # compute y_r and L 1022 | xyz_ref_white = get_xyz_coords(illuminant, observer) 1023 | L = y / xyz_ref_white[1] 1024 | mask = L > 0.008856 1025 | L[mask] = 116. * np.power(L[mask], 1. / 3.) - 16. 1026 | L[~mask] = 903.3 * L[~mask] 1027 | 1028 | u0 = 4 * xyz_ref_white[0] / np.dot([1, 15, 3], xyz_ref_white) 1029 | v0 = 9 * xyz_ref_white[1] / np.dot([1, 15, 3], xyz_ref_white) 1030 | 1031 | # u' and v' helper functions 1032 | def fu(X, Y, Z): 1033 | return (4. * X) / (X + 15. * Y + 3. * Z + eps) 1034 | 1035 | def fv(X, Y, Z): 1036 | return (9. * Y) / (X + 15. * Y + 3. * Z + eps) 1037 | 1038 | # compute u and v using helper functions 1039 | u = 13. * L * (fu(x, y, z) - u0) 1040 | v = 13. * L * (fv(x, y, z) - v0) 1041 | 1042 | return np.concatenate([q[..., np.newaxis] for q in [L, u, v]], axis=-1) 1043 | 1044 | 1045 | def luv2xyz(luv, illuminant="D65", observer="2"): 1046 | """CIE-Luv to XYZ color space conversion. 1047 | 1048 | Parameters 1049 | ---------- 1050 | luv : (M, N, [P,] 3) array_like 1051 | The 3 or 4 dimensional image in CIE-Luv format. Final dimension denotes 1052 | channels. 
1053 | illuminant : {"A", "D50", "D55", "D65", "D75", "E"}, optional 1054 | The name of the illuminant (the function is NOT case sensitive). 1055 | observer : {"2", "10"}, optional 1056 | The aperture angle of the observer. 1057 | 1058 | Returns 1059 | ------- 1060 | out : (M, N, [P,] 3) ndarray 1061 | The image in XYZ format. Same dimensions as input. 1062 | 1063 | Raises 1064 | ------ 1065 | ValueError 1066 | If `luv` is not a 3-D or 4-D array of shape ``(M, N, [P,] 3)``. 1067 | ValueError 1068 | If either the illuminant or the observer angle are not supported or 1069 | unknown. 1070 | 1071 | Notes 1072 | ----- 1073 | XYZ conversion weights use observer=2A. Reference whitepoint for D65 1074 | Illuminant, with XYZ tristimulus values of ``(95.047, 100., 108.883)``. See 1075 | function 'get_xyz_coords' for a list of supported illuminants. 1076 | 1077 | References 1078 | ---------- 1079 | .. [1] http://www.easyrgb.com/index.php?X=MATH&H=16#text16 1080 | .. [2] http://en.wikipedia.org/wiki/CIELUV 1081 | 1082 | """ 1083 | 1084 | arr = _prepare_colorarray(luv).copy() 1085 | 1086 | L, u, v = arr[:, :, 0], arr[:, :, 1], arr[:, :, 2] 1087 | 1088 | eps = np.finfo(np.float).eps 1089 | 1090 | # compute y 1091 | y = L.copy() 1092 | mask = y > 7.999625 1093 | y[mask] = np.power((y[mask] + 16.) / 116., 3.) 1094 | y[~mask] = y[~mask] / 903.3 1095 | xyz_ref_white = get_xyz_coords(illuminant, observer) 1096 | y *= xyz_ref_white[1] 1097 | 1098 | # reference white x,z 1099 | uv_weights = [1, 15, 3] 1100 | u0 = 4 * xyz_ref_white[0] / np.dot(uv_weights, xyz_ref_white) 1101 | v0 = 9 * xyz_ref_white[1] / np.dot(uv_weights, xyz_ref_white) 1102 | 1103 | # compute intermediate values 1104 | a = u0 + u / (13. * L + eps) 1105 | b = v0 + v / (13. * L + eps) 1106 | c = 3 * y * (5 * b - 3) 1107 | 1108 | # compute x and z 1109 | z = ((a - 4) * c - 15 * a * b * y) / (12 * b) 1110 | x = -(c / b + 3. * z) 1111 | 1112 | return np.concatenate([q[..., np.newaxis] for q in [x, y, z]], axis=-1) 1113 | 1114 | 1115 | def rgb2luv(rgb): 1116 | """RGB to CIE-Luv color space conversion. 1117 | 1118 | Parameters 1119 | ---------- 1120 | rgb : (M, N, [P,] 3) array_like 1121 | The 3 or 4 dimensional image in RGB format. Final dimension denotes 1122 | channels. 1123 | 1124 | Returns 1125 | ------- 1126 | out : (M, N, [P,] 3) ndarray 1127 | The image in CIE Luv format. Same dimensions as input. 1128 | 1129 | Raises 1130 | ------ 1131 | ValueError 1132 | If `rgb` is not a 3-D or 4-D array of shape ``(M, N, [P,] 3)``. 1133 | 1134 | Notes 1135 | ----- 1136 | This function uses rgb2xyz and xyz2luv. 1137 | """ 1138 | return xyz2luv(rgb2xyz(rgb)) 1139 | 1140 | 1141 | def luv2rgb(luv): 1142 | """Luv to RGB color space conversion. 1143 | 1144 | Parameters 1145 | ---------- 1146 | luv : (M, N, [P,] 3) array_like 1147 | The 3 or 4 dimensional image in CIE Luv format. Final dimension denotes 1148 | channels. 1149 | 1150 | Returns 1151 | ------- 1152 | out : (M, N, [P,] 3) ndarray 1153 | The image in RGB format. Same dimensions as input. 1154 | 1155 | Raises 1156 | ------ 1157 | ValueError 1158 | If `luv` is not a 3-D or 4-D array of shape ``(M, N, [P,] 3)``. 1159 | 1160 | Notes 1161 | ----- 1162 | This function uses luv2xyz and xyz2rgb. 1163 | """ 1164 | return xyz2rgb(luv2xyz(luv)) 1165 | 1166 | 1167 | def rgb2hed(rgb): 1168 | """RGB to Haematoxylin-Eosin-DAB (HED) color space conversion. 1169 | 1170 | Parameters 1171 | ---------- 1172 | rgb : array_like 1173 | The image in RGB format, in a 3-D array of shape ``(.., .., 3)``. 
1174 | 1175 | Returns 1176 | ------- 1177 | out : ndarray 1178 | The image in HED format, in a 3-D array of shape ``(.., .., 3)``. 1179 | 1180 | Raises 1181 | ------ 1182 | ValueError 1183 | If `rgb` is not a 3-D array of shape ``(.., .., 3)``. 1184 | 1185 | 1186 | References 1187 | ---------- 1188 | .. [1] A. C. Ruifrok and D. A. Johnston, "Quantification of histochemical 1189 | staining by color deconvolution.," Analytical and quantitative 1190 | cytology and histology / the International Academy of Cytology [and] 1191 | American Society of Cytology, vol. 23, no. 4, pp. 291-9, Aug. 2001. 1192 | 1193 | Examples 1194 | -------- 1195 | >>> from skimage import data 1196 | >>> from skimage.color import rgb2hed 1197 | >>> ihc = data.immunohistochemistry() 1198 | >>> ihc_hed = rgb2hed(ihc) 1199 | """ 1200 | return separate_stains(rgb, hed_from_rgb) 1201 | 1202 | 1203 | def hed2rgb(hed): 1204 | """Haematoxylin-Eosin-DAB (HED) to RGB color space conversion. 1205 | 1206 | Parameters 1207 | ---------- 1208 | hed : array_like 1209 | The image in the HED color space, in a 3-D array of shape 1210 | ``(.., .., 3)``. 1211 | 1212 | Returns 1213 | ------- 1214 | out : ndarray 1215 | The image in RGB, in a 3-D array of shape ``(.., .., 3)``. 1216 | 1217 | Raises 1218 | ------ 1219 | ValueError 1220 | If `hed` is not a 3-D array of shape ``(.., .., 3)``. 1221 | 1222 | References 1223 | ---------- 1224 | .. [1] A. C. Ruifrok and D. A. Johnston, "Quantification of histochemical 1225 | staining by color deconvolution.," Analytical and quantitative 1226 | cytology and histology / the International Academy of Cytology [and] 1227 | American Society of Cytology, vol. 23, no. 4, pp. 291-9, Aug. 2001. 1228 | 1229 | Examples 1230 | -------- 1231 | >>> from skimage import data 1232 | >>> from skimage.color import rgb2hed, hed2rgb 1233 | >>> ihc = data.immunohistochemistry() 1234 | >>> ihc_hed = rgb2hed(ihc) 1235 | >>> ihc_rgb = hed2rgb(ihc_hed) 1236 | """ 1237 | return combine_stains(hed, rgb_from_hed) 1238 | 1239 | 1240 | def separate_stains(rgb, conv_matrix): 1241 | """RGB to stain color space conversion. 1242 | 1243 | Parameters 1244 | ---------- 1245 | rgb : array_like 1246 | The image in RGB format, in a 3-D array of shape ``(.., .., 3)``. 1247 | conv_matrix: ndarray 1248 | The stain separation matrix as described by G. Landini [1]_. 1249 | 1250 | Returns 1251 | ------- 1252 | out : ndarray 1253 | The image in stain color space, in a 3-D array of shape 1254 | ``(.., .., 3)``. 1255 | 1256 | Raises 1257 | ------ 1258 | ValueError 1259 | If `rgb` is not a 3-D array of shape ``(.., .., 3)``. 1260 | 1261 | Notes 1262 | ----- 1263 | Stain separation matrices available in the ``color`` module and their 1264 | respective colorspace: 1265 | 1266 | * ``hed_from_rgb``: Hematoxylin + Eosin + DAB 1267 | * ``hdx_from_rgb``: Hematoxylin + DAB 1268 | * ``fgx_from_rgb``: Feulgen + Light Green 1269 | * ``bex_from_rgb``: Giemsa stain : Methyl Blue + Eosin 1270 | * ``rbd_from_rgb``: FastRed + FastBlue + DAB 1271 | * ``gdx_from_rgb``: Methyl Green + DAB 1272 | * ``hax_from_rgb``: Hematoxylin + AEC 1273 | * ``bro_from_rgb``: Blue matrix Anilline Blue + Red matrix Azocarmine\ 1274 | + Orange matrix Orange-G 1275 | * ``bpx_from_rgb``: Methyl Blue + Ponceau Fuchsin 1276 | * ``ahx_from_rgb``: Alcian Blue + Hematoxylin 1277 | * ``hpx_from_rgb``: Hematoxylin + PAS 1278 | 1279 | References 1280 | ---------- 1281 | .. 
[1] http://www.dentistry.bham.ac.uk/landinig/software/cdeconv/cdeconv.html 1282 | 1283 | Examples 1284 | -------- 1285 | >>> from skimage import data 1286 | >>> from skimage.color import separate_stains, hdx_from_rgb 1287 | >>> ihc = data.immunohistochemistry() 1288 | >>> ihc_hdx = separate_stains(ihc, hdx_from_rgb) 1289 | """ 1290 | rgb = dtype.img_as_float(rgb, force_copy=True) 1291 | rgb += 2 1292 | stains = np.dot(np.reshape(-np.log(rgb), (-1, 3)), conv_matrix) 1293 | return np.reshape(stains, rgb.shape) 1294 | 1295 | 1296 | def combine_stains(stains, conv_matrix): 1297 | """Stain to RGB color space conversion. 1298 | 1299 | Parameters 1300 | ---------- 1301 | stains : array_like 1302 | The image in stain color space, in a 3-D array of shape 1303 | ``(.., .., 3)``. 1304 | conv_matrix: ndarray 1305 | The stain separation matrix as described by G. Landini [1]_. 1306 | 1307 | Returns 1308 | ------- 1309 | out : ndarray 1310 | The image in RGB format, in a 3-D array of shape ``(.., .., 3)``. 1311 | 1312 | Raises 1313 | ------ 1314 | ValueError 1315 | If `stains` is not a 3-D array of shape ``(.., .., 3)``. 1316 | 1317 | Notes 1318 | ----- 1319 | Stain combination matrices available in the ``color`` module and their 1320 | respective colorspace: 1321 | 1322 | * ``rgb_from_hed``: Hematoxylin + Eosin + DAB 1323 | * ``rgb_from_hdx``: Hematoxylin + DAB 1324 | * ``rgb_from_fgx``: Feulgen + Light Green 1325 | * ``rgb_from_bex``: Giemsa stain : Methyl Blue + Eosin 1326 | * ``rgb_from_rbd``: FastRed + FastBlue + DAB 1327 | * ``rgb_from_gdx``: Methyl Green + DAB 1328 | * ``rgb_from_hax``: Hematoxylin + AEC 1329 | * ``rgb_from_bro``: Blue matrix Anilline Blue + Red matrix Azocarmine\ 1330 | + Orange matrix Orange-G 1331 | * ``rgb_from_bpx``: Methyl Blue + Ponceau Fuchsin 1332 | * ``rgb_from_ahx``: Alcian Blue + Hematoxylin 1333 | * ``rgb_from_hpx``: Hematoxylin + PAS 1334 | 1335 | References 1336 | ---------- 1337 | .. [1] http://www.dentistry.bham.ac.uk/landinig/software/cdeconv/cdeconv.html 1338 | 1339 | 1340 | Examples 1341 | -------- 1342 | >>> from skimage import data 1343 | >>> from skimage.color import (separate_stains, combine_stains, 1344 | ... hdx_from_rgb, rgb_from_hdx) 1345 | >>> ihc = data.immunohistochemistry() 1346 | >>> ihc_hdx = separate_stains(ihc, hdx_from_rgb) 1347 | >>> ihc_rgb = combine_stains(ihc_hdx, rgb_from_hdx) 1348 | """ 1349 | from ..exposure import rescale_intensity 1350 | 1351 | stains = dtype.img_as_float(stains) 1352 | logrgb2 = np.dot(-np.reshape(stains, (-1, 3)), conv_matrix) 1353 | rgb2 = np.exp(logrgb2) 1354 | return rescale_intensity(np.reshape(rgb2 - 2, stains.shape), 1355 | in_range=(-1, 1)) 1356 | 1357 | 1358 | def lab2lch(lab): 1359 | """CIE-LAB to CIE-LCH color space conversion. 1360 | 1361 | LCH is the cylindrical representation of the LAB (Cartesian) colorspace 1362 | 1363 | Parameters 1364 | ---------- 1365 | lab : array_like 1366 | The N-D image in CIE-LAB format. The last (``N+1``-th) dimension must 1367 | have at least 3 elements, corresponding to the ``L``, ``a``, and ``b`` 1368 | color channels. Subsequent elements are copied. 1369 | 1370 | Returns 1371 | ------- 1372 | out : ndarray 1373 | The image in LCH format, in a N-D array with same shape as input `lab`. 1374 | 1375 | Raises 1376 | ------ 1377 | ValueError 1378 | If `lch` does not have at least 3 color channels (i.e. l, a, b). 
1379 | 1380 | Notes 1381 | ----- 1382 | The Hue is expressed as an angle between ``(0, 2*pi)`` 1383 | 1384 | Examples 1385 | -------- 1386 | >>> from skimage import data 1387 | >>> from skimage.color import rgb2lab, lab2lch 1388 | >>> img = data.astronaut() 1389 | >>> img_lab = rgb2lab(img) 1390 | >>> img_lch = lab2lch(img_lab) 1391 | """ 1392 | lch = _prepare_lab_array(lab) 1393 | 1394 | a, b = lch[..., 1], lch[..., 2] 1395 | lch[..., 1], lch[..., 2] = _cart2polar_2pi(a, b) 1396 | return lch 1397 | 1398 | 1399 | def _cart2polar_2pi(x, y): 1400 | """convert cartesian coordinates to polar (uses non-standard theta range!) 1401 | 1402 | NON-STANDARD RANGE! Maps to ``(0, 2*pi)`` rather than usual ``(-pi, +pi)`` 1403 | """ 1404 | r, t = np.hypot(x, y), np.arctan2(y, x) 1405 | t += np.where(t < 0., 2 * np.pi, 0) 1406 | return r, t 1407 | 1408 | 1409 | def lch2lab(lch): 1410 | """CIE-LCH to CIE-LAB color space conversion. 1411 | 1412 | LCH is the cylindrical representation of the LAB (Cartesian) colorspace 1413 | 1414 | Parameters 1415 | ---------- 1416 | lch : array_like 1417 | The N-D image in CIE-LCH format. The last (``N+1``-th) dimension must 1418 | have at least 3 elements, corresponding to the ``L``, ``a``, and ``b`` 1419 | color channels. Subsequent elements are copied. 1420 | 1421 | Returns 1422 | ------- 1423 | out : ndarray 1424 | The image in LAB format, with same shape as input `lch`. 1425 | 1426 | Raises 1427 | ------ 1428 | ValueError 1429 | If `lch` does not have at least 3 color channels (i.e. l, c, h). 1430 | 1431 | Examples 1432 | -------- 1433 | >>> from skimage import data 1434 | >>> from skimage.color import rgb2lab, lch2lab 1435 | >>> img = data.astronaut() 1436 | >>> img_lab = rgb2lab(img) 1437 | >>> img_lch = lab2lch(img_lab) 1438 | >>> img_lab2 = lch2lab(img_lch) 1439 | """ 1440 | lch = _prepare_lab_array(lch) 1441 | 1442 | c, h = lch[..., 1], lch[..., 2] 1443 | lch[..., 1], lch[..., 2] = c * np.cos(h), c * np.sin(h) 1444 | return lch 1445 | 1446 | 1447 | def _prepare_lab_array(arr): 1448 | """Ensure input for lab2lch, lch2lab are well-posed. 1449 | 1450 | Arrays must be in floating point and have at least 3 elements in 1451 | last dimension. Return a new array. 1452 | """ 1453 | arr = np.asarray(arr) 1454 | shape = arr.shape 1455 | if shape[-1] < 3: 1456 | raise ValueError('Input array has less than 3 color channels') 1457 | return dtype.img_as_float(arr, force_copy=True) 1458 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/colorconv.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/colorconv.pyc -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/colorlabel.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | 3 | import numpy as np 4 | 5 | from .._shared.utils import warn 6 | from .. import img_as_float 7 | from . 
import rgb_colors 8 | from .colorconv import rgb2gray, gray2rgb 9 | 10 | import six 11 | from six.moves import zip 12 | 13 | 14 | __all__ = ['color_dict', 'label2rgb', 'DEFAULT_COLORS'] 15 | 16 | 17 | DEFAULT_COLORS = ('red', 'blue', 'yellow', 'magenta', 'green', 18 | 'indigo', 'darkorange', 'cyan', 'pink', 'yellowgreen') 19 | DEFAULT_COLORS1 = ('maroon', 'lime', 'olive', 'navy', 'purple', 'teal', 20 | 'gray', 'fcncat', 'fcnchair', 'fcncow', 'fcndining', 21 | 'fcndog', 'fcnhorse', 'fcnmotor', 'fcnperson', 'fcnpotte', 22 | 'fcnsheep', 'fcnsofa', 'fcntrain', 'fcntv') 23 | 24 | """ 25 | Authuor: Yawei Li 26 | R G B 27 | background 0 0 0 28 | aeroplane 128 0 0 29 | bicycle 0 128 0 30 | bird 128 128 0 31 | boat 0 0 128 32 | bottle 128 0 128 33 | bus 0 128 128 34 | car 128 128 128 35 | 36 | cat 64 0 0 37 | chair 192 0 0 38 | cow 64 128 0 39 | diningtable 192 128 0 40 | dog 64 0 128 41 | horse 192 0 128 42 | motorbike 64 128 128 43 | person 192 128 128 44 | pottedplant 0 64 0 45 | sheep 128 64 0 46 | sofa 0 192 0 47 | train 128 192 0 48 | tvmonitor 0 64 128 49 | """ 50 | 51 | color_dict = dict((k, v) for k, v in six.iteritems(rgb_colors.__dict__) 52 | if isinstance(v, tuple)) 53 | 54 | 55 | def _rgb_vector(color): 56 | """Return RGB color as (1, 3) array. 57 | 58 | This RGB array gets multiplied by masked regions of an RGB image, which are 59 | partially flattened by masking (i.e. dimensions 2D + RGB -> 1D + RGB). 60 | 61 | Parameters 62 | ---------- 63 | color : str or array 64 | Color name in `color_dict` or RGB float values between [0, 1]. 65 | """ 66 | if isinstance(color, six.string_types): 67 | color = color_dict[color] 68 | # Slice to handle RGBA colors. 69 | return np.array(color[:3]) 70 | 71 | 72 | def _match_label_with_color(label, colors, bg_label, bg_color): 73 | """Return `unique_labels` and `color_cycle` for label array and color list. 74 | 75 | Colors are cycled for normal labels, but the background color should only 76 | be used for the background. 77 | """ 78 | # Temporarily set background color; it will be removed later. 79 | if bg_color is None: 80 | bg_color = (0, 0, 0) 81 | bg_color = _rgb_vector([bg_color]) 82 | 83 | unique_labels = list(set(label.flat)) 84 | # Ensure that the background label is in front to match call to `chain`. 85 | if bg_label in unique_labels: 86 | unique_labels.remove(bg_label) 87 | unique_labels.insert(0, bg_label) 88 | 89 | # Modify labels and color cycle so background color is used only once. 90 | color_cycle = itertools.cycle(colors) 91 | color_cycle = itertools.chain(bg_color, color_cycle) 92 | 93 | return unique_labels, color_cycle 94 | 95 | 96 | def label2rgb(label, image=None, colors=None, alpha=0.3, 97 | bg_label=-1, bg_color=(0, 0, 0), image_alpha=1, kind='overlay'): 98 | """Return an RGB image where color-coded labels are painted over the image. 99 | 100 | Parameters 101 | ---------- 102 | label : array, shape (M, N) 103 | Integer array of labels with the same shape as `image`. 104 | image : array, shape (M, N, 3), optional 105 | Image used as underlay for labels. If the input is an RGB image, it's 106 | converted to grayscale before coloring. 107 | colors : list, optional 108 | List of colors. If the number of labels exceeds the number of colors, 109 | then the colors are cycled. 110 | alpha : float [0, 1], optional 111 | Opacity of colorized labels. Ignored if image is `None`. 112 | bg_label : int, optional 113 | Label that's treated as the background. 114 | bg_color : str or array, optional 115 | Background color. 
Must be a name in `color_dict` or RGB float values 116 | between [0, 1]. 117 | image_alpha : float [0, 1], optional 118 | Opacity of the image. 119 | kind : string, one of {'overlay', 'avg'} 120 | The kind of color image desired. 'overlay' cycles over defined colors 121 | and overlays the colored labels over the original image. 'avg' replaces 122 | each labeled segment with its average color, for a stained-class or 123 | pastel painting appearance. 124 | 125 | Returns 126 | ------- 127 | result : array of float, shape (M, N, 3) 128 | The result of blending a cycling colormap (`colors`) for each distinct 129 | value in `label` with the image, at a certain alpha value. 130 | """ 131 | if kind == 'overlay': 132 | return _label2rgb_overlay(label, image, colors, alpha, bg_label, 133 | bg_color, image_alpha) 134 | else: 135 | return _label2rgb_avg(label, image, bg_label, bg_color) 136 | 137 | 138 | def _label2rgb_overlay(label, image=None, colors=None, alpha=0.3, 139 | bg_label=-1, bg_color=None, image_alpha=1): 140 | """Return an RGB image where color-coded labels are painted over the image. 141 | 142 | Parameters 143 | ---------- 144 | label : array, shape (M, N) 145 | Integer array of labels with the same shape as `image`. 146 | image : array, shape (M, N, 3), optional 147 | Image used as underlay for labels. If the input is an RGB image, it's 148 | converted to grayscale before coloring. 149 | colors : list, optional 150 | List of colors. If the number of labels exceeds the number of colors, 151 | then the colors are cycled. 152 | alpha : float [0, 1], optional 153 | Opacity of colorized labels. Ignored if image is `None`. 154 | bg_label : int, optional 155 | Label that's treated as the background. 156 | bg_color : str or array, optional 157 | Background color. Must be a name in `color_dict` or RGB float values 158 | between [0, 1]. 159 | image_alpha : float [0, 1], optional 160 | Opacity of the image. 161 | 162 | Returns 163 | ------- 164 | result : array of float, shape (M, N, 3) 165 | The result of blending a cycling colormap (`colors`) for each distinct 166 | value in `label` with the image, at a certain alpha value. 167 | """ 168 | if colors is None: 169 | colors = DEFAULT_COLORS1 170 | colors = [_rgb_vector(c) for c in colors] 171 | 172 | if image is None: 173 | image = np.zeros(label.shape + (3,), dtype=np.float64) 174 | # Opacity doesn't make sense if no image exists. 175 | alpha = 1 176 | else: 177 | if not image.shape[:2] == label.shape: 178 | raise ValueError("`image` and `label` must be the same shape") 179 | 180 | if image.min() < 0: 181 | warn("Negative intensities in `image` are not supported") 182 | 183 | image = img_as_float(rgb2gray(image)) 184 | image = gray2rgb(image) * image_alpha + (1 - image_alpha) 185 | 186 | # Ensure that all labels are non-negative so we can index into 187 | # `label_to_color` correctly. 188 | offset = min(label.min(), bg_label) 189 | if offset != 0: 190 | label = label - offset # Make sure you don't modify the input array. 
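        # `label - offset` allocates a new array, so the caller's label image is
        # never modified in place; `bg_label` is shifted by the same offset just
        # below so it keeps referring to the same label value.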
191 | bg_label -= offset 192 | 193 | new_type = np.min_scalar_type(int(label.max())) 194 | if new_type == np.bool: 195 | new_type = np.uint8 196 | label = label.astype(new_type) 197 | 198 | unique_labels, color_cycle = _match_label_with_color(label, colors, 199 | bg_label, bg_color) 200 | 201 | if len(unique_labels) == 0: 202 | return image 203 | 204 | dense_labels = range(max(unique_labels) + 1) 205 | label_to_color = np.array([c for i, c in zip(dense_labels, color_cycle)]) 206 | 207 | result = label_to_color[label] * alpha + image * (1 - alpha) 208 | 209 | # Remove background label if its color was not specified. 210 | remove_background = bg_label in unique_labels and bg_color is None 211 | if remove_background: 212 | result[label == bg_label] = image[label == bg_label] 213 | 214 | return result 215 | 216 | 217 | def _label2rgb_avg(label_field, image, bg_label=0, bg_color=(0, 0, 0)): 218 | """Visualise each segment in `label_field` with its mean color in `image`. 219 | 220 | Parameters 221 | ---------- 222 | label_field : array of int 223 | A segmentation of an image. 224 | image : array, shape ``label_field.shape + (3,)`` 225 | A color image of the same spatial shape as `label_field`. 226 | bg_label : int, optional 227 | A value in `label_field` to be treated as background. 228 | bg_color : 3-tuple of int, optional 229 | The color for the background label 230 | 231 | Returns 232 | ------- 233 | out : array, same shape and type as `image` 234 | The output visualization. 235 | """ 236 | out = np.zeros_like(image) 237 | labels = np.unique(label_field) 238 | bg = (labels == bg_label) 239 | if bg.any(): 240 | labels = labels[labels != bg_label] 241 | out[bg] = bg_color 242 | for label in labels: 243 | mask = (label_field == label).nonzero() 244 | color = image[mask].mean(axis=0) 245 | out[mask] = color 246 | return out 247 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/colorlabel.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/colorlabel.pyc -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/delta_e.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions for calculating the "distance" between colors. 3 | 4 | Implicit in these definitions of "distance" is the notion of "Just Noticeable 5 | Distance" (JND). This represents the distance between colors where a human can 6 | perceive different colors. Humans are more sensitive to certain colors than 7 | others, which different deltaE metrics correct for with varying degrees of 8 | sophistication. 9 | 10 | The literature often mentions 1 as the minimum distance for visual 11 | differentiation, but more recent studies (Mahy 1994) peg JND at 2.3 12 | 13 | The delta-E notation comes from the German word for "Sensation" (Empfindung). 
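A minimal usage sketch (illustrative values only, rounded to two decimals)::

    >>> import numpy as np
    >>> lab1 = np.array([50., 2.5, 0.])    # reference color in Lab
    >>> lab2 = np.array([50., 0., -2.5])   # comparison color in Lab
    >>> float(np.round(deltaE_cie76(lab1, lab2), 2))
    3.54

The weighted variants below (``deltaE_ciede94``, ``deltaE_ciede2000``,
``deltaE_cmc``) start from the same Lab differences but rescale the lightness,
chroma, and hue components before combining them.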
14 | 15 | Reference 16 | --------- 17 | http://en.wikipedia.org/wiki/Color_difference 18 | 19 | """ 20 | from __future__ import division 21 | 22 | import numpy as np 23 | 24 | from ..color.colorconv import lab2lch, _cart2polar_2pi 25 | 26 | 27 | def deltaE_cie76(lab1, lab2): 28 | """Euclidean distance between two points in Lab color space 29 | 30 | Parameters 31 | ---------- 32 | lab1 : array_like 33 | reference color (Lab colorspace) 34 | lab2 : array_like 35 | comparison color (Lab colorspace) 36 | 37 | Returns 38 | ------- 39 | dE : array_like 40 | distance between colors `lab1` and `lab2` 41 | 42 | References 43 | ---------- 44 | .. [1] http://en.wikipedia.org/wiki/Color_difference 45 | .. [2] A. R. Robertson, "The CIE 1976 color-difference formulae," 46 | Color Res. Appl. 2, 7-11 (1977). 47 | """ 48 | lab1 = np.asarray(lab1) 49 | lab2 = np.asarray(lab2) 50 | L1, a1, b1 = np.rollaxis(lab1, -1)[:3] 51 | L2, a2, b2 = np.rollaxis(lab2, -1)[:3] 52 | return np.sqrt((L2 - L1) ** 2 + (a2 - a1) ** 2 + (b2 - b1) ** 2) 53 | 54 | 55 | def deltaE_ciede94(lab1, lab2, kH=1, kC=1, kL=1, k1=0.045, k2=0.015): 56 | """Color difference according to CIEDE 94 standard 57 | 58 | Accommodates perceptual non-uniformities through the use of application 59 | specific scale factors (`kH`, `kC`, `kL`, `k1`, and `k2`). 60 | 61 | Parameters 62 | ---------- 63 | lab1 : array_like 64 | reference color (Lab colorspace) 65 | lab2 : array_like 66 | comparison color (Lab colorspace) 67 | kH : float, optional 68 | Hue scale 69 | kC : float, optional 70 | Chroma scale 71 | kL : float, optional 72 | Lightness scale 73 | k1 : float, optional 74 | first scale parameter 75 | k2 : float, optional 76 | second scale parameter 77 | 78 | Returns 79 | ------- 80 | dE : array_like 81 | color difference between `lab1` and `lab2` 82 | 83 | Notes 84 | ----- 85 | deltaE_ciede94 is not symmetric with respect to lab1 and lab2. CIEDE94 86 | defines the scales for the lightness, hue, and chroma in terms of the first 87 | color. Consequently, the first color should be regarded as the "reference" 88 | color. 89 | 90 | `kL`, `k1`, `k2` depend on the application and default to the values 91 | suggested for graphic arts 92 | 93 | ========== ============== ========== 94 | Parameter Graphic Arts Textiles 95 | ========== ============== ========== 96 | `kL` 1.000 2.000 97 | `k1` 0.045 0.048 98 | `k2` 0.015 0.014 99 | ========== ============== ========== 100 | 101 | References 102 | ---------- 103 | .. [1] http://en.wikipedia.org/wiki/Color_difference 104 | .. [2] http://www.brucelindbloom.com/index.html?Eqn_DeltaE_CIE94.html 105 | """ 106 | L1, C1 = np.rollaxis(lab2lch(lab1), -1)[:2] 107 | L2, C2 = np.rollaxis(lab2lch(lab2), -1)[:2] 108 | 109 | dL = L1 - L2 110 | dC = C1 - C2 111 | dH2 = get_dH2(lab1, lab2) 112 | 113 | SL = 1 114 | SC = 1 + k1 * C1 115 | SH = 1 + k2 * C1 116 | 117 | dE2 = (dL / (kL * SL)) ** 2 118 | dE2 += (dC / (kC * SC)) ** 2 119 | dE2 += dH2 / (kH * SH) ** 2 120 | return np.sqrt(dE2) 121 | 122 | 123 | def deltaE_ciede2000(lab1, lab2, kL=1, kC=1, kH=1): 124 | """Color difference as given by the CIEDE 2000 standard. 125 | 126 | CIEDE 2000 is a major revision of CIDE94. The perceptual calibration is 127 | largely based on experience with automotive paint on smooth surfaces. 
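    As an illustrative spot-check, the first pair of the reference table shipped
    in ``tests/ciede2000_test_data.txt`` gives a difference of about 2.0425::

        >>> lab1 = [50.0000, 2.6772, -79.7751]   # pair 1, first sample
        >>> lab2 = [50.0000, 0.0000, -82.7485]   # pair 1, second sample
        >>> dE = deltaE_ciede2000(lab1, lab2)    # ~2.0425 per the reference table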
128 | 129 | Parameters 130 | ---------- 131 | lab1 : array_like 132 | reference color (Lab colorspace) 133 | lab2 : array_like 134 | comparison color (Lab colorspace) 135 | kL : float (range), optional 136 | lightness scale factor, 1 for "acceptably close"; 2 for "imperceptible" 137 | see deltaE_cmc 138 | kC : float (range), optional 139 | chroma scale factor, usually 1 140 | kH : float (range), optional 141 | hue scale factor, usually 1 142 | 143 | Returns 144 | ------- 145 | deltaE : array_like 146 | The distance between `lab1` and `lab2` 147 | 148 | Notes 149 | ----- 150 | CIEDE 2000 assumes parametric weighting factors for the lightness, chroma, 151 | and hue (`kL`, `kC`, `kH` respectively). These default to 1. 152 | 153 | References 154 | ---------- 155 | .. [1] http://en.wikipedia.org/wiki/Color_difference 156 | .. [2] http://www.ece.rochester.edu/~gsharma/ciede2000/ciede2000noteCRNA.pdf 157 | (doi:10.1364/AO.33.008069) 158 | .. [3] M. Melgosa, J. Quesada, and E. Hita, "Uniformity of some recent 159 | color metrics tested with an accurate color-difference tolerance 160 | dataset," Appl. Opt. 33, 8069-8077 (1994). 161 | """ 162 | lab1 = np.asarray(lab1) 163 | lab2 = np.asarray(lab2) 164 | unroll = False 165 | if lab1.ndim == 1 and lab2.ndim == 1: 166 | unroll = True 167 | if lab1.ndim == 1: 168 | lab1 = lab1[None, :] 169 | if lab2.ndim == 1: 170 | lab2 = lab2[None, :] 171 | L1, a1, b1 = np.rollaxis(lab1, -1)[:3] 172 | L2, a2, b2 = np.rollaxis(lab2, -1)[:3] 173 | 174 | # distort `a` based on average chroma 175 | # then convert to lch coordines from distorted `a` 176 | # all subsequence calculations are in the new coordiantes 177 | # (often denoted "prime" in the literature) 178 | Cbar = 0.5 * (np.hypot(a1, b1) + np.hypot(a2, b2)) 179 | c7 = Cbar ** 7 180 | G = 0.5 * (1 - np.sqrt(c7 / (c7 + 25 ** 7))) 181 | scale = 1 + G 182 | C1, h1 = _cart2polar_2pi(a1 * scale, b1) 183 | C2, h2 = _cart2polar_2pi(a2 * scale, b2) 184 | # recall that c, h are polar coordiantes. c==r, h==theta 185 | 186 | # cide2000 has four terms to delta_e: 187 | # 1) Luminance term 188 | # 2) Hue term 189 | # 3) Chroma term 190 | # 4) hue Rotation term 191 | 192 | # lightness term 193 | Lbar = 0.5 * (L1 + L2) 194 | tmp = (Lbar - 50) ** 2 195 | SL = 1 + 0.015 * tmp / np.sqrt(20 + tmp) 196 | L_term = (L2 - L1) / (kL * SL) 197 | 198 | # chroma term 199 | Cbar = 0.5 * (C1 + C2) # new coordiantes 200 | SC = 1 + 0.045 * Cbar 201 | C_term = (C2 - C1) / (kC * SC) 202 | 203 | # hue term 204 | h_diff = h2 - h1 205 | h_sum = h1 + h2 206 | CC = C1 * C2 207 | 208 | dH = h_diff.copy() 209 | dH[h_diff > np.pi] -= 2 * np.pi 210 | dH[h_diff < -np.pi] += 2 * np.pi 211 | dH[CC == 0.] = 0. # if r == 0, dtheta == 0 212 | dH_term = 2 * np.sqrt(CC) * np.sin(dH / 2) 213 | 214 | Hbar = h_sum.copy() 215 | mask = np.logical_and(CC != 0., np.abs(h_diff) > np.pi) 216 | Hbar[mask * (h_sum < 2 * np.pi)] += 2 * np.pi 217 | Hbar[mask * (h_sum >= 2 * np.pi)] -= 2 * np.pi 218 | Hbar[CC == 0.] 
*= 2 219 | Hbar *= 0.5 220 | 221 | T = (1 - 222 | 0.17 * np.cos(Hbar - np.deg2rad(30)) + 223 | 0.24 * np.cos(2 * Hbar) + 224 | 0.32 * np.cos(3 * Hbar + np.deg2rad(6)) - 225 | 0.20 * np.cos(4 * Hbar - np.deg2rad(63)) 226 | ) 227 | SH = 1 + 0.015 * Cbar * T 228 | 229 | H_term = dH_term / (kH * SH) 230 | 231 | # hue rotation 232 | c7 = Cbar ** 7 233 | Rc = 2 * np.sqrt(c7 / (c7 + 25 ** 7)) 234 | dtheta = np.deg2rad(30) * np.exp(-((np.rad2deg(Hbar) - 275) / 25) ** 2) 235 | R_term = -np.sin(2 * dtheta) * Rc * C_term * H_term 236 | 237 | # put it all together 238 | dE2 = L_term ** 2 239 | dE2 += C_term ** 2 240 | dE2 += H_term ** 2 241 | dE2 += R_term 242 | ans = np.sqrt(dE2) 243 | if unroll: 244 | ans = ans[0] 245 | return ans 246 | 247 | 248 | def deltaE_cmc(lab1, lab2, kL=1, kC=1): 249 | """Color difference from the CMC l:c standard. 250 | 251 | This color difference was developed by the Colour Measurement Committee 252 | (CMC) of the Society of Dyers and Colourists (United Kingdom). It is 253 | intended for use in the textile industry. 254 | 255 | The scale factors `kL`, `kC` set the weight given to differences in 256 | lightness and chroma relative to differences in hue. The usual values are 257 | ``kL=2``, ``kC=1`` for "acceptability" and ``kL=1``, ``kC=1`` for 258 | "imperceptibility". Colors with ``dE > 1`` are "different" for the given 259 | scale factors. 260 | 261 | Parameters 262 | ---------- 263 | lab1 : array_like 264 | reference color (Lab colorspace) 265 | lab2 : array_like 266 | comparison color (Lab colorspace) 267 | 268 | Returns 269 | ------- 270 | dE : array_like 271 | distance between colors `lab1` and `lab2` 272 | 273 | Notes 274 | ----- 275 | deltaE_cmc the defines the scales for the lightness, hue, and chroma 276 | in terms of the first color. Consequently 277 | ``deltaE_cmc(lab1, lab2) != deltaE_cmc(lab2, lab1)`` 278 | 279 | References 280 | ---------- 281 | .. [1] http://en.wikipedia.org/wiki/Color_difference 282 | .. [2] http://www.brucelindbloom.com/index.html?Eqn_DeltaE_CIE94.html 283 | .. [3] F. J. J. Clarke, R. McDonald, and B. Rigg, "Modification to the 284 | JPC79 colour-difference formula," J. Soc. Dyers Colour. 100, 128-132 285 | (1984). 286 | """ 287 | L1, C1, h1 = np.rollaxis(lab2lch(lab1), -1)[:3] 288 | L2, C2, h2 = np.rollaxis(lab2lch(lab2), -1)[:3] 289 | 290 | dC = C1 - C2 291 | dL = L1 - L2 292 | dH2 = get_dH2(lab1, lab2) 293 | 294 | T = np.where(np.logical_and(np.rad2deg(h1) >= 164, np.rad2deg(h1) <= 345), 295 | 0.56 + 0.2 * np.abs(np.cos(h1 + np.deg2rad(168))), 296 | 0.36 + 0.4 * np.abs(np.cos(h1 + np.deg2rad(35))) 297 | ) 298 | c1_4 = C1 ** 4 299 | F = np.sqrt(c1_4 / (c1_4 + 1900)) 300 | 301 | SL = np.where(L1 < 16, 0.511, 0.040975 * L1 / (1. + 0.01765 * L1)) 302 | SC = 0.638 + 0.0638 * C1 / (1. + 0.0131 * C1) 303 | SH = SC * (F * T + 1 - F) 304 | 305 | dE2 = (dL / (kL * SL)) ** 2 306 | dE2 += (dC / (kC * SC)) ** 2 307 | dE2 += dH2 / (SH ** 2) 308 | return np.sqrt(dE2) 309 | 310 | 311 | def get_dH2(lab1, lab2): 312 | """squared hue difference term occurring in deltaE_cmc and deltaE_ciede94 313 | 314 | Despite its name, "dH" is not a simple difference of hue values. We avoid 315 | working directly with the hue value, since differencing angles is 316 | troublesome. The hue term is usually written as: 317 | c1 = sqrt(a1**2 + b1**2) 318 | c2 = sqrt(a2**2 + b2**2) 319 | term = (a1-a2)**2 + (b1-b2)**2 - (c1-c2)**2 320 | dH = sqrt(term) 321 | 322 | However, this has poor roundoff properties when a or b is dominant. 
323 | Instead, ab is a vector with elements a and b. The same dH term can be 324 | re-written as: 325 | |ab1-ab2|**2 - (|ab1| - |ab2|)**2 326 | and then simplified to: 327 | 2*|ab1|*|ab2| - 2*dot(ab1, ab2) 328 | """ 329 | lab1 = np.asarray(lab1) 330 | lab2 = np.asarray(lab2) 331 | a1, b1 = np.rollaxis(lab1, -1)[1:3] 332 | a2, b2 = np.rollaxis(lab2, -1)[1:3] 333 | 334 | # magnitude of (a, b) is the chroma 335 | C1 = np.hypot(a1, b1) 336 | C2 = np.hypot(a2, b2) 337 | 338 | term = (C1 * C2) - (a1 * a2 + b1 * b2) 339 | return 2 * term 340 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/delta_e.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/delta_e.pyc -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/rgb_colors.py: -------------------------------------------------------------------------------- 1 | aliceblue = (0.941, 0.973, 1) 2 | antiquewhite = (0.98, 0.922, 0.843) 3 | aqua = (0, 1, 1) 4 | aquamarine = (0.498, 1, 0.831) 5 | azure = (0.941, 1, 1) 6 | beige = (0.961, 0.961, 0.863) 7 | bisque = (1, 0.894, 0.769) 8 | black = (0, 0, 0) 9 | blanchedalmond = (1, 0.922, 0.804) 10 | blue = (0, 0, 1) 11 | blueviolet = (0.541, 0.169, 0.886) 12 | brown = (0.647, 0.165, 0.165) 13 | burlywood = (0.871, 0.722, 0.529) 14 | cadetblue = (0.373, 0.62, 0.627) 15 | chartreuse = (0.498, 1, 0) 16 | chocolate = (0.824, 0.412, 0.118) 17 | coral = (1, 0.498, 0.314) 18 | cornflowerblue = (0.392, 0.584, 0.929) 19 | cornsilk = (1, 0.973, 0.863) 20 | crimson = (0.863, 0.0784, 0.235) 21 | cyan = (0, 1, 1) 22 | darkblue = (0, 0, 0.545) 23 | darkcyan = (0, 0.545, 0.545) 24 | darkgoldenrod = (0.722, 0.525, 0.0431) 25 | darkgray = (0.663, 0.663, 0.663) 26 | darkgreen = (0, 0.392, 0) 27 | darkgrey = (0.663, 0.663, 0.663) 28 | darkkhaki = (0.741, 0.718, 0.42) 29 | darkmagenta = (0.545, 0, 0.545) 30 | darkolivegreen = (0.333, 0.42, 0.184) 31 | darkorange = (1, 0.549, 0) 32 | darkorchid = (0.6, 0.196, 0.8) 33 | darkred = (0.545, 0, 0) 34 | darksalmon = (0.914, 0.588, 0.478) 35 | darkseagreen = (0.561, 0.737, 0.561) 36 | darkslateblue = (0.282, 0.239, 0.545) 37 | darkslategray = (0.184, 0.31, 0.31) 38 | darkslategrey = (0.184, 0.31, 0.31) 39 | darkturquoise = (0, 0.808, 0.82) 40 | darkviolet = (0.58, 0, 0.827) 41 | deeppink = (1, 0.0784, 0.576) 42 | deepskyblue = (0, 0.749, 1) 43 | dimgray = (0.412, 0.412, 0.412) 44 | dimgrey = (0.412, 0.412, 0.412) 45 | dodgerblue = (0.118, 0.565, 1) 46 | firebrick = (0.698, 0.133, 0.133) 47 | floralwhite = (1, 0.98, 0.941) 48 | forestgreen = (0.133, 0.545, 0.133) 49 | fuchsia = (1, 0, 1) 50 | gainsboro = (0.863, 0.863, 0.863) 51 | ghostwhite = (0.973, 0.973, 1) 52 | gold = (1, 0.843, 0) 53 | goldenrod = (0.855, 0.647, 0.125) 54 | gray = (0.502, 0.502, 0.502) 55 | green = (0, 0.502, 0) 56 | greenyellow = (0.678, 1, 0.184) 57 | grey = (0.502, 0.502, 0.502) 58 | honeydew = (0.941, 1, 0.941) 59 | hotpink = (1, 0.412, 0.706) 60 | indianred = (0.804, 0.361, 0.361) 61 | indigo = (0.294, 0, 0.51) 62 | ivory = (1, 1, 0.941) 63 | khaki = (0.941, 0.902, 0.549) 64 | lavender = (0.902, 0.902, 0.98) 65 | lavenderblush = (1, 0.941, 0.961) 66 | lawngreen = (0.486, 0.988, 0) 67 | lemonchiffon = (1, 0.98, 0.804) 68 | lightblue = (0.678, 0.847, 0.902) 69 | lightcoral = (0.941, 0.502, 0.502) 70 | lightcyan = (0.878, 1, 1) 71 | 
lightgoldenrodyellow = (0.98, 0.98, 0.824) 72 | lightgray = (0.827, 0.827, 0.827) 73 | lightgreen = (0.565, 0.933, 0.565) 74 | lightgrey = (0.827, 0.827, 0.827) 75 | lightpink = (1, 0.714, 0.757) 76 | lightsalmon = (1, 0.627, 0.478) 77 | lightseagreen = (0.125, 0.698, 0.667) 78 | lightskyblue = (0.529, 0.808, 0.98) 79 | lightslategray = (0.467, 0.533, 0.6) 80 | lightslategrey = (0.467, 0.533, 0.6) 81 | lightsteelblue = (0.69, 0.769, 0.871) 82 | lightyellow = (1, 1, 0.878) 83 | lime = (0, 1, 0) 84 | limegreen = (0.196, 0.804, 0.196) 85 | linen = (0.98, 0.941, 0.902) 86 | magenta = (1, 0, 1) 87 | maroon = (0.502, 0, 0) 88 | mediumaquamarine = (0.4, 0.804, 0.667) 89 | mediumblue = (0, 0, 0.804) 90 | mediumorchid = (0.729, 0.333, 0.827) 91 | mediumpurple = (0.576, 0.439, 0.859) 92 | mediumseagreen = (0.235, 0.702, 0.443) 93 | mediumslateblue = (0.482, 0.408, 0.933) 94 | mediumspringgreen = (0, 0.98, 0.604) 95 | mediumturquoise = (0.282, 0.82, 0.8) 96 | mediumvioletred = (0.78, 0.0824, 0.522) 97 | midnightblue = (0.098, 0.098, 0.439) 98 | mintcream = (0.961, 1, 0.98) 99 | mistyrose = (1, 0.894, 0.882) 100 | moccasin = (1, 0.894, 0.71) 101 | navajowhite = (1, 0.871, 0.678) 102 | navy = (0, 0, 0.502) 103 | oldlace = (0.992, 0.961, 0.902) 104 | olive = (0.502, 0.502, 0) 105 | olivedrab = (0.42, 0.557, 0.137) 106 | orange = (1, 0.647, 0) 107 | orangered = (1, 0.271, 0) 108 | orchid = (0.855, 0.439, 0.839) 109 | palegoldenrod = (0.933, 0.91, 0.667) 110 | palegreen = (0.596, 0.984, 0.596) 111 | palevioletred = (0.686, 0.933, 0.933) 112 | papayawhip = (1, 0.937, 0.835) 113 | peachpuff = (1, 0.855, 0.725) 114 | peru = (0.804, 0.522, 0.247) 115 | pink = (1, 0.753, 0.796) 116 | plum = (0.867, 0.627, 0.867) 117 | powderblue = (0.69, 0.878, 0.902) 118 | purple = (0.502, 0, 0.502) 119 | red = (1, 0, 0) 120 | rosybrown = (0.737, 0.561, 0.561) 121 | royalblue = (0.255, 0.412, 0.882) 122 | saddlebrown = (0.545, 0.271, 0.0745) 123 | salmon = (0.98, 0.502, 0.447) 124 | sandybrown = (0.98, 0.643, 0.376) 125 | seagreen = (0.18, 0.545, 0.341) 126 | seashell = (1, 0.961, 0.933) 127 | sienna = (0.627, 0.322, 0.176) 128 | silver = (0.753, 0.753, 0.753) 129 | skyblue = (0.529, 0.808, 0.922) 130 | slateblue = (0.416, 0.353, 0.804) 131 | slategray = (0.439, 0.502, 0.565) 132 | slategrey = (0.439, 0.502, 0.565) 133 | snow = (1, 0.98, 0.98) 134 | springgreen = (0, 1, 0.498) 135 | steelblue = (0.275, 0.51, 0.706) 136 | tan = (0.824, 0.706, 0.549) 137 | teal = (0, 0.502, 0.502) 138 | thistle = (0.847, 0.749, 0.847) 139 | tomato = (1, 0.388, 0.278) 140 | turquoise = (0.251, 0.878, 0.816) 141 | violet = (0.933, 0.51, 0.933) 142 | wheat = (0.961, 0.871, 0.702) 143 | white = (1, 1, 1) 144 | whitesmoke = (0.961, 0.961, 0.961) 145 | yellow = (1, 1, 0) 146 | yellowgreen = (0.604, 0.804, 0.196) 147 | fcnchair = (0.753, 0, 0) 148 | fcncat = (0.251, 0, 0) 149 | fcncow = (0.251, 0.502, 0) 150 | fcndining = (0.753, 0.502, 0) 151 | fcndog = (0.251, 0, 0.502) 152 | fcnhorse = (0.753, 0, 0.502) 153 | fcnmotor = (0.251, 0.502, 0.502) 154 | fcnperson = (0.753, 0.502, 0.502) 155 | fcnpotte = (0, 0.251, 0) 156 | fcnsheep = (0.502, 0.251, 0) 157 | fcnsofa = (0, 0.753, 0) 158 | fcntrain = (0.502, 0.753, 0) 159 | fcntv = (0, 0.251, 0.502) 160 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/rgb_colors.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/rgb_colors.pyc -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/__init__.py: -------------------------------------------------------------------------------- 1 | from ..._shared.testing import setup_test, teardown_test 2 | 3 | 4 | def setup(): 5 | setup_test() 6 | 7 | 8 | def teardown(): 9 | teardown_test() 10 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/__init__.pyc -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/ciede2000_test_data.txt: -------------------------------------------------------------------------------- 1 | # input, intermediate, and output values for CIEDE2000 dE function 2 | # data taken from "The CIEDE2000 Color-Difference Formula: Implementation Notes, ..." http://www.ece.rochester.edu/~gsharma/ciede2000/ciede2000noteCRNA.pdf 3 | # tab delimited data 4 | # pair 1 L1 a1 b1 ap1 cp1 hp1 hbar1 G T SL SC SH RT dE 2 L2 a2 b2 ap2 cp2 hp2 5 | 1 1 50.0000 2.6772 -79.7751 2.6774 79.8200 271.9222 270.9611 0.0001 0.6907 1.0000 4.6578 1.8421 -1.7042 2.0425 2 50.0000 0.0000 -82.7485 0.0000 82.7485 270.0000 6 | 2 1 50.0000 3.1571 -77.2803 3.1573 77.3448 272.3395 271.1698 0.0001 0.6843 1.0000 4.6021 1.8216 -1.7070 2.8615 2 50.0000 0.0000 -82.7485 0.0000 82.7485 270.0000 7 | 3 1 50.0000 2.8361 -74.0200 2.8363 74.0743 272.1944 271.0972 0.0001 0.6865 1.0000 4.5285 1.8074 -1.7060 3.4412 2 50.0000 0.0000 -82.7485 0.0000 82.7485 270.0000 8 | 4 1 50.0000 -1.3802 -84.2814 -1.3803 84.2927 269.0618 269.5309 0.0001 0.7357 1.0000 4.7584 1.9217 -1.6809 1.0000 2 50.0000 0.0000 -82.7485 0.0000 82.7485 270.0000 9 | 5 1 50.0000 -1.1848 -84.8006 -1.1849 84.8089 269.1995 269.5997 0.0001 0.7335 1.0000 4.7700 1.9218 -1.6822 1.0000 2 50.0000 0.0000 -82.7485 0.0000 82.7485 270.0000 10 | 6 1 50.0000 -0.9009 -85.5211 -0.9009 85.5258 269.3964 269.6982 0.0001 0.7303 1.0000 4.7862 1.9217 -1.6840 1.0000 2 50.0000 0.0000 -82.7485 0.0000 82.7485 270.0000 11 | 7 1 50.0000 0.0000 0.0000 0.0000 0.0000 0.0000 126.8697 0.5000 1.2200 1.0000 1.0562 1.0229 0.0000 2.3669 2 50.0000 -1.0000 2.0000 -1.5000 2.5000 126.8697 12 | 8 1 50.0000 -1.0000 2.0000 -1.5000 2.5000 126.8697 126.8697 0.5000 1.2200 1.0000 1.0562 1.0229 0.0000 2.3669 2 50.0000 0.0000 0.0000 0.0000 0.0000 0.0000 13 | 9 1 50.0000 2.4900 -0.0010 3.7346 3.7346 359.9847 269.9854 0.4998 0.7212 1.0000 1.1681 1.0404 -0.0022 7.1792 2 50.0000 -2.4900 0.0009 -3.7346 3.7346 179.9862 14 | 10 1 50.0000 2.4900 -0.0010 3.7346 3.7346 359.9847 269.9847 0.4998 0.7212 1.0000 1.1681 1.0404 -0.0022 7.1792 2 50.0000 -2.4900 0.0010 -3.7346 3.7346 179.9847 15 | 11 1 50.0000 2.4900 -0.0010 3.7346 3.7346 359.9847 89.9839 0.4998 0.6175 1.0000 1.1681 1.0346 0.0000 7.2195 2 50.0000 -2.4900 0.0011 -3.7346 3.7346 179.9831 16 | 12 1 50.0000 2.4900 -0.0010 3.7346 3.7346 359.9847 89.9831 0.4998 0.6175 1.0000 1.1681 1.0346 0.0000 7.2195 2 50.0000 -2.4900 0.0012 -3.7346 3.7346 179.9816 17 | 13 1 50.0000 -0.0010 2.4900 -0.0015 2.4900 90.0345 180.0328 0.4998 0.9779 1.0000 1.1121 1.0365 0.0000 4.8045 2 50.0000 0.0009 -2.4900 0.0013 2.4900 270.0311 18 | 
14 1 50.0000 -0.0010 2.4900 -0.0015 2.4900 90.0345 180.0345 0.4998 0.9779 1.0000 1.1121 1.0365 0.0000 4.8045 2 50.0000 0.0010 -2.4900 0.0015 2.4900 270.0345 19 | 15 1 50.0000 -0.0010 2.4900 -0.0015 2.4900 90.0345 0.0362 0.4998 1.3197 1.0000 1.1121 1.0493 0.0000 4.7461 2 50.0000 0.0011 -2.4900 0.0016 2.4900 270.0380 20 | 16 1 50.0000 2.5000 0.0000 3.7496 3.7496 0.0000 315.0000 0.4998 0.8454 1.0000 1.1406 1.0396 -0.0001 4.3065 2 50.0000 0.0000 -2.5000 0.0000 2.5000 270.0000 21 | 17 1 50.0000 2.5000 0.0000 3.4569 3.4569 0.0000 346.2470 0.3827 1.4453 1.1608 1.9547 1.4599 -0.0003 27.1492 2 73.0000 25.0000 -18.0000 34.5687 38.9743 332.4939 22 | 18 1 50.0000 2.5000 0.0000 3.4954 3.4954 0.0000 51.7766 0.3981 0.6447 1.0640 1.7498 1.1612 0.0000 22.8977 2 61.0000 -5.0000 29.0000 -6.9907 29.8307 103.5532 23 | 19 1 50.0000 2.5000 0.0000 3.5514 3.5514 0.0000 272.2362 0.4206 0.6521 1.0251 1.9455 1.2055 -0.8219 31.9030 2 56.0000 -27.0000 -3.0000 -38.3556 38.4728 184.4723 24 | 20 1 50.0000 2.5000 0.0000 3.5244 3.5244 0.0000 11.9548 0.4098 1.1031 1.0400 1.9120 1.3353 0.0000 19.4535 2 58.0000 24.0000 15.0000 33.8342 37.0102 23.9095 25 | 21 1 50.0000 2.5000 0.0000 3.7494 3.7494 0.0000 3.5056 0.4997 1.2616 1.0000 1.1923 1.0808 0.0000 1.0000 2 50.0000 3.1736 0.5854 4.7596 4.7954 7.0113 26 | 22 1 50.0000 2.5000 0.0000 3.7493 3.7493 0.0000 0.0000 0.4997 1.3202 1.0000 1.1956 1.0861 0.0000 1.0000 2 50.0000 3.2972 0.0000 4.9450 4.9450 0.0000 27 | 23 1 50.0000 2.5000 0.0000 3.7497 3.7497 0.0000 5.8190 0.4999 1.2197 1.0000 1.1486 1.0604 0.0000 1.0000 2 50.0000 1.8634 0.5757 2.7949 2.8536 11.6380 28 | 24 1 50.0000 2.5000 0.0000 3.7493 3.7493 0.0000 1.9603 0.4997 1.2883 1.0000 1.1946 1.0836 0.0000 1.0000 2 50.0000 3.2592 0.3350 4.8879 4.8994 3.9206 29 | 25 1 60.2574 -34.0099 36.2677 -34.0678 49.7590 133.2085 132.0835 0.0017 1.3010 1.1427 3.2946 1.9951 0.0000 1.2644 2 60.4626 -34.1751 39.4387 -34.2333 52.2238 130.9584 30 | 26 1 63.0109 -31.0961 -5.8663 -32.6194 33.1427 190.1951 188.8221 0.0490 0.9402 1.1831 2.4549 1.4560 0.0000 1.2630 2 62.8187 -29.7946 -4.0864 -31.2542 31.5202 187.4490 31 | 27 1 61.2901 3.7196 -5.3901 5.5668 7.7487 315.9240 310.0313 0.4966 0.6952 1.1586 1.3092 1.0717 -0.0032 1.8731 2 61.4292 2.2480 -4.9620 3.3644 5.9950 304.1385 32 | 28 1 35.0831 -44.1164 3.7933 -44.3939 44.5557 175.1161 176.4290 0.0063 1.0168 1.2148 2.9105 1.6476 0.0000 1.8645 2 35.0232 -40.0716 1.5901 -40.3237 40.3550 177.7418 33 | 29 1 22.7233 20.0904 -46.6940 20.1424 50.8532 293.3339 291.3809 0.0026 0.3636 1.4014 3.1597 1.2617 -1.2537 2.0373 2 23.0331 14.9730 -42.5619 15.0118 45.1317 289.4279 34 | 30 1 36.4612 47.8580 18.3852 47.9197 51.3256 20.9901 21.8781 0.0013 0.9239 1.1943 3.3888 1.7357 0.0000 1.4146 2 36.2715 50.5065 21.2231 50.5716 54.8444 22.7660 35 | 31 1 90.8027 -2.0831 1.4410 -3.1245 3.4408 155.2410 167.1011 0.4999 1.1546 1.6110 1.1329 1.0511 0.0000 1.4441 2 91.1528 -1.6435 0.0447 -2.4651 2.4655 178.9612 36 | 32 1 90.9257 -0.5406 -0.9208 -0.8109 1.2270 228.6315 218.4363 0.5000 1.3916 1.5930 1.0620 1.0288 0.0000 1.5381 2 88.6381 -0.8985 -0.7239 -1.3477 1.5298 208.2412 37 | 33 1 6.7747 -0.2908 -2.4247 -0.4362 2.4636 259.8025 263.0049 0.4999 0.9556 1.6517 1.1057 1.0337 -0.0004 0.6377 2 5.8714 -0.0985 -2.2286 -0.1477 2.2335 266.2073 38 | 34 1 2.0776 0.0795 -1.1350 0.1192 1.1412 275.9978 268.0910 0.5000 0.7826 1.7246 1.0383 1.0100 0.0000 0.9082 2 0.9033 -0.0636 -0.5514 -0.0954 0.5596 260.18421 39 | -------------------------------------------------------------------------------- 
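The table above follows the column layout spelled out in its header comment:
pair index, the first sample's L/a/b plus intermediate terms, the expected dE,
then the second sample's L/a/b. A hedged sketch of loading it to cross-check
``deltaE_ciede2000`` (the relative path and the loose tolerance are assumptions
for illustration):

    import numpy as np
    from skimage.color import deltaE_ciede2000

    data = np.loadtxt('ciede2000_test_data.txt')   # '#' header lines are skipped
    lab1 = data[:, 2:5]      # L1, a1, b1
    lab2 = data[:, 17:20]    # L2, a2, b2
    dE_ref = data[:, 15]     # expected CIEDE2000 difference
    np.testing.assert_allclose(deltaE_ciede2000(lab1, lab2), dE_ref, rtol=1e-4)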
/Add_colortoimg/skimage-color/tests/data/lab_array_a_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/lab_array_a_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/lab_array_d50_10.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/lab_array_d50_10.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/lab_array_d50_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/lab_array_d50_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/lab_array_d55_10.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/lab_array_d55_10.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/lab_array_d55_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/lab_array_d55_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/lab_array_d65_10.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/lab_array_d65_10.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/lab_array_d65_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/lab_array_d65_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/lab_array_d75_10.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/lab_array_d75_10.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/lab_array_d75_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/lab_array_d75_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/lab_array_e_2.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/lab_array_e_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/luv_array_a_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/luv_array_a_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/luv_array_d50_10.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/luv_array_d50_10.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/luv_array_d50_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/luv_array_d50_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/luv_array_d55_10.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/luv_array_d55_10.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/luv_array_d55_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/luv_array_d55_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/luv_array_d65_10.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/luv_array_d65_10.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/luv_array_d65_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/luv_array_d65_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/luv_array_d75_10.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/luv_array_d75_10.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/luv_array_d75_2.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/luv_array_d75_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/data/luv_array_e_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/data/luv_array_e_2.npy -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/test_adapt_rgb.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | 3 | import numpy as np 4 | 5 | from skimage import img_as_float, img_as_uint 6 | from skimage import color, data, filters 7 | from skimage.color.adapt_rgb import adapt_rgb, each_channel, hsv_value 8 | from skimage._shared._warnings import expected_warnings 9 | 10 | # Down-sample image for quicker testing. 11 | COLOR_IMAGE = data.astronaut()[::5, ::5] 12 | GRAY_IMAGE = data.camera()[::5, ::5] 13 | 14 | SIGMA = 3 15 | smooth = partial(filters.gaussian, sigma=SIGMA) 16 | assert_allclose = partial(np.testing.assert_allclose, atol=1e-8) 17 | 18 | 19 | @adapt_rgb(each_channel) 20 | def edges_each(image): 21 | return filters.sobel(image) 22 | 23 | 24 | @adapt_rgb(each_channel) 25 | def smooth_each(image, sigma): 26 | return filters.gaussian(image, sigma) 27 | 28 | 29 | @adapt_rgb(hsv_value) 30 | def edges_hsv(image): 31 | return filters.sobel(image) 32 | 33 | 34 | @adapt_rgb(hsv_value) 35 | def smooth_hsv(image, sigma): 36 | return filters.gaussian(image, sigma) 37 | 38 | 39 | @adapt_rgb(hsv_value) 40 | def edges_hsv_uint(image): 41 | with expected_warnings(['precision loss']): 42 | return img_as_uint(filters.sobel(image)) 43 | 44 | 45 | def test_gray_scale_image(): 46 | # We don't need to test both `hsv_value` and `each_channel` since 47 | # `adapt_rgb` is handling gray-scale inputs. 48 | assert_allclose(edges_each(GRAY_IMAGE), filters.sobel(GRAY_IMAGE)) 49 | 50 | 51 | def test_each_channel(): 52 | filtered = edges_each(COLOR_IMAGE) 53 | for i, channel in enumerate(np.rollaxis(filtered, axis=-1)): 54 | expected = img_as_float(filters.sobel(COLOR_IMAGE[:, :, i])) 55 | assert_allclose(channel, expected) 56 | 57 | 58 | def test_each_channel_with_filter_argument(): 59 | filtered = smooth_each(COLOR_IMAGE, SIGMA) 60 | for i, channel in enumerate(np.rollaxis(filtered, axis=-1)): 61 | assert_allclose(channel, smooth(COLOR_IMAGE[:, :, i])) 62 | 63 | 64 | def test_hsv_value(): 65 | filtered = edges_hsv(COLOR_IMAGE) 66 | value = color.rgb2hsv(COLOR_IMAGE)[:, :, 2] 67 | assert_allclose(color.rgb2hsv(filtered)[:, :, 2], filters.sobel(value)) 68 | 69 | 70 | def test_hsv_value_with_filter_argument(): 71 | filtered = smooth_hsv(COLOR_IMAGE, SIGMA) 72 | value = color.rgb2hsv(COLOR_IMAGE)[:, :, 2] 73 | assert_allclose(color.rgb2hsv(filtered)[:, :, 2], smooth(value)) 74 | 75 | 76 | def test_hsv_value_with_non_float_output(): 77 | # Since `rgb2hsv` returns a float image and the result of the filtered 78 | # result is inserted into the HSV image, we want to make sure there isn't 79 | # a dtype mismatch. 80 | filtered = edges_hsv_uint(COLOR_IMAGE) 81 | filtered_value = color.rgb2hsv(filtered)[:, :, 2] 82 | value = color.rgb2hsv(COLOR_IMAGE)[:, :, 2] 83 | # Reduce tolerance because dtype conversion. 
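    # The uint16 round trip in `edges_hsv_uint` quantizes the value channel.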
84 | assert_allclose(filtered_value, filters.sobel(value), rtol=1e-5, atol=1e-5) 85 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/test_adapt_rgb.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/test_adapt_rgb.pyc -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/test_colorconv.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """Tests for color conversion functions. 5 | 6 | Authors 7 | ------- 8 | - the rgb2hsv test was written by Nicolas Pinto, 2009 9 | - other tests written by Ralf Gommers, 2009 10 | 11 | :license: modified BSD 12 | """ 13 | 14 | from __future__ import division 15 | import os.path 16 | 17 | import numpy as np 18 | from numpy.testing import (assert_equal, 19 | assert_almost_equal, 20 | assert_array_almost_equal, 21 | assert_raises, 22 | TestCase, 23 | ) 24 | 25 | from skimage import img_as_float, img_as_ubyte 26 | from skimage.io import imread 27 | from skimage.color import (rgb2hsv, hsv2rgb, 28 | rgb2xyz, xyz2rgb, 29 | rgb2hed, hed2rgb, 30 | separate_stains, 31 | combine_stains, 32 | rgb2rgbcie, rgbcie2rgb, 33 | convert_colorspace, 34 | rgb2grey, gray2rgb, 35 | xyz2lab, lab2xyz, 36 | lab2rgb, rgb2lab, 37 | xyz2luv, luv2xyz, 38 | luv2rgb, rgb2luv, 39 | lab2lch, lch2lab, 40 | guess_spatial_dimensions 41 | ) 42 | 43 | from skimage import data_dir 44 | from skimage._shared._warnings import expected_warnings 45 | 46 | import colorsys 47 | 48 | 49 | def test_guess_spatial_dimensions(): 50 | im1 = np.zeros((5, 5)) 51 | im2 = np.zeros((5, 5, 5)) 52 | im3 = np.zeros((5, 5, 3)) 53 | im4 = np.zeros((5, 5, 5, 3)) 54 | im5 = np.zeros((5,)) 55 | assert_equal(guess_spatial_dimensions(im1), 2) 56 | assert_equal(guess_spatial_dimensions(im2), 3) 57 | assert_equal(guess_spatial_dimensions(im3), None) 58 | assert_equal(guess_spatial_dimensions(im4), 3) 59 | assert_raises(ValueError, guess_spatial_dimensions, im5) 60 | 61 | 62 | class TestColorconv(TestCase): 63 | 64 | img_rgb = imread(os.path.join(data_dir, 'color.png')) 65 | img_grayscale = imread(os.path.join(data_dir, 'camera.png')) 66 | 67 | colbars = np.array([[1, 1, 0, 0, 1, 1, 0, 0], 68 | [1, 1, 1, 1, 0, 0, 0, 0], 69 | [1, 0, 1, 0, 1, 0, 1, 0]]).astype(np.float) 70 | colbars_array = np.swapaxes(colbars.reshape(3, 4, 2), 0, 2) 71 | colbars_point75 = colbars * 0.75 72 | colbars_point75_array = np.swapaxes(colbars_point75.reshape(3, 4, 2), 0, 2) 73 | 74 | xyz_array = np.array([[[0.4124, 0.21260, 0.01930]], # red 75 | [[0, 0, 0]], # black 76 | [[.9505, 1., 1.089]], # white 77 | [[.1805, .0722, .9505]], # blue 78 | [[.07719, .15438, .02573]], # green 79 | ]) 80 | lab_array = np.array([[[53.233, 80.109, 67.220]], # red 81 | [[0., 0., 0.]], # black 82 | [[100.0, 0.005, -0.010]], # white 83 | [[32.303, 79.197, -107.864]], # blue 84 | [[46.229, -51.7, 49.898]], # green 85 | ]) 86 | 87 | luv_array = np.array([[[53.233, 175.053, 37.751]], # red 88 | [[0., 0., 0.]], # black 89 | [[100., 0.001, -0.017]], # white 90 | [[32.303, -9.400, -130.358]], # blue 91 | [[46.228, -43.774, 56.589]], # green 92 | ]) 93 | 94 | # RGB to HSV 95 | def test_rgb2hsv_conversion(self): 96 | rgb = img_as_float(self.img_rgb)[::16, ::16] 97 | hsv = rgb2hsv(rgb).reshape(-1, 
3) 98 | # ground truth from colorsys 99 | gt = np.array([colorsys.rgb_to_hsv(pt[0], pt[1], pt[2]) 100 | for pt in rgb.reshape(-1, 3)] 101 | ) 102 | assert_almost_equal(hsv, gt) 103 | 104 | def test_rgb2hsv_error_grayscale(self): 105 | self.assertRaises(ValueError, rgb2hsv, self.img_grayscale) 106 | 107 | def test_rgb2hsv_error_one_element(self): 108 | self.assertRaises(ValueError, rgb2hsv, self.img_rgb[0, 0]) 109 | 110 | # HSV to RGB 111 | def test_hsv2rgb_conversion(self): 112 | rgb = self.img_rgb.astype("float32")[::16, ::16] 113 | # create HSV image with colorsys 114 | hsv = np.array([colorsys.rgb_to_hsv(pt[0], pt[1], pt[2]) 115 | for pt in rgb.reshape(-1, 3)]).reshape(rgb.shape) 116 | # convert back to RGB and compare with original. 117 | # relative precision for RGB -> HSV roundtrip is about 1e-6 118 | assert_almost_equal(rgb, hsv2rgb(hsv), decimal=4) 119 | 120 | def test_hsv2rgb_error_grayscale(self): 121 | self.assertRaises(ValueError, hsv2rgb, self.img_grayscale) 122 | 123 | def test_hsv2rgb_error_one_element(self): 124 | self.assertRaises(ValueError, hsv2rgb, self.img_rgb[0, 0]) 125 | 126 | # RGB to XYZ 127 | def test_rgb2xyz_conversion(self): 128 | gt = np.array([[[0.950456, 1. , 1.088754], 129 | [0.538003, 0.787329, 1.06942 ], 130 | [0.592876, 0.28484 , 0.969561], 131 | [0.180423, 0.072169, 0.950227]], 132 | [[0.770033, 0.927831, 0.138527], 133 | [0.35758 , 0.71516 , 0.119193], 134 | [0.412453, 0.212671, 0.019334], 135 | [0. , 0. , 0. ]]]) 136 | assert_almost_equal(rgb2xyz(self.colbars_array), gt) 137 | 138 | # stop repeating the "raises" checks for all other functions that are 139 | # implemented with color._convert() 140 | def test_rgb2xyz_error_grayscale(self): 141 | self.assertRaises(ValueError, rgb2xyz, self.img_grayscale) 142 | 143 | def test_rgb2xyz_error_one_element(self): 144 | self.assertRaises(ValueError, rgb2xyz, self.img_rgb[0, 0]) 145 | 146 | # XYZ to RGB 147 | def test_xyz2rgb_conversion(self): 148 | assert_almost_equal(xyz2rgb(rgb2xyz(self.colbars_array)), 149 | self.colbars_array) 150 | 151 | # RGB<->XYZ roundtrip on another image 152 | def test_xyz_rgb_roundtrip(self): 153 | img_rgb = img_as_float(self.img_rgb) 154 | assert_array_almost_equal(xyz2rgb(rgb2xyz(img_rgb)), img_rgb) 155 | 156 | # RGB<->HED roundtrip with ubyte image 157 | def test_hed_rgb_roundtrip(self): 158 | img_rgb = img_as_ubyte(self.img_rgb) 159 | with expected_warnings(['precision loss']): 160 | new = img_as_ubyte(hed2rgb(rgb2hed(img_rgb))) 161 | assert_equal(new, img_rgb) 162 | 163 | # RGB<->HED roundtrip with float image 164 | def test_hed_rgb_float_roundtrip(self): 165 | img_rgb = img_as_float(self.img_rgb) 166 | assert_array_almost_equal(hed2rgb(rgb2hed(img_rgb)), img_rgb) 167 | 168 | # RGB<->HDX roundtrip with ubyte image 169 | def test_hdx_rgb_roundtrip(self): 170 | from skimage.color.colorconv import hdx_from_rgb, rgb_from_hdx 171 | img_rgb = self.img_rgb 172 | conv = combine_stains(separate_stains(img_rgb, hdx_from_rgb), 173 | rgb_from_hdx) 174 | assert_equal(img_as_ubyte(conv), img_rgb) 175 | 176 | # RGB<->HDX roundtrip with ubyte image 177 | def test_hdx_rgb_roundtrip(self): 178 | from skimage.color.colorconv import hdx_from_rgb, rgb_from_hdx 179 | img_rgb = img_as_float(self.img_rgb) 180 | conv = combine_stains(separate_stains(img_rgb, hdx_from_rgb), 181 | rgb_from_hdx) 182 | assert_array_almost_equal(conv, img_rgb) 183 | 184 | # RGB to RGB CIE 185 | def test_rgb2rgbcie_conversion(self): 186 | gt = np.array([[[ 0.1488856 , 0.18288098, 0.19277574], 187 | [ 0.01163224, 0.16649536, 
0.18948516], 188 | [ 0.12259182, 0.03308008, 0.17298223], 189 | [-0.01466154, 0.01669446, 0.16969164]], 190 | [[ 0.16354714, 0.16618652, 0.0230841 ], 191 | [ 0.02629378, 0.1498009 , 0.01979351], 192 | [ 0.13725336, 0.01638562, 0.00329059], 193 | [ 0. , 0. , 0. ]]]) 194 | assert_almost_equal(rgb2rgbcie(self.colbars_array), gt) 195 | 196 | # RGB CIE to RGB 197 | def test_rgbcie2rgb_conversion(self): 198 | # only roundtrip test, we checked rgb2rgbcie above already 199 | assert_almost_equal(rgbcie2rgb(rgb2rgbcie(self.colbars_array)), 200 | self.colbars_array) 201 | 202 | def test_convert_colorspace(self): 203 | colspaces = ['HSV', 'RGB CIE', 'XYZ'] 204 | colfuncs_from = [hsv2rgb, rgbcie2rgb, xyz2rgb] 205 | colfuncs_to = [rgb2hsv, rgb2rgbcie, rgb2xyz] 206 | 207 | assert_almost_equal(convert_colorspace(self.colbars_array, 'RGB', 208 | 'RGB'), self.colbars_array) 209 | for i, space in enumerate(colspaces): 210 | gt = colfuncs_from[i](self.colbars_array) 211 | assert_almost_equal(convert_colorspace(self.colbars_array, space, 212 | 'RGB'), gt) 213 | gt = colfuncs_to[i](self.colbars_array) 214 | assert_almost_equal(convert_colorspace(self.colbars_array, 'RGB', 215 | space), gt) 216 | 217 | self.assertRaises(ValueError, convert_colorspace, self.colbars_array, 218 | 'nokey', 'XYZ') 219 | self.assertRaises(ValueError, convert_colorspace, self.colbars_array, 220 | 'RGB', 'nokey') 221 | 222 | def test_rgb2grey(self): 223 | x = np.array([1, 1, 1]).reshape((1, 1, 3)).astype(np.float) 224 | g = rgb2grey(x) 225 | assert_array_almost_equal(g, 1) 226 | 227 | assert_equal(g.shape, (1, 1)) 228 | 229 | def test_rgb2grey_contiguous(self): 230 | x = np.random.rand(10, 10, 3) 231 | assert rgb2grey(x).flags["C_CONTIGUOUS"] 232 | assert rgb2grey(x[:5, :5]).flags["C_CONTIGUOUS"] 233 | 234 | def test_rgb2grey_alpha(self): 235 | x = np.random.rand(10, 10, 4) 236 | assert rgb2grey(x).ndim == 2 237 | 238 | def test_rgb2grey_on_grey(self): 239 | rgb2grey(np.random.rand(5, 5)) 240 | 241 | # test matrices for xyz2lab and lab2xyz generated using 242 | # http://www.easyrgb.com/index.php?X=CALC 243 | # Note: easyrgb website displays xyz*100 244 | def test_xyz2lab(self): 245 | assert_array_almost_equal(xyz2lab(self.xyz_array), 246 | self.lab_array, decimal=3) 247 | 248 | # Test the conversion with the rest of the illuminants. 249 | for I in ["d50", "d55", "d65", "d75"]: 250 | for obs in ["2", "10"]: 251 | fname = "lab_array_{0}_{1}.npy".format(I, obs) 252 | lab_array_I_obs = np.load( 253 | os.path.join(os.path.dirname(__file__), 'data', fname)) 254 | assert_array_almost_equal(lab_array_I_obs, 255 | xyz2lab(self.xyz_array, I, obs), 256 | decimal=2) 257 | for I in ["a", "e"]: 258 | fname = "lab_array_{0}_2.npy".format(I) 259 | lab_array_I_obs = np.load( 260 | os.path.join(os.path.dirname(__file__), 'data', fname)) 261 | assert_array_almost_equal(lab_array_I_obs, 262 | xyz2lab(self.xyz_array, I, "2"), 263 | decimal=2) 264 | 265 | def test_lab2xyz(self): 266 | assert_array_almost_equal(lab2xyz(self.lab_array), 267 | self.xyz_array, decimal=3) 268 | 269 | # Test the conversion with the rest of the illuminants. 
270 | for I in ["d50", "d55", "d65", "d75"]: 271 | for obs in ["2", "10"]: 272 | fname = "lab_array_{0}_{1}.npy".format(I, obs) 273 | lab_array_I_obs = np.load( 274 | os.path.join(os.path.dirname(__file__), 'data', fname)) 275 | assert_array_almost_equal(lab2xyz(lab_array_I_obs, I, obs), 276 | self.xyz_array, decimal=3) 277 | for I in ["a", "e"]: 278 | fname = "lab_array_{0}_2.npy".format(I, obs) 279 | lab_array_I_obs = np.load( 280 | os.path.join(os.path.dirname(__file__), 'data', fname)) 281 | assert_array_almost_equal(lab2xyz(lab_array_I_obs, I, "2"), 282 | self.xyz_array, decimal=3) 283 | 284 | # And we include a call to test the exception handling in the code. 285 | try: 286 | xs = lab2xyz(lab_array_I_obs, "NaI", "2") # Not an illuminant 287 | except ValueError: 288 | pass 289 | 290 | try: 291 | xs = lab2xyz(lab_array_I_obs, "d50", "42") # Not a degree 292 | except ValueError: 293 | pass 294 | 295 | def test_rgb2lab_brucelindbloom(self): 296 | """ 297 | Test the RGB->Lab conversion by comparing to the calculator on the 298 | authoritative Bruce Lindbloom 299 | [website](http://brucelindbloom.com/index.html?ColorCalculator.html). 300 | """ 301 | # Obtained with D65 white point, sRGB model and gamma 302 | gt_for_colbars = np.array([ 303 | [100,0,0], 304 | [97.1393, -21.5537, 94.4780], 305 | [91.1132, -48.0875, -14.1312], 306 | [87.7347, -86.1827, 83.1793], 307 | [60.3242, 98.2343, -60.8249], 308 | [53.2408, 80.0925, 67.2032], 309 | [32.2970, 79.1875, -107.8602], 310 | [0,0,0]]).T 311 | gt_array = np.swapaxes(gt_for_colbars.reshape(3, 4, 2), 0, 2) 312 | assert_array_almost_equal(rgb2lab(self.colbars_array), gt_array, decimal=2) 313 | 314 | def test_lab_rgb_roundtrip(self): 315 | img_rgb = img_as_float(self.img_rgb) 316 | assert_array_almost_equal(lab2rgb(rgb2lab(img_rgb)), img_rgb) 317 | 318 | # test matrices for xyz2luv and luv2xyz generated using 319 | # http://www.easyrgb.com/index.php?X=CALC 320 | # Note: easyrgb website displays xyz*100 321 | def test_xyz2luv(self): 322 | assert_array_almost_equal(xyz2luv(self.xyz_array), 323 | self.luv_array, decimal=3) 324 | 325 | # Test the conversion with the rest of the illuminants. 326 | for I in ["d50", "d55", "d65", "d75"]: 327 | for obs in ["2", "10"]: 328 | fname = "luv_array_{0}_{1}.npy".format(I, obs) 329 | luv_array_I_obs = np.load( 330 | os.path.join(os.path.dirname(__file__), 'data', fname)) 331 | assert_array_almost_equal(luv_array_I_obs, 332 | xyz2luv(self.xyz_array, I, obs), 333 | decimal=2) 334 | for I in ["a", "e"]: 335 | fname = "luv_array_{0}_2.npy".format(I) 336 | luv_array_I_obs = np.load( 337 | os.path.join(os.path.dirname(__file__), 'data', fname)) 338 | assert_array_almost_equal(luv_array_I_obs, 339 | xyz2luv(self.xyz_array, I, "2"), 340 | decimal=2) 341 | 342 | def test_luv2xyz(self): 343 | assert_array_almost_equal(luv2xyz(self.luv_array), 344 | self.xyz_array, decimal=3) 345 | 346 | # Test the conversion with the rest of the illuminants. 
347 | for I in ["d50", "d55", "d65", "d75"]: 348 | for obs in ["2", "10"]: 349 | fname = "luv_array_{0}_{1}.npy".format(I, obs) 350 | luv_array_I_obs = np.load( 351 | os.path.join(os.path.dirname(__file__), 'data', fname)) 352 | assert_array_almost_equal(luv2xyz(luv_array_I_obs, I, obs), 353 | self.xyz_array, decimal=3) 354 | for I in ["a", "e"]: 355 | fname = "luv_array_{0}_2.npy".format(I, obs) 356 | luv_array_I_obs = np.load( 357 | os.path.join(os.path.dirname(__file__), 'data', fname)) 358 | assert_array_almost_equal(luv2xyz(luv_array_I_obs, I, "2"), 359 | self.xyz_array, decimal=3) 360 | 361 | def test_rgb2luv_brucelindbloom(self): 362 | """ 363 | Test the RGB->Lab conversion by comparing to the calculator on the 364 | authoritative Bruce Lindbloom 365 | [website](http://brucelindbloom.com/index.html?ColorCalculator.html). 366 | """ 367 | # Obtained with D65 white point, sRGB model and gamma 368 | gt_for_colbars = np.array([ 369 | [100, 0, 0], 370 | [97.1393, 7.7056, 106.7866], 371 | [91.1132, -70.4773, -15.2042], 372 | [87.7347, -83.0776, 107.3985], 373 | [60.3242, 84.0714, -108.6834], 374 | [53.2408, 175.0151, 37.7564], 375 | [32.2970, -9.4054, -130.3423], 376 | [0, 0, 0]]).T 377 | gt_array = np.swapaxes(gt_for_colbars.reshape(3, 4, 2), 0, 2) 378 | assert_array_almost_equal(rgb2luv(self.colbars_array), 379 | gt_array, decimal=2) 380 | 381 | def test_luv_rgb_roundtrip(self): 382 | img_rgb = img_as_float(self.img_rgb) 383 | assert_array_almost_equal(luv2rgb(rgb2luv(img_rgb)), img_rgb) 384 | 385 | def test_lab_rgb_outlier(self): 386 | lab_array = np.ones((3, 1, 3)) 387 | lab_array[0] = [50, -12, 85] 388 | lab_array[1] = [50, 12, -85] 389 | lab_array[2] = [90, -4, -47] 390 | rgb_array = np.array([[[0.501, 0.481, 0]], 391 | [[0, 0.482, 1.]], 392 | [[0.578, 0.914, 1.]], 393 | ]) 394 | assert_almost_equal(lab2rgb(lab_array), rgb_array, decimal=3) 395 | 396 | def test_lab_full_gamut(self): 397 | a, b = np.meshgrid(np.arange(-100, 100), np.arange(-100, 100)) 398 | L = np.ones(a.shape) 399 | lab = np.dstack((L, a, b)) 400 | for value in [0, 10, 20]: 401 | lab[:, :, 0] = value 402 | with expected_warnings(['Color data out of range']): 403 | lab2xyz(lab) 404 | 405 | def test_lab_lch_roundtrip(self): 406 | rgb = img_as_float(self.img_rgb) 407 | lab = rgb2lab(rgb) 408 | lab2 = lch2lab(lab2lch(lab)) 409 | assert_array_almost_equal(lab2, lab) 410 | 411 | def test_rgb_lch_roundtrip(self): 412 | rgb = img_as_float(self.img_rgb) 413 | lab = rgb2lab(rgb) 414 | lch = lab2lch(lab) 415 | lab2 = lch2lab(lch) 416 | rgb2 = lab2rgb(lab2) 417 | assert_array_almost_equal(rgb, rgb2) 418 | 419 | def test_lab_lch_0d(self): 420 | lab0 = self._get_lab0() 421 | lch0 = lab2lch(lab0) 422 | lch2 = lab2lch(lab0[None, None, :]) 423 | assert_array_almost_equal(lch0, lch2[0, 0, :]) 424 | 425 | def test_lab_lch_1d(self): 426 | lab0 = self._get_lab0() 427 | lch0 = lab2lch(lab0) 428 | lch1 = lab2lch(lab0[None, :]) 429 | assert_array_almost_equal(lch0, lch1[0, :]) 430 | 431 | def test_lab_lch_3d(self): 432 | lab0 = self._get_lab0() 433 | lch0 = lab2lch(lab0) 434 | lch3 = lab2lch(lab0[None, None, None, :]) 435 | assert_array_almost_equal(lch0, lch3[0, 0, 0, :]) 436 | 437 | def _get_lab0(self): 438 | rgb = img_as_float(self.img_rgb[:1, :1, :]) 439 | return rgb2lab(rgb)[0, 0, :] 440 | 441 | 442 | def test_gray2rgb(): 443 | x = np.array([0, 0.5, 1]) 444 | assert_raises(ValueError, gray2rgb, x) 445 | 446 | x = x.reshape((3, 1)) 447 | y = gray2rgb(x) 448 | 449 | assert_equal(y.shape, (3, 1, 3)) 450 | assert_equal(y.dtype, x.dtype) 
451 | assert_equal(y[..., 0], x) 452 | assert_equal(y[0, 0, :], [0, 0, 0]) 453 | 454 | x = np.array([[0, 128, 255]], dtype=np.uint8) 455 | z = gray2rgb(x) 456 | 457 | assert_equal(z.shape, (1, 3, 3)) 458 | assert_equal(z[..., 0], x) 459 | assert_equal(z[0, 1, :], [128, 128, 128]) 460 | 461 | 462 | def test_gray2rgb_rgb(): 463 | x = np.random.rand(5, 5, 4) 464 | y = gray2rgb(x) 465 | assert_equal(x, y) 466 | 467 | 468 | def test_gray2rgb_alpha(): 469 | x = np.random.random((5, 5, 4)) 470 | assert_equal(gray2rgb(x, alpha=None).shape, (5, 5, 4)) 471 | assert_equal(gray2rgb(x, alpha=False).shape, (5, 5, 3)) 472 | assert_equal(gray2rgb(x, alpha=True).shape, (5, 5, 4)) 473 | 474 | x = np.random.random((5, 5, 3)) 475 | assert_equal(gray2rgb(x, alpha=None).shape, (5, 5, 3)) 476 | assert_equal(gray2rgb(x, alpha=False).shape, (5, 5, 3)) 477 | assert_equal(gray2rgb(x, alpha=True).shape, (5, 5, 4)) 478 | 479 | assert_equal(gray2rgb(np.array([[1, 2], [3, 4.]]), 480 | alpha=True)[0, 0, 3], 1) 481 | assert_equal(gray2rgb(np.array([[1, 2], [3, 4]], dtype=np.uint8), 482 | alpha=True)[0, 0, 3], 255) 483 | 484 | 485 | if __name__ == "__main__": 486 | from numpy.testing import run_module_suite 487 | run_module_suite() 488 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/test_colorconv.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/test_colorconv.pyc -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/test_colorlabel.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | 3 | import numpy as np 4 | from numpy import testing 5 | from skimage.color.colorlabel import label2rgb 6 | from skimage._shared._warnings import expected_warnings 7 | from numpy.testing import (assert_array_almost_equal as assert_close, 8 | assert_array_equal, assert_warns) 9 | 10 | 11 | def test_shape_mismatch(): 12 | image = np.ones((3, 3)) 13 | label = np.ones((2, 2)) 14 | testing.assert_raises(ValueError, label2rgb, image, label) 15 | 16 | 17 | def test_rgb(): 18 | image = np.ones((1, 3)) 19 | label = np.arange(3).reshape(1, -1) 20 | colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)] 21 | # Set alphas just in case the defaults change 22 | rgb = label2rgb(label, image=image, colors=colors, alpha=1, image_alpha=1) 23 | assert_close(rgb, [colors]) 24 | 25 | 26 | def test_alpha(): 27 | image = np.random.uniform(size=(3, 3)) 28 | label = np.random.randint(0, 9, size=(3, 3)) 29 | # If we set `alpha = 0`, then rgb should match image exactly. 30 | rgb = label2rgb(label, image=image, alpha=0, image_alpha=1) 31 | assert_close(rgb[..., 0], image) 32 | assert_close(rgb[..., 1], image) 33 | assert_close(rgb[..., 2], image) 34 | 35 | 36 | def test_no_input_image(): 37 | label = np.arange(3).reshape(1, -1) 38 | colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)] 39 | rgb = label2rgb(label, colors=colors) 40 | assert_close(rgb, [colors]) 41 | 42 | 43 | def test_image_alpha(): 44 | image = np.random.uniform(size=(1, 3)) 45 | label = np.arange(3).reshape(1, -1) 46 | colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)] 47 | # If we set `image_alpha = 0`, then rgb should match label colors exactly. 
48 | rgb = label2rgb(label, image=image, colors=colors, alpha=1, image_alpha=0) 49 | assert_close(rgb, [colors]) 50 | 51 | 52 | def test_color_names(): 53 | image = np.ones((1, 3)) 54 | label = np.arange(3).reshape(1, -1) 55 | cnames = ['red', 'lime', 'blue'] 56 | colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)] 57 | # Set alphas just in case the defaults change 58 | rgb = label2rgb(label, image=image, colors=cnames, alpha=1, image_alpha=1) 59 | assert_close(rgb, [colors]) 60 | 61 | 62 | def test_bg_and_color_cycle(): 63 | image = np.zeros((1, 10)) # dummy image 64 | label = np.arange(10).reshape(1, -1) 65 | colors = [(1, 0, 0), (0, 0, 1)] 66 | bg_color = (0, 0, 0) 67 | rgb = label2rgb(label, image=image, bg_label=0, bg_color=bg_color, 68 | colors=colors, alpha=1) 69 | assert_close(rgb[0, 0], bg_color) 70 | for pixel, color in zip(rgb[0, 1:], itertools.cycle(colors)): 71 | assert_close(pixel, color) 72 | 73 | 74 | def test_label_consistency(): 75 | """Assert that the same labels map to the same colors.""" 76 | label_1 = np.arange(5).reshape(1, -1) 77 | label_2 = np.array([2, 4]) 78 | colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1), (1, 1, 0), (1, 0, 1)] 79 | # Set alphas just in case the defaults change 80 | rgb_1 = label2rgb(label_1, colors=colors) 81 | rgb_2 = label2rgb(label_2, colors=colors) 82 | for label_id in label_2.flat: 83 | assert_close(rgb_1[label_1 == label_id], rgb_2[label_2 == label_id]) 84 | 85 | def test_leave_labels_alone(): 86 | labels = np.array([-1, 0, 1]) 87 | labels_saved = labels.copy() 88 | 89 | label2rgb(labels) 90 | label2rgb(labels, bg_label=1) 91 | assert_array_equal(labels, labels_saved) 92 | 93 | def test_avg(): 94 | # label image 95 | label_field = np.array([[1, 1, 1, 2], 96 | [1, 2, 2, 2], 97 | [3, 3, 3, 3]], dtype=np.uint8) 98 | 99 | # color image 100 | r = np.array([[1., 1., 0., 0.], 101 | [0., 0., 1., 1.], 102 | [0., 0., 0., 0.]]) 103 | g = np.array([[0., 0., 0., 1.], 104 | [1., 1., 1., 0.], 105 | [0., 0., 0., 0.]]) 106 | b = np.array([[0., 0., 0., 1.], 107 | [0., 1., 1., 1.], 108 | [0., 0., 1., 1.]]) 109 | image = np.dstack((r, g, b)) 110 | 111 | # reference label-colored image 112 | rout = np.array([[0.5, 0.5, 0.5, 0.5], 113 | [0.5, 0.5, 0.5, 0.5], 114 | [0. , 0. , 0. , 0. ]]) 115 | gout = np.array([[0.25, 0.25, 0.25, 0.75], 116 | [0.25, 0.75, 0.75, 0.75], 117 | [0. , 0. , 0. , 0. ]]) 118 | bout = np.array([[0. , 0. , 0. , 1. ], 119 | [0. , 1. , 1. , 1. 
], 120 | [0.5, 0.5, 0.5, 0.5]]) 121 | expected_out = np.dstack((rout, gout, bout)) 122 | 123 | # test standard averaging 124 | out = label2rgb(label_field, image, kind='avg') 125 | assert_array_equal(out, expected_out) 126 | 127 | # test averaging with custom background value 128 | out_bg = label2rgb(label_field, image, bg_label=2, bg_color=(0, 0, 0), 129 | kind='avg') 130 | expected_out_bg = expected_out.copy() 131 | expected_out_bg[label_field == 2] = 0 132 | assert_array_equal(out_bg, expected_out_bg) 133 | 134 | # test default background color 135 | out_bg = label2rgb(label_field, image, bg_label=2, kind='avg') 136 | assert_array_equal(out_bg, expected_out_bg) 137 | 138 | 139 | def test_negative_intensity(): 140 | labels = np.arange(100).reshape(10, 10) 141 | image = -1 * np.ones((10, 10)) 142 | assert_warns(UserWarning, label2rgb, labels, image) 143 | 144 | 145 | if __name__ == '__main__': 146 | testing.run_module_suite() 147 | 148 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/test_colorlabel.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/test_colorlabel.pyc -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/test_delta_e.py: -------------------------------------------------------------------------------- 1 | """Test for correctness of color distance functions""" 2 | from os.path import abspath, dirname, join as pjoin 3 | 4 | import numpy as np 5 | from numpy.testing import assert_allclose 6 | 7 | from skimage.color import (deltaE_cie76, 8 | deltaE_ciede94, 9 | deltaE_ciede2000, 10 | deltaE_cmc) 11 | 12 | 13 | def test_ciede2000_dE(): 14 | data = load_ciede2000_data() 15 | N = len(data) 16 | lab1 = np.zeros((N, 3)) 17 | lab1[:, 0] = data['L1'] 18 | lab1[:, 1] = data['a1'] 19 | lab1[:, 2] = data['b1'] 20 | 21 | lab2 = np.zeros((N, 3)) 22 | lab2[:, 0] = data['L2'] 23 | lab2[:, 1] = data['a2'] 24 | lab2[:, 2] = data['b2'] 25 | 26 | dE2 = deltaE_ciede2000(lab1, lab2) 27 | 28 | assert_allclose(dE2, data['dE'], rtol=1.e-4) 29 | 30 | 31 | def load_ciede2000_data(): 32 | dtype = [('pair', int), 33 | ('1', int), 34 | ('L1', float), 35 | ('a1', float), 36 | ('b1', float), 37 | ('a1_prime', float), 38 | ('C1_prime', float), 39 | ('h1_prime', float), 40 | ('hbar_prime', float), 41 | ('G', float), 42 | ('T', float), 43 | ('SL', float), 44 | ('SC', float), 45 | ('SH', float), 46 | ('RT', float), 47 | ('dE', float), 48 | ('2', int), 49 | ('L2', float), 50 | ('a2', float), 51 | ('b2', float), 52 | ('a2_prime', float), 53 | ('C2_prime', float), 54 | ('h2_prime', float), 55 | ] 56 | 57 | # note: ciede_test_data.txt contains several intermediate quantities 58 | path = pjoin(dirname(abspath(__file__)), 'ciede2000_test_data.txt') 59 | return np.loadtxt(path, dtype=dtype) 60 | 61 | 62 | def test_cie76(): 63 | data = load_ciede2000_data() 64 | N = len(data) 65 | lab1 = np.zeros((N, 3)) 66 | lab1[:, 0] = data['L1'] 67 | lab1[:, 1] = data['a1'] 68 | lab1[:, 2] = data['b1'] 69 | 70 | lab2 = np.zeros((N, 3)) 71 | lab2[:, 0] = data['L2'] 72 | lab2[:, 1] = data['a2'] 73 | lab2[:, 2] = data['b2'] 74 | 75 | dE2 = deltaE_cie76(lab1, lab2) 76 | oracle = np.array([ 77 | 4.00106328, 6.31415011, 9.1776999, 2.06270077, 2.36957073, 78 | 2.91529271, 2.23606798, 2.23606798, 4.98000036, 4.9800004, 79 | 4.98000044, 
4.98000049, 4.98000036, 4.9800004, 4.98000044, 80 | 3.53553391, 36.86800781, 31.91002977, 30.25309901, 27.40894015, 81 | 0.89242934, 0.7972, 0.8583065, 0.82982507, 3.1819238, 82 | 2.21334297, 1.53890382, 4.60630929, 6.58467989, 3.88641412, 83 | 1.50514845, 2.3237848, 0.94413208, 1.31910843 84 | ]) 85 | assert_allclose(dE2, oracle, rtol=1.e-8) 86 | 87 | 88 | def test_ciede94(): 89 | data = load_ciede2000_data() 90 | N = len(data) 91 | lab1 = np.zeros((N, 3)) 92 | lab1[:, 0] = data['L1'] 93 | lab1[:, 1] = data['a1'] 94 | lab1[:, 2] = data['b1'] 95 | 96 | lab2 = np.zeros((N, 3)) 97 | lab2[:, 0] = data['L2'] 98 | lab2[:, 1] = data['a2'] 99 | lab2[:, 2] = data['b2'] 100 | 101 | dE2 = deltaE_ciede94(lab1, lab2) 102 | oracle = np.array([ 103 | 1.39503887, 1.93410055, 2.45433566, 0.68449187, 0.6695627, 104 | 0.69194527, 2.23606798, 2.03163832, 4.80069441, 4.80069445, 105 | 4.80069449, 4.80069453, 4.80069441, 4.80069445, 4.80069449, 106 | 3.40774352, 34.6891632, 29.44137328, 27.91408781, 24.93766082, 107 | 0.82213163, 0.71658427, 0.8048753, 0.75284394, 1.39099471, 108 | 1.24808929, 1.29795787, 1.82045088, 2.55613309, 1.42491303, 109 | 1.41945261, 2.3225685, 0.93853308, 1.30654464 110 | ]) 111 | assert_allclose(dE2, oracle, rtol=1.e-8) 112 | 113 | 114 | def test_cmc(): 115 | data = load_ciede2000_data() 116 | N = len(data) 117 | lab1 = np.zeros((N, 3)) 118 | lab1[:, 0] = data['L1'] 119 | lab1[:, 1] = data['a1'] 120 | lab1[:, 2] = data['b1'] 121 | 122 | lab2 = np.zeros((N, 3)) 123 | lab2[:, 0] = data['L2'] 124 | lab2[:, 1] = data['a2'] 125 | lab2[:, 2] = data['b2'] 126 | 127 | dE2 = deltaE_cmc(lab1, lab2) 128 | oracle = np.array([ 129 | 1.73873611, 2.49660844, 3.30494501, 0.85735576, 0.88332927, 130 | 0.97822692, 3.50480874, 2.87930032, 6.5783807, 6.57838075, 131 | 6.5783808, 6.57838086, 6.67492321, 6.67492326, 6.67492331, 132 | 4.66852997, 42.10875485, 39.45889064, 38.36005919, 33.93663807, 133 | 1.14400168, 1.00600419, 1.11302547, 1.05335328, 1.42822951, 134 | 1.2548143, 1.76838061, 2.02583367, 3.08695508, 1.74893533, 135 | 1.90095165, 1.70258148, 1.80317207, 2.44934417 136 | ]) 137 | 138 | assert_allclose(dE2, oracle, rtol=1.e-8) 139 | 140 | 141 | def test_single_color_cie76(): 142 | lab1 = (0.5, 0.5, 0.5) 143 | lab2 = (0.4, 0.4, 0.4) 144 | deltaE_cie76(lab1, lab2) 145 | 146 | 147 | def test_single_color_ciede94(): 148 | lab1 = (0.5, 0.5, 0.5) 149 | lab2 = (0.4, 0.4, 0.4) 150 | deltaE_ciede94(lab1, lab2) 151 | 152 | 153 | def test_single_color_ciede2000(): 154 | lab1 = (0.5, 0.5, 0.5) 155 | lab2 = (0.4, 0.4, 0.4) 156 | deltaE_ciede2000(lab1, lab2) 157 | 158 | 159 | def test_single_color_cmc(): 160 | lab1 = (0.5, 0.5, 0.5) 161 | lab2 = (0.4, 0.4, 0.4) 162 | deltaE_cmc(lab1, lab2) 163 | 164 | 165 | if __name__ == "__main__": 166 | from numpy.testing import run_module_suite 167 | run_module_suite() 168 | -------------------------------------------------------------------------------- /Add_colortoimg/skimage-color/tests/test_delta_e.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/315386775/FCN_train/7acaf3a263d9d7ac5de0320235cabb32e03578cd/Add_colortoimg/skimage-color/tests/test_delta_e.pyc -------------------------------------------------------------------------------- /Image_resize/scale_image.py: -------------------------------------------------------------------------------- 1 | # coding = utf-8 2 | import Image 3 | 4 | def convert(width,height): 5 | im = Image.open("C:\\workspace\\PythonLearn1\\test_1.jpg") 6 | (x, y)= 
im.size 7 | x_s = width 8 | y_s = y * x_s / x 9 | out = im.resize((x_s, y_s), Image.ANTIALIAS) 10 | out.save("C:\\workspace\\PythonLearn1\\test_1_out.jpg") 11 | if __name__ == '__main__': 12 | convert(256,256) -------------------------------------------------------------------------------- /Image_resize/single_iamge.py: -------------------------------------------------------------------------------- 1 | # coding = utf-8 2 | import Image 3 | 4 | def convert(width,height): 5 | im = Image.open("C:\\xxx\\test.jpg") 6 | out = im.resize((width, height),Image.ANTIALIAS) 7 | out.save("C:\\xxx\\test.jpg") 8 | if __name__ == '__main__': 9 | convert(256,256) -------------------------------------------------------------------------------- /Image_resize/whole_image.py: -------------------------------------------------------------------------------- 1 | # coding = utf-8 2 | import Image 3 | import os 4 | 5 | def convert(dir,width,height): 6 | file_list = os.listdir(dir) 7 | print(file_list) 8 | for filename in file_list: 9 | path = '' 10 | path = dir+filename 11 | im = Image.open(path) 12 | out = im.resize((width,height),Image.ANTIALIAS) 13 | print "%s has been resized!"%filename 14 | out.save(path) 15 | 16 | if __name__ == '__main__': 17 | dir = raw_input('please input the operate dir:') 18 | convert(dir,256,256) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FCN Dataset 2 | The code includes all the files that you need in the training stage of FCN. 3 | 4 | # FCN training Blog 5 | http://blog.csdn.net/u010402786/article/details/72883421 6 | 7 | # How to use this code 8 | 1. The first step 9 | Use the labelme tool to label the images that you need. 10 | Labelme : https://github.com/wkentaro/labelme 11 | 12 | 2. The second step 13 | Add color to the label images (Add_colortoimg). 14 | 15 | 3. The third step 16 | post_process: convert the 24-bit label PNGs to 8-bit PNGs (a Python alternative is sketched right after this README). 17 | 18 | 4. Data Augmentation 19 | keras_dataAug: use Keras to augment the images. 20 | dataAugment: the original augmentation methods (flip, brightness, contrast, crop, rotation). 21 | Jitering: the PCA-jittering method used in the AlexNet paper. 22 | 23 |
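For step 3, the repository provides a MATLAB script (post_process/24png_8png.m). Below is a minimal Pillow-based sketch of the same png24-to-png8 conversion, offered only as an illustration: `src_dir` is a placeholder path and Pillow is assumed to be installed. As with rgb2ind in the MATLAB version, adaptive quantization assigns arbitrary palette indices, so verify afterwards that the indices match your class IDs.

# Hypothetical Pillow equivalent of post_process/24png_8png.m (not part of the repository)
import glob
import os
from PIL import Image

src_dir = "F:/xxx"  # placeholder, mirroring the paths used in the MATLAB script
for path in glob.glob(os.path.join(src_dir, "*.png")):
    img = Image.open(path).convert("RGB")
    # quantize to at most 256 colours -> 8-bit indexed ("P" mode) PNG
    img8 = img.convert("P", palette=Image.ADAPTIVE, colors=256)
    img8.save(path)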
-------------------------------------------------------------------------------- /data_augement/dataAugment.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | """Data augmentation 4 | 1. flip 5 | 2. brightness 6 | 3. contrast 7 | 4. random crop 8 | 5. PCA jittering 9 | 6. rotation/reflection 10 | author: Yawei Li 11 | date: 2017-04-15 12 | """ 13 | 14 | import os 15 | import math 16 | import random 17 | import glob 18 | import numpy as np 19 | from scipy import misc 20 | from PIL import Image 21 | import cv2 22 | 23 | # Flip the image left/right 24 | def flip_left_right(image): 25 | return image[:, -1::-1] 26 | 27 | # Randomly change the brightness 28 | def random_brightness(image, max_delta=63, seed=None): 29 | img = np.array(image) 30 | delta = np.random.uniform(-max_delta, max_delta) 31 | image = Image.fromarray(np.uint8(np.clip(img + delta, 0, 255))) 32 | return image 33 | 34 | # Rotate the image by a random angle 35 | def random_Rotation(image): 36 | """ 37 | Rotate the image by a random angle (0~360 degrees). 38 | :param mode nearest, bilinear or bicubic B-spline interpolation (default) 39 | :param image the input image (numpy array) 40 | :return: the rotated image (PIL Image) 41 | """ 42 | random_angle = np.random.randint(1, 360) 43 | image = Image.fromarray(np.uint8(image)).rotate(random_angle) 44 | return image 45 | 46 | # Randomly change the contrast 47 | def random_contrast(image, lower, upper, seed=None): 48 | factor = np.random.uniform(lower, upper) 49 | mean = (image[0] + image[1] + image[2]).astype(np.float32) / 3 50 | img = np.zeros(image.shape, np.float32) 51 | for i in range(0, 3): 52 | img[i] = (image[i] - mean) * factor + mean 53 | return img 54 | 55 | # Crop the image into tiles 56 | def crop(image, name, crop_size, padding_size): 57 | (width, height) = image.shape[:2] 58 | cropped_images = [] 59 | for i in xrange(0, width, padding_size): 60 | for j in xrange(0, height, padding_size): 61 | box = (i, j, i+crop_size, j+crop_size) #left, upper, right, lower 62 | cropped_name = name + '_' + str(i) + '_' + str(j) + '.jpg' 63 | cropped_image = image[i:i+crop_size, j:j+crop_size] 64 | resized_image = cv2.resize(cropped_image, (IMAGE_SIZE, IMAGE_SIZE)) 65 | cropped_images.append(resized_image) 66 | 67 | return cropped_images 68 | 69 | 70 | 71 | 72 | # Data augmentation 73 | # Apply flip / brightness / contrast / crop / rotation to the selected images until data_num images are collected 74 | def data_augmentation(image_files, data_num): 75 | image_list = [] 76 | file_num = len(image_files) 77 | 78 | for image_file in image_files: 79 | image_list.append(misc.imread(image_file)) 80 | 81 | if file_num >= data_num: 82 | return image_list 83 | 84 | for image in image_list: 85 | rotate_image = random_Rotation(image) 86 | image_list.append(rotate_image) 87 | if len(image_list) == data_num: 88 | return image_list 89 | 90 | ''' 91 | # Flip left/right 92 | random.shuffle(image_list) 93 | for image in image_list: 94 | flipped_image = flip_left_right(image) 95 | image_list.append(flipped_image) 96 | if len(image_list) == data_num: 97 | return image_list 98 | 99 | # Random brightness 100 | random.shuffle(image_list) 101 | for image in image_list: 102 | brightness_image = random_brightness(image) 103 | image_list.append(brightness_image) 104 | if len(image_list) == data_num: 105 | return image_list 106 | 107 | # Random rotation 108 | random.shuffle(image_list) 109 | for image in image_list: 110 | rotate_image = random_Rotation(image) 111 | image_list.append(rotate_image) 112 | if len(image_list) == data_num: 113 | return image_list 114 | 115 | # Random contrast 116 | random.shuffle(image_list) 117 | for image in image_list: 118 | contrast_image = random_contrast(image) 119 | image_list.append(contrast_image) 120 | if len(image_list) == data_num: 121 | return image_list 122 | 123 | # Crop 124 | random.shuffle(image_list) 125 | image_list.clear() 126 | cropped_size = int(IMAGE_SIZE * 0.75) 127 | padding_size = IMAGE_SIZE - cropped_size 128 | for image in image_list: 129 | cropped_image_list = crop(image, 'image', cropped_size, padding_size) 130 | for cropped_image in cropped_image_list: 131 | image_list.append(cropped_image) 132 | if 
len(image_list) == data_num: 133 | return image_list 134 | ''' 135 | return image_list 136 | 137 | 138 | dir_list = os.listdir("xxx") 139 | IMAGE_SIZE = 256 140 | 141 | for dir in dir_list: 142 | image_files = glob.glob("xxx") 143 | if len(image_files) == 0: 144 | continue 145 | 146 | image_list = data_augmentation(image_files, 10) 147 | 148 | for i, image in enumerate(image_list): 149 | misc.imsave(os.path.join("xxx", str(i) + '.jpg'), image) 150 | -------------------------------------------------------------------------------- /data_augement/jitering.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import numpy as np 4 | import os 5 | from PIL import Image,ImageOps 6 | import argparse 7 | import random 8 | from scipy import misc 9 | import glob 10 | 11 | 12 | def pcaCreate(image_files,dir,name_num, dir_list): 13 | image_list = [] 14 | new_file_name = dir 15 | save_dir = dir_list + new_file_name 16 | save_dir_tt = save_dir + "\\" 17 | for image_file in image_files: 18 | image_list.append(misc.imread(image_file)) 19 | 20 | for image in image_list: 21 | img = np.asarray(image, dtype='float32') 22 | img = img / 255. 23 | img_size = img.size / 3 24 | img1 = img.reshape(img_size, 3) 25 | img1 = np.transpose(img1) 26 | img_cov = np.cov([img1[0], img1[1], img1[2]]) 27 | lamda, p = np.linalg.eig(img_cov) 28 | 29 | p = np.transpose(p) 30 | 31 | alpha1 = random.normalvariate(0, 0.3) 32 | alpha2 = random.normalvariate(0, 0.3) 33 | alpha3 = random.normalvariate(0, 0.3) 34 | v = np.transpose((alpha1 * lamda[0], alpha2 * lamda[1], alpha3 * lamda[2])) 35 | 36 | add_num = np.dot(p, v) 37 | 38 | img2 = np.array([img[:, :, 0] + add_num[0], img[:, :, 1] + add_num[1], img[:, :, 2] + add_num[2]]) 39 | 40 | img2 = np.swapaxes(img2, 0, 2) 41 | img2 = np.swapaxes(img2, 0, 1) 42 | 43 | misc.imsave(save_dir_tt + np.str(name_num) + '.jpg', img2) 44 | name_num += 1 45 | return image_list 46 | 47 | def pcaCreate_Ori(image_files,dir): 48 | parser = argparse.ArgumentParser() 49 | parser.add_argument("file_suffix", help="specific the file suffix") 50 | parser.add_argument("root_dir", help="E:\\") 51 | parser.add_argument("-f", "--file", help="record result to file") 52 | parser.add_argument("data_set",help= "specific the file suffix") 53 | args = parser.parse_args() 54 | img_num = len(os.listdir(args.root_dir + '/' + args.dataset)) 55 | for i in range(img_num): 56 | img_name = os.listdir(args.root_dir + '/' + args.dataset)[i] 57 | img = Image.open(os.path.join(args.root_dir, args.dataset, img_name)) 58 | 59 | img = np.asarray(img, dtype='float32') 60 | img = img / 255. 
61 | img_size = img.size / 3 62 | img1 = img.reshape(img_size, 3) 63 | img1 = np.transpose(img1) 64 | img_cov = np.cov([img1[0], img1[1], img1[2]]) 65 | lamda, p = np.linalg.eig(img_cov) 66 | 67 | p = np.transpose(p) 68 | 69 | alpha1 = random.normalvariate(0, 0.3) 70 | alpha2 = random.normalvariate(0, 0.3) 71 | alpha3 = random.normalvariate(0, 0.3) 72 | v = np.transpose((alpha1 * lamda[0], alpha2 * lamda[1], alpha3 * lamda[2])) 73 | 74 | add_num = np.dot(p, v) 75 | 76 | img2 = np.array([img[:, :, 0] + add_num[0], img[:, :, 1] + add_num[1], img[:, :, 2] + add_num[2]]) 77 | 78 | img2 = np.swapaxes(img2, 0, 2) 79 | img2 = np.swapaxes(img2, 0, 1) 80 | 81 | misc.imsave('test2222.jpg', img2) 82 | 83 | dir_list = "xxx" 84 | 85 | for dir in os.listdir(dir_list): 86 | 87 | image_files = glob.glob(dir_list + dir + "\\*.jpg") 88 | 89 | if len(image_files) == 0: 90 | continue 91 | 92 | name_num = 0 93 | image_list = pcaCreate(image_files, dir, name_num, dir_list) -------------------------------------------------------------------------------- /data_augement/keras_dataAug.py: -------------------------------------------------------------------------------- 1 | # -- coding utf-8 -- 2 | # Use the keras to augment the image. 3 | __author__ = 'Yawei Li' 4 | 5 | # import packages 6 | import os 7 | import glob 8 | from scipy import misc 9 | from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img 10 | 11 | datagen = ImageDataGenerator( 12 | rotation_range=180, 13 | height_shift_range=0.1, 14 | shear_range=0.2, 15 | zoom_range=0.1, 16 | horizontal_flip=True, 17 | vertical_flip=True, 18 | fill_mode='nearest') 19 | 20 | 21 | def data_augmentation(image_files, dir): 22 | image_list = [] 23 | new_file_name = dir 24 | save_dir = "xxx" + new_file_name 25 | 26 | for image_file in image_files: 27 | image_list.append(misc.imread(image_file)) 28 | 29 | for image in image_list: 30 | x = img_to_array(image) # this is a Numpy array with shape (3, 150, 150) 31 | x = x.reshape((1,) + x.shape) # this is a Numpy array with shape (1, 3, 150, 150) 32 | i = 0 33 | for batch in datagen.flow(x, batch_size=1, save_to_dir=save_dir, 34 | save_prefix=dir, save_format='jpg'): 35 | i += 1 36 | if i > 99: 37 | break 38 | return image_list 39 | 40 | # List all the files 41 | dir_list = os.listdir("xxx") 42 | 43 | for dir in dir_list: 44 | 45 | image_files = glob.glob("xxx" + dir + "/*.jpg") 46 | 47 | if len(image_files) == 0: 48 | continue 49 | 50 | image_list = data_augmentation(image_files, dir) 51 | 52 | 53 | -------------------------------------------------------------------------------- /post_process/24png_8png.m: -------------------------------------------------------------------------------- 1 | dirs=dir('F:/xxx/*.png'); 2 | for n=1:numel(dirs) 3 | strname=strcat('F:/xxx/',dirs(n).name); 4 | img=imread(strname); 5 | [x,map]=rgb2ind(img,256); 6 | newname=strcat('F:/xxx/',dirs(n).name); 7 | imwrite(x,map,newname,'png'); 8 | end -------------------------------------------------------------------------------- /post_process/image_channel.py: -------------------------------------------------------------------------------- 1 | # coding = utf-8 2 | # To prepare data may need the code. 3 | # Change the image channels when you use the opencv. 
4 | import numpy as np 5 | import cv2 6 | from mxnet import nd  # assumption: nd.array below refers to the MXNet NDArray API 7 | def preprocess(image, data_shape=500): 8 | """Take a BGR image loaded by OpenCV and apply the preprocessing; data_shape is the network input size (500 is only a placeholder, set it to your model's input resolution).""" 9 | # Resize the image to the network input size 10 | image = cv2.resize(image, (data_shape, data_shape)) 11 | # Convert BGR to RGB 12 | image = image[:, :, (2, 1, 0)] 13 | # Convert to float before subtracting the mean 14 | image = image.astype(np.float32) 15 | # Subtract the per-channel mean 16 | image -= np.array([123, 117, 104]) 17 | # Rearrange to [batch, channel, height, width] 18 | image = np.transpose(image, (2, 0, 1)) 19 | image = image[np.newaxis, :] 20 | # Convert to an NDArray 21 | image = nd.array(image) 22 | return image 23 | 24 | image = cv2.imread('img/xxx.jpg') 25 | x = preprocess(image) 26 | print('x', x.shape) --------------------------------------------------------------------------------
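As a final sanity check before training, it can help to confirm that every converted 8-bit label PNG contains only the expected class indices. The snippet below is a small sketch of such a check, not part of the repository: `label_dir` is a placeholder directory, and Pillow and NumPy are assumed to be installed.

# Hypothetical check: list the class indices present in each 8-bit label PNG
import glob
import numpy as np
from PIL import Image

label_dir = "F:/xxx"  # placeholder: directory of converted 8-bit label PNGs
for path in glob.glob(label_dir + "/*.png"):
    label = np.array(Image.open(path))
    # an 8-bit indexed PNG loads as a 2-D array of palette indices (one per pixel)
    print(path, label.dtype, label.shape, np.unique(label))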