├── .gitignore
├── LICENSE
├── README.md
├── example.png
├── padtransf
│   └── __init__.py
├── poetry.lock
└── pyproject.toml

/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | **/__pycache__
3 | .vscode/
4 | venv/
5 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Alexander Reynolds
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## padded-transformations
2 | Providing padded versions of OpenCV's `warpAffine()` and `warpPerspective()` functions.
3 |
4 | ![Example image](example.png "Example output")
5 |
6 | ## usage
7 |
8 | ```python
9 | import padtransf
10 | src_warped, dst_padded = padtransf.warpPerspectivePadded(src, dst, homography)
11 | src_warped, dst_padded = padtransf.warpAffinePadded(src, dst, affine_transf)
12 | ```
13 |
14 | ## sources
15 |
16 | Read [my Stack Overflow answer](https://stackoverflow.com/questions/44457064/displaying-stitched-images-together-without-cutoff-using-warpaffine/44459869#44459869) which inspired this repository.
17 |
18 | The images used to produce `example.png` are from [Oxford's Visual Geometry Group](http://www.robots.ox.ac.uk/~vgg/data/affine/).
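19 |
20 | ## full example
21 |
22 | A fuller sketch of how these functions might be used to stitch two overlapping photos: estimate a homography from ORB feature matches, warp with padding, then overlay the two equal-sized outputs.
23 | The file names below are placeholders, and ORB + RANSAC is only one way to obtain the `3x3` homography; any estimate that maps `src` coordinates onto `dst` coordinates will do.
24 |
25 | ```python
26 | import cv2
27 | import numpy as np
28 | import padtransf
29 |
30 | src = cv2.imread("src.jpg")  # image to be warped
31 | dst = cv2.imread("dst.jpg")  # image to be padded
32 |
33 | # estimate a homography mapping src -> dst from ORB feature matches
34 | orb = cv2.ORB_create(5000)
35 | kp_src, des_src = orb.detectAndCompute(cv2.cvtColor(src, cv2.COLOR_BGR2GRAY), None)
36 | kp_dst, des_dst = orb.detectAndCompute(cv2.cvtColor(dst, cv2.COLOR_BGR2GRAY), None)
37 | matcher = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
38 | matches = sorted(matcher.match(des_src, des_dst), key=lambda m: m.distance)[:100]
39 | src_pts = np.float32([kp_src[m.queryIdx].pt for m in matches]).reshape(-1, 1, 2)
40 | dst_pts = np.float32([kp_dst[m.trainIdx].pt for m in matches]).reshape(-1, 1, 2)
41 | homography, _ = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
42 |
43 | # both outputs come back the same size, so they can be combined directly
44 | src_warped, dst_padded = padtransf.warpPerspectivePadded(src, dst, homography)
45 | stitched = np.maximum(src_warped, dst_padded)
46 | cv2.imwrite("stitched.png", stitched)
47 | ```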
--------------------------------------------------------------------------------
/example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alkasm/padded-transformations/b6586de14126695bc91cb2ab71d594fb3b122602/example.png
--------------------------------------------------------------------------------
/padtransf/__init__.py:
--------------------------------------------------------------------------------
1 | """Padded transformation module.
2 |
3 | This module provides two functions, warpPerspectivePadded() and
4 | warpAffinePadded(), which complement the built-in OpenCV functions
5 | warpPerspective() and warpAffine(). These functions calculate the
6 | extent of the warped image and pad both the destination and the
7 | warped image so both images can be fully displayed together.
8 |
9 | References
10 | ----------
11 | See the following question and my answer on Stack Overflow for an
12 | idea of how this was conceptualized and to read the mathematics
13 | behind the functions: https://stackoverflow.com/a/44459869/5087436
14 |
15 | """
16 |
17 |
18 | import cv2
19 | import numpy as np
20 |
21 |
22 | def warpPerspectivePadded(
23 |         src, dst, M,
24 |         flags=cv2.INTER_LINEAR,
25 |         borderMode=cv2.BORDER_CONSTANT,
26 |         borderValue=0):
27 |     """Performs a perspective warp with padding.
28 |
29 |     Parameters
30 |     ----------
31 |     src : array_like
32 |         source image, to be warped.
33 |     dst : array_like
34 |         destination image, to be padded.
35 |     M : array_like
36 |         `3x3` perspective transformation matrix.
37 |
38 |     Returns
39 |     -------
40 |     src_warped : ndarray
41 |         padded and warped source image
42 |     dst_padded : ndarray
43 |         padded destination image, same size as src_warped
44 |
45 |     Optional Parameters
46 |     -------------------
47 |     flags : int, optional
48 |         combination of interpolation methods (`cv2.INTER_LINEAR` or
49 |         `cv2.INTER_NEAREST`) and the optional flag `cv2.WARP_INVERSE_MAP`,
50 |         which sets `M` as the inverse transformation (`dst` --> `src`).
51 |     borderMode : int, optional
52 |         pixel extrapolation method (`cv2.BORDER_CONSTANT` or
53 |         `cv2.BORDER_REPLICATE`).
54 |     borderValue : numeric, optional
55 |         value used in case of a constant border; by default, it equals 0.
56 |
57 |     See Also
58 |     --------
59 |     warpAffinePadded() : for `2x3` affine transformations
60 |     cv2.warpPerspective(), cv2.warpAffine() : original OpenCV functions
61 |     """
62 |
63 |     assert M.shape == (3, 3), \
64 |         'Perspective transformation shape should be (3, 3).\n' \
65 |         + 'Use warpAffinePadded() for (2, 3) affine transformations.'
66 |
67 |     M = M / M[2, 2]  # normalize the homography so that M[2, 2] == 1
68 |     if flags in (cv2.WARP_INVERSE_MAP,
69 |                  cv2.INTER_LINEAR + cv2.WARP_INVERSE_MAP,
70 |                  cv2.INTER_NEAREST + cv2.WARP_INVERSE_MAP):
71 |         M = cv2.invert(M)[1]
72 |         flags -= cv2.WARP_INVERSE_MAP
73 |
74 |     # it is enough to find where the corners of the image go to find
75 |     # the padding bounds; points in clockwise order from origin
76 |     src_h, src_w = src.shape[:2]
77 |     lin_homg_pts = np.array([
78 |         [0, src_w, src_w, 0],
79 |         [0, 0, src_h, src_h],
80 |         [1, 1, 1, 1]])
81 |
82 |     # transform points
83 |     transf_lin_homg_pts = M.dot(lin_homg_pts)
84 |     transf_lin_homg_pts /= transf_lin_homg_pts[2, :]
85 |
86 |     # find min and max points
87 |     min_x = np.floor(np.min(transf_lin_homg_pts[0])).astype(int)
88 |     min_y = np.floor(np.min(transf_lin_homg_pts[1])).astype(int)
89 |     max_x = np.ceil(np.max(transf_lin_homg_pts[0])).astype(int)
90 |     max_y = np.ceil(np.max(transf_lin_homg_pts[1])).astype(int)
91 |
92 |     # add translation to the transformation matrix to shift to positive values
93 |     anchor_x, anchor_y = 0, 0
94 |     transl_transf = np.eye(3, 3)
95 |     if min_x < 0:
96 |         anchor_x = -min_x
97 |         transl_transf[0, 2] += anchor_x
98 |     if min_y < 0:
99 |         anchor_y = -min_y
100 |         transl_transf[1, 2] += anchor_y
101 |     shifted_transf = transl_transf.dot(M)
102 |     shifted_transf /= shifted_transf[2, 2]
103 |
104 |     # create padded destination image
105 |     dst_h, dst_w = dst.shape[:2]
106 |
107 |     pad_widths = [anchor_y, max(max_y, dst_h) - dst_h,
108 |                   anchor_x, max(max_x, dst_w) - dst_w]
109 |
110 |     dst_padded = cv2.copyMakeBorder(dst, *pad_widths,
111 |                                     borderType=borderMode, value=borderValue)
112 |
113 |     dst_pad_h, dst_pad_w = dst_padded.shape[:2]
114 |     src_warped = cv2.warpPerspective(
115 |         src, shifted_transf, (dst_pad_w, dst_pad_h),
116 |         flags=flags, borderMode=borderMode, borderValue=borderValue)
117 |
118 |     return src_warped, dst_padded
119 |
120 |
121 | def warpAffinePadded(
122 |         src, dst, M,
123 |         flags=cv2.INTER_LINEAR,
124 |         borderMode=cv2.BORDER_CONSTANT,
125 |         borderValue=0):
126 |     """Performs an affine or Euclidean/rigid warp with padding.
127 |
128 |     Parameters
129 |     ----------
130 |     src : array_like
131 |         source image, to be warped.
132 |     dst : array_like
133 |         destination image, to be padded.
134 |     M : array_like
135 |         `2x3` affine transformation matrix.
136 |
137 |     Returns
138 |     -------
139 |     src_warped : ndarray
140 |         padded and warped source image
141 |     dst_padded : ndarray
142 |         padded destination image, same size as src_warped
143 |
144 |     Optional Parameters
145 |     -------------------
146 |     flags : int, optional
147 |         combination of interpolation methods (`cv2.INTER_LINEAR` or
148 |         `cv2.INTER_NEAREST`) and the optional flag `cv2.WARP_INVERSE_MAP`,
149 |         which sets `M` as the inverse transformation (`dst` --> `src`).
150 |     borderMode : int, optional
151 |         pixel extrapolation method (`cv2.BORDER_CONSTANT` or
152 |         `cv2.BORDER_REPLICATE`).
153 |     borderValue : numeric, optional
154 |         value used in case of a constant border; by default, it equals 0.
155 |
156 |     See Also
157 |     --------
158 |     warpPerspectivePadded() : for `3x3` perspective transformations
159 |     cv2.warpPerspective(), cv2.warpAffine() : original OpenCV functions
160 |     """
161 |     assert M.shape == (2, 3), \
162 |         'Affine transformation shape should be (2, 3).\n' \
163 |         + 'Use warpPerspectivePadded() for (3, 3) homography transformations.'
164 |
165 |     if flags in (cv2.WARP_INVERSE_MAP,
166 |                  cv2.INTER_LINEAR + cv2.WARP_INVERSE_MAP,
167 |                  cv2.INTER_NEAREST + cv2.WARP_INVERSE_MAP):
168 |         M = cv2.invertAffineTransform(M)
169 |         flags -= cv2.WARP_INVERSE_MAP
170 |
171 |     # it is enough to find where the corners of the image go to find
172 |     # the padding bounds; points in clockwise order from origin
173 |     src_h, src_w = src.shape[:2]
174 |     lin_pts = np.array([
175 |         [0, src_w, src_w, 0],
176 |         [0, 0, src_h, src_h]])
177 |
178 |     # transform points
179 |     transf_lin_pts = M[:, :2].dot(lin_pts) + M[:, 2].reshape(2, 1)
180 |
181 |     # find min and max points
182 |     min_x = np.floor(np.min(transf_lin_pts[0])).astype(int)
183 |     min_y = np.floor(np.min(transf_lin_pts[1])).astype(int)
184 |     max_x = np.ceil(np.max(transf_lin_pts[0])).astype(int)
185 |     max_y = np.ceil(np.max(transf_lin_pts[1])).astype(int)
186 |
187 |     # add translation to the transformation matrix to shift to positive values
188 |     anchor_x, anchor_y = 0, 0
189 |     if min_x < 0:
190 |         anchor_x = -min_x
191 |     if min_y < 0:
192 |         anchor_y = -min_y
193 |     shifted_transf = M + [[0, 0, anchor_x], [0, 0, anchor_y]]
194 |
195 |     # create padded destination image
196 |     dst_h, dst_w = dst.shape[:2]
197 |
198 |     pad_widths = [anchor_y, max(max_y, dst_h) - dst_h,
199 |                   anchor_x, max(max_x, dst_w) - dst_w]
200 |
201 |     dst_padded = cv2.copyMakeBorder(dst, *pad_widths,
202 |                                     borderType=borderMode, value=borderValue)
203 |
204 |     dst_pad_h, dst_pad_w = dst_padded.shape[:2]
205 |     src_warped = cv2.warpAffine(
206 |         src, shifted_transf, (dst_pad_w, dst_pad_h),
207 |         flags=flags, borderMode=borderMode, borderValue=borderValue)
208 |
209 |     return src_warped, dst_padded
210 |
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | [[package]]
2 | category = "main"
3 | description = "Wrapper package for
OpenCV python bindings." 4 | name = "opencv-python" 5 | optional = false 6 | python-versions = ">=3.6" 7 | version = "4.4.0.46" 8 | 9 | [metadata] 10 | content-hash = "ab9136f6b7277a2795ba5782af3cad96c1cdd17cdd697727d53bcaa69a9aebd9" 11 | python-versions = "^3.6" 12 | 13 | [metadata.files] 14 | opencv-python = [ 15 | {file = "opencv-python-4.4.0.46.tar.gz", hash = "sha256:d80db278a07f51811dbf0f9c31ff7cd5b2501822fb7a7587e71f9ff27d5c04bd"}, 16 | {file = "opencv_python-4.4.0.46-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:14df77490c8aedceae74e660564d48c04761658aecc93895ac5e974006a89606"}, 17 | {file = "opencv_python-4.4.0.46-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:6b6d23de6d5ddc55e865ac8532bf8062b26ba70305fa1c87c671717027dcd370"}, 18 | {file = "opencv_python-4.4.0.46-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:6b1d85cbb64ce20ac5f79ad8e3e76a3dbff53d258c65f2fc0b9411321147a0be"}, 19 | {file = "opencv_python-4.4.0.46-cp36-cp36m-win32.whl", hash = "sha256:4af0053c6a70f127a52c26b112341826d3dbfce6955beb9044d3eabd7e14d1cd"}, 20 | {file = "opencv_python-4.4.0.46-cp36-cp36m-win_amd64.whl", hash = "sha256:135e05b69ab9665cbe2589f56e60895219bc2443a632bdc4bde72fb95eda1582"}, 21 | {file = "opencv_python-4.4.0.46-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:51baebb0f8f3cae4cccd30daf018a5bb75cb759d5658aea29100d34cd5cac106"}, 22 | {file = "opencv_python-4.4.0.46-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:9659e80059c9f39728c7dcc22032dff0d1d467f07b6cd8e036613393e4b7c71a"}, 23 | {file = "opencv_python-4.4.0.46-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:f69a56e958ecb549ba84e0497a438080932b4d52ded441cec04d80afde71dc0a"}, 24 | {file = "opencv_python-4.4.0.46-cp37-cp37m-win32.whl", hash = "sha256:68a9ec7e32f82cab267b6f757d9862a9a930371062739f9d00472e7c850c5854"}, 25 | {file = "opencv_python-4.4.0.46-cp37-cp37m-win_amd64.whl", hash = "sha256:17581c68400f828700e5c6b3b082f50c781bf74cb9a7b972a04f05d26c8e894a"}, 26 | {file = "opencv_python-4.4.0.46-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:db874c65654465ef71d6e8618bed8c725722bc90624132b9512bf061abb4eec0"}, 27 | {file = "opencv_python-4.4.0.46-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:8aeda9b2c37bf91fa88d67f09b85f2250661eec43d72184ec544783de204e96a"}, 28 | {file = "opencv_python-4.4.0.46-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:8a8ebd7ceebc0be9c14ca3e25a1c4ae086016b469848258e998247f2fc855314"}, 29 | {file = "opencv_python-4.4.0.46-cp38-cp38-win32.whl", hash = "sha256:e4c072cf4260063ebadc70e34d622fa1127a88e364475ed757709e249ebe990f"}, 30 | {file = "opencv_python-4.4.0.46-cp38-cp38-win_amd64.whl", hash = "sha256:6022609b67f9c0f14e6807e782660d1d1be94d4f0c7bc1794d7d8f600014acb2"}, 31 | {file = "opencv_python-4.4.0.46-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:117dbb2fd184de28d831f14c1da17864efcee7bb7895e43adf40f5e1da9137fb"}, 32 | {file = "opencv_python-4.4.0.46-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:c1382209a771ca8a25fe89d4a2377875538c6ed3cf8745280e65636cbd0988f2"}, 33 | {file = "opencv_python-4.4.0.46-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:744e9ae2fb4c8574e6d4a762146b4d0984bdec60b98480fc54a363c03a07a1ac"}, 34 | {file = "opencv_python-4.4.0.46-cp39-cp39-win32.whl", hash = "sha256:7fe81d08df4eb5dc4c6aa5f09888b6fd390fce5fa7d5624a98cac890b9aa6181"}, 35 | {file = "opencv_python-4.4.0.46-cp39-cp39-win_amd64.whl", hash = "sha256:0548981fe189e0d57b9cc65066b66fd70d4bc84ea906f349a63d9098e1b911c6"}, 36 | ] 37 | 
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "padtransf"
3 | version = "0.1.0"
4 | description = "Padded versions of OpenCV's warping functions."
5 | authors = ["Alexander Reynolds "]
6 | homepage = "https://github.com/alkasm/padded-transformations"
7 | repository = "https://github.com/alkasm/padded-transformations"
8 | keywords = ["cvtools", "computer vision", "cv", "opencv", "warpAffine", "warpPerspective"]
9 | license = "MIT"
10 | classifiers = [
11 |     "Programming Language :: Python :: 3",
12 |     "License :: OSI Approved :: MIT License",
13 |     "Operating System :: OS Independent",
14 | ]
15 |
16 | [tool.poetry.dependencies]
17 | python = "^3.6"
18 | opencv-python = "!=4.2.0.32"
19 |
20 | [tool.poetry.dev-dependencies]
21 |
22 | [build-system]
23 | requires = ["poetry>=0.12"]
24 | build-backend = "poetry.masonry.api"
25 |
--------------------------------------------------------------------------------