├── .devcontainer └── devcontainer.json ├── .github └── workflows │ ├── python-app.yml │ └── python-publish.yml ├── .gitignore ├── Dockerfile ├── Dockerfile-tf1-12 ├── Dockerfile_tf2_gpu ├── LICENSE ├── README.md ├── VersionData ├── UpdateVersion.py ├── Version.py └── __init__.py ├── build.cmd ├── getting_started ├── Mutual_information.ipynb ├── Mutual_information_ll_only.ipynb ├── dwt1d.ipynb ├── example.ipynb ├── example1.ipynb ├── pyramid_entorpy.ipynb └── tutorial.ipynb ├── pytest.ini ├── requirements.txt ├── setup.py └── src ├── Dockerfile ├── input ├── LennaGrey.png └── Lenna_orig.png ├── tensorflow_wavelets ├── Layers │ ├── DMWT.py │ ├── DTCWT.py │ ├── DWT.py │ ├── Threshold.py │ └── __init__.py ├── __init__.py └── utils │ ├── __init__.py │ ├── canny_edge_detector.py │ ├── cast.py │ ├── data.py │ ├── filters.py │ ├── helpers.py │ ├── models.py │ ├── mse.py │ ├── plot.py │ ├── psnt_hvs.py │ ├── salt_pepper.py │ ├── ssim.py │ └── write_raw.py ├── test.py └── tests ├── __init__.py └── test_dwt1d.py /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Existing Dockerfile", 3 | 4 | // Sets the run context to one level up instead of the .devcontainer folder. 5 | "context": "..", 6 | 7 | // Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename. 8 | "dockerFile": "../Dockerfile", 9 | 10 | // Set *default* container specific settings.json values on container create. 11 | "settings": { 12 | 13 | "python.defaultInterpreterPath": "/usr/local/bin/python" 14 | }, 15 | 16 | // Add the IDs of extensions you want installed when the container is created. 17 | "extensions": [ 18 | "ms-python.python", 19 | "donjayamanne.python-environment-manager", 20 | "yzhang.markdown-all-in-one", 21 | "ms-python.python", 22 | "ms-toolsai.jupyter", 23 | "knisterpeter.vscode-github" 24 | 25 | ], 26 | 27 | 28 | "runArgs": ["--privileged=true", "-v", "/mnt/:/mnt/","--gpus", "all", "--user", "1000:1000", "-p", "6006:6006"] 29 | 30 | } 31 | -------------------------------------------------------------------------------- /.github/workflows/python-app.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Python application 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | build: 17 | 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v3 22 | - name: Set up Python 3.8 23 | uses: actions/setup-python@v3 24 | with: 25 | python-version: "3.8" 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install flake8 pytest 30 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 31 | - name: Lint with flake8 32 | run: | 33 | # stop the build if there are Python syntax errors or undefined names 34 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 35 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 36 | flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 37 | - name: Test with pytest 38 | run: | 39 | cd src 40 | pytest test.py 41 | pytest tests/test_dwt1d.py 42 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package to PyPI when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | release-build: 20 | runs-on: ubuntu-latest 21 | 22 | steps: 23 | - uses: actions/checkout@v4 24 | 25 | - uses: actions/setup-python@v5 26 | with: 27 | python-version: "3.x" 28 | 29 | - name: Build release distributions 30 | run: | 31 | # NOTE: put your own distribution build steps here. 32 | python -m pip install build 33 | python -m build 34 | 35 | - name: Upload distributions 36 | uses: actions/upload-artifact@v4 37 | with: 38 | name: release-dists 39 | path: dist/ 40 | 41 | pypi-publish: 42 | runs-on: ubuntu-latest 43 | needs: 44 | - release-build 45 | permissions: 46 | # IMPORTANT: this permission is mandatory for trusted publishing 47 | id-token: write 48 | 49 | # Dedicated environments with protections for publishing are strongly recommended. 50 | # For more information, see: https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment#deployment-protection-rules 51 | environment: 52 | name: pypi 53 | # OPTIONAL: uncomment and update to include your PyPI project URL in the deployment status: 54 | url: https://pypi.org/project/tensorflow-wavelets/ 55 | 56 | steps: 57 | - name: Retrieve release distributions 58 | uses: actions/download-artifact@v4 59 | with: 60 | name: release-dists 61 | path: dist/ 62 | 63 | - name: Publish release distributions to PyPI 64 | uses: pypa/gh-action-pypi-publish@release/v1 65 | with: 66 | packages-dir: dist/ 67 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | *.h5 3 | .idea 4 | logs 5 | !requirements.txt 6 | .ipynb_checkpoints 7 | *__pycache__* 8 | *.asv 9 | *.png 10 | *.gif 11 | output/* 12 | *.mat 13 | !src/input/LennaGrey.png 14 | !src/input/Lenna_orig.png 15 | dist/* 16 | venv/* 17 | *.egg-info* 18 | Development/output/* 19 | *.jpg 20 | *.zip 21 | Development/OpenDVC/OpenDVC_model/* 22 | *.bin 23 | *.npy 24 | checkpoint 25 | please_save.somthing.data-00000-of-00001 26 | .DS_Store 27 | *ckpt.* 28 | *.pb 29 | *.index 30 | *.data-00000-of-00001 31 | *.gz 32 | *.v2 33 | *.profile-empty 34 | *.deb 35 | .vscode/* 36 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM tensorflow/tensorflow:2.6.1-gpu 2 | 3 | # RUN apt-get install -y --no-install-recommends wget 4 | # RUN apt-key del 7fa2af80 5 | # RUN wget 
https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/cuda-keyring_1.0-1_all.deb 6 | # RUN dpkg -i cuda-keyring_1.0-1_all.deb 7 | # RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/3bf863cc.pub 8 | # RUN apt-get update 9 | 10 | RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/3bf863cc.pub 88 11 | RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu2004/x86_64/7fa2af80.pub 20 12 | 13 | 14 | RUN apt-get update 15 | RUN apt-get install ffmpeg libsm6 libxext6 -y 16 | RUN pip3 install --upgrade pip 17 | RUN pip3 install opencv-python 18 | RUN pip3 install tensorflow 19 | RUN pip3 install tensorflow-probability 20 | RUN pip3 install tensorflow_addons 21 | RUN pip3 install PyWavelets 22 | RUN pip3 install psnr-hvsm 23 | RUN pip3 install imageio 24 | ENV PYTHONPATH /workspace/tensorflow_wavelets/src 25 | RUN pip3 install numpy scipy 26 | RUN pip3 install tensorflow-compression 27 | RUN pip3 install matplotlib 28 | RUN pip3 install ipykernel 29 | 30 | RUN useradd -ms /bin/bash ubu-admin 31 | 32 | 33 | RUN apt-get install git -y 34 | RUN pip3 install jupyter 35 | RUN pip3 install jupyterlab 36 | RUN pip3 install notebook 37 | 38 | EXPOSE 8888 39 | # CMD ["jupyter", "notebook", "--port=8888", "--no-browser", "--ip=0.0.0.0"] 40 | -------------------------------------------------------------------------------- /Dockerfile-tf1-12: -------------------------------------------------------------------------------- 1 | FROM python:3.6 2 | 3 | RUN apt-get update 4 | RUN apt-get install ffmpeg libsm6 libxext6 -y 5 | 6 | RUN pip3 install opencv-python 7 | RUN pip3 install tensorflow==1.12 8 | RUN pip3 install tensorflow-probability==0.5.0 9 | RUN pip3 install tensorflow_addons 10 | RUN pip3 install PyWavelets 11 | RUN pip3 install psnr-hvsm 12 | RUN pip3 install imageio 13 | ENV PYTHONPATH /workspace/tensorflow_wavelets/src 14 | RUN pip install numpy scipy -------------------------------------------------------------------------------- /Dockerfile_tf2_gpu: -------------------------------------------------------------------------------- 1 | FROM tensorflow/tensorflow:latest-gpu 2 | 3 | RUN apt-get update 4 | RUN apt-get install ffmpeg libsm6 libxext6 -y 5 | RUN pip3 install --upgrade pip 6 | RUN pip3 install opencv-python 7 | RUN pip3 install tensorflow 8 | RUN pip3 install tensorflow-probability 9 | RUN pip3 install tensorflow_addons 10 | RUN pip3 install PyWavelets 11 | RUN pip3 install psnr-hvsm 12 | RUN pip3 install imageio 13 | ENV PYTHONPATH /workspace/tensorflow_wavelets/src 14 | RUN pip3 install numpy scipy 15 | RUN pip3 install tensorflow-compression 16 | RUN pip3 install matplotlib 17 | RUN pip3 install ipykernel 18 | 19 | RUN useradd -ms /bin/bash ubu-admin 20 | 21 | 22 | RUN apt-get install git -y 23 | RUN pip3 install jupyter 24 | RUN pip3 install jupyterlab 25 | RUN pip3 install notebook 26 | 27 | EXPOSE 8888 28 | # CMD ["jupyter", "notebook", "--port=8888", "--no-browser", "--ip=0.0.0.0"] 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Timor 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, 
including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | tensorflow-wavelets is an implementation of Custom Layers for Neural Networks: 2 | - *Discrete Wavelets Transform Layer* 3 | - *Duel Tree Complex Wavelets Transform Layer* 4 | - *Multi Wavelets Transform Layer* 5 | 6 | 7 | 8 | # 9 | ``` 10 | git clone https://github.com/Timorleiderman/tensorflow-wavelets.git 11 | cd tensorflow-wavelets 12 | pip install -r requirements.txt 13 | ``` 14 | ## Installation 15 | #### tested with python 3.8 16 | ``` 17 | pip install tensorflow-wavelets 18 | ``` 19 | # Usage 20 | ``` 21 | from tensorflow import keras 22 | import tensorflow_wavelets.Layers.DWT as DWT 23 | import tensorflow_wavelets.Layers.DTCWT as DTCWT 24 | import tensorflow_wavelets.Layers.DMWT as DMWT 25 | 26 | # Custom Activation function Layer 27 | import tensorflow_wavelets.Layers.Threshold as Threshold 28 | ``` 29 | 30 | # Examples 31 | ## DWT(name="haar", concat=0) 32 | ### "name" can be found in pywt.wavelist(family) 33 | ### concat = 0 means to split to 4 smaller layers 34 | 35 | ``` 36 | from tensorflow import keras 37 | model = keras.Sequential() 38 | model.add(keras.Input(shape=(28, 28, 1))) 39 | model.add(DWT.DWT(name="haar",concat=0)) 40 | model.add(keras.layers.Flatten()) 41 | model.add(keras.layers.Dense(nb_classes, activation="softmax")) 42 | model.summary() 43 | ``` 44 | 45 | _________________________________________________________________ 46 | Layer (type) Output Shape Param # 47 | ================================================================= 48 | dwt_9_haar (DWT) (None, 14, 14, 4) 0 49 | _________________________________________________________________ 50 | flatten_9 (Flatten) (None, 784) 0 51 | _________________________________________________________________ 52 | dense_9 (Dense) (None, 10) 7850 53 | ================================================================= 54 | Total params: 7,850 55 | Trainable params: 7,850 56 | Non-trainable params: 0 57 | _________________________________________________________________ 58 | 59 | ### name = "db4" concat = 1 60 | ``` 61 | 62 | model = keras.Sequential() 63 | model.add(keras.layers.InputLayer(input_shape=(28, 28, 1))) 64 | model.add(DWT.DWT(name="db4", concat=1)) 65 | model.summary() 66 | ``` 67 | 68 | Model: "sequential" 69 | _________________________________________________________________ 70 | Layer (type) Output Shape Param # 71 | ================================================================= 72 | dwt_db4 (DWT) (None, 34, 34, 1) 0 73 | 
================================================================= 74 | Total params: 0 75 | Trainable params: 0 76 | Non-trainable params: 0 77 | _________________________________________________________________ 78 | 79 | # DMWT 80 | ### functional example with Sure Threshold 81 | ``` 82 | 83 | x_inp = keras.layers.Input(shape=(512, 512, 1)) 84 | x = DMWT.DMWT("ghm")(x_inp) 85 | x = Threshold.Threshold(algo='sure', mode='hard')(x) # use "soft" or "hard" 86 | x = DMWT.IDMWT("ghm")(x) 87 | model = keras.models.Model(x_inp, x, name="MyModel") 88 | model.summary() 89 | ``` 90 | Model: "MyModel" 91 | _________________________________________________________________ 92 | Layer (type) Output Shape Param # 93 | ================================================================= 94 | input_1 (InputLayer) [(None, 512, 512, 1)] 0 95 | _________________________________________________________________ 96 | dmwt (DMWT) (None, 1024, 1024, 1) 0 97 | _________________________________________________________________ 98 | sure_threshold (SureThreshol (None, 1024, 1024, 1) 0 99 | _________________________________________________________________ 100 | idmwt (IDMWT) (None, 512, 512, 1) 0 101 | ================================================================= 102 | Total params: 0 103 | Trainable params: 0 104 | Non-trainable params: 0 105 | _________________________________________________________________ 106 | 107 | 108 | ## PyPi upload: 109 | ``` 110 | pip install --upgrade build 111 | pip install --upgrade twine 112 | python -m build 113 | python -m twine upload --repository pypi dist/* 114 | 115 | ``` 116 | 117 | If our open source codes are helpful for your research, please cite our 118 | [technical report:](https://www.mdpi.com/1099-4300/26/10/836) 119 | ``` 120 | @Article{e26100836, 121 | AUTHOR = {Leiderman, Timor and Ben Ezra, Yosef}, 122 | TITLE = {Information Bottleneck Driven Deep Video Compression—IBOpenDVCW}, 123 | JOURNAL = {Entropy}, 124 | VOLUME = {26}, 125 | YEAR = {2024}, 126 | NUMBER = {10}, 127 | ARTICLE-NUMBER = {836}, 128 | URL = {https://www.mdpi.com/1099-4300/26/10/836}, 129 | ISSN = {1099-4300}, 130 | DOI = {10.3390/e26100836} 131 | } 132 | ``` 133 | 134 | **Free Software, Hell Yeah!** 135 | -------------------------------------------------------------------------------- /VersionData/UpdateVersion.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | 5 | def read_config(filename): 6 | config_dict = {} 7 | with open(filename) as f: 8 | for lines in f: 9 | if len(lines.strip()) == 0: 10 | continue 11 | items = lines.replace(" ", "").strip("\n").split('=') 12 | config_dict[items[0]] = eval(items[1]) 13 | 14 | return config_dict 15 | 16 | 17 | versionTemplae = \ 18 | """ 19 | major = {major} 20 | minor = {minor} 21 | build = {build} 22 | """.strip("\n") 23 | 24 | versionFileName = "version.py" 25 | config = os.path.join(os.path.dirname(sys.argv[0]), versionFileName) 26 | version = read_config(config) 27 | 28 | versionData = versionTemplae.format(major=version["major"], minor=version["minor"], build=version["build"]+1) 29 | 30 | with open(config, "w") as versionFile: 31 | versionFile.write(versionData) 32 | -------------------------------------------------------------------------------- /VersionData/Version.py: -------------------------------------------------------------------------------- 1 | major = 1 2 | minor = 1 3 | build = 2 -------------------------------------------------------------------------------- 
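The VersionData block above implements a simple build-number bump: UpdateVersion.py reads the major/minor/build values with read_config(), increments build, and rewrites the version file (build.cmd runs it before every PyPI build). A minimal runnable sketch of that flow follows; it assumes a hypothetical in-memory helper read_config_from_text in place of the file-based read_config, and the printed "1.1.3" simply reflects the Version.py contents shown above.

```
# Sketch only: mirrors what VersionData/UpdateVersion.py does with the version file.
def read_config_from_text(text):
    # Same idea as read_config(): parse "key = value" lines into a dict.
    config = {}
    for line in text.splitlines():
        if not line.strip():
            continue
        key, value = line.replace(" ", "").split("=")
        config[key] = int(value)  # the original uses eval(); int() is enough for these fields
    return config

version_text = "major = 1\nminor = 1\nbuild = 2"    # contents of VersionData/Version.py above
version = read_config_from_text(version_text)
version["build"] += 1                               # UpdateVersion.py bumps the build number
print("{major}.{minor}.{build}".format(**version))  # -> 1.1.3
```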
/VersionData/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Timorleiderman/tensorflow-wavelets/58cf7dc8e469e963a6c7dd0567e05925a7d3c65c/VersionData/__init__.py -------------------------------------------------------------------------------- /build.cmd: -------------------------------------------------------------------------------- 1 | 2 | py VersionData\UpdateVersion.py 3 | 4 | py -m build 5 | 6 | py -m twine upload --repository pypi dist/* -------------------------------------------------------------------------------- /getting_started/Mutual_information.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "ename": "ModuleNotFoundError", 10 | "evalue": "No module named 'cv2'", 11 | "output_type": "error", 12 | "traceback": [ 13 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 14 | "\u001b[1;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", 15 | "\u001b[1;32m~\\AppData\\Local\\Temp\\ipykernel_12600\\2135738620.py\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[1;32mimport\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mnumpy\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mnp\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mpandas\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mpd\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 4\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mtensorflow\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mtf\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[1;32mfrom\u001b[0m \u001b[0mnpeet\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mentropy_estimators\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 16 | "\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'cv2'" 17 | ] 18 | } 19 | ], 20 | "source": [ 21 | "\n", 22 | "import cv2\n", 23 | "import numpy as np\n", 24 | "import pandas as pd\n", 25 | "import tensorflow as tf\n", 26 | "from npeet import entropy_estimators\n", 27 | "from tensorflow_wavelets.utils.models import DWT" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 42, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "# git clone https://github.com/gregversteeg/NPEET.git\n", 37 | "# cd NPEET\n", 38 | "# pip install .\n", 39 | "# x = [[1.3],[3.7],[5.1],[2.4],[3.4]]\n", 40 | "# y = [[1.5],[3.32],[5.3],[2.3],[3.3]]\n", 41 | "# entropy_estimators.mi(x,y)\n", 42 | "# Out: 0.168" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 43, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "\n", 52 | "def mutual_info_wavelet(wavelet_name, img, pd_dict:dict, shape=(240, 240)):\n", 53 | " # resize input image to be square\n", 54 | " img = cv2.resize(img, shape)\n", 55 | " # expand dims for the dwt transfor\n", 56 | " img_t = np.expand_dims(img, axis=-1)\n", 57 | " img_ex = np.expand_dims(img_t, axis=0)\n", 58 | " # concat=1 LL,LH,HL,HH in one image\n", 59 | " coeffs = DWT.DWT(wavelet_name, 1, \"VALID\", \"SYMMETRIC\")(tf.dtypes.cast(img_ex, tf.float32))\n", 60 | " # extract resault to numpy\n", 61 | " dtw_img 
= coeffs[0,:, :, 0].numpy()\n", 62 | " # resize the output to be like the input (maybe we need to use crop?)\n", 63 | " dtw_img_res = cv2.resize(dtw_img, dsize=img.shape, interpolation=cv2.INTER_LINEAR)\n", 64 | "\n", 65 | " flat_img = img_ex.flatten()\n", 66 | " flat_coeffs = dtw_img_res.flatten()\n", 67 | " mi = entropy_estimators.mi(flat_img, flat_coeffs)\n", 68 | " # print(f\"Mutual Information = {mi}, for {wavelet_name} Wavelet\")\n", 69 | " pd_dict[wavelet_name] = mi\n", 70 | "\n" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": null, 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "\n", 80 | "# def mutual_info_wavelet2(wavelet_name, img, pd_dict:dict, shape=(240, 240)):\n", 81 | " # resize input image to be square\n", 82 | "img = cv2.imread(\"../src/input/Lenna_orig.png\", 0)\n", 83 | "res = dict()\n", 84 | "shape=(240, 240)\n", 85 | "wavelet_name = \"haar\"\n", 86 | "\n", 87 | "img = cv2.resize(img, shape)\n", 88 | "# expand dims for the dwt transfor\n", 89 | "img_t = np.expand_dims(img, axis=-1)\n", 90 | "img_ex = np.expand_dims(img_t, axis=0)\n", 91 | "# concat=1 LL,LH,HL,HH in one image\n", 92 | "coeffs = DWT.DWT(wavelet_name, concat=1)(tf.dtypes.cast(img_ex, tf.float32))\n", 93 | "\n", 94 | "\n", 95 | "idwt = DWT.IDWT(wavelet_name, concat=1)(tf.dtypes.cast(coeffs, tf.float32))\n", 96 | "# extract resault to numpy\n", 97 | "dtw_img = coeffs[0,:, :, 0].numpy()\n", 98 | "# resize the output to be like the input (maybe we need to use crop?)\n", 99 | "dtw_img_res = cv2.resize(dtw_img, dsize=img.shape, interpolation=cv2.INTER_LINEAR)\n", 100 | "\n", 101 | "flat_img = img_ex.flatten()\n", 102 | "flat_coeffs = dtw_img_res.flatten()\n", 103 | "mi = entropy_estimators.mi(flat_img, flat_coeffs)\n", 104 | "# print(f\"Mutual Information = {mi}, for {wavelet_name} Wavelet\")\n", 105 | "pd_dict[wavelet_name] = mi\n", 106 | "\n" 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | "execution_count": 44, 112 | "metadata": {}, 113 | "outputs": [], 114 | "source": [ 115 | "img = cv2.imread(\"../src/input/Lenna_orig.png\", 0)\n", 116 | "res = dict()\n", 117 | "mutual_info_wavelet(\"haar\", img, res)\n", 118 | "mutual_info_wavelet(\"db2\", img, res)\n", 119 | "mutual_info_wavelet(\"db3\", img, res)\n", 120 | "mutual_info_wavelet(\"coif2\", img, res)\n", 121 | "mutual_info_wavelet(\"coif3\", img, res)\n", 122 | "mutual_info_wavelet(\"sym2\", img, res)\n", 123 | "mutual_info_wavelet(\"sym3\", img, res)\n", 124 | "mutual_info_wavelet(\"bior1.3\", img, res)\n", 125 | "mutual_info_wavelet(\"bior2.2\", img, res)\n", 126 | "mutual_info_wavelet(\"rbio1.3\", img, res)\n", 127 | "mutual_info_wavelet(\"rbio2.2\", img, res)\n" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": 45, 133 | "metadata": {}, 134 | "outputs": [ 135 | { 136 | "data": { 137 | "text/html": [ 138 | "
\n", 139 | "\n", 152 | "\n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | " \n", 168 | " \n", 169 | " \n", 170 | " \n", 171 | " \n", 172 | " \n", 173 | " \n", 174 | " \n", 175 | " \n", 176 | " \n", 177 | " \n", 178 | " \n", 179 | " \n", 180 | " \n", 181 | " \n", 182 | " \n", 183 | " \n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | " \n", 189 | " \n", 190 | " \n", 191 | " \n", 192 | " \n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | " \n", 197 | " \n", 198 | " \n", 199 | " \n", 200 | " \n", 201 | " \n", 202 | " \n", 203 | " \n", 204 | " \n", 205 | " \n", 206 | " \n", 207 | " \n", 208 | " \n", 209 | " \n", 210 | " \n", 211 | " \n", 212 | " \n", 213 | " \n", 214 | " \n", 215 | " \n", 216 | " \n", 217 | "
\n", 218 | "
" 219 | ], 220 | "text/plain": [ 221 | " wavelet_name mutual_information\n", 222 | "0 haar 0.300046\n", 223 | "1 db2 0.248661\n", 224 | "2 db3 0.240524\n", 225 | "3 coif2 0.239007\n", 226 | "4 coif3 0.244888\n", 227 | "5 sym2 0.247446\n", 228 | "6 sym3 0.241551\n", 229 | "7 bior1.3 0.247472\n", 230 | "8 bior2.2 0.247495\n", 231 | "9 rbio1.3 0.252135\n", 232 | "10 rbio2.2 0.255849" 233 | ] 234 | }, 235 | "execution_count": 45, 236 | "metadata": {}, 237 | "output_type": "execute_result" 238 | } 239 | ], 240 | "source": [ 241 | "df = pd.DataFrame(list(res.items()), columns=['wavelet_name', 'mutual_information'])\n", 242 | "df" 243 | ] 244 | } 245 | ], 246 | "metadata": { 247 | "kernelspec": { 248 | "display_name": "py38", 249 | "language": "python", 250 | "name": "python3" 251 | }, 252 | "language_info": { 253 | "codemirror_mode": { 254 | "name": "ipython", 255 | "version": 3 256 | }, 257 | "file_extension": ".py", 258 | "mimetype": "text/x-python", 259 | "name": "python", 260 | "nbconvert_exporter": "python", 261 | "pygments_lexer": "ipython3", 262 | "version": "3.7.8" 263 | } 264 | }, 265 | "nbformat": 4, 266 | "nbformat_minor": 2 267 | } 268 | -------------------------------------------------------------------------------- /getting_started/dwt1d.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "import tensorflow as tf\n", 11 | "import sys\n", 12 | "\n", 13 | "sys.path.append(\"../src\")\n", 14 | "from tensorflow_wavelets.utils.models import *\n", 15 | "from tensorflow_wavelets.utils.mse import *\n", 16 | "from tensorflow_wavelets.utils.data import *\n", 17 | "\n", 18 | "from matplotlib import pyplot as plt\n", 19 | "import numpy as np\n" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 2, 25 | "metadata": {}, 26 | "outputs": [ 27 | { 28 | "name": "stdout", 29 | "output_type": "stream", 30 | "text": [ 31 | "(32,)\n", 32 | "(32, 1)\n", 33 | "(1, 32, 1)\n" 34 | ] 35 | } 36 | ], 37 | "source": [ 38 | "num_samples = 32\n", 39 | "wavelet = \"db2\"\n", 40 | "x = np.linspace(0, 2 * np.pi, num_samples)\n", 41 | "signal = np.sin(5 * x).astype(np.float32) # 5 Hz sine wave\n", 42 | "\n", 43 | "# Expand dimensions to match model input requirements\n", 44 | "signal_ex1 = np.expand_dims(signal, axis=-1) # Add channel dimension\n", 45 | "signal_ex2 = np.expand_dims(signal_ex1, axis=0) # Add batch dimension\n", 46 | "\n", 47 | "print(signal.shape)\n", 48 | "print(signal_ex1.shape)\n", 49 | "print(signal_ex2.shape)" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": 3, 55 | "metadata": {}, 56 | "outputs": [ 57 | { 58 | "name": "stdout", 59 | "output_type": "stream", 60 | "text": [ 61 | "cA.shape=(17,) cD.shape=(17,)\n" 62 | ] 63 | } 64 | ], 65 | "source": [ 66 | "import pywt\n", 67 | "\n", 68 | "coeffs = pywt.dwt(signal, wavelet) # Returns (approximation, detail) coefficients\n", 69 | "cA, cD = coeffs # Approximation and detail coefficients\n", 70 | "print(f\"{cA.shape=} {cD.shape=}\")" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 4, 76 | "metadata": {}, 77 | "outputs": [ 78 | { 79 | "name": "stdout", 80 | "output_type": "stream", 81 | "text": [ 82 | "dwt.shape=TensorShape([1, 17, 2])\n" 83 | ] 84 | } 85 | ], 86 | "source": [ 87 | "dwt = DWT.DWT1D(wavelet_name=wavelet)(tf.dtypes.cast(signal_ex2, tf.float32))\n", 88 | 
"print(f\"{dwt.shape=}\")\n", 89 | "t_cA = dwt[0, :, 0]\n", 90 | "t_cD = dwt[0, :, 1]" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 5, 96 | "metadata": {}, 97 | "outputs": [ 98 | { 99 | "name": "stdout", 100 | "output_type": "stream", 101 | "text": [ 102 | "True\n", 103 | "True\n" 104 | ] 105 | } 106 | ], 107 | "source": [ 108 | "print(np.allclose(cA, t_cA, atol=1e-3))\n", 109 | "print(np.allclose(cD, t_cD, atol=1e-3))" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": 6, 115 | "metadata": {}, 116 | "outputs": [ 117 | { 118 | "name": "stdout", 119 | "output_type": "stream", 120 | "text": [ 121 | "idwt.shape=TensorShape([1, 32, 1])\n" 122 | ] 123 | } 124 | ], 125 | "source": [ 126 | "idwt = DWT.IDWT1D(wavelet_name=wavelet)(tf.dtypes.cast(dwt, tf.float32))\n", 127 | "print(f\"{idwt.shape=}\")\n" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": 7, 133 | "metadata": {}, 134 | "outputs": [ 135 | { 136 | "data": { 137 | "text/plain": [ 138 | "[]" 139 | ] 140 | }, 141 | "execution_count": 7, 142 | "metadata": {}, 143 | "output_type": "execute_result" 144 | }, 145 | { 146 | "data": { 147 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjgAAAGdCAYAAAAfTAk2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAACyj0lEQVR4nOydeXxcdbn/P7Nkluz7nJlMJpm26ZJ0pYFaNqFUWvWqXJcrWi/Lr4KiFRV/or1XwQv3inoVvXC5ClKkKghuuP9YbClwodCNAk23aTvJZDJzTvZ9JrOc+f3xPWeatEmznX2+79crL0hy8j1PppNznvN8n+fzMWUymQwoFAqFQqFQDIRZ7QAoFAqFQqFQpIYmOBQKhUKhUAwHTXAoFAqFQqEYDprgUCgUCoVCMRw0waFQKBQKhWI4aIJDoVAoFArFcNAEh0KhUCgUiuGgCQ6FQqFQKBTDYVU7ADXgeR6RSARFRUUwmUxqh0OhUCgUCmUGZDIZDA0NwePxwGy+cI0mJxOcSCSC2tpatcOgUCgUCoUyB9rb2+H1ei94TE4mOEVFRQDIC1RcXKxyNBQKhUKhUGbC4OAgamtrs/fxC5GTCY64LVVcXEwTHAqFQqFQdMZM2ktokzGFQqFQKBTDQRMcCoVCoVAohoMmOBQKhUKhUAwHTXAoFAqFQqEYDprgUCgUCoVCMRw0waFQKBQKhWI4aIJDoVAoFArFcNAEh0KhUCgUiuGgCQ6FQqFQKBTDIWuC8/LLL+MDH/gAPB4PTCYT/vCHP0z7M3v27MFFF10Eu92ORYsW4fHHHz/vmIceegj19fVwOBxYt24d9u3bJ33wFAqFQqFQdIusCc7IyAhWrVqFhx56aEbHB4NBvP/978fVV1+Nw4cP40tf+hI+/elP47nnnsse8/TTT+OOO+7A3XffjUOHDmHVqlXYtGkTOjs75fo1KBSKDIRP7MeLz9yP8In9aodCoVAMiCmTyWQUOZHJhGeeeQbXXXfdlMd87Wtfw1//+lccOXIk+7Xrr78e/f39ePbZZwEA69atw8UXX4z//u//BgDwPI/a2lp84QtfwNe//vUZxTI4OIiSkhIMDAxQLyoKRQV2PHATbu3ZCd4MmHngkYobsfX2x9UOi0KhaJzZ3L811YOzd+9ebNy4ccLXNm3ahL179wIAEokEDh48OOEYs9mMjRs3Zo+ZjLGxMQwODk74oFAo6hA+sT+b3AAAbwY+07OTVnIkhlbIKLmOphIclmXhcrkmfM3lcmFwcBCxWAzd3d1Ip9OTHsOy7JTr3nfffSgpKcl+1NbWyhI/hUKZnsDRV7LJjUjaDJw69qo6ARmQHQ/chLonL8GGt7+CuicvwY4HblI7JApFcTSV4MjF9u3bMTAwkP1ob29XOyQKJWdpaLwCZn7i1yw8sGjZZeoEZDBohYxCIWgqwWEYBhzHTfgax3EoLi6G0+lEZWUlLBbLpMcwDDPluna7HcXFxRM+KBSKOniXXIx/xRXZz00Z4OGKG+FdcrGKURkHWiGjUAiaSnDWr1+PXbt2TfjaCy+8gPXr1wMAbDYb1q5dO+EYnuexa9eu7DEUipTQPgZ58Jb5sv/vH7LSBmMJaWi8AjhndIRWyCi5iKwJzvDwMA4fPozDhw8DIGPghw8fRigUAkC2jm644Ybs8Z/97Gdx5swZ3HnnnTh+/Dj+53/+B7/+9a/x5S9/OXvMHXfcgZ/+9KfYuXMnjh07httuuw0jIyO4+eab5fxVKDkI7WOQjwORg9n/bytMYWyENv5LhXfJxWBGTdnPzTytkFFyE1kTnAMHDmDNmjVYs2YNAJKcrFmzBnfddRcAIBqNZpMdAPD7/fjrX/+KF154AatWrcIPfvADPProo9i0aVP2mI9//OP4/ve/j7vuugurV6/G4cOH8eyzz57XeEyhzAfaxyAvB5Nt2f9Pm4ETh55XMRpjkU4m0OM4W8L5anodrZBRchKrnItfddVVuJDMzmQqxVdddRXefPPNC667bds2bNu2bb7hUShTcqE+BvokPD/iw/14pygGAKgdtqC9MI2Woy9h5RUfVTkyY9B29DUkLWc/PzgcUC8YCkVFNNWDQ6FoBdrHIB/v7P0jkhagMmbCZlMDAKCl48IPNZSZEzhOmokLE+Tz/y3spVuAlJyEJjgUyiQkx2LA2TYGOukjIQfeJqrkzWMVaKpcBgBoGTqjZkiGIhA6DADYMMrANWpGPA94Y9dOdYMyGHT4QB/QBIdCmYQn/3jvhM+3jiyhfQwScYAlDcbNxUuxfCGZfmwxd6sZkqEI9JItqcX5Xlyd8gIAdh/6nZohGQo6fKAfaIJDoZxDhufxRN/LAIC1/fkAgO4ULfFLxcEU
Edpcu+ByNK0hAwSnipKIDfaqGZZhOBnvAAA0VC3Bhtp3AwB299MtQCmgwwf6giY4FMo5HH75aRwrScCeAr6y9P8AAM6gT+WojEFssBdHiuMAgOZ3fRiu+uUoj5mQMQHHDz2ncnTGIGDuBwA01F2Eq999IwDg9eJBjA7QKtl8oSKK+oImOBTKOTzxwg8AAP8wUoPVq0mFIeiMI8PzF/oxygx467XfI20GXKNm1DSshclsRlO8CADQcuxllaPTP8n4KFoLUwCAxU1XYuGqq1E7bEHSArz6/KMqR6d/GhqvgIkOH+gGmuBQKONIJxP4VZKU87es3IL6ZZcCAIbsQG/ktJqhGYID75AqTXOiEiYzufw02UmfSEvksFphGYbgkVeQNgP5CcCz6CKYzGZcnakDAOx++48qR6d/vEsuxsb+8glfWzOYT4cPNApNcAwA7eiXjpf+/CAiBTxK4ya872P/CmdxOZgR8mcSPL5X5ej0zwGOJI9rS5Zmv9ZU2QgAaBkOqhKTkTgpjIgvGnVkE8gN/g0AgBeH31EtLiNRYnYCAN7XVwkTDxwoHcWLz9yvclSUyaAJjs6hHf3S8sTehwEAH00thr2AmLIuSBQAAIJth9UKyzAc5MMAgOaFV2a/1rRImKSy9KgSk5EIhN8GADSYKrJfu/pq0kd2oHgEg11hVeIyEqFMPwDg1otuwW3x5QCA21/5F6QScRWjokwGTXB0DO3ol5b4cD9+m0dGbLdc9tns1/0WcrMIssdVicsojPR14mjRGABg7bv+Mft1cZIqWJiijbDzJNB3CgCwuOCsmamvcT0WDlqRNgOv0D6ceRPKIyrcPt8K3POF36M8ZsKRkjH8+L8+pXJklHOhCY5OiQ324u5Ht9COfgn566//A4N2wDtswZUfOGsF4i8kPSLBwbapfpQyAw6/9jvwZsAzYoan4aLs16vrm1ApTFIdO/CsihHqn8BYFADQUL10wtc3WBYBAHa3/EXxmIxEfLgfbAEZNvAtakaFtwH/UX09AOCu3t+jK3RMzfAo50ATHJ2R4Xk89cjtWHpvNR4rPN9jhnb0z50njzwJAPiE7SKYLWdt2vwV5OYQTHCqxGUUDrS8AABoTlaf973l8RIAwJHjLykak9E4mTcAAGjwN0/4+oZF7wEA7I4dVTwmIxEOEJHK/ARQ7lkIALjlC49jdb8T/Y4M/vW/P6xmeJRzoAmOjtj3/M9w+R2l+ET0QYQK06gdtuD6gbOlaAtP7QTmSj/Xhr8URAAAWzb93wnf89euAAAEzVTsbz4c4A4DANaWLjvve02OWgBAS+QtJUMyFPHhfrQXpAEADU1XTvjeVRs/DQB4qySGnjA135wroeBhAIAvZss2cVvybHjwmu8DAB7NP44Df/+5WuFRzoEmODogfGI//vkrC7Bu7//Ba2VDyE8A95g24Pg3WDzxn6dhIdc07L3sZ9ROYI787um7kbACTQN2rLx8oqu1v+ESAEBrfhJ8OqVGeIbgYIYo7DY3vPu87zVVNQEAWkZblQzJUJx++yVkTEDxGFBd1zjhe8yClWgcsCNjAl564acqRah/QhGyBeXjiyZ8/fJ/+By2DNYjYwK+8NfP0+uERqAJjoYZ6evEt751FRb/4hL8spiM0N44tBAnb9yPb961C/kllTBbrPCOWgAAaZ7+Uc2VJ84QjZAt5e/OPpmJeBc3w8IDCSsQOUUl7+fCUE8Ex4uJvfXa9R857/tNDURvqMVC7RrmSiDwOgCgIZZ/3nsYADbYFgMAdp+gfU5zJdRDTGF9eZXnfe+7n/kNChLA66XD+OVPPqd0aJRJoAmOBuHTKfzifz6LJd92499MLyGWB1zeV4z9l+3E498/hZrFE/fXvSnilxTuoA1uc6Hj5AHsKe0HAHzyH+867/tWmwO+EdKTEzz5hpKhGYY3X/0dMiagdtgCl3/5ed9vumgzAKCtKI3hXlbp8AzByTDZ3mswn3/zBYCrl5DX+MXEScViMhqhYVKF9BXVnPe9msXN+GY+eY3vDO2gI/kagCY4GkEU63vm8a9h3f8twQ1dD6OjkEf9kBW/8d6Bl+/vQ/PGGyb9Wa+5lKzRfUbBiI3Dr373LWRMJImsa5q8QdufIiXpYOhtJUMzDAeO/h0AsDZ1foMxAFR4G+AaJZejowf+n2JxGYlAP/n7byism/T7737PLTBlgKMlY2DP0PfxXAgliYyBr2LBpN//0peeRsNgHrh8Hvf+6B8nPYaiHDTB0QDjxfo+3PY9HCgdRWECuM+yCce+1YWPbv3BpCVnkRoHuWmEB9uVCtlQPNG5GwCwxff+KY/x51UBAILdtEFzLhzsItWF5vKmKY9pGiOTVC0nXlEkJqMRSJLK12Jm8te4wtuA1QNEhfdF2oczJ0KWYQCAz3N+ozwA2AuK8aO1/woA+JHlAI7v+5tisVHOhyY4KnOuWB8AmDLAS9f8El//xrNwFJZOu4a3mOi0dMS7ZIrSuBx9/U84XBqDNQ187J/+bcrjFhSTp+LgEE0i58IBkAm15sVXTXlMdpKKpdWFuRCwDQEAGhY0T3nM1U7SfPzi6b8rEpORyPA8Qk7SR+bzr57yuPd98m78Q381Uhbgi0/eQE16VYQmOCoTOPrKeWJ9GRMwODjzZMVbRcql4XS/hJHlBk/87bsAgPcOuVDhbZjyOH/1EgBAMEWVdmfLQGcIJ4uTAIC1l350yuOWM2Qc/widpJo1w70sIoIAXcOKq6Y8bkPTPwAAdqdPKRGWoejpCCCWR/7f2zB1EgkAP7zhSdhSwPNlPfjTL7+hQHSUyaAJjso0NF4B8zkJ/mzF+rweoloato5KGZrhyfA8nhzdBwDY0vjxCx7rr1sFAAjmDcsel9E49OpvAQD1Q1ZU1i6Z8rimBvKeb7H2KxGWoTj1DhFILI+ZsgJ0k3HFtZ+GhQdOF6cQOkrNY2dDKHAAAMCMmLM+dVOxaM01+AqIx9qX3/5PxAbpdKAa0ARHZbxLLsa91vdkP5+LWF9NHXny7chPUf2FWfDa/3sYrUUpFCaAD/zTNy94rH/JuwAA4YI0EjGa5MyGA8d2AQDW8q4LHtfU/D4AQLgwjYHOkOxxGYnAKTLdt3is4ILHFVd50TxIjnnxxcdkj8tIhNqPAAB8SeeMjv+XL/8eNcNmBItS+MGP/knO0ChTQBMcDXDZStLcWjNsRusn981arM+9YBVMGSBpAbpC1BBypjzx8kMAgA+PLUB+yeSjtSLVdY3IT5Dtw9BxOio+Gw52vwMAaC4/fzx8PKWuOnhGhEmqg1SrZTYEIuTm22CZfEptPBsKVwIAdgd3yxqT0QhxZLzeZyqd0fGF5Qz+cxHRw/l2YhetmKkATXA0QIQl++GLkkVzslnIc+SDEUZsO4JU6n4mJOOj+LWJ+PJsueTT0x5vMptRH7MDAIKnqFv7bDhgIgaQzUs3THtsU6IUANBykhrGzoaTA8KIeHH9tMduWHUdAGC3qY02wM6C0AAx2/U5mBn/zPW3/Beu7CtBLA/4v4/SKo7S0ARHA0R6WwEAHkvpnNfwCmXTcAc105sJz/3
2O+hxZuAaNWPDdV+e0c/4M2SM+Uz4HTlDMxR90SBOF5Nt07WXfWza45ucZFqthaWv8WwIpDoBAIvdF66SAcCl7/k/yEuTrcDTb70od2iGIRQjZru+Ut80R57FZDbjgQ8/AjMP/KYkjBefuV+u8CiTQBMcDRAZIk+4NY6qOa/hBWl6C3eeliQmo/PEmzsBANebV8Jqc8zoZ/x20kMS7KGv8Uw5+OpvAAALhqwoc/unPb5JmKRqibXJGpfRCNhJX1jDwkumPTa/pBLrB0myvvuln8kal5EIZfoAAHWuqRvlJ2PVlf+Ez8ZJ4nnbK1/H33/7PYRP0CqwEtAERwNExshIuGcS+e+ZUmMnPSThftqcOR1DPRH80Ulepy0bvjTjn/OXkht0cDQiR1iG5MBx0ufRzLtndHzTYmGSytYvV0iGo59rQ1d+BgDQsOrqGf3MhrI1AIDdoZdli8tohPJiAABf7fRVsnO594t/QMEYcKIkife0fA11T16CHQ/cJHGElHOhCY4GiKQHAACeiskl1meCV0iOOoQyKmVq/vD0PYjlAQ2DeWi+5p9n/HN+hozjBzN05HOmHOwlza/NlStmdHzjWuLlEyng0RcNyhaXkQi8vQcA4Bo1o6jCM6OfuXrNhwEAL+aFaR/ODBgbGURU0BnyTaOBMxmjQ70YtZ39nDcDn+nZSSs5MkMTHA0QsRD9Gg8ztdDcdHgrSHUhnOqTJCYj80SA6LJsKb7sghYY57JgwVoAQNBO9YZmygEzSbibl10zo+NLqn2oHbYAAFoOUk+qmRA4TbScFo8Vzvhn1l1zI5xJoDM/g6Ov/1mu0AxDWNDAcSaBiprZX6cDR19BxjTxa2kzcOoYbaaXE5rgqEyG5xFxEJVXT23jnNfxusm+cNhCNVouBBc8gheKewAAWz44O4VR/7JLAQDdzgx1vJ4B3e0n0FpEGowvumxqBeNzaUqVAQBaAvTiPxMCHBksaMi7sM7QeOwFxbh8uBwAsPvVX8gSl5EIBQ8DAHwx26weikSkEHSlzB6a4KjMQFd7tnTp9q+c8zo1PrIvHHYkacn5Ajz9m7vBm4FL+guwaM3MqgoixVVelMfIY1jwKL35TsfB10ilrGEwDyXVM588acoXJqm4I7LEZTQCg60AgIaSyR2up+LqSrLV8mLkNalDMhyhyDEAgC898yrZeLxLLsYjFTfCRFqlYMrMXtCVMnsUSXAeeugh1NfXw+FwYN26ddi3b9+Ux1511VUwmUznfbz//Wednm+66abzvr9582YlfhXJiQi6NaVx07RicxeiZiFpGhy1kaZDyuQ8EX0OALDFvWlOP+8fI+P4weAhyWIyKgdO7gEANGNmfSEiTQxJ9Fvi1Nh0JpzkyZBCg2dmfU4iGy4muix7HCzSyYTkcRmJUA/RGfLlzf0avfX2x/FdG1Hrfnd/6awFXSmzR/YE5+mnn8Ydd9yBu+++G4cOHcKqVauwadMmdHZ2Tnr873//e0Sj0ezHkSNHYLFY8LGPTdTQ2Lx584TjfvWrX8n9q8hCJEyUhz1jtmmOvDDO4nJUCNWFjjOH5xuWIQkcfAH7Skdg4YGPf2xq5/AL4TeR7ZNg9JiUoRmSg70tAIDmqlWz+rmmJVcAAFrsA5LHZDQyPI+Ag/SELV78rln97Nqrt6BoDOhzZPDWK7+RIzzDEBrqAAD4Cuc+6QoATfVkjL/fNDbvmCjTI3uCc//99+OWW27BzTffjMbGRvzkJz9Bfn4+Hntsch+U8vJyMAyT/XjhhReQn59/XoJjt9snHFdWVib3ryILEY6oGHv4C3vIzATvGFHaDbe3zHstI/LkX74NANg4WAGXf/ajngDgd5JqRLCPTvhMxwEreYhpbtw4q59rbH4vAIDL59ETDkgel5Ho6Qig30H2PRauePesftZqc+DKGLF2ePGNpySPzUiEkt0AAF/F7LYBz8XtWQwAiNpogqMEsiY4iUQCBw8exMaNZy9wZrMZGzduxN69M/Pl2LFjB66//noUFExMAPbs2YPq6mosWbIEt912G3p6eqZcY2xsDIODgxM+tEKkj+ixeKyl814rK/bH0ZvCuWR4Hk8Mkr6ZLQ0zb3g9F385cWo+E49KEpdR4YJH0F6YhikDrLnsI7P62cJyBnVDwiTVIepJdSECR4iLuHfYMqct7g0uUvXZ3Un91S5EyDwEAPB5ls1rHbevCQDQ5eDptqACyJrgdHd3I51Ow+Wa2N3vcrnAstNPoezbtw9HjhzBpz890Sto8+bN+PnPf45du3bhu9/9Ll566SW8973vRTqdnnSd++67DyUlJdmP2trauf9SEhMZJjdKj3N6k7zpqMkjUxHhPtqDcy4Hdv0CgeIknEnguo/fNed1/DXkAhU09UsUmTE5uPd3AIAlg7YZa7OMpylN3sstAdoAeyFOniE6Kg3Jojn9/NXvuh4A8HJ+F5JxKn8wGRmeR8hJkhGff/W81qqqXQozT3RwOkPUVkduND1FtWPHDqxYsQKXXDJRfvz666/HBz/4QaxYsQLXXXcd/vKXv2D//v3Ys2fPpOts374dAwMD2Y/2du00L0YSpPLkKZ7f3i4AeAvJjSQ8QqsL5/LE7h8BAD4U883phiviXyho4TjH6LTaBTh4iijkNpvm9r5eXkB0nY500kmqCxHgSC/YYtvMlKLPZdUVH0NZ3IRhG3Bwz5NShmYYeiOns5Ou3oa181rLkmeDK0Zuu9E22kogN7ImOJWVlbBYLOC4ieq6HMeBYS7syDoyMoKnnnoKW7dunfY8CxYsQGVlJU6dOjXp9+12O4qLiyd8aIUITxopayrnt7cLAN4yMl7bkaJKu+NpfecV/DxzGACwZc2N81qrbtl6AMCIDegOn5hvaIblQB95Om12rZ7Tzze5SWNyy5h2Hka0SGCYbHE3lC2c08+bLVZcPUaSo937fy1ZXEYidIqI/LlGzXAUls57PSZJeiWjUdpKIDeyJjg2mw1r167Frl27sl/jeR67du3C+vXrL/izv/nNbzA2NoZPfepT054nHA6jp6cHbvfcnmLUJGIl/iYe9+J5r+VlyBph09C81zIKOx64CQt/eyX6nAAyQKTrzLzWcxSWomaY/NkEj78uQYTG5ECe0GDc9J45/XzT0isBAC0O7fTLaZFAhjS/NnjnrqF1tZsIWL7Yc1CSmIxGKESc7X0JpyTruUG0dKLddFBBbmTforrjjjvw05/+FDt37sSxY8dw2223YWRkBDfffDMA4IYbbsD27dvP+7kdO3bguuuuQ0VFxYSvDw8P46tf/Spef/11tLa2YteuXfjQhz6ERYsWYdOmuWmbqEWG5xHJJ31DHqH5bD7UCErIYTttXgOA8In9uLVnJ3jxXW4CPtf/xLz9X/xJcoEKth2eX4AGJRI4hEgBDzMPrL50dg3GIsuaN8OUIarRna20lD8ZGZ7HyXzygNSw+MIPjBdiwxXEj+1/C3sxNkITynMJcScBAD5TqSTrua1k4jc6EJZkPcrUyJ7gfPzjH8f3v/993HXXXVi9ejUOHz6MZ599Ntt4HAqFEI1O7Bk5ceIE/v
d//3fS7SmLxYK3334bH/zgB7F48WJs3boVa9euxSuvvAK73S73ryMpPR0BJMmwCBj/7ES6JsO78CIAQL+DWgkAxP+FP+cdLoX/i99KplWCHN2imoyDrz8DAFg2ZEdB2dya5/NLKuEftgIAWt58TrLYjAQbfBsjNsDMAwuWXznndZZd8g9wjZoRzwNe//vPJIzQGIT6ydCGzzFzK4wL4c4nfxPsyORacBTpsCpxkm3btmHbtm2Tfm+yxuAlS5Ygk8lMerzT6cRzzxnjghdpJaXPqlETbM65SYCPp7jKi6IxYMgOdJx+E0vK3zvvNfVMQ+MVMB/GhCRHCv8Xf6EXwBkEB+m02mQcPP0KYAKazd55rdOUrsAZcGg5tRdXSxSbkQi0vAIAqBuxwl4w975Ck9mMq1NePIUQXnzzGbz7Q1+UKkRDEIpxgA3wlczcbuRCuItrgGEgmpha2oQiDZqeojI6HWHSiOlJSFd58sZJu39HOx1BFP1fIOTKZl4a/xd/JXETDiboE9hkHBggkz3NrjXzWqepkExStXTT9/JkBFpJz0xDqmTea22oJSKBu/vfnPdaRiOU6QMA+Kpn7yI+GUw5SZSioL2SckMTHBURG149mJuGxWR4eVIJCkfp9gkA/J9tj0Ewa8dLzQ9J4v/iryXbiUELvUCdS4bnccBGGl+bV8yvJ67JsxoA0DJGexUm42QnSSQbHHOXPRDZcNVNAIDXiwcxOtA97/WMRCiP6AP5auemfn4ubmYRACBqiUmyHmVqaIKjIpF+MgLrsUpnM1FjFcT+elslW1PPDPexEIpaWH3ZhyVZ099AdJnaCpJUjfQcOgIHweXzsPDAqkvn93o3LSN9JUecQ1RzaBICI+T6sbh8/pWFBSuvQu2wBUkL8Orzj857PaOQiA0jmk/ee75F89PAEXELwyBRZ5q+r2WGJjgqEhkhjcCefGma1wDAW0D0hcLDEcnW1DOsMIFTmCAWAFJQ07AWeWkgaSE3dMpZDrz+ewBA06ADzuLyea219KJNMPPEDJINvi1FeIYiYCJ6Vw21szMznQyT2YwNmXoAwO63/zjv9YxC+OQBZEyAIwlU+eZn0yDC1JNKUMIK9HO0j09OaIKjIpEEuUB5SubXjDkebynZ3+2gDWwAADZCtuqYeJ5ka1rybPCNkP78YGCfZOsagYNn/hcA0Gydvx2Ks7gcC4fJv1vLm8/Pez0jwadTOJVPDBsbls6vaV5kg38DAGD38DuSrGcEQsHDAABfLA8mszS3S0dhKcriJgBAtI2+1nJCExwViYBoTniq/JKt6RUa4cIYkGxNPcN2kj4nJi2NSJeIP02mVoLt9AI1ngODxwEAzYw05fwmnozkt5yhoorjCZ/Yj3geYE0D9U3SJDhXbyCyHAeKRzDQGZJkTb0T6iAN7r60dH2SAOAeI/vm0Q7aKyknNMFRkUheHABQM0+H2vF4RbE/W1yyNfUM20su1IxZ2gvUAhvRsgh2U7l1kQzP44CdVA7XrrhWkjWbioiFSUv3MUnWMwqBY6RStmAkD1abQ5I1a5etw6LBPPBm4BXahwMACPWQByRfXsU0R84Ohs8HAEQ756esTrkwNMFRiXQyAdZJGsw89dJ05wNAjZ/sx3fmZ6gqKQBuiPQ5Mbb59YOci7+Y+H4Fh+mEj0jo2F50OzOwpoGVl/6jJGs21ZBR85ZEhyTrGYVAGxnnbuClG1AAgA0W4mm189Bj81b8NgKhYfK+8xXO3wx5PG4LGe2P9tFKmZzQBEclOkNHwZuJNku1r1GydStqGmBPkf+PnD4s2bp6hY11AQCYAukauQHA71oCAAim6EityIE3/gAAWDHklMSUEACaGok+SwudpJpAoJvYBzQ4pL3xippRvy3pQN2Tl2DHAzdJu77OCCXJ9cNXLl0bAQC4HWTrNTpMFeflhCY4KhFpPQIAYGJmWPJskq1rMpvhHSUNsB2hI5Ktq1fYVD8AwFU8f62Q8fjrVgMAgnnDkq6rZw4GiQVGc16dZGsuuehaWHhgwAFETh2SbF29czImjIhXLpFszfCJ/Xg0/3j2c94MfKZnZ05XckKCcbFPwjYCAHAXkesRO0aHQeSEJjgqERGmezxJafbPx+NNFwAAwpHj0xxpfFiQBISpkO6mCwD+pe8CAEQKeLoVKHBgiLynmz3SNBgDgL2gGIuGyQPAEepJlSVg7gcANNTNTy16wpoyebfplQzPI+QkOle++tWSru0uE9SM03QYRE5ogqMSHV2nAQAezN1DZipqLKUAgHAPbWBjraTZmhHUQ6Wi0rsEBQkgYwLaju2VdG09kuF5HHASSfu1KzZLunZTRpikCtKRfABIJeI4U0BuvA3z9FUbT0PjFTCfswsohXebXuljgxgRiuveBumSdgBgqsmWV9QyKum6lInQBEclIgOkOdUjcfMrAHidpN8kPJjbjZl8OgVOaORmaqUtMZvMZvhjxEPszKncLeGLBN95GX2ODGwpYPm7Pijp2suLSXLa0kMnqQAgdOx1JC2APQXULl0n2bpyebfplVDgAACgetQ0b9HKc3HXLAUARO1UCV1OaIKjEpFRDgDgKZBGXXc83hIishYe65J8bT3RGzmNlIX8f7VEKqTj8WdKAQDBjhbJ19YbBw/8CQCwcjh/Xs7Wk9HkvQgA0JKi6twAcFIYEV80YofZYpV07a23P46PDpDG5TvT75LEu02vhEJEPduXkFZDCwDcwuTsoB3U+0tGaIKjEpEUKed7Suev+Hou3ioy6tmRye39XTZERLoqYibYnIWSr+93kOQ02Hta8rX1xgGxwdhWL/naTY1XAQCOOkfoJBWAQPthAEBDRvrqLwAsKCAJTjw9Jsv6eiHEkkk1n6lU8rWLK71wCibAbCsdBpELmuCoRASkO99TtUDytb01gtifNbfdatkoEeFjEtJNqY3HX0r20YMxWlk4MExe6+Ya6bczGtZcA2saGLID7cffkHx9vRHoIa91Q750Fi/jcRWSxJ0TrGRylVA/8Yny2aWVmADIFrc7Tqpv0TDdepULmuCoRMRGno48wl6slNTUrwAARPPTSCVyV9GY7QoCOKsaKjULPCSRDGb6ZFlfL/DpFA6KDcar3iv5+jZnIRYPk36nlrdekHx9vREYIwl1Q5X01w4AcJWSCg6b4xM+oRjRqPGV+GRZ350iW19R9pQs61NogqMKidgwuvJJJ59HSEakxFW/HBaejHhyOVz+5PpJIzcjqIZKjd9PekOC9tyulJ0+/CIGHKTptWndB2Q5RxOqAAAtrXSS6qSlHwCw2C/tZI8II3jjcebcfl+HeJK0+wR/P6lhzKRXLdpLHcXlgiY4KsAGiUFjXpqMG0uNJc8Gzyjpru1ozV0zSFZQCWUc0vrIiPgbyfhsrzODwa7ctWw4ePDPAIDVwwXIc8hTLWsqIZNUR3pyW9spERtGawGRKm9ovEKWc7gESQXOltsTPqE8MsLtq5XOSmc8bmGCNjpIt7jlgiY4KhAJkakbz6gFJrM8/wTeJCl/hjtyd3+XjROVUJfENg0iheUMKmMmAEDw2GuynEMPH
Ggjv3uzXfp+MpHsJFU6Kts59EDwCBHjK0gA7oWrZTmHq5ZsffU4M0jGc1OnJRkfRaSANLT7FslTKXMLE7TRWG5Pu8oJTXBUIBIl3fmelPTjhyI1ZrItE+7K3QkfsYeAkWFSTcQ/RioWweCbsp1D67w6Qqoq/hJp1aLHs3zFNQCAo/mj4NMp2c6jdQIniKjkolGHbA9HFTUNsAjDap3CJGKuET55ABkT2Xat8snT6+QuJU3ibLpflvUpNMFRhUg3aX71mOXpDQEAr70aABAeaJftHFqHNY8AAJiqetnO4TeRMnMwmpuVsp/+6Aa8Xkpe5zvjf5HNnHHR6g2wpYBRG9DWkrvVspPhtwAAi02Vsp3DbLGiOkZuDVz4hGzn0TIh4YGldjRPcq0hEbcwQRsF9bOTC5rgqEBEUBj22OTpDQEAbzGZhAjHO2U7h9ZhhR4Cxi1PkyAA+POJad6Z/tyzxQif2I/P9v0CILt0spozWm0OLBkhvm0tb/9d8vX1QqCPTNw0FMoz2SPiSpKpNY7LzQpwSNja96Wl188SEa9LUVtu6w3JCU1wVKAjJqgYF7plO4e3kjwddORo+TMZH0W3k0yqMb5G2c7jryCiisExTrZzaBWlzRmbTKQq2dKWu9YYgTHSg9RQLb0y93hcIFuvbHerrOfRKqEektjV5clXKXP7mgAAXc5MTst5yAlNcFQgkuoHAHjK5HsK83rIvnHYOiLbObSM2Dtg4UlPgVz4a8iERdCUe5ohDY1XwJSZ+DU5zRmbSsi/Y0vfSVnW1wOBPOJc3yDTiLgIIxj2cgO5OeETGiJTkb4Cj2znqPIthYUnhr25LOchJzTBUYGI0BvicUnrcD2emjqirxN2pnJS3p5tJ42vrphZtj10APAvbAYAtDrHcu519i65GO8fqM5+bpHZnLHJR27quTpJFRvsRagwDQBYvOIqWc/lEqQVuJHc3OIOJYk/lK/cL9s5zBYrXEKvUzRHm7nlhiY4KhARHGQ9Xnm68wHAI4yQJqxAdw42CrKCOigj9BLIRV3jepgypPm1sy33LlKePNJkfePQQrR+cp+s5oxNyzcAAI4WxJBO5p5Gy+l3XgIAlMTl0c8aT67bNYRMxErH55F3K9CdJH1lLFUzlgWa4CjM6EA3+h2CirF/lWznsTkL4Rol/7zhM4dlO49WYXtaAQAM5GsSBMjr7B0hoorBE6/Lei4t0pkkW3Pr3BfLVrkRWbjqathTQDyP6MHkGoEAeX81xPNlGxEXYbIjzLm39ZrheYScpPHXVy/fNRoA3ML1KZqjvU5yQxMchYkG3wYAOJNASZV8+iwA4E2Qp4OOHDRzE3sHGGup7OfyJ8lFKtj2luzn0hpchjzpusrlfS8DRKF76bAwSfXOLtnPpzUCHUSVvMFcJfu5XIK0Qi7aNfRzbRgW/HlrF8ubtDPWMgBAdCB3ldDlhCY4ChNpJ9sYnphV9qewGhCvk3Bn7pU/2REy1eRyyDcFIeK3knMEO3NvK5CzkukPV7V8vQrjWW4mWyctbQcUOZ+WONlPJnsWF9XLfq5ctmsIBciUXtWoCc7iclnP5c4nPWzR0dybwlQCmuAoTIQNAABq0gWyn8trIzfecH9I9nNpDVboHWCK5BvFF/EXkepFcDD3XmfOTlSFXZ7FipyvqYycp6U/9yapAklyE2xg5JM9EMllu4ZQG6mU+RLyKc2LuAW9smiO9jrJjSIJzkMPPYT6+no4HA6sW7cO+/ZN7Qj8+OOPw2QyTfhwOBwTjslkMrjrrrvgdrvhdDqxceNGBAIBuX8NSYj0EOdYj0wO1+PxFpERx/AoK/u5tAbLk3FaplxeQTQA8FeS8eVgMrcmTkb6OjEilPJdPnmbMUWa6sjUWgufe0+8ARvZDmxYIO+2CZDbdg0hjlRifZD/Gu2urAcAsBiS/Vy5iOwJztNPP4077rgDd999Nw4dOoRVq1Zh06ZN6Oyc+mZQXFyMaDSa/Whrm2gn/73vfQ8PPPAAfvKTn+CNN95AQUEBNm3ahHhc+2JJkSFBxdgu/z66VxhxDKdy7+mAtZDeAaZaPgNIEb9vJQDgjDW3LlKccONzJoHCMkaRczYJnlTHC+M5NUk13MsiKpg/Nsg8Ig7ktl1DqJ/cb3wOeUx6x+MWtgKjVu3fu/SI7AnO/fffj1tuuQU333wzGhsb8ZOf/AT5+fl47LHHpvwZk8kEhmGyHy7X2TdaJpPBj370I3zjG9/Ahz70IaxcuRI///nPEYlE8Ic//EHuX2feROLEOdajwNaJ103K+R3m3BP7Y+1JAACjwNaJf/E6AECoIJVTiqRchGwTueLy95OJ+FdcCWcSGLMCp996UZFzaoHA2+R3rYyZUOZWpt8pV+0aQjFS8faVyF/9ZQSpENaRzjkdLSWQ9aqUSCRw8OBBbNy48ewJzWZs3LgRe/funfLnhoeHUVdXh9raWnzoQx9CS0tL9nvBYBAsy05Ys6SkBOvWrZtyzbGxMQwODk74UIuIMHbpKa+X/Vw1tUQKvN2ZyKk/npG+TgwJ8jdMXZPs5/MsWgNbitgUhE/mTvMr10lMY6vT8moNjcdssWLZCOmNaDmyW7Hzqk3gFNnWb4jL37snwoCcK9fsGkLpPgCAr0o+IVYRpp4IsiasQG8ktxJJJZA1wenu7kY6nZ5QgQEAl8sFlp28L2TJkiV47LHH8Mc//hG//OUvwfM8Lr30UoTDZIxO/LnZrHnfffehpKQk+1FbK/9I61R0WAQVY0b+P56ahWsAACM2YLA7d8YQx2+dFFXIJ7UuYrZYUTeaBwAIBqbuLzMaXC9pqnbJrDV0Lk0WUv3805Hfy2LsqUUCUSLl32CtnuZI6XAJfYK5ZtcQyiNN1b7a5bKfy15QjPIYcauNtlG7BqnR3BTV+vXrccMNN2D16tV497vfjd///veoqqrCww8/POc1t2/fjoGBgexHe3u7hBHPnAzPI+IgWycer/yTEAVl1SiLkz+e8Kk3ZT+fVmA7SM8Ao+DWyQKejOQH299R5HxagBsklgkuBbSGxjOaJtuAjxedQt2Tl2DHAzcpen41CAyQatniEvl7ykRy0a4hGR9FJJ/YYfgWyev3JeJOkE79aCS3ep2UQNarf2VlJSwWCzhu4sQDx3FgmJk1Jebl5WHNmjU4dUqQ3hd+bjZr2u12FBcXT/hQg6GeSHbqxO1focg5vWNk+6AjnDuTEKzQM8CkHNMcKR1+G6koBntyR3OIGyX9ZC6n/FpDIuET+/H7krMVBd4MfKZnp+ErOSdTwoi4W/6qgoho18AmehQ7p9p0nDoE3gzYUkB1nfwPoQDg5oWtwK5WRc6XS8ia4NhsNqxduxa7dp1VHeV5Hrt27cL69etntEY6ncY777wDt5uUpf1+PxiGmbDm4OAg3njjjRmvqRYRQcW4eAwoLFdm6sSbKQIAhNnc0Q3hekmFjjEVKXZOf3EdACA4nENbgUnSq+AqUua9DACBo68gY5r4tbQZOHXsVcViUIOAg2xtNyy6RLFzinYNXFq9nkWlCZ0hle7aUausJr3jcQsV0Ghf7uloyY3s
9fs77rgDP/3pT7Fz504cO3YMt912G0ZGRnDzzTcDAG644QZs3749e/w999yD559/HmfOnMGhQ4fwqU99Cm1tbfj0pz8NgExYfelLX8K///u/409/+hPeeecd3HDDDfB4PLjuuuvk/nXmRUSwTPDEbYqds8ZKlDjDfW3THGkc2CHyhO+ylSl2Tj9DpiGC6dx52uV4waahTLmetobGK2A+p1/ewgOLll2mWAxK0xcNottJ/OsWrXi3YufNRbuGUAepdPvSyvWVMXayFRgdzj29MrmRPUX9+Mc/jq6uLtx1111gWRarV6/Gs88+m20SDoVCMI/rk+jr68Mtt9wClmVRVlaGtWvX4rXXXkNj49ly4Z133omRkRHceuut6O/vx+WXX45nn332PEFArRHhyPaFh1duEsJb6AZwAuGRqGLnVBt2tBMoAJh85Roy/XWrgTAQtOXOSD4naA2JN0Il8C65GD8p+2fc2v8LwESSm4crbpTd6FNNAu/sAQC4R8yKNM2LuJhFwBGAzSG7hlA32d72WSsUO6e7yAOMAdGxbsXOmSsoUoPbtm0btm3bNun39uzZM+HzH/7wh/jhD394wfVMJhPuuece3HPPPVKFqAiRXlJFqbEqV1nwltUBfUA4mTuVBTbZDwBgipW7GfiXrgdeBaIFPGKDvbJ72GiBThtpmHe5GxQ97y1f+jn+9Wu/RFd+Bn9e+m947yfuUvT8ShM4Q/qLGhLKbbkCAOMjD5W9gl1DniNf0fOrQWgoDBQAvgLlrh3uch8QBaJ87mwFKoXmpqiMTGSYVFE8DvlVjEW8LnLz6cghKXBR9pypqFPsnOWehSgaI//feuw1xc6rFvHhfgwIBVNXrTI2DePxJJTT3lGbAEu2TRry5FfWHU+5Z2HO2TWEkqSK4itXRkwRANyuhQAA1pJbnl9KQBMcBYkIJUiPYLCmBDXCOHrYPqbYOdWGFWTPGeHCoQQmsxn+GLnjB08fVOy8atEZIv1kthRQ6lIukRRxZcg2L9dj/MbMwFArAKChVLkRcSA37RpCJlJFqfMol7S7a0j/XlRQX6dIB01wFCTCCyrGFfWKndO76CIApMw8OmD8Pd4Mz4N1EB0LUQZdKfwoBQAEIy0XPtAAcB1kKq86blFMa2g8LqsoQteh+LmV5iRPxvEX16xU/NyiXQPLGl/+IMPzaHOSB0Ff/SrFziuqrQ/ZiQo7RTpogqMgEStpyvS45fdHEimpqkWB0CPYkQNif/1cGxJCZ5lLAZuG8fgdRMog2HdG0fOqgehP5Eqps1XkspMeJ6OL0GV4HgEH2bpoaHiX4ucX7Rq4HuNPYQ50tWNYGHCtXaxc03pRhQf5wjU62po7QqFKQBMchcjwPCLOFADA41Puxmsym+EVxtLDIeP/8bAhUj0pjZvgKCxV9NwLyskWQjBm/Im1szYNyk0EjsdVSPpROsd6VTm/UnSHT2R7nRYqOCIukkt2DaGTpJm7MmZCfoly4pUmsxnuOHkqiwpSIhRpoAmOQvRGTmNMqCy4/cqWmr1pchPqYAOKnlcNWMHhmhnLU/zcfjfpdwpm+hQ/t9KIN7xq4QaoNNXChJzRRegCR14GANQOW1SZzBPtGtgRbpoj9U+ojQix+saUlxtxp8mEWjTHnNvlhiY4ChFpJX88FTET7AXKWkWIY+nhnqCi51UDTpA7d/FOxc/tX0C8a4KOuOLnVhpulGwNiTdApXFVksZmzmzsyZOTwoj44qQ69jJMIdl25RLGrpQBQIgjD0c+oZdOSdxm8u/L9hq/aV5JaIKjEJGO4wDUGW/15hMp/fCw8cvMbJ9g02BW/oZQv4xYhfQ7MujnjN2zwAlbQ6JfkdK4mEUkjjxjTwcGOsmWRYPdrcr5XaVk4tPolTIACPW1AgB8DuUEQkUYG6nORYeMf41WEprgKESkk5QePRnlexa8JT4AQHisS/FzKw07RPpfRPlzJSkoq0b1KDFKChpcCydr01CqnOTBeFzeJQCAbkcG6aRxlXbfGiTTS1VqVcpyyK4hFCNWCb5i5axHRNzCg0I0ZvxrtJLQBEchIv2ksuCxKr+P7q0mT7thGP8pjI2TUXimQFlRNBF/guylB4PGnlgTt4aUtGkYT1UtkQDgzUBPhzF7y3Y8cBP+Wkq2Av/d9Ap2PHCT4jGIlbJcsGsIpUlV0letrDI3ALhLSVIVTfUrfm4jQxMchYgIRmoep/LlzxpBSKojz/i9IWy6H8BZJ2Sl8ZvJk/YZ1tjKr5xo0yDcAJXGanOgIkaqZVz4uCoxyEn4xH7c2rMTEJzTMybgMz07ET6xX9E4zrVrMDKhPPL7+bzKyksAgLuKKCdHzbnjZacENMFRiEiCeEHVlChf/vQuWA0A4PJ5JGLDip9fSViQCwRTWa/K+f35ZMsm2G/cHpxkfBS9gru1q1ZZMcXxuBJE/oBjjTd5Ejj6Cvhzrs5pM3Dq2KuKxpErdg2pRBwd+UQg1LdoreLnd3vIlitr8J4ypaEJjkJEhO0hT5VyHicilbVLYEuRp8DombcUP7+SiKV0RmEDSBF/BaloBBPGHavtaicVEwsPVNSo8zoDZyfluO5W1WKQi4bGK2DmJ37NwgOLll2maBzj7RrYduNVykQ6AgfBm4G8NOCqX674+d2CKGlXvvErZUpCExyFiAj+SB73EsXPbbZYURMjIjxhYVzdiKSTCXQ5yF2BUcEAEgD8XnJxDJoGVDm/EnAdxJeoKmaG2WJVLQ6XhUzKcf3Gs2vwLrkYd6Sas59beODhihvhXaKcwq6IaNfAGVijJXSG9MzVjlpVeU9X1DTASgpI4NqMb/WiFDTBUQA+nUJUKH96FLYPEPGmSPNrR/SkKudXgq724+DNgJk/24SqNAsaLgEAtOYnkOH5aY7WJ+KWkCtpUzUOlzBa2zlszGrZ2joiO7Cy34HWT+7D1tsfVyWOXLBrCHWQ7TdfqlCV85stVriESlnUwFuBSkMTHAXoCh1H2gyYMuqUPwGgxlIKAAh3G9cniW0nmiFVcTMseercfGuXXAIzD8TzADZozGqZeKNzqSB5MJ7q/CoAABfvUTUOueD6wwCAJaZKVSo3Irlg1xDqJkm7T4UpVxF3iigoR1njPoQqDU1wFKAjSPpeXKNmWG3Ky4ADgFcQrwoPhlU5vxJwguOx2HyqBnmOfNSOWAAAwRNvqBaHnIhbQqKjt1q4SkS7BmNuB3JDZPLSZStTNY5csGsIDZHroq/Ao1oMbhQBAKIG7ClTC5rgKEAkQnoWPEl1nJcBwCtMb4XjxnVfZoULA6NyZcGfIheqYMiYDd2ig7fo6K0WrnIiYMnBmJOBnKDp5MpXXlpiPLlg1xBKEIE9X5nyQyAi7jySyLKDxq2UKQ1NcBQgImwLeYQMXQ28lQsBAGG+X7UY5IYVSvqMypUFfx7ZOgl2GrPUzI2RLSHR0VstXC7ynuasxhyt7UyRypSrWL2qApAbdg0hE/ndfCoMgYgwQiIbNXClTGlogqMAEeHG68lT74nXmxX7M67kOis0mzKOSlXj8BeRallwyJjGeeKNrlrtG2/NYgBAp5M
3ZEM3J2g6uSqU184aj6hWzRrY2DTkIEmyr36VajG4i0kiGU32qRaD0aAJjgJERsmN11OgjjEhANTUrySxONOG9e5hBTFFsaSuFv4qcuM9kzSmr0zWpkFw9FaLah+RAkhaYEhzU06QlnBVL1A1jqyxqaBebTQGOkMYFLoHfIsvUS0OtyBOGsWQajEYDZrgKEAkSfauPSqoGIsw/hUw80DKYlxFUlaoLDBl6j7x+n0kmQxajXmhEh281bJpEHEUlqJY2J3ihAk6o5DheXAOIi3hqlFv2wQwvl1D6CSxv6iImVBQpl6/k5shoplRq/EtdZSCJjgKEBEyck+Vek9iVpsD7lHyzx02qJqxWEJnqtVrFAQA/+J1AID2grThbgjpZALdDsGmwavujRcAXGN5AAAuaizDzcHuMMYEvTmXkGCohdHtGkJt7wAAfGPqTLiKuGvJvzPrTINPp1SNxSjQBEcBIjbymFnjVUddV8SbItL24Q7jXaQAgLWTEjrjWaxqHMyClbCniNN1+4l9qsYiNd3hk+AFTSe1xBTH40oLdg1dreoGIjFciFSkisYAZ7G602pGt2sIsWTK1QeVZQ/qiQhsygL0RoyrGq0kNMGRmWR8FJ1O8vjjqVNH5E+kxkT+gDu6jCf2Fx/uR79QWWB86qhFi5gtVvhHiBbPM3/9geIO0HIiOndXxE2qaTqNx2Umk4mdfcbSdxIrUmKFSm0YA9s1hPpbAQA+u7rj+DZnISpixD4+2nZE1ViMAk1wZIZra0HGBFjTxPRSTbx2Mr4cHmhXNQ45EP1bbCmg1KVu8ysAWDPkQvV/k39B3ZOXYMcDN6kbkER0ciQ5VlNMcTzVeaUAAG6YVTcQieE6gwCA6rT6SSQAuAxs1xAajQIAfCr2SIq4hb+raMSYEhNKQxMcmYkImbg7ZlHVmBAAvEVkDDEcM57OAtdBLgiuuAUms7pv6/CJ/WgpOavNwpuBz/TsNEQlR3TudvH56gYi4BLtGmLdKkciLVwfeQhxmdTxRjoX0a6BHTCesWkoTcayfVUNKkcCuHmSSEYNWGVXA5rgyExEMLf0pNR/EvNWkCbncNp4OgssR2waGA28zoGjr0Ao4GRJm4FTx15VJyAJydo0CE7eauMqElR2k/3qBiIxWrFpEBHtGkQVayMRyiN6Qz6vulvbAOC2lgIAon3Gq7KrAU1wZKaji+xZe6D+DcHrIVtkHRZjTfYAACuUzhmo/8Tb0HgFTJmJX7PwwKJll6kTkISIW0Gik7fauMrJtgJnMO0QLkY0lFzOKpUjIRjVriGViKMjn4zj+xZepHI0gNtB/r2N7PulJDTBkZnIIHni9djVVdcFgBofaXIOO5OGU35lBadjJk/9J17vkovxxbHV2c8tPPBwxY2qOkJLhejc7SpQtyFTRBTB67QYy65BrEiJFSq1MapdQ/T0W0ibgbw0mX5UG0b4944KdiiU+UETHJmJjJKSrpoqxiKehasBAPE8440hssLrzGjkifeGTXcCAMpiQOsn92Hr7Y+rG5BEiM7dopO32rg8pG+CsxtLN0Q0EBUrVGpjVLuGttMHAQDeUavqPZIA4BYMZKP8gMqRGANFEpyHHnoI9fX1cDgcWLduHfbtm1ob5Kc//SmuuOIKlJWVoaysDBs3bjzv+Jtuugkmk2nCx+bNm+X+NeZEJEX6XTxlPpUjIcqvVaOkOSR85k2Vo5EWViidMyr7I4mI6rODdsC9QD1/G6k564+k/qQaAFQLWjyjNmC41ziTVJ0W0aZBXdFKEcZNtKWMZtcQCpPpS1+qQOVICG4XUQePWozrGagksic4Tz/9NO644w7cfffdOHToEFatWoVNmzahs3PyZrU9e/bgE5/4BF588UXs3bsXtbW1uPbaa9HRMbF7f/PmzYhGo9mPX/3qV3L/KnMiYiZPYp7qhSpHQvAmSBNuuN1YYn9shvRgMOXqJ5IAEcEzZUhzcU+HcVR2RZuGao3ceAvLGDiFey5nIJVdsSLlUlm0UsQlJJK9zgwSsWGVo5GOUDepZPusFSpHQnALpshRh7ESSbWQPcG5//77ccstt+Dmm29GY2MjfvKTnyA/Px+PPfbYpMc/8cQT+NznPofVq1dj6dKlePTRR8HzPHbt2jXhOLvdDoZhsh9lZer3XkxGxEaMLT016qu+AkANiDBaR9cplSORFlbwb2Fc2kgk8xz5qIiTapkojqd3+HQKnQ7Su6W2P5KIyWyGK062FriIMRLJkb5OCDqRcPnUVT8XmWjXYBzfr9AQEYj05Wuj18ntXwEAGLEBQz0RlaPRP7ImOIlEAgcPHsTGjRvPntBsxsaNG7F3794ZrTE6OopkMony8olTG3v27EF1dTWWLFmC2267DT09UzdljY2NYXBwcMKHEsSH+9HrJOM0NQtWK3LO6fDaSLNzuC+kciTSkeF5sA7yxMto5MYLnBXD41hj9Dv1RYNIWcj/V2vkxgsArjRR2e00iF2DWIlyJkmFSguMt2vgwidUjkY6QgkyreYrr1c3EIHCcgaF5JkYbGuLusEYAFkTnO7ubqTTabhcrglfd7lcYNmZ7Zd/7Wtfg8fjmZAkbd68GT//+c+xa9cufPe738VLL72E9773vUin05Oucd9996GkpCT7UVurTONe9MzbAABHUhvqugDgLSQ9KuFR4/QrDPVEEBMU7dU2JhwPI4jhsV1BlSORBi5MntxL4ybYC9SXPRBxCVVJrtcYSTsXEUUrraqLVo7HiHYNIRN52PUx2qiwAwATJxezaIcxKr9qon7b+AX4zne+g6eeegp79uyBw3FWwO3666/P/v+KFSuwcuVKLFy4EHv27ME111xz3jrbt2/HHXfckf18cHBQkSQn0k4ycI+GLlTe8nqgGwgnjTOGKBoTFiaAgjJtjC8DohheH7h+Y/gkcVGyrelKaMMfSaTaWgKABTcYVTsUSRBtGsTKlFYgdg0xQ9k1hOxka9tXr/6IuIg77cQpJBHljNVGoAay3nUrKythsVjAcRNFiziOA8NcuPT6/e9/H9/5znfw/PPPY+XKC7/5FixYgMrKSpw6Nfkbwm63o7i4eMKHEkRY0hPgSWlD1h4AvAxpWgybjdMoyEZIyVx88tEKLrug/jpsDNEu0bFbdPDWCi4n2XblRrtUjkQaxEqUWJnSCkazaxjoDGFAeG6ubWhWN5hxuM3kdY4apCKpJrImODabDWvXrp3QICw2DK9fv37Kn/ve976He++9F88++yyam6d/44XDYfT09MDt1kajmEikpxUA4BHesFqgppbIkXc4EipHIh1sJ/FtYbR24xXE8DiDiHZ1ijYNZo3deIvIwxKXNIYFiViJIpUp7cA4hETSIHYN7QGigVMWN6GoQhvyEgDgFh6MokO0yXi+yL5vcscdd+CnP/0pdu7ciWPHjuG2227DyMgIbr75ZgDADTfcgO3bt2eP/+53v4tvfvObeOyxx1BfXw+WZcGyLIaHScVheHgYX/3qV/H666+jtbUVu3btwoc+9CEsWrQImzZtkvvXmRUdA2RrQgsqxiI1gtjfoB0Y7DLG1gkrPO
[... base64 image/png data omitted: matplotlib plot of the IDWT reconstruction (red) overlaid on the original signal (green) ...]", 148 | "text/plain": [ 149 | "
" 150 | ] 151 | }, 152 | "metadata": {}, 153 | "output_type": "display_data" 154 | } 155 | ], 156 | "source": [ 157 | "plt.plot(idwt[0, :], **{'color': 'r', 'marker': '.'})\n", 158 | "plt.plot(signal, **{'color': 'g', 'marker': '.'})\n" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": 8, 164 | "metadata": {}, 165 | "outputs": [ 166 | { 167 | "name": "stdout", 168 | "output_type": "stream", 169 | "text": [ 170 | "False\n", 171 | "True\n", 172 | "1.1920929e-07\n" 173 | ] 174 | } 175 | ], 176 | "source": [ 177 | "reconstructed = idwt[0,: ,:].numpy().flatten()\n", 178 | "print(np.array_equal(reconstructed, signal))\n", 179 | "print(np.allclose(reconstructed, signal, atol=1e-05))\n", 180 | "print(abs(reconstructed - signal).max())\n" 181 | ] 182 | }, 183 | { 184 | "cell_type": "code", 185 | "execution_count": 9, 186 | "metadata": {}, 187 | "outputs": [ 188 | { 189 | "name": "stdout", 190 | "output_type": "stream", 191 | "text": [ 192 | "iter 1 ... with Signal shape signal.shape=(1, 32, 1)\n", 193 | "dwt.shape=TensorShape([1, 17, 2])\n", 194 | "idwt.shape=TensorShape([1, 32, 1])\n", 195 | "\n", 196 | "iter 2 ... with Signal shape signal.shape=(1, 32, 2)\n", 197 | "dwt.shape=TensorShape([1, 17, 4])\n", 198 | "idwt.shape=TensorShape([1, 32, 2])\n", 199 | "\n", 200 | "iter 3 ... with Signal shape signal.shape=(1, 32, 3)\n", 201 | "dwt.shape=TensorShape([1, 17, 6])\n", 202 | "idwt.shape=TensorShape([1, 32, 3])\n", 203 | "\n", 204 | "iter 4 ... with Signal shape signal.shape=(1, 32, 4)\n", 205 | "dwt.shape=TensorShape([1, 17, 8])\n", 206 | "idwt.shape=TensorShape([1, 32, 4])\n", 207 | "\n", 208 | "iter 5 ... with Signal shape signal.shape=(1, 32, 5)\n", 209 | "dwt.shape=TensorShape([1, 17, 10])\n", 210 | "idwt.shape=TensorShape([1, 32, 5])\n", 211 | "\n", 212 | "iter 6 ... with Signal shape signal.shape=(1, 32, 6)\n", 213 | "dwt.shape=TensorShape([1, 17, 12])\n", 214 | "idwt.shape=TensorShape([1, 32, 6])\n", 215 | "\n", 216 | "iter 7 ... with Signal shape signal.shape=(1, 32, 7)\n", 217 | "dwt.shape=TensorShape([1, 17, 14])\n", 218 | "idwt.shape=TensorShape([1, 32, 7])\n", 219 | "\n", 220 | "iter 8 ... with Signal shape signal.shape=(1, 32, 8)\n", 221 | "dwt.shape=TensorShape([1, 17, 16])\n", 222 | "idwt.shape=TensorShape([1, 32, 8])\n", 223 | "\n", 224 | "iter 9 ... with Signal shape signal.shape=(1, 32, 9)\n", 225 | "dwt.shape=TensorShape([1, 17, 18])\n", 226 | "idwt.shape=TensorShape([1, 32, 9])\n", 227 | "\n" 228 | ] 229 | } 230 | ], 231 | "source": [ 232 | "def create_sig(channel_count: int):\n", 233 | " num_samples = 32\n", 234 | " x = np.linspace(0, 2 * np.pi, num_samples)\n", 235 | " signal = np.sin(5 * x).astype(np.float32) # 5 Hz sine wave\n", 236 | " return np.repeat(signal[None, :, None], repeats=channel_count, axis=-1)\n", 237 | "\n", 238 | "for i in range(1, 10):\n", 239 | " signal = create_sig(i)\n", 240 | " print(f\"iter {i} ... 
with Signal shape {signal.shape=}\")\n", 241 | " dwt = DWT.DWT1D(wavelet_name=\"db2\")(tf.dtypes.cast(create_sig(i), tf.float32))\n", 242 | " print(f\"{dwt.shape=}\")\n", 243 | " idwt = DWT.IDWT1D(wavelet_name=\"db2\")(tf.dtypes.cast(dwt, tf.float32))\n", 244 | " print(f\"{idwt.shape=}\\n\")" 245 | ] 246 | } 247 | ], 248 | "metadata": { 249 | "kernelspec": { 250 | "display_name": ".venv", 251 | "language": "python", 252 | "name": "python3" 253 | }, 254 | "language_info": { 255 | "codemirror_mode": { 256 | "name": "ipython", 257 | "version": 3 258 | }, 259 | "file_extension": ".py", 260 | "mimetype": "text/x-python", 261 | "name": "python", 262 | "nbconvert_exporter": "python", 263 | "pygments_lexer": "ipython3", 264 | "version": "3.8.20" 265 | }, 266 | "orig_nbformat": 4 267 | }, 268 | "nbformat": 4, 269 | "nbformat_minor": 2 270 | } 271 | -------------------------------------------------------------------------------- /getting_started/tutorial.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "from tensorflow import keras\n", 10 | "import tensorflow_wavelets.Layers.DWT as DWT\n", 11 | "import tensorflow_wavelets.Layers.DTCWT as DTCWT\n", 12 | "import tensorflow_wavelets.Layers.DMWT as DMWT\n", 13 | "import tensorflow_wavelets.Layers.Threshold as Threshold\n" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": 2, 19 | "metadata": {}, 20 | "outputs": [ 21 | { 22 | "name": "stdout", 23 | "output_type": "stream", 24 | "text": [ 25 | "Model: \"sequential\"\n", 26 | "_________________________________________________________________\n", 27 | "Layer (type) Output Shape Param # \n", 28 | "=================================================================\n", 29 | "haar (DWT) (None, 14, 14, 4) 0 \n", 30 | "_________________________________________________________________\n", 31 | "flatten (Flatten) (None, 784) 0 \n", 32 | "_________________________________________________________________\n", 33 | "dense (Dense) (None, 10) 7850 \n", 34 | "=================================================================\n", 35 | "Total params: 7,850\n", 36 | "Trainable params: 7,850\n", 37 | "Non-trainable params: 0\n", 38 | "_________________________________________________________________\n" 39 | ] 40 | } 41 | ], 42 | "source": [ 43 | "\n", 44 | "nb_classes = 10\n", 45 | "model = keras.Sequential()\n", 46 | "model.add(keras.Input(shape=(28, 28, 1)))\n", 47 | "model.add(DWT.DWT(name=\"haar\",concat=0))\n", 48 | "model.add(keras.layers.Flatten())\n", 49 | "model.add(keras.layers.Dense(nb_classes, activation=\"softmax\"))\n", 50 | "model.summary()" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 3, 56 | "metadata": {}, 57 | "outputs": [ 58 | { 59 | "name": "stdout", 60 | "output_type": "stream", 61 | "text": [ 62 | "Model: \"sequential_1\"\n", 63 | "_________________________________________________________________\n", 64 | "Layer (type) Output Shape Param # \n", 65 | "=================================================================\n", 66 | "db4 (DWT) (None, 28, 28, 1) 0 \n", 67 | "=================================================================\n", 68 | "Total params: 0\n", 69 | "Trainable params: 0\n", 70 | "Non-trainable params: 0\n", 71 | "_________________________________________________________________\n" 72 | ] 73 | } 74 | ], 75 | "source": [ 76 | "model = keras.Sequential()\n", 77 | 
"model.add(keras.layers.InputLayer(input_shape=(28, 28, 1)))\n", 78 | "model.add(DWT.DWT(name=\"db4\", concat=1))\n", 79 | "model.summary()" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": 4, 85 | "metadata": {}, 86 | "outputs": [ 87 | { 88 | "name": "stdout", 89 | "output_type": "stream", 90 | "text": [ 91 | "Model: \"MyModel\"\n", 92 | "_________________________________________________________________\n", 93 | "Layer (type) Output Shape Param # \n", 94 | "=================================================================\n", 95 | "input_3 (InputLayer) [(None, 512, 512, 1)] 0 \n", 96 | "_________________________________________________________________\n", 97 | "dmwt (DMWT) (None, 1024, 1024, 1) 0 \n", 98 | "_________________________________________________________________\n", 99 | "threshold (Threshold) (None, 1024, 1024, 1) 0 \n", 100 | "_________________________________________________________________\n", 101 | "idmwt (IDMWT) (None, 512, 512, 1) 0 \n", 102 | "=================================================================\n", 103 | "Total params: 0\n", 104 | "Trainable params: 0\n", 105 | "Non-trainable params: 0\n", 106 | "_________________________________________________________________\n" 107 | ] 108 | } 109 | ], 110 | "source": [ 111 | "\n", 112 | "x_inp = keras.layers.Input(shape=(512, 512, 1))\n", 113 | "x = DMWT.DMWT(\"ghm\")(x_inp)\n", 114 | "x = Threshold.Threshold(algo='sure', mode='hard')(x) # use \"soft\" or \"hard\"\n", 115 | "x = DMWT.IDMWT(\"ghm\")(x)\n", 116 | "model = keras.models.Model(x_inp, x, name=\"MyModel\")\n", 117 | "model.summary()" 118 | ] 119 | } 120 | ], 121 | "metadata": { 122 | "kernelspec": { 123 | "display_name": "base", 124 | "language": "python", 125 | "name": "python3" 126 | }, 127 | "language_info": { 128 | "codemirror_mode": { 129 | "name": "ipython", 130 | "version": 3 131 | }, 132 | "file_extension": ".py", 133 | "mimetype": "text/x-python", 134 | "name": "python", 135 | "nbconvert_exporter": "python", 136 | "pygments_lexer": "ipython3", 137 | "version": "3.8.16" 138 | }, 139 | "orig_nbformat": 4, 140 | "vscode": { 141 | "interpreter": { 142 | "hash": "bf71414324ede15b82b265bb1e616b7922ff32121ec1c144d1e6aa17f905da62" 143 | } 144 | } 145 | }, 146 | "nbformat": 4, 147 | "nbformat_minor": 2 148 | } 149 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | 3 | log_cli=1 4 | 5 | log_cli_level = DEBUG 6 | log_file_level = DEBUG 7 | 8 | log_file = logs/pytest-logs.log 9 | 10 | log_cli_format = %(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s) 11 | log_cli_date_format = %Y-%m-%d %H:%M:%S 12 | log_file_format = %(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s) 13 | log_file_date_format=%Y-%m-%d %H:%M:%S 14 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | scipy==1.5.2 2 | matplotlib~=3.4.3 3 | tensorflow==2.6.1 4 | keras==2.6 5 | psnr-hvsm==0.1.0 6 | opencv-python 7 | scikit-image 8 | pywavelets 9 | tensorflow_probability==0.14.1 10 | numpy==1.19.2 11 | ipykernel 12 | protobuf==3.20.3 13 | pandas==1.4 14 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | with 
open("README.md", "r", encoding="utf-8") as fh: 4 | long_description = fh.read() 5 | 6 | setuptools.setup( 7 | name="tensorflow-wavelets", 8 | version="1.1.2", 9 | author="Timor Leiderman", 10 | author_email="Timorleiderman@gmail.com", 11 | description="Tensorflow wavelet Layers", 12 | long_description=long_description, 13 | long_description_content_type="text/markdown", 14 | url="https://github.com/Timorleiderman/tensorflow-wavelets", 15 | project_urls={ 16 | "Bug Tracker": "https://github.com/Timorleiderman/tensorflow-wavelets/issues", 17 | }, 18 | classifiers=[ 19 | "Programming Language :: Python :: 3", 20 | "License :: OSI Approved :: MIT License", 21 | "Operating System :: OS Independent", 22 | ], 23 | keywords = ['Wavelets', 'Tensorflow'], 24 | install_requires=[ 25 | 'tensorflow', 26 | 'tensorflow-probability', 27 | 'PyWavelets', 28 | ], 29 | package_dir={"": "src"}, 30 | packages=setuptools.find_packages(where="src"), 31 | python_requires=">=3.6", 32 | ) 33 | -------------------------------------------------------------------------------- /src/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8 2 | 3 | RUN apt-get update 4 | RUN apt-get install ffmpeg libsm6 libxext6 -y 5 | 6 | RUN pip3 install opencv-python 7 | RUN pip3 install tensorflow 8 | RUN pip3 install --upgrade tensorflow-probability 9 | RUN pip3 install PyWavelets 10 | RUN pip3 install psnr-hvsm 11 | 12 | WORKDIR /workspaces 13 | RUN git clone https://github.com/RenYang-home/OpenDVC.git 14 | 15 | ENV PYTHONPATH /workspace/tensorflow_wavelets/src -------------------------------------------------------------------------------- /src/input/LennaGrey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Timorleiderman/tensorflow-wavelets/58cf7dc8e469e963a6c7dd0567e05925a7d3c65c/src/input/LennaGrey.png -------------------------------------------------------------------------------- /src/input/Lenna_orig.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Timorleiderman/tensorflow-wavelets/58cf7dc8e469e963a6c7dd0567e05925a7d3c65c/src/input/Lenna_orig.png -------------------------------------------------------------------------------- /src/tensorflow_wavelets/Layers/DMWT.py: -------------------------------------------------------------------------------- 1 | # Timor Leiderman AUG 2021 2 | 3 | from tensorflow.keras import layers 4 | from tensorflow_wavelets.utils import filters 5 | from tensorflow_wavelets.utils.helpers import * 6 | from tensorflow_wavelets.utils.cast import * 7 | 8 | # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # for tensor flow warning 9 | # os.environ['CUDA_VISIBLE_DEVICES'] = '-1' 10 | 11 | 12 | # Discrete MultiWavelet transform Layer 13 | class DMWT(layers.Layer): 14 | """ 15 | Discrete Multi Wavlelets Transform 16 | Input: wave_name - name of the Wavele Filters (ghm, dd2) 17 | TODO: add support for more wavelets 18 | """ 19 | def __init__(self, wavelet_name='ghm', **kwargs): 20 | super(DMWT, self).__init__(**kwargs) 21 | self.wave_name = wavelet_name.lower() 22 | self.w_mat = None 23 | 24 | def build(self, input_shape): 25 | # create filter matrix 26 | h = int(input_shape[1]) 27 | w = int(input_shape[2]) 28 | if self.wave_name == 'dd2': 29 | w_mat = filters.dd2(h, w) 30 | else: 31 | w_mat = filters.ghm_w_mat(h, w) 32 | w_mat = tf.constant(w_mat, dtype=tf.float32) 33 | w_mat = tf.expand_dims(w_mat, axis=0) 34 | 
self.w_mat = tf.expand_dims(w_mat, axis=-1) 35 | # repeat if number of channels is bigger then 1 36 | if input_shape[-1] != 1: 37 | self.w_mat = tf.repeat(self.w_mat, input_shape[-1], axis=-1) 38 | 39 | def call(self, inputs, training=None, mask=None): 40 | if self.wave_name == 'dd2': 41 | res = analysis_filter_bank2d_dd2_mult(inputs, self.w_mat) 42 | else: 43 | res = analysis_filter_bank2d_ghm_mult(inputs, self.w_mat) 44 | return res 45 | 46 | 47 | # Inverse Discrete MultiWavelet transform Layer 48 | 49 | class IDMWT(layers.Layer): 50 | """ 51 | Inverse Multi Wavelet Transform 52 | wave_name - name of the Wavele Filters (ghm, dd2) 53 | """ 54 | def __init__(self, wave_name='ghm', **kwargs): 55 | super(IDMWT, self).__init__(**kwargs) 56 | self.wave_name = wave_name 57 | self.w_mat = None 58 | 59 | def build(self, input_shape): 60 | # create filter matrix 61 | h = int(input_shape[1])//2 62 | w = int(input_shape[2])//2 63 | if self.wave_name == 'dd2': 64 | w_mat = filters.dd2(2*h, 2*w) 65 | else: 66 | w_mat = filters.ghm_w_mat(h, w) 67 | w_mat = tf.constant(w_mat, dtype=tf.float32) 68 | # transpose for the reconstruction 69 | w_mat = tf.transpose(w_mat, perm=[1, 0]) 70 | w_mat = tf.expand_dims(w_mat, axis=-1) 71 | self.w_mat = tf.expand_dims(w_mat, axis=0) 72 | # repeat if channels bigger then 1 73 | if input_shape[-1] != 1: 74 | self.w_mat = tf.repeat(self.w_mat, input_shape[-1], axis=-1) 75 | 76 | def call(self, inputs, training=None, mask=None): 77 | if self.wave_name == 'dd2': 78 | res = synthesis_filter_bank2d_dd2_mult(inputs, self.w_mat) 79 | else: 80 | res = synthesis_filter_bank2d_ghm_mult(inputs, self.w_mat) 81 | 82 | return res 83 | 84 | 85 | if __name__ == "__main__": 86 | 87 | import cv2 88 | from tensorflow.keras import Model 89 | from tensorflow_wavelets.Layers import DWT 90 | from tensorflow_wavelets.Layers.Threshold import * 91 | from tensorflow_wavelets.utils.cast import * 92 | import numpy as np 93 | from tensorflow_wavelets.utils.mse import mse 94 | 95 | img = cv2.imread("../../../src/input/LennaGrey.png", 0) 96 | img_ex1 = np.expand_dims(img, axis=0) 97 | img_ex1 = np.expand_dims(img_ex1, axis=-1) 98 | 99 | # _, h, w, c = img_ex1.shape 100 | h, w, c = 512, 512, 1 101 | x_inp = layers.Input(shape=(h, w, c)) 102 | x = DMWT("ghm")(x_inp) 103 | x = Threshold(algo='1', mode="hard")(x) 104 | x = IDMWT("ghm")(x) 105 | model = Model(x_inp, x, name="MyModel") 106 | model.summary() 107 | model.run_eagerly = True 108 | 109 | out = model.predict(img_ex1) 110 | print(mse(img, out[0, ..., 0])) 111 | cv2.imshow("orig", out[0, ..., 0].astype("uint8")) 112 | cv2.waitKey(0) 113 | 114 | # 115 | # out_l = tf_rgb_to_ndarray(out*2) 116 | # out1 = cast_like_matlab_uint8_2d_rgb(out_l) 117 | # cv2.imshow("orig", out1.astype('uint8')) 118 | # cv2.waitKey(0) 119 | 120 | # x_inp = layers.Input(shape=(28, 28, 1)) 121 | # x = DMWT()(x_inp) 122 | # # x = IDMWT()(x) 123 | # x = layers.Flatten()(x) 124 | # x = layers.Dense(10, activation="softmax")(x) 125 | # 126 | # model = Model(x_inp, x, name="mymodel") 127 | # model.summary() 128 | # optimizer = SGD(lr=1e-4, momentum=0.9) 129 | # model.compile(loss="categorical_crossentropy", 130 | # optimizer=optimizer, metrics=["accuracy"]) 131 | # (x_train, y_train), (x_test, y_test) = mnist.load_data() 132 | # 133 | # y_train = to_categorical(y_train) 134 | # y_test = to_categorical(y_test) 135 | # x_train = x_train.astype('float32') / 255.0 136 | # x_train = np.expand_dims(x_train, axis=-1) 137 | # 138 | # x_test = x_test.astype('float32') / 255.0 139 | # x_test 
= np.expand_dims(x_test, axis=-1) 140 | # history = model.fit(x_train, y_train, 141 | # validation_split=0.2, 142 | # epochs=40, 143 | # batch_size=32, 144 | # verbose=2, 145 | # ) 146 | -------------------------------------------------------------------------------- /src/tensorflow_wavelets/Layers/DTCWT.py: -------------------------------------------------------------------------------- 1 | from tensorflow.keras import layers 2 | from tensorflow_wavelets.utils import filters 3 | from tensorflow_wavelets.utils.helpers import * 4 | from tensorflow_wavelets.utils.cast import * 5 | 6 | # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # for tensor flow warning 7 | # os.environ['CUDA_VISIBLE_DEVICES'] = '-1' 8 | 9 | 10 | class DTCWT(layers.Layer): 11 | """ 12 | Durel Tree Complex Wavelet Transform 13 | Input: level - tree-level (int) 14 | """ 15 | def __init__(self, level=1, concat=True, **kwargs): 16 | super(DTCWT, self).__init__(**kwargs) 17 | 18 | if level <= 1: 19 | level = 1 20 | 21 | self.level = int(level) 22 | self.conv_type = "SAME" 23 | self.border_padd = "SYMMETRIC" 24 | 25 | # Faf - First analysis filter - for the first level 26 | # Fsf - First synthesis filter 27 | faf, fsf = filters.fs_farras() 28 | af, sf = filters.duelfilt() 29 | 30 | # convert to tensor 31 | self.Faf = duel_filter_tf(faf) 32 | self.Fsf = duel_filter_tf(fsf) 33 | self.af = duel_filter_tf(af) 34 | self.sf = duel_filter_tf(sf) 35 | 36 | self.concat = concat 37 | def build(self, input_shape): 38 | # repeat last channel if input channel bigger then 1 39 | if input_shape[-1] > 1: 40 | self.Faf = tf.repeat(self.Faf, input_shape[-1], axis=-1) 41 | self.Fsf = tf.repeat(self.Fsf, input_shape[-1], axis=-1) 42 | self.af = tf.repeat(self.af, input_shape[-1], axis=-1) 43 | self.sf = tf.repeat(self.sf, input_shape[-1], axis=-1) 44 | 45 | def call(self, inputs, training=None, mask=None): 46 | 47 | # normalize 48 | x_norm = tf.math.divide(inputs, 2) 49 | 50 | # 2 trees J+1 lists 51 | w = [[[[], []] for _ in range(2)] for __ in range(self.level+1)] 52 | 53 | # 2 trees - 2 filters ( first stage is using differnet filter 54 | for m in range(2): 55 | for n in range(2): 56 | [lo, w[0][m][n]] = analysis_filter_bank2d(x_norm, self.Faf[m][0], self.Faf[m][1], 57 | self.Faf[n][0], self.Faf[n][1]) 58 | for j in range(1, self.level): 59 | [lo, w[j][m][n]] = analysis_filter_bank2d(lo, self.af[m][0], self.af[m][1], 60 | self.af[n][0], self.af[n][1]) 61 | w[self.level][m][n] = lo 62 | 63 | # add and subtract for the complex 64 | for j in range(self.level): 65 | for m in range(3): 66 | 67 | w[j][0][0][m], w[j][1][1][m] = add_sub(w[j][0][0][m], w[j][1][1][m]) 68 | w[j][0][1][m], w[j][1][0][m] = add_sub(w[j][0][1][m], w[j][1][0][m]) 69 | 70 | # concat into one big image 71 | # different resolution as the tree is deeper 72 | # TODO: How to split different resolutions into different channels 73 | if not self.concat: 74 | return w 75 | j = 1 76 | w_c = w 77 | 78 | for j in [x for x in range(1, self.level)][::-1]: 79 | 80 | w_c[j][0][0] = tf.concat([tf.concat([w_c[j+1][0][0], w_c[j][0][0][0]], axis=2), 81 | tf.concat([w_c[j][0][0][1], w_c[j][0][0][2]], axis=2)], axis=1) 82 | w_c[j][0][1] = tf.concat([tf.concat([w_c[j+1][0][1], w_c[j][0][1][0]], axis=2), 83 | tf.concat([w_c[j][0][1][1], w_c[j][0][1][2]], axis=2)], axis=1) 84 | w_c[j][1][0] = tf.concat([tf.concat([w_c[j+1][1][0], w_c[j][1][0][0]], axis=2), 85 | tf.concat([w_c[j][1][0][1], w_c[j][1][0][2]], axis=2)], axis=1) 86 | w_c[j][1][1] = tf.concat([tf.concat([w_c[j+1][1][1], w_c[j][1][1][0]], axis=2), 
87 | tf.concat([w_c[j][1][1][1], w_c[j][1][1][2]], axis=2)], axis=1) 88 | 89 | w_0 = tf.concat([tf.concat([w_c[j][0][0], w_c[0][0][0][0]], axis=2), 90 | tf.concat([w_c[0][0][0][1], w_c[0][0][0][2]], axis=2)], axis=1) 91 | w_1 = tf.concat([tf.concat([w_c[j][0][1], w_c[0][0][1][0]], axis=2), 92 | tf.concat([w_c[0][0][1][1], w_c[0][0][1][2]], axis=2)], axis=1) 93 | w_2 = tf.concat([tf.concat([w_c[j][1][0], w_c[0][1][0][0]], axis=2), 94 | tf.concat([w_c[0][1][0][1], w_c[0][1][0][2]], axis=2)], axis=1) 95 | w_3 = tf.concat([tf.concat([w_c[j][1][1], w_c[0][1][1][0]], axis=2), 96 | tf.concat([w_c[0][1][1][1], w_c[0][1][1][2]], axis=2)], axis=1) 97 | 98 | w_1234 = tf.concat([tf.concat([w_0, w_1], axis=2), tf.concat([w_2, w_3], axis=2)], axis=1) 99 | return w_1234 100 | 101 | 102 | class IDTCWT(layers.Layer): 103 | """ 104 | Inverse Duel Tree Complex Wavelet Transform 105 | Input: level - tree-level (int) 106 | """ 107 | def __init__(self, level=1, caoncatenated=True, **kwargs): 108 | super(IDTCWT, self).__init__(**kwargs) 109 | 110 | if level <= 1: 111 | level = 1 112 | 113 | self.level = int(level) 114 | self.conv_type = "SAME" 115 | self.border_padd = "SYMMETRIC" 116 | 117 | # Faf - First analysis filter - for the first level 118 | # Fsf - First synthesis filter 119 | faf, fsf = filters.fs_farras() 120 | af, sf = filters.duelfilt() 121 | 122 | self.Faf = duel_filter_tf(faf) 123 | self.Fsf = duel_filter_tf(fsf) 124 | self.af = duel_filter_tf(af) 125 | self.sf = duel_filter_tf(sf) 126 | 127 | self.caoncatenated = caoncatenated 128 | def build(self, input_shape): 129 | # repeat last channel if input channel bigger then 1 130 | if input_shape[-1] > 1: 131 | self.Faf = tf.repeat(self.Faf, input_shape[-1], axis=-1) 132 | self.Fsf = tf.repeat(self.Fsf, input_shape[-1], axis=-1) 133 | self.af = tf.repeat(self.af, input_shape[-1], axis=-1) 134 | self.sf = tf.repeat(self.sf, input_shape[-1], axis=-1) 135 | 136 | def call(self, inputs, training=None, mask=None): 137 | 138 | # convert one big image into list of tree levels 139 | 140 | if self.caoncatenated: 141 | w_rec = reconstruct_w_leveln(inputs, self.level) 142 | else: 143 | w_rec = inputs 144 | 145 | height = int(w_rec[0][0][0][0].shape[1]*2) 146 | width = int(w_rec[0][0][0][0].shape[2]*2) 147 | 148 | # init image to be reconstructed 149 | y = tf.zeros((height, width, inputs.shape[-1]), dtype=tf.float32) 150 | 151 | w_i = [[[[list() for _ in range(3)], [list() for _ in range(3)]] 152 | for __ in range(2)] for ___ in range(self.level+1)] 153 | 154 | # first add and subtract (inverse the transform) 155 | for j in range(self.level): 156 | for m in range(3): 157 | 158 | w_i[j][0][0][m], w_i[j][1][1][m] = add_sub(w_rec[j][0][0][m], w_rec[j][1][1][m]) 159 | w_i[j][0][1][m], w_i[j][1][0][m] = add_sub(w_rec[j][0][1][m], w_rec[j][1][0][m]) 160 | 161 | # synthesis with the First filters to be last in the reconstruction 162 | for m in range(2): 163 | for n in range(2): 164 | lo = w_rec[self.level][m][n] 165 | for j in [x for x in range(1, self.level)][::-1]: 166 | lo = synthesis_filter_bank2d(lo, w_i[j][m][n], self.sf[m][0], 167 | self.sf[m][1], self.sf[n][0], self.sf[n][1]) 168 | lo = synthesis_filter_bank2d(lo, w_i[0][m][n], self.Fsf[m][0], 169 | self.Fsf[m][1], self.Fsf[n][0], self.Fsf[n][1]) 170 | y = tf.math.add(y, lo) 171 | 172 | # revert the normalization 173 | y = tf.math.divide(y, 2) 174 | return y 175 | 176 | 177 | if __name__ == "__main__": 178 | pass 179 | # from tensorflow.keras.datasets import mnist, cifar10 180 | # from tensorflow.keras.optimizers 
import Adam, SGD 181 | # from tensorflow.keras.utils import to_categorical 182 | 183 | # img = cv2.imread("../input/Lenna_orig.png", 0) 184 | # img_ex1 = np.expand_dims(img, axis=0) 185 | # # 186 | # if len(img_ex1.shape) <= 3: 187 | # img_ex1 = np.expand_dims(img_ex1, axis=-1) 188 | # 189 | # _, h, w, c = img_ex1.shape 190 | # # 191 | # 192 | # cplx_input = layers.Input(shape=(h, w, c)) 193 | # x = DTCWT(2)(cplx_input) 194 | # # x = IDTCWT(2)(x) 195 | # model = Model(cplx_input, x, name="mymodel") 196 | # model.summary() 197 | # 198 | # out = model.predict(img_ex1) 199 | # # diff = np.max(out[0] - img) 200 | # # print("diff is", diff) 201 | # cv2.imshow("orig", out[0].astype('uint8')) 202 | # # cv2.imshow("reconstructed", img.astype('uint8')) 203 | # cv2.waitKey(0) 204 | # x = layers.Conv2D(32, (3, 3), activation="relu",padding='same')(x) 205 | # x = layers.Dropout(0.5)(x) 206 | # x = layers.Flatten()(x) 207 | # x = layers.Dense(10, activation="softmax")(x) 208 | # model = Model(cplx_input, x, name="mymodel") 209 | # model.summary() 210 | # 211 | # optimizer = SGD(lr=1e-4, momentum=0.9) 212 | # model.compile(loss="categorical_crossentropy", 213 | # optimizer=optimizer, metrics=["accuracy"]) 214 | # (x_train, y_train), (x_test, y_test) = cifar10.load_data() 215 | # 216 | # y_train = to_categorical(y_train) 217 | # y_test = to_categorical(y_test) 218 | # x_train = x_train.astype('float32') / 255.0 219 | # #x_train = np.expand_dims(x_train, axis=-1) 220 | # 221 | # x_test = x_test.astype('float32') / 255.0 222 | # #x_test = np.expand_dims(x_test, axis=-1) 223 | # print(x_test.shape) 224 | # history = model.fit(x_train, y_train, 225 | # validation_split=0.2, 226 | # epochs=30, 227 | # batch_size=32, 228 | # verbose=2, 229 | # ) 230 | 231 | -------------------------------------------------------------------------------- /src/tensorflow_wavelets/Layers/DWT.py: -------------------------------------------------------------------------------- 1 | import pywt 2 | import tensorflow as tf 3 | from tensorflow.keras import layers 4 | from tensorflow_wavelets.utils.helpers import * 5 | 6 | # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # for tensor flow warning 7 | # os.environ['CUDA_VISIBLE_DEVICES'] = '-1' 8 | 9 | 10 | class DWT(layers.Layer): 11 | """ 12 | Discrete Wavelet transform - tensorflow - keras 13 | inputs: 14 | name - wavelet name ( from pywavelet library) 15 | concat - 1 - merge transform output to one channel 16 | - 0 - split to 4 channels ( 1 img in -> 4 smaller img out) 17 | """ 18 | 19 | def __init__(self, wavelet_name='haar', concat=1, **kwargs): 20 | super(DWT, self).__init__(**kwargs) 21 | # self._name = self.name + "_" + name 22 | # get filter coeffs from 3rd party lib 23 | wavelet = pywt.Wavelet(wavelet_name) 24 | self.wavelet_name = wavelet_name 25 | self.dec_len = wavelet.dec_len 26 | self.concat = concat 27 | # decomposition filter low pass and hight pass coeffs 28 | dec_lpf = wavelet.dec_lo 29 | dec_hpf = wavelet.dec_hi 30 | 31 | # covert filters into tensors and reshape for convolution math 32 | dec_lpf = tf.constant(dec_lpf[::-1]) 33 | self.dec_lpf = tf.reshape(dec_lpf, (1, wavelet.dec_len, 1, 1)) 34 | 35 | dec_hpf = tf.constant(dec_hpf[::-1]) 36 | self.dec_hpf = tf.reshape(dec_hpf, (1, wavelet.dec_len, 1, 1)) 37 | 38 | self.conv_type = "VALID" 39 | self.border_padd = "SYMMETRIC" 40 | 41 | def build(self, input_shape): 42 | # filter dims should be bigger if input is not gray scale 43 | if input_shape[-1] != 1: 44 | # self.db2_lpf = tf.repeat(self.db2_lpf, input_shape[-1], 
axis=-1) 45 | self.dec_lpf = tf.keras.backend.repeat_elements(self.dec_lpf, input_shape[-1], axis=-1) 46 | # self.db2_hpf = tf.repeat(self.db2_hpf, input_shape[-1], axis=-1) 47 | self.dec_hpf = tf.keras.backend.repeat_elements(self.dec_hpf, input_shape[-1], axis=-1) 48 | 49 | def call(self, inputs, training=None, mask=None): 50 | 51 | # border padding symatric add coulums 52 | inputs_pad = tf.pad(inputs, [[0, 0], [0, 0], [self.dec_len-1, self.dec_len-1], [0, 0]], self.border_padd) 53 | 54 | # approximation conv only rows 55 | a = tf.nn.conv2d( 56 | inputs_pad, self.dec_lpf, padding=self.conv_type, strides=[1, 1, 1, 1], 57 | ) 58 | # details conv only rows 59 | d = tf.nn.conv2d( 60 | inputs_pad, self.dec_hpf, padding=self.conv_type, strides=[1, 1, 1, 1], 61 | ) 62 | # ds - down sample 63 | a_ds = a[:, :, 1:a.shape[2]:2, :] 64 | d_ds = d[:, :, 1:d.shape[2]:2, :] 65 | 66 | # border padding symatric add rows 67 | a_ds_pad = tf.pad(a_ds, [[0, 0], [self.dec_len-1, self.dec_len-1], [0, 0], [0, 0]], self.border_padd) 68 | d_ds_pad = tf.pad(d_ds, [[0, 0], [self.dec_len-1, self.dec_len-1], [0, 0], [0, 0]], self.border_padd) 69 | 70 | # convolution is done on the rows so we need to 71 | # transpose the matrix in order to convolve the colums 72 | a_ds_pad = tf.transpose(a_ds_pad, perm=[0, 2, 1, 3]) 73 | d_ds_pad = tf.transpose(d_ds_pad, perm=[0, 2, 1, 3]) 74 | 75 | # aa approximation approximation 76 | aa = tf.nn.conv2d( 77 | a_ds_pad, self.dec_lpf, padding=self.conv_type, strides=[1, 1, 1, 1], 78 | ) 79 | # ad approximation details 80 | ad = tf.nn.conv2d( 81 | a_ds_pad, self.dec_hpf, padding=self.conv_type, strides=[1, 1, 1, 1], 82 | ) 83 | # ad details aproximation 84 | da = tf.nn.conv2d( 85 | d_ds_pad, self.dec_lpf, padding=self.conv_type, strides=[1, 1, 1, 1], 86 | ) 87 | # dd details details 88 | dd = tf.nn.conv2d( 89 | d_ds_pad, self.dec_hpf, padding=self.conv_type, strides=[1, 1, 1, 1], 90 | ) 91 | 92 | # transpose back the matrix 93 | aa = tf.transpose(aa, perm=[0, 2, 1, 3]) 94 | ad = tf.transpose(ad, perm=[0, 2, 1, 3]) 95 | da = tf.transpose(da, perm=[0, 2, 1, 3]) 96 | dd = tf.transpose(dd, perm=[0, 2, 1, 3]) 97 | 98 | # down sample 99 | ll = aa[:, 1:aa.shape[1]:2, :, :] 100 | lh = ad[:, 1:ad.shape[1]:2, :, :] 101 | hl = da[:, 1:da.shape[1]:2, :, :] 102 | hh = dd[:, 1:dd.shape[1]:2, :, :] 103 | 104 | # concate all outputs ionto tensor 105 | if self.concat == 0: 106 | x = tf.concat([ll, lh, hl, hh], axis=-1) 107 | else: 108 | x = tf.concat([tf.concat([ll, lh], axis=1), tf.concat([hl, hh], axis=1)], axis=2) 109 | return x 110 | 111 | 112 | class IDWT(layers.Layer): 113 | """ 114 | Inverse Discrete Wavelet Transform - Tensorflow - keras 115 | Inputs: 116 | name - wavelet name ( from pywavelet library) 117 | concat - 1 - not splitted One channel input([[ll , lh],[hl, hh]]) 118 | 0 - splitted 4 channels input([ll , lh, hl ,hh]) 119 | """ 120 | def __init__(self, wavelet_name='haar', concat=1, **kwargs): 121 | super(IDWT, self).__init__(**kwargs) 122 | # self._name = self.name + "_" + name 123 | self.pad_type = "VALID" 124 | self.border_pad = "SYMMETRIC" 125 | self.concat = concat 126 | # get filter coeffs from 3rd party lib 127 | wavelet = pywt.Wavelet(wavelet_name) 128 | self.wavelet_name = wavelet_name 129 | self.rec_len = wavelet.rec_len 130 | 131 | # decomposition filter low pass and hight pass coeffs 132 | rec_lpf = wavelet.rec_lo 133 | rec_hpf = wavelet.rec_hi 134 | 135 | # covert filters into tensors and reshape for convolution math 136 | rec_lpf = tf.constant(rec_lpf[::-1]) 137 | 
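# Note: tf.nn.conv2d computes cross-correlation rather than true convolution, which is
# why the PyWavelets reconstruction taps are reversed with [::-1] above; the
# (1, rec_len, 1, 1) kernel shape used below makes each filter act along a single
# spatial axis only, and the other axis is handled by transposing the tensor between
# the two filtering passes (see conv_tr_conv_tr in utils/helpers.py).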
self.rec_lpf = tf.reshape(rec_lpf, (1, wavelet.rec_len, 1, 1)) 138 | 139 | rec_hpf = tf.constant(rec_hpf[::-1]) 140 | self.rec_hpf = tf.reshape(rec_hpf, (1, wavelet.rec_len, 1, 1)) 141 | 142 | def call(self, inputs, training=None, mask=None): 143 | 144 | if self.concat == 0: 145 | ll = tf.expand_dims(inputs[:,:,:,0], axis = -1) 146 | lh = tf.expand_dims(inputs[:,:,:,1], axis = -1) 147 | hl = tf.expand_dims(inputs[:,:,:,2], axis = -1) 148 | hh = tf.expand_dims(inputs[:,:,:,3], axis = -1) 149 | x = tf.concat([ll, hl, lh, hh], axis=-1) 150 | else: 151 | ll_lh_hl_hh = tf.split(inputs, 2, axis=1) 152 | ll_lh = tf.split(ll_lh_hl_hh[0], 2, axis=2) 153 | hl_hh = tf.split(ll_lh_hl_hh[1], 2, axis=2) 154 | ll_lh_conc = tf.concat(ll_lh, axis=-1) 155 | hl_hh_conc = tf.concat(hl_hh, axis=-1) 156 | x = tf.concat([ll_lh_conc, hl_hh_conc], axis=-1) 157 | 158 | # border padding for convolution with low pass and high pass filters 159 | x = tf.pad(x, 160 | [[0, 0], [self.rec_len-1, self.rec_len-1], [self.rec_len-1, self.rec_len-1], [0, 0]], 161 | self.border_pad) 162 | # convert to float32 163 | # x = tf.cast(x, tf.float32) 164 | # GPU works with float 32 165 | # CPU can work with 64 but need to add extra flag 166 | # convert to float64 167 | # x = tf.cast(x, tf.float64) 168 | 169 | # extract approximation and details from input tensor 170 | # TODO: whit if tensor shape is bigger then 4? 171 | # and expand the dims for the up sampling 172 | 173 | ll = tf.expand_dims(x[:, :, :, 0], axis=-1) 174 | lh = tf.expand_dims(x[:, :, :, 1], axis=-1) 175 | hl = tf.expand_dims(x[:, :, :, 2], axis=-1) 176 | hh = tf.expand_dims(x[:, :, :, 3], axis=-1) 177 | 178 | ll_us_pad = upsampler2d(ll) 179 | lh_us_pad = upsampler2d(lh) 180 | hl_us_pad = upsampler2d(hl) 181 | hh_us_pad = upsampler2d(hh) 182 | 183 | # convolution for the rows 184 | # transpose for the column convolution 185 | # convolution for the column 186 | # transpose back to normal 187 | 188 | ll_conv_lpf_lpf_tr = conv_tr_conv_tr(ll_us_pad, self.rec_lpf, self.rec_lpf, self.pad_type) 189 | lh_conv_lpf_hpf_tr = conv_tr_conv_tr(lh_us_pad, self.rec_hpf, self.rec_lpf, self.pad_type) 190 | hl_conv_hpf_lpf_tr = conv_tr_conv_tr(hl_us_pad, self.rec_lpf, self.rec_hpf, self.pad_type) 191 | hh_conv_hpf_hpf_tr = conv_tr_conv_tr(hh_us_pad, self.rec_hpf, self.rec_hpf, self.pad_type) 192 | 193 | # add all together 194 | reconstructed = tf.math.add_n([ll_conv_lpf_lpf_tr, lh_conv_lpf_hpf_tr, 195 | hl_conv_hpf_lpf_tr, hh_conv_hpf_hpf_tr]) 196 | # crop the paded part 197 | crop = (self.rec_len - 1)*2 198 | return reconstructed[:, crop-1:-crop, crop-1:-crop, :] 199 | 200 | 201 | class DWT1D(layers.Layer): 202 | """ 203 | 1D Discrete Wavelet Transform - TensorFlow - Keras 204 | """ 205 | 206 | def __init__(self, wavelet_name='haar', **kwargs): 207 | super(DWT1D, self).__init__(**kwargs) 208 | wavelet = pywt.Wavelet(wavelet_name) 209 | self.wavelet_name = wavelet_name 210 | self.dec_len = wavelet.dec_len 211 | 212 | # decomposition filters (reverse order) 213 | dec_lpf = wavelet.dec_lo[::-1] 214 | dec_hpf = wavelet.dec_hi[::-1] 215 | 216 | # Convert filters to tensors (batch, in_channels, out_channels) 217 | self.dec_lpf = tf.constant(dec_lpf, dtype=tf.float32)[:, tf.newaxis, tf.newaxis] 218 | self.dec_hpf = tf.constant(dec_hpf, dtype=tf.float32)[:, tf.newaxis, tf.newaxis] 219 | 220 | self.border_padd = "SYMMETRIC" 221 | self.border_type = "VALID" 222 | 223 | def call(self, inputs, training=None, mask=None): 224 | batch_size, length, channels = tf.unstack(tf.shape(inputs)) 225 | 226 | # 
Pad input symmetrically 227 | inputs_pad = tf.pad(inputs, [[0, 0], [self.dec_len-1, self.dec_len-1], [0, 0]], self.border_padd) 228 | 229 | # Expand filters to support multiple channels (dec_len, 1, channels) 230 | dec_lpf = tf.tile(self.dec_lpf, [1, 1, channels]) # Repeat for all channels 231 | dec_hpf = tf.tile(self.dec_hpf, [1, 1, channels]) # Repeat for all channels 232 | 233 | # Apply convolution independently per channel 234 | a = tf.nn.conv1d(inputs_pad, dec_lpf, stride=1, padding=self.border_type) 235 | d = tf.nn.conv1d(inputs_pad, dec_hpf, stride=1, padding=self.border_type) 236 | 237 | # Downsampling 238 | a_ds = a[:, 1:a.shape[1]:2, :] 239 | d_ds = d[:, 1:d.shape[1]:2, :] 240 | 241 | return tf.concat([a_ds, d_ds], axis=-1) # Concatenate along channel dimension 242 | 243 | 244 | class IDWT1D(layers.Layer): 245 | """ 246 | 1D Inverse Discrete Wavelet Transform - TensorFlow - Keras 247 | """ 248 | 249 | def __init__(self, wavelet_name='haar', **kwargs): 250 | super(IDWT1D, self).__init__(**kwargs) 251 | wavelet = pywt.Wavelet(wavelet_name) 252 | self.wavelet_name = wavelet_name 253 | self.rec_len = wavelet.rec_len # Reconstruction filter length 254 | 255 | # Reconstruction filters 256 | rec_lpf = wavelet.rec_lo[::-1] # Low-pass reconstruction filter 257 | rec_hpf = wavelet.rec_hi[::-1] # High-pass reconstruction filter 258 | 259 | # Convert filters to tensors 260 | self.rec_lpf = tf.constant(rec_lpf, dtype=tf.float32)[:, tf.newaxis, tf.newaxis] 261 | self.rec_hpf = tf.constant(rec_hpf, dtype=tf.float32)[:, tf.newaxis, tf.newaxis] 262 | 263 | self.border_padd = "REFLECT" 264 | self.border_type = "VALID" 265 | 266 | def call(self, inputs, training=None, mask=None): 267 | # Split approximation and detail coefficients 268 | a_ds, d_ds = tf.split(inputs, num_or_size_splits=2, axis=-1) 269 | 270 | batch_size, length, channels = tf.unstack(tf.shape(a_ds)) 271 | 272 | # Upsample (interleave with zeros) 273 | upsampled_length = length * 2 274 | a_upsampled = tf.reshape(tf.stack([a_ds, tf.zeros_like(a_ds)], axis=2), (batch_size, upsampled_length, channels)) 275 | d_upsampled = tf.reshape(tf.stack([d_ds, tf.zeros_like(d_ds)], axis=2), (batch_size, upsampled_length, channels)) 276 | 277 | # Compute padding size dynamically 278 | pad_size = self.rec_len - 1 279 | 280 | # Apply padding before convolution 281 | a_upsampled_pad = tf.pad(a_upsampled, [[0, 0], [pad_size, pad_size], [0, 0]], self.border_padd) 282 | d_upsampled_pad = tf.pad(d_upsampled, [[0, 0], [pad_size, pad_size], [0, 0]], self.border_padd) 283 | 284 | # Expand filters to match the number of channels 285 | rec_lpf = tf.tile(self.rec_lpf, [1, 1, channels]) 286 | rec_hpf = tf.tile(self.rec_hpf, [1, 1, channels]) 287 | 288 | # Convolve with reconstruction filters 289 | a_rec = tf.nn.conv1d(a_upsampled_pad, rec_lpf, stride=1, padding=self.border_type) 290 | d_rec = tf.nn.conv1d(d_upsampled_pad, rec_hpf, stride=1, padding=self.border_type) 291 | 292 | # Add the reconstructed signals 293 | reconstructed = a_rec[:, pad_size-1:-pad_size, :] + d_rec[:, pad_size-1:-pad_size, :] 294 | 295 | return reconstructed 296 | 297 | 298 | if __name__ == "__main__": 299 | pass 300 | import cv2 301 | from tensorflow import keras 302 | # (x_train, y_train), (x_test, y_test) = cifar10.load_data() 303 | # x_train = x_train.astype("float32") 304 | # x_test = x_test.astype("float32") 305 | # # x_train = cv2.imread("../input/LennaGrey.png", 0) 306 | # frog = tf.expand_dims( 307 | # x_train[0, :, :, :], 0, name=None 308 | # ) 309 | # print("frog shape", 
frog.shape) 310 | # model = keras.Sequential() 311 | # model.add(keras.Input(shape=(256, 256, 4))) 312 | # model.add(IDWT()) 313 | # model.summary() 314 | 315 | name = "db2" 316 | img = cv2.imread("../../../src/input/LennaGrey.png", 0) 317 | # img = cv2.imread("../input/Lenna_orig.png",0) 318 | img_ex1 = np.expand_dims(img, axis=-1) 319 | img_ex2 = np.expand_dims(img_ex1, axis=0) 320 | # # img_ex2 = np.expand_dims(img, axis=0) 321 | # 322 | model = keras.Sequential() 323 | model.add(layers.InputLayer(input_shape=(28, 28, 1))) 324 | model.add(DWT(name="db4", concat=1)) 325 | model.summary() 326 | # coeffs = model.predict(img_ex2) 327 | # _, w_coef, h_coef, c_coef = coeffs.shape 328 | 329 | # data = tf_to_ndarray(coeffs, channel=3) 330 | # data = cast_like_matlab_uint8_2d(data) 331 | # cv2.imshow("test", data) 332 | # cv2.waitKey(0) 333 | 334 | # concat = 1 335 | # LL = coeffs[0, 0:w_coef//2, 0:h_coef//2, 0] 336 | # LH = coeffs[0, 0:w_coef//2, h_coef//2:, 0] 337 | # HL = coeffs[0, w_coef//2:, 0:h_coef//2, 0] 338 | # HH = coeffs[0, w_coef//2:, h_coef//2:, 0] 339 | # print(coeffs.shape[1:]) 340 | # model = keras.Sequential() 341 | # model.add(layers.InputLayer(input_shape=coeffs.shape[1:])) 342 | # model.add(IDWT(name=name, splited=1)) 343 | # model.summary() 344 | 345 | # my_recon = model.predict(coeffs) 346 | # img_my_rec = my_recon[0, :, :, 0] 347 | # coeffs2 = pywt.wavedec2(img, name, level=1) 348 | 349 | # LL2 = coeffs2[0] 350 | # LH2 = coeffs2[1][0] 351 | # HL2 = coeffs2[1][1] 352 | # HH2 = coeffs2[1][2] 353 | 354 | # recon_pywt = pywt.waverec2(coeffs2, name) 355 | # img_pywt_rec = recon_pywt 356 | 357 | # print("LL mse ", mse.mse(LL, LL2)) 358 | # print("LH mse ", mse.mse(LH, LH2)) 359 | # print("HL mse ", mse.mse(HL, HL2)) 360 | # print("HH mse ", mse.mse(HH, HH2)) 361 | # print("img mse ", mse.mse(img_pywt_rec, img_my_rec)) 362 | 363 | # difference = cv2.absdiff(np.int32(img_my_rec), np.int32(img_pywt_rec)) 364 | # _, mask = cv2.threshold(difference.astype("uint8"), 0, 255, cv2.THRESH_BINARY) 365 | 366 | # cv2.imshow("diff", mask) 367 | # cv2.waitKey(0) 368 | # pass 369 | 370 | # 371 | # model = keras.Sequential() 372 | # model.add(layers.InputLayer(input_shape=coeffs.shape[1:])) 373 | # model.add(DWT(name=name, concat=0)) 374 | # model.add(IDWT(name=name, splited=1)) 375 | # model.summary() 376 | # 377 | # 378 | 379 | # a = model.predict(frog, steps=1) 380 | # # 381 | # approx = tf.image.convert_image_dtype(a[0, ..., 0], dtype=tf.float32) 382 | # with tf.Session() as sess: 383 | # img = sess.run(approx) 384 | # # pass 385 | # # 386 | # img = np.clip(img, 0, 255) 387 | # img = np.ceil(img) 388 | # img = img.astype("uint8") 389 | # with open(r"D:\TEMP\LL_python_layer.raw", "wb") as outfile: 390 | # outfile.write(img) # Write it 391 | 392 | # model = models.WaveletCifar10CNN.WaveletCNN((32,32,3), 10) 393 | # model.summary() 394 | -------------------------------------------------------------------------------- /src/tensorflow_wavelets/Layers/Threshold.py: -------------------------------------------------------------------------------- 1 | # Timor Leiderman 2021 Custom Activation Layer 2 | 3 | import tensorflow as tf 4 | import tensorflow_probability as tfp 5 | from tensorflow.keras import layers 6 | 7 | 8 | class Threshold(layers.Layer): 9 | """ 10 | Discrete Multi Wavlelets Transform 11 | Input: wave_name - name of the Wavele Filters (ghm, dd2) 12 | TODO: add support for more wavelets 13 | """ 14 | def __init__(self, algo='sure', mode='soft', **kwargs): 15 | super(Threshold, 
self).__init__(**kwargs) 16 | self.mode = mode.lower() 17 | self.algo = algo.lower() 18 | 19 | def build(self, input_shape): 20 | pass 21 | 22 | def call(self, inputs, training=None, mask=None): 23 | 24 | # DWT concat=1 25 | if inputs.shape[-1] == 1: 26 | ll_lh_hl_hh = tf.split(inputs, 2, axis=1) 27 | ll_hl = tf.split(ll_lh_hl_hh[0], 2, axis=2) 28 | lh_hh = tf.split(ll_lh_hl_hh[1], 2, axis=2) 29 | ll = ll_hl[0] 30 | lh = lh_hh[0] 31 | hl = ll_hl[1] 32 | hh = lh_hh[1] 33 | # DWT concat=0 34 | elif inputs.shape[-1] == 4: 35 | ll = inputs[:, :, :, 0] 36 | lh = inputs[:, :, :, 2] 37 | hl = inputs[:, :, :, 1] 38 | hh = inputs[:, :, :, 3] 39 | else: 40 | return None 41 | 42 | # calculate global threshold on the HH component 43 | med = tfp.stats.percentile(tf.abs(hh), 50) 44 | sigma = tf.math.divide(med, 0.67448975) 45 | 46 | if self.algo == 'sure': 47 | lambda_sqrt = tf.math.sqrt(2*tf.math.log(tf.constant([hh.shape[1]*hh.shape[2]], dtype=tf.float32))) 48 | threshold = tf.math.multiply(sigma, lambda_sqrt) 49 | else: 50 | # BayesShrink 51 | sigma_square = tf.math.square(sigma) 52 | var = tf.experimental.numpy.var(hh) 53 | var_square = tf.math.square(var) 54 | denominator = tf.math.sqrt(tf.maximum(tf.math.subtract(var_square, sigma_square), 0)) 55 | threshold = sigma_square / denominator 56 | 57 | if self.mode == 'hard': 58 | cond = tf.math.less(hh, threshold) 59 | mask = tf.where(cond, tf.zeros(tf.shape(hh)), tf.ones(tf.shape(hh))) 60 | hh_new = tf.multiply(hh, mask) 61 | else: 62 | hh_new = tfp.math.soft_threshold(hh, threshold) 63 | # concat everything back to one image 64 | if inputs.shape[-1] == 1: 65 | x = tf.concat([tf.concat([ll, lh], axis=1), tf.concat([hl, hh_new], axis=1)], axis=2) 66 | else: 67 | x = tf.concat([ll, lh, hl, hh_new], axis=-1) 68 | return x 69 | 70 | 71 | if __name__ == "__main__": 72 | pass 73 | 74 | import cv2 75 | from tensorflow.keras import Model 76 | from tensorflow_wavelets.Layers import DWT 77 | from tensorflow_wavelets.utils.cast import * 78 | import numpy as np 79 | from tensorflow_wavelets.utils.mse import mse 80 | 81 | img = cv2.imread("../../../Development/input/LennaGrey.png", 0) 82 | inputs = np.expand_dims(img, axis=0) 83 | inputs = np.expand_dims(inputs, axis=-1) 84 | # inputs = tf.cast(inputs, dtype=tf.float32) 85 | 86 | _, h, w, c = inputs.shape 87 | x_inp = layers.Input(shape=(h, w, c)) 88 | x = DWT.DWT(name="db2", concat=1)(x_inp) 89 | x = Threshold(algo='sure', mode='soft')(x) 90 | x = DWT.IDWT(name="db2", concat=1)(x) 91 | model = Model(x_inp, x, name="mymodel") 92 | model.run_eagerly = True 93 | model.summary() 94 | 95 | # 96 | out = model.predict(inputs) 97 | print(mse(img, out[0, ..., 0])) 98 | # cv2.imshow("orig", out[0, ..., 0].astype("uint8")) 99 | # cv2.waitKey(0) 100 | -------------------------------------------------------------------------------- /src/tensorflow_wavelets/Layers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Timorleiderman/tensorflow-wavelets/58cf7dc8e469e963a6c7dd0567e05925a7d3c65c/src/tensorflow_wavelets/Layers/__init__.py -------------------------------------------------------------------------------- /src/tensorflow_wavelets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Timorleiderman/tensorflow-wavelets/58cf7dc8e469e963a6c7dd0567e05925a7d3c65c/src/tensorflow_wavelets/__init__.py -------------------------------------------------------------------------------- 
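Usage note (an illustrative sketch, not part of the repository sources): the DWT1D/IDWT1D layers defined in Layers/DWT.py above can be chained into a small Keras model to check the forward/inverse round trip. The toy random signal, its shape (8, 64, 1) and the choice of the haar wavelet are assumptions made only for this example; for haar the printed reconstruction error should be close to floating-point precision.

import numpy as np
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow_wavelets.Layers.DWT import DWT1D, IDWT1D

# toy batch: 8 single-channel signals of length 64
x = np.random.rand(8, 64, 1).astype("float32")

model = keras.Sequential([
    layers.InputLayer(input_shape=(64, 1)),
    DWT1D(wavelet_name="haar"),   # -> (8, 32, 2): approximation and detail stacked on the channel axis
    IDWT1D(wavelet_name="haar"),  # -> (8, 64, 1): signal rebuilt from both sub-bands
])
model.summary()

y = model.predict(x)
print("max abs reconstruction error:", np.abs(x - y).max())

The same pattern, with the 2D DWT/IDWT layers and a Threshold activation in between, is what utils/models.py further below wraps as basic_dwt_idwt.
--------------------------------------------------------------------------------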
/src/tensorflow_wavelets/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Timorleiderman/tensorflow-wavelets/58cf7dc8e469e963a6c7dd0567e05925a7d3c65c/src/tensorflow_wavelets/utils/__init__.py -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/canny_edge_detector.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import os 3 | import cv2 4 | import matplotlib.pyplot as plt 5 | 6 | 7 | # defining the canny detector function 8 | 9 | # here weak_th and strong_th are thresholds for 10 | # double thresholding step 11 | def Canny_detector(img, weak_th=None, strong_th=None): 12 | # conversion of image to grayscale 13 | img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY) 14 | 15 | # Noise reduction step 16 | img = cv2.GaussianBlur(img, (5, 5), 1.4) 17 | 18 | # Calculating the gradients 19 | gx = cv2.Sobel(np.float32(img), cv2.CV_64F, 1, 0, 3) 20 | gy = cv2.Sobel(np.float32(img), cv2.CV_64F, 0, 1, 3) 21 | 22 | # Conversion of Cartesian coordinates to polar 23 | mag, ang = cv2.cartToPolar(gx, gy, angleInDegrees=True) 24 | 25 | # setting the minimum and maximum thresholds 26 | # for double thresholding 27 | mag_max = np.max(mag) 28 | if not weak_th: weak_th = mag_max * 0.1 29 | if not strong_th: strong_th = mag_max * 0.5 30 | 31 | # getting the dimensions of the input image 32 | height, width = img.shape 33 | 34 | # Looping through every pixel of the grayscale 35 | # image 36 | for i_x in range(width): 37 | for i_y in range(height): 38 | 39 | grad_ang = ang[i_y, i_x] 40 | grad_ang = abs(grad_ang - 180) if abs(grad_ang) > 180 else abs(grad_ang) 41 | 42 | # selecting the neighbours of the target pixel 43 | # according to the gradient direction 44 | # In the x axis direction 45 | if grad_ang <= 22.5: 46 | neighb_1_x, neighb_1_y = i_x - 1, i_y 47 | neighb_2_x, neighb_2_y = i_x + 1, i_y 48 | 49 | # top right (diagnol-1) direction 50 | elif grad_ang > 22.5 and grad_ang <= (22.5 + 45): 51 | neighb_1_x, neighb_1_y = i_x - 1, i_y - 1 52 | neighb_2_x, neighb_2_y = i_x + 1, i_y + 1 53 | 54 | # In y-axis direction 55 | elif grad_ang > (22.5 + 45) and grad_ang <= (22.5 + 90): 56 | neighb_1_x, neighb_1_y = i_x, i_y - 1 57 | neighb_2_x, neighb_2_y = i_x, i_y + 1 58 | 59 | # top left (diagnol-2) direction 60 | elif grad_ang > (22.5 + 90) and grad_ang <= (22.5 + 135): 61 | neighb_1_x, neighb_1_y = i_x - 1, i_y + 1 62 | neighb_2_x, neighb_2_y = i_x + 1, i_y - 1 63 | 64 | # Now it restarts the cycle 65 | elif grad_ang > (22.5 + 135) and grad_ang <= (22.5 + 180): 66 | neighb_1_x, neighb_1_y = i_x - 1, i_y 67 | neighb_2_x, neighb_2_y = i_x + 1, i_y 68 | 69 | # Non-maximum suppression step 70 | if width > neighb_1_x >= 0 and height > neighb_1_y >= 0: 71 | if mag[i_y, i_x] < mag[neighb_1_y, neighb_1_x]: 72 | mag[i_y, i_x] = 0 73 | continue 74 | 75 | if width > neighb_2_x >= 0 and height > neighb_2_y >= 0: 76 | if mag[i_y, i_x] < mag[neighb_2_y, neighb_2_x]: 77 | mag[i_y, i_x] = 0 78 | 79 | weak_ids = np.zeros_like(img) 80 | strong_ids = np.zeros_like(img) 81 | ids = np.zeros_like(img) 82 | 83 | # double thresholding step 84 | for i_x in range(width): 85 | for i_y in range(height): 86 | 87 | grad_mag = mag[i_y, i_x] 88 | 89 | if grad_mag < weak_th: 90 | mag[i_y, i_x] = 0 91 | elif strong_th > grad_mag >= weak_th: 92 | ids[i_y, i_x] = 1 93 | else: 94 | ids[i_y, i_x] = 2 95 | 96 | # finally returning the magnitude of 97 | # 
gradients of edges 98 | return mag 99 | 100 | 101 | if __name__ == "__main__": 102 | frame = cv2.imread('../input/Lenna_orig.png') 103 | 104 | # calling the designed function for 105 | # finding edges 106 | canny_img = Canny_detector(frame) 107 | 108 | # Displaying the input and output image 109 | plt.figure() 110 | f, plots = plt.subplots(2, 1) 111 | plots[0].imshow(frame) 112 | plots[1].imshow(canny_img) 113 | 114 | plt.show() -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/cast.py: -------------------------------------------------------------------------------- 1 | 2 | import numpy as np 3 | import math 4 | import tensorflow as tf 5 | 6 | 7 | def tf_1d_to_ndarray(data, datatype=tf.float64): 8 | 9 | with tf.Session() as sess: 10 | data = sess.run(data) 11 | return data 12 | 13 | 14 | def tf_to_ndarray(data, datatype=tf.float32): 15 | data = tf.image.convert_image_dtype(data[0, ..., 0], dtype=datatype) 16 | with tf.Session() as sess: 17 | data = sess.run(data) 18 | return data 19 | 20 | 21 | def tf_rgb_to_ndarray(data, datatype=tf.float32): 22 | data = tf.image.convert_image_dtype(data[0, ..., :], dtype=datatype) 23 | with tf.Session() as sess: 24 | data = sess.run(data) 25 | return data 26 | 27 | 28 | def tf2_rgb_to_ndarray(data, datatype=tf.float32): 29 | data = tf.image.convert_image_dtype(data[0, ..., :], dtype=datatype) 30 | return data 31 | 32 | 33 | def tf_rank4_to_ndarray(data, datatype=tf.float32): 34 | data = tf.image.convert_image_dtype(data[0, ..., 0], dtype=datatype) 35 | with tf.Session() as sess: 36 | data = sess.run(data) 37 | return data 38 | 39 | 40 | def tf_rank2_to_ndarray(data, datatype=tf.float32): 41 | data = tf.image.convert_image_dtype(data, dtype=datatype) 42 | with tf.Session() as sess: 43 | data = sess.run(data) 44 | return data 45 | 46 | 47 | def cast_like_matlab_uint8_2d_rgb(data): 48 | data = np.clip(data, 0, 255) 49 | h, w, c = data.shape 50 | for ch in range(c): 51 | for row in range(h): 52 | for col in range(w): 53 | frac, integ = math.modf(data[row, col, ch]) 54 | if frac > 0.5: 55 | data[row, col, ch] = np.ceil(data[row, col, ch]) 56 | elif frac <= 0.5: 57 | data[row, col, ch] = np.floor(data[row, col, ch]) 58 | 59 | return data.astype('uint8') 60 | 61 | 62 | def cast_like_matlab_uint8_2d(data): 63 | data = np.clip(data, 0, 255) 64 | h, w = data.shape 65 | for row in range(h): 66 | for col in range(w): 67 | frac, integ = math.modf(data[row,col]) 68 | if frac > 0.5: 69 | data[row, col] = np.ceil(data[row, col]) 70 | elif frac <= 0.5: 71 | data[row, col] = np.floor(data[row, col]) 72 | 73 | 74 | return data.astype('uint8') -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/data.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from tensorflow.keras.datasets import mnist 3 | from tensorflow.keras.utils import to_categorical 4 | 5 | 6 | def load_mnist(categorical=True, remove_n_samples=1000, expand_d=True): 7 | (x_train, y_train), (x_test, y_test) = mnist.load_data() 8 | 9 | # Remove images to get smaller dataset 10 | if remove_n_samples != 0: 11 | x_train = x_train[:remove_n_samples, :, :] 12 | y_train = y_train[:remove_n_samples] 13 | x_test = x_test[:remove_n_samples//2, :, :] 14 | y_test = y_test[:remove_n_samples//2] 15 | 16 | if categorical: 17 | y_train = to_categorical(y_train) 18 | y_test = to_categorical(y_test) 19 | 20 | x_train = x_train.astype('float32') / 
255.0 21 | x_test = x_test.astype('float32') / 255.0 22 | 23 | if expand_d: 24 | x_train = np.expand_dims(x_train, axis=-1) 25 | x_test = np.expand_dims(x_test, axis=-1) 26 | 27 | return (x_train, y_train), (x_test, y_test) -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/filters.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | 4 | 5 | def fs_farras(): 6 | 7 | a_lo_hi_rows_cols = list() 8 | s_lo_hi_rows_cols = list() 9 | 10 | a_lo_hi_rows_cols.append([ 11 | [0, 12 | -0.0883883476483200, 13 | 0.0883883476483200, 14 | 0.695879989034000, 15 | 0.695879989034000, 16 | 0.0883883476483200, 17 | -0.0883883476483200, 18 | 0.0112267921525400, 19 | 0.0112267921525400, 20 | 0], 21 | [0, 22 | -0.0112267921525400, 23 | 0.0112267921525400, 24 | 0.0883883476483200, 25 | 0.0883883476483200, 26 | -0.695879989034000, 27 | 0.695879989034000, 28 | -0.0883883476483200, 29 | -0.0883883476483200, 30 | 0] 31 | ]) 32 | 33 | a_lo_hi_rows_cols.append([ 34 | [0.0112267921525400, 35 | 0.0112267921525400, 36 | -0.0883883476483200, 37 | 0.0883883476483200, 38 | 0.695879989034000, 39 | 0.695879989034000, 40 | 0.0883883476483200, 41 | -0.0883883476483200, 42 | 0, 43 | 0], 44 | [0, 45 | 0, 46 | -0.0883883476483200, 47 | -0.0883883476483200, 48 | 0.695879989034000, 49 | -0.695879989034000, 50 | 0.0883883476483200, 51 | 0.0883883476483200, 52 | 0.0112267921525400, 53 | -0.0112267921525400] 54 | 55 | ]) 56 | s_lo_hi_rows_cols.append(list()) 57 | s_lo_hi_rows_cols.append(list()) 58 | 59 | s_lo_hi_rows_cols[0].append(a_lo_hi_rows_cols[0][0][::-1]) 60 | s_lo_hi_rows_cols[0].append(a_lo_hi_rows_cols[0][1][::-1]) 61 | s_lo_hi_rows_cols[1].append(a_lo_hi_rows_cols[1][0][::-1]) 62 | s_lo_hi_rows_cols[1].append(a_lo_hi_rows_cols[1][1][::-1]) 63 | 64 | return a_lo_hi_rows_cols, s_lo_hi_rows_cols 65 | 66 | 67 | def duelfilt(): 68 | a_lo_hi_rows_cols = list() 69 | s_lo_hi_rows_cols = list() 70 | a_lo_hi_rows_cols.append([ 71 | [0.0351638400000000, 72 | 0, 73 | -0.0883294200000000, 74 | 0.233890320000000, 75 | 0.760272370000000, 76 | 0.587518300000000, 77 | 0, 78 | -0.114301840000000, 79 | 0, 80 | 0], 81 | [0, 82 | 0, 83 | -0.114301840000000, 84 | 0, 85 | 0.587518300000000, 86 | -0.760272370000000, 87 | 0.233890320000000, 88 | 0.0883294200000000, 89 | 0, 90 | -0.0351638400000000] 91 | ]) 92 | a_lo_hi_rows_cols.append([ 93 | [0, 94 | 0, 95 | -0.114301840000000, 96 | 0, 97 | 0.587518300000000, 98 | 0.760272370000000, 99 | 0.233890320000000, 100 | -0.0883294200000000, 101 | 0, 102 | 0.0351638400000000, 103 | ], 104 | [-0.0351638400000000, 105 | 0, 106 | 0.0883294200000000, 107 | 0.233890320000000, 108 | -0.760272370000000, 109 | 0.587518300000000, 110 | 0, 111 | -0.114301840000000, 112 | 0, 113 | 0] 114 | ]) 115 | 116 | s_lo_hi_rows_cols.append(list()) 117 | s_lo_hi_rows_cols.append(list()) 118 | 119 | s_lo_hi_rows_cols[0].append(a_lo_hi_rows_cols[0][0][::-1]) 120 | s_lo_hi_rows_cols[0].append(a_lo_hi_rows_cols[0][1][::-1]) 121 | s_lo_hi_rows_cols[1].append(a_lo_hi_rows_cols[1][0][::-1]) 122 | s_lo_hi_rows_cols[1].append(a_lo_hi_rows_cols[1][1][::-1]) 123 | 124 | return a_lo_hi_rows_cols, s_lo_hi_rows_cols 125 | 126 | 127 | def ghm_w_mat(hight=512, width=512): 128 | # initialize the coefficients 129 | h0 = [[3/(5*math.sqrt(2)), 4/5], [-1/20, -3/(10*math.sqrt(2))]] 130 | 131 | h1 = [[3/(5*math.sqrt(2)), 0], [9/20, 1/math.sqrt(2)]] 132 | 133 | h2 = [[0, 0], [9/20, -3/(10*math.sqrt(2))]] 134 | 135 | h3 = 
[[0, 0], [-1/20, 0]] 136 | 137 | g0 = [[-1/20, -3/(10*math.sqrt(2))], [1/(10*math.sqrt(2)), 3/10]] 138 | 139 | g1 = [[9/20, -1/math.sqrt(2)], [-9/(10*math.sqrt(2)), 0]] 140 | 141 | g2 = [[9/20, -3/(10*math.sqrt(2))], [9/(10*math.sqrt(2)), -3/10]] 142 | 143 | g3 = [[-1/20, 0.0], [-1/(10*math.sqrt(2)), 0.0]] 144 | 145 | h_filter = [x+y+z+w for x, y, z, w in zip(h0, h1, h2, h3)] 146 | g_filter = [x+y+z+w for x, y, z, w in zip(g0, g1, g2, g3)] 147 | 148 | w = h_filter + g_filter 149 | 150 | w_mat = np.zeros((2*hight-4, 2*width)) 151 | 152 | last_filter1 = [x+y for x, y in zip(h2, h3)] 153 | last_filter2 = [x+y for x, y in zip(g2, g3)] 154 | last_filter3 = [x+y for x, y in zip(h0, h1)] 155 | last_filter4 = [x+y for x, y in zip(g0, g1)] 156 | 157 | last_fil12 = last_filter1 + last_filter2 158 | last_fil34 = last_filter3 + last_filter4 159 | zeros_between = np.zeros((4, 2*hight-8)) 160 | lat_box = np.concatenate([last_fil12, zeros_between, last_fil34], axis=1) 161 | for i in range((hight//2)-1): 162 | w_mat[4*i:4*(i+1), 4*(i+1)-4:4*(i+1)+4] = w 163 | 164 | w_mat = np.concatenate([w_mat, lat_box], axis=0) 165 | 166 | return w_mat 167 | 168 | 169 | def ghm(): 170 | # initialize the coefficients 171 | h0 = [[3/(5*math.sqrt(2)), 4/5], [-1/20, -3/(10*math.sqrt(2))]] 172 | 173 | h1 = [[3/(5*math.sqrt(2)), 0.0], [9/20, 1/math.sqrt(2)]] 174 | 175 | h2 = [[0.0, 0.0], [9/20, -3/(10*math.sqrt(2))]] 176 | 177 | h3 = [[0.0, 0.0], [-1/20, 0]] 178 | 179 | g0 = [[-1/20, -3/(10*math.sqrt(2))], [1/(10*math.sqrt(2)), 3/10]] 180 | 181 | g1 = [[9/20, -1/math.sqrt(2)], [-9/(10*math.sqrt(2)), 0]] 182 | 183 | g2 = [[9/20, -3/(10*math.sqrt(2))], [9/(10*math.sqrt(2)), -3/10]] 184 | 185 | g3 = [[-1/20, 0.0], [-1/(10*math.sqrt(2)), 0.0]] 186 | 187 | h_filter = [x+y+z+w for x, y, z, w in zip(h0, h1, h2, h3)] 188 | g_filter = [x+y+z+w for x, y, z, w in zip(g0, g1, g2, g3)] 189 | 190 | w = h_filter + g_filter 191 | hight, width = 512, 512 192 | w_mat = np.zeros((2*hight-4, 2*width)) 193 | 194 | last_filter1 = [x+y for x, y in zip(h2, h3)] 195 | last_filter2 = [x+y for x, y in zip(g2, g3)] 196 | last_filter3 = [x+y for x, y in zip(h0, h1)] 197 | last_filter4 = [x+y for x, y in zip(g0, g1)] 198 | 199 | last_fil12 = last_filter1 + last_filter2 200 | last_fil34 = last_filter3 + last_filter4 201 | zeros_between = np.zeros((4, 2*hight-8)) 202 | lat_box = np.concatenate([last_fil12, zeros_between, last_fil34], axis=1) 203 | for i in range((hight//2)-1): 204 | w_mat[4*i:4*(i+1), 4*(i+1)-4:4*(i+1)+4] = w 205 | 206 | # w_mat = np.concatenate([w_mat, lat_box], axis=0) 207 | 208 | return [h_filter[0], h_filter[1], g_filter[0], g_filter[1]] 209 | 210 | 211 | def ighm(): 212 | h0 = [0, 0.450000000000000, 0.450000000000000, 0.636396103067893, 213 | 0.424264068711929, -0.0500000000000000, -0.0500000000000000, 0.0707106781186548] 214 | h1 = [0, -0.212132034355964, -0.212132034355964, -0.300000000000000, 215 | 0.800000000000000, -0.212132034355964, -0.212132034355964, 0.300000000000000] 216 | g0 = [0, -0.0500000000000000, -0.0500000000000000, -0.0707106781186548, 217 | 0.424264068711929, 0.450000000000000, 0.450000000000000, -0.636396103067893] 218 | g1 = [0, 0, 0, 0, 0, 0.707106781186548, -0.707106781186548, 0] 219 | 220 | return [h0, h1, g0, g1] 221 | 222 | 223 | def dd2(height=512, width=512): 224 | c0 = (1+math.sqrt(3))/(4*math.sqrt(2)) 225 | c1 = (3+math.sqrt(3))/(4*math.sqrt(2)) 226 | c2 = (3-math.sqrt(3))/(4*math.sqrt(2)) 227 | c3 = (1-math.sqrt(3))/(4*math.sqrt(2)) 228 | 229 | w = [c0, c1, c2, c3] 230 | wi = [c3, -c2, c1, -c0] 231 | 
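# w holds the four Daubechies db2 (D4) scaling taps and wi the matching wavelet taps
# (the same coefficients in reverse order with alternating signs). The loop below
# stamps the 2x4 block [w; wi] into w_mat, sliding it two columns to the right for
# every pair of rows, and lat_box builds the final pair of rows with the filter
# wrapped around the matrix edge (periodic boundary handling) before it is appended
# to w_mat.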
w_mat = np.zeros((height-2, width)) 232 | zeros_between = np.zeros((2, height-4)) 233 | lat_box = np.concatenate([[w[2:], wi[2:]], zeros_between, [w[:2], wi[:2]]], axis=1) 234 | 235 | for i in range((height//2)-1): 236 | w_mat[2*i:2*(i+1), 2*(i+1)-2:2*(i+1)+2] = [w]+[wi] 237 | 238 | w_mat = np.concatenate([w_mat, lat_box], axis=0) 239 | return w_mat 240 | -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/helpers.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | from tensorflow_wavelets.utils.cast import * 3 | 4 | 5 | 6 | def split_wt_to_lllhhlhh(data): 7 | ll = tf.split(tf.split(data, 2, axis=1)[0], 2, axis=2)[0] 8 | lh = tf.split(tf.split(data, 2, axis=1)[0], 2, axis=2)[1] 9 | hl = tf.split(tf.split(data, 2, axis=1)[1], 2, axis=2)[0] 10 | hh = tf.split(tf.split(data, 2, axis=1)[1], 2, axis=2)[1] 11 | return [ll, lh, hl, hh] 12 | 13 | 14 | def split_to_ll_lhhlhh(data): 15 | split_0 = tf.split(tf.split(data, 2, axis=1)[0], 2, axis=2) 16 | split_1 = tf.split(tf.split(data, 2, axis=1)[1], 2, axis=2) 17 | lhhlhh = [split_0[1]] + split_1 18 | ll = split_0[0] 19 | return [ll, lhhlhh] 20 | 21 | 22 | def split_mwt_to_lllhhlhh(data): 23 | ll = tf.split(tf.split(data, 2, axis=1)[0], 2, axis=2)[0] 24 | lh = tf.split(tf.split(data, 2, axis=1)[0], 2, axis=2)[1] 25 | hl = tf.split(tf.split(data, 2, axis=1)[1], 2, axis=2)[0] 26 | hh = tf.split(tf.split(data, 2, axis=1)[1], 2, axis=2)[1] 27 | return [ll, lh, hl, hh] 28 | 29 | def reconstruct_w_leveln(w, level): 30 | w_rec = [[[[], []] for _ in range(2)] for __ in range(level+1)] 31 | ws01 = tf.split(tf.split(w, 2, axis=1)[0], 2, axis=2) 32 | ws02 = tf.split(tf.split(w, 2, axis=1)[1], 2, axis=2) 33 | w_split = [ws01]+[ws02] 34 | 35 | for m in range(2): 36 | for n in range(2): 37 | [lo, lhhlhh] = split_to_ll_lhhlhh(w_split[m][n]) 38 | w_rec[0][m][n] = lhhlhh 39 | j = 1 40 | for j in range(1, level): 41 | [lo, lhhlhh] = split_to_ll_lhhlhh(lo) 42 | w_rec[j][m][n] = lhhlhh 43 | w_rec[j+1][m][n] = lo 44 | 45 | return w_rec 46 | 47 | 48 | def reconstruct_w_level2(w): 49 | w_rec = [[[[], []] for _ in range(2)] for __ in range(2+1)] 50 | ws01 = tf.split(tf.split(w, 2, axis=1)[0], 2, axis=2) 51 | ws02 = tf.split(tf.split(w, 2, axis=1)[1], 2, axis=2) 52 | w_split = [ws01] + [ws02] 53 | 54 | for m in range(2): 55 | for n in range(2): 56 | ws11 = tf.split(tf.split(w_split[m][n], 2, axis=1)[0], 2, axis=2) 57 | ws12 = tf.split(tf.split(w_split[m][n], 2, axis=1)[1], 2, axis=2) 58 | w_rec[0][m][n] = [ws11[1]] + ws12 59 | ll_lh = tf.split(tf.split(ws11[0], 2, axis=1)[0], 2, axis=2) 60 | hl_hh = tf.split(tf.split(ws11[0], 2, axis=1)[1], 2, axis=2) 61 | ll = ll_lh[0] 62 | lh_hl_ll = [ll_lh[1]] + hl_hh 63 | w_rec[1][m][n] = lh_hl_ll 64 | w_rec[2][m][n] = ll 65 | return w_rec 66 | 67 | 68 | def roll_pad(data, pad_len): 69 | 70 | # circular shift 71 | # This procedure (periodic extension) can create 72 | # undesirable artifacts at the beginning and end 73 | # of the subband signals, however, it is the most 74 | # convenient solution. 
75 | # When the analysis and synthesis filters are exactly symmetric, 76 | # a different procedure (symmetric extension) can be used, 77 | # that avoids the artifacts associated with periodic extension 78 | 79 | data_roll = tf.roll(data, shift=-pad_len, axis=1) 80 | # zero padding 81 | 82 | data_roll_pad = tf.pad(data_roll, 83 | [[0, 0], [pad_len, pad_len], [0, 0], [0, 0]], 84 | mode='CONSTANT', 85 | constant_values=0) 86 | 87 | return data_roll_pad 88 | 89 | 90 | def fir_down_sample(data, fir, start=0, step=2): 91 | # input tensors rank 4 92 | 93 | data_tr = tf.transpose(data, perm=[0, 2, 1, 3]) 94 | conv = tf.nn.conv2d( 95 | data_tr, fir, padding='SAME', strides=[1, 1, 1, 1], 96 | ) 97 | conv_tr = tf.transpose(conv, perm=[0, 2, 1, 3]) 98 | 99 | # down sample 100 | lo_conv_ds = conv_tr[:, start:conv_tr.shape[1]:step, :, :] 101 | return lo_conv_ds 102 | 103 | 104 | def circular_shift_fix_crop(data, shift_fix, crop): 105 | 106 | circular_shift_fix = tf.math.add(data[:, 0:shift_fix, :, :], 107 | data[:, -shift_fix:, :, :]) 108 | 109 | fix = tf.concat([circular_shift_fix, data[:, shift_fix:, :, :]], axis=1) 110 | 111 | if crop == 0: 112 | res = fix 113 | else: 114 | res = fix[:, 0:crop, :, :] 115 | 116 | return res 117 | 118 | 119 | def list_to_tf(data): 120 | list_len = len(data) 121 | data_tf = tf.constant(data) 122 | data_tf = tf.reshape(data_tf, (1, list_len, 1, 1)) 123 | return data_tf 124 | 125 | 126 | def duel_filter_tf(duelfilt): 127 | 128 | tree1_lp_tf = list_to_tf(duelfilt[0][0]) 129 | tree1_hp_tf = list_to_tf(duelfilt[0][1]) 130 | tree2_lp_tf = list_to_tf(duelfilt[1][0]) 131 | tree2_hp_tf = list_to_tf(duelfilt[1][1]) 132 | 133 | tree1 = tf.stack((tree1_lp_tf, tree1_hp_tf), axis=0) 134 | tree2 = tf.stack((tree2_lp_tf, tree2_hp_tf), axis=0) 135 | duelfilt_tf = tf.stack((tree1, tree2), axis=0) 136 | return duelfilt_tf 137 | 138 | 139 | def construct_tf_filter(lod_row, hid_row, lod_col, hid_col): 140 | 141 | filt_len = len(lod_row) 142 | 143 | lod_row_tf = tf.constant(lod_row[::-1]) 144 | lod_row_tf = tf.reshape(lod_row_tf, (1, filt_len, 1, 1)) 145 | 146 | hid_row_tf = tf.constant(hid_row[::-1]) 147 | hid_row_tf = tf.reshape(hid_row_tf, (1, filt_len, 1, 1)) 148 | 149 | lod_col_tf = tf.constant(lod_col[::-1]) 150 | lod_col_tf = tf.reshape(lod_col_tf, (1, filt_len, 1, 1)) 151 | 152 | hid_col_tf = tf.constant(hid_col[::-1]) 153 | hid_col_tf = tf.reshape(hid_col_tf, (1, filt_len, 1, 1)) 154 | 155 | return lod_row_tf, hid_row_tf, lod_col_tf, hid_col_tf 156 | 157 | 158 | def add_sub(a, b): 159 | add = (a + b) / math.sqrt(2) 160 | sub = (a - b) / math.sqrt(2) 161 | return add, sub 162 | 163 | 164 | def incr(lst, i): 165 | return [x+i for x in lst] 166 | 167 | 168 | def split_to_ll_lh_hl_hh(x): 169 | 170 | x_split0 = tf.concat([x[:, 0::2, :, :], x[:, 1::2, :, :]], axis=1) 171 | x_split0_tr = tf.transpose(x_split0, perm=[0, 2, 1, 3]) 172 | x_split = tf.concat([x_split0_tr[:, 0::2, :, :], x_split0_tr[:, 1::2, :, :]], axis=1) 173 | res = tf.transpose(x_split, perm=[0, 2, 1, 3]) 174 | return res 175 | 176 | 177 | def permute_rows_4_2(x): 178 | slice_x_rc0 = tf.split(x, 2, axis=1)[0] 179 | slice_x_rc1 = tf.split(x, 2, axis=1)[1] 180 | 181 | slice_x_rc0_ds0 = slice_x_rc0[:, 0::2, :, :] 182 | slice_x_rc0_ds1 = slice_x_rc0[:, 1::2, :, :] 183 | slice_x_rc1_ds0 = slice_x_rc1[:, 0::2, :, :] 184 | slice_x_rc1_ds1 = slice_x_rc1[:, 1::2, :, :] 185 | 186 | slice_x_rc0_ds0_tr = tf.transpose(slice_x_rc0_ds0, perm=[0, 2, 1, 3]) 187 | slice_x_rc0_ds1_tr = tf.transpose(slice_x_rc0_ds1, perm=[0, 2, 1, 3]) 188 | 
slice_x_rc1_ds0_tr = tf.transpose(slice_x_rc1_ds0, perm=[0, 2, 1, 3]) 189 | slice_x_rc1_ds1_tr = tf.transpose(slice_x_rc1_ds1, perm=[0, 2, 1, 3]) 190 | stack_x = tf.stack([slice_x_rc0_ds0_tr, slice_x_rc0_ds1_tr, slice_x_rc1_ds0_tr, slice_x_rc1_ds1_tr], axis=3) 191 | # reshape for insertion between the rows 192 | perm_cols = tf.reshape(stack_x, shape=[-1, slice_x_rc0_ds0_tr.shape[1], slice_x_rc0_ds0_tr.shape[2]*4, 193 | slice_x_rc0_ds0_tr.shape[3]]) 194 | res = tf.transpose(perm_cols, perm=[0, 2, 1, 3]) 195 | 196 | return res 197 | 198 | 199 | def dd2_col_shuffel(x): 200 | h = int(x.shape[1])//2 201 | x_slic0 = x[:, 0:h, :, :] 202 | x_slic1 = x[:, h:, :, :] 203 | 204 | x_slic0_tr = tf.transpose(x_slic0, perm=[0, 2, 1, 3]) 205 | x_slic1_tr = tf.transpose(x_slic1, perm=[0, 2, 1, 3]) 206 | stack = tf.stack([x_slic0_tr, x_slic1_tr], axis=3) 207 | perm_cols = tf.reshape(stack, shape=[-1, x_slic0_tr.shape[1], x_slic0_tr.shape[2]*2, x_slic0_tr.shape[3]]) 208 | res = tf.transpose(perm_cols, perm=[0, 2, 1, 3]) 209 | return res 210 | 211 | 212 | def dd2_row_permutation(x): 213 | x_even = x[:, 0::2, :, :] 214 | x_odd = x[:, 1::2, :, :] 215 | 216 | res = tf.concat([x_even, x_odd], axis=1) 217 | return res 218 | 219 | 220 | def permute_rows_2_1(x): 221 | 222 | x_ds1 = x[:, 0::4, :, :] 223 | x_ds2 = x[:, 1::4, :, :] 224 | x_ds3 = x[:, 2::4, :, :] 225 | x_ds4 = x[:, 3::4, :, :] 226 | 227 | x_ds1 = tf.transpose(x_ds1, perm=[0, 2, 1, 3]) 228 | x_ds2 = tf.transpose(x_ds2, perm=[0, 2, 1, 3]) 229 | x_ds3 = tf.transpose(x_ds3, perm=[0, 2, 1, 3]) 230 | x_ds4 = tf.transpose(x_ds4, perm=[0, 2, 1, 3]) 231 | 232 | stack_wx_ds12 = tf.stack([x_ds1, x_ds2], axis=3) 233 | # reshape for insertion between the rows 234 | wx_ds12 = tf.reshape(stack_wx_ds12, shape=[-1, x_ds1.shape[1], x_ds1.shape[2]*2, x_ds1.shape[3]]) 235 | stack_wx_ds34 = tf.stack([x_ds3, x_ds4], axis=3) 236 | 237 | wx_ds34 = tf.reshape(stack_wx_ds34, shape=[-1, x_ds3.shape[1], x_ds3.shape[2]*2, x_ds3.shape[3]]) 238 | res = tf.concat([wx_ds12, wx_ds34], axis=2) 239 | res = tf.transpose(res, perm=[0, 2, 1, 3]) 240 | 241 | return res 242 | 243 | 244 | def pad_fir(x, fir): 245 | 246 | filt_len = fir.shape[1] 247 | x_pad = tf.pad(x, 248 | [[0, 0], [filt_len, filt_len], [0, 0], [0, 0]], 249 | mode='CONSTANT', 250 | constant_values=0) 251 | 252 | res = tf.nn.conv2d( 253 | x_pad, fir, padding='SAME', strides=[1, 1, 1, 1], 254 | ) 255 | 256 | return res 257 | 258 | 259 | def upsampler2d(x): 260 | """ 261 | up sampling with zero insertion between rows and columns 262 | :param x: 4 dim tensor (?, w, h, ch) 263 | :return: up sampled tensor with shape (?, 2*w, 2*h, ch) 264 | """ 265 | # create zero like tensor 266 | zero_tensor = tf.zeros_like(x) 267 | # stack both tensors 268 | stack_rows = tf.stack([x, zero_tensor], axis=3) 269 | # reshape for zero insertion between the rows 270 | stack_rows = tf.reshape(stack_rows, shape=[-1, x.shape[2], x.shape[1]*2, x.shape[3]]) 271 | # transpose in order to insert zeros for the columns 272 | stack_rows = tf.transpose(stack_rows, perm=[0, 2, 1, 3]) 273 | # create zero like tensor but now like the padded one 274 | zero_tensor_1 = tf.zeros_like(stack_rows) 275 | # stack both tensors 276 | stack_rows_cols = tf.stack([stack_rows, zero_tensor_1], axis=3) 277 | # reshape for zero insertion between the columns 278 | us_padded = tf.reshape(stack_rows_cols, shape=[-1, x.shape[2]*2, x.shape[1]*2, x.shape[3]]) 279 | # transpose back to normal 280 | us_padded = tf.transpose(us_padded, perm=[0, 2, 1, 3]) 281 | return us_padded 282 | 283 | 284 
| def conv_tr_conv_tr(x, fir1, fir2, pad="VALID"): 285 | conv_fir1 = tf.nn.conv2d(x, fir1, padding=pad, strides=[1, 1, 1, 1], ) 286 | conv_fir1_tr = tf.transpose(conv_fir1, perm=[0, 2, 1, 3]) 287 | conv_fir1_tr_conv_fir2 = tf.nn.conv2d(conv_fir1_tr, fir2, padding=pad, strides=[1, 1, 1, 1], ) 288 | conv_fir1_tr_conv_fir2_tr = tf.transpose(conv_fir1_tr_conv_fir2, perm=[0, 2, 1, 3]) 289 | return conv_fir1_tr_conv_fir2_tr 290 | 291 | 292 | def up_sample_fir(x, fir): 293 | # create zero like tensor 294 | x = tf.transpose(x, perm=[0, 2, 1, 3]) 295 | zero_tensor = tf.zeros_like(x) 296 | # stack both tensors 297 | stack_rows = tf.stack([x, zero_tensor], axis=3) 298 | # reshape for zero insertion between the rows 299 | stack_rows = tf.reshape(stack_rows, shape=[-1, x.shape[1], x.shape[2]*2, x.shape[3]]) 300 | 301 | conv = tf.nn.conv2d( 302 | stack_rows, fir, padding='SAME', strides=[1, 1, 1, 1], 303 | ) 304 | res = tf.transpose(conv, perm=[0, 2, 1, 3]) 305 | return res 306 | 307 | 308 | def up_sample_4_1(x): 309 | 310 | a = tf.split(x, 2, axis=2)[0] 311 | b = tf.split(x, 2, axis=2)[1] 312 | stack_a_b = tf.stack([a, b], axis=3) 313 | us = tf.reshape(stack_a_b, shape=[-1, a.shape[1], a.shape[2]*2, a.shape[3]]) 314 | us_tr = tf.transpose(us, perm=[0, 2, 1, 3]) 315 | us_l = tf.split(us_tr, 2, axis=2)[0] 316 | us_r = tf.split(us_tr, 2, axis=2)[1] 317 | stack_l_r = tf.stack([us_l, us_r], axis=3) 318 | us_us = tf.reshape(stack_l_r, shape=[-1, us_l.shape[1], us_l.shape[2]*2, x.shape[3]]) 319 | res = tf.transpose(us_us, perm=[0, 2, 1, 3]) 320 | 321 | return res 322 | 323 | 324 | def over_sample_rows(x): 325 | # create zero like tensor 326 | x = tf.transpose(x, perm=[0, 2, 1, 3]) 327 | x_sqrt = x*(1/math.sqrt(2)) 328 | # stack both tensors 329 | stack_rows = tf.stack([x, x_sqrt], axis=3) 330 | # reshape for zero insertion between the rows 331 | stack_rows = tf.reshape(stack_rows, shape=[-1, x.shape[1], x.shape[2]*2, x.shape[3]]) 332 | 333 | res = tf.transpose(stack_rows, perm=[0, 2, 1, 3]) 334 | return res 335 | 336 | 337 | def analysis_filter_bank2d(x, lod_row, hid_row, lod_col, hid_col): 338 | # parameters 339 | h = int(x.shape[1]) 340 | w = int(x.shape[2]) 341 | filt_len = int(lod_row.shape[1]) 342 | x_roll_padd = roll_pad(x, filt_len//2) 343 | 344 | lo_conv_ds = fir_down_sample(x_roll_padd, lod_row) 345 | hi_conv_ds = fir_down_sample(x_roll_padd, hid_row) 346 | 347 | # # crop to needed dims 348 | lo = circular_shift_fix_crop(lo_conv_ds, filt_len//2, h//2) 349 | hi = circular_shift_fix_crop(hi_conv_ds, filt_len//2, h//2) 350 | 351 | # next is the columns filtering 352 | lo_tr = tf.transpose(lo, perm=[0, 2, 1, 3]) 353 | hi_tr = tf.transpose(hi, perm=[0, 2, 1, 3]) 354 | 355 | lo_tr_roll_padd = roll_pad(lo_tr, filt_len//2) 356 | hi_tr_roll_padd = roll_pad(hi_tr, filt_len//2) 357 | 358 | lo_lo_conv_ds = fir_down_sample(lo_tr_roll_padd, lod_col) 359 | lo_hi_conv_ds = fir_down_sample(lo_tr_roll_padd, hid_col) 360 | hi_lo_conv_ds = fir_down_sample(hi_tr_roll_padd, lod_col) 361 | hi_hi_conv_ds = fir_down_sample(hi_tr_roll_padd, hid_col) 362 | 363 | lo_lo = circular_shift_fix_crop(lo_lo_conv_ds, filt_len//2, w//2) 364 | lo_hi = circular_shift_fix_crop(lo_hi_conv_ds, filt_len//2, w//2) 365 | hi_lo = circular_shift_fix_crop(hi_lo_conv_ds, filt_len//2, w//2) 366 | hi_hi = circular_shift_fix_crop(hi_hi_conv_ds, filt_len//2, w//2) 367 | 368 | lo_lo = tf.transpose(lo_lo, perm=[0, 2, 1, 3]) 369 | lo_hi = tf.transpose(lo_hi, perm=[0, 2, 1, 3]) 370 | hi_lo = tf.transpose(hi_lo, perm=[0, 2, 1, 3]) 371 | hi_hi = 
tf.transpose(hi_hi, perm=[0, 2, 1, 3]) 372 | 373 | return [lo_lo, [lo_hi, hi_lo, hi_hi]] 374 | 375 | 376 | def synthesis_filter_bank2d(ca, cd, lor_row, hir_row, lor_col, hir_col): 377 | 378 | h = int(ca.shape[1]) 379 | w = int(ca.shape[2]) 380 | filt_len = int(lor_row.shape[1]) 381 | 382 | ll = tf.transpose(ca, perm=[0, 2, 1, 3]) 383 | lh = tf.transpose(cd[0], perm=[0, 2, 1, 3]) 384 | hl = tf.transpose(cd[1], perm=[0, 2, 1, 3]) 385 | hh = tf.transpose(cd[2], perm=[0, 2, 1, 3]) 386 | 387 | ll_pad = tf.pad(ll, 388 | [[0, 0], [filt_len//2, filt_len//2], [0, 0], [0, 0]], 389 | mode='CONSTANT', 390 | constant_values=0) 391 | 392 | lh_pad = tf.pad(lh, 393 | [[0, 0], [filt_len//2, filt_len//2], [0, 0], [0, 0]], 394 | mode='CONSTANT', 395 | constant_values=0) 396 | 397 | hl_pad = tf.pad(hl, 398 | [[0, 0], [filt_len//2, filt_len//2], [0, 0], [0, 0]], 399 | mode='CONSTANT', 400 | constant_values=0) 401 | 402 | hh_pad = tf.pad(hh, 403 | [[0, 0], [filt_len//2, filt_len//2], [0, 0], [0, 0]], 404 | mode='CONSTANT', 405 | constant_values=0) 406 | 407 | ll_conv = up_sample_fir(ll_pad, lor_col) 408 | lh_conv = up_sample_fir(lh_pad, hir_col) 409 | hl_conv = up_sample_fir(hl_pad, lor_col) 410 | hh_conv = up_sample_fir(hh_pad, hir_col) 411 | 412 | ll_lh_add = tf.math.add(ll_conv, lh_conv) 413 | hl_hh_add = tf.math.add(hl_conv, hh_conv) 414 | 415 | ll_lh_crop = ll_lh_add[:, filt_len//2:-filt_len//2-2, :, :] 416 | hl_hh_crop = hl_hh_add[:, filt_len//2:-filt_len//2-2, :, :] 417 | 418 | ll_lh_fix_crop = circular_shift_fix_crop(ll_lh_crop, filt_len-2, 2*w) 419 | hl_hh_fix_crop = circular_shift_fix_crop(hl_hh_crop, filt_len-2, 2*w) 420 | 421 | ll_lh_fix_crop_roll = tf.roll(ll_lh_fix_crop, shift=1-filt_len//2, axis=1) 422 | hl_hh_fix_crop_roll = tf.roll(hl_hh_fix_crop, shift=1-filt_len//2, axis=1) 423 | 424 | ll_lh = tf.transpose(ll_lh_fix_crop_roll, perm=[0, 2, 1, 3]) 425 | hl_hh = tf.transpose(hl_hh_fix_crop_roll, perm=[0, 2, 1, 3]) 426 | 427 | ll_lh_pad = tf.pad(ll_lh, 428 | [[0, 0], [filt_len//2, filt_len//2], [0, 0], [0, 0]], 429 | mode='CONSTANT', 430 | constant_values=0) 431 | 432 | hl_hh_pad = tf.pad(hl_hh, 433 | [[0, 0], [filt_len//2, filt_len//2], [0, 0], [0, 0]], 434 | mode='CONSTANT', 435 | constant_values=0) 436 | 437 | ll_lh_conv = up_sample_fir(ll_lh_pad, lor_row) 438 | hl_hh_conv = up_sample_fir(hl_hh_pad, hir_row) 439 | 440 | ll_lh_hl_hh_add = tf.math.add(ll_lh_conv, hl_hh_conv) 441 | ll_lh_hl_hh_add_crop = ll_lh_hl_hh_add[:, filt_len//2:-filt_len//2-2, :, :] 442 | 443 | y = circular_shift_fix_crop(ll_lh_hl_hh_add_crop, filt_len-2, 2*h) 444 | y = tf.roll(y, 1-filt_len//2, axis=1) 445 | 446 | return y 447 | 448 | 449 | def analysis_filter_bank2d_ghm(x, lp1, lp2, hp1, hp2): 450 | filt_len = int(lp1.shape[1]) 451 | x_os = over_sample_rows(x) 452 | x_pad = tf.pad(x_os, [[0, 0], [filt_len, filt_len], [0, 0], [0, 0]], mode='CONSTANT', constant_values=0) 453 | 454 | lp1_ds = fir_down_sample(x_pad, lp1, filt_len-2) 455 | lp1_ds1 = lp1_ds[:, 0:lp1_ds.shape[1]-5:2, :, :] 456 | 457 | lp2_ds = fir_down_sample(x_pad, lp2, filt_len-2) 458 | lp2_ds1 = lp2_ds[:, 2:lp2_ds.shape[1]-3:2, :, :] 459 | 460 | hp1_ds = fir_down_sample(x_pad, hp1, filt_len-2) 461 | hp1_ds1 = hp1_ds[:, 0:lp1_ds.shape[1]-5:2, :, :] 462 | 463 | hp2_ds = fir_down_sample(x_pad, hp2, filt_len-2) 464 | hp2_ds1 = hp2_ds[:, 2:lp2_ds.shape[1]-3:2, :, :]*(-1) 465 | 466 | lp1_ds1_tr = tf.transpose(lp1_ds1, perm=[0, 2, 1, 3]) 467 | lp2_ds1_tr = tf.transpose(lp2_ds1, perm=[0, 2, 1, 3]) 468 | hp1_ds1_tr = tf.transpose(hp1_ds1, perm=[0, 2, 1, 3]) 469 | 
hp2_ds1_tr = tf.transpose(hp2_ds1, perm=[0, 2, 1, 3]) 470 | 471 | lp1_ds1_os = over_sample_rows(lp1_ds1_tr) 472 | lp2_ds1_os = over_sample_rows(lp2_ds1_tr) 473 | hp1_ds1_os = over_sample_rows(hp1_ds1_tr) 474 | hp2_ds1_os = over_sample_rows(hp2_ds1_tr) 475 | 476 | lp1_ds1_os_pad = tf.pad(lp1_ds1_os, 477 | [[0, 0], [filt_len, filt_len], [0, 0], [0, 0]], mode='CONSTANT', constant_values=0) 478 | lp2_ds1_os_pad = tf.pad(lp2_ds1_os, 479 | [[0, 0], [filt_len, filt_len], [0, 0], [0, 0]], mode='CONSTANT', constant_values=0) 480 | hp1_ds1_os_pad = tf.pad(hp1_ds1_os, 481 | [[0, 0], [filt_len, filt_len], [0, 0], [0, 0]], mode='CONSTANT', constant_values=0) 482 | hp2_ds1_os_pad = tf.pad(hp2_ds1_os, 483 | [[0, 0], [filt_len, filt_len], [0, 0], [0, 0]], mode='CONSTANT', constant_values=0) 484 | 485 | lp1_lp1_ds = fir_down_sample(lp1_ds1_os_pad, lp1, start=filt_len-2, step=4) 486 | lp1_hp1_ds = fir_down_sample(lp1_ds1_os_pad, hp1, start=filt_len-2, step=4) 487 | hp1_lp1_ds = fir_down_sample(hp1_ds1_os_pad, lp1, start=filt_len-2, step=4) 488 | hp1_hp1_ds = fir_down_sample(hp1_ds1_os_pad, hp1, start=filt_len-2, step=4) 489 | 490 | lp1_lp1_tr = tf.transpose(lp1_lp1_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 491 | lp1_hp1_tr = tf.transpose(lp1_hp1_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 492 | hp1_lp1_tr = tf.transpose(hp1_lp1_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 493 | hp1_hp1_tr = tf.transpose(hp1_hp1_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 494 | 495 | lp1_lp2_ds = fir_down_sample(lp1_ds1_os_pad, lp2, start=filt_len-2, step=4) 496 | lp1_hp2_ds = fir_down_sample(lp1_ds1_os_pad, hp2, start=filt_len-2, step=4) 497 | hp1_lp2_ds = fir_down_sample(hp1_ds1_os_pad, lp2, start=filt_len-2, step=4) 498 | hp1_hp2_ds = fir_down_sample(hp1_ds1_os_pad, hp2, start=filt_len-2, step=4) 499 | 500 | lp1_lp2_tr = tf.transpose(lp1_lp2_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 501 | lp1_hp2_tr = tf.transpose(lp1_hp2_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 502 | hp1_lp2_tr = tf.transpose(hp1_lp2_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 503 | hp1_hp2_tr = tf.transpose(hp1_hp2_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 504 | 505 | lp2_lp1_ds = fir_down_sample(lp2_ds1_os_pad, lp1, start=filt_len-2, step=4) 506 | lp2_hp1_ds = fir_down_sample(lp2_ds1_os_pad, hp1, start=filt_len-2, step=4) 507 | hp2_lp1_ds = fir_down_sample(hp2_ds1_os_pad, lp1, start=filt_len-2, step=4) 508 | hp2_hp1_ds = fir_down_sample(hp2_ds1_os_pad, hp1, start=filt_len-2, step=4) 509 | 510 | lp2_lp1_tr = tf.transpose(lp2_lp1_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 511 | lp2_hp1_tr = tf.transpose(lp2_hp1_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 512 | hp2_lp1_tr = tf.transpose(hp2_lp1_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 513 | hp2_hp1_tr = tf.transpose(hp2_hp1_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 514 | 515 | lp2_lp2_ds = fir_down_sample(lp2_ds1_os_pad, lp2, start=filt_len-2, step=4) 516 | lp2_hp2_ds = fir_down_sample(lp2_ds1_os_pad, hp2, start=filt_len-2, step=4) 517 | hp2_lp2_ds = fir_down_sample(hp2_ds1_os_pad, lp2, start=filt_len-2, step=4) 518 | hp2_hp2_ds = fir_down_sample(hp2_ds1_os_pad, hp2, start=filt_len-2, step=4) 519 | 520 | lp2_lp2_tr = tf.transpose(lp2_lp2_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 521 | lp2_hp2_tr = tf.transpose(lp2_hp2_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 522 | hp2_lp2_tr = tf.transpose(hp2_lp2_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 523 | hp2_hp2_tr = tf.transpose(hp2_hp2_ds[:, :-3, :, :], perm=[0, 2, 1, 3]) 524 | 525 | res = [[lp1_lp1_tr, lp1_hp1_tr, hp1_lp1_tr, hp1_hp1_tr], 526 | [lp1_lp2_tr, lp1_hp2_tr, hp1_lp2_tr, hp1_hp2_tr], 527 | [lp2_lp1_tr, lp2_hp1_tr, hp2_lp1_tr, 
hp2_hp1_tr], 528 | [lp2_lp2_tr, lp2_hp2_tr, hp2_lp2_tr, hp2_hp2_tr], 529 | ] 530 | return res 531 | 532 | 533 | def synthesis_filter_bank2d_ghm_mult(x, w_mat): 534 | 535 | ll = tf.split(tf.split(x, 2, axis=1)[0], 2, axis=2)[0] 536 | lh = tf.split(tf.split(x, 2, axis=1)[0], 2, axis=2)[1] 537 | hl = tf.split(tf.split(x, 2, axis=1)[1], 2, axis=2)[0] 538 | hh = tf.split(tf.split(x, 2, axis=1)[1], 2, axis=2)[1] 539 | 540 | ll = up_sample_4_1(ll) 541 | lh = up_sample_4_1(lh) 542 | hl = up_sample_4_1(hl) 543 | hh = up_sample_4_1(hh) 544 | 545 | recon_1 = tf.concat([tf.concat([ll, lh], axis=2), tf.concat([hl, hh], axis=2)], axis=1) 546 | recon_1_tr = tf.transpose(recon_1, perm=[0, 2, 1, 3]) 547 | 548 | perm_cols = permute_rows_4_2(recon_1_tr) 549 | 550 | cros_w_x = tf.einsum('fijc,bjkc->bikc', w_mat, perm_cols) 551 | 552 | cros_w_x_ds = cros_w_x[:, 0::2, :, :] 553 | cros_w_x_ds_tr = tf.transpose(cros_w_x_ds, perm=[0, 2, 1, 3]) 554 | perm_rows = permute_rows_4_2(cros_w_x_ds_tr) 555 | 556 | cross_w_perm_rows = tf.einsum('fijc,bjkc->bikc', w_mat, perm_rows) 557 | 558 | res = cross_w_perm_rows[:, 0::2, :, :] 559 | return res 560 | 561 | 562 | def analysis_filter_bank2d_ghm_mult(x, w_mat): 563 | # parameters 564 | x_os = over_sample_rows(x) 565 | cros_w_x = tf.einsum('fijc,bjkc->bikc', w_mat, x_os) 566 | 567 | # cros_w_x = tf.matmul(w_mat, x_os) 568 | perm_rows = permute_rows_2_1(cros_w_x) 569 | perm_rows_tr = tf.transpose(perm_rows, perm=[0, 2, 1, 3]) 570 | perm_rows_os = over_sample_rows(perm_rows_tr) 571 | 572 | z_w_x = tf.einsum('fijc,bjkc->bikc', w_mat, perm_rows_os) 573 | perm_cols = permute_rows_2_1(z_w_x) 574 | perm_cols = tf.transpose(perm_cols, perm=[0, 2, 1, 3]) 575 | 576 | ll = tf.split(tf.split(perm_cols, 2, axis=1)[0], 2, axis=2)[0] 577 | lh = tf.split(tf.split(perm_cols, 2, axis=1)[0], 2, axis=2)[1] 578 | hl = tf.split(tf.split(perm_cols, 2, axis=1)[1], 2, axis=2)[0] 579 | hh = tf.split(tf.split(perm_cols, 2, axis=1)[1], 2, axis=2)[1] 580 | 581 | ll = split_to_ll_lh_hl_hh(ll) 582 | lh = split_to_ll_lh_hl_hh(lh) 583 | hl = split_to_ll_lh_hl_hh(hl) 584 | hh = split_to_ll_lh_hl_hh(hh) 585 | 586 | res = tf.concat([tf.concat([ll, lh], axis=2), tf.concat([hl, hh], axis=2)], axis=1) 587 | return res 588 | 589 | 590 | def analysis_filter_bank2d_dd2_mult(x, w_mat): 591 | # parameters 592 | 593 | cros_w_x = tf.einsum('fijc,bjkc->bikc', w_mat, x) 594 | 595 | perm_rows = dd2_row_permutation(cros_w_x) 596 | perm_rows_tr = tf.transpose(perm_rows, perm=[0, 2, 1, 3]) 597 | 598 | z_w_x = tf.einsum('fijc,bjkc->bikc', w_mat, perm_rows_tr) 599 | perm_cols = dd2_row_permutation(z_w_x) 600 | 601 | res = tf.transpose(perm_cols, perm=[0, 2, 1, 3]) 602 | return res 603 | 604 | 605 | def synthesis_filter_bank2d_dd2_mult(x, w_mat): 606 | 607 | x_tr = tf.transpose(x, perm=[0, 2, 1, 3]) 608 | x_col_shufl = dd2_col_shuffel(x_tr) 609 | 610 | cros_w_x = tf.einsum('fijc,bjkc->bikc', w_mat, x_col_shufl) 611 | cros_w_x_tr = tf.transpose(cros_w_x, perm=[0, 2, 1, 3]) 612 | 613 | x_row_shufl = dd2_col_shuffel(cros_w_x_tr) 614 | res = tf.einsum('fijc,bjkc->bikc', w_mat, x_row_shufl) 615 | 616 | return res 617 | -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/models.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | from tensorflow import keras 3 | from tensorflow.keras import layers 4 | from tensorflow.keras.layers import AveragePooling2D 5 | 6 | import tensorflow_wavelets.Layers.DWT as DWT 7 | 
import tensorflow_wavelets.Layers.DMWT as DMWT 8 | import tensorflow_wavelets.Layers.DTCWT as DTCWT 9 | import tensorflow_wavelets.Layers.Threshold as Activation 10 | from tensorflow.keras.models import Model 11 | 12 | 13 | def basic_dwt_idwt(input_shape, wave_name="db2", eagerly=False, threshold=True, mode='soft', algo='sure', concat = True): 14 | # load DWT IDWT model 15 | model = keras.Sequential() 16 | model.add(layers.InputLayer(input_shape=input_shape)) 17 | model.add(DWT.DWT(wavelet_name=wave_name, concat = concat)) 18 | if threshold: 19 | model.add(Activation.Threshold(algo=algo, mode=mode)) 20 | model.add(DWT.IDWT(wavelet_name=wave_name, concat = concat)) 21 | 22 | # for debug with break points 23 | model.run_eagerly = eagerly 24 | return model 25 | 26 | def basic_dwt_idwt_1d(input_shape, wave_name="db2", eagerly=False, threshold=True, mode='soft', algo='sure', concat = True): 27 | # load DWT IDWT model 28 | model = keras.Sequential() 29 | model.add(layers.InputLayer(input_shape=input_shape)) 30 | model.add(DWT.DWT1D(wavelet_name=wave_name)) 31 | if threshold: 32 | model.add(Activation.Threshold(algo=algo, mode=mode)) 33 | model.add(DWT.IDWT1D(wavelet_name=wave_name)) 34 | 35 | # for debug with break points 36 | model.run_eagerly = eagerly 37 | return model 38 | 39 | def basic_dmwt(input_shape, nb_classes=10, wave_name="ghm", eagerly=False): 40 | 41 | x_input = keras.Input(shape=input_shape) 42 | x = DMWT.DMWT(wavelet_name=wave_name)(x_input) 43 | x = layers.Flatten()(x) 44 | x = layers.Dense(nb_classes, activation="softmax")(x) 45 | model = Model(x_input, x, name="mymodel") 46 | # for debug with break points 47 | model.run_eagerly = eagerly 48 | return model 49 | 50 | 51 | def basic_dtcwt(input_shape, nb_classes=10, level=2, eagerly=False): 52 | 53 | cplx_input = keras.Input(shape=input_shape) 54 | x = DTCWT.DTCWT(level)(cplx_input) 55 | x = layers.Flatten()(x) 56 | x = layers.Dense(nb_classes, activation="softmax")(x) 57 | model = Model(cplx_input, x, name="mymodel") 58 | # for debug with break points 59 | model.run_eagerly = eagerly 60 | return model 61 | 62 | 63 | class AutocodeBasic(Model): 64 | 65 | def __init__(self, latent_dim, width=28, height=28): 66 | super(AutocodeBasic, self).__init__() 67 | 68 | self.latent_dim = latent_dim 69 | 70 | self.encoder = tf.keras.Sequential([ 71 | layers.Flatten(), 72 | layers.Dense(latent_dim, activation='relu'), 73 | ]) 74 | 75 | self.decoder = tf.keras.Sequential([ 76 | layers.Dense(width*height, activation='sigmoid'), 77 | layers.Reshape((width, height, 1)), 78 | ]) 79 | 80 | def get_config(self): 81 | return {"latent_dim", self.latent_dim} 82 | 83 | def call(self, x): 84 | encoded = self.encoder(x) 85 | decoded = self.decoder(encoded) 86 | return decoded 87 | 88 | 89 | class AutocodeBasicDWT(Model): 90 | def get_config(self): 91 | pass 92 | 93 | def __init__(self, latent_dim, width=28, height=28, wave_name="db2"): 94 | super(AutocodeBasicDWT, self).__init__() 95 | 96 | self.latent_dim = latent_dim 97 | 98 | self.encoder = tf.keras.Sequential([ 99 | DWT.DWT(wavelet_name=wave_name), 100 | Activation.Threshold(), 101 | DWT.IDWT(wavelet_name=wave_name), 102 | layers.Flatten(), 103 | layers.Dense(latent_dim, activation='relu'), 104 | ]) 105 | 106 | self.decoder = tf.keras.Sequential([ 107 | layers.Dense(width*height, activation='sigmoid'), 108 | layers.Reshape((width, height, 1)), 109 | ]) 110 | 111 | def call(self, x): 112 | encoded = self.encoder(x) 113 | decoded = self.decoder(encoded) 114 | return decoded 115 | 116 | 117 | 118 | class 
AveragePooling2DPyramid(tf.keras.Model): 119 | """ 120 | """ 121 | def __init__(self, batch_size, width, height, **kwargs): 122 | super(AveragePooling2DPyramid, self).__init__(**kwargs) 123 | self.batch_size = batch_size 124 | self.width = width 125 | self.height = height 126 | 127 | def build(self, input_shape): 128 | super(AveragePooling2DPyramid, self).build(input_shape) 129 | 130 | def call(self, inputs, training=None, mask=None): 131 | 132 | im1_4 = inputs 133 | im1_3 = AveragePooling2D(pool_size=2, strides=2, padding='same')(im1_4) 134 | im1_2 = AveragePooling2D(pool_size=2, strides=2, padding='same')(im1_3) 135 | im1_1 = AveragePooling2D(pool_size=2, strides=2, padding='same')(im1_2) 136 | im1_0 = AveragePooling2D(pool_size=2, strides=2, padding='same')(im1_1) 137 | 138 | return im1_0, im1_1, im1_2, im1_3 139 | 140 | -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/mse.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def mse(imageA, imageB): 5 | 6 | err = np.sum((imageA.astype("float") - imageB.astype("float")) ** 2) 7 | err /= float(imageA.shape[0] * imageA.shape[1]) 8 | 9 | return err 10 | 11 | def mse_1d(a, b): 12 | """Mean Squared Error calculation.""" 13 | return np.mean((a - b) ** 2) -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/plot.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | 5 | def plot_n_examples(x, y, start=0, stop=9, p=None): 6 | if stop < start: 7 | print("stop value mast be bigger then start") 8 | return 9 | 10 | indices = range(start, stop) 11 | y = np.argmax(y, axis=1) 12 | if p is None: 13 | p = y 14 | plt.figure(figsize=(10, 5)) 15 | for i, index in enumerate(indices): 16 | plt.subplot(2, 5, i+1) 17 | plt.imshow(x[index].reshape((28, 28)), cmap='binary') 18 | plt.xticks([]) 19 | plt.yticks([]) 20 | if y[index] == p[index]: 21 | col = 'g' 22 | else: 23 | col = 'r' 24 | plt.xlabel(str(p[index]), color=col) 25 | return plt 26 | 27 | 28 | def plot_random_examples(x, y, p=None): 29 | indices = np.random.choice(range(0, x.shape[0]), 10) 30 | y = np.argmax(y, axis=1) 31 | if p is None: 32 | p = y 33 | plt.figure(figsize=(10, 5)) 34 | for i, index in enumerate(indices): 35 | plt.subplot(2, 5, i+1) 36 | plt.imshow(x[index].reshape((28, 28)), cmap='binary') 37 | plt.xticks([]) 38 | plt.yticks([]) 39 | if y[index] == p[index]: 40 | col = 'g' 41 | else: 42 | col = 'r' 43 | plt.xlabel(str(p[index]), color=col) 44 | return plt -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/psnt_hvs.py: -------------------------------------------------------------------------------- 1 | from math import log10, sqrt 2 | import cv2 3 | import numpy as np 4 | from psnr_hvsm import psnr_hvs_hvsm 5 | from skimage import feature 6 | import canny_edge_detector 7 | 8 | 9 | def psnr_hvsm_e(ref, img): 10 | 11 | # convert to yuv color space and pass luma 12 | orig_yuv = cv2.cvtColor(ref, cv2.COLOR_RGB2YUV) 13 | img_yuv = cv2.cvtColor(img, cv2.COLOR_RGB2YUV) 14 | 15 | psnr_hvs, psnr_hvsm = psnr_hvs_hvsm(orig_yuv[:, :, 0].astype('float64')/255, img_yuv[:, :, 0].astype('float64')/255) 16 | 17 | return psnr_hvs, psnr_hvsm 18 | 19 | 20 | def psnr_ed(ref, img): 21 | # ref = ref.astype('float64') 22 | # img = img.astype('float64') 23 | 24 | 
ref_edge = canny_edge_detector.Canny_detector(ref) 25 | img_edge = canny_edge_detector.Canny_detector(img) 26 | 27 | mse = np.mean((ref_edge-img_edge)**2) 28 | 29 | if (mse == 0): # MSE is zero means no noise is present in the signal . 30 | return 100 31 | max_pixel = 255.0 32 | 33 | psnr_ed = 10 * log10(max_pixel**2 / mse) 34 | 35 | return psnr_ed 36 | 37 | 38 | def psnr_e(ref, img): 39 | ref = ref.astype('float64') 40 | img = img.astype('float64') 41 | 42 | M = ref.shape[0] 43 | N = ref.shape[1] 44 | 45 | er = (1/(M*N)) * np.sum((ref[:, :, 0] - img[:, :, 0]) ** 2) 46 | eg = (1/(M*N)) * np.sum((ref[:, :, 1] - img[:, :, 1]) ** 2) 47 | eb = (1/(M*N)) * np.sum((ref[:, :, 2] - img[:, :, 2]) ** 2) 48 | 49 | mse = (er + eg + eb)/3 50 | 51 | max_pixel = 255.0 52 | psnr_e = 10 * log10(max_pixel**2/mse) 53 | return psnr_e 54 | 55 | 56 | def psnr_s(ref, img): 57 | ref = ref.astype('float64') 58 | img = img.astype('float64') 59 | 60 | h, w, c = ref.shape 61 | 62 | # dividing the image in equal size and non overlapping square regions 63 | n = 8 64 | m = 8 65 | 66 | ref_blocks = np.array([ref[i:i + n, j:j + n] for j in range(0, w, n) for i in range(0, h, m)]) 67 | img_blocks = np.array([img[i:i + n, j:j + n] for j in range(0, w, n) for i in range(0, h, m)]) 68 | 69 | X = list() 70 | for ref_block, img_block in zip(ref_blocks, img_blocks): 71 | Xa = 0.5*(np.mean(ref_block) - np.mean(img_block))**2 72 | Xp = 0.25*(np.max(ref_block) - np.max(img_block))**2 73 | Xb = 0.25*(np.min(ref_block) - np.min(img_block))**2 74 | X.append(Xa+Xp+Xb) 75 | 76 | mse = np.mean(X) 77 | if (mse == 0): # MSE is zero means no noise is present in the signal . 78 | return 100 79 | max_pixel = 255.0 80 | psnr_s = 10 * log10(max_pixel**2/mse) 81 | return psnr_s 82 | 83 | 84 | def psnr(ref, img): 85 | ref = ref.astype('float64') 86 | img = img.astype('float64') 87 | 88 | mse = np.mean((ref - img) ** 2) 89 | if (mse == 0): # MSE is zero means no noise is present in the signal . 90 | # Therefore PSNR have no importance. 
91 | return 100 92 | max_pixel = 255.0 93 | psnr = 20 * log10(max_pixel / sqrt(mse)) 94 | return psnr 95 | 96 | 97 | def main(): 98 | original = cv2.imread("../input/Lenna_orig.png") 99 | noise = cv2.imread("../input/Lenna_salt_pepper.png") 100 | 101 | compressed_10 = cv2.imread("../input/Lenna_comp_10.jpg", 1) 102 | compressed_100 = cv2.imread("../input/Lenna_comp_100.jpg", 1) 103 | 104 | value_e = psnr_e(original, noise) 105 | print(f"PSNR_e value compressed quality is {value_e} dB") 106 | value_s = psnr_s(original, noise) 107 | print(f"PSNR_s value compressed quality is {value_s} dB") 108 | value_ed = psnr_ed(original, noise) 109 | print(f"PSNR_ed value compressed quality is {value_ed} dB") 110 | 111 | value = psnr(original, noise) 112 | print(f"PSNR value compressed quality is {value} dB") 113 | 114 | print(value_e*0.32 + value_ed*0.38 + value_s*0.3) 115 | print(psnr_hvsm_e(original, noise)) 116 | 117 | # value = psnr_e(original, compressed_10) 118 | # print(f"PSNR_e value compressed 10% quality is {value} dB") 119 | # 120 | # value = psnr_s(original, compressed_10) 121 | # print(f"PSNR_s value compressed 10% quality is {value} dB") 122 | # 123 | # value = psnr_ed(original, compressed_10) 124 | # print(f"PSNR_ed value compressed 10% quality is {value} dB") 125 | # 126 | # 127 | # value = psnr(original, compressed_10) 128 | # print(f"PSNR value compressed 10% quality is {value} dB") 129 | 130 | 131 | # print(psnr_hvsm_e(original, noise)) 132 | # value = psnr(original, compressed_100) 133 | # print(f"PSNR value compressed 100% quality is {value} dB") 134 | 135 | 136 | if __name__ == "__main__": 137 | main() -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/salt_pepper.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cv2 3 | 4 | 5 | def addsalt_pepper(img, SNR): 6 | 7 | img_ = img.copy() 8 | w, h, c = img_.shape 9 | mask = np.random.choice((0, 1, 2), size=(w, h, 1), p=[SNR, (1 - SNR) / 2., (1 - SNR) / 2.]) 10 | # Copy by channel to have the same shape as img 11 | Mask = np.repeat(mask, c, axis=2) 12 | # salt noise 13 | img_[Mask == 1] = 255 14 | # pepper noise 15 | img_[Mask == 2] = 0 16 | return img_ 17 | 18 | 19 | if __name__ == "__main__": 20 | img = cv2.imread("../input/Lenna_orig.png") 21 | cv2.imwrite( "../input/Lenna_salt_pepper.png", addsalt_pepper(img, 0.9)) -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/ssim.py: -------------------------------------------------------------------------------- 1 | from skimage.metrics import structural_similarity 2 | 3 | 4 | def ssim(ref_img, rend_img, data_range=255, multichannel=False): 5 | mssim = structural_similarity(ref_img, rend_img, data_range=data_range, multichannel=multichannel) 6 | return mssim 7 | 8 | 9 | if __name__ == "__main__": 10 | import cv2 11 | import numpy as np 12 | import matplotlib.pyplot as plt 13 | from skimage import img_as_float 14 | from skimage.metrics import mean_squared_error 15 | img = cv2.imread("../input/Lenna_orig.png") 16 | img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) 17 | # img = cv2.cvtColor(img, cv2.COLOR_BGRA2GRAY) 18 | multichannel = True 19 | if multichannel: 20 | rows, cols, ch = img.shape 21 | else: 22 | rows, cols = img.shape 23 | img = img_as_float(img) 24 | 25 | noise = np.ones_like(img) * 0.2 * (img.max() - img.min()) 26 | noise[np.random.random(size=noise.shape) > 0.5] *= -1 27 | 28 | img_noise = img + 
noise 29 | img_const = img + abs(noise) 30 | 31 | fig, axes = plt.subplots(nrows=1, ncols=3, figsize=(10, 4), 32 | sharex=True, sharey=True) 33 | ax = axes.ravel() 34 | 35 | mse_none = mean_squared_error(img, img) 36 | ssim_none = ssim(img, img, data_range=img.max() - img.min(), multichannel=multichannel) 37 | 38 | mse_noise = mean_squared_error(img, img_noise) 39 | ssim_noise = ssim(img, img_noise, 40 | data_range=img_noise.max() - img_noise.min(), multichannel=multichannel) 41 | 42 | mse_const = mean_squared_error(img, img_const) 43 | ssim_const = ssim(img, img_const, 44 | data_range=img_const.max() - img_const.min(), multichannel=multichannel) 45 | label = 'MSE: {:.2f}, SSIM: {:.2f}' 46 | 47 | ax[0].imshow(img, cmap=plt.cm.gray, vmin=0, vmax=1) 48 | ax[0].set_xlabel(label.format(mse_none, ssim_none)) 49 | ax[0].set_title('Original image') 50 | 51 | ax[1].imshow(img_noise, cmap=plt.cm.gray, vmin=0, vmax=1) 52 | ax[1].set_xlabel(label.format(mse_noise, ssim_noise)) 53 | ax[1].set_title('Image with noise') 54 | 55 | ax[2].imshow(img_const, cmap=plt.cm.gray, vmin=0, vmax=1) 56 | ax[2].set_xlabel(label.format(mse_const, ssim_const)) 57 | ax[2].set_title('Image plus constant') 58 | 59 | plt.tight_layout() 60 | plt.show() -------------------------------------------------------------------------------- /src/tensorflow_wavelets/utils/write_raw.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | import tensorflow as tf 4 | from tensorflow_wavelets.utils.cast import cast_like_matlab_uint8_2d 5 | 6 | 7 | def write_raw(file_path, data): 8 | with open(file_path, "wb") as outfile: 9 | outfile.write(data) # Write it 10 | 11 | 12 | def tensor_to_write_raw(file_path, tensor_data, dastype='uint8'): 13 | out_img = tf.image.convert_image_dtype(tensor_data, dtype=tf.float32) 14 | with tf.Session() as sess: 15 | out_img = sess.run(out_img) 16 | out_img = cast_like_matlab_uint8_2d(out_img) 17 | write_raw(file_path, out_img) 18 | -------------------------------------------------------------------------------- /src/test.py: -------------------------------------------------------------------------------- 1 | import os 2 | import cv2 3 | import unittest 4 | 5 | 6 | from tensorflow_wavelets.utils.models import * 7 | from tensorflow_wavelets.utils.mse import * 8 | from tensorflow_wavelets.utils.data import * 9 | 10 | # install dependencies on venv 11 | # conda install scipy 12 | # conda install matplotlib 13 | # pip install psnr-hvsm 14 | # conda install scikit-image 15 | # conda install -c conda-forge opencv 16 | 17 | 18 | # unitests for Development 19 | class TestSrc(unittest.TestCase): 20 | ''' 21 | run from src dir. 
22 | if not change the path of lenna_input_path 23 | ''' 24 | lenna_input_path = "../src/input/LennaGrey.png" 25 | 26 | def test_dwt_idwt_hard_thresh(self): 27 | 28 | img = cv2.imread(self.lenna_input_path, 0) 29 | self.assertIsNotNone(img, "LennaGrey.png not found in " + self.lenna_input_path) 30 | img_ex1 = np.expand_dims(img, axis=-1) 31 | img_ex2 = np.expand_dims(img_ex1, axis=0) 32 | model = basic_dwt_idwt(input_shape=img_ex1.shape, wave_name="db2", eagerly=True, threshold=True, mode='hard', algo='sure') 33 | rec = model.predict(img_ex2) 34 | rec = rec[0, ..., 0] 35 | mse_lim = 3.5 36 | self.assertLess(mse(img, rec), mse_lim, "Should be less than " + str(mse_lim)) 37 | 38 | def test_dwt_idwt(self): 39 | 40 | img = cv2.imread(self.lenna_input_path, 0) 41 | self.assertIsNotNone(img, "LennaGrey.png not found in " + self.lenna_input_path) 42 | img_ex1 = np.expand_dims(img, axis=-1) 43 | img_ex2 = np.expand_dims(img_ex1, axis=0) 44 | model = basic_dwt_idwt(input_shape=img_ex1.shape, wave_name="db2", eagerly=True, threshold=False) 45 | rec = model.predict(img_ex2) 46 | rec = rec[0, ..., 0] 47 | mse_lim = 1e-3 48 | self.assertLess(mse(img, rec), mse_lim, "Should be less than " + str(mse_lim)) 49 | 50 | def test_dwt_idwt_1d(self): 51 | 52 | # Generate a synthetic 1D signal (e.g., a sine wave) 53 | num_samples = 32 54 | x = np.linspace(0, 2 * np.pi, num_samples) 55 | signal = np.sin(5 * x) # 5 Hz sine wave 56 | 57 | # Expand dimensions to match model input requirements 58 | signal_ex1 = np.expand_dims(signal, axis=-1) # Add channel dimension 59 | signal_ex2 = np.expand_dims(signal_ex1, axis=0) # Add batch dimension 60 | 61 | # Initialize 1D DWT/IDWT model 62 | model = basic_dwt_idwt_1d(input_shape=signal_ex2.shape[1:], wave_name="db2", eagerly=False, threshold=False) 63 | 64 | # Predict and reconstruct the signal 65 | rec = model.predict(signal_ex2) 66 | # rec = rec[0, ..., 0] # Remove batch and channel dimensions 67 | rec = np.squeeze(rec) 68 | # Define error threshold and assert 69 | mse_lim = 1e-3 70 | self.assertLess(mse_1d(signal, rec), mse_lim, "Should be less than " + str(mse_lim)) 71 | 72 | 73 | def test_dwt_idwt_not_concat(self): 74 | 75 | img = cv2.imread(self.lenna_input_path, 0) 76 | self.assertIsNotNone(img, "LennaGrey.png not found in " + self.lenna_input_path) 77 | img_ex1 = np.expand_dims(img, axis=-1) 78 | img_ex2 = np.expand_dims(img_ex1, axis=0) 79 | model = basic_dwt_idwt(input_shape=img_ex1.shape, wave_name="db2", eagerly=True, threshold=False, concat = False) 80 | rec = model.predict(img_ex2) 81 | rec = rec[0, ..., 0] 82 | mse_lim = 1e-3 83 | self.assertLess(mse(img, rec), mse_lim, "Should be less than " + str(mse_lim)) 84 | 85 | def test_basic_train_mnist(self): 86 | (x_train, y_train), (x_test, y_test) = load_mnist(remove_n_samples=1000) 87 | 88 | model = AutocodeBasicDWT(latent_dim=64, width=28, height=28) 89 | model.compile(optimizer='adam', loss="mse") 90 | model.fit(x_train, x_train, epochs=1, shuffle=True, validation_data=(x_test, x_test), verbose=1) 91 | 92 | encoded_imgs = model.encoder(x_test).numpy() 93 | decoded_imgs = model.decoder(encoded_imgs).numpy() 94 | for img_dec, img_test in zip(decoded_imgs, x_test): 95 | self.assertLess(mse(img_dec, img_test), 1e2, "mse should be less then 0.01") 96 | 97 | def test_dmwt(self): 98 | (x_train, y_train), (x_test, y_test) = load_mnist(remove_n_samples=1000) 99 | input_shape = (28, 28, 1) 100 | model = basic_dmwt(input_shape=input_shape, nb_classes=10, wave_name="ghm", eagerly=True) 101 | 
model.compile(loss="categorical_crossentropy",optimizer='adam', metrics=["accuracy"]) 102 | model.fit(x_train, y_train, validation_split=0.2, epochs=20, batch_size=32, verbose=1,) 103 | test_loss, test_acc = model.evaluate(x_test, y_test, verbose=1) 104 | self.assertGreater(test_acc, 0.8, "test accuracy should be higher then 0.8") 105 | self.assertLess(test_loss, 0.8, "test loss should be less then 0.8") 106 | 107 | def test_dtcwt(self): 108 | (x_train, y_train), (x_test, y_test) = load_mnist(remove_n_samples=1000) 109 | input_shape = (28, 28, 1) 110 | model = basic_dtcwt(input_shape=input_shape, nb_classes=10, level=2, eagerly=True) 111 | model.compile(loss="categorical_crossentropy", optimizer='adam', metrics=["accuracy"]) 112 | model.fit(x_train, y_train, validation_split=0.2, epochs=20, batch_size=32, verbose=1,) 113 | test_loss, test_acc = model.evaluate(x_test, y_test, verbose=1) 114 | self.assertGreater(test_acc, 0.8, "test accuracy should be higher then 0.8") 115 | self.assertLess(test_loss, 0.8, "test loss should be less then 0.8") 116 | 117 | 118 | def test_dwt_idwt_sof_thresh(self): 119 | 120 | img = cv2.imread(self.lenna_input_path, 0) 121 | self.assertIsNotNone(img, "LennaGrey.png not found in " + self.lenna_input_path) 122 | img_ex1 = np.expand_dims(img, axis=-1) 123 | img_ex2 = np.expand_dims(img_ex1, axis=0) 124 | model = basic_dwt_idwt(input_shape=img_ex1.shape, wave_name="db2", eagerly=True, threshold=True, mode='soft', algo='sure') 125 | rec = model.predict(img_ex2) 126 | rec = rec[0, ..., 0] 127 | mse_lim = 3.5 128 | self.assertLess(mse(img, rec), mse_lim, "Should be less than " + str(mse_lim)) 129 | 130 | def test_shapes(self): 131 | transform = DWT.DWT(name='db4', concat=0) 132 | inverse = DWT.IDWT(name='db4', concat=0) 133 | ones = tf.ones((1, 160, 64, 1)) 134 | output = transform(ones) 135 | inverted = inverse(output) 136 | print(output.shape) 137 | print(inverted.shape) 138 | self.assertEqual(ones.shape, inverted.shape) 139 | 140 | if __name__ == '__main__': 141 | unittest.main() 142 | -------------------------------------------------------------------------------- /src/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Timorleiderman/tensorflow-wavelets/58cf7dc8e469e963a6c7dd0567e05925a7d3c65c/src/tests/__init__.py -------------------------------------------------------------------------------- /src/tests/test_dwt1d.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import random 3 | import logging 4 | import numpy as np 5 | import tensorflow as tf 6 | 7 | from tensorflow_wavelets.Layers.DWT import DWT1D, IDWT1D 8 | 9 | logger = logging.getLogger(__file__) 10 | 11 | def create_signal(channel_count: int): 12 | # Create a simple 1D signal 13 | num_samples = random.randint(8, 128) * 2 14 | x = np.linspace(0, 2 * np.pi, num_samples) 15 | signal = np.sin(random.randint(1, 1e5) * x).astype(np.float32) 16 | return np.repeat(signal[None, :, None], repeats=channel_count, axis=-1) 17 | 18 | 19 | @pytest.mark.parametrize("wavelet_name", ["haar", "db2", "coif1"]) 20 | def test_wavelet_transform(wavelet_name): 21 | 22 | for idx in range(1, 8): 23 | signal = create_signal(idx) 24 | logger.info(f"{signal.shape}") 25 | # Create Wavelet Transform layers 26 | dwt = DWT1D(wavelet_name) 27 | idwt = IDWT1D(wavelet_name) 28 | 29 | # Pass through DWT and then inverse DWT 30 | transformed = dwt(signal) 31 | reconstructed = idwt(transformed).numpy() 32 
| 33 | assert signal.shape == reconstructed.shape 34 | 35 | # Assert that the reconstructed signal is close to the original 36 | np.testing.assert_allclose(signal, reconstructed, atol=1e-5) 37 | --------------------------------------------------------------------------------
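Usage sketch: the tests above exercise the package's public layers end to end. The following is a minimal sketch rather than a definitive recipe; it mirrors basic_dwt_idwt in src/tensorflow_wavelets/utils/models.py and assumes that tensorflow and this package are importable and that the layer signatures shown above (DWT.DWT, Threshold.Threshold, DWT.IDWT) are current.

import numpy as np
from tensorflow import keras
from tensorflow.keras import layers

import tensorflow_wavelets.Layers.DWT as DWT
import tensorflow_wavelets.Layers.Threshold as Activation

# Forward 2D DWT, wavelet-domain thresholding, then inverse DWT,
# following the basic_dwt_idwt helper shown above.
model = keras.Sequential([
    layers.InputLayer(input_shape=(512, 512, 1)),
    DWT.DWT(wavelet_name="db2", concat=True),
    Activation.Threshold(algo="sure", mode="soft"),
    DWT.IDWT(wavelet_name="db2", concat=True),
])

# A random image stands in for the 512x512 LennaGrey.png used by the tests.
img = np.random.rand(1, 512, 512, 1).astype("float32")
rec = model.predict(img)
print("reconstruction MSE:", np.mean((img - rec) ** 2))

The 1D layers (DWT.DWT1D and DWT.IDWT1D) follow the same pattern on (batch, samples, channels) tensors, as exercised in src/tests/test_dwt1d.py.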