├── Mini project 3
│   ├── usersdb.db
│   ├── kindpng_1372491.png
│   ├── kindpng_170301.png
│   ├── __pycache__
│   │   ├── mydatabase.cpython-39.pyc
│   │   └── ui_main_window.cpython-39.pyc
│   ├── mydatabase.py
│   ├── Readme.md
│   ├── webcam.ui
│   ├── log in.ui
│   ├── new user.ui
│   ├── main.ui
│   ├── main.py
│   └── user edit.ui
├── assignment-23
│   ├── img
│   │   ├── lips.png
│   │   ├── emoji1.png
│   │   ├── emoji2.png
│   │   └── sunglasses.png
│   ├── Readme.md
│   └── Main.py
├── assignment-25
│   ├── input
│   │   ├── me.jpg
│   │   ├── lion.png
│   │   ├── building.tif
│   │   ├── flower_input.jpg
│   │   ├── 3ShadesOfGray.mp4
│   │   └── flower_output.jpg
│   ├── output
│   │   ├── result_me.jpg
│   │   ├── result_lion.jpg
│   │   ├── result_lion2.jpg
│   │   ├── Portrate_Flower.png
│   │   └── result_building.jpg
│   ├── lion.py
│   ├── lion2.py
│   ├── building.py
│   ├── ConvolutionFunc.py
│   ├── Readme.md
│   ├── Portrateflower.py
│   └── ColorSensing.py
├── assignment-27
│   ├── Input
│   │   ├── 0.jpg
│   │   ├── 1.jpg
│   │   ├── 10.jpg
│   │   ├── 11.png
│   │   ├── 12.png
│   │   ├── 2.jpg
│   │   ├── 3.jpg
│   │   ├── puma.jpg
│   │   └── jLw8Rbf.jpg
│   ├── Object Detection Ex
│   │   ├── Input
│   │   │   ├── coins.jpg
│   │   │   ├── puma.jpg
│   │   │   ├── wolf.jpg
│   │   │   ├── pinguine2.jpg
│   │   │   └── pinguine3.jpg
│   │   └── Readme.md
│   ├── Readme.md
│   └── Find Contour Method.ipynb
├── assignment-24
│   ├── input
│   │   ├── image.jpg
│   │   └── img.jpg
│   ├── output
│   │   └── result2.jpg
│   ├── weights
│   │   ├── RFB-320.tflite
│   │   ├── coor_2d106.tflite
│   │   ├── iris_localization.tflite
│   │   └── head_pose_object_points.npy
│   ├── requirements.txt
│   ├── __pycache__
│   │   └── TFLiteFaceDetector.cpython-39.pyc
│   ├── SolvePnPHeadPoseEstimation.py
│   ├── vtuber_link_start.py
│   ├── TFLiteFaceDetector.py
│   ├── TFLiteIrisLocalization.py
│   ├── WebcamTFLiteFaceAlignment.py
│   └── MyTFLiteFaceAlignment.py
├── assignment-26-1
│   ├── Input
│   │   ├── 1.png
│   │   ├── 2.png
│   │   ├── 3.tif
│   │   └── sudoku.tif
│   ├── Output
│   │   ├── cell0.jpg
│   │   ├── cell1.jpg
│   │   ├── cell10.jpg
│   │   ├── cell11.jpg
│   │   ├── cell12.jpg
│   │   ├── cell13.jpg
│   │   ├── cell14.jpg
│   │   ├── cell15.jpg
│   │   ├── cell16.jpg
│   │   ├── cell17.jpg
│   │   ├── cell18.jpg
│   │   ├── cell19.jpg
│   │   ├── cell2.jpg
│   │   ├── cell20.jpg
│   │   ├── cell21.jpg
│   │   ├── cell22.jpg
│   │   ├── cell23.jpg
│   │   ├── cell24.jpg
│   │   ├── cell25.jpg
│   │   ├── cell26.jpg
│   │   ├── cell27.jpg
│   │   ├── cell28.jpg
│   │   ├── cell29.jpg
│   │   ├── cell3.jpg
│   │   ├── cell4.jpg
│   │   ├── cell5.jpg
│   │   ├── cell6.jpg
│   │   ├── cell7.jpg
│   │   ├── cell8.jpg
│   │   ├── cell9.jpg
│   │   └── final.png
│   ├── Readme.md
│   ├── ColorSensing.py
│   ├── Increase Contrast.ipynb
│   └── test.ipynb
├── assignment-29
│   ├── Input
│   │   ├── rubix.png
│   │   ├── Carrot.jpg
│   │   └── color picker.ui
│   ├── Output
│   │   ├── Rubix.png
│   │   ├── Green Carrot.jpg
│   │   └── Microsoft Logo.png
│   ├── ColorSensing.py
│   ├── Readme.md
│   ├── Color Picker.py
│   └── Microsoft Logo.ipynb
├── assignment-30
│   ├── input
│   │   ├── sky.jpg
│   │   └── SuperMan.jpg
│   ├── Skin detector .py
│   └── Readme.md
├── assignment-28
│   ├── Input
│   │   ├── sudoku1.jpg
│   │   ├── sudoku2.jpg
│   │   └── sudoku3.png
│   ├── Output
│   │   ├── sudoku4.jpg
│   │   ├── sudoku5.jpg
│   │   └── sudoku6.jpg
│   ├── time warp scan
│   │   ├── Readme.md
│   │   └── time warp scan.py
│   ├── Readme.md
│   ├── Sudoku.py
│   └── Webcam Sudoku Detector.py
└── assignment-26-2
    ├── Mr Bin
    │   ├── input
    │   │   └── 1.jpg
    │   └── output
    │       ├── rotated_img.jpg
    │       ├── salt_pepper_img.jpg
    │       └── img_without_noise.jpg
    ├── snowy gif
    │   ├── output
    │   │   └── img116.jpg
    │   └── input
    │       └── village-scaled.jpg
    ├── numbers sepration
    │   ├── input
    │   │   └── mnist.png
    │   ├── output
    │   │   └── cell0.jpg
    │   └── numbers sepration.ipynb
    └── Readme.md
/Mini project 3/usersdb.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/Mini project 3/usersdb.db
--------------------------------------------------------------------------------
/assignment-23/img/lips.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-23/img/lips.png
--------------------------------------------------------------------------------
/assignment-25/input/me.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/input/me.jpg
--------------------------------------------------------------------------------
/assignment-27/Input/0.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Input/0.jpg
--------------------------------------------------------------------------------
/assignment-27/Input/1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Input/1.jpg
--------------------------------------------------------------------------------
/assignment-27/Input/10.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Input/10.jpg
--------------------------------------------------------------------------------
/assignment-27/Input/11.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Input/11.png
--------------------------------------------------------------------------------
/assignment-27/Input/12.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Input/12.png
--------------------------------------------------------------------------------
/assignment-27/Input/2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Input/2.jpg
--------------------------------------------------------------------------------
/assignment-27/Input/3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Input/3.jpg
--------------------------------------------------------------------------------
/assignment-23/img/emoji1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-23/img/emoji1.png
--------------------------------------------------------------------------------
/assignment-23/img/emoji2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-23/img/emoji2.png
--------------------------------------------------------------------------------
/assignment-24/input/image.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-24/input/image.jpg
--------------------------------------------------------------------------------
/assignment-24/input/img.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-24/input/img.jpg
--------------------------------------------------------------------------------
/assignment-25/input/lion.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/input/lion.png
--------------------------------------------------------------------------------
/assignment-26-1/Input/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Input/1.png
--------------------------------------------------------------------------------
/assignment-26-1/Input/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Input/2.png
--------------------------------------------------------------------------------
/assignment-26-1/Input/3.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Input/3.tif
--------------------------------------------------------------------------------
/assignment-27/Input/puma.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Input/puma.jpg
--------------------------------------------------------------------------------
/assignment-29/Input/rubix.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-29/Input/rubix.png
--------------------------------------------------------------------------------
/assignment-30/input/sky.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-30/input/sky.jpg
--------------------------------------------------------------------------------
/assignment-27/Input/jLw8Rbf.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Input/jLw8Rbf.jpg
--------------------------------------------------------------------------------
/assignment-28/Input/sudoku1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-28/Input/sudoku1.jpg
--------------------------------------------------------------------------------
/assignment-28/Input/sudoku2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-28/Input/sudoku2.jpg
--------------------------------------------------------------------------------
/assignment-28/Input/sudoku3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-28/Input/sudoku3.png
--------------------------------------------------------------------------------
/assignment-29/Input/Carrot.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-29/Input/Carrot.jpg
--------------------------------------------------------------------------------
/assignment-29/Output/Rubix.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-29/Output/Rubix.png
--------------------------------------------------------------------------------
/Mini project 3/kindpng_1372491.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/Mini project 3/kindpng_1372491.png
--------------------------------------------------------------------------------
/Mini project 3/kindpng_170301.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/Mini project 3/kindpng_170301.png
--------------------------------------------------------------------------------
/assignment-23/img/sunglasses.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-23/img/sunglasses.png
--------------------------------------------------------------------------------
/assignment-24/output/result2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-24/output/result2.jpg
--------------------------------------------------------------------------------
/assignment-25/input/building.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/input/building.tif
--------------------------------------------------------------------------------
/assignment-25/output/result_me.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/output/result_me.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Input/sudoku.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Input/sudoku.tif
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell0.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell0.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell1.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell10.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell10.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell11.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell11.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell12.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell12.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell13.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell13.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell14.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell14.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell15.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell15.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell16.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell16.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell17.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell17.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell18.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell18.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell19.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell19.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell2.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell20.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell20.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell21.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell21.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell22.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell22.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell23.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell23.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell24.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell24.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell25.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell25.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell26.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell26.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell27.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell27.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell28.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell28.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell29.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell29.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell3.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell4.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell4.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell5.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell5.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell6.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell6.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell7.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell7.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell8.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell8.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/cell9.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/cell9.jpg
--------------------------------------------------------------------------------
/assignment-26-1/Output/final.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-1/Output/final.png
--------------------------------------------------------------------------------
/assignment-26-2/Mr Bin/input/1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-2/Mr Bin/input/1.jpg
--------------------------------------------------------------------------------
/assignment-28/Output/sudoku4.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-28/Output/sudoku4.jpg
--------------------------------------------------------------------------------
/assignment-28/Output/sudoku5.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-28/Output/sudoku5.jpg
--------------------------------------------------------------------------------
/assignment-28/Output/sudoku6.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-28/Output/sudoku6.jpg
--------------------------------------------------------------------------------
/assignment-30/input/SuperMan.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-30/input/SuperMan.jpg
--------------------------------------------------------------------------------
/assignment-24/weights/RFB-320.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-24/weights/RFB-320.tflite
--------------------------------------------------------------------------------
/assignment-25/input/flower_input.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/input/flower_input.jpg
--------------------------------------------------------------------------------
/assignment-25/output/result_lion.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/output/result_lion.jpg
--------------------------------------------------------------------------------
/assignment-24/weights/coor_2d106.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-24/weights/coor_2d106.tflite
--------------------------------------------------------------------------------
/assignment-25/input/3ShadesOfGray.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/input/3ShadesOfGray.mp4
--------------------------------------------------------------------------------
/assignment-25/input/flower_output.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/input/flower_output.jpg
--------------------------------------------------------------------------------
/assignment-25/output/result_lion2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/output/result_lion2.jpg
--------------------------------------------------------------------------------
/assignment-29/Output/Green Carrot.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-29/Output/Green Carrot.jpg
--------------------------------------------------------------------------------
/assignment-29/Output/Microsoft Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-29/Output/Microsoft Logo.png
--------------------------------------------------------------------------------
/assignment-25/output/Portrate_Flower.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/output/Portrate_Flower.png
--------------------------------------------------------------------------------
/assignment-25/output/result_building.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-25/output/result_building.jpg
--------------------------------------------------------------------------------
/assignment-26-2/snowy gif/output/img116.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-2/snowy gif/output/img116.jpg
--------------------------------------------------------------------------------
/assignment-24/weights/iris_localization.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-24/weights/iris_localization.tflite
--------------------------------------------------------------------------------
/assignment-26-2/Mr Bin/output/rotated_img.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-2/Mr Bin/output/rotated_img.jpg
--------------------------------------------------------------------------------
/assignment-24/weights/head_pose_object_points.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-24/weights/head_pose_object_points.npy
--------------------------------------------------------------------------------
/assignment-26-2/Mr Bin/output/salt_pepper_img.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-2/Mr Bin/output/salt_pepper_img.jpg
--------------------------------------------------------------------------------
/assignment-26-2/numbers sepration/input/mnist.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-2/numbers sepration/input/mnist.png
--------------------------------------------------------------------------------
/assignment-27/Object Detection Ex/Input/coins.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Object Detection Ex/Input/coins.jpg
--------------------------------------------------------------------------------
/assignment-27/Object Detection Ex/Input/puma.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Object Detection Ex/Input/puma.jpg
--------------------------------------------------------------------------------
/assignment-27/Object Detection Ex/Input/wolf.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Object Detection Ex/Input/wolf.jpg
--------------------------------------------------------------------------------
/assignment-24/requirements.txt:
--------------------------------------------------------------------------------
1 | tensorflow >= 2.3
2 | opencv-python
3 | bidict == 0.21.2
4 | websocket-client==0.57.0
5 | python-socketio == 5.0.4
6 | python-engineio == 4.0.0
--------------------------------------------------------------------------------
/assignment-26-2/Mr Bin/output/img_without_noise.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-2/Mr Bin/output/img_without_noise.jpg
--------------------------------------------------------------------------------
/assignment-26-2/numbers sepration/output/cell0.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-2/numbers sepration/output/cell0.jpg
--------------------------------------------------------------------------------
/assignment-26-2/snowy gif/input/village-scaled.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-26-2/snowy gif/input/village-scaled.jpg
--------------------------------------------------------------------------------
/Mini project 3/__pycache__/mydatabase.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/Mini project 3/__pycache__/mydatabase.cpython-39.pyc
--------------------------------------------------------------------------------
/assignment-27/Object Detection Ex/Input/pinguine2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Object Detection Ex/Input/pinguine2.jpg
--------------------------------------------------------------------------------
/assignment-27/Object Detection Ex/Input/pinguine3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-27/Object Detection Ex/Input/pinguine3.jpg
--------------------------------------------------------------------------------
/Mini project 3/__pycache__/ui_main_window.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/Mini project 3/__pycache__/ui_main_window.cpython-39.pyc
--------------------------------------------------------------------------------
/assignment-24/__pycache__/TFLiteFaceDetector.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mahdisesmaeelian/Python-ImageProcessing/HEAD/assignment-24/__pycache__/TFLiteFaceDetector.cpython-39.pyc
--------------------------------------------------------------------------------
/assignment-26-1/Readme.md:
--------------------------------------------------------------------------------
1 | ## 
2 | ## 
3 |
--------------------------------------------------------------------------------
/assignment-27/Object Detection Ex/Readme.md:
--------------------------------------------------------------------------------
1 | ## Exercises in detecting objects in images with OpenCV
2 | ## 
3 | ## 
4 | ## 
5 |
--------------------------------------------------------------------------------
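The exercise notebook itself is not reproduced in this dump (only the input images are). As a rough illustration of the kind of contour-based object detection these exercises cover, here is a minimal OpenCV sketch; the file name and threshold value are assumptions, not taken from the repo:

```python
import cv2

# Load one of the exercise images and binarize it (threshold value is a guess).
img = cv2.imread("Input/coins.jpg")
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
_, thresh = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY_INV)

# Find external contours and draw a bounding box around each detected object.
contours, _ = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
for c in contours:
    x, y, w, h = cv2.boundingRect(c)
    cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2)

cv2.imwrite("detected_objects.jpg", img)
```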
/assignment-28/time warp scan/Readme.md:
--------------------------------------------------------------------------------
1 | ## Time warp scan filter
2 |
3 | What this project does is grab a column of pixels, where the height of the column is the height of the video
4 | and the width is some small value (1 pixel). We store these pixels as an image so that we can draw them on the screen, on top of our capture display.
5 |
6 | 
7 |
--------------------------------------------------------------------------------
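The full script is in time warp scan.py later in this dump; it sweeps a horizontal line down the frame, freezing one row of pixels per frame. A minimal sketch of that core step (variable names are illustrative only):

```python
import numpy as np
import cv2

cam = cv2.VideoCapture(0)
frozen_rows = []   # rows of pixels captured so far
y = 1              # current scan line

while True:
    ret, frame = cam.read()
    if not ret or y >= frame.shape[0]:
        break
    frozen_rows.append(frame[y - 1])        # grab one row of pixels per frame
    frame[:y, :] = np.array(frozen_rows)    # overwrite the top of the frame with the frozen rows
    cv2.imshow("time warp", frame)
    if cv2.waitKey(1) == ord("q"):
        break
    y += 1
```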
/assignment-25/lion.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 |
4 | img = cv2.imread("input/lion.png")
5 | img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
6 |
7 | result = np.zeros(img.shape)
8 |
9 | mask = np.array([[0 , -1 , 0],   # Laplacian edge-detection kernel
10 |                  [-1 , 4 , -1],
11 |                  [0 , -1 , 0]])
12 |
13 | rows , cols = img.shape
14 |
15 | for i in range (1,rows-1):
16 |     for j in range(1,cols-1):
17 |         small_img = img[i-1:i+2, j-1:j+2]
18 |         result[i,j] = np.sum(small_img * mask)
19 |
20 | cv2.imwrite("result_lion.jpg",result)
--------------------------------------------------------------------------------
/assignment-25/lion2.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 |
4 | img = cv2.imread("input/lion.png")
5 | img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
6 |
7 | result = np.zeros(img.shape)
8 |
9 | mask = np.array([[-1 , -1 , -1],   # Prewitt-style kernel (horizontal edges)
10 |                  [0 , 0 , 0],
11 |                  [1 , 1 , 1]])
12 |
13 | rows , cols = img.shape
14 |
15 | for i in range (1,rows-1):
16 |     for j in range(1,cols-1):
17 |         small_img = img[i-1:i+2, j-1:j+2]
18 |         result[i,j] = np.sum(small_img * mask)
19 |
20 | cv2.imwrite("result_lion2.jpg",result)
--------------------------------------------------------------------------------
/assignment-26-2/Readme.md:
--------------------------------------------------------------------------------
1 | ## Separate the numbers of an image
2 | ## 
3 | ## Process an image by adding noise and removing it, and changing face alignment
4 | ## 
5 | ## Creating a snowfall GIF
6 | ##
7 |
8 | https://user-images.githubusercontent.com/88204357/145231592-41682bef-de86-4629-9c2f-c153c317e16f.mp4
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
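The Mr Bin processing script itself is not reproduced in this dump (only its input and output images appear in the tree). A minimal sketch of the add-noise/remove-noise step the readme describes, assuming salt-and-pepper noise and a median filter; the noise ratio and kernel size are illustrative:

```python
import numpy as np
import cv2

img = cv2.imread("Mr Bin/input/1.jpg", cv2.IMREAD_GRAYSCALE)

# Add salt-and-pepper noise: set a random ~2% of pixels to black and ~2% to white.
noisy = img.copy()
mask = np.random.rand(*img.shape)
noisy[mask < 0.02] = 0
noisy[mask > 0.98] = 255
cv2.imwrite("Mr Bin/output/salt_pepper_img.jpg", noisy)

# A median filter removes isolated salt-and-pepper pixels well.
denoised = cv2.medianBlur(noisy, 3)
cv2.imwrite("Mr Bin/output/img_without_noise.jpg", denoised)
```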
/assignment-25/building.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 |
4 | img = cv2.imread("input/building.tif")
5 | img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
6 |
7 | result = np.zeros(img.shape)
8 |
9 | mask = np.array([[-1 , 0 , 1],   # Prewitt-style kernel (vertical edges)
10 |                  [-1 , 0 , 1],
11 |                  [-1 , 0 , 1]])
12 |
13 | rows , cols = img.shape
14 |
15 | for i in range (1,rows-1):
16 |     for j in range(1,cols-1):
17 |         small_img = img[i-1:i+2, j-1:j+2]
18 |         result[i,j] = np.sum(small_img * mask)
19 |
20 | cv2.imwrite("result_building.jpg",result)
--------------------------------------------------------------------------------
/assignment-25/ConvolutionFunc.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 |
4 | img = cv2.imread("input/me.jpg")
5 | img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
6 |
7 | result = np.zeros(img.shape)
8 |
9 | def Canvolution(a):
10 |     b = a//2
11 |     mask = np.ones((a,a)) / (a*a)   # a x a mean (box-blur) kernel
12 |
13 |     rows , cols = img.shape
14 |
15 |     for i in range (b,rows-b):
16 |         for j in range(b,cols-b):
17 |             small_img = img[i-b:i+(b+1), j-b:j+(b+1)]
18 |             result[i,j] = np.sum(small_img * mask)
19 |
20 | Canvolution(15)
21 |
22 | cv2.imwrite("result_me.jpg",result)
23 |
--------------------------------------------------------------------------------
/assignment-25/Readme.md:
--------------------------------------------------------------------------------
1 | ## 
2 | ## 
3 | ## 
4 | ## 
5 | ##
6 |
7 | https://user-images.githubusercontent.com/88204357/143685303-ab48b87a-c6db-4bdc-86c4-e542da768a9f.mp4
8 |
--------------------------------------------------------------------------------
/assignment-30/Skin detector .py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 |
4 | min_HSV = np.array([0, 48, 80], dtype = "uint8")
5 | max_HSV = np.array([20, 255, 255], dtype = "uint8")
6 |
7 | video_cap = cv2.VideoCapture(0)
8 |
9 | while True:
10 |
11 |     ret,frame = video_cap.read()
12 |
13 |     frame_HSV = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
14 |     Mask = cv2.inRange(frame_HSV, min_HSV, max_HSV)
15 |
16 |     skinMask = cv2.GaussianBlur(Mask, (3, 3), 0)
17 |     output = cv2.bitwise_and(frame, frame, mask = skinMask)
18 |
19 |     cv2.imshow("Camera", output)
20 |
21 |     if cv2.waitKey(1) & 0xFF == ord("q"):
22 |         break
--------------------------------------------------------------------------------
/assignment-30/Readme.md:
--------------------------------------------------------------------------------
1 | ## Change Superman's background using the blue screen technique:
2 | 
3 | 
4 | 
5 |
6 | ## Camera skin detector using OpenCV and an HSV color range:
7 | 
8 |
--------------------------------------------------------------------------------
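The blue-screen script itself is not included in this dump (only Skin detector .py is). A minimal sketch of the technique the first heading describes, using the repo's input images; the HSV bounds for the blue screen are assumptions:

```python
import cv2
import numpy as np

superman = cv2.imread("input/SuperMan.jpg")
sky = cv2.imread("input/sky.jpg")
sky = cv2.resize(sky, (superman.shape[1], superman.shape[0]))

# Mask out the blue screen (HSV range is a rough guess) and replace it with the sky.
hsv = cv2.cvtColor(superman, cv2.COLOR_BGR2HSV)
blue_mask = cv2.inRange(hsv, np.array([100, 80, 80]), np.array([130, 255, 255]))

result = np.where(blue_mask[:, :, None] == 255, sky, superman)
cv2.imwrite("superman_new_background.jpg", result)
```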
/assignment-25/Portrateflower.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 |
4 | img = cv2.imread('input/flower_input.jpg', 0)
5 |
6 | threshold = 180
7 | result = np.zeros(img.shape)
8 | mask = np.ones((15,15)) / 225
9 | rows , cols = img.shape
10 |
11 | for i in range (7,rows-7):
12 |     for j in range(7,cols-7):
13 |         small_img = img[i-7:i+8,j-7:j+8]
14 |         result[i,j] = np.sum(small_img * mask)
15 |
16 | for i in range(rows):
17 |     for j in range(cols):
18 |         if img[i,j] <= threshold:
19 |             img[i,j] = result[i,j]
20 |
21 | cv2.imwrite("Portrate_Flower.png", img)
22 | cv2.waitKey()
--------------------------------------------------------------------------------
/assignment-28/time warp scan/time warp scan.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 |
4 | list_frames = []
5 | y = 1
6 |
7 | cam = cv2.VideoCapture(0)
8 |
9 | while True:
10 |
11 |     ret,frame = cam.read()
12 |
13 |     if not ret:
14 |         break
15 |
16 |     rows , cols , _ = frame.shape
17 |
18 |     list_frames.append(frame[y-1])
19 |     frame[:y, :] = np.array(list_frames)
20 |
21 |     cv2.line(frame, (0, y), (cols, y), (238,0,0), 1)
22 |     y += 1
23 |
24 |     if cv2.waitKey(1) == ord('q'):
25 |         break
26 |
27 |     if y == rows :
28 |         break
29 |
30 |     cv2.imshow("Camera", frame)
31 |     cv2.waitKey(1)
32 |
33 | cv2.imwrite("Time warp scan.jpg",frame)
--------------------------------------------------------------------------------
/Mini project 3/mydatabase.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 |
3 | mydb = sqlite3.connect("usersdb.db")
4 | myCursor = mydb.cursor()
5 |
6 | # with open("face_images/user.jpg","rb") as f:
7 | #     data = f.read()
8 |
9 | # def InsertImg():
10 | #     myCursor.execute(f'INSERT INTO Users(photo) VALUES {data}')
11 |
12 | def Add(name,lastname,nationalcode,birthdate):
13 |     myCursor.execute(f'INSERT INTO Users(Name, LastName, nationalCode, birthdate) VALUES("{name}","{lastname}","{nationalcode}","{birthdate}")')
14 |     mydb.commit()
15 |
16 | def Edit(name,lastname,nationalcode1,birthdate,id):
17 |     myCursor.execute(f'UPDATE Users SET Name = "{name}" ,LastName = "{lastname}", nationalCode = "{nationalcode1}" ,birthdate = "{birthdate}" WHERE Id = "{id}"')
18 |     mydb.commit()
19 |
20 | def GetAll():
21 |     myCursor.execute("SELECT * FROM Users")
22 |     result = myCursor.fetchall()
23 |     return result
--------------------------------------------------------------------------------
/assignment-29/ColorSensing.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 |
4 | video_cap = cv2.VideoCapture(0)
5 |
6 | while True:
7 |     ret,frame = video_cap.read()
8 |
9 |     myrec = frame[180:300,270:390]
10 |
11 |     if ret == False:
12 |         break
13 |
14 |     kernel = np.ones((35,35),np.float32)/1225
15 |     dst = cv2.filter2D(frame,-1,kernel)
16 |
17 |     cv2.rectangle(frame,(270,180), (390,300), (0, 0, 0),2)
18 |     dst[180:300,270:390] = myrec
19 |
20 |     if 0 < np.average(myrec) <= 60:
21 |         cv2.putText(dst, "Black", (25, 50), cv2.FONT_HERSHEY_PLAIN,3, (0, 0, 0),3)
22 |     elif 60 < np.average(myrec) <= 120:
23 |         cv2.putText(dst, "Gray", (25, 50), cv2.FONT_HERSHEY_PLAIN,3, (0, 0, 0),3)
24 |     else:
25 |         cv2.putText(dst, "White", (25, 50), cv2.FONT_HERSHEY_PLAIN,3, (0, 0, 0),3)
26 |
27 |     cv2.imshow('Camera',dst)
28 |     cv2.waitKey(1)
--------------------------------------------------------------------------------
/assignment-23/Readme.md:
--------------------------------------------------------------------------------
1 | ## Image Processing
2 |
3 |
4 | Opening the camera and putting effects on faces:
5 |
6 |
7 | You will see this emoji on your face by pressing number 1 on the keyboard (face detection):
8 | ## 
9 |
10 | By pressing number 2, this effect is shown on your eyes and mouth (using the eye and mouth detection XML files):
11 | ## 
12 |
13 | Key number 3 will blur your face:
14 | ## 
15 |
16 | And key number 4 flips your image vertically:
17 | ## 
18 |
--------------------------------------------------------------------------------
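Main.py is only listed in the tree above, not reproduced in this dump. As a rough sketch of the face-detection-plus-sticker idea the readme describes (key 1 behaviour), here is a minimal example using OpenCV's bundled Haar cascade; the cascade choice, parameters, and the simple paste-over overlay are assumptions:

```python
import cv2

# Standard frontal-face cascade shipped with opencv-python.
face_detector = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")
emoji = cv2.imread("img/emoji1.png")

cap = cv2.VideoCapture(0)
while True:
    ret, frame = cap.read()
    if not ret:
        break
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    for (x, y, w, h) in face_detector.detectMultiScale(gray, 1.3, 5):
        # Resize the emoji to the face box and paste it over the face.
        frame[y:y + h, x:x + w] = cv2.resize(emoji, (w, h))
    cv2.imshow("faces", frame)
    if cv2.waitKey(1) == ord("q"):
        break
```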
/assignment-29/Readme.md:
--------------------------------------------------------------------------------
1 | ## Recolor the carrot by changing the R value:
2 | 
3 | 
4 |
5 | ## Design the Microsoft logo and put a special font on it:
6 | 
7 |
8 | ## Solving the Rubik's cube with the np.where function:
9 | 
10 | 
11 |
12 | ## Create a color picker box using OpenCV + Qt
13 | 
14 |
--------------------------------------------------------------------------------
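The carrot-recolor script is not reproduced in this dump. A minimal sketch of the kind of channel manipulation the first heading describes; the exact operation used to produce Green Carrot.jpg is an assumption (here the red and green channels are simply swapped):

```python
import cv2

carrot = cv2.imread("Input/Carrot.jpg")

# OpenCV stores images as BGR; swapping the red and green channels
# turns the orange carrot body green.
b, g, r = cv2.split(carrot)
green_carrot = cv2.merge((b, r, g))

cv2.imwrite("green_carrot_sketch.jpg", green_carrot)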
/assignment-25/ColorSensing.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 |
4 | video_cap = cv2.VideoCapture(0)
5 |
6 | while True:
7 |     ret,frame = video_cap.read()
8 |     frame = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
9 |
10 |     myrec = frame[180:300,270:390]
11 |
12 |     if ret == False:
13 |         break
14 |
15 |     kernel = np.ones((35,35),np.float32)/1225
16 |     dst = cv2.filter2D(frame,-1,kernel)
17 |
18 |     cv2.rectangle(frame,(270,180), (390,300), (0, 0, 0),2)
19 |     dst[180:300,270:390] = myrec
20 |     color_detect_area = dst[180:300,270:390]
21 |
22 |     if 0 < np.average(color_detect_area) <= 70:
23 |         cv2.putText(dst, "Black", (25, 50), cv2.FONT_HERSHEY_PLAIN,3, (0, 0, 0),3)
24 |     elif 70 < np.average(color_detect_area) <= 120:
25 |         cv2.putText(dst, "Gray", (25, 50), cv2.FONT_HERSHEY_PLAIN,3, (0, 0, 0),3)
26 |     else:
27 |         cv2.putText(dst, "White", (25, 50), cv2.FONT_HERSHEY_PLAIN,3, (0, 0, 0),3)
28 |
29 |     cv2.imshow('Camera',dst)
30 |     cv2.waitKey(1)
31 |
--------------------------------------------------------------------------------
/assignment-26-1/ColorSensing.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 |
4 | video_cap = cv2.VideoCapture(0)
5 |
6 | while True:
7 |     ret,frame = video_cap.read()
8 |     frame = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
9 |
10 |     myrec = frame[180:300,270:390]
11 |
12 |     alpha = 1.5
13 |     beta = 0
14 |
15 |     if ret == False:
16 |         break
17 |
18 |     kernel = np.ones((35,35),np.float32)/1225
19 |     dst = cv2.filter2D(frame,-1,kernel)
20 |
21 |     cv2.rectangle(frame,(270,180), (390,300), (0, 0, 0),2)
22 |     dst[180:300,270:390] = myrec
23 |     color_detect_area = cv2.convertScaleAbs(dst[180:300,270:390], alpha=alpha, beta=beta)
24 |     dst[180:300,270:390] = color_detect_area
25 |
26 |     if 0 < np.average(color_detect_area) <= 60:
27 |         cv2.putText(dst, "Black", (25, 50), cv2.FONT_HERSHEY_PLAIN,3, (0, 0, 0),3)
28 |     elif 60 < np.average(color_detect_area) <= 120:
29 |         cv2.putText(dst, "Gray", (25, 50), cv2.FONT_HERSHEY_PLAIN,3, (0, 0, 0),3)
30 |     else:
31 |         cv2.putText(dst, "White", (25, 50), cv2.FONT_HERSHEY_PLAIN,3, (0, 0, 0),3)
32 |
33 |     cv2.imshow('Camera',dst)
34 |     cv2.waitKey(1)
--------------------------------------------------------------------------------
/assignment-27/Readme.md:
--------------------------------------------------------------------------------
1 | ## Find-contour method from scratch to detect an object:
2 | 
3 | 
4 | ## Dice dot detection using the OpenCV library, with the findContours, Laplacian and HoughCircles methods:
5 | 
6 | 
7 | 
8 | ##
9 | 
10 | 
11 | 
12 |
--------------------------------------------------------------------------------
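Find Contour Method.ipynb is listed in the tree but not reproduced here. As a sketch of the HoughCircles-based dice-dot counting mentioned above, here is a minimal example; all parameter values and the input file name are assumptions:

```python
import cv2
import numpy as np

img = cv2.imread("Input/0.jpg")
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
gray = cv2.medianBlur(gray, 5)

# Detect the dice dots as small circles; each detected circle counts as one dot.
circles = cv2.HoughCircles(gray, cv2.HOUGH_GRADIENT, dp=1, minDist=15,
                           param1=100, param2=20, minRadius=3, maxRadius=20)

if circles is not None:
    circles = np.round(circles[0]).astype(int)
    for (x, y, r) in circles:
        cv2.circle(img, (int(x), int(y)), int(r), (0, 255, 0), 2)
    print(f"dots detected: {len(circles)}")

cv2.imwrite("dice_dots.jpg", img)
```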
/assignment-26-1/Increase Contrast.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "id": "e60ef6e3-97d8-4270-8dd9-abb84d1dfd73",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import cv2\n",
11 | "\n",
12 | "image = cv2.imread('input/1.png')\n",
13 | "image = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)\n",
14 | "\n",
15 | "alpha = 1\n",
16 | "beta = -100\n",
17 | "\n",
18 | "adjusted = cv2.convertScaleAbs(image, alpha=alpha, beta=beta)\n",
19 | "\n",
20 | "cv2.imshow('original', image)\n",
21 | "cv2.imshow('adjusted', adjusted)\n",
22 | "cv2.waitKey()"
23 | ]
24 | }
25 | ],
26 | "metadata": {
27 | "kernelspec": {
28 | "display_name": "Python 3 (ipykernel)",
29 | "language": "python",
30 | "name": "python3"
31 | },
32 | "language_info": {
33 | "codemirror_mode": {
34 | "name": "ipython",
35 | "version": 3
36 | },
37 | "file_extension": ".py",
38 | "mimetype": "text/x-python",
39 | "name": "python",
40 | "nbconvert_exporter": "python",
41 | "pygments_lexer": "ipython3",
42 | "version": "3.9.0"
43 | }
44 | },
45 | "nbformat": 4,
46 | "nbformat_minor": 5
47 | }
48 |
--------------------------------------------------------------------------------
/Mini project 3/Readme.md:
--------------------------------------------------------------------------------
1 | ## Company users management
2 | ♦ Log in with the correct username and password:
3 |
4 |
5 | 
6 |
7 |
8 | 
9 |
10 | ♦ Showing the users list, with options to modify users and add a new user:
11 |
12 |
13 | 
14 |
15 |
16 | 
17 |
18 |
19 | 
20 |
21 |
22 | 
23 |
24 |
25 | 
26 |
27 |
28 | 
29 |
--------------------------------------------------------------------------------
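main.py and the .ui files are not reproduced in this dump. A minimal sketch of how the window code might call the helpers defined in mydatabase.py (shown earlier) to add a user and refresh the list; the sample values are hypothetical:

```python
import mydatabase

# Add a user (hypothetical sample data), then re-read the Users table
# to refresh whatever list widget the main window displays.
mydatabase.Add("Jane", "Doe", "0012345678", "1995-01-01")

for row in mydatabase.GetAll():
    print(row)
```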
/assignment-28/Readme.md:
--------------------------------------------------------------------------------
1 | ## Sudoku detector using OpenCV
2 |
3 | The following code is a Python program that can be run at the command line. It takes an input image and an output name from the user:
4 |
5 | ◾ Convert BGR color to gray
6 |
7 | ◾ Use GaussianBlur to reduce noise
8 |
9 | 
10 |
11 | ◾ Adaptive threshold to obtain a binary image
12 |
13 | 
14 |
15 |
16 | ◾ Find contours and filter for the largest contour
17 |
18 | ◾ Find the shape with 4 points using approxPolyDP
19 |
20 | ◾ Draw contours around the Sudoku table
21 |
22 | 
23 |
24 |
25 | ◾ Crop the table and save it as a JPG image
26 |
27 |
28 | 
29 |
30 | ## Webcam Sudoku Detector.py
31 |
32 |
33 | This Python project can recognize a Sudoku table in a live webcam feed.
34 |
35 | By pressing the s key, the Sudoku image is cropped and saved.
36 |
37 | 
38 |
39 | 
40 |
--------------------------------------------------------------------------------
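A hypothetical command-line invocation, based on the argparse flags defined in Sudoku.py below and the file names in the Input/Output folders shown in the tree:

```
python Sudoku.py --input Input/sudoku1.jpg --filter_size 7 --output Output/sudoku4.jpg
```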
/assignment-28/Sudoku.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import cv2
3 | import matplotlib.pyplot as plt
4 | import imutils
5 | from imutils import contours
6 | from imutils.perspective import four_point_transform
7 |
8 | parser = argparse.ArgumentParser(description="Sudoku Detector version 1.0")
9 | parser.add_argument("--input" , type=str , help="path of your input image")
10 | parser.add_argument("--filter_size" , type=int , help="size of GaussianBlur mask", default= 7)
11 | parser.add_argument("--output" , type=str , help="path of your output image")
12 | args = parser.parse_args()
13 |
14 | img = cv2.imread(args.input)
15 | gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
16 |
17 | blurred_img = cv2.GaussianBlur(gray_img , (args.filter_size,args.filter_size), 3)
18 |
19 | thresh = cv2.adaptiveThreshold(blurred_img , 255 , cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV , 11 ,2)
20 |
21 | contours = cv2.findContours(thresh, cv2.RETR_EXTERNAL , cv2.CHAIN_APPROX_SIMPLE)
22 |
23 | contours = imutils.grab_contours(contours)
24 | contours = sorted(contours , key = cv2.contourArea , reverse = True)
25 |
26 | sudoku_contour = None
27 |
28 | for contour in contours:
29 |     epsilon = 0.02 * cv2.arcLength(contour , True)
30 |     approx = cv2.approxPolyDP(contour , epsilon , True)
31 |
32 |     if len(approx) == 4:
33 |         sudoku_contour = approx
34 |         break
35 |
36 | if sudoku_contour is None :
37 |     print("Not found")
38 | else:
39 |     result = cv2.drawContours(img , [sudoku_contour] , -1 ,(0,255,0) , 8)
40 |     puzzle = four_point_transform(img, sudoku_contour.reshape(4,2))
41 |     warped = four_point_transform(gray_img, sudoku_contour.reshape(4,2))
42 |     cv2.imshow("Puzzle" ,puzzle)
43 |     cv2.imwrite(args.output, puzzle)
--------------------------------------------------------------------------------
/assignment-28/Webcam Sudoku Detector.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import cv2
3 | import matplotlib.pyplot as plt
4 | import imutils
5 | from imutils import contours
6 | from imutils.perspective import four_point_transform
7 |
8 | cam = cv2.VideoCapture(0)
9 |
10 | while True:
11 |
12 |     ret,frame = cam.read()
13 |
14 |     if not ret:
15 |         break
16 |
17 |     gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
18 |
19 |     blurred_img = cv2.GaussianBlur(gray_frame , (3,3), 3)
20 |
21 |     thresh = cv2.adaptiveThreshold(blurred_img , 255 , cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV , 11 ,2)
22 |
23 |     contours = cv2.findContours(thresh, cv2.RETR_EXTERNAL , cv2.CHAIN_APPROX_SIMPLE)
24 |
25 |     contours = imutils.grab_contours(contours)
26 |     contours = sorted(contours , key = cv2.contourArea , reverse = True)
27 |
28 |     sudoku_contour = None
29 |
30 |     for contour in contours:
31 |         epsilon = 0.12 * cv2.arcLength(contour , True)
32 |         approx = cv2.approxPolyDP(contour , epsilon , True)
33 |
34 |         if len(approx) == 4:
35 |             sudoku_contour = approx
36 |             break
37 |
38 |     if sudoku_contour is None :
39 |         print("Not found")
40 |     else:
41 |         result = cv2.drawContours(frame , [sudoku_contour] , -1 ,(0,255,0) , 8)
42 |
43 |     cv2.imshow("Camera", frame)
44 |
45 |
46 |     if cv2.waitKey(1) == ord('s'):
47 |         if sudoku_contour is None :
48 |             print("Not found")
49 |         else:
50 |             puzzle = four_point_transform(frame, sudoku_contour.reshape(4,2))
51 |             warped = four_point_transform(gray_frame, sudoku_contour.reshape(4,2))
52 |             cv2.imwrite("Detected_Sudoku.jpg" , puzzle)
53 |
54 |     if cv2.waitKey(1) == ord('q'):
55 |         break
--------------------------------------------------------------------------------
/assignment-29/Color Picker.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from functools import partial
3 | from PySide6.QtWidgets import *
4 | from PySide6.QtUiTools import *
5 | from PySide6.QtCore import *
6 |
7 | loader = QUiLoader()
8 | app = QApplication(sys.argv)
9 | ui = loader.load("Input\color picker.ui", None)
10 | ui.setWindowTitle("Color Picker")
11 |
12 | rcode = ui.redSlider.value()
13 | gcode = ui.greenSlider.value()
14 | bcode = ui.blueSlider.value()
15 |
16 | def red_slider_changed(_,rcode):
17 |     ui.lableR.setText(f"{rcode}")
18 |     rcode = ui.redSlider.value()
19 |     gcode = ui.greenSlider.value()
20 |     bcode = ui.blueSlider.value()
21 |     ui.labelColor.setText(f"rgb({rcode} , {gcode} , {bcode})")
22 |     ui.labelColor.setStyleSheet(f"background-color: rgb({rcode},{gcode},{bcode});")
23 |
24 | def green_slider_changed(_,gcode):
25 |     ui.lableG.setText(f"{gcode}")
26 |     rcode = ui.redSlider.value()
27 |     gcode = ui.greenSlider.value()
28 |     bcode = ui.blueSlider.value()
29 |     ui.labelColor.setText(f"rgb({rcode} , {gcode} , {bcode})")
30 |     ui.labelColor.setStyleSheet(f"background-color: rgb({rcode},{gcode},{bcode});")
31 |
32 | def blue_slider_changed(_,bcode):
33 |     ui.lableB.setText(f"{bcode}")
34 |     rcode = ui.redSlider.value()
35 |     gcode = ui.greenSlider.value()
36 |     bcode = ui.blueSlider.value()
37 |     ui.labelColor.setText(f"rgb({rcode} , {gcode} , {bcode})")
38 |     ui.labelColor.setStyleSheet(f"background-color: rgb({rcode},{gcode},{bcode});")
39 |
40 |
41 | ui.redSlider.valueChanged.connect(partial(red_slider_changed,rcode))
42 | ui.greenSlider.valueChanged.connect(partial(green_slider_changed,gcode))
43 | ui.blueSlider.valueChanged.connect(partial(blue_slider_changed,bcode))
44 |
45 | ui.show()
46 | app.exec_()
47 |
--------------------------------------------------------------------------------
/Mini project 3/webcam.ui:
--------------------------------------------------------------------------------
1 |
2 |
3 | MainWindow
4 |
5 |
6 |
7 | 0
8 | 0
9 | 478
10 | 355
11 |
12 |
13 |
14 | MainWindow
15 |
16 |
17 |
18 | background-color: rgb(255, 145, 173);
19 |
20 |
21 |
22 |
23 | 6
24 | 10
25 |
26 |
27 |
28 | -
29 |
30 |
31 | QLayout::SetNoConstraint
32 |
33 |
-
34 |
35 |
36 | Click to open the webcam
37 |
38 |
39 |
40 | C:/Users/User/Downloads/—Pngtree—photo camera_4732850.pngC:/Users/User/Downloads/—Pngtree—photo camera_4732850.png
41 |
42 |
43 |
44 | -
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
--------------------------------------------------------------------------------
/assignment-26-2/numbers sepration/numbers sepration.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 14,
6 | "id": "2360fbea-e355-4299-87d3-9f2e5d5ada42",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import numpy as np\n",
11 | "import cv2\n",
12 | "import matplotlib.pyplot as plt"
13 | ]
14 | },
15 | {
16 | "cell_type": "code",
17 | "execution_count": 15,
18 | "id": "e8a5738d-6ca1-4ecf-ba59-0c3cc8dc09d0",
19 | "metadata": {},
20 | "outputs": [],
21 | "source": [
22 | "image = cv2.imread('input/mnist.png')\n",
23 | "image = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)"
24 | ]
25 | },
26 | {
27 | "cell_type": "code",
28 | "execution_count": 17,
29 | "id": "c9caa432-21ef-4512-b3ae-74a1764feafe",
30 | "metadata": {},
31 | "outputs": [],
32 | "source": [
33 |     "height, width = image.shape  # OpenCV gives (rows, cols)\n",
34 |     "cell_height = height // 50   # 50 digit rows in the sheet\n",
35 |     "cell_width = width // 100    # 100 digit columns"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 18,
41 | "id": "407d5143-08b6-412c-8a73-a528efa19cf4",
42 | "metadata": {},
43 | "outputs": [],
44 | "source": [
45 |     "counter = 0\n",
46 |     "\n",
47 |     "for i in range(0, height, cell_height):\n",
48 |     "    for j in range(0, width, cell_width):\n",
49 |     "        single_cell = image[i:i+cell_height, j:j+cell_width]\n",
50 |     "        if single_cell.shape == (cell_height, cell_width):\n",
51 |     "            cv2.imwrite(f\"output/cell{counter}.jpg\", single_cell)\n",
52 |     "            counter += 1"
53 | ]
54 | }
55 | ],
56 | "metadata": {
57 | "kernelspec": {
58 | "display_name": "Python 3 (ipykernel)",
59 | "language": "python",
60 | "name": "python3"
61 | },
62 | "language_info": {
63 | "codemirror_mode": {
64 | "name": "ipython",
65 | "version": 3
66 | },
67 | "file_extension": ".py",
68 | "mimetype": "text/x-python",
69 | "name": "python",
70 | "nbconvert_exporter": "python",
71 | "pygments_lexer": "ipython3",
72 | "version": "3.9.0"
73 | }
74 | },
75 | "nbformat": 4,
76 | "nbformat_minor": 5
77 | }
78 |
--------------------------------------------------------------------------------
/assignment-24/SolvePnPHeadPoseEstimation.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 | import sys
4 |
5 |
6 | class HeadPoseEstimator:
7 |
8 | def __init__(self, filepath, W, H) -> None:
9 | # camera matrix
10 | matrix = np.array([[W, 0, W/2.0],
11 | [0, W, H/2.0],
12 | [0, 0, 1]])
13 |
14 | # load pre-defined 3d object points and mapping indexes
15 | obj, index = np.load(filepath, allow_pickle=True)
16 | obj = obj.T
17 |
18 | def solve_pnp_wrapper(obj, index, matrix):
19 | def solve_pnp(shape):
20 | return cv2.solvePnP(obj, shape[index], matrix, None)
21 | return solve_pnp
22 |
23 | self._solve_pnp = solve_pnp_wrapper(obj, index, matrix)
24 |
25 | def get_head_pose(self, shape):
26 | if len(shape) != 106:
27 | raise RuntimeError('Unsupported shape format')
28 |
29 | _, rotation_vec, translation_vec = self._solve_pnp(shape)
30 |
31 | rotation_mat = cv2.Rodrigues(rotation_vec)[0]
32 | pose_mat = cv2.hconcat((rotation_mat, translation_vec))
33 | euler_angle = cv2.decomposeProjectionMatrix(pose_mat)[-1]
34 |
35 | return euler_angle
36 |
37 | @staticmethod
38 | def draw_axis(img, euler_angle, center, size=80, thickness=3,
39 | angle_const=np.pi/180, copy=False):
40 | if copy:
41 | img = img.copy()
42 |
43 | euler_angle *= angle_const
44 | sin_pitch, sin_yaw, sin_roll = np.sin(euler_angle)
45 | cos_pitch, cos_yaw, cos_roll = np.cos(euler_angle)
46 |
47 | axis = np.array([
48 | [cos_yaw * cos_roll,
49 | cos_pitch * sin_roll + cos_roll * sin_pitch * sin_yaw],
50 | [-cos_yaw * sin_roll,
51 | cos_pitch * cos_roll - sin_pitch * sin_yaw * sin_roll],
52 | [sin_yaw,
53 | -cos_yaw * sin_pitch]
54 | ])
55 |
56 | axis *= size
57 | axis += center
58 |
59 |         axis = axis.astype(np.int32)
60 |         tp_center = tuple(center.astype(np.int32))
61 |
62 | cv2.line(img, tp_center, tuple(axis[0]), (0, 0, 255), thickness)
63 | cv2.line(img, tp_center, tuple(axis[1]), (0, 255, 0), thickness)
64 | cv2.line(img, tp_center, tuple(axis[2]), (255, 0, 0), thickness)
65 |
66 | return img
67 |
68 |
69 | def main(filename):
70 |
71 | from TFLiteFaceDetector import UltraLightFaceDetecion
72 | from TFLiteFaceAlignment import CoordinateAlignmentModel
73 |
74 | cap = cv2.VideoCapture(filename)
75 |
76 | fd = UltraLightFaceDetecion("weights/RFB-320.tflite",
77 | conf_threshold=0.95)
78 | fa = CoordinateAlignmentModel("weights/coor_2d106.tflite")
79 | hp = HeadPoseEstimator("weights/head_pose_object_points.npy",
80 | cap.get(3), cap.get(4))
81 |
82 | color = (125, 255, 125)
83 |
84 | while True:
85 | ret, frame = cap.read()
86 |
87 | if not ret:
88 | break
89 |
90 | bboxes, _ = fd.inference(frame)
91 |
92 | for pred in fa.get_landmarks(frame, bboxes):
93 |             for p in np.round(pred).astype(np.int32):
94 | cv2.circle(frame, tuple(p), 1, color, 1, cv2.LINE_AA)
95 | face_center = np.mean(pred, axis=0)
96 | euler_angle = hp.get_head_pose(pred).flatten()
97 | print(*euler_angle)
98 | hp.draw_axis(frame, euler_angle, face_center)
99 |
100 | cv2.imshow("result", frame)
101 | if cv2.waitKey(0) == ord('q'):
102 | break
103 |
104 |
105 | if __name__ == '__main__':
106 | main(sys.argv[1])
107 |
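108 | # Usage (standalone): python SolvePnPHeadPoseEstimation.py <video file>
109 | # Draws the 106 landmarks and a pitch/yaw/roll axis; any key advances a frame, 'q' quits.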
--------------------------------------------------------------------------------
/assignment-23/Main.py:
--------------------------------------------------------------------------------
1 | import cv2
2 |
3 | blurred = False
4 | rotated = False
5 | pixelate = False
6 | EmojiOnFace = False
7 | SunglassesMustache = False
8 |
9 | face_detector = cv2.CascadeClassifier("src/haarcascade_frontalface_default.xml")
10 | eyes_detector = cv2.CascadeClassifier('src/frontalEyes35x16.xml')
11 | mouth_detector = cv2.CascadeClassifier('src/mouth.xml')
12 |
13 | sticker1 = cv2.imread("img/emoji1.png")
14 | lip = cv2.imread('img/lips.png')
15 | sunglasses = cv2.imread('img/sunglasses.png')
16 |
17 | video_cap = cv2.VideoCapture(0)
18 |
19 | while True:
20 |
21 | ret,frame = video_cap.read()
22 | if ret == False:
23 | break
24 |
25 | faces = face_detector.detectMultiScale(frame, scaleFactor=1.3, minNeighbors=5)
26 |
27 | for (x, y, w, h) in faces:
28 |
29 | face_pos = frame[y:y+h, x:x+w]
30 | eyes = eyes_detector.detectMultiScale(face_pos, scaleFactor=1.2, minNeighbors=5)
31 | mouth = mouth_detector.detectMultiScale(face_pos, scaleFactor=1.3, minNeighbors=50)
32 |
33 | if EmojiOnFace:
34 |
35 | sticker = cv2.resize(sticker1, (w,h))
36 | img2gray = cv2.cvtColor(sticker,cv2.COLOR_BGR2GRAY)
37 |
38 | _,mask= cv2.threshold(img2gray,10,255,cv2.THRESH_BINARY)
39 | mask_inv = cv2.bitwise_not(mask)
40 |
41 | background1 = cv2.bitwise_and(face_pos,face_pos,mask = mask_inv)
42 | mask_sticker = cv2.bitwise_and(sticker, sticker, mask=mask)
43 |
44 | finalsticker =cv2.add(mask_sticker ,background1)
45 |             frame[y:y+h, x:x+w] = finalsticker
46 |
47 | if SunglassesMustache:
48 | for (ex, ey, ew, eh) in eyes:
49 |
50 | sunglasses_resize = cv2.resize(sunglasses, (ew,eh))
51 | sunglasses2gray = cv2.cvtColor(sunglasses_resize,cv2.COLOR_BGR2GRAY)
52 |
53 | _,mask= cv2.threshold(sunglasses2gray,10,255,cv2.THRESH_BINARY)
54 | mask_inv = cv2.bitwise_not(mask)
55 |
56 | eyes_pos = cv2.bitwise_and(face_pos[ey:ey+eh, ex:ex+ew],face_pos[ey:ey+eh, ex:ex+ew],mask = mask)
57 | mask_glasses = cv2.bitwise_and(sunglasses_resize, sunglasses_resize, mask=mask_inv)
58 |
59 | finalsticker =cv2.add(mask_glasses ,eyes_pos)
60 | face_pos[ey:ey+eh, ex:ex+ew] = finalsticker
61 |
62 | for (mx, my, mw, mh) in mouth:
63 |
64 | mouth_resize = cv2.resize(lip, (mw,mh))
65 | mouth2gray = cv2.cvtColor(mouth_resize,cv2.COLOR_BGR2GRAY)
66 |
67 | _,mask = cv2.threshold(mouth2gray,10,255,cv2.THRESH_BINARY)
68 | mask_inv = cv2.bitwise_not(mask)
69 |
70 | nose_pos = cv2.bitwise_and(face_pos[my:my+mh, mx:mx+mw],face_pos[my:my+mh, mx:mx+mw],mask = mask_inv)
71 | mask_mouth = cv2.bitwise_and(mouth_resize, mouth_resize, mask=mask)
72 |
73 | finalmouth =cv2.add(mask_mouth ,nose_pos)
74 | face_pos[my:my+mh, mx:mx+mw] = finalmouth
75 |
76 | if blurred:
77 |             frame[y:y+h, x:x+w] = cv2.blur(frame[y:y+h, x:x+w],(30,30))
78 |
79 | if pixelate:
80 |             # x, y, w, h already refer to the current face; no inner loop over faces is needed
81 |             square = cv2.resize(frame[y:y+h, x:x+w], (10,10))
82 |             output = cv2.resize(square, (w, h), interpolation=cv2.INTER_NEAREST)
83 |             frame[y:y+h, x:x+w] = output
84 |
85 | if rotated:
86 |             frame[y:y+h, x:x+w] = cv2.rotate(frame[y:y+h, x:x+w], cv2.ROTATE_180)
87 |
88 |
89 | key = cv2.waitKey(1)
90 |
91 | if key == ord('1'):
92 | EmojiOnFace = not EmojiOnFace
93 |
94 | if key == ord('2'):
95 | SunglassesMustache = not SunglassesMustache
96 |
97 | if key == ord('3'):
98 | blurred = not blurred
99 |
100 | if key == ord('4'):
101 | pixelate = not pixelate
102 |
103 | if key == ord('5'):
104 | rotated = not rotated
105 |
106 |     cv2.imshow('Camera', frame)
107 | 
108 |
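109 | # Keyboard toggles while the camera window is focused:
110 | # 1 = emoji over the face, 2 = sunglasses + lips stickers, 3 = blur, 4 = pixelate, 5 = rotate 180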
--------------------------------------------------------------------------------
/assignment-24/vtuber_link_start.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 |
3 | import numpy as np
4 | import service
5 | import cv2
6 | import sys
7 | import socketio
8 |
9 | from threading import Thread
10 | from queue import Queue
11 |
12 |
13 | cap = cv2.VideoCapture(sys.argv[1])
14 |
15 | fd = service.UltraLightFaceDetecion("weights/RFB-320.tflite",
16 | conf_threshold=0.98)
17 | fa = service.CoordinateAlignmentModel("weights/coor_2d106.tflite")
18 | hp = service.HeadPoseEstimator("weights/head_pose_object_points.npy",
19 | cap.get(3), cap.get(4))
20 | gs = service.IrisLocalizationModel("weights/iris_localization.tflite")
21 |
22 | QUEUE_BUFFER_SIZE = 18
23 |
24 | box_queue = Queue(maxsize=QUEUE_BUFFER_SIZE)
25 | landmark_queue = Queue(maxsize=QUEUE_BUFFER_SIZE)
26 | iris_queue = Queue(maxsize=QUEUE_BUFFER_SIZE)
27 | upstream_queue = Queue(maxsize=QUEUE_BUFFER_SIZE)
28 |
29 | # ======================================================
30 |
31 | def face_detection():
32 | while True:
33 | ret, frame = cap.read()
34 |
35 | if not ret:
36 | break
37 |
38 | face_boxes, _ = fd.inference(frame)
39 | box_queue.put((frame, face_boxes))
40 |
41 |
42 | def face_alignment():
43 | while True:
44 | frame, boxes = box_queue.get()
45 | landmarks = fa.get_landmarks(frame, boxes)
46 | landmark_queue.put((frame, landmarks))
47 |
48 |
49 | def iris_localization(YAW_THD=45):
50 | sio = socketio.Client()
51 |
52 | sio.connect("http://127.0.0.1:6789", namespaces='/kizuna')
53 |
54 | while True:
55 | frame, preds = landmark_queue.get()
56 |
57 | for landmarks in preds:
58 | # calculate head pose
59 | euler_angle = hp.get_head_pose(landmarks).flatten()
60 | pitch, yaw, roll = euler_angle
61 |
62 | eye_starts = landmarks[[35, 89]]
63 | eye_ends = landmarks[[39, 93]]
64 | eye_centers = landmarks[[34, 88]]
65 | eye_lengths = (eye_ends - eye_starts)[:, 0]
66 |
67 | pupils = eye_centers.copy()
68 |
69 | if yaw > -YAW_THD:
70 | iris_left = gs.get_mesh(frame, eye_lengths[0], eye_centers[0])
71 | pupils[0] = iris_left[0]
72 |
73 | if yaw < YAW_THD:
74 | iris_right = gs.get_mesh(frame, eye_lengths[1], eye_centers[1])
75 | pupils[1] = iris_right[0]
76 |
77 | poi = eye_starts, eye_ends, pupils, eye_centers
78 |
79 | theta, pha, _ = gs.calculate_3d_gaze(poi)
80 | mouth_open_percent = (
81 | landmarks[60, 1] - landmarks[62, 1]) / (landmarks[53, 1] - landmarks[71, 1])
82 | left_eye_status = (
83 | landmarks[33, 1] - landmarks[40, 1]) / eye_lengths[0]
84 | right_eye_status = (
85 | landmarks[87, 1] - landmarks[94, 1]) / eye_lengths[1]
86 | result_string = {'euler': (pitch, -yaw, -roll),
87 | 'eye': (theta.mean(), pha.mean()),
88 | 'mouth': mouth_open_percent,
89 | 'blink': (left_eye_status, right_eye_status)}
90 | sio.emit('result_data', result_string, namespace='/kizuna')
91 | upstream_queue.put((frame, landmarks, euler_angle))
92 | break
93 |
94 |
95 | def draw(color=(125, 255, 0), thickness=2):
96 | while True:
97 | frame, landmarks, euler_angle = upstream_queue.get()
98 |
99 |         for p in np.round(landmarks).astype(np.int32):
100 | cv2.circle(frame, tuple(p), 1, color, thickness, cv2.LINE_AA)
101 |
102 | face_center = np.mean(landmarks, axis=0)
103 | hp.draw_axis(frame, euler_angle, face_center)
104 |
105 | frame = cv2.resize(frame, (960, 720))
106 |
107 | cv2.imshow('result', frame)
108 | cv2.waitKey(1)
109 |
110 |
111 | draw_thread = Thread(target=draw)
112 | draw_thread.start()
113 |
114 | iris_thread = Thread(target=iris_localization)
115 | iris_thread.start()
116 |
117 | alignment_thread = Thread(target=face_alignment)
118 | alignment_thread.start()
119 |
120 | face_detection()
121 | cap.release()
122 | cv2.destroyAllWindows()
123 |
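124 | # Usage: python vtuber_link_start.py <video file>
125 | # Requires a socket.io server on http://127.0.0.1:6789 (namespace /kizuna); the script
126 | # streams head pose, gaze, mouth-open and blink values to it as 'result_data' events.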
--------------------------------------------------------------------------------
/assignment-24/TFLiteFaceDetector.py:
--------------------------------------------------------------------------------
1 | from functools import partial
2 | import cv2
3 | import tensorflow as tf
4 | import numpy as np
5 |
6 |
7 | class UltraLightFaceDetecion():
8 | def __init__(self, filepath, input_size=(320, 240), conf_threshold=0.6,
9 | center_variance=0.1, size_variance=0.2,
10 | nms_max_output_size=200, nms_iou_threshold=0.3) -> None:
11 |
12 | self._feature_maps = np.array([[40, 30], [20, 15], [10, 8], [5, 4]])
13 | self._min_boxes = [[10, 16, 24], [32, 48], [64, 96], [128, 192, 256]]
14 |
15 | self._resize = partial(cv2.resize, dsize=input_size)
16 | self._input_size = np.array(input_size)[:, None]
17 |
18 | self._anchors_xy, self._anchors_wh = self._generate_anchors()
19 | self._conf_threshold = conf_threshold
20 | self._center_variance = center_variance
21 | self._size_variance = size_variance
22 | self._nms = partial(tf.image.non_max_suppression,
23 | max_output_size=nms_max_output_size,
24 | iou_threshold=nms_iou_threshold)
25 |
26 | # tflite model init
27 | self._interpreter = tf.lite.Interpreter(model_path=filepath)
28 | self._interpreter.allocate_tensors()
29 |
30 | # model details
31 | input_details = self._interpreter.get_input_details()
32 | output_details = self._interpreter.get_output_details()
33 |
34 | # inference helper
35 | self._set_input_tensor = partial(self._interpreter.set_tensor,
36 | input_details[0]["index"])
37 | self._get_boxes_tensor = partial(self._interpreter.get_tensor,
38 | output_details[0]["index"])
39 | self._get_scores_tensor = partial(self._interpreter.get_tensor,
40 | output_details[1]["index"])
41 |
42 | def _generate_anchors(self):
43 | anchors = []
44 | for feature_map_w_h, min_box in zip(self._feature_maps, self._min_boxes):
45 |
46 | wh_grid = min_box / self._input_size
47 | wh_grid = np.tile(wh_grid.T, (np.prod(feature_map_w_h), 1))
48 |
49 | xy_grid = np.meshgrid(range(feature_map_w_h[0]),
50 | range(feature_map_w_h[1]))
51 | xy_grid = np.add(xy_grid, 0.5)
52 |
53 | xy_grid /= feature_map_w_h[..., None, None]
54 |
55 | xy_grid = np.stack(xy_grid, axis=-1)
56 | xy_grid = np.tile(xy_grid, [1, 1, len(min_box)])
57 | xy_grid = xy_grid.reshape(-1, 2)
58 |
59 | prior = np.concatenate((xy_grid, wh_grid), axis=-1)
60 | anchors.append(prior)
61 |
62 | anchors = np.concatenate(anchors, axis=0)
63 | anchors = np.clip(anchors, 0.0, 1.0)
64 |
65 | return anchors[:, :2], anchors[:, 2:]
66 |
67 | def _pre_processing(self, img):
68 | resized = self._resize(img)
69 | image_rgb = resized[..., ::-1]
70 | image_norm = image_rgb.astype(np.float32)
71 | cv2.normalize(image_norm, image_norm,
72 | alpha=-1, beta=1, norm_type=cv2.NORM_MINMAX)
73 | return image_norm[None, ...]
74 |
75 | def inference(self, img):
76 | # BGR image to tensor
77 | input_tensor = self._pre_processing(img)
78 |
79 | # set tensor and invoke
80 | self._set_input_tensor(input_tensor)
81 | self._interpreter.invoke()
82 |
83 | # get results
84 | boxes = self._get_boxes_tensor()[0]
85 | scores = self._get_scores_tensor()[0]
86 |
87 | # decode boxes to corner format
88 | boxes, scores = self._post_processing(boxes, scores)
89 | boxes *= np.tile(img.shape[1::-1], 2)
90 |
91 | return boxes, scores
92 |
93 | def _post_processing(self, boxes, scores):
94 | # bounding box regression
95 | boxes = self._decode_regression(boxes)
96 | scores = scores[:, 1]
97 |
98 | # confidence threshold filter
99 | conf_mask = self._conf_threshold < scores
100 | boxes, scores = boxes[conf_mask], scores[conf_mask]
101 |
102 | # non-maximum suppression
103 | nms_mask = self._nms(boxes=boxes, scores=scores)
104 | boxes = np.take(boxes, nms_mask, axis=0)
105 |
106 | return boxes, scores
107 |
108 | def _decode_regression(self, reg):
109 | # bounding box regression
110 | center_xy = reg[:, :2] * self._center_variance * \
111 | self._anchors_wh + self._anchors_xy
112 | center_wh = np.exp(
113 | reg[:, 2:] * self._size_variance) * self._anchors_wh / 2
114 |
115 | # center to corner
116 | start_xy = center_xy - center_wh
117 | end_xy = center_xy + center_wh
118 |
119 | boxes = np.concatenate((start_xy, end_xy), axis=-1)
120 | boxes = np.clip(boxes, 0.0, 1.0)
121 |
122 | return boxes
123 |
124 |
125 | if __name__ == '__main__':
126 | import sys
127 | import time
128 |
129 | fd = UltraLightFaceDetecion("weights/RFB-320.tflite",
130 | conf_threshold=0.88)
131 |
132 | cap = cv2.VideoCapture(sys.argv[1])
133 | color = (125, 255, 125)
134 |
135 | while True:
136 | ret, frame = cap.read()
137 |
138 | if not ret:
139 | break
140 |
141 | start_time = time.perf_counter()
142 | boxes, scores = fd.inference(frame)
143 | print(time.perf_counter() - start_time)
144 |
145 | for det in boxes.astype(np.int32):
146 | cv2.rectangle(frame, (det[0], det[1]),
147 | (det[2], det[3]), (2, 255, 0), 1)
148 |
149 | cv2.imshow("result", frame)
150 | if cv2.waitKey(1) == ord('q'):
151 | break
152 |
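153 | # Usage (standalone): python TFLiteFaceDetector.py <video file>
154 | # Prints the per-frame inference time and draws the detected face boxes; press 'q' to quit.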
--------------------------------------------------------------------------------
/Mini project 3/log in.ui:
--------------------------------------------------------------------------------
[Qt Designer .ui XML; markup stripped in this dump, only text values survive. Recoverable content: a 476x349 MainWindow titled "MainWindow" with background-color: rgb(255, 145, 173); widgets include a bold "Mahdis Company" header (17 pt), a bold "Login :" label (13 pt), "Username:" and "Password:" labels, an "Enter" button (12 pt), a 120x100 image label using the resource :/newPrefix/kindpng_170301.png, and assorted horizontal and vertical spacers.]
--------------------------------------------------------------------------------
/Mini project 3/new user.ui:
--------------------------------------------------------------------------------
[Qt Designer .ui XML; markup stripped in this dump, only text values survive. Recoverable content: a 478x344 MainWindow titled "MainWindow" with background-color: rgb(255, 145, 173); widgets include a bold "New Account:" header (17 pt), centre-aligned labels "Name :", "Last name :", "National code :" and "Birthdate :", an "Add this account" button, a button carrying the icon C:/Users/User/Downloads/—Pngtree—photo camera_4732850.png, and several vertical spacers.]
--------------------------------------------------------------------------------
/Mini project 3/main.ui:
--------------------------------------------------------------------------------
[Qt Designer .ui XML; markup stripped in this dump, only text values survive. Recoverable content: a 478x354 MainWindow titled "MainWindow" with background-color: rgb(255, 145, 173); widgets include a bold "Users List:" header (17 pt), labels "User ID :", "Name :", "Lastname :" and "Userimg :", an "Add new account" button, and a set of nested layouts, lines and spacers that main.py fills with one row per database user at runtime.]
--------------------------------------------------------------------------------
/assignment-24/TFLiteIrisLocalization.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import tensorflow as tf
3 | import numpy as np
4 | from functools import partial
5 |
6 |
7 | class IrisLocalizationModel():
8 |
9 | def __init__(self, filepath):
10 | # Load the TFLite model and allocate tensors.
11 | self._interpreter = tf.lite.Interpreter(model_path=filepath)
12 | self._interpreter.allocate_tensors()
13 |
14 | # model details
15 | input_details = self._interpreter.get_input_details()
16 | output_details = self._interpreter.get_output_details()
17 |
18 | # inference helper
19 | self._set_input_tensor = partial(self._interpreter.set_tensor,
20 | input_details[0]["index"])
21 | self._get_output_tensor = partial(self._interpreter.get_tensor,
22 | output_details[0]["index"])
23 |
24 | self.trans_distance = 32
25 | self.input_shape = (64, 64)
26 |
27 | def _preprocess(self, img, length, center, name=None):
28 | """Preprocess the image to meet the model's input requirement.
29 | Args:
30 | img: An image in default BGR format.
31 |
32 | Returns:
33 |             image_norm: The normalized image ready to be fed.
34 | """
35 |
36 | scale = 23 / length
37 | cx, cy = self.trans_distance - scale * center
38 |
39 | M = np.array([[scale, 0, cx], [0, scale, cy]])
40 |
41 | resized = cv2.warpAffine(img, M, self.input_shape, borderValue=0.0)
42 |
43 | if name is not None:
44 | cv2.imshow(name, resized)
45 |
46 | image_rgb = cv2.cvtColor(resized, cv2.COLOR_BGR2RGB)
47 | image_norm = image_rgb.astype(np.float32)
48 | cv2.normalize(image_norm, image_norm, alpha=-1,
49 | beta=1, norm_type=cv2.NORM_MINMAX)
50 |
51 | return image_norm, M
52 |
53 | def get_mesh(self, image, length, center, name=None):
54 | """Detect the face mesh from the image given.
55 | Args:
56 | image: An image in default BGR format.
57 |
58 | Returns:
59 | mesh: An eyebrow mesh, normalized.
60 | iris: Iris landmarks.
61 | """
62 |
63 | # Preprocess the image before sending to the network.
64 | image, M = self._preprocess(image, length, center, name)
65 |
66 | image = tf.image.convert_image_dtype(image, tf.float32)
67 | image = image[tf.newaxis, :]
68 |
69 | # The actual detection.
70 | self._set_input_tensor(image)
71 | self._interpreter.invoke()
72 |
73 | # Save the results.
74 | iris = self._get_output_tensor()[0]
75 |
76 | iris = iris.reshape(-1, 3)
77 | iris[:, 2] = 1
78 |
79 | iM = cv2.invertAffineTransform(M)
80 |
81 | return iris @ iM.T
82 |
83 | @staticmethod
84 | def draw_pupil(iris, frame, color=(0, 0, 255), thickness=2):
85 | pupil = iris[0]
86 | radius = np.linalg.norm(iris[1:] - iris[0], axis=1)
87 |
88 | pupil = pupil.astype(int)
89 | radius = int(max(radius))
90 |
91 | cv2.circle(frame, tuple(pupil), radius, color, thickness, cv2.LINE_AA)
92 |
93 | return pupil, radius
94 |
95 | @staticmethod
96 | def draw_eye_markers(landmarks, frame, close=True, color=(0, 255, 255), thickness=2):
97 | landmarks = landmarks.astype(np.int32)
98 | cv2.polylines(frame, landmarks, close, color, thickness, cv2.LINE_AA)
99 |
100 | @staticmethod
101 | def calculate_3d_gaze(poi, scale=256):
102 | SIN_LEFT_THETA = 2 * np.sin(np.pi / 4)
103 | SIN_UP_THETA = np.sin(np.pi / 6)
104 |
105 | starts, ends, pupils, centers = poi
106 |
107 | eye_length = np.linalg.norm(starts - ends, axis=1)
108 | ic_distance = np.linalg.norm(pupils - centers, axis=1)
109 | zc_distance = np.linalg.norm(pupils - starts, axis=1)
110 |
111 | s0 = (starts[:, 1] - ends[:, 1]) * pupils[:, 0]
112 | s1 = (starts[:, 0] - ends[:, 0]) * pupils[:, 1]
113 | s2 = starts[:, 0] * ends[:, 1]
114 | s3 = starts[:, 1] * ends[:, 0]
115 |
116 | delta_y = (s0 - s1 + s2 - s3) / eye_length / 2
117 | delta_x = np.sqrt(abs(ic_distance**2 - delta_y**2))
118 |
119 | delta = np.array((delta_x * SIN_LEFT_THETA,
120 | delta_y * SIN_UP_THETA))
121 | delta /= eye_length
122 | theta, pha = np.arcsin(delta)
123 |
124 | inv_judge = zc_distance**2 - delta_y**2 < eye_length**2 / 4
125 |
126 | delta[0, inv_judge] *= -1
127 | theta[inv_judge] *= -1
128 | delta *= scale
129 |
130 | return theta, pha, delta.T
131 |
132 |
133 | if __name__ == "__main__":
134 | import sys
135 | from SolvePnPHeadPoseEstimation import HeadPoseEstimator
136 | from TFLiteFaceAlignment import CoordinateAlignmentModel
137 | from TFLiteFaceDetector import UltraLightFaceDetecion
138 |
139 | gpu_ctx = -1
140 | video = sys.argv[1]
141 | YAW_THD = 45
142 |
143 | cap = cv2.VideoCapture(video)
144 |
145 | fd = UltraLightFaceDetecion("weights/RFB-320.tflite",
146 | conf_threshold=0.9)
147 | fa = CoordinateAlignmentModel("weights/coor_2d106.tflite")
148 | hp = HeadPoseEstimator("weights/head_pose_object_points.npy",
149 | cap.get(3), cap.get(4))
150 | gs = IrisLocalizationModel("weights/iris_localization.tflite")
151 |
152 | counter = 0
153 |
154 | while True:
155 | ret, frame = cap.read()
156 |
157 | if not ret:
158 | break
159 |
160 | # frame = frame[:480, 380:920, :] # dress
161 | # frame = cv2.resize(frame, (960, 1080))
162 |
163 | bboxes, _ = fd.inference(frame)
164 |
165 | for landmarks in fa.get_landmarks(frame, bboxes):
166 | # calculate head pose
167 | euler_angle = hp.get_head_pose(landmarks)
168 | pitch, yaw, roll = euler_angle[:, 0]
169 |
170 | eye_markers = np.take(landmarks, fa.eye_bound, axis=0)
171 |
172 | # eye_centers = np.average(eye_markers, axis=1)
173 | eye_centers = landmarks[[34, 88]]
174 |
175 | # eye_lengths = np.linalg.norm(landmarks[[39, 93]] - landmarks[[35, 89]], axis=1)
176 | eye_start = landmarks[[35, 89]]
177 | eye_end = landmarks[[39, 93]]
178 | eye_lengths = (eye_end - eye_start)[:, 0]
179 |
180 | pupils = eye_centers.copy()
181 |
182 | if yaw > -YAW_THD:
183 | iris_left = gs.get_mesh(frame, eye_lengths[0], eye_centers[0])
184 | pupils[0], _ = gs.draw_pupil(iris_left, frame, thickness=1)
185 |
186 | if yaw < YAW_THD:
187 | iris_right = gs.get_mesh(frame, eye_lengths[1], eye_centers[1])
188 | pupils[1], _ = gs.draw_pupil(iris_right, frame, thickness=1)
189 |
190 | poi = landmarks[[35, 89]], landmarks[[39, 93]], pupils, eye_centers
191 |
192 | theta, pha, _ = gs.calculate_3d_gaze(poi)
193 |
194 | # print(theta.mean(), pha.mean())
195 |
196 | gs.draw_eye_markers(eye_markers, frame, thickness=1)
197 |
198 | cv2.imshow('res', frame)
199 | # cv2.imwrite(f'./asset/orign_dress/img{counter:0>3}.png', frame)
200 |
201 | counter += 1
202 | if cv2.waitKey(0) == ord('q'):
203 | break
204 |
205 | cap.release()
206 | cv2.destroyAllWindows()
207 |
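208 | # Usage (standalone): python TFLiteIrisLocalization.py <video file>
209 | # Draws eye contours and pupil circles frame by frame; any key advances, 'q' quits.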
--------------------------------------------------------------------------------
/Mini project 3/main.py:
--------------------------------------------------------------------------------
1 | from os import name
2 | import sys
3 | from functools import partial
4 | from PySide6.QtWidgets import *
5 | from PySide6.QtUiTools import *
6 | from PySide6.QtCore import *
7 | from PySide6.QtGui import *
8 | import cv2
9 | import mydatabase
10 |
11 | class MainWindow(QMainWindow):
12 | def __init__(self):
13 | super().__init__()
14 | loader = QUiLoader()
15 | self.ui = loader.load("log in.ui", None)
16 | self.ui.show()
17 | self.ui.setWindowTitle("Log In")
18 | self.ui.label.setStyleSheet("image : url(kindpng_170301.png);")
19 | self.ui.enterbtn.clicked.connect(self.enterroom)
20 |
21 | def enterroom(self):
22 | if self.ui.lineEdit.text() == "admin" and self.ui.lineEdit_2.text() == "admin":
23 | self.ui = Userslist()
24 | else :
25 | self.msgBox = QMessageBox()
26 | self.msgBox.setText("Wrong username or password! \n Try again!")
27 | self.msgBox.exec()
28 | self.ui.lineEdit.setText("")
29 | self.ui.lineEdit_2.setText("")
30 |
31 | class Userslist(QWidget):
32 | def __init__(self):
33 | super().__init__()
34 | loader = QUiLoader()
35 | self.ui = loader.load("main.ui", None)
36 | self.ui.show()
37 | self.ReadFromDB()
38 | self.ui.setWindowTitle("Users list")
39 | self.ui.add_account.clicked.connect(self.next_page)
40 |
41 | def ReadFromDB(self):
42 | self.result = mydatabase.GetAll()
43 | self.editlist = []
44 |
45 | for i in range(len(self.result)):
46 |
47 | self.id_label = QLabel()
48 | self.id_label.setText(self.result[i][4])
49 | self.ui.IdLayout.addWidget(self.id_label, 5)
50 |
51 | self.new_label = QLabel()
52 | self.new_label.setText(self.result[i][0])
53 | self.ui.nameslayout.addWidget(self.new_label, 5)
54 |
55 | self.new_label = QLabel()
56 | self.new_label.setText(self.result[i][1])
57 | self.ui.Familylayout.addWidget(self.new_label, 5)
58 |
59 | self.edit_btn = QPushButton()
60 | self.edit_btn.setText('📝')
61 | self.ui.EditLayout.addWidget(self.edit_btn, 5)
62 | self.editlist.append(self.edit_btn)
63 |
64 | # self.img_label = QLabel()
65 | # self.ui.img_label.setStyleSheet("image : url(f'face_images/user{counter}.jpg');")
66 | # self.ui.ImageLayout.addWidget(self.img_label)
67 |
68 | for i in range(len(self.editlist)):
69 | self.editlist[i].clicked.connect(partial(self.edit_user_page,self.result[i][0],self.result[i][1],self.result[i][2],self.result[i][3],self.result[i][4]))
70 |
71 | def edit_user_page(self,name,lastname,nationalcode,birthdate,id):
72 | self.ui = EditUser(name,lastname,nationalcode,birthdate,id)
73 |
74 | def next_page(self):
75 | self.ui = NewUser()
76 |
77 | class NewUser(QWidget):
78 | def __init__(self):
79 | super().__init__()
80 | loader = QUiLoader()
81 | self.ui = loader.load("new user.ui", None)
82 | self.ui.show()
83 | self.ui.setWindowTitle("Add new account")
84 | self.ui.addAccount.clicked.connect(self.AddNewUser)
85 | self.ui.capButton.clicked.connect(self.OpenCam)
86 |
87 | def AddNewUser(self):
88 | # id = self.ui.id.text()
89 | name = self.ui.uname.text()
90 | family = self.ui.ufamily.text()
91 | nationalcode = self.ui.unational.text()
92 | birthdate = self.ui.ubirthdate.text()
93 | mydatabase.Add(name,family,nationalcode,birthdate)
94 |
95 |
96 | self.msgBox = QMessageBox()
97 | self.msgBox.setText("The User added successfully ;)")
98 | self.msgBox.exec()
99 | self.ui = Userslist()
100 |
101 | def OpenCam(self):
102 | self.ui = Webcam()
103 |
104 | class EditUser(QWidget):
105 | def __init__(self,name,lastname,nationalcode,birthdate,id):
106 | super().__init__()
107 | loader = QUiLoader()
108 | self.ui = loader.load("user edit.ui", None)
109 | self.ui.show()
110 | self.ui.setWindowTitle("Edit User Info")
111 | self.ui.DoneBtn.clicked.connect(partial(self.Edit_User,name,lastname,nationalcode,birthdate,id))
112 | self.ui.pushButton.clicked.connect(self.OpenCam)
113 |
114 | self.new_label = QLabel()
115 | self.new_label.setText(f"Name : {name}")
116 | self.family_label = QLabel()
117 | self.family_label.setText(f"Lastname : {lastname}")
118 | self.nation_label = QLabel()
119 | self.nation_label.setText(f"Nationalcode : {nationalcode}")
120 | self.birthdate_label = QLabel()
121 | self.birthdate_label.setText(f"Birthdate : {birthdate}")
122 | self.id_label = QLabel()
123 | self.id_label.setText(f"User ID : {id}")
124 |
125 | self.ui.verticalLayout_3.addWidget(self.id_label)
126 | self.ui.verticalLayout_3.addWidget(self.new_label)
127 | self.ui.verticalLayout_3.addWidget(self.family_label)
128 | self.ui.verticalLayout_3.addWidget(self.nation_label)
129 | self.ui.verticalLayout_3.addWidget(self.birthdate_label)
130 |
131 |
132 | def Edit_User(self,name,lastname,nationalcode,birthdate,id):
133 |
134 | if self.ui.uname.text() == "" :
135 | name = name
136 | else :
137 | name = self.ui.uname.text()
138 |
139 | if self.ui.ufamily.text() == "":
140 | family = lastname
141 | else:
142 | family = self.ui.ufamily.text()
143 |
144 | if self.ui.unational.text() == "":
145 | nationalcode1 = nationalcode
146 | else:
147 | nationalcode1 = self.ui.unational.text()
148 |
149 | if self.ui.ubirthdate.text() == "":
150 | birthdate = birthdate
151 | else:
152 | birthdate = self.ui.ubirthdate.text()
153 |
154 | mydatabase.Edit(name,family,nationalcode1,birthdate,id)
155 | # mydatabase.InsertImg()
156 |
157 | self.msgBox = QMessageBox()
158 | self.msgBox.setText("The User info modified successfully")
159 | self.msgBox.exec()
160 | self.ui = Userslist()
161 |
162 | def OpenCam(self):
163 | self.ui = Webcam()
164 |
165 | class Webcam(QWidget):
166 | def __init__(self):
167 | super().__init__()
168 | loader = QUiLoader()
169 | self.ui = loader.load("webcam.ui", None)
170 | self.ui.show()
171 | self.ui.setWindowTitle("Capture Image")
172 | self.timer = QTimer()
173 | self.timer.timeout.connect(self.viewCam)
174 | self.ui.captureButton.clicked.connect(self.controlTimer)
175 |
176 | def viewCam(self):
177 |         ret, self.image = self.cap.read()  # keep the BGR frame for cv2.imwrite later
178 |         rgb = cv2.cvtColor(self.image, cv2.COLOR_BGR2RGB)  # QImage.Format_RGB888 expects RGB order
179 |         height, width, channel = rgb.shape
180 |         step = channel * width
181 |         qImg = QImage(rgb.data, width, height, step, QImage.Format_RGB888)
182 |         self.ui.labelCam.setPixmap(QPixmap.fromImage(qImg))
183 |
184 | def controlTimer(self):
185 |         counter = 1  # NOTE: local to this call, so every capture overwrites face_images/user1.jpg
186 | if not self.timer.isActive():
187 | self.cap = cv2.VideoCapture(0)
188 | self.timer.start(1)
189 | self.ui.captureButton.setText("Capture")
190 |
191 | else:
192 | self.timer.stop()
193 | self.cap.release()
194 | self.ui.captureButton.setText("Done")
195 | cv2.imwrite(f"face_images/user{counter}.jpg", self.image)
196 | counter +=1
197 | self.msgBox = QMessageBox()
198 | self.msgBox.setText("Photo captured successfully")
199 | self.msgBox.exec()
200 | self.ui = Userslist()
201 |
202 | app = QApplication(sys.argv)
203 | window = MainWindow()
204 | app.exec()
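205 | # Log in with username "admin" and password "admin" (checked in MainWindow.enterroom) to reach
206 | # the users list; captured photos are written to face_images/user{counter}.jpg (see Webcam.controlTimer).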
--------------------------------------------------------------------------------
/assignment-24/WebcamTFLiteFaceAlignment.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 | import tensorflow as tf
4 | from functools import partial
5 | import time
6 | from TFLiteFaceDetector import UltraLightFaceDetecion
7 | import sys
8 |
9 | class CoordinateAlignmentModel():
10 | def __init__(self, filepath, marker_nums=106, input_size=(192, 192)):
11 | self._marker_nums = marker_nums
12 | self._input_shape = input_size
13 | self._trans_distance = self._input_shape[-1] / 2.0
14 |
15 | self.eye_bound = ([35, 41, 40, 42, 39, 37, 33, 36],
16 | [89, 95, 94, 96, 93, 91, 87, 90])
17 |
18 | # tflite model init
19 | self._interpreter = tf.lite.Interpreter(model_path=filepath)
20 | self._interpreter.allocate_tensors()
21 |
22 | # model details
23 | input_details = self._interpreter.get_input_details()
24 | output_details = self._interpreter.get_output_details()
25 |
26 | # inference helper
27 | self._set_input_tensor = partial(self._interpreter.set_tensor,
28 | input_details[0]["index"])
29 | self._get_output_tensor = partial(self._interpreter.get_tensor,
30 | output_details[0]["index"])
31 |
32 | self.pre_landmarks = None
33 |
34 | def _calibrate(self, pred, thd, skip=6):
35 | if self.pre_landmarks is not None:
36 | for i in range(pred.shape[0]):
37 | if abs(self.pre_landmarks[i, 0] - pred[i, 0]) > skip:
38 | self.pre_landmarks[i, 0] = pred[i, 0]
39 | elif abs(self.pre_landmarks[i, 0] - pred[i, 0]) > thd:
40 | self.pre_landmarks[i, 0] += pred[i, 0]
41 | self.pre_landmarks[i, 0] /= 2
42 |
43 | if abs(self.pre_landmarks[i, 1] - pred[i, 1]) > skip:
44 | self.pre_landmarks[i, 1] = pred[i, 1]
45 | elif abs(self.pre_landmarks[i, 1] - pred[i, 1]) > thd:
46 | self.pre_landmarks[i, 1] += pred[i, 1]
47 | self.pre_landmarks[i, 1] /= 2
48 | else:
49 | self.pre_landmarks = pred
50 |
51 | def _preprocessing(self, img, bbox, factor=3.0):
52 |         """Pre-processing of the BGR image. Adopting warp affine for face crop.
53 |
54 | Arguments
55 | ----------
56 | img {numpy.array} : the raw BGR image.
57 | bbox {numpy.array} : bounding box with format: {x1, y1, x2, y2, score}.
58 |
59 | Keyword Arguments
60 | ----------
61 | factor : max edge scale factor for bounding box cropping.
62 |
63 | Returns
64 | ----------
65 | inp : input tensor with NHWC format.
66 | M : warp affine matrix.
67 | """
68 |
69 | maximum_edge = max(bbox[2:4] - bbox[:2]) * factor
70 | scale = self._trans_distance * 4.0 / maximum_edge
71 | center = (bbox[2:4] + bbox[:2]) / 2.0
72 | cx, cy = self._trans_distance - scale * center
73 |
74 | M = np.array([[scale, 0, cx], [0, scale, cy]])
75 |
76 | cropped = cv2.warpAffine(img, M, self._input_shape, borderValue=0.0)
77 | inp = cropped[..., ::-1].astype(np.float32)
78 |
79 | return inp[None, ...], M
80 |
81 | def _inference(self, input_tensor):
82 | self._set_input_tensor(input_tensor)
83 | self._interpreter.invoke()
84 |
85 | return self._get_output_tensor()[0]
86 |
87 | def _postprocessing(self, out, M):
88 | iM = cv2.invertAffineTransform(M)
89 | col = np.ones((self._marker_nums, 1))
90 |
91 | out = out.reshape((self._marker_nums, 2))
92 |
93 | out += 1
94 | out *= self._trans_distance
95 |
96 | out = np.concatenate((out, col), axis=1)
97 |
98 | return out @ iM.T # dot product
99 |
100 | def get_landmarks(self, image, detected_faces=None):
101 | """Predict the landmarks for each face present in the image.
102 |
103 |         This function predicts a set of 106 2D landmarks, one set for each face present.
104 |         The bounding boxes passed in detected_faces are used to crop each face before alignment.
105 |
106 | Arguments
107 | ----------
108 | image {numpy.array} : The input image.
109 |
110 | Keyword Arguments
111 | ----------
112 | detected_faces {list of numpy.array} : list of bounding boxes, one for each
113 | face found in the image (default: {None}, format: {x1, y1, x2, y2, score})
114 | """
115 |
116 | for box in detected_faces:
117 | inp, M = self._preprocessing(image, box)
118 | out = self._inference(inp)
119 | pred = self._postprocessing(out, M)
120 |
121 | # self._calibrate(pred, 1, skip=6)
122 | # yield self.pre_landmarks
123 |
124 | yield pred
125 |
126 | def insta_filter(frame,landmarks):
127 |
128 | x, y, w, h = cv2.boundingRect(landmarks)
129 |
130 | rows= frame.shape[0]
131 | cols = frame.shape[1]
132 |
133 | mask = np.zeros((rows,cols,3),dtype='uint8')
134 |
135 | cv2.drawContours(mask,[landmarks],-1,(255,255,255),-1)
136 |
137 | frame_2x = cv2.resize(frame,(0,0), fx = 2, fy= 2)
138 | mask_2x = cv2.resize(mask,(0,0), fx = 2, fy= 2)
139 |
140 | frame_2x = frame_2x / 255
141 | mask_2x = mask_2x / 255
142 |
143 | frame_target = frame[int(y-(h*0.5)):int(y+h+(h*0.5)), int(x-(w*0.5)):int(x+w+(w*0.5))]
144 | frame_target = frame_target / 255
145 |
146 | forground = cv2.multiply(mask_2x, frame_2x)
147 | background = cv2.multiply(frame_target,1- mask_2x[y*2:(y+h)*2,x*2:(x+w)*2])
148 |
149 | res = cv2.add(background, forground[y*2:(y+h)*2,x*2:(x+w)*2])
150 | frame[int(y-(h*0.5)):int(y+h+(h*0.5)), int(x-(w*0.5)):int(x+w+(w*0.5))] = res*255
151 | return frame
152 |
153 | def insta_filter_lips(frame,landmarks):
154 |
155 | x, y, w, h = cv2.boundingRect(landmarks)
156 |
157 | rows= frame.shape[0]
158 | cols = frame.shape[1]
159 |
160 | mask = np.zeros((rows,cols,3),dtype='uint8')
161 |
162 | cv2.drawContours(mask,[landmarks],-1,(255,255,255),-1)
163 |
164 | frame_2x = cv2.resize(frame,(0,0), fx = 2, fy= 2)
165 | mask_2x = cv2.resize(mask,(0,0), fx = 2, fy= 2)
166 |
167 | frame_2x = frame_2x / 255
168 | mask_2x = mask_2x / 255
169 |
170 | frame_target = frame[int(y-(h*0.5)):int(y+h+(h*0.5)), int(x-(w*0.5)):int(x+w+(w*0.5))]
171 | frame_target = frame_target / 255
172 |
173 | forground = cv2.multiply(mask_2x, frame_2x)
174 | background = cv2.multiply(frame_target,1- mask_2x[y*2:(y+h)*2,x*2:(x+w)*2])
175 |
176 | if h >= 32:
177 | res = cv2.add(background, forground[y*2:(y+h)*2,x*2:(x+w)*2])
178 | frame[int(y-(h*0.5)):int(y+h+(h*0.5)), int(x-(w*0.5)):int(x+w+(w*0.5))] = res*255
179 |
180 | return frame
181 |
182 | if __name__ == '__main__':
183 |
184 | fd = UltraLightFaceDetecion("weights/RFB-320.tflite",conf_threshold=0.88)
185 | fa = CoordinateAlignmentModel("weights/coor_2d106.tflite")
186 |
187 | cam = cv2.VideoCapture(0)
188 |
189 | fourcc = cv2.VideoWriter_fourcc(*'MP4V')
190 | out = cv2.VideoWriter('output.mp4', fourcc,20.0, (640,480))
191 |
192 | while True:
193 | ret,frame = cam.read()
194 |
195 | if not ret:
196 | break
197 |
198 | rows= frame.shape[0]
199 | cols = frame.shape[1]
200 |
201 | color = (125, 255, 125)
202 |
203 | boxes, scores = fd.inference(frame)
204 |
205 | for pred in fa.get_landmarks(frame, boxes):
206 |             pred_int = np.round(pred).astype(np.int32)
207 |
208 | landmarks_left_eye = []
209 | for i in [35 ,36 ,33 ,37 ,39 ,42 ,40 ,41]:
210 | landmarks_left_eye.append(tuple(pred_int[i]))
211 |
212 | landmarks_right_eye = []
213 | for i in [89 ,90 ,87 ,91 ,93 ,96 ,94 ,95]:
214 | landmarks_right_eye.append(tuple(pred_int[i]))
215 |
216 | landmarks_lips = []
217 | for i in [52 ,55 ,56 ,53 ,56 ,58 ,69 ,68 ,67 ,71 ,63 ,64]:
218 | landmarks_lips.append(tuple(pred_int[i]))
219 |
220 | landmarks_left_eye = np.array(landmarks_left_eye)
221 | landmarks_right_eye = np.array(landmarks_right_eye)
222 | landmarks_lips = np.array(landmarks_lips)
223 |
224 | frame = insta_filter_lips(frame,landmarks_lips)
225 | frame = insta_filter(frame,landmarks_left_eye)
226 | frame = insta_filter(frame,landmarks_right_eye)
227 |
228 | out.write(frame)
229 | cv2.imshow("Camera", frame)
230 | if cv2.waitKey(1) == ord('q'):
231 | break
232 |
233 | cam.release()
234 | cv2.destroyAllWindows()
235 |
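236 | # Runs the eye / lips "insta filter" live on webcam 0 and records the result to
237 | # output.mp4 (640x480, 20 fps); press 'q' to stop.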
--------------------------------------------------------------------------------
/Mini project 3/user edit.ui:
--------------------------------------------------------------------------------
[Qt Designer .ui XML; markup stripped in this dump, only text values survive. Recoverable content: a 469x362 MainWindow titled "MainWindow" with background-color: rgb(255, 145, 173); widgets include a bold "Edit users info:" header (17 pt), centre-aligned labels "New Name :", "New Last name :", "New National code :" and "New Birthdate :", a "Done" button, a button carrying the icon C:/Users/User/Downloads/—Pngtree—photo camera_4732850.png, and numerous spacers.]
--------------------------------------------------------------------------------
/assignment-24/MyTFLiteFaceAlignment.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import cv2
3 | import tensorflow as tf
4 | from functools import partial
5 | import time
6 | from TFLiteFaceDetector import UltraLightFaceDetecion
7 | import sys
8 |
9 | class CoordinateAlignmentModel():
10 | def __init__(self, filepath, marker_nums=106, input_size=(192, 192)):
11 | self._marker_nums = marker_nums
12 | self._input_shape = input_size
13 | self._trans_distance = self._input_shape[-1] / 2.0
14 |
15 | self.eye_bound = ([35, 41, 40, 42, 39, 37, 33, 36],
16 | [89, 95, 94, 96, 93, 91, 87, 90])
17 |
18 | # tflite model init
19 | self._interpreter = tf.lite.Interpreter(model_path=filepath)
20 | self._interpreter.allocate_tensors()
21 |
22 | # model details
23 | input_details = self._interpreter.get_input_details()
24 | output_details = self._interpreter.get_output_details()
25 |
26 | # inference helper
27 | self._set_input_tensor = partial(self._interpreter.set_tensor,
28 | input_details[0]["index"])
29 | self._get_output_tensor = partial(self._interpreter.get_tensor,
30 | output_details[0]["index"])
31 |
32 | self.pre_landmarks = None
33 |
34 | def _calibrate(self, pred, thd, skip=6):
35 | if self.pre_landmarks is not None:
36 | for i in range(pred.shape[0]):
37 | if abs(self.pre_landmarks[i, 0] - pred[i, 0]) > skip:
38 | self.pre_landmarks[i, 0] = pred[i, 0]
39 | elif abs(self.pre_landmarks[i, 0] - pred[i, 0]) > thd:
40 | self.pre_landmarks[i, 0] += pred[i, 0]
41 | self.pre_landmarks[i, 0] /= 2
42 |
43 | if abs(self.pre_landmarks[i, 1] - pred[i, 1]) > skip:
44 | self.pre_landmarks[i, 1] = pred[i, 1]
45 | elif abs(self.pre_landmarks[i, 1] - pred[i, 1]) > thd:
46 | self.pre_landmarks[i, 1] += pred[i, 1]
47 | self.pre_landmarks[i, 1] /= 2
48 | else:
49 | self.pre_landmarks = pred
50 |
51 | def _preprocessing(self, img, bbox, factor=3.0):
52 |         """Pre-processing of the BGR image. Adopting warp affine for face crop.
53 |
54 | Arguments
55 | ----------
56 | img {numpy.array} : the raw BGR image.
57 | bbox {numpy.array} : bounding box with format: {x1, y1, x2, y2, score}.
58 |
59 | Keyword Arguments
60 | ----------
61 | factor : max edge scale factor for bounding box cropping.
62 |
63 | Returns
64 | ----------
65 | inp : input tensor with NHWC format.
66 | M : warp affine matrix.
67 | """
68 |
69 | maximum_edge = max(bbox[2:4] - bbox[:2]) * factor
70 | scale = self._trans_distance * 4.0 / maximum_edge
71 | center = (bbox[2:4] + bbox[:2]) / 2.0
72 | cx, cy = self._trans_distance - scale * center
73 |
74 | M = np.array([[scale, 0, cx], [0, scale, cy]])
75 |
76 | cropped = cv2.warpAffine(img, M, self._input_shape, borderValue=0.0)
77 | inp = cropped[..., ::-1].astype(np.float32)
78 |
79 | return inp[None, ...], M
80 |
81 | def _inference(self, input_tensor):
82 | self._set_input_tensor(input_tensor)
83 | self._interpreter.invoke()
84 |
85 | return self._get_output_tensor()[0]
86 |
87 | def _postprocessing(self, out, M):
88 | iM = cv2.invertAffineTransform(M)
89 | col = np.ones((self._marker_nums, 1))
90 |
91 | out = out.reshape((self._marker_nums, 2))
92 |
93 | out += 1
94 | out *= self._trans_distance
95 |
96 | out = np.concatenate((out, col), axis=1)
97 |
98 | return out @ iM.T # dot product
99 |
100 | def get_landmarks(self, image, detected_faces=None):
101 | """Predict the landmarks for each face present in the image.
102 |
103 |         This function predicts a set of 106 2D landmarks, one set for each face present.
104 |         The bounding boxes passed in detected_faces are used to crop each face before alignment.
105 |
106 | Arguments
107 | ----------
108 | image {numpy.array} : The input image.
109 |
110 | Keyword Arguments
111 | ----------
112 | detected_faces {list of numpy.array} : list of bounding boxes, one for each
113 | face found in the image (default: {None}, format: {x1, y1, x2, y2, score})
114 | """
115 |
116 | for box in detected_faces:
117 | inp, M = self._preprocessing(image, box)
118 | out = self._inference(inp)
119 | pred = self._postprocessing(out, M)
120 |
121 | # self._calibrate(pred, 1, skip=6)
122 | # yield self.pre_landmarks
123 |
124 | yield pred
125 |
126 | #left eye = 35 36 33 37 39 42 40 41
127 | #right eye = 89 90 87 91 93 96 94 95
128 | #lips = 52 55 56 53 56 58 69 68 67 71 63 64
129 |
130 | def insta_filter(frame,landmarks):
131 |
132 | x, y, w, h = cv2.boundingRect(landmarks)
133 |
134 | rows= frame.shape[0]
135 | cols = frame.shape[1]
136 |
137 | mask = np.zeros((rows,cols,3),dtype='uint8')
138 |
139 | cv2.drawContours(mask,[landmarks],-1,(255,255,255),-1)
140 |
141 | frame_2x = cv2.resize(frame,(0,0), fx = 2, fy= 2)
142 | mask_2x = cv2.resize(mask,(0,0), fx = 2, fy= 2)
143 |
144 | frame_2x = frame_2x / 255
145 | mask_2x = mask_2x / 255
146 |
147 | frame_target = frame[int(y-(h*0.5)):int(y+h+(h*0.5)), int(x-(w*0.5)):int(x+w+(w*0.5))]
148 | frame_target = frame_target / 255
149 |
150 | forground = cv2.multiply(mask_2x, frame_2x)
151 | background = cv2.multiply(frame_target,1- mask_2x[y*2:(y+h)*2,x*2:(x+w)*2])
152 |
153 | res = cv2.add(background, forground[y*2:(y+h)*2,x*2:(x+w)*2])
154 | frame[int(y-(h*0.5)):int(y+h+(h*0.5)), int(x-(w*0.5)):int(x+w+(w*0.5))] = res*255
155 | return frame
156 |
157 | if __name__ == '__main__':
158 |
159 | fd = UltraLightFaceDetecion("weights/RFB-320.tflite",conf_threshold=0.88)
160 | fa = CoordinateAlignmentModel("weights/coor_2d106.tflite")
161 |
162 | frame = cv2.imread("input/image.jpg")
163 |
164 | rows= frame.shape[0]
165 | cols = frame.shape[1]
166 |
167 | # mask_left_eye = np.zeros((rows,cols,3),dtype='uint8')
168 | # mask_right_eye = np.zeros((rows,cols,3),dtype='uint8')
169 |
170 | # img = cv2.resize(img,(0,0),fx= 0.5 ,fy= 0.5)
171 | color = (125, 255, 125)
172 |
173 | boxes, scores = fd.inference(frame)
174 |
175 | for pred in fa.get_landmarks(frame, boxes):
176 |         pred_int = np.round(pred).astype(np.int32)
177 |
178 | landmarks_left_eye = []
179 | for i in [35 ,36 ,33 ,37 ,39 ,42 ,40 ,41]:
180 | landmarks_left_eye.append(tuple(pred_int[i]))
181 |
182 | landmarks_right_eye = []
183 | for i in [89 ,90 ,87 ,91 ,93 ,96 ,94 ,95]:
184 | landmarks_right_eye.append(tuple(pred_int[i]))
185 |
186 | landmarks_lips = []
187 | for i in [52 ,55 ,56 ,53 ,56 ,58 ,69 ,68 ,67 ,71 ,63 ,64]:
188 | landmarks_lips.append(tuple(pred_int[i]))
189 |
190 | landmarks_left_eye = np.array(landmarks_left_eye)
191 | landmarks_right_eye = np.array(landmarks_right_eye)
192 | landmarks_lips = np.array(landmarks_lips)
193 |
194 | # cv2.drawContours(mask,[landmarks_right_eye],-1,(255,255,255),-1)
195 | # cv2.drawContours(mask,[landmarks_left_eye],-1,(255,255,255),-1)
196 | # cv2.drawContours(mask,[landmarks_lips],-1,(255,255,255),-1)
197 |
198 | # for index,p in enumerate(np.round(pred).astype(np.int)):
199 | # cv2.circle(img, tuple(p), 1, color, 1, cv2.LINE_AA)
200 | # cv2.putText(img, str(index),p,cv2.FONT_HERSHEY_COMPLEX,0.5,(0,0,255),1)
201 |
202 | frame = insta_filter(frame,landmarks_lips)
203 | frame = insta_filter(frame,landmarks_left_eye)
204 | frame = insta_filter(frame,landmarks_right_eye)
205 |
206 | cv2.imwrite("output/result2.jpg", frame)
207 | cv2.imshow("result", frame)
208 | cv2.waitKey()
209 |
210 |
211 | # if __name__ == '__main__':
212 |
213 | # from TFLiteFaceDetector import UltraLightFaceDetecion
214 | # import sys
215 |
216 | # fd = UltraLightFaceDetecion(
217 | # "weights/RFB-320.tflite",
218 | # conf_threshold=0.88)
219 | # fa = CoordinateAlignmentModel(
220 | # "weights/coor_2d106.tflite")
221 |
222 | # cap = cv2.VideoCapture(sys.argv[1])
223 | # color = (125, 255, 125)
224 |
225 | # while True:
226 | # ret, frame = cap.read()
227 |
228 | # if not ret:
229 | # break
230 |
231 | # start_time = time.perf_counter()
232 |
233 | # boxes, scores = fd.inference(frame)
234 |
235 | # for pred in fa.get_landmarks(frame, boxes):
236 | # for p in np.round(pred).astype(np.int):
237 | # cv2.circle(frame, tuple(p), 1, color, 1, cv2.LINE_AA)
238 |
239 | # print(time.perf_counter() - start_time)
240 |
241 | # cv2.imshow("result", frame)
242 | # if cv2.waitKey(0) == ord('q'):
243 | # break
--------------------------------------------------------------------------------
/assignment-29/Input/color picker.ui:
--------------------------------------------------------------------------------
[Qt Designer .ui XML; markup stripped in this dump, only text values survive. Recoverable content: a 567x351 MainWindow titled "MainWindow" whose 561x301 central layout holds a large centre-aligned preview label initialised to "rgb(0 , 0 , 0)" (17 pt bold, stylesheet color: rgb(0, 0, 0)) and three rows labelled "RED", "GREEN" and "BLUE" (14 pt bold), each with a horizontal slider ranging 0 to 255 and a value label initialised to 0, separated by spacers.]
--------------------------------------------------------------------------------
/assignment-27/Find Contour Method.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 188,
6 | "id": "23985123-2d42-445a-88d5-84ba4c92cce3",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import cv2\n",
11 | "import matplotlib.pyplot as plt"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": 189,
17 | "id": "83a2e5f8-628c-4488-be05-e3ff41584dba",
18 | "metadata": {},
19 | "outputs": [
20 | {
21 | "data": {
22 | "text/plain": [
23 | ""
24 | ]
25 | },
26 | "execution_count": 189,
27 | "metadata": {},
28 | "output_type": "execute_result"
29 | },
30 | {
31 | "data": {
32 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAVQAAAD8CAYAAAAoqlyCAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAQr0lEQVR4nO3df6xfdX3H8edrBcH5CyrYdG0zULsYXGYlHdboH4hBCllWTAyBLNKYZnUJZpqYbeCSqclMNJkySTayGhhlcSJTDA1hw1pIjH8ItFigpSJ1Qtqm0KlQSkxgre/98f0Uv+ku3Ht7P/fe7/36fCQn33Pe53O+5/MJlxfn15eTqkKSNHO/M98dkKRxYaBKUicGqiR1YqBKUicGqiR1YqBKUiezFqhJ1iZ5PMneJNfO1n4kaVRkNp5DTbII+AlwMbAfeBC4qqoe674zSRoRs3WEegGwt6r+u6peAm4D1s3SviRpJJwyS9+7DNg3tLwfeM8rNU7iz7UkLSQ/r6qzTyzOVqBOKslGYON87V+SZuCpiYqzFagHgBVDy8tb7WVVtQnYBB6hShoPs3UN9UFgZZJzk7wGuBLYMkv7kqSRMCtHqFV1NMkngHuARcDNVbV7NvYlSaNiVh6bmnYnPOWXtLDsqKrVJxb9pZQkdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdTKjl/QleRI4AhwDjlbV6iSLgW8C5wBPAldU1bMz66Ykjb4eR6gfqKpVQy+suhbYVlUrgW1tWZLG3myc8q8DNrf5zcDls7APSRo5Mw3UAr6bZEeSja22pKoOtvmngSUz3IckLQgzuoYKvL+qDiR5C7A1yY+HV1ZVJamJNmwBvHGidZK0EM3oCLWqDrTPQ8B3gAuAZ5IsBWifh15h201VtXro2qskLWgnHahJXpfkDcfngQ8Bu4AtwPrWbD1w50w7KUkLwUxO+ZcA30ly/Hv+var+K8mDwO1JNgBPAVfMvJuSNPpSNeElzrntxCtcZ5WkEbVjosuV/lJKkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjqZNFCT3JzkUJJdQ7XFSbYmeaJ9ntnqSXJDkr1JHkly/mx2XpJGyVSOUG8B1p5QuxbYVlUrgW1tGeBSYGWbNgI39ummJI2+SQO1qr4P/PKE8jpgc5vfDFw+VL+1Bn4InJFkaae+StJIO9lrqEuq6mCbfxpY0uaXAfuG2u1vNUkae6fM9AuqqpLUdLdLspHBZQFJGgsne4T6zPFT+fZ5qNUPACuG2i1vtf+nqjZV1eqqWn2SfZCkkXKygboFWN/m1wN3DtWvbnf71wCHhy4NSNJYm/SUP8k3gAuBs5LsBz4LfBG4PckG4Cngitb8buAyYC/wK+Bjs9BnSRpJqZr25c/+nTiJa7CSNI92THS50l9KSVInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdTLj10hLs239+vUsXrx4zvZ3xx138NRTT83Z/jRGqupVJ+BmBq+J3jVU+xyD10PvbNNlQ+uuY/CSvseBSyb7/rZNOTlNNCWp3bt311y65JJL5n3cTiM/bZ8oy6Zyyn8LsHaC+vVVtapNdwMkOQ+4Enhn2+afkyyawj4kacGbNFCr6vvAL6f4feuA26rqxar6GYMj1Qtm0D9JWjBmclPqE0keSXJzkjNbbRmwb6jN/laTpLF3soF6I/A2YBVwEPjydL8gycYk25NsP8k+SNJIOalArapnqupYVf0a+Bq/Oa0/AKwYarq81Sb6jk1VtbqqVp9MHyRp1JxUoCZZOrT4YWBXm98CXJnktCTnAiuBB2bWRUlaGCZ9DjXJN4ALgbOS7Ac+C1yYZBWDxweeBD4OUFW7k9wOPAYcBa6pqmOz0nNJGjGTBmpVXTVB+aZXaf8F4Asz6ZQkLUT+9FSSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJamTSQM1yYok9yV5LMnuJJ9s9cVJtiZ5on2e2epJckOSvUkeSXL+bA9CkkbBVI5QjwKfrqrzgDXANUnOA64FtlXVSmBbWwa4FFjZpo3Ajd17LUkjaNJAraqDVfVQmz8C7AGWAeuAza3ZZuDyNr8OuLUGfgickWRp745L0qiZ1jXUJOcA7wbuB5ZU1cG26mlgSZtfBuwb2mx/q534XRuTbE+yfbqdlqRRNOVATfJ64NvAp6rq+eF1VVVATWfHVbWpqlZX1erpbCdJo2pKgZrkVAZh+vWquqOVnzl+Kt8+D7X6AWDF0ObLW02SxtpU7vIHuAnYU1VfGVq1BVjf5tcDdw7Vr253+9cAh4cuDUjS2DplCm3eB3wUeDTJzlb7DPBF4PYkG4CngCvauruBy4C9wK+Aj/XssCSNqkkDtap+AOQVVn9wgvYFXDPDfknSguMvpSSpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjqZyjulpHl15MgRDh8+PGf7e+mll+ZsXxovGbwC6lUaJCuAW4ElQAGbquqrST4H/DnwP63pZ6rq7rbNdcAG4Bjwl1V1zyT7ePVO6LfaG9/4RhYtWjRn+zty5AhHjx6ds/1pQdpRVatPLE7lCPUo8OmqeijJG4AdSba2dddX1T8MN05yHnAl8E7g94DvJfmDqjo2s/7rt9Xzzz8/312QpmTSa6hVdbCqHmrzR4A9wLJX2WQdcFtVvVhVP2PwOukLenRWkkbZtG5KJTkHeDdwfyt9IskjSW5OcmarLQP2DW22nwkCOMnGJNuTbJ9+tyVp9Ew5UJO8Hvg28Kmqeh64EXgbsAo4CHx5Ojuuqk1VtXqi6xCStBBNKVCTnMogTL9eVXcAVNUzVXWsqn4NfI3fnNYfAFYMbb681SRprE0aqEkC3ATsqaqvDNWXDjX7MLCrzW8BrkxyWpJzgZXAA/26LEmjaSp3+d8HfBR4NMnOVvsMcFWSVQwepXoS+DhAVe1OcjvwGIMnBK7xDr+k3waTPoc6J53wOVRJC8uEz6H601NJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6mQqbz09PckDSR5OsjvJ51v93CT3J9mb5JtJXtPqp7XlvW39ObM8BkkaCVM5Qn0RuKiq3gWs
AtYmWQN8Cbi+qt4OPAtsaO03AM+2+vWtnSSNvUkDtQZeaIuntqmAi4Bvtfpm4PI2v64t09Z/MEl6dViSRtWUrqEmWZRkJ3AI2Ar8FHiuqo62JvuBZW1+GbAPoK0/DLx5gu/cmGR7ku0zGoEkjYgpBWpVHauqVcBy4ALgHTPdcVVtqqrVE73bWpIWomnd5a+q54D7gPcCZyQ5pa1aDhxo8weAFQBt/ZuAX/TorCSNsqnc5T87yRlt/rXAxcAeBsH6kdZsPXBnm9/Slmnr762q6thnSRpJp0zehKXA5iSLGATw7VV1V5LHgNuS/D3wI+Cm1v4m4N+S7AV+CVw5C/2WpJGTUTh4TDL/nZCkqdsx0f0ffyklSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ1M5a2npyd5IMnDSXYn+Xyr35LkZ0l2tmlVqyfJDUn2JnkkyfmzPAZJGglTeevpi8BFVfVCklOBHyT5z7bur6rqWye0vxRY2ab3ADe2T0kaa5MeodbAC23x1Da92ltK1wG3tu1+CJyRZOnMuypJo21K11CTLEqyEzgEbK2q+9uqL7TT+uuTnNZqy4B9Q5vvb7UTv3Njku1Jtp989yVpdEwpUKvqWFWtApYDFyT5Q+A64B3AHwOLgb+Zzo6ralNVrZ7o3daStBBN6y5/VT0H3AesraqD7bT+ReBfgQtaswPAiqHNlreaJI21qdzlPzvJGW3+tcDFwI+PXxdNEuByYFfbZAtwdbvbvwY4XFUHZ6HvkjRSpnKXfymwOckiBgF8e1XdleTeJGcDAXYCf9Ha3w1cBuwFfgV8rHuvJWkEperVbtjPUSeS+e+EJE3djonu//hLKUnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqZCr/g+m58ALw+Hx3YpadBfx8vjsxixzfwub4puf3JyqOSqA+Pu4v60uyfZzH6PgWNsfXh6f8ktSJgSpJnYxKoG6a7w7MgXEfo+Nb2BxfByPxkj5JGgejcoQqSQvevAdqkrVJHk+yN8m1892fk5Hk5iSHkuwaqi1OsjXJE+3zzFZPkhvaeB9Jcv789XxqkqxIcl+Sx5LsTvLJVh+LMSY5PckDSR5u4/t8q5+b5P42jm8meU2rn9aW97b158zrAKYoyaIkP0pyV1set/E9meTRJDuTbG+1Of0bnddATbII+CfgUuA84Kok581nn07SLcDaE2rXAtuqaiWwrS3DYKwr27QRuHGO+jgTR4FPV9V5wBrgmvbPaVzG+CJwUVW9C1gFrE2yBvgScH1VvR14FtjQ2m8Anm3161u7heCTwJ6h5XEbH8AHqmrV0CNSc/s3WlXzNgHvBe4ZWr4OuG4++zSDsZwD7BpafhxY2uaXMnjWFuBfgKsmardQJuBO4OJxHCPwu8BDwHsYPAh+Squ//LcK3AO8t82f0tplvvs+ybiWMwiUi4C7gIzT+FpfnwTOOqE2p3+j833KvwzYN7S8v9XGwZKqOtjmnwaWtPkFPeZ2+vdu4H7GaIztdHgncAjYCvwUeK6qjrYmw2N4eXxt/WHgzXPa4en7R+CvgV+35TczXuMDKOC7SXYk2dhqc/o3Oiq/lBprVVVJFvzjFEleD3wb+FRVPZ/k5XULfYxVdQxYleQM4DvAO+a3R/0k+RPgUFXtSHLhPHdnNr2/qg4keQuwNcmPh1fOxd/ofB+hHgBWDC0vb7Vx8EySpQDt81CrL8gxJzmVQZh+varuaOWxGiNAVT0H3MfgFPiMJMcPOobH8PL42vo3Ab+Y255Oy/uAP03yJHAbg9P+rzI+4wOgqg60z0MM/qN4AXP8NzrfgfogsLLdbXwNcCWwZZ771MsWYH2bX8/guuPx+tXtLuMa4PDQKclIyuBQ9CZgT1V9ZWjVWIwxydntyJQkr2VwfXgPg2D9SGt24viOj/sjwL3VLsSNoqq6rqqWV9U5DP4du7eq/owxGR9AktclecPxeeBDwC7m+m90BC4kXwb8hME1q7+d7/6c5Bi+ARwE/pfBtZgNDK45bQOeAL4HLG5tw+DJhp8CjwKr57v/Uxjf+xlcn3oE2Nmmy8ZljMAfAT9q49sF/F2rvxV4ANgL/AdwWquf3pb3tvVvne8xTGOsFwJ3jdv42lgebtPu41ky13+j/lJKkjqZ71N+SRobBqokdWKgSlInBqokdWKgSlInBqokdWKgSlInBqokdfJ/l08FeyCcAIsAAAAASUVORK5CYII=\n",
33 | "text/plain": [
34 | ""
35 | ]
36 | },
37 | "metadata": {
38 | "needs_background": "light"
39 | },
40 | "output_type": "display_data"
41 | }
42 | ],
43 | "source": [
44 | "image = cv2.imread('Input/12.png' , cv2.IMREAD_GRAYSCALE)\n",
45 | "\n",
46 | "_ , image = cv2.threshold(image , 127 , 255 , cv2.THRESH_BINARY)\n",
47 | "\n",
48 | "plt.imshow(image, cmap = \"gray\")"
49 | ]
50 | },
51 | {
52 | "cell_type": "code",
53 | "execution_count": 190,
54 | "id": "5af35fca-6138-40b3-ab58-05e0a30f962d",
55 | "metadata": {},
56 | "outputs": [
57 | {
58 | "name": "stdout",
59 | "output_type": "stream",
60 | "text": [
61 | "103\n",
62 | "51\n",
63 | "[143, 142]\n",
64 | "[242, 241]\n"
65 | ]
66 | }
67 | ],
68 | "source": [
69 | "rows, cols = image.shape\n",
70 | "\n",
71 | "def first_white_pixle():\n",
72 | " for i in range(rows):\n",
73 | " for j in range(cols):\n",
74 | " if image[i,j] != 0: \n",
75 | " return i,j\n",
76 | "\n",
77 | "x1 , y1 = first_white_pixle()\n",
78 | "\n",
79 | "list_whitepixle_rows_down = []\n",
80 | "list_whitepixle_cols_right = []\n",
81 | "list_whitepixle_rows_up = []\n",
82 | "list_whitepixle_cols_left = []\n",
83 | "\n",
84 | "for i in range(x1,rows):\n",
85 | " list_whitepixle_rows_down.append(i)\n",
86 | " if image[i,y1] <= 200:\n",
87 | " break\n",
88 | " \n",
89 | "for i in range(y1,cols):\n",
90 | " list_whitepixle_cols_right.append(i)\n",
91 | " if image[x1,i] <= 200:\n",
92 | " break\n",
93 | "\n",
94 | "for i in range(x1,0,-1):\n",
95 | " list_whitepixle_rows_up.append(i)\n",
96 | " if image[i,y1] <= 200:\n",
97 | " break\n",
98 | " \n",
99 | "for i in range(y1,0,-1):\n",
100 | " list_whitepixle_cols_left.append(i)\n",
101 | " if image[x1,i] <= 200:\n",
102 | " break\n",
103 | "\n",
104 | "print(len(list_whitepixle_rows_down))\n",
105 | "print(len(list_whitepixle_cols_right))\n",
106 | "print(list_whitepixle_rows_up)\n",
107 | "print(list_whitepixle_cols_left)"
108 | ]
109 | },
110 | {
111 | "cell_type": "code",
112 | "execution_count": 191,
113 | "id": "2951620f-4aa2-4022-906b-d502a5fa8a81",
114 | "metadata": {},
115 | "outputs": [
116 | {
117 | "name": "stdout",
118 | "output_type": "stream",
119 | "text": [
120 | "143 387 242 535\n"
121 | ]
122 | }
123 | ],
124 | "source": [
125 | "print(x1,rows,y1,cols)"
126 | ]
127 | },
128 | {
129 | "cell_type": "code",
130 | "execution_count": 192,
131 | "id": "ae642247-81c1-4d75-a067-537d60e34a37",
132 | "metadata": {},
133 | "outputs": [
134 | {
135 | "name": "stdout",
136 | "output_type": "stream",
137 | "text": [
138 | "143 242 245 292\n"
139 | ]
140 | },
141 | {
142 | "data": {
143 | "text/plain": [
144 | ""
145 | ]
146 | },
147 | "execution_count": 192,
148 | "metadata": {},
149 | "output_type": "execute_result"
150 | },
151 | {
152 | "data": {
153 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAVQAAAD8CAYAAAAoqlyCAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAARPElEQVR4nO3df6zddX3H8edLWkCHDlBsatsIaodhy6ymQwz+gSxs2CxrTYyBLNIYkroEE03MNnDJNpOZbMmUzcyQ1cCsixPZ/EFFN1cryTKjQKsVWxC9TkzbVBoRUEJga/veH+dTctJd6Ln3fs695x6fj+Sb8/2+v5/v+X4+6emr3x/n9JuqQpK0cC9Y6g5I0rQwUCWpEwNVkjoxUCWpEwNVkjoxUCWpk7EFapKrkzyUZCbJjePajyRNiozje6hJzgC+D1wFHALuA66tqge670ySJsS4jlAvBWaq6r+r6n+A24HNY9qXJE2EFWN63zXAwaHlQ8Abn6txEn+uJWk5+WlVXXBqcVyBelpJtgHblmr/krQAP56tOK5APQysG1pe22rPqqrtwHbwCFXSdBjXNdT7gPVJLkpyJnANsHNM+5KkiTCWI9SqOpbkPcBXgDOA26rqwDj2JUmTYixfm5pzJzzll7S87K2qjacW/aWUJHVioEpSJwaqJHVioEpSJwaqJHVioEpSJwaqJHVioEpSJwaqJHVioEpSJwaqJHVioEpSJwaqJHVioEpSJwaqJHVioEpSJwaqJHVioEpSJwaqJHWyoIf0JXkY+AVwHDhWVRuTnA98BrgQeBh4R1U9trBuStLk63GE+paq2jD0wKobgd1VtR7Y3ZYlaeqN45R/M7Cjze8AtoxhH5I0cRYaqAX8R5K9Sba12qqqOtLmfwKsWuA+JGlZWNA1VODNVXU4ycuBXUm+N7yyqipJzbZhC+Bts62TpOVoQUeoVXW4vR4FPg9cCjySZDVAez36HNtur6qNQ9deJWlZm3egJvmVJC8+OQ/8DrAf2Alsbc22AncutJOStBws5JR/FfD5JCff55+r6t+T3AfckeR64MfAOxbeTUmafKma9RLn4nbiOa6zStKE2jvb5Up/KSVJnRioktSJgSpJnRioktSJgSpJnRioktSJgSpJnRioktSJgSpJnRioktSJgSpJnRioktSJgSpJnRioktSJgSpJnRioktSJgSpJnRioktSJgSpJnZw2UJPcluRokv1DtfOT7Eryg/Z6XqsnyUeTzCS5P8kbxtl5SZokoxyhfgK4+pTajcDuqloP7G7LAG8F1rdpG3BLn25K0uQ7baBW1X8CPzulvBnY0eZ3AFuG6p+sgW8C5yZZ3amvkjTR5nsNdVVVHWnzPwFWtfk1wMGhdodaTZKm3oqFvkFVVZKa63ZJtjG4LCBJU2G+R6iPnDyVb69HW/0wsG6o3dpW+3+qantVbayqjfPsgyRNlPkG6k5ga5vfCtw5VL+u3e2/DHhi6NKAJE21057yJ/k0cAXwsiSHgD8H/gq4I8n1wI+Bd7TmXwY2ATPAU8C7xtBnSZpIqZrz5c/+nZjHNVhJWkJ7Z7tc6S+lJKkTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJakTA1WSOjFQJamTBT/1VBqnFStWcPnll/OiF71o0fb51FNP8fWvf51jx44t2j41HQxUTawXvOAFbNq0iQ0bNpBk0fZbVbzkJS/hS1/6EidOnFi0/Wr5O+0pf5LbkhxNsn+o9hdJDifZ16ZNQ+tuSjKT5KEkvzuujmv6rVixgosvvnhRwxQgCRdffDErVni8obkZ5RPzCeDvgU+eUr+5qv5muJDkEuAa4NeBVwBfTfJrVXW8Q1/1S+y8885jy5YtYw3XquILX/gCjz322Nj2oel22kCtqv9McuGI77cZuL2qngF+lGQGuBT4xvy7KMGZZ57JK1/5yrHvZ+XKlWPfh6bXQu7yvyfJ/e2SwHmttgY4ONTmUKtJ0tSbb6DeArwa2AAcAT481zdIsi3JniR75tkHSZoo8wrUqnqkqo5X1Qng4wxO6wEOA+uGmq5ttdneY3tVbayqjfPpgyRNmnkFapLVQ4tvA05+A2AncE2Ss5JcBKwH7l1YFyVpeTjtTakknwauAF6W5BDw58AVSTYABTwMvBugqg4kuQN4ADgG3OAdfkm/LEa5y3/tLOVbn6f9h4APLaRTkrQc+Vt+SerEQJWkTgxUSerEQJWkTgxUSerEQJWkTgxUSerEQJWkTgxUSerEQJWkTgxUSerEQJWkTgxUSerEQJWkTgxUSerEQJWkTgxUSerEQJWkTgxUSerEQJWkTk4bqEnWJbk7yQNJDiR5b6ufn2RXkh+01/NaPUk+mmQmyf1J3jDuQUjSJBjlCPUY8P6qugS4DLghySXAjcDuqloP7G7LAG8F1rdpG3BL915L0gQ6baBW1ZGq+lab/wXwILAG2AzsaM12AFva/GbgkzXwTeDcJKt7d1ySJs2crqEmuRB4PXAPsKqqjrRVPwFWtfk1wMGhzQ612qnvtS3JniR75tppSZpEIwdqknOAzwLvq6qfD6+rqgJqLjuuqu1VtbGqNs5lO0maVCMFapKVDML0U1X1uVZ+5OSpfHs92uqHgXVDm69tNUmaaqPc5Q9wK/BgVX1kaNVOYGub3wrcOVS/rt3tvwx4YujSgCRNrRUjtLkceCfw3ST7Wu0DwF8BdyS5Hvgx8I627svAJmAGeAp4V88OS9KkOm2gVtV/AXmO1b89S/sCblhgvyRp2fGXUpLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUiYEqSZ0YqJLUySiPQJGW3LFjx3j00UcZPOJsPKqK48ePj+39Nf0MVC0Ljz76KB/72MfGvp8TJ06MfR+aXqM89XRdkruTPJDkQJL3tvpfJDmcZF+bNg1tc1OSmSQPJfndcQ5A0+v48eMcPHjw2eUTJ06MfTrp0KFDHq1qzjJ4pt7zNEhWA6ur6ltJXgzsBbYweMrpk1X1N6e0vwT4NHAp8Argq8CvVdVzfjqTPH8n9Evr7LPP5qqrruKcc85ZtH0++eST7Nq1i6effnrR9qllZ29VbTy1OMpTT48AR9r8L5I8CKx5nk02A7dX1TPAj5LMMAjXb8yr2/ql9vTTT/PFL35xqbshjWROd/mTXAi8Hrinld6T5P4ktyU5r9XWAAeHNjvELAGcZFuSPUn2zL3bkjR5Rg7UJOcAnwXeV1U/B24BXg1sYHAE++G57LiqtlfVxtkOmyVpORopUJOsZBCmn6qqzwFU1SNVdbyqTgAfZ3BaD3AYWDe0+dpWk6SpNspd/gC3Ag9W1UeG6quHmr0N2N/mdwLXJDkryUXAeuDefl2WpMk0yvdQLwfeCXw3yb5W+wBwbZINQAEPA+8GqKoDSe4AHgCOATc83x1+SZoWp/3a1KJ0wq9NSVpeZv3
alL/ll6RODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRODFRJ6sRAlaRORnnq6dlJ7k3ynSQHknyw1S9Kck+SmSSfSXJmq5/Vlmfa+gvHPAZJmgijHKE+A1xZVa8DNgBXJ7kM+Gvg5qp6DfAYcH1rfz3wWKvf3NpJ0tQ7baDWwJNtcWWbCrgS+NdW3wFsafOb2zJt/W8nSa8OS9KkGukaapIzkuwDjgK7gB8Cj1fVsdbkELCmza8BDgK09U8AL53lPbcl2ZNkz4JGIEkTYqRArarjVbUBWAtcCrx2oTuuqu1VtXG2Z1tL0nI0p7v8VfU4cDfwJuDcJCvaqrXA4TZ/GFgH0Nb/KvBoj85K0iQb5S7/BUnObfMvBK4CHmQQrG9vzbYCd7b5nW2Ztv5rVVUd+yxJE2nF6ZuwGtiR5AwGAXxHVd2V5AHg9iR/CXwbuLW1vxX4pyQzwM+Aa8bQb0maOJmEg8ckS98JSRrd3tnu//hLKUnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqxECVpE4MVEnqZJSnnp6d5N4k30lyIMkHW/0TSX6UZF+bNrR6knw0yUyS+5O8YcxjkKSJMMpTT58BrqyqJ5OsBP4ryb+1dX9UVf96Svu3Auvb9EbglvYqSVPttEeoNfBkW1zZpud7Sulm4JNtu28C5yZZvfCuStJkG+kaapIzkuwDjgK7quqetupD7bT+5iRntdoa4ODQ5oda7dT33JZkT5I98+++JE2OkQK1qo5X1QZgLXBpkt8AbgJeC/wWcD7wJ3PZcVVtr6qNsz3bWpKWoznd5a+qx4G7gaur6kg7rX8G+Efg0tbsMLBuaLO1rSZJU22Uu/wXJDm3zb8QuAr43snrokkCbAH2t012Ate1u/2XAU9U1ZEx9F2SJsood/lXAzuSnMEggO+oqruSfC3JBUCAfcAftvZfBjYBM8BTwLu691qSJlCqnu+G/SJ1Iln6TkjS6PbOdv/HX0pJUicGqiR1YqBKUicGqiR1YqBKUicGqiR1YqBKUicGqiR1YqBKUicGqiR1YqBKUicGqiR1YqBKUicGqiR1YqBKUiej/AfTi+FJ4KGl7sSYvQz46VJ3Yowc3/Lm+ObmlbMVJyVQH5r2h/Ul2TPNY3R8y5vj68NTfknqxECVpE4mJVC3L3UHFsG0j9HxLW+Or4OJeEifJE2DSTlClaRlb8kDNcnVSR5KMpPkxqXuz3wkuS3J0ST7h2rnJ9mV5Aft9bxWT5KPtvHen+QNS9fz0SRZl+TuJA8kOZDkva0+FWNMcnaSe5N8p43vg61+UZJ72jg+k+TMVj+rLc+09Rcu6QBGlOSMJN9OcldbnrbxPZzku0n2JdnTaov6GV3SQE1yBvAx4K3AJcC1SS5Zyj7N0yeAq0+p3Qjsrqr1wO62DIOxrm/TNuCWRerjQhwD3l9VlwCXATe0P6dpGeMzwJVV9TpgA3B1ksuAvwZurqrXAI8B17f21wOPtfrNrd1y8F7gwaHlaRsfwFuqasPQV6QW9zNaVUs2AW8CvjK0fBNw01L2aQFjuRDYP7T8ELC6za9m8F1bgH8Arp2t3XKZgDuBq6ZxjMCLgG8Bb2TwRfAVrf7sZxX4CvCmNr+itctS9/0041rLIFCuBO4CMk3ja319GHjZKbVF/Ywu9Sn/GuDg0PKhVpsGq6rqSJv/CbCqzS/rMbfTv9cD9zBFY2ynw/uAo8Au4IfA41V1rDUZHsOz42vrnwBeuqgdnru/Bf4YONGWX8p0jQ+ggP9IsjfJtlZb1M/opPxSaqpVVSVZ9l+nSHIO8FngfVX18yTPrlvuY6yq48CGJOcCnwdeu7Q96ifJ7wFHq2pvkiuWuDvj9OaqOpzk5cCuJN8bXrkYn9GlPkI9DKwbWl7batPgkSSrAdrr0VZflmNOspJBmH6qqj7XylM1RoCqehy4m8Ep8LlJTh50DI/h2fG19b8KPLq4PZ2Ty4HfT/IwcDuD0/6/Y3rGB0BVHW6vRxn8o3gpi/wZXepAvQ9Y3+42nglcA+xc4j71shPY2ua3MrjueLJ+XbvLeBnwxNApyUTK4FD0VuDBqvrI0KqpGGOSC9qRKUleyOD68IMMgvXtrdmp4zs57rcDX6t2IW4SVdVNVbW2qi5k8Hfsa1X1B0zJ+ACS/EqSF5+cB34H2M9if0Yn4ELyJuD7DK5Z/elS92eeY/g0cAT4XwbXYq5ncM1pN/AD4KvA+a1tGHyz4YfAd4GNS93/Ecb3ZgbXp+4H9rVp07SMEfhN4NttfPuBP2v1VwH3AjPAvwBntfrZbXmmrX/VUo9hDmO9Arhr2sbXxvKdNh04mSWL/Rn1l1KS1MlSn/JL0tQwUCWpEwNVkjoxUCWpEwNVkjoxUCWpEwNVkjoxUCWpk/8DYGCgnCkDkBAAAAAASUVORK5CYII=\n",
154 | "text/plain": [
155 | ""
156 | ]
157 | },
158 | "metadata": {
159 | "needs_background": "light"
160 | },
161 | "output_type": "display_data"
162 | }
163 | ],
164 | "source": [
165 | "x = list_whitepixle_rows_up[0]\n",
166 | "y = list_whitepixle_cols_left[0]\n",
167 | "w = list_whitepixle_rows_down[102]\n",
168 | "h = list_whitepixle_cols_right[50]\n",
169 | "print (x , y , w , h)\n",
170 | "cv2.rectangle(image, (y,x) , (h,w), (127,127,127), 4)\n",
171 | "plt.imshow(image, cmap = \"gray\")"
172 | ]
173 | }
174 | ],
175 | "metadata": {
176 | "kernelspec": {
177 | "display_name": "Python 3 (ipykernel)",
178 | "language": "python",
179 | "name": "python3"
180 | },
181 | "language_info": {
182 | "codemirror_mode": {
183 | "name": "ipython",
184 | "version": 3
185 | },
186 | "file_extension": ".py",
187 | "mimetype": "text/x-python",
188 | "name": "python",
189 | "nbconvert_exporter": "python",
190 | "pygments_lexer": "ipython3",
191 | "version": "3.9.0"
192 | }
193 | },
194 | "nbformat": 4,
195 | "nbformat_minor": 5
196 | }
197 |
--------------------------------------------------------------------------------
/assignment-26-1/test.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 20,
6 | "id": "bc18c19f-3757-451f-9ae6-6f8eba5e6c65",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import numpy as np\n",
11 | "import cv2\n",
12 | "import time\n",
13 | "import matplotlib.pyplot as plt"
14 | ]
15 | },
16 | {
17 | "cell_type": "code",
18 | "execution_count": 14,
19 | "id": "ee26f601-ef5c-4dc2-951f-0ce6e752f37d",
20 | "metadata": {},
21 | "outputs": [
22 | {
23 | "name": "stdout",
24 | "output_type": "stream",
25 | "text": [
26 | "8.89730954170227\n"
27 | ]
28 | }
29 | ],
30 | "source": [
31 | "start_time = time.time()\n",
32 | "img = cv2.imread(\"me.jpg\")\n",
33 | "img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\n",
34 | "\n",
35 | "mask = np.ones((3,3)) / 9\n",
36 | "result = np.zeros(img.shape)\n",
37 | "\n",
38 | "rows , cols = img.shape\n",
39 | "\n",
40 | "for i in range (1,rows-1):\n",
41 | " for j in range(1,cols-1):\n",
42 | " small_img = img[i-1:i+2, j-1:j+2]\n",
43 | " result[i,j] =np.sum(small_img * mask)\n",
44 | "\n",
45 | "cv2.imwrite(\"Output.jpg\",result)\n",
46 | "end_time = time.time()\n",
47 | "print(end_time - start_time)"
48 | ]
49 | },
50 | {
51 | "cell_type": "code",
52 | "execution_count": 22,
53 | "id": "6c8fb0f7-a612-4669-8750-d7990984cf34",
54 | "metadata": {},
55 | "outputs": [
56 | {
57 | "data": {
58 | "text/plain": [
59 | "[]"
60 | ]
61 | },
62 | "execution_count": 22,
63 | "metadata": {},
64 | "output_type": "execute_result"
65 | },
66 | {
67 | "data": {
68 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX0AAAD4CAYAAAAAczaOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAA3a0lEQVR4nO3deXycZ3no/d89I41mkTQardZmSd7t2E5iO7ZD9oWs0AQIbfqypDQ5OVBooRzeEg4t4W2hAVqg0ENzGkggQJqEJZCUJZuzL7bjLd4XWbasfd9Hs9/vH/PMWLYlS7I188xyfT8ffzx65pmZ+/HI11xz3ZvSWiOEECI7WMxugBBCiOSRoC+EEFlEgr4QQmQRCfpCCJFFJOgLIUQWyTG7AWdTWlqq6+vrzW6GEEKkle3bt/dqrcsmuy+lg359fT3btm0zuxlCCJFWlFLNU90n5R0hhMgiEvSFECKLSNAXQogsIkFfCCGyiAR9IYTIIhL0hRAii0jQF0KILCJBXwgRt+14P9ubB8xuhkiglJ6cJYRIPK01Lx3sZlvzAP/56lEq3Q7e+OI1KKVOOc8bCDHoDVJV5DCppWIuSNAXIgv96PUm/vvddq5YXMai8nw+9+QuAKrcdtoGx2nu81Jf6jrlMd/bdISfv93Ma393DSX5eSa0WswFCfpCZKEn32mha9jHu61DAKysLuSxuzfSN+bn2m+/yptHe88I+vvbhxkLhPnRG8f44k3LzGi2mAMzqukrpf5WKbVPKbVXKfW4UsqulGpQSm1RSjUqpZ5UStmMc/OMnxuN++snPM+XjOOHlFI3JuiahBBnMeoP0dgzyt2XL+DrH1hJeUEeD3xgNW5nLg2lLqrcdt5s7D3jcU09YwD89K3jjPiCyW62mCPTBn2lVDXwN8A6rfVKwArcCXwT+K7WehEwANxtPORuYMA4/l3jPJRSK4zHXQDcBPyHUso6t5cjhJjO3rYhtIbVtW4+sqGOLf/7OlbVuAFQSnHZolLeOtrHxP2zxwNh2ofGWV5ZyFggTEv/uFnNF+dppqN3cgCHUioHcAIdwLXAr4z7HwVuN27fZvyMcf91KtojdBvwhNbar7U+BjQC68/7CoQQs/JuyyAAF9YUAZzRYbumzsOgN8iJfm/82LHeMbSGdXUeAAa8gaS0Vcy9aYO+1roN+FfgBNFgPwRsBwa11iHjtFag2rhdDbQYjw0Z55dMPD7JY+KUUvcqpbYppbb19PScyzUJIc5id+sQNR4HxS7bpPevqo5m/XvbhuPHmnpHAVhXHw36/WMS9NPVTMo7HqJZegNQBbiIlmcSQmv9kNZ6ndZ6XVnZpHsACCHOw972IVYb5ZzJLK7IJ9eq2Ns+FD8Wq+evmS+ZfrqbSXnneuCY1rpHax0EngIuA4qMcg9ADdBm3G4DagGM+91A38TjkzxGCJEEgVCEln4vi8oLpjwnL8fKkooC9rZNDPqjVBc5qHTbAcn009lMgv4JYKNSymnU5q8D9gMvA3cY59wFPG3cfsb4GeP+l3S0R+gZ4E5jdE8DsBjYOjeXIYSYiZYBLxEN9SXOs563qtptdPhGO3MPdY2ysDyfHKuFQnsOAxL009ZMavpbiHbI7gD2GI95CPgi8HmlVCPRmv3DxkMeBkqM458H7jOeZx/wC6IfGM8Cn9Zah+f0aoQQZ9XcFy3T1JW4znreBdVuBrxB3mjsxRcMc7hrhNVGrb/YZWPAK0M209WMJmdpre8H7j/tcBOTjL7RWvuAD0/xPF8Hvj7LNgoh5sjx3uiInOky/VtXVfLTt45z90+2cd/NywhHdHxYp8dlk5p+GpMF1+ZYOKJ5bEsz3kBo+pOFSLLmvjEK8nKmHLkTU+yy8atPvgd7roV/ee4QQLzzt9hpk5p+GpOgP8deO9zDl3+zlxf2d5ndFCHOcLzPS12p84yx+ZNxO3N534VVjAfDlObnMa8w2onrcdmkpp/GJOjPsRcPRIN955DP5JYIcabmvrFp6/kTffDi6FSa1TXu+AdFsctGv5R30pYE/TkUW6IWoHvEz/P7OnnjyJlrmAhhhmA4QuvA+LT1/InW1nm4YUUFt11UFT/mcdrwBSOMB2QcRjqSVTbn0L72YTqMDL9r2Mc3nj1IldvB5YtLTW6ZENFvn6GIZn7xzIO+UoqHPr7ulGMeZy4A/d4A1TZZWz/dSKY/hzY39QGwoNRF17CP1v5xRvzSoStSQ9tgdJG06qKZB/3JeIxOYKnrpycJ+nNob9sQlW47K6oKOdAxQiAcYUyCvkgR7UbQryqyn9fzxEb+yLDN9CRBfw50DfvoHvaxu22IldVuKgrtjBrBPhb0XznUzXse2MTBzuGzPZUQp9Ba88c9HQRCkfN+rraBWNA/v5JMiRH0u4f9590mkXwS9OfAp36+nT97aDPHesdYVe2mvODkVnKjvhCHOkf4ix+/Q/uQjx3Ng+Y1VKSdg50jfOqxHfx25/kvU9U+NE5pvg177vltYxH70IiVi0R6kaB/nkb9Id5tHYqvN76q2k154cmgPxYI8fqRk0tEtw54J3saISbVORwdGLCrdfC8n6t1YHxONjW351opL8iT3+U0JUH/PO1oHiAc0ViMuS4rq91UFJysmUZ0dPgmQHWRg5YByY7EzPUavzu75yDotw+OUz0HQR+gxuOgVX6X05IE/RkY8gbjqw1qrU/ZRm7rsX6sFsV9Ny/jmqVllBXknZLpQ/RrcEFeDvWlTlr6JTsSM9c7Gu0sPdgxgi947uPitda0D/rmJNMHqPE4aZFMPy1J0J9G36if9f/8Is/t6wTgJ28d59pvvxq/f+uxflZWu7n3yoX8+BPR9efKjEw/Lyf6z9s+OE6hI5eaIqd8JRaz0jsazfRDEc1fP76TH73edE7PM+ANMh4Mz1nQry120DHoIxQ+/w5mkVwS9KdxpHsUfyjCoc7odnGvHOrhWO8YwXCEQCjCrtZB1htbyMUU2nO4qLaI65dXANGg73bkUlvsoHc0IIuxiRnrHfXjtEU7Xl/Y38W3nj1E1/Dsl/hoj4/Rn7tMPxTRdI3ICJ50I0F/Gif6opl557APrXV8N6Exf4j9HcMEQpH4FnIxSil+++nL+MjG+UC0ph8N+tFJMVILFTPVO+pn6bwCPrpxPp9/7xJCkQg/fG322f4f93agFCyvnHrHrNmo8UQ/PFqlXJl2JOhPo7k/uulE17CPjiEffcYsxBFfiJ0nBgC4+LSgH5OfF13lQmtwO3Kp8cSCvvxHETPTOxKgND+Pr92+ir+5bjE3r6zkqVkO3xzyBnn0rWZuXjlvVoutnc3J32VJYNKNBP1J7GsfYuuxfgCaY5n+kI89E/YMHfWH2HlikEq3nXnuyWc4xoI+EC/vALT0y38UMTO9o37KJsz7WF3jpn8swND4zHeuempnK6P+EJ+5ZvGctauqyI5SSGduGpKgP4nvPH+YL/9mDwAnjK+v3SM+9p0W9He1DHJRbdGUzzMx6Bc6cijLj/7n7RuVOqiYXigcod8bzfRjYoulzWYU2IGOYUrzbayoKpyz
tuXlWCnNz6NjUJYQTzcS9CcxOB6Mj62PZfq9owF2nBjEagzIb+7zcqLfe9ag7zot01dK4ci14pUlacUM9HsDaA1l+Sd3uao9h6B/tGeMhWX5c96+Kred9iH51ppuJOhPYsQXZGg8SM+In6HxIIvKo/9h3m7q42IjyB/pGgGgoXTqGqnTZiW2QZHbkRs/5j2P8dYie/SORPuPTsn0jbXwT8ww6GutaeweZWH53Af9SrcjvpS4SB8S9Ccx4osOqdzeHO2oXd9QDET3v71qSRlw8j+d5yx7jSqlcNmi2X6hEfQdNis+yfTFDMTG6JdOqOkX2nPxOHNPCfpj/tCUE7f6jPp/IjL9yiI7HYPjp0xWFKlPgv4kTgb9aGfuBiPoA1y1NBr0Yx1YHufZN5iO1fVPyfQl6IsZiI2MmbiAH0Tr+i0D47QOeNFa8/FHtvKFX7474XFerv/Oq1z+zZf45h8PArCwbG5G7UxU5XYwFggz7JN5J+lEgv5pwhEdXxZ5m5HpX1IfDfpFzlwuqHKj1Mnx+7FdhKbiyotOrHHHM/0cKe+IGXmjsYd5hfYzdrqqLXayuamPy7/5Ms/t62JXyyAvHujCFwzjD4X51M930DXko8iZyy+3twLES5RzKTZqTfaDTi8S9E8zOmHTk71tQ5QV5FHptmPLsbB2vgerRZFvy2HYF0Kpk8F8Kmdk+rlWxmVGrphGMBzh9cO9XLOsLL4hecz8Ymd8ff0fvt5EOKLxBSO82djLK4d62NM2xAMfWsW///kaciwKe66FKvfcb2sY24xFOnPTi+yRe5oR38nxz8Gwpq7YiVKKL928jJXVbgDy7TmM+EMU2nPJsZ79czM2gqdwQnmnc3jmY6xFdtp2fIARf4irl5afcd/yykJyrYryAnu838lmtfDigW7suRbsuRbeu6KCvBwrf3X1QloHxrFY1BnPc74qjQ8SGbaZXiTon2bktPpkbLTEJy5riB+LZe/FZ+nEPf1c94SO3HGp6YtpvH6kh1yr4rJFpWfcd+uqSi5bVMojbxzj/7zcSGm+jUvqi3l+Xycel421dR7ycqJlxc/fsDRhbSwvyMOioEMy/bQi5Z3TnB7064rP7ADLt0cDedE09XyIBn2nzUqu8Y1AOnLFTOzvGGZRecEpE/xiLBZFscvGJcYAg5XVbj62sY6+sQCN3aNsaChJShtzrBbKC+y8ergnvgqtSH0S9E8TK++4jJUN55ecWQuNZ/rTjNwBeM+iUm5aOS/+s9OWI6tsimkd7hxhacXZO1/X1nmw51pYV+fh0oUlXGjMIdm4IDlBH2BZZQG7W4f4/JO7ZOhmmpCgf5pYR+4CY1zz/Eky/YJ4pj990L9jbQ3f+dOL4j87bFbGZfSOOIthX5D2IR9L5p19Rcz8vBye/9xV3HPFApRSfPmW5bx3RcVZZ4nPtYc+to7Pv3cJY4EwA17pq0oHUtM/TWzM8YIyF3vahqgrcZ5xzsma/vTlndM5c60Ew5pgOBIv+Qgx0ZGu6N4NSyumXwZ5/oTfz/UNxfGJhMliy7GwzPhwaun3zqifS5hLgv5pYuWdO9bW4MrLoWSSX+L8vGiwn0mmfzqHUTbyBsK4HRL0xZkOG0t8LJlB0E8FE/eJuDCJ3zLEuZGgf5oRX4hcq+LyRaVcsbhs0nNiHbnnktU4jWUZxgPhacf4i+x0qHMEp806Z7tcJVpsQxVZZjk9SKp5mhFfkAJ77hkTYiYqMMo7083GnYwznulLZ66Y3J62IRZXFCRkbH0iFNhzKXLmzmrlT2EeCfqnGfGF4h21U4ll+tOtuzOZWHlHOnPFZJr7xtjePMB7l585KSuV1XqcsotWmpCgf5qZBP1Kd3TXoKpz+Pody/RlgpY4XTAc4cl3WrAouGNtrdnNmZUaj0PKO2lCavqnGfEFKcg7e9nmqiVlvPKFq+MdWLPhyD3ZkZuJvvPCYS6uLeKaZemVqZppYCzAd188zBPvtBAIRbhmadmUW3CmqtpiJy8d7EZrfdbSqDCfBP3TjPhCZ6xqeDql1DlvMD1x9E4m+v6mIwAc/8atJrckPextG+ITP3mHgbEAH7i4mtpiJ7eurjS7WbNW63HgD0XoHvFTUZheH1jZRoL+BFpr+sYC8YXVEiE+eieYeR25oXAkfrtzyJd22aoZfr65GV8gzDOfuXxO97BNtnpjB7mmnjEJ+ilOavoTHO4apWfEz8XzixL2Gs4MzvQnrlv0woEuE1uSPg53jbCiqjCtAz6cnMHe1DtqckvEdGYU9JVSRUqpXymlDiqlDiilLlVKFSulXlBKHTH+9hjnKqXU95VSjUqp3UqpNROe5y7j/CNKqbsSdVHn6kUjUF23rCJhr+HI4I7c4QnLUj8vC3BNS2vNka7RtJmEdTaVhXbsuRaaesbMboqYxkwz/e8Bz2qtlwEXAgeA+4BNWuvFwCbjZ4CbgcXGn3uBBwGUUsXA/cAGYD1wf+yDIlVsOtDFqmp3QssSzgzuyB0ej2b61UUOth0fIDih3CPO1DHkY8QfmnaNnXRgsSgaSvNp6pFMP9VNG/SVUm7gSuBhAK11QGs9CNwGPGqc9ihwu3H7NuCnOmozUKSUqgRuBF7QWvdrrQeAF4Cb5vBazsuoP8TOlsGEjzrJsVqwWS2ZGfSNTP+9KyoYD4bZ2zZkcotSW3y5hQRsZWiGBWUumnol0091M8n0G4Ae4MdKqZ1KqR8ppVxAhda6wzinE4jVRKqBlgmPbzWOTXX8FEqpe5VS25RS23p6emZ3NedhxBdEa6hKQudjdCOVzOvIHR6PBv3rjIlFW471m9mclBWJaD728Ba+9vsDQPqssTOdhaUuWvq9+EOZl9BkkpkE/RxgDfCg1vpiYIyTpRwAdHQh7TlZTFtr/ZDWep3Wel1Z2eRr3ySCPxgtReTlJr5vO1M3Uoll+gvK8llY5mKrBP1JvXW0j9eP9NLYPUpZQR6eDFmZsqHMRUTDiT6ZpJXKZhLhWoFWrfUW4+dfEf0Q6DLKNhh/dxv3twETpxPWGMemOp4S/MZG07Ft5hLJYbMylpGZfvSaCu05rG8oYUtTH+2DMjX/dD/f3IzHmcudl9Ry24VVZjdnziwojY3gkRJPKps26GutO4EWpVRss83rgP3AM0BsBM5dwNPG7WeAjxujeDYCQ0YZ6DngBqWUx+jAvcE4lhJ8xlo4eTmJz/RXVbt55VAPPSP+hL9WMg37glgUuGw53HNFA0op/ufPtkuH7gRNPaO8cKCLD6+r5RsfWs3fv2+F2U2aM7FJjbLwWmqbaYT7a+AxpdRu4CLgn4FvAO9VSh0Brjd+BvgD0AQ0Aj8E/gpAa90P/BPwjvHnH41jKSGZmf5nr1uMPxTh/7x0JOGvlUzD49EVSi0WxcKyfO5//wr2tA2xu3XQ7KalhEhEc9+v9+CyWbnn8gazmzPnipy55OflyMJrKW5GM3K11ruAdZPcdd0k52rg01M8zyPAI7NoX9LEOp+SUdNfUJbPrasq+f2eDv6/21Ym/PWSZdgXotBx8lcqtlfr4a5R1tYld0enVPTUzja2Hu/nWx9aTXk
GzlpVSlHjcdAqC6+lNJmRa4h15NqTkOkDLKnIp3c0EC8rZYLh8SCF9pOL1VUXOXDkWuNDE7PZeCDMvz53iAtri/jwuhqzm5MwtcVOWvol009lEvQN8fJOEjJ9OLksc1sGdXQO+04N+haLYnFFPo3dMmHnsS3NdA77+PItyzN6FcrYEsvRL/wiFUnQNySzIxeIb4XXlkH1z+HxU8s7AIvK8yXTB361vZWL5xclfePyZKv1OPEGwvSPBcxuipiCBH1DMjtyAao9mZ/pQ3TiUdewn6Hx4BSPynwHO4c52DnC7RedMRcx48T2mGjJoGQm00jQN8Q7cpOU6c8rtGO1qAzL9IMUnrbZ+2JjiYHG7uzN9p/e1Y7VotJynfzZqi02NkmXYZspS4K+IZbp23OTk+nnWC3MK7SnfaYfCEVo6hklFI4wFgifkekvr4wuGbz12MCkjz/YOcxAhpcC3mrsZW2dh9L8PLObknC1nmimf0wmaKUsCfqG2OgdW5IyfYjW9dM96H/pqT1c951XeeCPBwHOqOlXFTlYM7+I3+xsPaNz7/GtJ7j5e69zxbde5r/fbU9am5MpGI5woHOEC2sStzFPKnHl5bCq2s3Lh7qnP1mYQoK+wRcKk2tVWC3JG1lR7XGkdXnn3ZZBfr2jlfKCPB5+4xiF9hwuqi0647wPrqnhcNco+9qH48d2nhjgS0/t4fJFpcxz2/nP144mseXJc6RrlEAoktDd2FLNTSvnsfPEIB1D6fu7nckk6Bv8wUjSOnFjqorsdA77TtlmMF08u7eTT/zkHUrz83j+b6/iuc9dyTt/fz0Xzz9zi4T3r64i16r43e6O+LGHXmui0J7Dgx9dy9VLyjjSNUo4knnD/Pa2R5eXXpVlQR/gub2ykU4qkqBv8IfCSevEjakrdhGO6LQc6fD3v91Lab6Nx+7ZgNuRy9J5BVN+aLqduSypKOBARzTTb+4b49l9nXx0Yx35eTksnVeAPxTheF9m1YG9gRB7WofIz8uhvsRldnOSZmFZPksq8nlWdk9LSRL0Df5QJGmduDFLjR2TDnUOT3NmavEFw/SO+nnf6qr4NUxnaUUBhzqjI3hePNCN1vCRjXUALJsX7ew91DlCY/cIF/3j87zbMpiQtifLoDfA+q9v4mebm1lRVYgliWXDVHDTykq2Huune8THG0d6eXpXW1p+o81EEvQN/lAk6Zn+kooClIIDHek1nLFr2AdA5Sw2nFkyr4DOYR9D3iCHOocpcdniE9QWV+RjUXCwc4Sfbz7BoDfIH/Z2TPOMqe2d4wOM+kOsrnHzp+tqp39AhrnpgnlENHzowbf46MNb+OwTu3jlUPI2RRJTk6Bv8AXDSR25A9F19RtKXPEMOF20D8aCvmPGj1lq7A51uHuEQ12jp3xDsOdaqS9xsad1kN/uim6x8MaRXv7txcP8envrHLY8ebY192OzWvjF/7yUO9Zm7lo7U1leWUBdSXQdno9fWodSnNKRL8wzo1U2s4E/FCEvyeUdiJZ4YrXuVDfoDfCvzx+KB/DKotll+gAHO4Y50jVyRva7dF4BfzQ6/jYuKGZzU388SHwoDYPmtuMDrKpxJ71kmCqUUtx75QLePtrHV963gjeO9LK/Q/ZMTgUS9A3+YPI7ciFaz352XyfeQAinLbXfjuf2dfLzzSe4oCpag59NeafKbSc/L4cXD3TjDYRZdlpfwD1XNFDkzKWuxMWGhmI+8B9vAVBWkF4TmoLhCI3do+xpHeITl9eb3RxTfWRDHR/ZEO23WV5VKPsqpAgp7xjMqOlDNMPVGl47nPr1zp0nBoHo13S3I3dWH1JKKVZUFvJGYy/AGR3Aa+uKeeCDq/nkVQtZXVPEFYtLqS12MDQeTMkVG1/c38UV33rpjDWFHnqtiZu/9zqBcIRLZA+BuBWVhbT0j8f3URbmkaBvMGP0DsAVi0tZWlHA557cxa4UH7ESC/owuyw/5r5blmE1lhVeUjH1qB+rRfGzuzfwl5c1EAhF6EvBZRrebuqjpX+c5/d18r0Xj7CrZRCtNb/e0crK6kIe+OAqrl5aZnYzU8YK49vhwTQbtJCJJOgbzCrvuPJyeOx/bCA/L4cfvNyY9NefqWFfkMPdI/EZy+cS9NfM9/C9Oy/inssbcOVN/y0h1lHcOeSb9WslWlNPdI+Af/7DAb774mH+nx9u5sFXj9LUM8ZHN9Tx5+vnk2OV/14xFxhrMO1tk7q+2eS30hAt75jT6Vaan8cda2t56WA33cOpF+AAdrcMoTXceEEFAPNmMXJnoptXVc54M/Aqo6O4PQXXJ2oyFhQb8AZZXlnI/GIn33r2ELlWxc0rM381zdkqL7Qzv9jJW0f7zG5K1pOgb/CHwknbNWsyd15SSzii+WWKDlHc3TYIwEeNjrmqc8j0ZyuW6XekWKbvD4Vp6fdyw4oKnDYrX3nfCp7+zGV85X0r+OqfXIDbmTv9k2ShKxaXsrmpj6BM0jJVag8XSaLo2jvmBf36UhfL5hWwvXnyJYjN1tzrpTTfxvqGYv58/XxuuGBewl+zxGXDZrXQnmILdzX3eYlouHV1Jf/5sbXx7Q//8vIGk1uW2q5YXMpjW06wq2WQS+qlk9sskukbzOrInai22Jmyq242948xv9hJjtXCAx9cNePlF86HxaKY57abUtN/9K3jPLVj8m9dsXr+gtL8jN7vdq5durAUi4LX02CkWiaToA+EI5pA2NxMH6Lr66di/RrgRJ+XOhMWDZvnttMxOLug39LvPa+ZvD0jfr7++wNTdqwf7YnW8xvKsmcRtbngduSyqDyfA2k2Az3TSNAnuvsTJG9/3KlUFzkY8YdSbiyzPxSmY9jHfGP/02SqKXLQMjC7rff+6Xf7+V+/fHfaLft8wfCkcwD+a8sJAuEIR3vGGPKe+V7sbh1kXmF0spmYnRJXHoPe1BuCm00k6JP8/XGnUmUsQJZqJZ7WgXG0hrqS5Af95ZWFdAz56B31z+j8ln4vLx7oAuC3O9v468d3Trq2UTAc4bpvv8od//dtuod99I36+crTe7njwbf4wSuN8ZnAu06bRdo97GPTgW7ef6GM0DkXxS4b/Sk47yKbSNDn5P64Zo7egdQdoniiL5oxmxH0LzR24prpFP6fb2lGKUWV286/bTrCf7/bHv8QmGhzUx9tg+PsODHAxx/Zytd+f4D/2nICi1L86boaHrtnAxYFO5oH8AZC/MtzBxn0Bnh8awuhiI4vLyBmx+PKZWCSb08ieeT7KSf3x7WbXd7xRDP9lAv6Rpmk1oTyzsrqQiwKdrUMce2yimnPf+lAN5ctKmVVdSE/eDm6BeNkm3Q/t68TR66Vb3xoFZ99YhcHO0e4+/IG/mHCHIIlFQXsbBnkv7ac4AcvH6W8wM6vd7RyxeJS6kulnn8uil15DHgDhCM6qVuTipMk0ye6Py6Yn+mXuvKwWS20pkDQ11rzzWcPcqhzhOY+L06blbL85C9+5rTlsKSiYMpNVQa9Ab7y9F6GxoP0jfo50j3KxgXFfHhtLZcvKmVhmYvmvjF+s7OVh147yiuHurn2X1/h6V3tXL
WkjD+5sIrrl1eQn5fDp65eeMpzb2goZvPRPv7vq01AdCP3E8b4fHFuip25aM0ZaxaJ5JFMn5OZvtkduRaLorLIHl+v3kzH+7w8+MpRWgfGae4bY3G5ecMTL6wp4rn9nWitz2jDs3s7+enbzVQVOagzvolsaCihvtTFz+/ZwBd/tZtNB7v595caaeoZw2a1kG/PYcQX4vaLq1FK8R8fWcOAN0DpaR9qn7t+Ca8d6eVY7xjVRQ4OGn0DVy6RNXXOlcdlA6B/LECxcVsklwR9UqcjF6IjeFpnOVolEWJrpDy7t4NgWJ9S9ki2NXVFPLmthUNdI/GtFWNik9me2HqCq5aU4ci1srrm5CbkdaVOekf99I76qSjMI6LhqU+9h5J8W3yVUFuOhYrCM2cYe1w2fvqX69l0oItCRy6f/8W71Jc4TRm6miligX5ARvCYRoI+4Itn+uYH/TXzPfzglUbeOd5v6qzFve3RoB8Ma3IsitsvqjKtLdcsLQdg04HuSYN+fl4Ox/u8dG5rYV1dMbkTFjprmBCgv3fnxayt85xy/3Rqi538xWUNdI9Ev31Jln9+PM6Tmb4wh/lRLgXEpvlPlu0l26euXkiV28F9v95NJGLeOvL724dZUVlIQ6mLG1fOo8SEen5MeaGdC2vcvLA/OgonHNEc7hphb9sQTb1j3HNFA9cuK+fyRWV8+ppFpzw2lpVbLYoLa4pmFfBPaUOBnR9/4hL+5rrF53cxWS6e6UvQN41k+sDx3jFyLIoaz7mtHDmXXHk5fPLqhfzDb/fSNjhuyogZrTV724a4YcU8/vcty5O+d/Bkrl9ewbdfOMwdD77FvvZhxoPh+H2XLSrlc9cvmfRxsWGmF1QV4rCdX59N7BuHOHfxTF/KO6aRoE90SF9sXZlUENuD9mjPqClBv33Ix4A3yMrqwpRZMfL9F1bxxDstaODPLqllVbWb1470sKdtiFXV7ikf58rLYWV1IdcvlxE3qcBhs+LItdI/KkHfLBL0iQb9hhQad73QWNOlqWeMq5fCQ68d5YIqN5ctKk3K6+88Ee0cXVVTlJTXm4n6Uhdv3nftKcdmumH67/76ipTccjFbFbtskumbKDVSWxNFIprjfWMpNdmm2GXD7cjlaM8orxzq5p//cJCfvd2ctNd/+2gf+Xk5rKwqnP7kNCGrYaaOYpeN1w738J0XDsuHsQmyPuh3DvvwBSMplekrpVhY5uJg5wj3P7MPgNbBxA/jHBoP0tLv5e2mPi6p96RMuUtklhFfkN7RAN/fdIS2FJiImG2y+n/1t549yHu+8RJASgV9gAVl+WxvHqC5z8uCMhct/Yn/z/HVZ/Zx47+9RlPPGBsXlCT89UR2mti/crhLlllOtqwO+s/t64zfTrWgv7AsH4AlFfn86bpahsaDCV1yWWvN60d68Qaio2IuXShBXyTGl29dzs5/eC8AhzpHTW5N9plx0FdKWZVSO5VSvzN+blBKbVFKNSqlnlRK2YzjecbPjcb99ROe40vG8UNKqRvn/GpmKbaU8cc21lGZhD1fZ2NxeTTo33P5Amo90RE8c7nk8puNvdzw3Vfjk46O9ozRO+rnYxvruPvyBi6omnpEjBDnQymFx2Wjym2XTN8Es8n0PwscmPDzN4Hvaq0XAQPA3cbxu4EB4/h3jfNQSq0A7gQuAG4C/kMpZepiN+OBMJctKuGfbl+Zch19Vy8t48GPrOFDa2vi8wd+sa2FL/zyXbTW/HFPB6P+0Dk//1M72jjcNcq/b4ruDrW5qQ8gvtKkrIAoEm3JvIL4ekYieWYU9JVSNcCtwI+MnxVwLfAr45RHgduN27cZP2Pcf51x/m3AE1prv9b6GNAIrJ+Dazhn48EwDpP3xZ1KjtXCzasqsU6YNPbjN4/zq+2tPLevk089toPHt5w45TEdM9xAPBLRvHq4B6tF8fjWE7x0sItNB7qYV2g3Zc18kZ2WVhRwtHuUUDhidlOyykwz/X8D/g6IvTslwKDWOpZqtgLVxu1qoAXAuH/IOD9+fJLHxCml7lVKbVNKbevpSewGyr5gmLwUDfoTFbtsp3w4/edr0aV+d7UM8o//vZ8vPbWHF/d3cekDL/Evzx2cdhjcgc5hekf9/N2NS5lf4uQvf7KNlw/18P4LK1PuG4/IXEsqCgiEI5PudyASZ9rJWUqp9wHdWuvtSqmrE90grfVDwEMA69atS+ggXl8wkrKZ/kRKKWqLHRzuGsWiYOeJQSC62NioP0SOVeF2RGfO/uDloyyvLOR9q6deIO3Vw9EP0w9cXM1d76nnl9tbmV/s5MrFyZn8JQTA+obogoK/39PB54xZ6CLxZpLpXwb8iVLqOPAE0bLO94AipVTsQ6MGaDNutwG1AMb9bqBv4vFJHmOKVC7vnG5hWT71Jc74UEqrRdE57GPUH2LQG+SVQ90srSigyJnLK4fO/g3p9cO9LJtXQHmhHXuulY9trOOqJWWS5Yukqi12cuWSMp7Y2iIlniSaNuhrrb+kta7RWtcT7Yh9SWv9EeBl4A7jtLuAp43bzxg/Y9z/ko7WG54B7jRG9zQAi4Gtc3Yl58AXDGM3ebesmfra7St5/N6NrJnvAeDmlfNOuf9g5wgrqgq5dEEJbzX2TlniGQ+E2d48wOVJWtJBiLP56Ib5dA77eHmaREXMnfOJeF8EPq+UaiRas3/YOP4wUGIc/zxwH4DWeh/wC2A/8Czwaa11+IxnTRKtdVpl+iX5eVS6HVy5pIxcq+KTVy3EalFcPL+IWIK+dF4B71lUSvuQj+a+yWfwbmvuJxCOcJmUckQKuHppOUrBHmPTHpF4s1pwTWv9CvCKcbuJSUbfaK19wIenePzXga/PtpGJ4A9F0Jq06MidaH1DMXu+eiP2XCv3v38FKyoL+cIv3+V4n5dl8wqYb6zK+ebR3knXE3qzsY8ci2K9iRu0CBFjy7FQUWCnXZZjSJr0qG0kQGxf3HTJ9CeyG23++KX1rKsvZoWxMNpyY9OT2mIHz+7tnPSxbzb2sma+B1eeLLAqUkO1xzGnEw/F2WVt0I9twnG+G2ukgltXVXH98nLKC/JQSvGBi6p5s7GXzqFTN1gf9AbY2z6UtCWahZiJqiKHLLyWRFkf9NOlI/dsbl1dyY/uuiQ++uaDa2qiG4DvbGVv2xCf/Nl2xgNh3j7ah9Zw2SJZV0ekjuoiBx1D46ZuD5pNsvY7vi+W6adheWc69aUuNjQU88PXmnhqRxuN3aPc1TLIm0d7cdmsXFhbZHYThYir9jgIhjU9o/6U2Kc606V/mnuOTmb6mRf0AR744CpCYU1jd3QVw/0dw7zZ2MfGBSXnvDm4EIlQYyx82Cp1/aTI2v/9vkBmB/0FZfk8/BeX8Pe3LqesII/n9nZyrHdM6vki5cRWu5W6fnJkbXlnPIPLOzHrG4pZ31DMG4298Vm61ywrN7lVQpyq2lhQUEbwJEf2ZvqxIZsZMHpnOhcYQzrrS5wpt1mMEPl5ObgdubQOJH5LUJHFQT9e08/J/KC/ojK6IcrVSyXLF6lpaUUBe9uHzW5GVpCgb8v8f4JLGjxUFzm47
aKpV94Uwkxr6z3saxtiPGDayixZI/Mj3hT8WVDTjykvsPPmfddysbFYmxCpZl2dh1BEs7t10OymZLysDfrjGT56R4h0Els9dlvzgMktyXzZG/SDYXIsSsasC5ECPC4bC8tcbJegn3BZG/HSZdcsIbLFqmo3h2Sj9ITL2qA/nib74wqRLWo8TjqHfbKLVoJlbdD3BcM4smDkjhDpotrjIBzRdI34zW5KRsvaqOdLo12zhMgG1bE1ePplklYiZW3QHw+GZeSOECmkxiNr8CRD9gb9gAR9IVJJlay2mRRZG/R9oYgEfSFSiD3XSllBniy8lmDZG/QDYRwZsGuWEJmkWrZOTLisjXrj0pErRMqp8Thktc0Ey9qg7w2EcOZl7XYCQqSkao+D9kGf7JebQFkb9Ef9IfIl6AuRUioL7QTCEQa8AbObkrGyMuiHIxpfMIIzCzZQESKdzHNHN0bvGpYJWomSlUHfGwgB4LJJpi9EKikvNIL+iM/klmSuLA360WWVnXmS6QuRSipiQX9Ign6iZGXQH/NLpi9EKirLzwOkvJNIWRn0Y5m+SzpyhUgpthwLJS6blHcSKCuD/slMX8o7QqSa8kI73cMS9BMlO4O+0ZEr4/SFSD0VhXlS3kmg7Az6fqO8I5m+ECmnosBOl2T6CZOVQd8rmb4QKavCbad31C87aCVIVgb9WKafL6N3hEg5FYV5RDR8f9MR+sdkZu5cy8qgH8v0HVLeESLlNJS6APj+S408taPV5NZknqwM+mOBMDarBVtOVl6+ECnt0gUlvPC3V+Jx5nK0Z9Ts5mScrIx6Y/6QzMYVIkUppVhcUcCi8nyO9oyZ3ZyMk6VBPyyzcYVIcQtK82mSTH/OZWXQ9wZCssKmECluYbmL3tEAQ96g2U3JKNMGfaVUrVLqZaXUfqXUPqXUZ43jxUqpF5RSR4y/PcZxpZT6vlKqUSm1Wym1ZsJz3WWcf0QpdVfiLuvsxgJhWYJBiBS3oDQfgKO9ku3PpZlk+iHgf2mtVwAbgU8rpVYA9wGbtNaLgU3GzwA3A4uNP/cCD0L0QwK4H9gArAfuj31QJJvXH8IlNX0hUtrC8mjQb5K6/pyaNuhrrTu01juM2yPAAaAauA141DjtUeB24/ZtwE911GagSClVCdwIvKC17tdaDwAvADfN5cXM1FggjFNq+kKktFqPg1yrkhE8c2xWNX2lVD1wMbAFqNBadxh3dQIVxu1qoGXCw1qNY1MdP/017lVKbVNKbevp6ZlN82ZszB+SJRiESHE5Vgs1Hicn+mWj9Lk046CvlMoHfg18Tms9PPE+rbUG5mQnY631Q1rrdVrrdWVlZXPxlGeQTdGFSA81HgetA+NmNyOjzCjoK6VyiQb8x7TWTxmHu4yyDcbf3cbxNqB2wsNrjGNTHU+6MX9YNkUXIg3UeBy0DUimP5dmMnpHAQ8DB7TW35lw1zNAbATOXcDTE45/3BjFsxEYMspAzwE3KKU8RgfuDcaxpApHNOPBsAzZFCINVBc56B0N4AuGCYYjvHywe/oHibOaSaZ/GfAx4Fql1C7jzy3AN4D3KqWOANcbPwP8AWgCGoEfAn8FoLXuB/4JeMf484/GsaSSTdGFSB81HicArQPj/GZHG5/4yTvsax8yuVXpbdrIp7V+A1BT3H3dJOdr4NNTPNcjwCOzaeBcG/FFg36BXYK+EKmuxuMAoHXAy+amPgAau0e5oMptZrPSWtbNyB32RWf3FTpyTW6JEGI61UbQbxscZ+vxaGHgeK/U+M9H1gV9yfSFSB/lBXZyrYp3jvXHR/Eckxm65yXrgv7wuJHp2yXTFyLVWS2KqiIHf9zbCUCJy8axPsn0z0fWBX3J9IVIL/UlLvyhCOUFedxwQQXHe2VZhvORdZFPavpCpJcHPriK431jXFhTxONbTzA0HmRgLIDHZTO7aWkp64K+ZPpCpJeqIgdVRdEO3fqS6FaKTb1jrJWgf06yrrwzPB4kL8dCXo5MzhIi3SydV4BFwV89tj0+hFPMTvYFfV+QAunEFSIt1RY7+ckn1hOOaH785jGzm5OWsjDohyh0SGlHiHR15ZIyLltUyq6WQbObkpayL+iPS6YvRLq7qLaIrmE/HUOyAudsZV3QH/GFKJROXCHS2kW1RQDsOjFoajvSUdYF/WFfUCZmCZHmVlQVYrNapMRzDrIu6I9ITV+ItJeXY2V5VSG7W2XFzdnKuqAvNX0hMsOS8nzZP/ccZFXQ94fC+EMRqekLkQEWlOXTPeJnxJhlL2Ymq4L+ydm4kukLke4aSqOzc4/JWjyzkpVBX2r6QqS/hWXRoP/r7a3c8r3XJeOfoawK+rFllQvyJNMXIt3NL3FiUfCzzc3s7ximWZZcnpGsCvr93gAAHpcEfSHSXV6OldpiJxEd/blvLGBug9JEVgX9ziEfAPPcDpNbIoSYC7G6PkD/mN/ElqSPrAv6SkF5QZ7ZTRFCzIHVNUW4jb0x+kYl05+JrAr6XcM+Slx55Fqz6rKFyFifuWYRr/6/V2O1KPqlvDMjWTWMpXPYR6XbbnYzhBBzxJZjwZZjw+O0SdCfoaxKeTuHfFQUStAXItOUuGzSkTtD2RX0h33Mc0s9X4hMU+ySTH+msibo+4JhBr1B5kmmL0TGKc63MSBBf0ayJuh3DctwTSEylZR3Zi5rgn5HbIy+ZPpCZJxil42h8SDBcMTspqS8rAn6JzN9qekLkWlKXDYABryS7U8na4J++6CUd4TIVB4j6F/6wEv84p0Wk1uT2rIm6J/oH6PEZSM/L6umJgiRFYqNoB+OaP6wt8Pk1qS2rAn6zX1e5pc4zW6GECIBaj0n/2/vaB4gEluFTZwhq4J+XbEEfSEyUW2xk9f/7hr+5Y7VDPtCNMo2ilPKilpHIBShY2ic+cXVZjdFCJEgtcVOQkaGv715gCUVBSa3KDVlRabfOuAlomF+iWv6k4UQaau+xEmJy8b25gGzm5KysiLoN/dHd9Spk5q+EBlNKcWaOo8E/bPIiqDfEgv6UtMXIuOtrfNwrHeMvlHZVGUyWRH0m/u82HMtlMnmKUJkvHV1HgDJ9qeQ8UFfa83rR3pYWeVGKWV2c4QQCbay2o3NamH7CQn6k0l60FdK3aSUOqSUalRK3Zfo19vXPszhrlFuv1hG7giRDey5VlZWF7LtuAT9ySQ16CulrMAPgJuBFcCfK6VWJOr1ekb8PPzGMWxWC+9bXZmolxFCpJirl5azvXmA3+xsNbspKSfZ4/TXA41a6yYApdQTwG3A/rl8kT2tQ3z2iZ009Y4B8Gfraily2ubyJYQQKexTVy/kraO9/O2T7/K13x3AnmtFKbAohUVFR/nEfk7Vou/VS8v48q1znxMnO+hXAxNXQ2oFNkw8QSl1L3AvwPz588/pRSrceSwoc/Fnl9SyvqGY1TVF59ZaIURayrVaeOjj63hyawtNvaMEw5qI1mgNEa2J6Gh/X0Sn7nINidraNeVm5GqtHwIeAli3bt05vSPlBXZ+dNclc9ouIUR6KbTn8j+uXGB2M1JOsjty24DaCT/XGMeEEEIkQbKD/jvA
YqVUg1LKBtwJPJPkNgghRNZKanlHax1SSn0GeA6wAo9orfclsw1CCJHNkl7T11r/AfhDsl9XCCFEFszIFUIIcZIEfSGEyCIS9IUQIotI0BdCiCyidArPSFNK9QDN5/EUpUDvHDUn1WXTtYJcbybLpmuFxFxvnda6bLI7Ujrony+l1Dat9Tqz25EM2XStINebybLpWiH51yvlHSGEyCIS9IUQIotketB/yOwGJFE2XSvI9WaybLpWSPL1ZnRNXwghxKkyPdMXQggxgQR9IYTIIhkZ9JO9+boZlFLHlVJ7lFK7lFLbjGPFSqkXlFJHjL89ZrfzXCmlHlFKdSul9k44Nun1qajvG+/3bqXUGvNaPntTXOtXlVJtxvu7Syl1y4T7vmRc6yGl1I3mtPrcKaVqlVIvK6X2K6X2KaU+axzPuPf3LNdq3vurtc6oP0SXbD4KLABswLvACrPblYDrPA6UnnbsW8B9xu37gG+a3c7zuL4rgTXA3umuD7gF+COggI3AFrPbPwfX+lXgC5Ocu8L4nc4DGozfdavZ1zDL660E1hi3C4DDxnVl3Pt7lms17f3NxEw/vvm61joAxDZfzwa3AY8atx8FbjevKedHa/0a0H/a4amu7zbgpzpqM1CklKpMSkPnwBTXOpXbgCe01n6t9TGgkejvfNrQWndorXcYt0eAA0T3z8649/cs1zqVhL+/mRj0J9t8/Wz/yOlKA88rpbYbm8kDVGitO4zbnUCFOU1LmKmuL1Pf888Y5YxHJpTqMupalVL1wMXAFjL8/T3tWsGk9zcTg362uFxrvQa4Gfi0UurKiXfq6HfFjB2Pm+nXBzwILAQuAjqAb5vamgRQSuUDvwY+p7Uennhfpr2/k1yrae9vJgb9rNh8XWvdZvzdDfyG6FfArtjXXuPvbvNamBBTXV/Gveda6y6tdVhrHQF+yMmv+BlxrUqpXKJB8DGt9VPG4Yx8fye7VjPf30wM+hm/+bpSyqWUKojdBm4A9hK9zruM0+4CnjanhQkz1fU9A3zcGOWxERiaUCZIS6fVrD9A9P2F6LXeqZTKU0o1AIuBrclu3/lQSingYeCA1vo7E+7KuPd3qms19f01u3c7QT3mtxDtJT8KfNns9iTg+hYQ7eF/F9gXu0agBNgEHAFeBIrNbut5XOPjRL/2BonWNe+e6vqIjur4gfF+7wHWmd3+ObjWnxnXstsIBJUTzv+yca2HgJvNbv85XO/lREs3u4Fdxp9bMvH9Pcu1mvb+yjIMQgiRRTKxvCOEEGIKEvSFECKLSNAXQogsIkFfCCGyiAR9IYTIIhL0hRAii0jQF0KILPL/A+lHPAQcI6WOAAAAAElFTkSuQmCC\n",
69 | "text/plain": [
70 | ""
71 | ]
72 | },
73 | "metadata": {
74 | "needs_background": "light"
75 | },
76 | "output_type": "display_data"
77 | }
78 | ],
79 | "source": [
80 | "img = cv2.imread(\"me.jpg\")\n",
81 | "img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\n",
82 | "\n",
83 | "hist = np.zeros((256), dtype = int)\n",
84 | "\n",
85 | "rows, cols = img.shape\n",
86 | "\n",
87 | "for i in range(rows):\n",
88 | " for j in range(cols):\n",
89 | " index = img[i,j] \n",
90 | " hist[index] +=1\n",
91 | "\n",
92 | "optimized_img = cv2.equalizeHist(img)\n",
93 | "cv2.imwrite(\"Output.jpg\",result)\n",
94 | " \n",
95 | "plt.plot(hist)"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": null,
101 | "id": "2ca9ac97-95aa-4181-b819-3c0fb4a3924f",
102 | "metadata": {},
103 | "outputs": [],
104 | "source": []
105 | }
106 | ],
107 | "metadata": {
108 | "kernelspec": {
109 | "display_name": "Python 3 (ipykernel)",
110 | "language": "python",
111 | "name": "python3"
112 | },
113 | "language_info": {
114 | "codemirror_mode": {
115 | "name": "ipython",
116 | "version": 3
117 | },
118 | "file_extension": ".py",
119 | "mimetype": "text/x-python",
120 | "name": "python",
121 | "nbconvert_exporter": "python",
122 | "pygments_lexer": "ipython3",
123 | "version": "3.9.0"
124 | }
125 | },
126 | "nbformat": 4,
127 | "nbformat_minor": 5
128 | }
129 |
--------------------------------------------------------------------------------
/assignment-29/Microsoft Logo.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 87,
6 | "id": "80742eaf-bd27-4edb-a455-bc829668dd73",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import cv2\n",
11 | "import numpy as np\n",
12 | "import matplotlib.pyplot as plt\n",
13 | "from PIL import ImageFont, ImageDraw, Image\n",
14 | "import time"
15 | ]
16 | },
17 | {
18 | "cell_type": "code",
19 | "execution_count": 88,
20 | "id": "bf45998e-4927-443c-86b2-6537a2c3daaa",
21 | "metadata": {},
22 | "outputs": [
23 | {
24 | "data": {
25 | "text/plain": [
26 | ""
27 | ]
28 | },
29 | "execution_count": 88,
30 | "metadata": {},
31 | "output_type": "execute_result"
32 | },
33 | {
34 | "data": {
35 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAADACAYAAAD/eCOHAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAPbklEQVR4nO3df6zddX3H8edrVLhOXUupa1jbDIzNDP8IrHEQzeJgIjBiWYIEYkZlXZpsbNG5xJX5x2KyP2RbREkWtBG3apjCUEdDmIwVzLI/QMpE5IfIFcW2AaoI6EbulPneH+dTOK23vef2/jj3fvZ8JCfn8/18Pueez+d8b1/33M/9nn5SVUiS+vIL4x6AJGn+Ge6S1CHDXZI6ZLhLUocMd0nqkOEuSR1akHBPcn6Sx5JMJtm+EM8hSTqyzPd17kmOA74FvAPYB9wHXF5Vj8zrE0mSjmgh3rm/BZisqieq6ifA54HNC/A8kqQjWLEAX3MdsHfoeB/wG4d3SrIN2AawYsWKX1+5cuUCDEWS+vXss8/+oKpeP13bQoT7SKpqB7ADYM2aNXXRRReNayiStCzt3LnzySO1LcSyzH5gw9Dx+lYnSVokCxHu9wEbk5ya5HjgMmDXAjyPJOkI5n1ZpqpeSvLHwB3AccCnq+rh+X4eSdKRLciae1XdDty+EF9bkjQzP6EqSR0y3CWpQ4a7JHXIcJekDhnuktQhw12SOmS4S1KHDHdJ6pDhLkkdMtwlqUOGuyR1yHCXpA4Z7pLUIcNdkjpkuEtShwx3SerQjOGe5NNJDiR5aKhudZI7kzze7k9s9UlyXZLJJA8mOXMhBy9Jmt4o79z/ATj/sLrtwO6q2gjsbscAFwAb220bcP38DFOSNBszhntV/Tvww8OqNwM7W3kncPFQ/Wdq4B5gVZKT52mskqQRHeua+9qqeqqVnwbWtvI6YO9Qv32t7uck2ZZkT5I9U1NTxzgMSdJ05vwH1aoqoI7hcTuqalNVbZqYmJjrMCRJQ4413J85uNzS7g+0+v3AhqF+61udJGkRHWu47wK2tPIW4Nah+ivaVTNnAS8MLd9IkhbJipk6JPkc8HZgTZJ9wF8CHwFuTrIVeBK4tHW/HbgQmAReBK5cgDFLkmYwY7hX1eVHaDp3mr4FXDXXQUmS5sZPqEpShwx3SeqQ4S5JHTLcJalDhrskdchwl6QOGe6S1CHDXZI6ZLhLUocMd0nqkOEuSR0y3CWpQ4a7JHXIcJekDhnuktShGcM9yYYkdyd5JMnDSd7X6lcnuTPJ4+3+xFafJNclmUzyYJIzF3oSkqRDjfLO/SXgz6rqNOAs4KokpwHbgd1VtRHY3Y4BLgA2tts24Pp5H7Uk6ahmDPeqeqqq/rOVfww8CqwDNgM7W7edwMWtvBn4TA3cA6w6uJm2JGlxzGrNPckpwBnAvcDaoc2vnwbWtvI6YO/Qw/a1OknSIhk53JO8FvgC8P6q+tFwW9s7tWbzxEm2JdmTZM/U1NRsHipJmsFI4Z7kVQyC/caq+mKrfubgcku7P9Dq9wMbhh6+vtUdoqp2VNWmqto0MTFxrOOXJE1jlKtlAtwAPFpVHx1q2gVsaeUtwK1D9Ve0q2bOAl4YWr6RJC2CFSP0eSvwe8A3kjzQ6v4C+Ahwc5KtwJPApa3tduBCYBJ4EbhyPgcsSZrZjOFeVf8B5AjN507Tv4Cr5jguSdIc+AlVSeqQ4S5JHTLcJalDhrskdchwl6QOGe6S1CHDXZI6ZLhLUocMd0nqkOEuSR0y3CWpQ4a7JHXIcJekDhnuktQhw12SOmS4S1KHRtlmbyLJV5N8PcnDST7c6k9Ncm+SySQ3JTm+1Z/Qjidb+ykLPAdJ0mFGeef+P8A5VfVm4HTg/LY36jXAtVX1RuA5YGvrvxV4rtVf2/pJkhbRjOFeA//VDl/VbgWcA9zS6ncCF7fy5nZMaz+3bbItSVokI625JzmubY59ALgT+DbwfFW91LrsA9a18jpgL0BrfwE4aZqvuS3JniR7pqam5jQJSdKhRgr3qvrfqjodWA+8BXjTXJ+4qnZU1aaq2jQxMTHXLydJGjKrq2Wq6nngbuBsYFWSFa1pPbC/lfcDGwBa+0rg2fkYrCRpNKNcLfP6JKta+dXAO4BHGYT8Ja3bFuDWVt7Vjmntd1VVzeOYJUkzWDFzF04GdiY5jsEPg5ur6rYkjwCfT/JXwNeAG1r/G4DPJpkEfghctgDjliQdxYzhXlUPAmdMU/8Eg/X3w+ungHfPy+gkScfET6hKUocMd0nqkOEuSR0y3CWpQ4a7JHXIcJekDhnuktQhw12SOmS4S1KHDHdJ6pDhLkkdMtwlqUOGuyR1yHCXpA4Z7pLUIcNdkjo0crgnOS7J15Lc1o5PTXJvkskkNyU5vtWf0I4nW/spCzR2SdIRzOad+/sY7J160DXAtVX1RuA5YGur3wo81+qvbf0kSYtopHBPsh74HeBT7TjAOcAtrctO4OJW3tyOae3ntv6SpEUy6jv3jwEfBH7Wjk8Cnq+ql9rxPmBdK68D9gK09hda/0Mk2ZZkT5I9U1NTxzZ6SdK0Zgz3JBcBB6rq/vl84qraUVWbqmrTxMTEfH5pSfp/b8UIfd4KvCvJhcAE8EvAx4FVSVa0d+frgf2t/35gA7AvyQpgJfDsvI9cknREM75zr6qrq2p9VZ0CXAbcVVXvAe4GLmndtgC3tvKudkxrv6uqal5HLUk6qrlc5/7nwAeSTDJYU7+h1d8AnNTqPwBsn9sQJUmzNcqyzMuq6ivAV1r5CeAt0/SZAt49D2OTJB0jP6EqSR0y3CWpQ4a7JHXIcJekDhnuktQhw12SOmS4S1KHDHdJ6pDhLkkdMtwlqUOGuyR1yHCXpA4Z7pLUIcNdkjpkuEtSh0YK9yTfTfKNJA8k2dPqVie5M8nj7f7EVp8k1yWZTPJgkjMXcgKSpJ83m3fuv1VVp1fVpna8HdhdVRuB3byy49IFwMZ22wZcP1+DlSSNZi7LMpuBna28E7h4qP4zNXAPg420T57D80iSZmnUcC/gX5Pcn2Rbq1tbVU+18tPA2lZeB+wdeuy+VneIJNuS7EmyZ2pq6hiGLkk6klH3UH1bVe1P8svAnUm+OdxYVZWkZvPEVbUD2AGwZs2aWT1WknR0I71zr6r97f4A8CUGG2M/c3C5pd0faN33AxuGHr6+1UmSFsmM4Z7kNUled7AMnAc8BOwCtrRuW4BbW3kXcEW7auYs4IWh5RtJ0iIYZVlmLfClJAf7/2NVfTnJfcDNSbYCTwKXtv63AxcCk8CLwJXzPmpJ0lHNGO5V9QTw5mnqnwXOnaa+gKvmZXSSpGPiJ1QlqUOGuyR1yHCXpA4Z7pLUIcNdkjpkuEtShwx3SeqQ4S5JHTLcJalDhrskdchwl6QOGe6S1CHDXZI6ZLhLUocMd0nqkOEuSR0aKdyTrEpyS5JvJnk0ydlJVie5M8nj7f7E1jdJrksymeTBJGcu7BQkSYcb9Z37x4EvV9WbGOzK9CiwHdhdVRuB3e0Y4AJgY7ttA66f1xFL
kmY0ygbZK4HfBG4AqKqfVNXzwGZgZ+u2E7i4lTcDn6mBe4BVSU6e53FLko5ilHfupwLfB/4+ydeSfCrJa4C1VfVU6/M0g420AdYBe4cev6/VHSLJtiR7kuyZmpo69hlIkn7OKOG+AjgTuL6qzgD+m1eWYICXN8Wu2TxxVe2oqk1VtWliYmI2D5UkzWCUcN8H7Kuqe9vxLQzC/pmDyy3t/kBr3w9sGHr8+lYnSVokM4Z7VT0N7E3ya63qXOARYBewpdVtAW5t5V3AFe2qmbOAF4aWbyRJi2DFiP3+BLgxyfHAE8CVDH4w3JxkK/AkcGnreztwITAJvNj6SpIW0UjhXlUPAJumaTp3mr4FXDW3YUmS5sJPqEpShwx3SeqQ4S5JHTLcJalDGfz9c8yDSH4MPDbucczRGuAH4x7EPOhhHs5haehhDrC05/GrVfX66RpGvRRyoT1WVdNdjbNsJNmz3OcAfczDOSwNPcwBlu88XJaRpA4Z7pLUoaUS7jvGPYB50MMcoI95OIeloYc5wDKdx5L4g6okaX4tlXfukqR5ZLhLUofGHu5Jzk/yWNtQe/vMjxiPJBuS3J3kkSQPJ3lfq192G4UnOa7tqnVbOz41yb1trDe1//2TJCe048nWfspYB970sGF7kj9t30cPJflckonlcB6SfDrJgSQPDdXN+rVPsqX1fzzJlumea5Hn8Dft++nBJF9Ksmqo7eo2h8eSvHOofmlnV1WN7QYcB3wbeANwPPB14LRxjukoYz0ZOLOVXwd8CzgN+Gtge6vfDlzTyhcC/wIEOAu4d9xzGJrLB4B/BG5rxzcDl7XyJ4A/bOU/Aj7RypcBN4177G0sO4E/aOXjgVXL6Tww2HbyO8Crh17/9y6H88BgP+UzgYeG6mb12gOrGfzX4auBE1v5xDHP4TxgRStfMzSH01ouncBgy9Fvt9xa8tk17m/ys4E7ho6vBq4e94sy4thvBd7B4JO1J7e6kxl8IAvgk8DlQ/1f7jfmca8HdgPnALe1f3g/GPrGfvmcAHcAZ7fyitYvYx7/yhaMOax+2ZwHXtlneHV7XW8D3rlczgNwymHBOKvXHrgc+ORQ/SH9xjGHw9p+F7ixlQ/JpIPnYjlk17iXZUbaTHupab8WnwHcyxw3Ch+DjwEfBH7Wjk8Cnq+ql9rx8DhfnkNrf6H1H6cF2bB9MVXVfuBvge8BTzF4Xe9neZ2HYbN97ZfcOTnM7zP4jQOW7xzGHu7LTpLXAl8A3l9VPxpuq8GP8CV7bWmSi4ADVXX/uMcyBwuyYftiamvSmxn8oPoV4DXA+WMd1DxZ6q/9TJJ8CHgJuHHcY5mrcYf7stpMO8mrGAT7jVX1xVa9nDYKfyvwriTfBT7PYGnm48CqJAf/n6Hhcb48h9a+Enh2MQc8jR42bP9t4DtV9f2q+inwRQbnZjmdh2Gzfe2X4jkhyXuBi4D3tB9SsMzmMGzc4X4fsLFdJXA8gz8W7RrzmKaVJMANwKNV9dGhpmWzUXhVXV1V66vqFAav9V1V9R7gbuCS1u3wORyc2yWt/1jflVUfG7Z/DzgryS+276uDc1g25+Ews33t7wDOS3Ji+y3mvFY3NknOZ7Bc+a6qenGoaRdwWbti6VRgI/BVlkN2jXvRn8Ff1L/F4C/PHxr3eI4yzrcx+HXzQeCBdruQwdrnbuBx4N+A1a1/gL9r8/oGsGncczhsPm/nlatl3sDgG3YS+CfghFY/0Y4nW/sbxj3uNq7TgT3tXPwzgysultV5AD4MfBN4CPgsg6sxlvx5AD7H4O8EP2XwW9TWY3ntGaxrT7bblUtgDpMM1tAP/tv+xFD/D7U5PAZcMFS/pLPL/35Akjo07mUZSdICMNwlqUOGuyR1yHCXpA4Z7pLUIcNdkjpkuEtSh/4P9c+cGBVOx3QAAAAASUVORK5CYII=\n",
36 | "text/plain": [
37 | ""
38 | ]
39 | },
40 | "metadata": {
41 | "needs_background": "light"
42 | },
43 | "output_type": "display_data"
44 | }
45 | ],
46 | "source": [
47 | "img = np.ones((640,1360,3) , dtype = \"uint8\")*95\n",
48 | "plt.imshow(img)"
49 | ]
50 | },
51 | {
52 | "cell_type": "code",
53 | "execution_count": 89,
54 | "id": "61ff1420-2db2-402b-b379-588e055a7062",
55 | "metadata": {},
56 | "outputs": [
57 | {
58 | "data": {
59 | "text/plain": [
60 | ""
61 | ]
62 | },
63 | "execution_count": 89,
64 | "metadata": {},
65 | "output_type": "execute_result"
66 | },
67 | {
68 | "data": {
69 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAADACAYAAAD/eCOHAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAQ10lEQVR4nO3df4xlZX3H8fdnd3Z3EOguuEro7rZg3dTQpiJuFKNpVKoC3bg0oQZj6kq32cRio7WJhfpHY+If2jb+Shp0I7aDQYH6o2wIleJKYppUdBBBBJHxB7JbcAX5oeKAy377x30WL+ssc2dn7tyZ4/uV3NxznvPce57nnpnPPfe5P55UFZKkblkx6gZIkhae4S5JHWS4S1IHGe6S1EGGuyR1kOEuSR00lHBPcnaSu5JMJbl4GPuQJB1ZFvpz7klWAt8BXgPsBb4GvLGq7ljQHUmSjmgYZ+4vAaaq6ntV9QRwJbBtCPuRJB3B2BDucwNwb9/6XuClh1dKshPYCTA2NvbitWvXDqEpktRdDz744ANV9ZyZtg0j3AdSVbuAXQDr16+vrVu3jqopkrQsTUxM3HOkbcMYltkHbOpb39jKJEmLZBjh/jVgc5JTk6wGLgB2D2E/kqQjWPBhmao6kORtwPXASuATVfWthd6PJOnIhjLmXlXXAdcN474lSbPzG6qS1EGGuyR1kOEuSR1kuEtSBxnuktRBhrskdZDhLkkdZLhLUgcZ7pLUQYa7JHWQ4S5JHWS4S1IHGe6S1EGGuyR1kOEuSR1kuEtSB80a7kk+kWR/ktv7yk5MckOSu9v1Ca08ST6SZCrJbUnOGGbjJUkzG+TM/d+Bsw8ruxjYU1WbgT1tHeAcYHO77AQuXZhmSpLmYtZwr6ovAz85rHgbMNGWJ4Dz+sovr56vAOuSnLxAbZUkDehox9xPqqr72vL9wElteQNwb1+9va3s1yTZmWQyyeT09PRRNkOSNJN5v6FaVQXUUdxuV1Vtqaot4+Pj822GJKnP0Yb7jw4Nt7Tr/a18H7Cpr97GViZJWkRHG+67ge1teTtwTV/5m9unZs4EHukbvpEkLZKx2Sok+TTwSmB9kr3APwLvA65OsgO4B3hDq34dcC4wBTwGXDiENkuSZjFruFfVG4+w6awZ6hZw0XwbJUmaH7+hKkkdZLhLUgcZ7pLUQYa7JHWQ4S5JHWS4S1IHGe6S1EGGuyR1kOEuSR1kuEtSBxnuktRBhrskdZDhLkkdZLhLUgcZ7pLUQbOGe5JNSW5MckeSbyV5eys/MckNSe5u1ye08iT5SJKpJLclOWPYnZAkPd0gZ+4HgL+rqtOAM4GLkpwGXAzsqarNwJ62DnAOsLlddgKXLnirJUnPaNZwr6r7qurrbfmnwJ3ABmAbMNGqTQDnteVtwOXV8xVg3aHJtCVJi2NOY+5JTgFeBNwEnNQ3+fX9wElteQNwb9/N9rYySdIiGTjckxwHfBZ4R1U92r+tzZ1ac9lxkp1JJpNMTk9Pz+WmkqRZzDpBNkCSVfSC/Yqq+lwr/lGSk6vqvjbssr+V7wM29d18Yyt7mqraBewCWL9+/ZyeGJay8TrA+oM/H+o+frLiGB7LarLyAGPHPwoZ3r6efOwYDj5+zPB2IGkoZg33JAEuA+6sqg/0bdoNbAfe166v6St/W5IrgZcCj/QN33TeqU8+xFunJ4e6j8vX/BGTqzYwtvYRnvuaL5AVB4e2r4dveTE/+/ZpQ7t/ScMxyJn7y4G/AL6Z5But7B/ohfrVSXYA9wBvaNuuA84FpoDHgAsXssFLXSjGODi0k+mifyytyIqDZOXwwj3pzIsq6TfKrOFeVf/DkV/4nzVD/QIumme7JEnz4DdUJamDDHdJ6iDDXZI6yHCXpA4y3CWpgwx3Seogw12SOshwl6QOMtwlqYMMd0nqIMNdkjrIcJekDjLcJamDDHdJ6iDDXZI6yHCXpA6aNdyTjCf5apJbk3wryXta+alJbkoyleSqJKtb+Zq2PtW2nzLkPkiSDjPImfvjwKur6oXA6cDZSc4E3g98sKqeDzwE7Gj1dwAPtfIPtnqSpEU0a7hXz8/a6qp2KeDVwGda+QRwXlve1tZp289qk2xLkhbJQGPuSVa2ybH3AzcA3wUerqoDrcpeYENb3gDcC9C2PwI8e4b73JlkMsnk9PT0vDohSXq6gcK9qp6sqtOBjcBLgBfMd8dVtauqtlTVlvHx8fnenSSpz9hcKlfVw0luBF4GrEsy1s7ONwL7WrV9wCZgb5IxYC3w4AK2eUl7YMWz+MKq32OY41D/t+I4AA7+4hgeveMPSWpo+3r8gfVDu29JwzNruCd5DvDLFuzHAK+h9ybpjcD5wJXAduCadpPdbf1/2/YvVdXw0meJ2b/iOHavmfcLm4E8+YtjefTWFy3KviQtL4OcuZ8MTCRZSW8Y5+qqujbJHcCVSd4L3AJc1upfBnwyyRTwE+CCIbR7yXoyK3li1bOGuo9VB37B2MEDrMhBjlvz+FD3Nf3LVTzx5Jxe4ElaAmb9r62q24BfOz2squ/RG38/vHwa+PMFad0ytH/d8/jyH2ynhjgwc+ZdV3PKj29lw7qHeeervsjKFQeHtq/P33o6N969OK9EJC0cT8kW2MGMMb3qeBjWpz+reHLFKgBW5iDHr3mcsZXDC/c1Y08O7b4lDY8/PyBJHWS4S1IHGe6S1EGGuyR1kOEuSR1kuEtSBxnuktRBhrskdZDhLkkdZLhLUgcZ7pLUQYa7JHWQ4S5JHWS4S1IHGe6S1EEDh3uSlUluSXJtWz81yU1JppJclWR1K1/T1qfa9lOG1HZJ0hHM5cz97cCdfevvBz5YVc8HHgJ2tPIdwEOt/IOtniRpEQ0U7kk2An8KfLytB3g18JlWZQI4ry1va+u07We1+pKkRTLomfuHgHcBh+ZzezbwcFUdaOt7gQ1teQNwL0Db/kir/zRJdiaZTDI5PT19dK2XJM1o1nBPshXYX1U3L+SOq2pXVW2pqi3j4+MLedeS9BtvkAmyXw68Psm5wDjwW8CHgXVJxtrZ+UZgX6u/D9gE7E0yBqwFHlzwlkuSjmjWM/equqSqNlbVKcAFwJeq6k3AjcD5rdp24Jq2vLut07Z/qapqQVstSXpGg5y5H8nfA1cmeS9wC3BZK78M+GSSKeAn9J4QfmOMP/Eov/PjW4HhvYd87OMPAfDYE6v5+t5NrMjwnjvv/+nxQ7tvScOTpXBSvX79+tq6deuom7EgFuvRzAj2JmlpmZiYuLmqtsy0bT5n7prB4sagoStpZv78gCR1kOEuSR1kuEtSBxnuktRBhrskdZDhLkkdZLhLUgcZ7pLUQYa7JHWQ4S5JHWS4S1IHGe6S1EGGuyR1kOEuSR00ULgn+UGSbyb5RpLJVnZikhuS3N2uT2jlSfKRJFNJbktyxjA7IEn6dXM5c39VVZ3e98PwFwN7qmozsKetA5wDbG6XncClC9VYSdJg5jMssw2YaMsTwHl95ZdXz1foTaR98jz2I0mao0HDvYD/TnJzkp2t7KSquq8t3w+c1JY3
APf23XZvK3uaJDuTTCaZnJ6ePoqmS5KOZNBp9l5RVfuSPBe4Icm3+zdWVSVzm6W5qnYBu6A3h+pcbitJemYDnblX1b52vR/4PPAS4EeHhlva9f5WfR+wqe/mG1uZJGmRzBruSY5NcvyhZeC1wO3AbmB7q7YduKYt7wbe3D41cybwSN/wjSRpEQwyLHMS8Pkkh+p/qqq+kORrwNVJdgD3AG9o9a8DzgWmgMeACxe81ZKkZzRruFfV94AXzlD+IHDWDOUFXLQgrZMkHRW/oSpJHWS4S1IHGe6S1EGGuyR1kOEuSR1kuEtSBxnuktRBhrskdZDhLkkdZLhLUgcZ7pLUQYa7JHWQ4S5JHWS4S1IHGe6S1EGGuyR10EDhnmRdks8k+XaSO5O8LMmJSW5Icne7PqHVTZKPJJlKcluSM4bbBUnS4QY9c/8w8IWqegG9WZnuBC4G9lTVZmBPWwc4B9jcLjuBSxe0xZKkWQ0yQfZa4I+BywCq6omqehjYBky0ahPAeW15G3B59XwFWJfk5AVutyTpGQxy5n4q8GPg35LckuTjSY4FTqqq+1qd++lNpA2wAbi37/Z7W9nTJNmZZDLJ5PT09NH3QJL0awYJ9zHgDODSqnoR8HN+NQQDPDUpds1lx1W1q6q2VNWW8fHxudxUkjSLQcJ9L7C3qm5q65+hF/Y/OjTc0q73t+37gE19t9/YyiRJi2TWcK+q+4F7k/x+KzoLuAPYDWxvZduBa9rybuDN7VMzZwKP9A3fSJIWwdiA9f4GuCLJauB7wIX0nhiuTrIDuAd4Q6t7HXAuMAU81upKkhbRQOFeVd8Atsyw6awZ6hZw0fyaJUmaD7+hKkkdZLhLUgcZ7pLUQYa7JHVQeu9/jrgRyU+Bu0bdjnlaDzww6kYsgC70wz4sDV3oAyztfvxuVT1npg2DfhRy2O6qqpk+jbNsJJlc7n2AbvTDPiwNXegDLN9+OCwjSR1kuEtSBy2VcN816gYsgC70AbrRD/uwNHShD7BM+7Ek3lCVJC2spXLmLklaQIa7JHXQyMM9ydlJ7moTal88+y1GI8mmJDcmuSPJt5K8vZUvu4nCk6xss2pd29ZPTXJTa+tV7dc/SbKmrU+17aeMtOFNFyZsT/K37e/o9iSfTjK+HI5Dkk8k2Z/k9r6yOT/2Sba3+ncn2T7Tvha5D//c/p5uS/L5JOv6tl3S+nBXktf1lS/t7KqqkV2AlcB3gecBq4FbgdNG2aZnaOvJwBlt+XjgO8BpwD8BF7fyi4H3t+Vzgf8CApwJ3DTqPvT15Z3Ap4Br2/rVwAVt+aPAW9vyXwMfbcsXAFeNuu2tLRPAX7Xl1cC65XQc6E07+X3gmL7H/y3L4TjQm0/5DOD2vrI5PfbAifR+OvxE4IS2fMKI+/BaYKwtv7+vD6e1XFpDb8rR77bcWvLZNeo/8pcB1/etXwJcMuoHZcC2XwO8ht43a09uZSfT+0IWwMeAN/bVf6reiNu9EdgDvBq4tv3jPdD3h/3UMQGuB17WlsdavYy4/WtbMOaw8mVzHPjVPMMntsf1WuB1y+U4AKccFoxzeuyBNwIf6yt/Wr1R9OGwbX8GXNGWn5ZJh47FcsiuUQ/LDDSZ9lLTXha/CLiJeU4UPgIfAt4FHGzrzwYerqoDbb2/nU/1oW1/pNUfpaFM2L6Yqmof8C/AD4H76D2uN7O8jkO/uT72S+6YHOYv6b3igOXbh5GH+7KT5Djgs8A7qurR/m3Vewpfsp8tTbIV2F9VN4+6LfMwlAnbF1Mbk95G74nqt4FjgbNH2qgFstQf+9kkeTdwALhi1G2Zr1GH+7KaTDvJKnrBfkVVfa4VL6eJwl8OvD7JD4Ar6Q3NfBhYl+TQ7wz1t/OpPrTta4EHF7PBM+jChO1/Any/qn5cVb8EPkfv2Cyn49Bvro/9UjwmJHkLsBV4U3uSgmXWh36jDvevAZvbpwRW03uzaPeI2zSjJAEuA+6sqg/0bVo2E4VX1SVVtbGqTqH3WH+pqt4E3Aic36od3odDfTu/1R/pWVl1Y8L2HwJnJnlW+7s61IdlcxwOM9fH/nrgtUlOaK9iXtvKRibJ2fSGK19fVY/1bdoNXNA+sXQqsBn4Ksshu0Y96E/vHfXv0Hvn+d2jbs8ztPMV9F5u3gZ8o13OpTf2uQe4G/gicGKrH+BfW7++CWwZdR8O688r+dWnZZ5H7w92CvgPYE0rH2/rU23780bd7tau04HJdiz+k94nLpbVcQDeA3wbuB34JL1PYyz54wB8mt77BL+k9ypqx9E89vTGtafa5cIl0IcpemPoh/63P9pX/92tD3cB5/SVL+ns8ucHJKmDRj0sI0kaAsNdkjrIcJekDjLcJamDDHdJ6iDDXZI6yHCXpA76f7R1CFI4N+OoAAAAAElFTkSuQmCC\n",
70 | "text/plain": [
71 | ""
72 | ]
73 | },
74 | "metadata": {
75 | "needs_background": "light"
76 | },
77 | "output_type": "display_data"
78 | }
79 | ],
80 | "source": [
81 | "#red\n",
82 | "img[210:310,210:310] = (246, 83, 20)\n",
83 | "#green\n",
84 | "img[210:310,320:420] = (124, 187, 0)\n",
85 | "#blue\n",
86 | "img[320:420,210:310] = (0, 161, 241)\n",
87 | "#yellow\n",
88 | "img[320:420,320:420] = ( 255, 187, 0)\n",
89 | "plt.imshow(img)"
90 | ]
91 | },
92 | {
93 | "cell_type": "code",
94 | "execution_count": 93,
95 | "id": "18ee7f65-8fe3-44eb-ad44-d9ccc39391ab",
96 | "metadata": {},
97 | "outputs": [
98 | {
99 | "data": {
100 | "text/plain": [
101 | "True"
102 | ]
103 | },
104 | "execution_count": 93,
105 | "metadata": {},
106 | "output_type": "execute_result"
107 | },
108 | {
109 | "data": {
110 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAADACAYAAAD/eCOHAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8/fFQqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAmPUlEQVR4nO3deXBU153o8e+vu7WAFiQhBLIWJEDIgDACM8SOjc1iE9tgQ2I/22RmzCR2qJDYNZO8qXnOmzc186ZmqiZ5mZlMqmIndpwEJ9iOSYJNHBMHY1yYjA1IyAaJRUisEotAK9qXPu+Pvt3pq+6WWkitpf37VKl0+9zt3NvS7957zrnniDEGpZRS0cUx1hlQSik18jS4K6VUFNLgrpRSUUiDu1JKRSEN7kopFYU0uCulVBSKSHAXkftE5KSIVInIs5HYh1JKqdBkpNu5i4gTqATuBWqAQ8BGY8yxEd2RUkqpkCJx574MqDLGnDbGdAOvAesjsB+llFIhuCKwzSzggt/nGuAz/RcSkc3AZgCXy3XrlClTIpAVpZSKXvX19deMMdOCzYtEcA+LMeYF4AWA9PR0s27durHKilJKTUhbt249F2peJIplaoEcv8/ZVppSSqlREongfggoEJF8EYkFHgd2RmA/SimlQhjxYhljTK+IPA28AziBnxhjKkZ6P0oppUKLSJm7MeZt4O1IbFsppdTg9A1VpZSKQhrclVIqCmlwV0qpKKTBXSmlopAGd6WUikIa3JVSKgppcFdKqSikwV0ppaKQBnellIpCGtyVUioKaXBXSqkopMFdKaWikAZ3pZSKQhrclVIqCmlwV0qpKKTBXSmlotCgwV1EfiIidSJS7peWJiK7ReSU9TvVShcR+b6IVInIERFZEsnMK6WUCi6cO/efAff1S3sW2GOMKQD2WJ8B7gcKrJ/NwPMjk02llFJDMWhwN8bsAxr6Ja8HtlrTW4ENfukvG4+PgBQRyRyhvCqllArTjZa5TzfGXLKmLwPTreks4ILfcjVWWgAR2SwiJSJS0tnZeYPZUEopFcywK1SNMQYwN7DeC8aYpcaYpfHx8cPNhlJKKT83GtyveItbrN91VnotkOO3XLaVppRSahTdaHDfCWyypjcBb/qlP2G1mrkNaPYrvlFKKTVKXIMtICKvAiuAdBGpAf4R+DfgdRF5EjgHPGot/jbwAFAFtANfikCelVJKDWLQ4G6M2Rhi1uogyxrg68PNlFJKqeHRN1SVUioKaXBXSqkopMFdKaWikAZ3pZSKQhrclVIqCmlwV0qpKKTBXSmlopAGd6WUikIa3JVSKgppcFdKqSikwV0ppaKQBnellIpCGtyVUioKaXBXSqkopMFdKaWi0KDBXURyRGSviBwTkQoR+WsrPU1EdovIKet3qpUuIvJ9EakSkSMisiTSB6GUUsounDv3XuB/GmPmA7cBXxeR+cCzwB5jTAGwx/oMcD9QYP1sBp4f8VwrpZQa0KDB3RhzyRhz2Jq+DhwHsoD1wFZrsa3ABmt6PfCy8fgISPEOpq2UUmp0DKnMXUTygMXAAWC63+DXl4Hp1nQWcMFvtRorTSml1CgJO7iLSCLwa+BvjDEt/vOssVPNUHYsIptFpERESjo7O4eyqlJKqUEMOkA2gIjE4Ans24wxv7GSr4hIpjHmklXsUmel1wI5fqtnW2k2xpgXgBcA0tPTh3RhGM/iTS/p7raI7qPBMYl2iUWcvbiSWkAit6++9km4uyZFbgdKqYgYNLiLiAAvAceNMf/hN2snsAn4N+v3m37pT4vIa8BngGa/4puol9/XyJbOkoju4+W4WyiJycI1pZmMe3+PONwR21dT2a20npgfse0rpSIjnDv3O4C/BI6KyMdW2v/GE9RfF5EngXPAo9a8t4EHgCqgHfjSSGZ4vBMMLtwRu5k2+JelGcThRpyRC+4iUfNQpdSnyqDB3Rizn9AP/quDLG+Arw8zX0oppYZB31BVSqkopMFdKaWikAZ3pZSKQhrclVIqCmlwV0qpKKTBXSmlopAGd6WUikJhdT+glFKRJCIkJCQQGxvrS+vu7qajo4O+vr4xzNnEpcFdjajZs2ezbNmyoPM++eQTjh07dkPbLSoqYuHChQHpV65c4f3338ft9rylGxcXx5o1a0hMTPQtU1dXx969e33LqPHD4XAwb948Vq5cSV5eHpMmTUJEMMbQ0dFBXV0dv/jFL7h48SLguQgkJyfT3t5OT0/PGOd+fNPgrkbU0qVLWbNmDZ4uif7EGEN+fj6VlZX09vYOaZuxsbF84QtfID8/P2C7V69e5cCBA7S1eTprmzZtGmvXriU+Pt633/r6ej788EPa29uHcWRqpDkcDj73uc+xfv164uLiAr7bhIQE0tLSSE9P5+LFiyQlJbFq1Sruvvtudu3axe7du8co5xODBnc1ohwOR8A/KXjuuHJycpgxYwY1NTVD2mZWVhZZWVkht+ufPthnNX4UFRWxYcMGYmNjQ35H3d3dNDY2smTJEtavX09OTg4iQlpa2ijnduLR4K5GTXx8PLfccsuQg3txcbGtLFZNfA6Hg9WrVw8Y2AFaW1spLCzk0UcfJSYmRi/UQ6DBXY0aEWHRokX84Q9/CLtoJi4ujuLi4shmTI26xMREcnNzbcHaGENzczPnz5+nr6+PtLQ0Tp06RWxsrAb2G6DBXY2q3NzcIRXN5OTkkJmZGfY/dnNzM6dPnyY5OdmXVltbq5Vv44zL5Qp4Guvq6uK5556jqqoKYwwul4u+vr6gdThqcBrc1aiKj49n4cKFYQf3xYsXExMTE/b2W1pa+Pd//3dbMHC73dqcbpyJjY3F4bC/ZtPU1MT58+d9rZr0gjw8GtzVqBIRiouL2b1796BFM94y+qHetYmILXB4hhgYXGJiIpmZmcyYMcO3fldXFzU1NdTV1dHd3R10PYfDYbsAud1uenp6mDx5MgUFBaSkpGCM4dKlS1y4cIFQYwY7HA5SU1PJysoiJSXFd9ytra3U1NRQX18/5JZGCQkJzJgxg8zMTJxOpy9/V69e5cqVKzQ3Nw+5iWhsbCwZGRlkZ2cTFxdn2+alS5doaWkJec69ZezBWscYY4iJiQlY1+UKDFNOp9O2b70QBApnmL14YB8QZy3/K2PMP4pIPvAaMBUoBf7SGNMtInHAy8CtQD3wmDHmbITyryag3Nxcpk+fTm1twNC6NjNnzmTGjBlD2vaUKVN4+umnA4plnn/++ZABIDMzk5UrV1JcXExqaqotmBhj6O7upqamhr1793Lo0KGAIL969WpWr17tC1YtLS3s2rWLBx98kNzcXN+Fore3l5qaGt555x0OHjzoC6pOp5Obb76ZVatWUVBQQEJCQsDFqbOzk7Nnz/L+++9TVlY2aDCbNm0aq1atYsmSJaSlpQUEyL6+PlpbWzl58iR79+6lsrJy0CCfkJDA7bffzh133EFmZmZAZWhfXx8tLS0cPXqUPXv2cOHCBVugzsjIYMuWLUyePBmn0xlQLDNt2jT+4R/+wbeOMQYRYfLkyQF5ueOOO1i
[base64 PNG data omitted: matplotlib preview of the rendered Microsoft logo]\n",
111 | "text/plain": [
112 | ""
113 | ]
114 | },
115 | "metadata": {
116 | "needs_background": "light"
117 | },
118 | "output_type": "display_data"
119 | }
120 | ],
121 | "source": [
122 | "font = ImageFont.truetype(\"arial.ttf\", 160)  # load a TrueType font; OpenCV's putText cannot use .ttf fonts\n",
123 | "img_pil = Image.fromarray(img)  # wrap the NumPy image in a PIL Image so the font can be drawn\n",
124 | "draw = ImageDraw.Draw(img_pil)\n",
125 | "draw.text((460, 230), \"Microsoft\", font=font, stroke_width=3, fill=(255, 255, 255))  # white wordmark next to the logo\n",
126 | "img = np.array(img_pil)  # back to a NumPy array\n",
127 | " \n",
128 | "plt.imshow(img)\n",
129 | "img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)  # swap R and B channels before saving, since cv2.imwrite expects BGR order\n",
130 | "cv2.imwrite(\"Microsoft Logo.png\", img_rgb)"
131 | ]
132 | }
133 | ],
134 | "metadata": {
135 | "kernelspec": {
136 | "display_name": "Python 3 (ipykernel)",
137 | "language": "python",
138 | "name": "python3"
139 | },
140 | "language_info": {
141 | "codemirror_mode": {
142 | "name": "ipython",
143 | "version": 3
144 | },
145 | "file_extension": ".py",
146 | "mimetype": "text/x-python",
147 | "name": "python",
148 | "nbconvert_exporter": "python",
149 | "pygments_lexer": "ipython3",
150 | "version": "3.9.0"
151 | }
152 | },
153 | "nbformat": 4,
154 | "nbformat_minor": 5
155 | }
156 |
--------------------------------------------------------------------------------