├── Assets
├── FaceSwapperExample
│ ├── ReadMe.pdf
│ ├── Resources
│ │ ├── family.png
│ │ ├── face_mask.png
│ │ ├── LoadingIcon.png
│ │ ├── family.png.meta
│ │ ├── LoadingIcon.png.meta
│ │ └── face_mask.png.meta
│ ├── Materials
│ │ ├── quad_material.mat.meta
│ │ └── quad_material.mat
│ ├── ReadMe.pdf.meta
│ ├── ShowLicense.unity.meta
│ ├── FaceSwapperExample.unity.meta
│ ├── Texture2DFaceChangerExample
│ │ ├── Texture2DFaceChangerExample.unity.meta
│ │ ├── Texture2DFaceChangerExample.cs.meta
│ │ └── Texture2DFaceChangerExample.cs
│ ├── Texture2DFaceSwapperExample
│ │ ├── Texture2DFaceSwapperExample.unity.meta
│ │ ├── Texture2DFaceSwapperExample.cs.meta
│ │ └── Texture2DFaceSwapperExample.cs
│ ├── WebCamTextureFaceChangerExample
│ │ ├── WebCamTextureFaceChangerExample.unity.meta
│ │ ├── WebCamTextureFaceChangerExample.cs.meta
│ │ └── WebCamTextureFaceChangerExample.cs
│ ├── WebCamTextureFaceSwapperExample
│ │ ├── WebCamTextureFaceSwapperExample.unity.meta
│ │ ├── WebCamTextureFaceSwapperExample.cs.meta
│ │ └── WebCamTextureFaceSwapperExample.cs
│ ├── Materials.meta
│ ├── Resources.meta
│ ├── Scripts.meta
│ ├── Scripts
│ │ ├── Utils.meta
│ │ ├── NoiseFilter.meta
│ │ ├── Utils
│ │ │ ├── FpsMonitor.cs.meta
│ │ │ ├── FrontalFaceChecker.cs.meta
│ │ │ ├── OpenCVForUnityUtils.cs.meta
│ │ │ ├── FpsMonitor.cs
│ │ │ └── FrontalFaceChecker.cs
│ │ └── NoiseFilter
│ │ │ ├── OFPointsFilter.cs.meta
│ │ │ ├── LowPassPointsFilter.cs.meta
│ │ │ ├── PointsFilterBase.cs.meta
│ │ │ ├── PointsFilterBase.cs
│ │ │ ├── LowPassPointsFilter.cs
│ │ │ └── OFPointsFilter.cs
│ ├── OpenCVFaceChanger.meta
│ ├── OpenCVFaceSwapper.meta
│ ├── RectangleTracker.meta
│ ├── RectangleTracker
│ │ ├── Scripts.meta
│ │ └── Scripts
│ │ │ ├── TrackedObject.cs.meta
│ │ │ ├── TrackedRect.cs.meta
│ │ │ ├── RectangleTracker.cs.meta
│ │ │ ├── TrackerParameters.cs.meta
│ │ │ ├── TrackedRect.cs
│ │ │ ├── TrackedObject.cs
│ │ │ ├── TrackerParameters.cs
│ │ │ └── RectangleTracker.cs
│ ├── OpenCVFaceChanger
│ │ ├── Scripts.meta
│ │ └── Scripts
│ │ │ ├── RectUtils.cs.meta
│ │ │ ├── FaceChanger.cs.meta
│ │ │ ├── PointUtils.cs.meta
│ │ │ ├── DlibFaceChanger.cs.meta
│ │ │ ├── NonRigidObjectTrackerFaceChanger.cs.meta
│ │ │ ├── PointUtils.cs
│ │ │ ├── DlibFaceChanger.cs
│ │ │ ├── NonRigidObjectTrackerFaceChanger.cs
│ │ │ └── RectUtils.cs
│ ├── OpenCVFaceSwapper
│ │ ├── Scripts.meta
│ │ └── Scripts
│ │ │ ├── FaceSwapper.cs.meta
│ │ │ ├── PointUtils.cs.meta
│ │ │ ├── RectUtils.cs.meta
│ │ │ ├── DlibFaceSwapper.cs.meta
│ │ │ ├── NonRigidObjectTrackerFaceSwapper.cs.meta
│ │ │ ├── PointUtils.cs
│ │ │ ├── DlibFaceSwapper.cs
│ │ │ ├── NonRigidObjectTrackerFaceSwapper.cs
│ │ │ └── RectUtils.cs
│ ├── Texture2DFaceChangerExample.meta
│ ├── Texture2DFaceSwapperExample.meta
│ ├── VideoCaptureFaceSwapperExample.meta
│ ├── WebCamTextureFaceChangerExample.meta
│ ├── WebCamTextureFaceSwapperExample.meta
│ ├── VideoCaptureFaceSwapperExample
│ │ ├── VideoCaptureFaceSwapperExample.unity.meta
│ │ ├── VideoCaptureFaceSwapperExample.cs.meta
│ │ └── VideoCaptureFaceSwapperExample.cs
│ ├── ShowLicense.cs.meta
│ ├── FaceSwapperExample.cs.meta
│ ├── ShowLicense.cs
│ └── FaceSwapperExample.cs
├── FaceSwapperExample.meta
└── README.md
/Assets/FaceSwapperExample/ReadMe.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnoxSoftware/FaceSwapperExample/HEAD/Assets/FaceSwapperExample/ReadMe.pdf
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Resources/family.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnoxSoftware/FaceSwapperExample/HEAD/Assets/FaceSwapperExample/Resources/family.png
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Resources/face_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnoxSoftware/FaceSwapperExample/HEAD/Assets/FaceSwapperExample/Resources/face_mask.png
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Materials/quad_material.mat.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 2fe89ad658086d24f8fe283057df260d
3 | NativeFormatImporter:
4 | userData:
5 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Resources/LoadingIcon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnoxSoftware/FaceSwapperExample/HEAD/Assets/FaceSwapperExample/Resources/LoadingIcon.png
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/ReadMe.pdf.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: a6df57592dc8e1c4a8af3a44efd670fe
3 | DefaultImporter:
4 | userData:
5 | assetBundleName:
6 | assetBundleVariant:
7 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/ShowLicense.unity.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: bffcb2baf6cb09f4c91f5a11a618b29b
3 | DefaultImporter:
4 | userData:
5 | assetBundleName:
6 | assetBundleVariant:
7 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/FaceSwapperExample.unity.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: e5d0c664715582e4ca34e57c9c810679
3 | DefaultImporter:
4 | userData:
5 | assetBundleName:
6 | assetBundleVariant:
7 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Texture2DFaceChangerExample/Texture2DFaceChangerExample.unity.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 66328ed0f7ca81e4c8b3c5c89e38d6a1
3 | DefaultImporter:
4 | userData:
5 | assetBundleName:
6 | assetBundleVariant:
7 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Texture2DFaceSwapperExample/Texture2DFaceSwapperExample.unity.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 52f3044f03b445d40b792ea9adc8d3d9
3 | DefaultImporter:
4 | userData:
5 | assetBundleName:
6 | assetBundleVariant:
7 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/WebCamTextureFaceChangerExample/WebCamTextureFaceChangerExample.unity.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 8930824bc8abb5d488da2aefe77b83f1
3 | DefaultImporter:
4 | userData:
5 | assetBundleName:
6 | assetBundleVariant:
7 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/WebCamTextureFaceSwapperExample/WebCamTextureFaceSwapperExample.unity.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 2f907fe501981c742b2199b6b1866926
3 | DefaultImporter:
4 | userData:
5 | assetBundleName:
6 | assetBundleVariant:
7 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 213d24cf492691546ac4ae1b8b1073f6
3 | folderAsset: yes
4 | timeCreated: 1479443957
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Materials.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 5903462d6f8e9c045aefcddde2d35304
3 | folderAsset: yes
4 | timeCreated: 1479443957
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Resources.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: e4d01064e730d614dbeb596e22b641f7
3 | folderAsset: yes
4 | timeCreated: 1479443957
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 33a112c23a06b4340864c40d0c0c344b
3 | folderAsset: yes
4 | timeCreated: 1526559409
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/Utils.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: aaa691705bea5e049a90b2206a12e25b
3 | folderAsset: yes
4 | timeCreated: 1483717174
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 5f62433e1ec2fd04795589f4990b5fe9
3 | folderAsset: yes
4 | timeCreated: 1479458535
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 626817b4d34f74141961f772cef98476
3 | folderAsset: yes
4 | timeCreated: 1479443957
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/RectangleTracker.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 1c93efbfc058f854aaa16ed1a6eb97d4
3 | folderAsset: yes
4 | timeCreated: 1479443957
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/NoiseFilter.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 29bb3fd8ddda0c0498eb3e1408e6423c
3 | folderAsset: yes
4 | timeCreated: 1524888405
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/RectangleTracker/Scripts.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: f7c57559e5ca892409cb9a5b9ee4d309
3 | folderAsset: yes
4 | timeCreated: 1493639110
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger/Scripts.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: ebfb2dcb22f180d43973546ddb53ad08
3 | folderAsset: yes
4 | timeCreated: 1464554941
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper/Scripts.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: d3c8a771006749c498468d4731ea7585
3 | folderAsset: yes
4 | timeCreated: 1479443957
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Texture2DFaceChangerExample.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: aa55b1e721b2c054289dd599cfa025fe
3 | folderAsset: yes
4 | timeCreated: 1499788152
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Texture2DFaceSwapperExample.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 1a0a8ccf796ba404abbec73458c8a6ed
3 | folderAsset: yes
4 | timeCreated: 1499788404
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/VideoCaptureFaceSwapperExample.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 873752211ba1eac4e805db3eb3083a60
3 | folderAsset: yes
4 | timeCreated: 1499788526
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/WebCamTextureFaceChangerExample.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: fcd1ab2d0d0efc34dbde05f64d860e04
3 | folderAsset: yes
4 | timeCreated: 1499788543
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/WebCamTextureFaceSwapperExample.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 76ef7b04e780635479c215beb7772594
3 | folderAsset: yes
4 | timeCreated: 1499788560
5 | licenseType: Free
6 | DefaultImporter:
7 | userData:
8 | assetBundleName:
9 | assetBundleVariant:
10 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/VideoCaptureFaceSwapperExample/VideoCaptureFaceSwapperExample.unity.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 8c0f13bf77892b641a26f022f103dfc9
3 | timeCreated: 1463652241
4 | licenseType: Store
5 | DefaultImporter:
6 | userData:
7 | assetBundleName:
8 | assetBundleVariant:
9 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/ShowLicense.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 13f2a73f7ca1dbf42b421bdd812acbef
3 | MonoImporter:
4 | serializedVersion: 2
5 | defaultReferences: []
6 | executionOrder: 0
7 | icon: {instanceID: 0}
8 | userData:
9 | assetBundleName:
10 | assetBundleVariant:
11 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/FaceSwapperExample.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 661a709561282a140b876853278c3522
3 | MonoImporter:
4 | serializedVersion: 2
5 | defaultReferences: []
6 | executionOrder: 0
7 | icon: {instanceID: 0}
8 | userData:
9 | assetBundleName:
10 | assetBundleVariant:
11 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Texture2DFaceChangerExample/Texture2DFaceChangerExample.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: cd27f43d2d1bc4c479f77ae0bba6bb57
3 | MonoImporter:
4 | serializedVersion: 2
5 | defaultReferences: []
6 | executionOrder: 0
7 | icon: {instanceID: 0}
8 | userData:
9 | assetBundleName:
10 | assetBundleVariant:
11 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Texture2DFaceSwapperExample/Texture2DFaceSwapperExample.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 2b5f56412d33c3e499e04f152433fd5b
3 | MonoImporter:
4 | serializedVersion: 2
5 | defaultReferences: []
6 | executionOrder: 0
7 | icon: {instanceID: 0}
8 | userData:
9 | assetBundleName:
10 | assetBundleVariant:
11 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/WebCamTextureFaceChangerExample/WebCamTextureFaceChangerExample.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 4ec56ffde0d7376439c55b1bce791050
3 | MonoImporter:
4 | serializedVersion: 2
5 | defaultReferences: []
6 | executionOrder: 0
7 | icon: {instanceID: 0}
8 | userData:
9 | assetBundleName:
10 | assetBundleVariant:
11 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/WebCamTextureFaceSwapperExample/WebCamTextureFaceSwapperExample.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 29941611c584b7b4ea9f5a83eec9e1e9
3 | MonoImporter:
4 | serializedVersion: 2
5 | defaultReferences: []
6 | executionOrder: 0
7 | icon: {instanceID: 0}
8 | userData:
9 | assetBundleName:
10 | assetBundleVariant:
11 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/Utils/FpsMonitor.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 32c5e879aceb75e469a29d0979709285
3 | timeCreated: 1524881952
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger/Scripts/RectUtils.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 378b605fb967c3140aa1fe3b795669f6
3 | timeCreated: 1464561302
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/NoiseFilter/OFPointsFilter.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: f4c93fd256414df4288326391a099a01
3 | timeCreated: 1524888405
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/Utils/FrontalFaceChecker.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 4c57ef078c131834aa757a71ae3ec6ba
3 | timeCreated: 1498237711
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/Utils/OpenCVForUnityUtils.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: e29736da80afb5d4aa50c3407eeea008
3 | timeCreated: 1463661767
4 | licenseType: Store
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger/Scripts/FaceChanger.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 124167cb3721fe945a710b178fef5b72
3 | timeCreated: 1464561302
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger/Scripts/PointUtils.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: f24b4787f4fccc9468d3f1d3a5c4c2a9
3 | timeCreated: 1464561302
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper/Scripts/FaceSwapper.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: ba6438b58e5479b46990e81848dc5b03
3 | timeCreated: 1464539197
4 | licenseType: Store
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper/Scripts/PointUtils.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 7d20b04cba21e3e4296c754903a3cb3e
3 | timeCreated: 1464539197
4 | licenseType: Store
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper/Scripts/RectUtils.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 66c9ea5092809794caba465840d6d80f
3 | timeCreated: 1464539197
4 | licenseType: Store
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/RectangleTracker/Scripts/TrackedObject.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: f59f132f755a0354c887f6c0e12e99a5
3 | timeCreated: 1493639110
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/RectangleTracker/Scripts/TrackedRect.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: d05ff3f2c557b92438a33f05b1203d21
3 | timeCreated: 1493639110
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/NoiseFilter/LowPassPointsFilter.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: c5f88fbc473a3ef429120ccc24659a8c
3 | timeCreated: 1524888405
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/NoiseFilter/PointsFilterBase.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 4f9a33f73403bbe41b6c0079b90e0665
3 | timeCreated: 1524888405
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger/Scripts/DlibFaceChanger.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 8b672b9119e396140b993432c4097d27
3 | timeCreated: 1464647045
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper/Scripts/DlibFaceSwapper.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: f03be5c9f0658d14cb6605b9603ac6dc
3 | timeCreated: 1464539199
4 | licenseType: Store
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/RectangleTracker/Scripts/RectangleTracker.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 2729ba24949514e4987e9b249f4115f9
3 | timeCreated: 1493639110
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/RectangleTracker/Scripts/TrackerParameters.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 20ef35f558fce4646b446cab40538276
3 | timeCreated: 1493639110
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger/Scripts/NonRigidObjectTrackerFaceChanger.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 87ce4ed7743474a42be1de9daddbc473
3 | timeCreated: 1464647045
4 | licenseType: Free
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper/Scripts/NonRigidObjectTrackerFaceSwapper.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 4137cc5f5ba6b00498b76159d18889ce
3 | timeCreated: 1464539197
4 | licenseType: Store
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/VideoCaptureFaceSwapperExample/VideoCaptureFaceSwapperExample.cs.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: d8d13016d5ab01348b307ab2d2a60701
3 | timeCreated: 1463652228
4 | licenseType: Store
5 | MonoImporter:
6 | serializedVersion: 2
7 | defaultReferences: []
8 | executionOrder: 0
9 | icon: {instanceID: 0}
10 | userData:
11 | assetBundleName:
12 | assetBundleVariant:
13 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/ShowLicense.cs:
--------------------------------------------------------------------------------
1 | using UnityEngine;
2 | using UnityEngine.SceneManagement;
3 |
4 | namespace FaceSwapperExample
5 | {
6 | /// <summary>
7 | /// License display scene controller; returns to the example list on demand.
8 | /// </summary>
9 | public class ShowLicense : MonoBehaviour
10 | {
11 | // Use this for initialization
12 | void Start()
13 | {
14 |
15 | }
16 |
17 | // Update is called once per frame
18 | void Update()
19 | {
20 |
21 | }
22 |
23 | public void OnBackButtonClick()
24 | {
25 | SceneManager.LoadScene("FaceSwapperExample");
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/RectangleTracker/Scripts/TrackedRect.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 |
3 | namespace OpenCVForUnity.RectangleTrack
4 | {
5 | /// <summary>
6 | /// A rectangle augmented with tracking bookkeeping: a tracker-assigned id,
7 | /// the current tracking state, and detection/miss frame counters.
8 | /// </summary>
9 | public class TrackedRect : Rect
10 | {
11 | public int id;
12 | public TrackedState state;
13 | public int numDetectedFrames;
14 | public int numFramesNotDetected;
15 |
16 | /// <summary>
17 | /// Copies the geometry of <paramref name="rect"/> and records the
18 | /// supplied tracking metadata.
19 | /// </summary>
20 | public TrackedRect(int id, Rect rect, TrackedState state, int numDetectedFrames, int numFramesNotDetected)
21 | : base(rect.x, rect.y, rect.width, rect.height)
22 | {
23 | this.id = id;
24 | this.state = state;
25 | this.numDetectedFrames = numDetectedFrames;
26 | this.numFramesNotDetected = numFramesNotDetected;
27 | }
28 | }
29 | }
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Materials/quad_material.mat:
--------------------------------------------------------------------------------
1 | %YAML 1.1
2 | %TAG !u! tag:unity3d.com,2011:
3 | --- !u!21 &2100000
4 | Material:
5 | serializedVersion: 5
6 | m_ObjectHideFlags: 0
7 | m_PrefabParentObject: {fileID: 0}
8 | m_PrefabInternal: {fileID: 0}
9 | m_Name: quad_material
10 | m_Shader: {fileID: 10750, guid: 0000000000000000f000000000000000, type: 0}
11 | m_ShaderKeywords:
12 | m_LightmapFlags: 5
13 | m_CustomRenderQueue: -1
14 | m_SavedProperties:
15 | serializedVersion: 2
16 | m_TexEnvs:
17 | data:
18 | first:
19 | name: _MainTex
20 | second:
21 | m_Texture: {fileID: 2800000, guid: 60efc80de1039114fa7285c55dc1e203, type: 3}
22 | m_Scale: {x: 1, y: 1}
23 | m_Offset: {x: 0, y: 0}
24 | m_Floats: {}
25 | m_Colors: {}
26 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | FaceSwapper Example
2 | ====================
3 |
4 | Overview
5 | -----
6 | [https://assetstore.unity.com/packages/templates/tutorials/faceswapper-example-66602](https://assetstore.unity.com/packages/templates/tutorials/faceswapper-example-66602?aid=1011l4ehR)
7 |
8 | Environment
9 | -----
10 | [OpenCVForUnity](https://assetstore.unity.com/packages/tools/integration/opencv-for-unity-21088?aid=1011l4ehR)
11 | [DlibFaceLandmarkDetector](https://assetstore.unity.com/packages/tools/integration/dlib-facelandmark-detector-64314?aid=1011l4ehR)
12 |
13 | Demo Video
14 | -----
15 | [](https://www.youtube.com/watch?v=lQPL85LbSYo)
16 |
17 | Demo
18 | -----
19 | - WebGL (simd)
20 | - Android
21 |
22 | Manual
23 | -----
24 | [ReadMe.pdf](/Assets/FaceSwapperExample/ReadMe.pdf)
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Resources/family.png.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 3abc257791f75f1459d7c9a6dff35cf1
3 | TextureImporter:
4 | fileIDToRecycleName: {}
5 | serializedVersion: 2
6 | mipmaps:
7 | mipMapMode: 0
8 | enableMipMap: 0
9 | linearTexture: 0
10 | correctGamma: 0
11 | fadeOut: 0
12 | borderMipMap: 0
13 | mipMapFadeDistanceStart: 1
14 | mipMapFadeDistanceEnd: 3
15 | bumpmap:
16 | convertToNormalMap: 0
17 | externalNormalMap: 0
18 | heightScale: .25
19 | normalMapFilter: 0
20 | isReadable: 1
21 | grayScaleToAlpha: 0
22 | generateCubemap: 0
23 | seamlessCubemap: 0
24 | textureFormat: 4
25 | maxTextureSize: 1024
26 | textureSettings:
27 | filterMode: -1
28 | aniso: -1
29 | mipBias: -1
30 | wrapMode: -1
31 | nPOTScale: 0
32 | lightmap: 0
33 | compressionQuality: 50
34 | spriteMode: 0
35 | spriteExtrude: 1
36 | spriteMeshType: 1
37 | alignment: 0
38 | spritePivot: {x: .5, y: .5}
39 | spriteBorder: {x: 0, y: 0, z: 0, w: 0}
40 | spritePixelsToUnits: 100
41 | alphaIsTransparency: 0
42 | textureType: 5
43 | buildTargetSettings: []
44 | spriteSheet:
45 | sprites: []
46 | spritePackingTag:
47 | userData:
48 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper/Scripts/PointUtils.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using System;
3 |
namespace OpenCVForUnity.FaceSwap
{
    /// <summary>
    /// Static helpers for 2D point arithmetic.
    /// </summary>
    public class PointUtils
    {
        /// <summary>
        /// Returns the Euclidean distance between the specified two points.
        /// </summary>
        public static double Distance(Point p1, Point p2)
        {
            double dx = p2.x - p1.x;
            double dy = p2.y - p1.y;
            return Math.Sqrt(Math.Pow(dx, 2) + Math.Pow(dy, 2));
        }

        /// <summary>
        /// Calculates the dot product of two 2D vectors.
        /// </summary>
        public static double DotProduct(Point p1, Point p2)
        {
            double xTerm = p1.x * p2.x;
            double yTerm = p1.y * p2.y;
            return xTerm + yTerm;
        }

        /// <summary>
        /// Calculates the cross product (scalar z component) of two 2D vectors.
        /// </summary>
        public static double CrossProduct(Point p1, Point p2)
        {
            double a = p1.x * p2.y;
            double b = p2.x * p1.y;
            return a - b;
        }
    }
}
42 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger/Scripts/PointUtils.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using System;
3 |
namespace OpenCVForUnity.FaceChange
{
    /// <summary>
    /// Static helpers for 2D point arithmetic.
    /// </summary>
    public class PointUtils
    {
        /// <summary>
        /// Returns the Euclidean distance between the specified two points.
        /// </summary>
        public static double Distance(Point p1, Point p2)
        {
            double dx = p2.x - p1.x;
            double dy = p2.y - p1.y;
            return Math.Sqrt(Math.Pow(dx, 2) + Math.Pow(dy, 2));
        }

        /// <summary>
        /// Calculates the dot product of two 2D vectors.
        /// </summary>
        public static double DotProduct(Point p1, Point p2)
        {
            double xTerm = p1.x * p2.x;
            double yTerm = p1.y * p2.y;
            return xTerm + yTerm;
        }

        /// <summary>
        /// Calculates the cross product (scalar z component) of two 2D vectors.
        /// </summary>
        public static double CrossProduct(Point p1, Point p2)
        {
            double a = p1.x * p2.y;
            double b = p2.x * p1.y;
            return a - b;
        }
    }
}
42 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Resources/LoadingIcon.png.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: 60efc80de1039114fa7285c55dc1e203
3 | timeCreated: 1479410294
4 | licenseType: Pro
5 | TextureImporter:
6 | fileIDToRecycleName: {}
7 | serializedVersion: 2
8 | mipmaps:
9 | mipMapMode: 0
10 | enableMipMap: 1
11 | linearTexture: 0
12 | correctGamma: 0
13 | fadeOut: 0
14 | borderMipMap: 0
15 | mipMapFadeDistanceStart: 1
16 | mipMapFadeDistanceEnd: 3
17 | bumpmap:
18 | convertToNormalMap: 0
19 | externalNormalMap: 0
20 | heightScale: .25
21 | normalMapFilter: 0
22 | isReadable: 0
23 | grayScaleToAlpha: 0
24 | generateCubemap: 0
25 | cubemapConvolution: 0
26 | cubemapConvolutionSteps: 8
27 | cubemapConvolutionExponent: 1.5
28 | seamlessCubemap: 0
29 | textureFormat: -1
30 | maxTextureSize: 512
31 | textureSettings:
32 | filterMode: -1
33 | aniso: -1
34 | mipBias: -1
35 | wrapMode: 0
36 | nPOTScale: 1
37 | lightmap: 0
38 | rGBM: 0
39 | compressionQuality: 50
40 | spriteMode: 0
41 | spriteExtrude: 1
42 | spriteMeshType: 1
43 | alignment: 0
44 | spritePivot: {x: .5, y: .5}
45 | spriteBorder: {x: 0, y: 0, z: 0, w: 0}
46 | spritePixelsToUnits: 100
47 | alphaIsTransparency: 0
48 | textureType: 0
49 | buildTargetSettings: []
50 | spriteSheet:
51 | sprites: []
52 | spritePackingTag:
53 | userData:
54 | assetBundleName:
55 | assetBundleVariant:
56 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Resources/face_mask.png.meta:
--------------------------------------------------------------------------------
1 | fileFormatVersion: 2
2 | guid: e0a7f14f275499c46b9878f443c430ad
3 | timeCreated: 1479533578
4 | licenseType: Free
5 | TextureImporter:
6 | fileIDToRecycleName: {}
7 | serializedVersion: 2
8 | mipmaps:
9 | mipMapMode: 0
10 | enableMipMap: 0
11 | linearTexture: 0
12 | correctGamma: 0
13 | fadeOut: 0
14 | borderMipMap: 0
15 | mipMapFadeDistanceStart: 1
16 | mipMapFadeDistanceEnd: 3
17 | bumpmap:
18 | convertToNormalMap: 0
19 | externalNormalMap: 0
20 | heightScale: .25
21 | normalMapFilter: 0
22 | isReadable: 1
23 | grayScaleToAlpha: 0
24 | generateCubemap: 0
25 | cubemapConvolution: 0
26 | cubemapConvolutionSteps: 7
27 | cubemapConvolutionExponent: 1.5
28 | seamlessCubemap: 0
29 | textureFormat: 4
30 | maxTextureSize: 512
31 | textureSettings:
32 | filterMode: -1
33 | aniso: -1
34 | mipBias: -1
35 | wrapMode: -1
36 | nPOTScale: 0
37 | lightmap: 0
38 | rGBM: 0
39 | compressionQuality: 50
40 | spriteMode: 0
41 | spriteExtrude: 1
42 | spriteMeshType: 1
43 | alignment: 0
44 | spritePivot: {x: .5, y: .5}
45 | spriteBorder: {x: 0, y: 0, z: 0, w: 0}
46 | spritePixelsToUnits: 100
47 | alphaIsTransparency: 0
48 | textureType: 5
49 | buildTargetSettings: []
50 | spriteSheet:
51 | sprites: []
52 | spritePackingTag:
53 | userData:
54 | assetBundleName:
55 | assetBundleVariant:
56 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/RectangleTracker/Scripts/TrackedObject.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using PositionsVector = System.Collections.Generic.List;
3 |
namespace OpenCVForUnity.RectangleTrack
{
    /// <summary>
    /// Lifecycle state of a tracked rectangle.
    /// </summary>
    public enum TrackedState : int
    {
        NEW = 0,
        PENDING = 1,
        NEW_DISPLAYED = 2,
        DISPLAYED = 3,
        NEW_HIDED = 4,
        HIDED = 5,
        DELETED = 6
    }

    /// <summary>
    /// Holds the position history, identity and detection counters for one
    /// object being tracked across frames.
    /// </summary>
    public class TrackedObject
    {
        public PositionsVector lastPositions;
        public int numDetectedFrames;
        public int numFramesNotDetected;
        public int id;
        public TrackedState state;

        /// <summary>
        /// The most recently recorded position, returned as a copy.
        /// </summary>
        public Rect position
        {
            get
            {
                var newest = lastPositions[lastPositions.Count - 1];
                return newest.clone();
            }
        }

        // Monotonically increasing id shared by all instances.
        static private int _id = 0;

        /// <summary>
        /// Starts tracking at the given rectangle with a fresh id.
        /// </summary>
        public TrackedObject(Rect rect)
        {
            lastPositions = new PositionsVector();
            lastPositions.Add(rect.clone());

            numDetectedFrames = 1;
            numFramesNotDetected = 0;
            state = TrackedState.NEW;

            _id = GetNextId();
            id = _id;
        }

        static int GetNextId()
        {
            _id++;
            return _id;
        }
    }
}
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/RectangleTracker/Scripts/TrackerParameters.cs:
--------------------------------------------------------------------------------
namespace OpenCVForUnity.RectangleTrack
{
    /// <summary>
    /// Tunable parameters controlling rectangle-tracker behavior.
    /// </summary>
    public class TrackerParameters
    {
        public int numLastPositionsToTrack = 4;
        public int numStepsToWaitBeforeFirstShow = 6;
        public int numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown = 3;
        public int numStepsToShowWithoutDetecting = 3;

        public int maxTrackLifetime = 5;

        public float coeffObjectSpeedUsingInPrediction = 0.8f;
        public float coeffRectangleOverlap = 0.7f;

        public TrackerParameters()
        {
        }

        /// <summary>
        /// Returns a field-by-field copy of this parameter set.
        /// </summary>
        public TrackerParameters Clone()
        {
            return new TrackerParameters
            {
                numLastPositionsToTrack = this.numLastPositionsToTrack,
                numStepsToWaitBeforeFirstShow = this.numStepsToWaitBeforeFirstShow,
                numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown = this.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown,
                numStepsToShowWithoutDetecting = this.numStepsToShowWithoutDetecting,
                maxTrackLifetime = this.maxTrackLifetime,
                coeffObjectSpeedUsingInPrediction = this.coeffObjectSpeedUsingInPrediction,
                coeffRectangleOverlap = this.coeffRectangleOverlap
            };
        }
    }
}
34 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper/Scripts/DlibFaceSwapper.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using System;
3 |
namespace OpenCVForUnity.FaceSwap
{
    /// <summary>
    /// FaceSwapper specialization that consumes the 68-point dlib facial
    /// landmark layout.
    /// </summary>
    public class DlibFaceSwapper : FaceSwapper
    {
        /// <summary>
        /// Finds facial landmarks on faces and extracts the useful points.
        /// </summary>
        /// <param name="landmark_points">The 68 dlib landmark points.</param>
        /// <param name="points">Receives 9 facial-contour points (indices 0-8).</param>
        /// <param name="affine_transform_keypoints">Receives chin and the two outer eye corners.</param>
        /// <exception cref="ArgumentNullException">landmark_points is null.</exception>
        /// <exception cref="ArgumentException">landmark_points does not contain exactly 68 points.</exception>
        protected override void getFacePoints(Point[] landmark_points, Point[] points, Point[] affine_transform_keypoints)
        {
            if (landmark_points == null)
                throw new ArgumentNullException("landmark_points");
            // BUGFIX: a wrong length is an invalid (non-null) argument, so
            // ArgumentException is the correct type, not ArgumentNullException.
            if (landmark_points.Length != 68)
                throw new ArgumentException("Invalid landmark_points.", "landmark_points");

            // points (facial contour)
            points[0] = landmark_points[0];
            points[1] = landmark_points[3];
            points[2] = landmark_points[5];
            points[3] = landmark_points[8];
            points[4] = landmark_points[11];
            points[5] = landmark_points[13];
            points[6] = landmark_points[16];
            // Extend the forehead by offsetting the brow corners along the nose direction.
            Point nose_length = new Point(landmark_points[27].x - landmark_points[30].x, landmark_points[27].y - landmark_points[30].y);
            points[7] = new Point(landmark_points[26].x + nose_length.x, landmark_points[26].y + nose_length.y);
            points[8] = new Point(landmark_points[17].x + nose_length.x, landmark_points[17].y + nose_length.y);

            // affine_transform_keypoints (eyes and chin)
            affine_transform_keypoints[0] = points[3];
            affine_transform_keypoints[1] = landmark_points[36];
            affine_transform_keypoints[2] = landmark_points[45];
        }
    }
}
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger/Scripts/DlibFaceChanger.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using System;
3 |
namespace OpenCVForUnity.FaceChange
{
    /// <summary>
    /// FaceChanger specialization that consumes the 68-point dlib facial
    /// landmark layout.
    /// </summary>
    public class DlibFaceChanger : FaceChanger
    {
        /// <summary>
        /// Finds facial landmarks on faces and extracts the useful points.
        /// </summary>
        /// <param name="landmark_points">The 68 dlib landmark points.</param>
        /// <param name="points">Receives 9 facial-contour points (indices 0-8), cloned from the input.</param>
        /// <param name="affine_transform_keypoints">Receives chin and the two outer eye corners, cloned from the input.</param>
        /// <exception cref="ArgumentNullException">landmark_points is null.</exception>
        /// <exception cref="ArgumentException">landmark_points does not contain exactly 68 points.</exception>
        protected override void getFacePoints(Point[] landmark_points, Point[] points, Point[] affine_transform_keypoints)
        {
            if (landmark_points == null)
                throw new ArgumentNullException("landmark_points");
            // BUGFIX: a wrong length is an invalid (non-null) argument, so
            // ArgumentException is the correct type, not ArgumentNullException.
            if (landmark_points.Length != 68)
                throw new ArgumentException("Invalid landmark_points.", "landmark_points");

            // points (facial contour) — cloned so later edits don't mutate the landmarks
            points[0] = landmark_points[0].clone();
            points[1] = landmark_points[3].clone();
            points[2] = landmark_points[5].clone();
            points[3] = landmark_points[8].clone();
            points[4] = landmark_points[11].clone();
            points[5] = landmark_points[13].clone();
            points[6] = landmark_points[16].clone();
            // Extend the forehead by offsetting the brow corners along the nose direction.
            Point nose_length = new Point(landmark_points[27].x - landmark_points[30].x, landmark_points[27].y - landmark_points[30].y);
            points[7] = new Point(landmark_points[26].x + nose_length.x, landmark_points[26].y + nose_length.y);
            points[8] = new Point(landmark_points[17].x + nose_length.x, landmark_points[17].y + nose_length.y);

            // affine_transform_keypoints (eyes and chin)
            affine_transform_keypoints[0] = points[3].clone();
            affine_transform_keypoints[1] = landmark_points[36].clone();
            affine_transform_keypoints[2] = landmark_points[45].clone();
        }
    }
}
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger/Scripts/NonRigidObjectTrackerFaceChanger.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using System;
3 |
namespace OpenCVForUnity.FaceChange
{
    /// <summary>
    /// FaceChanger specialization that consumes the 76-point non-rigid
    /// object tracker landmark layout.
    /// </summary>
    public class NonRigidObjectTrackerFaceChanger : FaceChanger
    {
        /// <summary>
        /// Finds facial landmarks on faces and extracts the useful points.
        /// </summary>
        /// <param name="landmark_points">The 76 tracker landmark points.</param>
        /// <param name="points">Receives 9 facial-contour points (indices 0-8).</param>
        /// <param name="affine_transform_keypoints">Receives chin and the two eye landmarks.</param>
        /// <exception cref="ArgumentNullException">landmark_points is null.</exception>
        /// <exception cref="ArgumentException">landmark_points does not contain exactly 76 points.</exception>
        protected override void getFacePoints(Point[] landmark_points, Point[] points, Point[] affine_transform_keypoints)
        {
            if (landmark_points == null)
                throw new ArgumentNullException("landmark_points");
            // BUGFIX: a wrong length is an invalid (non-null) argument, so
            // ArgumentException is the correct type, not ArgumentNullException.
            if (landmark_points.Length != 76)
                throw new ArgumentException("Invalid landmark_points.", "landmark_points");

            // points (facial contour)
            points[0] = landmark_points[0];
            points[1] = landmark_points[2];
            points[2] = landmark_points[5];
            points[3] = landmark_points[7];
            points[4] = landmark_points[9];
            points[5] = landmark_points[12];
            points[6] = landmark_points[14];
            // Extend the forehead by offsetting the outer contour points along
            // the direction from nose tip to the midpoint between the eyes.
            Point nose_line_base = new Point((landmark_points[37].x + landmark_points[45].x) / 2, (landmark_points[37].y + landmark_points[45].y) / 2);
            Point nose_length = new Point(nose_line_base.x - landmark_points[67].x, nose_line_base.y - landmark_points[67].y);
            points[7] = new Point(landmark_points[15].x + nose_length.x, landmark_points[15].y + nose_length.y);
            points[8] = new Point(landmark_points[21].x + nose_length.x, landmark_points[21].y + nose_length.y);

            // affine_transform_keypoints (eyes and chin)
            affine_transform_keypoints[0] = points[3];
            affine_transform_keypoints[1] = landmark_points[27];
            affine_transform_keypoints[2] = landmark_points[32];
        }
    }
}
34 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper/Scripts/NonRigidObjectTrackerFaceSwapper.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using System;
3 |
namespace OpenCVForUnity.FaceSwap
{
    /// <summary>
    /// FaceSwapper specialization that consumes the 76-point non-rigid
    /// object tracker landmark layout.
    /// </summary>
    public class NonRigidObjectTrackerFaceSwapper : FaceSwapper
    {
        /// <summary>
        /// Finds facial landmarks on faces and extracts the useful points.
        /// </summary>
        /// <param name="landmark_points">The 76 tracker landmark points.</param>
        /// <param name="points">Receives 9 facial-contour points (indices 0-8).</param>
        /// <param name="affine_transform_keypoints">Receives chin and the two eye landmarks.</param>
        /// <exception cref="ArgumentNullException">landmark_points is null.</exception>
        /// <exception cref="ArgumentException">landmark_points does not contain exactly 76 points.</exception>
        protected override void getFacePoints(Point[] landmark_points, Point[] points, Point[] affine_transform_keypoints)
        {
            if (landmark_points == null)
                throw new ArgumentNullException("landmark_points");
            // BUGFIX: a wrong length is an invalid (non-null) argument, so
            // ArgumentException is the correct type, not ArgumentNullException.
            if (landmark_points.Length != 76)
                throw new ArgumentException("Invalid landmark_points.", "landmark_points");

            // points (facial contour)
            points[0] = landmark_points[0];
            points[1] = landmark_points[2];
            points[2] = landmark_points[5];
            points[3] = landmark_points[7];
            points[4] = landmark_points[9];
            points[5] = landmark_points[12];
            points[6] = landmark_points[14];
            // Extend the forehead by offsetting the outer contour points along
            // the direction from nose tip to the midpoint between the eyes.
            Point nose_line_base = new Point((landmark_points[37].x + landmark_points[45].x) / 2, (landmark_points[37].y + landmark_points[45].y) / 2);
            Point nose_length = new Point(nose_line_base.x - landmark_points[67].x, nose_line_base.y - landmark_points[67].y);
            points[7] = new Point(landmark_points[15].x + nose_length.x, landmark_points[15].y + nose_length.y);
            points[8] = new Point(landmark_points[21].x + nose_length.x, landmark_points[21].y + nose_length.y);

            // affine_transform_keypoints (eyes and chin)
            affine_transform_keypoints[0] = points[3];
            affine_transform_keypoints[1] = landmark_points[27];
            affine_transform_keypoints[2] = landmark_points[32];
        }
    }
}
35 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceChanger/Scripts/RectUtils.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using System;
3 |
namespace OpenCVForUnity.FaceChange
{
    /// <summary>
    /// Static helpers for rectangle geometry (inflate, intersect, union).
    /// </summary>
    public class RectUtils
    {
        /// <summary>
        /// Creates and returns an inflated copy of the specified rectangle.
        /// The input rectangle is not modified.
        /// </summary>
        /// <param name="rect">The rectangle with which to start. This rectangle is not modified.</param>
        /// <param name="x">The amount to inflate this rectangle horizontally (applied to both sides).</param>
        /// <param name="y">The amount to inflate this rectangle vertically (applied to both sides).</param>
        /// <returns>A new, inflated rectangle.</returns>
        public static Rect Inflate(Rect rect, int x, int y)
        {
            // BUGFIX: Rect is a reference type, so mutating the parameter
            // modified the caller's rectangle — contradicting the documented
            // contract ("This rectangle is not modified"). Build a copy instead.
            return new Rect(rect.x - x, rect.y - y, rect.width + (2 * x), rect.height + (2 * y));
        }

        /// <summary>
        /// Determines the rectangle that represents the intersection of two rectangles.
        /// </summary>
        /// <param name="a">A rectangle to intersect.</param>
        /// <param name="b">A rectangle to intersect.</param>
        /// <returns>The intersection, or an empty rectangle when the inputs do not overlap.</returns>
        public static Rect Intersect(Rect a, Rect b)
        {
            int x1 = Math.Max(a.x, b.x);
            int x2 = Math.Min(a.x + a.width, b.x + b.width);
            int y1 = Math.Max(a.y, b.y);
            int y2 = Math.Min(a.y + a.height, b.y + b.height);

            if (x2 >= x1 && y2 >= y1)
                return new Rect(x1, y1, x2 - x1, y2 - y1);
            else
                return new Rect();
        }

        /// <summary>
        /// Gets a rectangle that contains the union (bounding box) of two rectangles.
        /// </summary>
        /// <param name="a">A rectangle to union.</param>
        /// <param name="b">A rectangle to union.</param>
        /// <returns>The smallest rectangle containing both inputs.</returns>
        public static Rect Union(Rect a, Rect b)
        {
            int x1 = Math.Min(a.x, b.x);
            int x2 = Math.Max(a.x + a.width, b.x + b.width);
            int y1 = Math.Min(a.y, b.y);
            int y2 = Math.Max(a.y + a.height, b.y + b.height);

            return new Rect(x1, y1, x2 - x1, y2 - y1);
        }
    }
}
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/OpenCVFaceSwapper/Scripts/RectUtils.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using System;
3 |
namespace OpenCVForUnity.FaceSwap
{
    /// <summary>
    /// Static helpers for rectangle geometry (inflate, intersect, union).
    /// </summary>
    public class RectUtils
    {
        /// <summary>
        /// Creates and returns an inflated copy of the specified rectangle.
        /// The input rectangle is not modified.
        /// </summary>
        /// <param name="rect">The rectangle with which to start. This rectangle is not modified.</param>
        /// <param name="x">The amount to inflate this rectangle horizontally (applied to both sides).</param>
        /// <param name="y">The amount to inflate this rectangle vertically (applied to both sides).</param>
        /// <returns>A new, inflated rectangle.</returns>
        public static Rect Inflate(Rect rect, int x, int y)
        {
            // BUGFIX: Rect is a reference type, so mutating the parameter
            // modified the caller's rectangle — contradicting the documented
            // contract ("This rectangle is not modified"). Build a copy instead.
            return new Rect(rect.x - x, rect.y - y, rect.width + (2 * x), rect.height + (2 * y));
        }

        /// <summary>
        /// Determines the rectangle that represents the intersection of two rectangles.
        /// </summary>
        /// <param name="a">A rectangle to intersect.</param>
        /// <param name="b">A rectangle to intersect.</param>
        /// <returns>The intersection, or an empty rectangle when the inputs do not overlap.</returns>
        public static Rect Intersect(Rect a, Rect b)
        {
            int x1 = Math.Max(a.x, b.x);
            int x2 = Math.Min(a.x + a.width, b.x + b.width);
            int y1 = Math.Max(a.y, b.y);
            int y2 = Math.Min(a.y + a.height, b.y + b.height);

            if (x2 >= x1 && y2 >= y1)
                return new Rect(x1, y1, x2 - x1, y2 - y1);
            else
                return new Rect();
        }

        /// <summary>
        /// Gets a rectangle that contains the union (bounding box) of two rectangles.
        /// </summary>
        /// <param name="a">A rectangle to union.</param>
        /// <param name="b">A rectangle to union.</param>
        /// <returns>The smallest rectangle containing both inputs.</returns>
        public static Rect Union(Rect a, Rect b)
        {
            int x1 = Math.Min(a.x, b.x);
            int x2 = Math.Max(a.x + a.width, b.x + b.width);
            int y1 = Math.Min(a.y, b.y);
            int y2 = Math.Max(a.y + a.height, b.y + b.height);

            return new Rect(x1, y1, x2 - x1, y2 - y1);
        }
    }
}
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/NoiseFilter/PointsFilterBase.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using System;
3 | using System.Collections.Generic;
4 | using UnityEngine;
5 |
namespace FaceSwapperExample
{
    /// <summary>
    /// Points Filter Base.
    /// Abstract base class for landmark-point noise filters.
    /// v 1.0.4
    /// </summary>
    // NOTE(review): generic type arguments appear to have been stripped in this
    // copy of the file (e.g. "List" / "IList" / "Swap(ref T ...)") — confirm
    // against the original source (likely List&lt;Vector2&gt; and Swap&lt;T&gt;).
    public abstract class PointsFilterBase
    {
        // Number of landmark points each filter call must supply.
        protected int numberOfElements;

        public PointsFilterBase(int numberOfElements)
        {
            this.numberOfElements = numberOfElements;
        }

        /// <summary>
        /// Processes points by filter.
        /// </summary>
        /// <param name="img">Image mat.</param>
        /// <param name="srcPoints">Input points.</param>
        /// <param name="dstPoints">Output points.</param>
        /// <param name="drawDebugPoints">If true, draws debug points.</param>
        /// <returns>Output points.</returns>
        public abstract List Process(Mat img, List srcPoints, List dstPoints = null, bool drawDebugPoints = false);

        /// <summary>
        /// Resets filter.
        /// </summary>
        public abstract void Reset();

        /// <summary>
        /// To release the resources for the initialized method.
        /// </summary>
        public abstract void Dispose();

        // This function is to calculate the variance of the per-point movement
        // distances between the current and the previous landmark sets.
        // Returns 0.0 when the two lists differ in length.
        protected virtual double calDistanceDiff(IList curPoints, IList lastPoints)
        {
            double variance = 0.0;
            double sum = 0.0;
            List diffs = new List();
            if (curPoints.Count == lastPoints.Count)
            {
                // First pass: per-point Euclidean displacement and their sum.
                for (int i = 0; i < curPoints.Count; i++)
                {
                    double diff = Math.Sqrt(Math.Pow(curPoints[i].x - lastPoints[i].x, 2.0) + Math.Pow(curPoints[i].y - lastPoints[i].y, 2.0));
                    sum += diff;
                    diffs.Add(diff);
                }
                double mean = sum / diffs.Count;
                // Second pass: sum of squared deviations from the mean.
                for (int i = 0; i < curPoints.Count; i++)
                {
                    variance += Math.Pow(diffs[i] - mean, 2);
                }
                return variance / diffs.Count;
            }
            return variance;
        }

        // Swaps the two referenced values in place.
        protected virtual void Swap(ref T a, ref T b)
        {
            var t = a;
            a = b;
            b = t;
        }
    }
}
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/FaceSwapperExample.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using UnityEngine;
3 | using UnityEngine.SceneManagement;
4 | using UnityEngine.UI;
5 |
namespace FaceSwapperExample
{
    /// <summary>
    /// FaceSwapper Example.
    /// Entry scene: displays title/version information and navigates to the
    /// individual example scenes via button handlers.
    /// </summary>
    public class FaceSwapperExample : MonoBehaviour
    {
        public Text exampleTitle;
        public Text versionInfo;
        public ScrollRect scrollRect;

        // Static so the scroll position survives scene reloads and is restored
        // when the user returns from an example scene.
        static float verticalNormalizedPosition = 1f;

        // Use this for initialization
        void Start()
        {
            exampleTitle.text = "FaceSwapper Example " + Application.version;

            // Library versions, then Unity version, then platform/scripting backend.
            versionInfo.text = Core.NATIVE_LIBRARY_NAME + " " + OpenCVForUnity.UnityUtils.Utils.getVersion() + " (" + Core.VERSION + ")";
            versionInfo.text += " / " + "dlibfacelandmarkdetector" + " " + DlibFaceLandmarkDetector.UnityUtils.Utils.getVersion();
            versionInfo.text += " / UnityEditor " + Application.unityVersion;
            versionInfo.text += " / ";

#if UNITY_EDITOR
            versionInfo.text += "Editor";
#elif UNITY_STANDALONE_WIN
            versionInfo.text += "Windows";
#elif UNITY_STANDALONE_OSX
            versionInfo.text += "Mac OSX";
#elif UNITY_STANDALONE_LINUX
            versionInfo.text += "Linux";
#elif UNITY_ANDROID
            versionInfo.text += "Android";
#elif UNITY_IOS
            versionInfo.text += "iOS";
#elif UNITY_WSA
            versionInfo.text += "WSA";
#elif UNITY_WEBGL
            versionInfo.text += "WebGL";
#endif
            versionInfo.text += " ";
#if ENABLE_MONO
            versionInfo.text += "Mono";
#elif ENABLE_IL2CPP
            versionInfo.text += "IL2CPP";
#elif ENABLE_DOTNET
            versionInfo.text += ".NET";
#endif

            scrollRect.verticalNormalizedPosition = verticalNormalizedPosition;
        }

        // NOTE(review): the empty Update() method was removed — Unity invokes it
        // every frame when present, adding needless per-frame overhead.

        /// <summary>
        /// Remembers the current scroll position so it can be restored later.
        /// </summary>
        public void OnScrollRectValueChanged()
        {
            verticalNormalizedPosition = scrollRect.verticalNormalizedPosition;
        }

        public void OnShowLicenseButtonClick()
        {
            SceneManager.LoadScene("ShowLicense");
        }

        public void OnTexture2DFaceSwapperExampleButtonClick()
        {
            SceneManager.LoadScene("Texture2DFaceSwapperExample");
        }

        public void OnWebCamTextureFaceSwapperExampleButtonClick()
        {
            SceneManager.LoadScene("WebCamTextureFaceSwapperExample");
        }

        public void OnVideoCaptureFaceSwapperExampleButtonClick()
        {
            SceneManager.LoadScene("VideoCaptureFaceSwapperExample");
        }

        public void OnTexture2DFaceChangerExampleButtonClick()
        {
            SceneManager.LoadScene("Texture2DFaceChangerExample");
        }

        public void OnWebCamTextureFaceChangerExampleButtonClick()
        {
            SceneManager.LoadScene("WebCamTextureFaceChangerExample");
        }
    }
}
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/NoiseFilter/LowPassPointsFilter.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using OpenCVForUnity.ImgprocModule;
3 | using System;
4 | using System.Collections.Generic;
5 | using UnityEngine;
6 |
namespace FaceSwapperExample
{
    /// <summary>
    /// Low Pass Points Filter.
    /// Suppresses small landmark jitter: a point is only updated when it moves
    /// further than the threshold from its last accepted position.
    /// v 1.0.4
    /// </summary>
    // NOTE(review): generic type arguments appear stripped in this copy
    // ("List" without arguments — likely List&lt;Vector2&gt;); confirm against original.
    public class LowPassPointsFilter : PointsFilterBase
    {
        // Movement threshold in pixels; displacements at or below this are ignored.
        // (Name "diffLawPass" looks like a typo for "diffLowPass", but it is a
        // public field — renaming would break callers.)
        public double diffLawPass = 2;

        // True once a first landmark set has been captured into lastPoints.
        bool flag = false;

        // Last accepted position for each landmark.
        List lastPoints;

        public LowPassPointsFilter(int numberOfElements) : base(numberOfElements)
        {
            lastPoints = new List();
            for (int i = 0; i < numberOfElements; i++)
            {
                lastPoints.Add(new Vector2());
            }
        }

        /// <summary>
        /// Processes points by filter.
        /// </summary>
        /// <param name="img">Image mat (used only for debug drawing).</param>
        /// <param name="srcPoints">Input points.</param>
        /// <param name="dstPoints">Output points.</param>
        /// <param name="drawDebugPoints">If true, draws debug points: green = updated, red = held, blue = first frame.</param>
        /// <returns>Output points.</returns>
        /// <exception cref="ArgumentException">srcPoints count differs from numberOfElements.</exception>
        public override List Process(Mat img, List srcPoints, List dstPoints = null, bool drawDebugPoints = false)
        {
            if (srcPoints != null && srcPoints.Count != numberOfElements)
            {
                throw new ArgumentException("The number of elements is different.");
            }

            if (srcPoints != null)
            {

                // Lazily allocate / resize the output list.
                if (dstPoints == null)
                {
                    dstPoints = new List();
                }
                if (dstPoints != null && dstPoints.Count != numberOfElements)
                {
                    dstPoints.Clear();
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        dstPoints.Add(new Vector2());
                    }
                }

                if (flag)
                {
                    // Normal path: accept a new position only when it moved
                    // more than diffLawPass pixels since the last accepted one.
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        double diff = Math.Sqrt(Math.Pow(srcPoints[i].x - lastPoints[i].x, 2.0) + Math.Pow(srcPoints[i].y - lastPoints[i].y, 2.0));
                        if (diff > diffLawPass)
                        {
                            lastPoints[i] = srcPoints[i];
                            if (drawDebugPoints)
                                Imgproc.circle(img, new Point(srcPoints[i].x, srcPoints[i].y), 1, new Scalar(0, 255, 0, 255), -1);
                        }
                        else
                        {
                            if (drawDebugPoints)
                                Imgproc.circle(img, new Point(lastPoints[i].x, lastPoints[i].y), 1, new Scalar(255, 0, 0, 255), -1);
                        }
                        dstPoints[i] = lastPoints[i];
                    }
                }
                else
                {
                    // First frame: seed the filter with the raw input.
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        lastPoints[i] = srcPoints[i];
                        dstPoints[i] = srcPoints[i];
                    }
                    if (drawDebugPoints)
                    {
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            Imgproc.circle(img, new Point(srcPoints[i].x, srcPoints[i].y), 1, new Scalar(0, 0, 255, 255), -1);
                        }
                    }
                    flag = true;
                }
                return dstPoints;
            }
            else
            {
                // No input this frame: return whatever the caller supplied
                // (null when dstPoints is null, since srcPoints is null here).
                return dstPoints == null ? srcPoints : dstPoints;
            }
        }

        /// <summary>
        /// Resets filter.
        /// </summary>
        public override void Reset()
        {
            flag = false;
            for (int i = 0; i < lastPoints.Count; i++)
            {
                lastPoints[i] = new Vector2();
            }
        }

        /// <summary>
        /// To release the resources for the initialized method.
        /// </summary>
        public override void Dispose()
        {
            if (lastPoints != null)
                lastPoints.Clear();
        }
    }
}
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/Utils/FpsMonitor.cs:
--------------------------------------------------------------------------------
1 | using System.Collections.Generic;
2 | using UnityEngine;
3 |
4 | namespace FaceSwapperExample
5 | {
6 | public class FpsMonitor : MonoBehaviour
7 | {
8 | int tick = 0;
9 | float elapsed = 0;
10 | float fps = 0;
11 |
12 | public enum Alignment
13 | {
14 | LeftTop,
15 | RightTop,
16 | LeftBottom,
17 | RightBottom,
18 | }
19 |
20 | public Alignment alignment = Alignment.RightTop;
21 |
22 | const float GUI_WIDTH = 75f;
23 | const float GUI_HEIGHT = 30f;
24 | const float MARGIN_X = 10f;
25 | const float MARGIN_Y = 10f;
26 | const float INNER_X = 8f;
27 | const float INNER_Y = 5f;
28 | const float GUI_CONSOLE_HEIGHT = 50f;
29 |
30 | public Vector2 offset = new Vector2(MARGIN_X, MARGIN_Y);
31 | public bool boxVisible = true;
32 | public float boxWidth = GUI_WIDTH;
33 | public float boxHeight = GUI_HEIGHT;
34 | public Vector2 padding = new Vector2(INNER_X, INNER_Y);
35 | public float consoleHeight = GUI_CONSOLE_HEIGHT;
36 |
37 | GUIStyle console_labelStyle;
38 |
39 | float x, y;
40 | Rect outer;
41 | Rect inner;
42 |
43 | float console_x, console_y;
44 | Rect console_outer;
45 | Rect console_inner;
46 |
47 | int oldScrWidth;
48 | int oldScrHeight;
49 |
50 | Dictionary outputDict = new Dictionary();
51 | public string consoleText;
52 |
53 | // Use this for initialization
54 | void Start()
55 | {
56 | console_labelStyle = new GUIStyle();
57 | console_labelStyle.fontSize = 32;
58 | console_labelStyle.fontStyle = FontStyle.Normal;
59 | console_labelStyle.wordWrap = true;
60 | console_labelStyle.normal.textColor = Color.white;
61 |
62 | oldScrWidth = Screen.width;
63 | oldScrHeight = Screen.height;
64 | LocateGUI();
65 | }
66 |
67 | // Update is called once per frame
68 | void Update()
69 | {
70 | tick++;
71 | elapsed += Time.deltaTime;
72 | if (elapsed >= 1f)
73 | {
74 | fps = tick / elapsed;
75 | tick = 0;
76 | elapsed = 0;
77 | }
78 | }
79 |
        // Draws the fps/stats box and the optional console strip.
        // Immediate-mode GUI: the call order below IS the layout.
        void OnGUI()
        {
            // Re-layout when the screen size changed since the last frame.
            if (oldScrWidth != Screen.width || oldScrHeight != Screen.height)
            {
                LocateGUI();
            }
            oldScrWidth = Screen.width;
            oldScrHeight = Screen.height;

            // Background box behind the stats labels.
            if (boxVisible)
            {
                GUI.Box(outer, "");
            }

            // Stats area: fps first, then every key/value pair added via Add().
            GUILayout.BeginArea(inner);
            {
                GUILayout.BeginVertical();
                GUILayout.Label("fps : " + fps.ToString("F1"));
                // NOTE(review): generic arguments appear stripped by the dump;
                // presumably KeyValuePair<string, string> — confirm against outputDict.
                foreach (KeyValuePair pair in outputDict)
                {
                    GUILayout.Label(pair.Key + " : " + pair.Value);
                }
                GUILayout.EndVertical();
            }
            GUILayout.EndArea();

            // Console strip pinned near the bottom edge, only when there is text.
            if (!string.IsNullOrEmpty(consoleText))
            {
                if (boxVisible)
                {
                    GUI.Box(console_outer, "");
                }

                GUILayout.BeginArea(console_inner);
                {
                    GUILayout.BeginVertical();
                    GUILayout.Label(consoleText, console_labelStyle);
                    GUILayout.EndVertical();
                }
                GUILayout.EndArea();
            }
        }
122 |
123 | public void Add(string key, string value)
124 | {
125 | if (outputDict.ContainsKey(key))
126 | {
127 | outputDict[key] = value;
128 | }
129 | else
130 | {
131 | outputDict.Add(key, value);
132 | }
133 | }
134 |
135 | public void Remove(string key)
136 | {
137 | outputDict.Remove(key);
138 | }
139 |
140 | public void Clear()
141 | {
142 | outputDict.Clear();
143 | }
144 |
        /// <summary>
        /// Recomputes the screen-space rectangles for the stats box and the console.
        /// Called from Start and again whenever OnGUI detects a screen resize.
        /// </summary>
        public void LocateGUI()
        {
            // Stats box anchored per 'alignment' (field declared above this chunk).
            x = GetAlignedX(alignment, boxWidth);
            y = GetAlignedY(alignment, boxHeight);
            outer = new Rect(x, y, boxWidth, boxHeight);
            // NOTE(review): 'inner' keeps the full box size instead of shrinking by
            // the padding — labels can overflow the box; confirm this is intended.
            inner = new Rect(x + padding.x, y + padding.y, boxWidth, boxHeight);

            // Console strip always hugs the bottom-left and spans the screen width
            // minus the horizontal offset on both sides.
            console_x = GetAlignedX(Alignment.LeftBottom, Screen.width);
            console_y = GetAlignedY(Alignment.LeftBottom, consoleHeight);
            console_outer = new Rect(console_x, console_y, Screen.width - offset.x * 2, consoleHeight);
            console_inner = new Rect(console_x + padding.x, console_y + padding.y, Screen.width - offset.x * 2 - padding.x, consoleHeight);
        }
157 |
158 | float GetAlignedX(Alignment anchor, float w)
159 | {
160 | switch (anchor)
161 | {
162 | default:
163 | case Alignment.LeftTop:
164 | case Alignment.LeftBottom:
165 | return offset.x;
166 |
167 | case Alignment.RightTop:
168 | case Alignment.RightBottom:
169 | return Screen.width - w - offset.x;
170 | }
171 | }
172 |
173 | float GetAlignedY(Alignment anchor, float h)
174 | {
175 | switch (anchor)
176 | {
177 | default:
178 | case Alignment.LeftTop:
179 | case Alignment.RightTop:
180 | return offset.y;
181 |
182 | case Alignment.LeftBottom:
183 | case Alignment.RightBottom:
184 | return Screen.height - h - offset.y;
185 | }
186 | }
187 | }
188 | }
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/Utils/FrontalFaceChecker.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.Calib3dModule;
2 | using OpenCVForUnity.CoreModule;
3 | using System;
4 | using System.Collections.Generic;
5 | using UnityEngine;
6 |
7 | namespace FaceSwapperExample
8 | {
9 | ///
10 | /// Frontal face checker. (Calculates from points of face landmark which was detected by using Dlib Face Landmark Detector)
11 | /// v 1.0.1
12 | ///
13 | public class FrontalFaceChecker
14 | {
15 | float imageWidth;
16 | float imageHeight;
17 | Point[] landmarkPoints = new Point[7];
18 | Matrix4x4 transformationM = new Matrix4x4();
19 | MatOfPoint3f objectPoints;
20 | MatOfPoint2f imagePoints;
21 | Mat rvec;
22 | Mat tvec;
23 | Mat rotM;
24 | Mat camMatrix;
25 | MatOfDouble distCoeffs;
26 | Matrix4x4 invertYM;
27 | Matrix4x4 invertZM;
28 |
29 | ///
30 | /// Initializes a new instance of the class.
31 | ///
32 | /// Width of the image which was used in the face landmark detection.
33 | /// Height of the image which was used in the face landmark detection.
34 | public FrontalFaceChecker(float width, float height)
35 | {
36 | imageWidth = width;
37 | imageHeight = height;
38 |
39 | for (int i = 0; i < landmarkPoints.Length; i++)
40 | {
41 | landmarkPoints[i] = new Point(0, 0);
42 | }
43 |
44 | objectPoints = new MatOfPoint3f(
45 | new Point3(-34, 90, 83),//l eye (Interpupillary breadth)
46 | new Point3(34, 90, 83),//r eye (Interpupillary breadth)
47 | new Point3(0.0, 50, 120),//nose (Nose top)
48 | new Point3(-26, 15, 83),//l mouse (Mouth breadth)
49 | new Point3(26, 15, 83),//r mouse (Mouth breadth)
50 | new Point3(-79, 90, 0.0),//l ear (Bitragion breadth)
51 | new Point3(79, 90, 0.0)//r ear (Bitragion breadth)
52 | );
53 |
54 | imagePoints = new MatOfPoint2f();
55 |
56 | rvec = new Mat(3, 1, CvType.CV_64FC1);
57 | tvec = new Mat(3, 1, CvType.CV_64FC1);
58 |
59 | rotM = new Mat(3, 3, CvType.CV_64FC1);
60 |
61 | float max_d = Mathf.Max(imageHeight, imageWidth);
62 | camMatrix = new Mat(3, 3, CvType.CV_64FC1);
63 | camMatrix.put(0, 0, max_d);
64 | camMatrix.put(0, 1, 0);
65 | camMatrix.put(0, 2, imageWidth / 2.0f);
66 | camMatrix.put(1, 0, 0);
67 | camMatrix.put(1, 1, max_d);
68 | camMatrix.put(1, 2, imageHeight / 2.0f);
69 | camMatrix.put(2, 0, 0);
70 | camMatrix.put(2, 1, 0);
71 | camMatrix.put(2, 2, 1.0f);
72 |
73 | distCoeffs = new MatOfDouble(0, 0, 0, 0);
74 |
75 | invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
76 | invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
77 | }
78 |
79 | public void Dispose()
80 | {
81 | if (objectPoints != null && !objectPoints.IsDisposed)
82 | objectPoints.Dispose();
83 |
84 | if (imagePoints != null && !imagePoints.IsDisposed)
85 | imagePoints.Dispose();
86 |
87 | if (rvec != null && !rvec.IsDisposed)
88 | rvec.Dispose();
89 |
90 | if (tvec != null && !tvec.IsDisposed)
91 | tvec.Dispose();
92 |
93 | if (rotM != null && !rotM.IsDisposed)
94 | rotM.Dispose();
95 |
96 | if (camMatrix != null && !camMatrix.IsDisposed)
97 | camMatrix.Dispose();
98 |
99 | if (distCoeffs != null && !distCoeffs.IsDisposed)
100 | distCoeffs.Dispose();
101 | }
102 |
103 | ///
104 | /// Gets the frontal face angles.
105 | ///
106 | /// Frontal face angles.
107 | /// Points of face landmark which was detected with Dlib.
108 | public Vector3 GetFrontalFaceAngles(List points)
109 | {
110 | if (points.Count < 68)
111 | throw new ArgumentException("Invalid face landmark points", "points");
112 |
113 | landmarkPoints[0].x = (points[38].x + points[41].x) / 2;
114 | landmarkPoints[0].y = (points[38].y + points[41].y) / 2;
115 | landmarkPoints[1].x = (points[43].x + points[46].x) / 2;
116 | landmarkPoints[1].y = (points[43].y + points[46].y) / 2;
117 | landmarkPoints[2].x = points[30].x;
118 | landmarkPoints[2].y = points[30].y;
119 | landmarkPoints[3].x = points[48].x;
120 | landmarkPoints[3].y = points[48].y;
121 | landmarkPoints[4].x = points[54].x;
122 | landmarkPoints[4].y = points[54].y;
123 | landmarkPoints[5].x = points[0].x;
124 | landmarkPoints[5].y = points[0].y;
125 | landmarkPoints[6].x = points[16].x;
126 | landmarkPoints[6].y = points[16].y;
127 |
128 | // Normalize points.
129 | Point centerOffset = landmarkPoints[2] - new Point(imageWidth / 2, imageHeight / 2);
130 | for (int i = 0; i < landmarkPoints.Length; i++)
131 | {
132 | landmarkPoints[i] = landmarkPoints[i] - centerOffset;
133 | }
134 |
135 | imagePoints.fromArray(landmarkPoints);
136 |
137 | Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
138 |
139 | double tvec_z = tvec.get(2, 0)[0];
140 |
141 | //Debug.Log (rvec.dump());
142 | //Debug.Log (tvec.dump());
143 |
144 | if (!double.IsNaN(tvec_z))
145 | {
146 | Calib3d.Rodrigues(rvec, rotM);
147 |
148 | //Debug.Log (rotM.dump());
149 |
150 | transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0)[0], (float)rotM.get(0, 1)[0], (float)rotM.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
151 | transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0)[0], (float)rotM.get(1, 1)[0], (float)rotM.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
152 | transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0)[0], (float)rotM.get(2, 1)[0], (float)rotM.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
153 | transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
154 |
155 | transformationM = invertYM * transformationM * invertZM;
156 |
157 | Vector3 angles = ExtractRotationFromMatrix(ref transformationM).eulerAngles;
158 |
159 | //Debug.Log ("angles " + angles.x + " " + angles.y + " " + angles.z);
160 |
161 | float rotationX = (angles.x > 180) ? angles.x - 360 : angles.x;
162 | float rotationY = (angles.y > 180) ? angles.y - 360 : angles.y;
163 | float rotationZ = (tvec_z >= 0) ? (angles.z > 180) ? angles.z - 360 : angles.z : 180 - angles.z;
164 |
165 | if (tvec_z < 0)
166 | {
167 | rotationX = -rotationX;
168 | rotationY = -rotationY;
169 | rotationZ = -rotationZ;
170 | }
171 |
172 | return new Vector3(rotationX, rotationY, rotationZ);
173 | }
174 | else
175 | {
176 | return new Vector3(0, 0, 0);
177 | }
178 | }
179 |
180 | ///
181 | /// Gets the frontal face rate.
182 | ///
183 | /// Frontal face rate.(a value of 0 to 1)
184 | /// Points of face landmark which was detected with Dlib.
185 | public float GetFrontalFaceRate(List points)
186 | {
187 | Vector3 angles = GetFrontalFaceAngles(points);
188 |
189 | //Debug.Log ("angles " + angles.x + " " + angles.y + " " + angles.z);
190 |
191 | float angle = Mathf.Max(Mathf.Abs(angles.x), Mathf.Abs(angles.y));
192 | float rate = (angle <= 90) ? angle / 90 : 1;
193 |
194 | //Debug.Log ("ratio " + (1.0f - rate));
195 |
196 | return 1.0f - rate;
197 | }
198 |
199 | ///
200 | /// Extract rotation quaternion from transform matrix.
201 | ///
202 | /// Transform matrix. This parameter is passed by reference
203 | /// to improve performance; no changes will be made to it.
204 | ///
205 | /// Quaternion representation of rotation transform.
206 | ///
207 | private Quaternion ExtractRotationFromMatrix(ref Matrix4x4 matrix)
208 | {
209 | Vector3 forward;
210 | forward.x = matrix.m02;
211 | forward.y = matrix.m12;
212 | forward.z = matrix.m22;
213 |
214 | Vector3 upwards;
215 | upwards.x = matrix.m01;
216 | upwards.y = matrix.m11;
217 | upwards.z = matrix.m21;
218 |
219 | return Quaternion.LookRotation(forward, upwards);
220 | }
221 | }
222 | }
223 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Scripts/NoiseFilter/OFPointsFilter.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using OpenCVForUnity.ImgprocModule;
3 | using OpenCVForUnity.VideoModule;
4 | using System;
5 | using System.Collections.Generic;
6 | using UnityEngine;
7 |
8 | namespace FaceSwapperExample
9 | {
10 | ///
11 | /// Optical Flow Points Filter.
12 | /// v 1.0.4
13 | ///
14 | public class OFPointsFilter : PointsFilterBase
15 | {
16 | public double diffCheckSensitivity = 1;
17 |
18 | bool flag = false;
19 | double diffDlib = 1;
20 | MatOfPoint prevTrackPtsMat;
21 |
22 | // Optical Flow
23 | Mat prevgray, gray;
24 | List prevTrackPts;
25 | List nextTrackPts;
26 | MatOfPoint2f mOP2fPrevTrackPts;
27 | MatOfPoint2f mOP2fNextTrackPts;
28 | MatOfByte status;
29 | MatOfFloat err;
30 |
31 | public OFPointsFilter(int numberOfElements) : base(numberOfElements)
32 | {
33 | diffDlib = diffDlib * (double)numberOfElements / 68.0;
34 | prevTrackPtsMat = new MatOfPoint();
35 |
36 | // Initialize Optical Flow
37 | InitializeOpticalFlow();
38 | }
39 |
40 | ///
41 | /// Processes points by filter.
42 | ///
43 | /// Image mat.
44 | /// Input points.
45 | /// Output points.
46 | /// if true, draws debug points.
47 | /// Output points.
48 | public override List Process(Mat img, List srcPoints, List dstPoints = null, bool drawDebugPoints = false)
49 | {
50 | if (srcPoints != null && srcPoints.Count != numberOfElements)
51 | {
52 | throw new ArgumentException("The number of elements is different.");
53 | }
54 |
55 | if (srcPoints == null)
56 | {
57 | return dstPoints == null ? srcPoints : dstPoints;
58 | }
59 |
60 | if (!flag)
61 | {
62 | if (img.channels() == 4)
63 | {
64 | Imgproc.cvtColor(img, prevgray, Imgproc.COLOR_RGBA2GRAY);
65 | }
66 | else if (img.channels() == 3)
67 | {
68 | Imgproc.cvtColor(img, prevgray, Imgproc.COLOR_RGB2GRAY);
69 | }
70 | else
71 | {
72 | if (prevgray.total() == 0)
73 | {
74 | prevgray = img.clone();
75 | }
76 | else
77 | {
78 | img.copyTo(prevgray);
79 | }
80 | }
81 |
82 | for (int i = 0; i < numberOfElements; i++)
83 | {
84 | prevTrackPts[i] = new Point(srcPoints[i].x, srcPoints[i].y);
85 | }
86 |
87 | flag = true;
88 | }
89 |
90 | if (srcPoints != null)
91 | {
92 |
93 | if (dstPoints == null)
94 | {
95 | dstPoints = new List();
96 | }
97 | if (dstPoints != null && dstPoints.Count != numberOfElements)
98 | {
99 | dstPoints.Clear();
100 | for (int i = 0; i < numberOfElements; i++)
101 | {
102 | dstPoints.Add(new Vector2());
103 | }
104 | }
105 |
106 | if (img.channels() == 4)
107 | {
108 | Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGBA2GRAY);
109 | }
110 | else if (img.channels() == 3)
111 | {
112 | Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGB2GRAY);
113 | }
114 | else
115 | {
116 | if (gray.total() == 0)
117 | {
118 | gray = img.clone();
119 | }
120 | else
121 | {
122 | img.copyTo(gray);
123 | }
124 | }
125 |
126 | if (prevgray.total() > 0)
127 | {
128 | mOP2fPrevTrackPts.fromList(prevTrackPts);
129 | mOP2fNextTrackPts.fromList(nextTrackPts);
130 | Video.calcOpticalFlowPyrLK(prevgray, gray, mOP2fPrevTrackPts, mOP2fNextTrackPts, status, err);
131 | prevTrackPts = mOP2fPrevTrackPts.toList();
132 | nextTrackPts = mOP2fNextTrackPts.toList();
133 |
134 | // clac diffDlib
135 | prevTrackPtsMat.fromList(prevTrackPts);
136 | OpenCVForUnity.CoreModule.Rect rect = Imgproc.boundingRect(prevTrackPtsMat);
137 | double diffDlib = this.diffDlib * rect.area() / 40000.0 * diffCheckSensitivity;
138 |
139 | // if the face is moving so fast, use dlib to detect the face
140 | double diff = calDistanceDiff(prevTrackPts, nextTrackPts);
141 | if (drawDebugPoints)
142 | Debug.Log("variance:" + diff);
143 | if (diff > diffDlib)
144 | {
145 | for (int i = 0; i < numberOfElements; i++)
146 | {
147 | nextTrackPts[i].x = srcPoints[i].x;
148 | nextTrackPts[i].y = srcPoints[i].y;
149 |
150 | dstPoints[i] = srcPoints[i];
151 | }
152 |
153 | if (drawDebugPoints)
154 | {
155 | Debug.Log("DLIB");
156 | for (int i = 0; i < numberOfElements; i++)
157 | {
158 | Imgproc.circle(img, new Point(srcPoints[i].x, srcPoints[i].y), 2, new Scalar(255, 0, 0, 255), -1);
159 | }
160 | }
161 | }
162 | else
163 | {
164 | // In this case, use Optical Flow
165 | for (int i = 0; i < numberOfElements; i++)
166 | {
167 | dstPoints[i] = new Vector2((float)nextTrackPts[i].x, (float)nextTrackPts[i].y);
168 | }
169 |
170 | if (drawDebugPoints)
171 | {
172 | Debug.Log("Optical Flow");
173 | for (int i = 0; i < numberOfElements; i++)
174 | {
175 | Imgproc.circle(img, nextTrackPts[i], 2, new Scalar(0, 0, 255, 255), -1);
176 | }
177 | }
178 | }
179 | }
180 | Swap(ref prevTrackPts, ref nextTrackPts);
181 | Swap(ref prevgray, ref gray);
182 | }
183 | return dstPoints;
184 | }
185 |
186 | ///
187 | /// Resets filter.
188 | ///
189 | public override void Reset()
190 | {
191 | flag = false;
192 |
193 | // Reset Optical Flow
194 | for (int i = 0; i < numberOfElements; i++)
195 | {
196 | prevTrackPts[i].x = 0.0;
197 | prevTrackPts[i].y = 0.0;
198 | }
199 | for (int i = 0; i < numberOfElements; i++)
200 | {
201 | nextTrackPts[i].x = 0.0;
202 | nextTrackPts[i].y = 0.0;
203 | }
204 |
205 | if (prevgray != null)
206 | {
207 | prevgray.Dispose();
208 | prevgray = new Mat();
209 | }
210 | if (gray != null)
211 | {
212 | gray.Dispose();
213 | gray = new Mat();
214 | }
215 | }
216 |
217 | ///
218 | /// To release the resources for the initialized method.
219 | ///
220 | public override void Dispose()
221 | {
222 | DisposeOpticalFlow();
223 |
224 | if (prevTrackPtsMat != null)
225 | prevTrackPtsMat.Dispose();
226 | }
227 |
228 | protected virtual void InitializeOpticalFlow()
229 | {
230 | prevTrackPts = new List();
231 | for (int i = 0; i < numberOfElements; i++)
232 | {
233 | prevTrackPts.Add(new Point(0, 0));
234 | }
235 | nextTrackPts = new List();
236 | for (int i = 0; i < numberOfElements; i++)
237 | {
238 | nextTrackPts.Add(new Point(0, 0));
239 | }
240 | prevgray = new Mat();
241 | gray = new Mat();
242 | mOP2fPrevTrackPts = new MatOfPoint2f();
243 | mOP2fNextTrackPts = new MatOfPoint2f();
244 | status = new MatOfByte();
245 | err = new MatOfFloat();
246 | }
247 |
248 | protected virtual void DisposeOpticalFlow()
249 | {
250 | if (prevTrackPts != null)
251 | prevTrackPts.Clear();
252 | if (nextTrackPts != null)
253 | nextTrackPts.Clear();
254 | if (prevgray != null)
255 | prevgray.Dispose();
256 | if (gray != null)
257 | gray.Dispose();
258 | if (mOP2fPrevTrackPts != null)
259 | mOP2fPrevTrackPts.Dispose();
260 | if (mOP2fNextTrackPts != null)
261 | mOP2fNextTrackPts.Dispose();
262 | if (status != null)
263 | status.Dispose();
264 | if (err != null)
265 | err.Dispose();
266 | }
267 | }
268 | }
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Texture2DFaceChangerExample/Texture2DFaceChangerExample.cs:
--------------------------------------------------------------------------------
1 | using DlibFaceLandmarkDetector;
2 | using OpenCVForUnity.CoreModule;
3 | using OpenCVForUnity.FaceChange;
4 | using OpenCVForUnity.ImgprocModule;
5 | using OpenCVForUnity.ObjdetectModule;
6 | using System;
7 | using System.Collections;
8 | using System.Collections.Generic;
9 | using System.Linq;
10 | using UnityEngine;
11 | using UnityEngine.SceneManagement;
12 | using UnityEngine.UI;
13 | using Rect = OpenCVForUnity.CoreModule.Rect;
14 |
15 | namespace FaceSwapperExample
16 | {
17 | ///
18 | /// Texture2D FaceChanger Example
19 | ///
20 | public class Texture2DFaceChangerExample : MonoBehaviour
21 | {
22 | ///
23 | /// Determines if use dlib face detector.
24 | ///
25 | public bool useDlibFaceDetecter = true;
26 |
27 | ///
28 | /// The use dlib face detecter toggle.
29 | ///
30 | public Toggle useDlibFaceDetecterToggle;
31 |
32 | ///
33 | /// Determines if filters non frontal faces.
34 | ///
35 | public bool filterNonFrontalFaces;
36 |
37 | ///
38 | /// The filter non frontal faces toggle.
39 | ///
40 | public Toggle filterNonFrontalFacesToggle;
41 |
42 | ///
43 | /// The frontal face rate lower limit.
44 | ///
45 | [Range(0.0f, 1.0f)]
46 | public float frontalFaceRateLowerLimit;
47 |
48 | ///
49 | /// Determines if displays face rects.
50 | ///
51 | public bool displayFaceRects = false;
52 |
53 | ///
54 | /// The toggle for switching face rects display state.
55 | ///
56 | public Toggle displayFaceRectsToggle;
57 |
58 | ///
59 | /// Determines if displays debug face points.
60 | ///
61 | public bool displayDebugFacePoints = false;
62 |
63 | ///
64 | /// The toggle for switching debug face points display state.
65 | ///
66 | public Toggle displayDebugFacePointsToggle;
67 |
68 | ///
69 | /// The image texture.
70 | ///
71 | Texture2D imgTexture;
72 |
73 | ///
74 | /// The cascade.
75 | ///
76 | CascadeClassifier cascade;
77 |
78 | ///
79 | /// The face landmark detector.
80 | ///
81 | FaceLandmarkDetector faceLandmarkDetector;
82 |
83 | ///
84 | /// The haarcascade_frontalface_alt_xml_filepath.
85 | ///
86 | string haarcascade_frontalface_alt_xml_filepath;
87 |
88 | ///
89 | /// The sp_human_face_68_dat_filepath.
90 | ///
91 | string sp_human_face_68_dat_filepath;
92 |
93 | #if UNITY_WEBGL
94 | IEnumerator getFilePath_Coroutine;
95 | #endif
96 |
97 | // Use this for initialization
98 | void Start()
99 | {
100 | #if UNITY_WEBGL
101 | getFilePath_Coroutine = GetFilePath();
102 | StartCoroutine(getFilePath_Coroutine);
103 | #else
104 | haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/haarcascade_frontalface_alt.xml");
105 | sp_human_face_68_dat_filepath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/sp_human_face_68.dat");
106 | Run();
107 | #endif
108 | }
109 |
110 | #if UNITY_WEBGL
111 | private IEnumerator GetFilePath()
112 | {
113 | var getFilePathAsync_0_Coroutine = OpenCVForUnity.UnityUtils.Utils.getFilePathAsync("DlibFaceLandmarkDetector/haarcascade_frontalface_alt.xml", (result) =>
114 | {
115 | haarcascade_frontalface_alt_xml_filepath = result;
116 | });
117 | yield return getFilePathAsync_0_Coroutine;
118 |
119 | var getFilePathAsync_1_Coroutine = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsync("DlibFaceLandmarkDetector/sp_human_face_68.dat", (result) =>
120 | {
121 | sp_human_face_68_dat_filepath = result;
122 | });
123 | yield return getFilePathAsync_1_Coroutine;
124 |
125 | getFilePath_Coroutine = null;
126 |
127 | Run();
128 | }
129 | #endif
130 |
131 | private void Run()
132 | {
133 | displayFaceRectsToggle.isOn = displayFaceRects;
134 | useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
135 | filterNonFrontalFacesToggle.isOn = filterNonFrontalFaces;
136 | displayDebugFacePointsToggle.isOn = displayDebugFacePoints;
137 |
138 | if (imgTexture == null)
139 | imgTexture = Resources.Load("family") as Texture2D;
140 |
141 | gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
142 | Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
143 |
144 | float width = 0;
145 | float height = 0;
146 |
147 | width = gameObject.transform.localScale.x;
148 | height = gameObject.transform.localScale.y;
149 |
150 |
151 | float widthScale = (float)Screen.width / width;
152 | float heightScale = (float)Screen.height / height;
153 | if (widthScale < heightScale)
154 | {
155 | Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
156 | }
157 | else
158 | {
159 | Camera.main.orthographicSize = height / 2;
160 | }
161 |
162 | Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
163 |
164 | OpenCVForUnity.UnityUtils.Utils.texture2DToMat(imgTexture, rgbaMat);
165 | Debug.Log("rgbaMat ToString " + rgbaMat.ToString());
166 |
167 | if (faceLandmarkDetector == null)
168 | faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
169 |
170 | FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(width, height);
171 |
172 | // detect faces.
173 | List detectResult = new List();
174 | if (useDlibFaceDetecter)
175 | {
176 | OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
177 | List result = faceLandmarkDetector.Detect();
178 |
179 | foreach (var unityRect in result)
180 | {
181 | detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
182 | }
183 | }
184 | else
185 | {
186 | if (cascade == null)
187 | cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
188 | //if (cascade.empty ()) {
189 | // Debug.LogError ("cascade file is not loaded. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
190 | //}
191 |
192 | // convert image to greyscale.
193 | Mat gray = new Mat();
194 | Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
195 |
196 | MatOfRect faces = new MatOfRect();
197 | Imgproc.equalizeHist(gray, gray);
198 | cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
199 | //Debug.Log ("faces " + faces.dump ());
200 |
201 | detectResult = faces.toList();
202 |
203 | // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
204 | foreach (Rect r in detectResult)
205 | {
206 | r.y += (int)(r.height * 0.1f);
207 | }
208 |
209 | gray.Dispose();
210 | }
211 |
212 | // detect face landmark points.
213 | OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
214 | List> landmarkPoints = new List>();
215 | foreach (var openCVRect in detectResult)
216 | {
217 | UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
218 |
219 | Debug.Log("face : " + rect);
220 | //OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);
221 |
222 | List points = faceLandmarkDetector.DetectLandmark(rect);
223 | //OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
224 | landmarkPoints.Add(points);
225 | }
226 |
227 |
228 | // filter non frontal faces.
229 | if (filterNonFrontalFaces)
230 | {
231 | for (int i = 0; i < landmarkPoints.Count; i++)
232 | {
233 | if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
234 | {
235 | detectResult.RemoveAt(i);
236 | landmarkPoints.RemoveAt(i);
237 | i--;
238 | }
239 | }
240 | }
241 |
242 |
243 | // change faces.
244 | int[] face_nums = new int[landmarkPoints.Count];
245 | for (int i = 0; i < face_nums.Length; i++)
246 | {
247 | face_nums[i] = i;
248 | }
249 | face_nums = face_nums.OrderBy(i => System.Guid.NewGuid()).ToArray();
250 | if (landmarkPoints.Count >= 2)
251 | {
252 | DlibFaceChanger faceChanger = new DlibFaceChanger();
253 | faceChanger.isShowingDebugFacePoints = displayDebugFacePoints;
254 |
255 | faceChanger.SetTargetImage(rgbaMat);
256 |
257 | for (int i = 1; i < face_nums.Length; i++)
258 | {
259 | faceChanger.AddFaceChangeData(rgbaMat, landmarkPoints[face_nums[0]], landmarkPoints[face_nums[i]], 1);
260 | }
261 |
262 | faceChanger.ChangeFace();
263 | faceChanger.Dispose();
264 | }
265 |
266 | // draw face rects.
267 | if (displayFaceRects && face_nums.Count() > 0)
268 | {
269 | int ann = face_nums[0];
270 | UnityEngine.Rect rect_ann = new UnityEngine.Rect(detectResult[ann].x, detectResult[ann].y, detectResult[ann].width, detectResult[ann].height);
271 | OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_ann, new Scalar(255, 255, 0, 255), 2);
272 |
273 | int bob = 0;
274 | for (int i = 1; i < face_nums.Length; i++)
275 | {
276 | bob = face_nums[i];
277 | UnityEngine.Rect rect_bob = new UnityEngine.Rect(detectResult[bob].x, detectResult[bob].y, detectResult[bob].width, detectResult[bob].height);
278 | OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_bob, new Scalar(255, 0, 0, 255), 2);
279 | }
280 | }
281 |
282 | frontalFaceChecker.Dispose();
283 |
284 | Texture2D texture = new Texture2D(rgbaMat.cols(), rgbaMat.rows(), TextureFormat.RGBA32, false);
285 | OpenCVForUnity.UnityUtils.Utils.matToTexture2D(rgbaMat, texture);
286 | gameObject.GetComponent().material.mainTexture = texture;
287 |
288 | rgbaMat.Dispose();
289 | }
290 |
291 | ///
292 | /// Raises the destroy event.
293 | ///
294 | void OnDestroy()
295 | {
296 | if (faceLandmarkDetector != null)
297 | faceLandmarkDetector.Dispose();
298 |
299 | if (cascade != null)
300 | cascade.Dispose();
301 |
302 | #if UNITY_WEBGL
303 | if (getFilePath_Coroutine != null)
304 | {
305 | StopCoroutine(getFilePath_Coroutine);
306 | ((IDisposable)getFilePath_Coroutine).Dispose();
307 | }
308 | #endif
309 | }
310 |
311 | ///
312 | /// Raises the back button click event.
313 | ///
314 | public void OnBackButtonClick()
315 | {
316 | SceneManager.LoadScene("FaceSwapperExample");
317 | }
318 |
319 | ///
320 | /// Raises the shuffle button click event.
321 | ///
322 | public void OnShuffleButtonClick()
323 | {
324 | if (imgTexture != null)
325 | Run();
326 | }
327 |
328 | ///
329 | /// Raises the use Dlib face detector toggle value changed event.
330 | ///
331 | public void OnUseDlibFaceDetecterToggleValueChanged()
332 | {
333 | if (useDlibFaceDetecterToggle.isOn)
334 | {
335 | useDlibFaceDetecter = true;
336 | }
337 | else
338 | {
339 | useDlibFaceDetecter = false;
340 | }
341 |
342 | if (imgTexture != null)
343 | Run();
344 | }
345 |
346 | ///
347 | /// Raises the filter non frontal faces toggle value changed event.
348 | ///
349 | public void OnFilterNonFrontalFacesToggleValueChanged()
350 | {
351 | if (filterNonFrontalFacesToggle.isOn)
352 | {
353 | filterNonFrontalFaces = true;
354 | }
355 | else
356 | {
357 | filterNonFrontalFaces = false;
358 | }
359 |
360 | if (imgTexture != null)
361 | Run();
362 | }
363 |
364 | ///
365 | /// Raises the display face rects toggle value changed event.
366 | ///
367 | public void OnDisplayFaceRectsToggleValueChanged()
368 | {
369 | if (displayFaceRectsToggle.isOn)
370 | {
371 | displayFaceRects = true;
372 | }
373 | else
374 | {
375 | displayFaceRects = false;
376 | }
377 |
378 | if (imgTexture != null)
379 | Run();
380 | }
381 |
382 | ///
383 | /// Raises the display debug face points toggle value changed event.
384 | ///
385 | public void OnDisplayDebugFacePointsToggleValueChanged()
386 | {
387 | if (displayDebugFacePointsToggle.isOn)
388 | {
389 | displayDebugFacePoints = true;
390 | }
391 | else
392 | {
393 | displayDebugFacePoints = false;
394 | }
395 |
396 | if (imgTexture != null)
397 | Run();
398 | }
399 | }
400 | }
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/Texture2DFaceSwapperExample/Texture2DFaceSwapperExample.cs:
--------------------------------------------------------------------------------
1 | using DlibFaceLandmarkDetector;
2 | using OpenCVForUnity.CoreModule;
3 | using OpenCVForUnity.FaceSwap;
4 | using OpenCVForUnity.ImgprocModule;
5 | using OpenCVForUnity.ObjdetectModule;
6 | using System;
7 | using System.Collections;
8 | using System.Collections.Generic;
9 | using System.Linq;
10 | using UnityEngine;
11 | using UnityEngine.SceneManagement;
12 | using UnityEngine.UI;
13 | using Rect = OpenCVForUnity.CoreModule.Rect;
14 |
15 | namespace FaceSwapperExample
16 | {
17 | ///
18 | /// Texture2D FaceSwapper Example
19 | ///
20 | public class Texture2DFaceSwapperExample : MonoBehaviour
21 | {
        /// <summary>
        /// Determines if use dlib face detector.
        /// </summary>
        public bool useDlibFaceDetecter = true;

        /// <summary>
        /// The use dlib face detecter toggle.
        /// </summary>
        public Toggle useDlibFaceDetecterToggle;

        /// <summary>
        /// Determines if filters non frontal faces.
        /// </summary>
        public bool filterNonFrontalFaces;

        /// <summary>
        /// The filter non frontal faces toggle.
        /// </summary>
        public Toggle filterNonFrontalFacesToggle;

        /// <summary>
        /// The frontal face rate lower limit.
        /// </summary>
        [Range(0.0f, 1.0f)]
        public float frontalFaceRateLowerLimit;

        /// <summary>
        /// Determines if uses the seamless clone method for the face copy.
        /// </summary>
        public bool useSeamlessClone = false;

        /// <summary>
        /// The use seamless clone toggle.
        /// </summary>
        public Toggle useSeamlessCloneToggle;

        /// <summary>
        /// Determines if displays face rects.
        /// </summary>
        public bool displayFaceRects = false;

        /// <summary>
        /// The toggle for switching face rects display state.
        /// </summary>
        public Toggle displayFaceRectsToggle;

        /// <summary>
        /// Determines if displays debug face points.
        /// </summary>
        public bool displayDebugFacePoints = false;

        /// <summary>
        /// The toggle for switching debug face points display state.
        /// </summary>
        public Toggle displayDebugFacePointsToggle;

        /// <summary>
        /// The image texture.
        /// </summary>
        Texture2D imgTexture;

        /// <summary>
        /// The cascade.
        /// </summary>
        CascadeClassifier cascade;

        /// <summary>
        /// The face landmark detector.
        /// </summary>
        FaceLandmarkDetector faceLandmarkDetector;

        /// <summary>
        /// The haarcascade_frontalface_alt_xml_filepath.
        /// </summary>
        string haarcascade_frontalface_alt_xml_filepath;

        /// <summary>
        /// The sp_human_face_68_dat_filepath.
        /// </summary>
        string sp_human_face_68_dat_filepath;

#if UNITY_WEBGL
        // Kept so OnDestroy can stop/dispose an in-flight path lookup.
        IEnumerator getFilePath_Coroutine;
#endif
106 |
        // Use this for initialization.
        // Resolves the cascade and Dlib model file paths, then runs the example.
        void Start()
        {
#if UNITY_WEBGL
            // On WebGL the bundled data files must be fetched asynchronously;
            // GetFilePath calls Run() when both paths are resolved.
            getFilePath_Coroutine = GetFilePath();
            StartCoroutine(getFilePath_Coroutine);
#else
            haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/haarcascade_frontalface_alt.xml");
            sp_human_face_68_dat_filepath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/sp_human_face_68.dat");
            Run();
#endif
        }
119 |
120 | #if UNITY_WEBGL
        /// <summary>
        /// Resolves the cascade and landmark model file paths asynchronously
        /// (WebGL only), then runs the example once both are available.
        /// </summary>
        private IEnumerator GetFilePath()
        {
            // Fetch the Haar cascade used by the OpenCV face detector.
            var getFilePathAsync_0_Coroutine = OpenCVForUnity.UnityUtils.Utils.getFilePathAsync("DlibFaceLandmarkDetector/haarcascade_frontalface_alt.xml", (result) =>
            {
                haarcascade_frontalface_alt_xml_filepath = result;
            });
            yield return getFilePathAsync_0_Coroutine;

            // Fetch the 68-point Dlib shape predictor model.
            var getFilePathAsync_1_Coroutine = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsync("DlibFaceLandmarkDetector/sp_human_face_68.dat", (result) =>
            {
                sp_human_face_68_dat_filepath = result;
            });
            yield return getFilePathAsync_1_Coroutine;

            // Clear the handle so OnDestroy does not try to stop a finished coroutine.
            getFilePath_Coroutine = null;

            Run();
        }
139 | #endif
140 |
141 | private void Run()
142 | {
143 | displayFaceRectsToggle.isOn = displayFaceRects;
144 | useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
145 | filterNonFrontalFacesToggle.isOn = filterNonFrontalFaces;
146 | useSeamlessCloneToggle.isOn = useSeamlessClone;
147 | displayDebugFacePointsToggle.isOn = displayDebugFacePoints;
148 |
149 | if (imgTexture == null)
150 | imgTexture = Resources.Load("family") as Texture2D;
151 |
152 | gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
153 | Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
154 |
155 | float width = 0;
156 | float height = 0;
157 |
158 | width = gameObject.transform.localScale.x;
159 | height = gameObject.transform.localScale.y;
160 |
161 |
162 | float widthScale = (float)Screen.width / width;
163 | float heightScale = (float)Screen.height / height;
164 | if (widthScale < heightScale)
165 | {
166 | Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
167 | }
168 | else
169 | {
170 | Camera.main.orthographicSize = height / 2;
171 | }
172 |
173 | Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
174 |
175 | OpenCVForUnity.UnityUtils.Utils.texture2DToMat(imgTexture, rgbaMat);
176 | Debug.Log("rgbaMat ToString " + rgbaMat.ToString());
177 |
178 | if (faceLandmarkDetector == null)
179 | faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
180 |
181 | FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(width, height);
182 |
183 | // detect faces.
184 | List detectResult = new List();
185 | if (useDlibFaceDetecter)
186 | {
187 | OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
188 | List result = faceLandmarkDetector.Detect();
189 |
190 | foreach (var unityRect in result)
191 | {
192 | detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
193 | }
194 | }
195 | else
196 | {
197 | if (cascade == null)
198 | cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
199 | //if (cascade.empty())
200 | //{
201 | // Debug.LogError("cascade file is not loaded. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
202 | //}
203 |
204 | // convert image to greyscale.
205 | Mat gray = new Mat();
206 | Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
207 |
208 | MatOfRect faces = new MatOfRect();
209 | Imgproc.equalizeHist(gray, gray);
210 | cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
211 | //Debug.Log ("faces " + faces.dump ());
212 |
213 | detectResult = faces.toList();
214 |
215 | // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
216 | foreach (Rect r in detectResult)
217 | {
218 | r.y += (int)(r.height * 0.1f);
219 | }
220 |
221 | gray.Dispose();
222 | }
223 |
224 | // detect face landmark points.
225 | OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
226 | List> landmarkPoints = new List>();
227 | foreach (var openCVRect in detectResult)
228 | {
229 | UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
230 |
231 | Debug.Log("face : " + rect);
232 | //OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);
233 |
234 | List points = faceLandmarkDetector.DetectLandmark(rect);
235 | //OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
236 | landmarkPoints.Add(points);
237 | }
238 |
239 |
240 |
241 | // filter non frontal facea.
242 | if (filterNonFrontalFaces)
243 | {
244 | for (int i = 0; i < landmarkPoints.Count; i++)
245 | {
246 | if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
247 | {
248 | detectResult.RemoveAt(i);
249 | landmarkPoints.RemoveAt(i);
250 | i--;
251 | }
252 | }
253 | }
254 |
255 |
256 | // swap faces.
257 | int[] face_nums = new int[landmarkPoints.Count];
258 | for (int i = 0; i < face_nums.Length; i++)
259 | {
260 | face_nums[i] = i;
261 | }
262 | face_nums = face_nums.OrderBy(i => System.Guid.NewGuid()).ToArray();
263 | if (landmarkPoints.Count >= 2)
264 | {
265 | DlibFaceSwapper faceSwapper = new DlibFaceSwapper();
266 | faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessClone;
267 | faceSwapper.isShowingDebugFacePoints = displayDebugFacePoints;
268 |
269 | int ann = 0, bob = 0;
270 | for (int i = 0; i < face_nums.Length - 1; i += 2)
271 | {
272 | ann = face_nums[i];
273 | bob = face_nums[i + 1];
274 |
275 | faceSwapper.SwapFaces(rgbaMat, landmarkPoints[ann], landmarkPoints[bob], 1);
276 |
277 | }
278 | faceSwapper.Dispose();
279 | }
280 |
281 | // draw face rects.
282 | if (displayFaceRects)
283 | {
284 | int ann = 0, bob = 0;
285 | for (int i = 0; i < face_nums.Length - 1; i += 2)
286 | {
287 | ann = face_nums[i];
288 | bob = face_nums[i + 1];
289 |
290 | UnityEngine.Rect rect_ann = new UnityEngine.Rect(detectResult[ann].x, detectResult[ann].y, detectResult[ann].width, detectResult[ann].height);
291 | UnityEngine.Rect rect_bob = new UnityEngine.Rect(detectResult[bob].x, detectResult[bob].y, detectResult[bob].width, detectResult[bob].height);
292 | Scalar color = new Scalar(UnityEngine.Random.Range(0, 256), UnityEngine.Random.Range(0, 256), UnityEngine.Random.Range(0, 256), 255);
293 | OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_ann, color, 2);
294 | OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_bob, color, 2);
295 | //Imgproc.putText (rgbaMat, "" + i % 2, new Point (rect_ann.xMin, rect_ann.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, color, 2, Imgproc.LINE_AA, false);
296 | //Imgproc.putText (rgbaMat, "" + (i % 2 + 1), new Point (rect_bob.xMin, rect_bob.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, color, 2, Imgproc.LINE_AA, false);
297 | }
298 | }
299 |
300 | frontalFaceChecker.Dispose();
301 |
302 | Texture2D texture = new Texture2D(rgbaMat.cols(), rgbaMat.rows(), TextureFormat.RGBA32, false);
303 | OpenCVForUnity.UnityUtils.Utils.matToTexture2D(rgbaMat, texture);
304 | gameObject.GetComponent().material.mainTexture = texture;
305 |
306 | rgbaMat.Dispose();
307 | }
308 |
309 | ///
310 | /// Raises the destroy event.
311 | ///
312 | void OnDestroy()
313 | {
314 | if (faceLandmarkDetector != null)
315 | faceLandmarkDetector.Dispose();
316 |
317 | if (cascade != null)
318 | cascade.Dispose();
319 |
320 | #if UNITY_WEBGL
321 | if (getFilePath_Coroutine != null)
322 | {
323 | StopCoroutine(getFilePath_Coroutine);
324 | ((IDisposable)getFilePath_Coroutine).Dispose();
325 | }
326 | #endif
327 | }
328 |
329 | ///
330 | /// Raises the back button click event.
331 | ///
332 | public void OnBackButtonClick()
333 | {
334 | SceneManager.LoadScene("FaceSwapperExample");
335 | }
336 |
337 | ///
338 | /// Raises the shuffle button click event.
339 | ///
340 | public void OnShuffleButtonClick()
341 | {
342 | if (imgTexture != null)
343 | Run();
344 | }
345 |
346 | ///
347 | /// Raises the use Dlib face detector toggle value changed event.
348 | ///
349 | public void OnUseDlibFaceDetecterToggleValueChanged()
350 | {
351 | if (useDlibFaceDetecterToggle.isOn)
352 | {
353 | useDlibFaceDetecter = true;
354 | }
355 | else
356 | {
357 | useDlibFaceDetecter = false;
358 | }
359 |
360 | if (imgTexture != null)
361 | Run();
362 | }
363 |
364 | ///
365 | /// Raises the filter non frontal faces toggle value changed event.
366 | ///
367 | public void OnFilterNonFrontalFacesToggleValueChanged()
368 | {
369 | if (filterNonFrontalFacesToggle.isOn)
370 | {
371 | filterNonFrontalFaces = true;
372 | }
373 | else
374 | {
375 | filterNonFrontalFaces = false;
376 | }
377 |
378 | if (imgTexture != null)
379 | Run();
380 | }
381 |
382 | ///
383 | /// Raises the use seamless clone toggle value changed event.
384 | ///
385 | public void OnUseSeamlessCloneToggleValueChanged()
386 | {
387 | if (useSeamlessCloneToggle.isOn)
388 | {
389 | useSeamlessClone = true;
390 | }
391 | else
392 | {
393 | useSeamlessClone = false;
394 | }
395 |
396 | if (imgTexture != null)
397 | Run();
398 | }
399 |
400 | ///
401 | /// Raises the display face rects toggle value changed event.
402 | ///
403 | public void OnDisplayFaceRectsToggleValueChanged()
404 | {
405 | if (displayFaceRectsToggle.isOn)
406 | {
407 | displayFaceRects = true;
408 | }
409 | else
410 | {
411 | displayFaceRects = false;
412 | }
413 |
414 | if (imgTexture != null)
415 | Run();
416 | }
417 |
418 | ///
419 | /// Raises the display debug face points toggle value changed event.
420 | ///
421 | public void OnDisplayDebugFacePointsToggleValueChanged()
422 | {
423 | if (displayDebugFacePointsToggle.isOn)
424 | {
425 | displayDebugFacePoints = true;
426 | }
427 | else
428 | {
429 | displayDebugFacePoints = false;
430 | }
431 |
432 | if (imgTexture != null)
433 | Run();
434 | }
435 | }
436 | }
437 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/RectangleTracker/Scripts/RectangleTracker.cs:
--------------------------------------------------------------------------------
1 | using OpenCVForUnity.CoreModule;
2 | using System;
3 | using System.Collections.Generic;
4 | using System.Linq;
5 | using UnityEngine;
6 | using Rect = OpenCVForUnity.CoreModule.Rect;
7 |
8 | namespace OpenCVForUnity.RectangleTrack
9 | {
10 | ///
11 | /// Rectangle tracker.
12 | /// Referring to https://github.com/Itseez/opencv/blob/master/modules/objdetect/src/detection_based_tracker.cpp.
13 | /// v 1.0.4
14 | ///
15 | public class RectangleTracker
16 | {
17 | public List trackedObjects
18 | {
19 | get { return _trackedObjects; }
20 | }
21 |
22 | private List _trackedObjects;
23 |
24 |
25 | public TrackerParameters trackerParameters
26 | {
27 | get { return _trackerParameters; }
28 | set
29 | {
30 | if (value == null)
31 | {
32 | throw new ArgumentNullException("value");
33 | }
34 | _trackerParameters = value;
35 | }
36 | }
37 |
38 | private TrackerParameters _trackerParameters;
39 |
40 |
41 | public List weightsPositionsSmoothing
42 | {
43 | get { return _weightsPositionsSmoothing; }
44 | set
45 | {
46 | if (value == null)
47 | {
48 | throw new ArgumentNullException("value");
49 | }
50 | _weightsPositionsSmoothing = value;
51 | }
52 | }
53 |
54 | private List _weightsPositionsSmoothing = new List();
55 |
56 | public List weightsSizesSmoothing
57 | {
58 | get { return _weightsSizesSmoothing; }
59 | set
60 | {
61 | if (value == null)
62 | {
63 | throw new ArgumentNullException("value");
64 | }
65 | _weightsSizesSmoothing = value;
66 | }
67 | }
68 |
69 | private List _weightsSizesSmoothing = new List();
70 |
71 | public RectangleTracker(TrackerParameters trackerParamerers = null)
72 | {
73 | _trackedObjects = new List();
74 |
75 | if (trackerParamerers != null)
76 | {
77 | this._trackerParameters = trackerParamerers;
78 | }
79 | else
80 | {
81 | this._trackerParameters = new TrackerParameters();
82 | }
83 |
84 | _weightsPositionsSmoothing.Add(1);
85 | _weightsSizesSmoothing.Add(0.5f);
86 | _weightsSizesSmoothing.Add(0.3f);
87 | _weightsSizesSmoothing.Add(0.2f);
88 | }
89 |
90 | public enum TrackedRectState : int
91 | {
92 | NEW_RECTANGLE = -1,
93 | INTERSECTED_RECTANGLE = -2
94 | }
95 |
96 |
97 | public void GetObjects(List result, bool smoothing = true)
98 | {
99 | result.Clear();
100 |
101 | int count = _trackedObjects.Count;
102 | for (int i = 0; i < count; i++)
103 | {
104 | Rect r;
105 | if (smoothing)
106 | {
107 | r = GetSmoothingRect(i);
108 | }
109 | else
110 | {
111 | r = _trackedObjects[i].position;
112 | }
113 |
114 | if (_trackedObjects[i].state > TrackedState.NEW_DISPLAYED && _trackedObjects[i].state < TrackedState.NEW_HIDED)
115 | result.Add(r);
116 |
117 | //LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
118 | //Debug.Log("GetObjects" + r.width + " " + r.height + " " + r.x + " " + r.y + " " + r.width + " " + r.height + " " + trackedObjects[i].state + " " + trackedObjects[i].numDetectedFrames + " " + trackedObjects[i].numFramesNotDetected);
119 | }
120 | }
121 |
122 | public void GetObjects(List result, bool smoothing = true)
123 | {
124 | result.Clear();
125 |
126 | int count = _trackedObjects.Count;
127 | for (int i = 0; i < count; i++)
128 | {
129 | Rect r;
130 | if (smoothing)
131 | {
132 | r = GetSmoothingRect(i);
133 | }
134 | else
135 | {
136 | r = _trackedObjects[i].position;
137 | }
138 |
139 | result.Add(new TrackedRect(_trackedObjects[i].id, r, _trackedObjects[i].state, _trackedObjects[i].numDetectedFrames, _trackedObjects[i].numFramesNotDetected));
140 |
141 | //LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
142 | //Debug.Log("GetObjects" + r.width + " " + r.height + " " + r.x + " " + r.y + " " + r.width + " " + r.height + " " + trackedObjects[i].state + " " + trackedObjects[i].numDetectedFrames + " " + trackedObjects[i].numFramesNotDetected);
143 | }
144 | }
145 |
146 | public void UpdateTrackedObjects(List detectedObjects)
147 | {
148 | if (detectedObjects == null)
149 | throw new ArgumentNullException("detectedObjects");
150 |
151 | Rect[] correctionRects = CreateCorrectionBySpeedOfRects();
152 |
153 | int N1 = (int)_trackedObjects.Count;
154 | int N2 = (int)detectedObjects.Count;
155 |
156 | for (int i = 0; i < N1; i++)
157 | {
158 | _trackedObjects[i].numDetectedFrames++;
159 | }
160 |
161 | int[] correspondence = Enumerable.Repeat((int)TrackedRectState.NEW_RECTANGLE, N2).ToArray();
162 |
163 |
164 | for (int i = 0; i < N1; i++)
165 | {
166 | TrackedObject curObject = _trackedObjects[i];
167 |
168 | int bestIndex = -1;
169 | int bestArea = -1;
170 |
171 | //int numpositions = (int)curObject.lastPositions.Count;
172 | //if (numpositions > 0) UnityEngine.Debug.LogError("numpositions > 0 is false");
173 |
174 | //OpenCVRect prevRect = curObject.lastPositions[numpositions - 1];
175 | Rect prevRect = correctionRects[i];
176 |
177 | for (int j = 0; j < N2; j++)
178 | {
179 | if (correspondence[j] >= 0)
180 | {
181 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + j + " is rejected, because it has correspondence=" + correspondence[j]);
182 | continue;
183 | }
184 |
185 | if (correspondence[j] != (int)TrackedRectState.NEW_RECTANGLE)
186 | {
187 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + j + " is rejected, because it is intersected with another rectangle");
188 |
189 | continue;
190 | }
191 |
192 | if (IsCollideByRectangle(prevRect, detectedObjects[j], _trackerParameters.coeffRectangleOverlap))
193 | {
194 | Rect r = Intersect(prevRect, detectedObjects[j]);
195 | if ((r.width > 0) && (r.height > 0))
196 | {
197 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: There is intersection between prevRect and detectedRect r={" + r.x + ", " + r.y + ", " + r.width + ", " + r.height + "]");
198 |
199 | correspondence[j] = (int)TrackedRectState.INTERSECTED_RECTANGLE;
200 |
201 | if (r.area() > bestArea)
202 | {
203 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: The area of intersection is " + r.area() + " it is better than bestArea= " + bestArea);
204 |
205 | bestIndex = j;
206 | bestArea = (int)r.area();
207 | }
208 | }
209 | }
210 | }
211 |
212 | if (bestIndex >= 0)
213 | {
214 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: The best correspondence for i=" + i + " is j=" + bestIndex);
215 |
216 | correspondence[bestIndex] = i;
217 |
218 | Rect bestRect = detectedObjects[bestIndex];
219 |
220 | for (int j = 0; j < N2; j++)
221 | {
222 | if (correspondence[j] >= 0)
223 | continue;
224 |
225 | if (IsCollideByRectangle(detectedObjects[j], bestRect, _trackerParameters.coeffRectangleOverlap))
226 | {
227 | Rect r = Intersect(detectedObjects[j], bestRect);
228 |
229 | if ((r.width > 0) && (r.height > 0))
230 | {
231 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: Found intersection between rectangles j= " + j + " and bestIndex= " + bestIndex + " rectangle j= " + j + " is marked as intersected");
232 |
233 | correspondence[j] = (int)TrackedRectState.INTERSECTED_RECTANGLE;
234 | }
235 | }
236 | }
237 | }
238 | else
239 | {
240 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: There is no correspondence for i= " + i);
241 | curObject.numFramesNotDetected++;
242 | }
243 | }
244 |
245 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: start second cycle");
246 | for (int j = 0; j < N2; j++)
247 | {
248 | int i = correspondence[j];
249 | if (i >= 0)
250 | {//add position
251 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: add position");
252 |
253 | _trackedObjects[i].lastPositions.Add(detectedObjects[j]);
254 | while ((int)_trackedObjects[i].lastPositions.Count > (int)_trackerParameters.numLastPositionsToTrack)
255 | {
256 | _trackedObjects[i].lastPositions.Remove(_trackedObjects[i].lastPositions[0]);
257 | }
258 | _trackedObjects[i].numFramesNotDetected = 0;
259 | if (_trackedObjects[i].state != TrackedState.DELETED)
260 | _trackedObjects[i].state = TrackedState.DISPLAYED;
261 | }
262 | else if (i == (int)TrackedRectState.NEW_RECTANGLE)
263 | { //new object
264 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: new object");
265 |
266 | _trackedObjects.Add(new TrackedObject(detectedObjects[j]));
267 | }
268 | else
269 | {
270 | //Debug.Log("DetectionBasedTracker::updateTrackedObjects: was auxiliary intersection");
271 | }
272 | }
273 |
274 |
275 | int t = 0;
276 | TrackedObject it;
277 | while (t < _trackedObjects.Count)
278 | {
279 | it = _trackedObjects[t];
280 |
281 | if (it.state == TrackedState.DELETED)
282 | {
283 | _trackedObjects.Remove(it);
284 | }
285 | else if ((it.numFramesNotDetected > _trackerParameters.maxTrackLifetime)//ALL
286 | ||
287 | ((it.numDetectedFrames <= _trackerParameters.numStepsToWaitBeforeFirstShow)
288 | &&
289 | (it.numFramesNotDetected > _trackerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown)))
290 | {
291 | it.state = TrackedState.DELETED;
292 | t++;
293 | }
294 | else if (it.state >= TrackedState.DISPLAYED)
295 | {//DISPLAYED, NEW_DISPLAYED, HIDED
296 |
297 | if (it.numDetectedFrames < _trackerParameters.numStepsToWaitBeforeFirstShow)
298 | {
299 | it.state = TrackedState.PENDING;
300 | }
301 | else if (it.numDetectedFrames == _trackerParameters.numStepsToWaitBeforeFirstShow)
302 | {
303 | //i, trackedObjects[i].numDetectedFrames, innerParameters.numStepsToWaitBeforeFirstShow);
304 | it.state = TrackedState.NEW_DISPLAYED;
305 | }
306 | else if (it.numFramesNotDetected == _trackerParameters.numStepsToShowWithoutDetecting)
307 | {
308 | it.state = TrackedState.NEW_HIDED;
309 | }
310 | else if (it.numFramesNotDetected > _trackerParameters.numStepsToShowWithoutDetecting)
311 | {
312 | it.state = TrackedState.HIDED;
313 | }
314 |
315 | t++;
316 | }
317 | else
318 | {//NEW
319 | t++;
320 | }
321 | }
322 | }
323 |
324 | public Rect[] CreateCorrectionBySpeedOfRects()
325 | {
326 | //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
327 | Rect[] rectsWhereRegions = new Rect[_trackedObjects.Count];
328 |
329 | int count = _trackedObjects.Count;
330 | for (int i = 0; i < count; i++)
331 | {
332 | int n = _trackedObjects[i].lastPositions.Count;
333 | //if (n > 0) UnityEngine.Debug.LogError("n > 0 is false");
334 |
335 | Rect r = _trackedObjects[i].lastPositions[n - 1].clone();
336 |
337 | //if (r.area() == 0)
338 | //{
339 | // Debug.Log("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
340 | // continue;
341 | //}
342 |
343 | //correction by speed of rectangle
344 | if (n > 1)
345 | {
346 | Point center = CenterRect(r);
347 | Point center_prev = CenterRect(_trackedObjects[i].lastPositions[n - 2]);
348 | Point shift = new Point((center.x - center_prev.x) * _trackerParameters.coeffObjectSpeedUsingInPrediction,
349 | (center.y - center_prev.y) * _trackerParameters.coeffObjectSpeedUsingInPrediction);
350 |
351 | r.x += (int)Math.Round(shift.x);
352 | r.y += (int)Math.Round(shift.y);
353 | }
354 |
355 | rectsWhereRegions[i] = r;
356 | }
357 |
358 | return rectsWhereRegions;
359 | }
360 |
361 | public Rect[] CreateRawRects()
362 | {
363 | Rect[] rectsWhereRegions = new Rect[_trackedObjects.Count];
364 |
365 | int count = _trackedObjects.Count;
366 | for (int i = 0; i < count; i++)
367 | {
368 | rectsWhereRegions[i] = _trackedObjects[i].position;
369 | }
370 |
371 | return rectsWhereRegions;
372 | }
373 |
374 | private Point CenterRect(Rect r)
375 | {
376 | return new Point(r.x + (r.width / 2), r.y + (r.height / 2));
377 | }
378 |
379 | private Rect GetSmoothingRect(int i)
380 | {
381 | //Debug.Log("trackedObjects[i].numFramesNotDetected: " + trackedObjects[i].numFramesNotDetected);
382 |
383 | List weightsSizesSmoothing = _weightsSizesSmoothing;
384 | List weightsPositionsSmoothing = _weightsPositionsSmoothing;
385 |
386 | List lastPositions = _trackedObjects[i].lastPositions;
387 |
388 | int N = lastPositions.Count;
389 | if (N <= 0)
390 | {
391 | Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: no positions for i=" + i);
392 | return new Rect();
393 | }
394 |
395 | int Nsize = Math.Min(N, (int)weightsSizesSmoothing.Count);
396 | int Ncenter = Math.Min(N, (int)weightsPositionsSmoothing.Count);
397 |
398 | Point center = new Point();
399 | double w = 0, h = 0;
400 | if (Nsize > 0)
401 | {
402 | double sum = 0;
403 | for (int j = 0; j < Nsize; j++)
404 | {
405 | int k = N - j - 1;
406 | w += lastPositions[k].width * weightsSizesSmoothing[j];
407 | h += lastPositions[k].height * weightsSizesSmoothing[j];
408 | sum += weightsSizesSmoothing[j];
409 | }
410 | w /= sum;
411 | h /= sum;
412 | }
413 | else
414 | {
415 | w = lastPositions[N - 1].width;
416 | h = lastPositions[N - 1].height;
417 | }
418 |
419 | if (Ncenter > 0)
420 | {
421 | double sum = 0;
422 | for (int j = 0; j < Ncenter; j++)
423 | {
424 | int k = N - j - 1;
425 | Point tl = lastPositions[k].tl();
426 | Point br = lastPositions[k].br();
427 | Point c1;
428 |
429 | c1 = new Point(tl.x * 0.5f, tl.y * 0.5f);
430 | Point c2;
431 |
432 | c2 = new Point(br.x * 0.5f, br.y * 0.5f);
433 | c1 = new Point(c1.x + c2.x, c1.y + c2.y);
434 |
435 | center = new Point(center.x + (c1.x * weightsPositionsSmoothing[j]), center.y + (c1.y * weightsPositionsSmoothing[j]));
436 | sum += weightsPositionsSmoothing[j];
437 | }
438 | center = new Point(center.x * (1 / sum), center.y * (1 / sum));
439 | }
440 | else
441 | {
442 | int k = N - 1;
443 | Point tl = lastPositions[k].tl();
444 | Point br = lastPositions[k].br();
445 | Point c1;
446 |
447 | c1 = new Point(tl.x * 0.5f, tl.y * 0.5f);
448 | Point c2;
449 |
450 | c2 = new Point(br.x * 0.5f, br.y * 0.5f);
451 |
452 | center = new Point(c1.x + c2.x, c1.y + c2.y);
453 | }
454 | Point tl2 = new Point(center.x - (w * 0.5f), center.y - (h * 0.5f));
455 | Rect res = new Rect((int)Math.Round(tl2.x), (int)Math.Round(tl2.y), (int)Math.Round(w), (int)Math.Round(h));
456 |
457 | //Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: Result for i=" + i + ": {" + res.x + ", " + res.y + ", " + res.width + ", " + res.height + "}");
458 |
459 | return res;
460 | }
461 |
462 | public void Reset()
463 | {
464 | _trackedObjects.Clear();
465 | }
466 |
467 | private Rect Intersect(Rect a, Rect b)
468 | {
469 | int x1 = Math.Max(a.x, b.x);
470 | int x2 = Math.Min(a.x + a.width, b.x + b.width);
471 | int y1 = Math.Max(a.y, b.y);
472 | int y2 = Math.Min(a.y + a.height, b.y + b.height);
473 |
474 | if (x2 >= x1 && y2 >= y1)
475 | return new Rect(x1, y1, x2 - x1, y2 - y1);
476 | else
477 | return new Rect();
478 | }
479 |
480 | //private bool IsCollideByCircle(Rect a, Rect b, float coeffRectangleOverlap)
481 | //{
482 | // int r1 = (int)(a.width / 2.0f);
483 | // int r2 = (int)(b.width / 2.0f);
484 | // int px1 = a.x + r1;
485 | // int py1 = a.y + r1;
486 | // int px2 = b.x + r2;
487 | // int py2 = b.y + r2;
488 |
489 | // if ((px2 - px1) * (px2 - px1) + (py2 - py1) * (py2 - py1) <= (r1 + r2) * (r1 + r2) * coeffRectangleOverlap)
490 | // return true;
491 | // else
492 | // return false;
493 | //}
494 |
495 | private bool IsCollideByRectangle(Rect a, Rect b, float coeffRectangleOverlap)
496 | {
497 | int mw = (int)(a.width * coeffRectangleOverlap);
498 | int mh = (int)(a.height * coeffRectangleOverlap);
499 | int mx1 = (int)(a.x + (a.width - mw) / 2.0f);
500 | int my1 = (int)(a.y + (a.height - mh) / 2.0f);
501 | int mx2 = (int)(mx1 + mw);
502 | int my2 = (int)(my1 + mh);
503 |
504 | int ew = (int)(b.width * coeffRectangleOverlap);
505 | int eh = (int)(b.height * coeffRectangleOverlap);
506 | int ex1 = (int)(b.x + (b.width - ew) / 2.0f);
507 | int ey1 = (int)(b.y + (b.height - eh) / 2.0f);
508 | int ex2 = (int)(ex1 + ew);
509 | int ey2 = (int)(ey1 + eh);
510 |
511 | if (mx1 <= ex2 && ex1 <= mx2 && my1 <= ey2 && ey1 <= my2)
512 | return true;
513 | else
514 | return false;
515 | }
516 |
517 | public void Dispose()
518 | {
519 | Reset();
520 | }
521 | }
522 | }
523 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/WebCamTextureFaceSwapperExample/WebCamTextureFaceSwapperExample.cs:
--------------------------------------------------------------------------------
1 | using DlibFaceLandmarkDetector;
2 | using OpenCVForUnity.CoreModule;
3 | using OpenCVForUnity.FaceSwap;
4 | using OpenCVForUnity.ImgprocModule;
5 | using OpenCVForUnity.ObjdetectModule;
6 | using OpenCVForUnity.RectangleTrack;
7 | using OpenCVForUnity.UnityUtils.Helper;
8 | using System;
9 | using System.Collections;
10 | using System.Collections.Generic;
11 | using UnityEngine;
12 | using UnityEngine.SceneManagement;
13 | using UnityEngine.UI;
14 | using Rect = OpenCVForUnity.CoreModule.Rect;
15 |
16 | namespace FaceSwapperExample
17 | {
18 | ///
19 | /// WebCamTexture FaceSwapper Example
20 | ///
21 | [RequireComponent(typeof(WebCamTextureToMatHelper))]
22 | public class WebCamTextureFaceSwapperExample : MonoBehaviour
23 | {
24 | ///
25 | /// Determines if use dlib face detector.
26 | ///
27 | public bool useDlibFaceDetecter = false;
28 |
29 | ///
30 | /// The use dlib face detecter toggle.
31 | ///
32 | public Toggle useDlibFaceDetecterToggle;
33 |
34 | ///
35 | /// Determines if filters non frontal faces.
36 | ///
37 | public bool filterNonFrontalFaces;
38 |
39 | ///
40 | /// The filter non frontal faces toggle.
41 | ///
42 | public Toggle filterNonFrontalFacesToggle;
43 |
44 | ///
45 | /// The frontal face rate lower limit.
46 | ///
47 | [Range(0.0f, 1.0f)]
48 | public float frontalFaceRateLowerLimit;
49 |
50 | ///
51 | /// Determines if enables noise filter.
52 | ///
53 | public bool enableNoiseFilter = true;
54 |
55 | ///
56 | /// The enable noise filter toggle.
57 | ///
58 | public Toggle enableNoiseFilterToggle;
59 |
60 | ///
61 | /// Determines if uses the seamless clone method for the face copy.
62 | ///
63 | public bool useSeamlessClone = false;
64 |
65 | ///
66 | /// The use seamless clone toggle.
67 | ///
68 | public Toggle useSeamlessCloneToggle;
69 |
70 | ///
71 | /// Determines if displays face rects.
72 | ///
73 | public bool displayFaceRects = false;
74 |
75 | ///
76 | /// The toggle for switching face rects display state.
77 | ///
78 | public Toggle displayFaceRectsToggle;
79 |
80 | ///
81 | /// Determines if displays debug face points.
82 | ///
83 | public bool displayDebugFacePoints = false;
84 |
85 | ///
86 | /// The toggle for switching debug face points display state.
87 | ///
88 | public Toggle displayDebugFacePointsToggle;
89 |
90 | ///
91 | /// The gray mat.
92 | ///
93 | Mat grayMat;
94 |
95 | ///
96 | /// The texture.
97 | ///
98 | Texture2D texture;
99 |
100 | ///
101 | /// The cascade.
102 | ///
103 | CascadeClassifier cascade;
104 |
105 | ///
106 | /// The web cam texture to mat helper.
107 | ///
108 | WebCamTextureToMatHelper webCamTextureToMatHelper;
109 |
110 | ///
111 | /// The face landmark detector.
112 | ///
113 | FaceLandmarkDetector faceLandmarkDetector;
114 |
115 | ///
116 | /// The mean points filter dictionary.
117 | ///
118 | Dictionary lowPassFilterDict;
119 |
120 | ///
121 | /// The optical flow points filter dictionary.
122 | ///
123 | Dictionary opticalFlowFilterDict;
124 |
125 | ///
126 | /// The face Swaper.
127 | ///
128 | DlibFaceSwapper faceSwapper;
129 |
130 | ///
131 | /// The detection based tracker.
132 | ///
133 | RectangleTracker rectangleTracker;
134 |
135 | ///
136 | /// The frontal face checker.
137 | ///
138 | FrontalFaceChecker frontalFaceChecker;
139 |
140 | ///
141 | /// The haarcascade_frontalface_alt_xml_filepath.
142 | ///
143 | string haarcascade_frontalface_alt_xml_filepath;
144 |
145 | ///
146 | /// The sp_human_face_68_dat_filepath.
147 | ///
148 | string sp_human_face_68_dat_filepath;
149 |
150 | ///
151 | /// The FPS monitor.
152 | ///
153 | FpsMonitor fpsMonitor;
154 |
155 | #if UNITY_WEBGL
156 | IEnumerator getFilePath_Coroutine;
157 | #endif
158 |
159 | // Use this for initialization
160 | void Start()
161 | {
162 | fpsMonitor = GetComponent();
163 |
164 | webCamTextureToMatHelper = gameObject.GetComponent();
165 |
166 | #if UNITY_WEBGL
167 | getFilePath_Coroutine = GetFilePath();
168 | StartCoroutine(getFilePath_Coroutine);
169 | #else
170 | haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/haarcascade_frontalface_alt.xml");
171 | sp_human_face_68_dat_filepath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/sp_human_face_68.dat");
172 | Run();
173 | #endif
174 | }
175 |
176 | #if UNITY_WEBGL
177 | private IEnumerator GetFilePath()
178 | {
179 | var getFilePathAsync_0_Coroutine = OpenCVForUnity.UnityUtils.Utils.getFilePathAsync("DlibFaceLandmarkDetector/haarcascade_frontalface_alt.xml", (result) =>
180 | {
181 | haarcascade_frontalface_alt_xml_filepath = result;
182 | });
183 | yield return getFilePathAsync_0_Coroutine;
184 |
185 | var getFilePathAsync_1_Coroutine = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsync("DlibFaceLandmarkDetector/sp_human_face_68.dat", (result) =>
186 | {
187 | sp_human_face_68_dat_filepath = result;
188 | });
189 | yield return getFilePathAsync_1_Coroutine;
190 |
191 | getFilePath_Coroutine = null;
192 |
193 | Run();
194 | }
195 | #endif
196 |
197 | private void Run()
198 | {
199 | rectangleTracker = new RectangleTracker();
200 |
201 | faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
202 |
203 | lowPassFilterDict = new Dictionary();
204 | opticalFlowFilterDict = new Dictionary();
205 |
206 | faceSwapper = new DlibFaceSwapper();
207 | faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessClone;
208 | faceSwapper.isShowingDebugFacePoints = displayDebugFacePoints;
209 |
210 | webCamTextureToMatHelper.Initialize();
211 |
212 | displayFaceRectsToggle.isOn = displayFaceRects;
213 | useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
214 | enableNoiseFilterToggle.isOn = enableNoiseFilter;
215 | filterNonFrontalFacesToggle.isOn = filterNonFrontalFaces;
216 | useSeamlessCloneToggle.isOn = useSeamlessClone;
217 | displayDebugFacePointsToggle.isOn = displayDebugFacePoints;
218 | }
219 |
220 | ///
221 | /// Raises the web cam texture to mat helper initialized event.
222 | ///
223 | public void OnWebCamTextureToMatHelperInitialized()
224 | {
225 | Debug.Log("OnWebCamTextureToMatHelperInitialized");
226 |
227 | Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
228 |
229 | texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
230 |
231 |
232 | gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
233 | Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
234 |
235 | if (fpsMonitor != null)
236 | {
237 | fpsMonitor.Add("width", webCamTextureMat.width().ToString());
238 | fpsMonitor.Add("height", webCamTextureMat.height().ToString());
239 | fpsMonitor.Add("orientation", Screen.orientation.ToString());
240 | }
241 |
242 |
243 | float width = gameObject.transform.localScale.x;
244 | float height = gameObject.transform.localScale.y;
245 |
246 | float widthScale = (float)Screen.width / width;
247 | float heightScale = (float)Screen.height / height;
248 | if (widthScale < heightScale)
249 | {
250 | Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
251 | }
252 | else
253 | {
254 | Camera.main.orthographicSize = height / 2;
255 | }
256 |
257 | gameObject.GetComponent().material.mainTexture = texture;
258 |
259 |
260 |
261 | grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
262 | cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
263 | //if (cascade.empty())
264 | //{
265 | // Debug.LogError("cascade file is not loaded. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
266 | //}
267 |
268 | frontalFaceChecker = new FrontalFaceChecker(width, height);
269 | }
270 |
271 | ///
272 | /// Raises the web cam texture to mat helper disposed event.
273 | ///
274 | public void OnWebCamTextureToMatHelperDisposed()
275 | {
276 | Debug.Log("OnWebCamTextureToMatHelperDisposed");
277 |
278 | grayMat.Dispose();
279 |
280 | if (texture != null)
281 | {
282 | Texture2D.Destroy(texture);
283 | texture = null;
284 | }
285 |
286 | rectangleTracker.Reset();
287 |
288 | foreach (var key in lowPassFilterDict.Keys)
289 | {
290 | lowPassFilterDict[key].Dispose();
291 | }
292 | lowPassFilterDict.Clear();
293 | foreach (var key in opticalFlowFilterDict.Keys)
294 | {
295 | opticalFlowFilterDict[key].Dispose();
296 | }
297 | opticalFlowFilterDict.Clear();
298 |
299 | frontalFaceChecker.Dispose();
300 | }
301 |
302 | ///
303 | /// Raises the web cam texture to mat helper error occurred event.
304 | ///
305 | /// Error code.
306 | public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
307 | {
308 | Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
309 | }
310 |
311 | // Update is called once per frame
312 | void Update()
313 | {
314 |
315 | if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
316 | {
317 |
318 | Mat rgbaMat = webCamTextureToMatHelper.GetMat();
319 |
320 | // detect faces.
321 | List detectResult = new List();
322 | if (useDlibFaceDetecter)
323 | {
324 | OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
325 | List result = faceLandmarkDetector.Detect();
326 |
327 | foreach (var unityRect in result)
328 | {
329 | detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
330 | }
331 | }
332 | else
333 | {
334 | // convert image to greyscale.
335 | Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
336 |
337 | using (Mat equalizeHistMat = new Mat())
338 | using (MatOfRect faces = new MatOfRect())
339 | {
340 | Imgproc.equalizeHist(grayMat, equalizeHistMat);
341 |
342 | cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
343 |
344 | detectResult = faces.toList();
345 |
346 | // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
347 | foreach (Rect r in detectResult)
348 | {
349 | r.y += (int)(r.height * 0.1f);
350 | }
351 | }
352 | }
353 |
354 | // face tracking.
355 | List trackedRects = new List();
356 | rectangleTracker.UpdateTrackedObjects(detectResult);
357 | rectangleTracker.GetObjects(trackedRects, true);
358 |
359 | // create noise filter.
360 | foreach (var openCVRect in trackedRects)
361 | {
362 | if (openCVRect.state == TrackedState.NEW)
363 | {
364 | if (!lowPassFilterDict.ContainsKey(openCVRect.id))
365 | lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
366 | if (!opticalFlowFilterDict.ContainsKey(openCVRect.id))
367 | opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
368 | }
369 | else if (openCVRect.state == TrackedState.DELETED)
370 | {
371 | if (lowPassFilterDict.ContainsKey(openCVRect.id))
372 | {
373 | lowPassFilterDict[openCVRect.id].Dispose();
374 | lowPassFilterDict.Remove(openCVRect.id);
375 | }
376 | if (opticalFlowFilterDict.ContainsKey(openCVRect.id))
377 | {
378 | opticalFlowFilterDict[openCVRect.id].Dispose();
379 | opticalFlowFilterDict.Remove(openCVRect.id);
380 | }
381 | }
382 | }
383 |
384 | // detect face landmark points.
385 | OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
386 | List> landmarkPoints = new List>();
387 | foreach (var openCVRect in trackedRects)
388 | {
389 | if (openCVRect.state > TrackedState.NEW_DISPLAYED && openCVRect.state < TrackedState.NEW_HIDED)
390 | {
391 |
392 | UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
393 | List points = faceLandmarkDetector.DetectLandmark(rect);
394 |
395 | // apply noise filter.
396 | if (enableNoiseFilter)
397 | {
398 | opticalFlowFilterDict[openCVRect.id].Process(rgbaMat, points, points);
399 | lowPassFilterDict[openCVRect.id].Process(rgbaMat, points, points);
400 | }
401 |
402 | landmarkPoints.Add(points);
403 | }
404 | }
405 |
406 | // filter non frontal faces.
407 | if (filterNonFrontalFaces)
408 | {
409 | for (int i = 0; i < landmarkPoints.Count; i++)
410 | {
411 | if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
412 | {
413 | trackedRects.RemoveAt(i);
414 | landmarkPoints.RemoveAt(i);
415 | i--;
416 | }
417 | }
418 | }
419 |
420 | // face swapping.
421 | if (landmarkPoints.Count >= 2)
422 | {
423 | int ann = 0, bob = 1;
424 | for (int i = 0; i < landmarkPoints.Count - 1; i += 2)
425 | {
426 | ann = i;
427 | bob = i + 1;
428 |
429 | faceSwapper.SwapFaces(rgbaMat, landmarkPoints[ann], landmarkPoints[bob], 1);
430 | }
431 | }
432 |
433 | // draw face rects.
434 | if (displayFaceRects)
435 | {
436 | for (int i = 0; i < trackedRects.Count; i++)
437 | {
438 | Rect openCVRect = trackedRects[i];
439 | UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
440 | OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
441 | //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
442 | //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
443 | }
444 | }
445 |
446 | //Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
447 |
448 | OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbaMat, texture);
449 | }
450 | }
451 |
452 |
453 | ///
454 | /// Raises the destroy event.
455 | ///
456 | void OnDestroy()
457 | {
458 | if (webCamTextureToMatHelper != null)
459 | webCamTextureToMatHelper.Dispose();
460 |
461 | if (cascade != null)
462 | cascade.Dispose();
463 |
464 | if (rectangleTracker != null)
465 | rectangleTracker.Dispose();
466 |
467 | if (faceLandmarkDetector != null)
468 | faceLandmarkDetector.Dispose();
469 |
470 | foreach (var key in lowPassFilterDict.Keys)
471 | {
472 | lowPassFilterDict[key].Dispose();
473 | }
474 | lowPassFilterDict.Clear();
475 | foreach (var key in opticalFlowFilterDict.Keys)
476 | {
477 | opticalFlowFilterDict[key].Dispose();
478 | }
479 | opticalFlowFilterDict.Clear();
480 |
481 | if (faceSwapper != null)
482 | faceSwapper.Dispose();
483 |
484 | #if UNITY_WEBGL
485 | if (getFilePath_Coroutine != null)
486 | {
487 | StopCoroutine(getFilePath_Coroutine);
488 | ((IDisposable)getFilePath_Coroutine).Dispose();
489 | }
490 | #endif
491 | }
492 |
493 | ///
494 | /// Raises the back button click event.
495 | ///
496 | public void OnBackButtonClick()
497 | {
498 | SceneManager.LoadScene("FaceSwapperExample");
499 | }
500 |
501 | ///
502 | /// Raises the play button click event.
503 | ///
504 | public void OnPlayButtonClick()
505 | {
506 | webCamTextureToMatHelper.Play();
507 | }
508 |
509 | ///
510 | /// Raises the pause button click event.
511 | ///
512 | public void OnPauseButtonClick()
513 | {
514 | webCamTextureToMatHelper.Pause();
515 | }
516 |
517 | ///
518 | /// Raises the stop button click event.
519 | ///
520 | public void OnStopButtonClick()
521 | {
522 | webCamTextureToMatHelper.Stop();
523 | }
524 |
525 | ///
526 | /// Raises the change camera button click event.
527 | ///
528 | public void OnChangeCameraButtonClick()
529 | {
530 | webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing();
531 | }
532 |
533 | ///
534 | /// Raises the use Dlib face detector toggle value changed event.
535 | ///
536 | public void OnUseDlibFaceDetecterToggleValueChanged()
537 | {
538 | if (useDlibFaceDetecterToggle.isOn)
539 | {
540 | useDlibFaceDetecter = true;
541 | }
542 | else
543 | {
544 | useDlibFaceDetecter = false;
545 | }
546 | }
547 |
548 | ///
549 | /// Raises the enable noise filter toggle value changed event.
550 | ///
551 | public void OnEnableNoiseFilterToggleValueChanged()
552 | {
553 | if (enableNoiseFilterToggle.isOn)
554 | {
555 | enableNoiseFilter = true;
556 | foreach (var key in lowPassFilterDict.Keys)
557 | {
558 | lowPassFilterDict[key].Reset();
559 | }
560 | foreach (var key in opticalFlowFilterDict.Keys)
561 | {
562 | opticalFlowFilterDict[key].Reset();
563 | }
564 | }
565 | else
566 | {
567 | enableNoiseFilter = false;
568 | }
569 | }
570 |
571 | ///
572 | /// Raises the filter non frontal faces toggle value changed event.
573 | ///
574 | public void OnFilterNonFrontalFacesToggleValueChanged()
575 | {
576 | if (filterNonFrontalFacesToggle.isOn)
577 | {
578 | filterNonFrontalFaces = true;
579 | }
580 | else
581 | {
582 | filterNonFrontalFaces = false;
583 | }
584 | }
585 |
586 | ///
587 | /// Raises the use seamless clone toggle value changed event.
588 | ///
589 | public void OnUseSeamlessCloneToggleValueChanged()
590 | {
591 | if (useSeamlessCloneToggle.isOn)
592 | {
593 | useSeamlessClone = true;
594 | }
595 | else
596 | {
597 | useSeamlessClone = false;
598 | }
599 | faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessClone;
600 | }
601 |
602 | ///
603 | /// Raises the display face rects toggle value changed event.
604 | ///
605 | public void OnDisplayFaceRectsToggleValueChanged()
606 | {
607 | if (displayFaceRectsToggle.isOn)
608 | {
609 | displayFaceRects = true;
610 | }
611 | else
612 | {
613 | displayFaceRects = false;
614 | }
615 | }
616 |
617 | ///
618 | /// Raises the display debug face points toggle value changed event.
619 | ///
620 | public void OnDisplayDebugFacePointsToggleValueChanged()
621 | {
622 | if (displayDebugFacePointsToggle.isOn)
623 | {
624 | displayDebugFacePoints = true;
625 | }
626 | else
627 | {
628 | displayDebugFacePoints = false;
629 | }
630 | if (faceSwapper != null)
631 | faceSwapper.isShowingDebugFacePoints = displayDebugFacePoints;
632 | }
633 | }
634 | }
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/VideoCaptureFaceSwapperExample/VideoCaptureFaceSwapperExample.cs:
--------------------------------------------------------------------------------
1 | using DlibFaceLandmarkDetector;
2 | using OpenCVForUnity.CoreModule;
3 | using OpenCVForUnity.FaceSwap;
4 | using OpenCVForUnity.ImgprocModule;
5 | using OpenCVForUnity.ObjdetectModule;
6 | using OpenCVForUnity.RectangleTrack;
7 | using OpenCVForUnity.UnityUtils.Helper;
8 | using System;
9 | using System.Collections;
10 | using System.Collections.Generic;
11 | using UnityEngine;
12 | using UnityEngine.SceneManagement;
13 | using UnityEngine.UI;
14 | using Rect = OpenCVForUnity.CoreModule.Rect;
15 |
16 | namespace FaceSwapperExample
17 | {
18 | ///
19 | /// VideoCapture FaceSwapper Example
20 | ///
21 | [RequireComponent(typeof(VideoCaptureToMatHelper))]
22 | public class VideoCaptureFaceSwapperExample : MonoBehaviour
23 | {
24 | ///
25 | /// Determines if use dlib face detector.
26 | ///
27 | public bool useDlibFaceDetecter = false;
28 |
29 | ///
30 | /// The use dlib face detecter toggle.
31 | ///
32 | public Toggle useDlibFaceDetecterToggle;
33 |
34 | ///
35 | /// Determines if filters non frontal faces.
36 | ///
37 | public bool filterNonFrontalFaces;
38 |
39 | ///
40 | /// The filter non frontal faces toggle.
41 | ///
42 | public Toggle filterNonFrontalFacesToggle;
43 |
44 | ///
45 | /// The frontal face rate lower limit.
46 | ///
47 | [Range(0.0f, 1.0f)]
48 | public float frontalFaceRateLowerLimit;
49 |
50 | ///
51 | /// Determines if enables noise filter.
52 | ///
53 | public bool enableNoiseFilter = true;
54 |
55 | ///
56 | /// The enable noise filter toggle.
57 | ///
58 | public Toggle enableNoiseFilterToggle;
59 |
60 | ///
61 | /// Determines if uses the seamless clone method for the face copy.
62 | ///
63 | public bool useSeamlessClone = false;
64 |
65 | ///
66 | /// The use seamless clone toggle.
67 | ///
68 | public Toggle useSeamlessCloneToggle;
69 |
70 | ///
71 | /// Determines if displays face rects.
72 | ///
73 | public bool displayFaceRects = false;
74 |
75 | ///
76 | /// The toggle for switching face rects display state.
77 | ///
78 | public Toggle displayFaceRectsToggle;
79 |
80 | ///
81 | /// Determines if displays debug face points.
82 | ///
83 | public bool displayDebugFacePoints = false;
84 |
85 | ///
86 | /// The toggle for switching debug face points display state.
87 | ///
88 | public Toggle displayDebugFacePointsToggle;
89 |
90 | ///
91 | /// The gray mat.
92 | ///
93 | Mat grayMat;
94 |
95 | ///
96 | /// The texture.
97 | ///
98 | Texture2D texture;
99 |
100 | ///
101 | /// The cascade.
102 | ///
103 | CascadeClassifier cascade;
104 |
105 | ///
106 | /// The video capture to mat helper.
107 | ///
108 | VideoCaptureToMatHelper sourceToMatHelper;
109 |
110 | ///
111 | /// VIDEO_FILENAME
112 | ///
113 | protected static readonly string VIDEO_FILENAME = "DlibFaceLandmarkDetector/couple_mjpeg.mjpeg";
114 |
115 | ///
116 | /// The face landmark detector.
117 | ///
118 | FaceLandmarkDetector faceLandmarkDetector;
119 |
120 | ///
121 | /// The mean points filter dictionary.
122 | ///
123 | Dictionary lowPassFilterDict;
124 |
125 | ///
126 | /// The optical flow points filter dictionary.
127 | ///
128 | Dictionary opticalFlowFilterDict;
129 |
130 | ///
131 | /// The face Swaper.
132 | ///
133 | DlibFaceSwapper faceSwapper;
134 |
135 | ///
136 | /// The detection based tracker.
137 | ///
138 | RectangleTracker rectangleTracker;
139 |
140 | ///
141 | /// The frontal face checker.
142 | ///
143 | FrontalFaceChecker frontalFaceChecker;
144 |
145 | ///
146 | /// The haarcascade_frontalface_alt_xml_filepath.
147 | ///
148 | string haarcascade_frontalface_alt_xml_filepath;
149 |
150 | ///
151 | /// The sp_human_face_68_dat_filepath.
152 | ///
153 | string sp_human_face_68_dat_filepath;
154 |
155 | ///
156 | /// The FPS monitor.
157 | ///
158 | FpsMonitor fpsMonitor;
159 |
160 | #if UNITY_WEBGL
161 | IEnumerator getFilePath_Coroutine;
162 | #endif
163 |
164 | // Use this for initialization
165 | void Start()
166 | {
167 | fpsMonitor = GetComponent();
168 |
169 | sourceToMatHelper = gameObject.GetComponent();
170 |
171 | #if UNITY_WEBGL
172 | getFilePath_Coroutine = GetFilePath();
173 | StartCoroutine(getFilePath_Coroutine);
174 | #else
175 | haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/haarcascade_frontalface_alt.xml");
176 | sp_human_face_68_dat_filepath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/sp_human_face_68.dat");
177 | Run();
178 | #endif
179 | }
180 |
#if UNITY_WEBGL
        /// <summary>
        /// Asynchronously resolves the cascade and shape-predictor file paths
        /// (WebGL builds cannot read StreamingAssets synchronously), then runs the example.
        /// </summary>
        private IEnumerator GetFilePath()
        {
            var cascadePathCoroutine = OpenCVForUnity.UnityUtils.Utils.getFilePathAsync("DlibFaceLandmarkDetector/haarcascade_frontalface_alt.xml", (result) =>
            {
                haarcascade_frontalface_alt_xml_filepath = result;
            });
            yield return cascadePathCoroutine;

            var shapePredictorPathCoroutine = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsync("DlibFaceLandmarkDetector/sp_human_face_68.dat", (result) =>
            {
                sp_human_face_68_dat_filepath = result;
            });
            yield return shapePredictorPathCoroutine;

            // Clear the handle so OnDestroy knows the coroutine has finished.
            getFilePath_Coroutine = null;

            Run();
        }
#endif
201 |
202 | private void Run()
203 | {
204 | rectangleTracker = new RectangleTracker();
205 |
206 | faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
207 |
208 | lowPassFilterDict = new Dictionary();
209 | opticalFlowFilterDict = new Dictionary();
210 |
211 | faceSwapper = new DlibFaceSwapper();
212 | faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessClone;
213 | faceSwapper.isShowingDebugFacePoints = displayDebugFacePoints;
214 |
215 | if (string.IsNullOrEmpty(sourceToMatHelper.requestedVideoFilePath))
216 | sourceToMatHelper.requestedVideoFilePath = VIDEO_FILENAME;
217 | sourceToMatHelper.outputColorFormat = VideoCaptureToMatHelper.ColorFormat.RGB;
218 | sourceToMatHelper.Initialize();
219 |
220 | displayFaceRectsToggle.isOn = displayFaceRects;
221 | useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
222 | enableNoiseFilterToggle.isOn = enableNoiseFilter;
223 | filterNonFrontalFacesToggle.isOn = filterNonFrontalFaces;
224 | useSeamlessCloneToggle.isOn = useSeamlessClone;
225 | displayDebugFacePointsToggle.isOn = displayDebugFacePoints;
226 | }
227 |
228 | ///
229 | /// Raises the video capture to mat helper initialized event.
230 | ///
231 | public void OnVideoCaptureToMatHelperInitialized()
232 | {
233 | Debug.Log("OnVideoCaptureToMatHelperInitialized");
234 |
235 | Mat rgbMat = sourceToMatHelper.GetMat();
236 |
237 | texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGB24, false);
238 |
239 |
240 | gameObject.transform.localScale = new Vector3(rgbMat.cols(), rgbMat.rows(), 1);
241 | Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
242 |
243 | if (fpsMonitor != null)
244 | {
245 | fpsMonitor.Add("width", rgbMat.width().ToString());
246 | fpsMonitor.Add("height", rgbMat.height().ToString());
247 | fpsMonitor.Add("orientation", Screen.orientation.ToString());
248 | }
249 |
250 |
251 | float width = gameObject.transform.localScale.x;
252 | float height = gameObject.transform.localScale.y;
253 |
254 | float widthScale = (float)Screen.width / width;
255 | float heightScale = (float)Screen.height / height;
256 | if (widthScale < heightScale)
257 | {
258 | Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
259 | }
260 | else
261 | {
262 | Camera.main.orthographicSize = height / 2;
263 | }
264 |
265 | gameObject.GetComponent().material.mainTexture = texture;
266 |
267 |
268 |
269 | grayMat = new Mat(rgbMat.rows(), rgbMat.cols(), CvType.CV_8UC1);
270 | cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
271 | //if (cascade.empty())
272 | //{
273 | // Debug.LogError("cascade file is not loaded. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
274 | //}
275 |
276 | frontalFaceChecker = new FrontalFaceChecker(width, height);
277 | }
278 |
279 | ///
280 | /// Raises the video capture to mat helper disposed event.
281 | ///
282 | public void OnVideoCaptureToMatHelperDisposed()
283 | {
284 | Debug.Log("OnVideoCaptureToMatHelperDisposed");
285 |
286 | grayMat.Dispose();
287 |
288 | if (texture != null)
289 | {
290 | Texture2D.Destroy(texture);
291 | texture = null;
292 | }
293 |
294 | rectangleTracker.Reset();
295 |
296 | foreach (var key in lowPassFilterDict.Keys)
297 | {
298 | lowPassFilterDict[key].Dispose();
299 | }
300 | lowPassFilterDict.Clear();
301 | foreach (var key in opticalFlowFilterDict.Keys)
302 | {
303 | opticalFlowFilterDict[key].Dispose();
304 | }
305 | opticalFlowFilterDict.Clear();
306 |
307 | frontalFaceChecker.Dispose();
308 | }
309 |
310 | ///
311 | /// Raises the video capture to mat helper error occurred event.
312 | ///
313 | /// Error code.
314 | public void OnVideoCaptureToMatHelperErrorOccurred(VideoCaptureToMatHelper.ErrorCode errorCode)
315 | {
316 | Debug.Log("OnVideoCaptureToMatHelperErrorOccurred " + errorCode);
317 |
318 | if (fpsMonitor != null)
319 | {
320 | fpsMonitor.consoleText = "ErrorCode: " + errorCode;
321 | }
322 | }
323 |
324 | // Update is called once per frame
325 | void Update()
326 | {
327 |
328 | if (sourceToMatHelper.IsPlaying() && sourceToMatHelper.DidUpdateThisFrame())
329 | {
330 |
331 | Mat rgbMat = sourceToMatHelper.GetMat();
332 |
333 | // detect faces.
334 | List detectResult = new List();
335 | if (useDlibFaceDetecter)
336 | {
337 | OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
338 | List result = faceLandmarkDetector.Detect();
339 |
340 | foreach (var unityRect in result)
341 | {
342 | detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
343 | }
344 | }
345 | else
346 | {
347 | // convert image to greyscale.
348 | Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);
349 |
350 | using (Mat equalizeHistMat = new Mat())
351 | using (MatOfRect faces = new MatOfRect())
352 | {
353 | Imgproc.equalizeHist(grayMat, equalizeHistMat);
354 |
355 | cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
356 |
357 | detectResult = faces.toList();
358 |
359 | // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
360 | foreach (Rect r in detectResult)
361 | {
362 | r.y += (int)(r.height * 0.1f);
363 | }
364 | }
365 | }
366 |
367 | // face tracking.
368 | List trackedRects = new List();
369 | rectangleTracker.UpdateTrackedObjects(detectResult);
370 | rectangleTracker.GetObjects(trackedRects, true);
371 |
372 | // create noise filter.
373 | foreach (var openCVRect in trackedRects)
374 | {
375 | if (openCVRect.state == TrackedState.NEW)
376 | {
377 | if (!lowPassFilterDict.ContainsKey(openCVRect.id))
378 | lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
379 | if (!opticalFlowFilterDict.ContainsKey(openCVRect.id))
380 | opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
381 | }
382 | else if (openCVRect.state == TrackedState.DELETED)
383 | {
384 | if (lowPassFilterDict.ContainsKey(openCVRect.id))
385 | {
386 | lowPassFilterDict[openCVRect.id].Dispose();
387 | lowPassFilterDict.Remove(openCVRect.id);
388 | }
389 | if (opticalFlowFilterDict.ContainsKey(openCVRect.id))
390 | {
391 | opticalFlowFilterDict[openCVRect.id].Dispose();
392 | opticalFlowFilterDict.Remove(openCVRect.id);
393 | }
394 | }
395 | }
396 |
397 | // detect face landmark points.
398 | OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
399 | List> landmarkPoints = new List>();
400 | foreach (var openCVRect in trackedRects)
401 | {
402 | if (openCVRect.state > TrackedState.NEW_DISPLAYED && openCVRect.state < TrackedState.NEW_HIDED)
403 | {
404 |
405 | UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
406 | List points = faceLandmarkDetector.DetectLandmark(rect);
407 |
408 | // apply noise filter.
409 | if (enableNoiseFilter)
410 | {
411 | opticalFlowFilterDict[openCVRect.id].Process(rgbMat, points, points);
412 | lowPassFilterDict[openCVRect.id].Process(rgbMat, points, points);
413 | }
414 |
415 | landmarkPoints.Add(points);
416 | }
417 | }
418 |
419 | // filter non frontal faces.
420 | if (filterNonFrontalFaces)
421 | {
422 | for (int i = 0; i < landmarkPoints.Count; i++)
423 | {
424 | if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
425 | {
426 | trackedRects.RemoveAt(i);
427 | landmarkPoints.RemoveAt(i);
428 | i--;
429 | }
430 | }
431 | }
432 |
433 | // face swapping.
434 | if (landmarkPoints.Count >= 2)
435 | {
436 | int ann = 0, bob = 1;
437 | for (int i = 0; i < landmarkPoints.Count - 1; i += 2)
438 | {
439 | ann = i;
440 | bob = i + 1;
441 |
442 | faceSwapper.SwapFaces(rgbMat, landmarkPoints[ann], landmarkPoints[bob], 1);
443 | }
444 | }
445 |
446 | // draw face rects.
447 | if (displayFaceRects)
448 | {
449 | for (int i = 0; i < trackedRects.Count; i++)
450 | {
451 | Rect openCVRect = trackedRects[i];
452 | UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
453 | OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
454 | //Imgproc.putText (rgbMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
455 | //Imgproc.putText (rgbMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
456 | }
457 | }
458 |
459 | //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
460 |
461 | OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbMat, texture);
462 | }
463 | }
464 |
465 |
466 | ///
467 | /// Raises the destroy event.
468 | ///
469 | void OnDestroy()
470 | {
471 | if (sourceToMatHelper != null)
472 | sourceToMatHelper.Dispose();
473 |
474 | if (cascade != null)
475 | cascade.Dispose();
476 |
477 | if (rectangleTracker != null)
478 | rectangleTracker.Dispose();
479 |
480 | if (faceLandmarkDetector != null)
481 | faceLandmarkDetector.Dispose();
482 |
483 | foreach (var key in lowPassFilterDict.Keys)
484 | {
485 | lowPassFilterDict[key].Dispose();
486 | }
487 | lowPassFilterDict.Clear();
488 | foreach (var key in opticalFlowFilterDict.Keys)
489 | {
490 | opticalFlowFilterDict[key].Dispose();
491 | }
492 | opticalFlowFilterDict.Clear();
493 |
494 | if (faceSwapper != null)
495 | faceSwapper.Dispose();
496 |
497 | #if UNITY_WEBGL
498 | if (getFilePath_Coroutine != null)
499 | {
500 | StopCoroutine(getFilePath_Coroutine);
501 | ((IDisposable)getFilePath_Coroutine).Dispose();
502 | }
503 | #endif
504 | }
505 |
506 | ///
507 | /// Raises the back button click event.
508 | ///
509 | public void OnBackButtonClick()
510 | {
511 | SceneManager.LoadScene("FaceSwapperExample");
512 | }
513 |
514 | ///
515 | /// Raises the play button click event.
516 | ///
517 | public void OnPlayButtonClick()
518 | {
519 | sourceToMatHelper.Play();
520 | }
521 |
522 | ///
523 | /// Raises the pause button click event.
524 | ///
525 | public void OnPauseButtonClick()
526 | {
527 | sourceToMatHelper.Pause();
528 | }
529 |
530 | ///
531 | /// Raises the stop button click event.
532 | ///
533 | public void OnStopButtonClick()
534 | {
535 | sourceToMatHelper.Stop();
536 | }
537 |
538 | ///
539 | /// Raises the use Dlib face detector toggle value changed event.
540 | ///
541 | public void OnUseDlibFaceDetecterToggleValueChanged()
542 | {
543 | if (useDlibFaceDetecterToggle.isOn)
544 | {
545 | useDlibFaceDetecter = true;
546 | }
547 | else
548 | {
549 | useDlibFaceDetecter = false;
550 | }
551 | }
552 |
553 | ///
554 | /// Raises the enable noise filter toggle value changed event.
555 | ///
556 | public void OnEnableNoiseFilterToggleValueChanged()
557 | {
558 | if (enableNoiseFilterToggle.isOn)
559 | {
560 | enableNoiseFilter = true;
561 | foreach (var key in lowPassFilterDict.Keys)
562 | {
563 | lowPassFilterDict[key].Reset();
564 | }
565 | foreach (var key in opticalFlowFilterDict.Keys)
566 | {
567 | opticalFlowFilterDict[key].Reset();
568 | }
569 | }
570 | else
571 | {
572 | enableNoiseFilter = false;
573 | }
574 | }
575 |
576 | ///
577 | /// Raises the filter non frontal faces toggle value changed event.
578 | ///
579 | public void OnFilterNonFrontalFacesToggleValueChanged()
580 | {
581 | if (filterNonFrontalFacesToggle.isOn)
582 | {
583 | filterNonFrontalFaces = true;
584 | }
585 | else
586 | {
587 | filterNonFrontalFaces = false;
588 | }
589 | }
590 |
591 | ///
592 | /// Raises the use seamless clone toggle value changed event.
593 | ///
594 | public void OnUseSeamlessCloneToggleValueChanged()
595 | {
596 | if (useSeamlessCloneToggle.isOn)
597 | {
598 | useSeamlessClone = true;
599 | }
600 | else
601 | {
602 | useSeamlessClone = false;
603 | }
604 | faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessClone;
605 | }
606 |
607 | ///
608 | /// Raises the display face rects toggle value changed event.
609 | ///
610 | public void OnDisplayFaceRectsToggleValueChanged()
611 | {
612 | if (displayFaceRectsToggle.isOn)
613 | {
614 | displayFaceRects = true;
615 | }
616 | else
617 | {
618 | displayFaceRects = false;
619 | }
620 | }
621 |
622 | ///
623 | /// Raises the display debug face points toggle value changed event.
624 | ///
625 | public void OnDisplayDebugFacePointsToggleValueChanged()
626 | {
627 | if (displayDebugFacePointsToggle.isOn)
628 | {
629 | displayDebugFacePoints = true;
630 | }
631 | else
632 | {
633 | displayDebugFacePoints = false;
634 | }
635 | if (faceSwapper != null)
636 | faceSwapper.isShowingDebugFacePoints = displayDebugFacePoints;
637 | }
638 | }
639 | }
640 |
--------------------------------------------------------------------------------
/Assets/FaceSwapperExample/WebCamTextureFaceChangerExample/WebCamTextureFaceChangerExample.cs:
--------------------------------------------------------------------------------
1 | using DlibFaceLandmarkDetector;
2 | using OpenCVForUnity.CoreModule;
3 | using OpenCVForUnity.FaceChange;
4 | using OpenCVForUnity.ImgprocModule;
5 | using OpenCVForUnity.ObjdetectModule;
6 | using OpenCVForUnity.RectangleTrack;
7 | using OpenCVForUnity.UnityUtils.Helper;
8 | using System;
9 | using System.Collections;
10 | using System.Collections.Generic;
11 | using UnityEngine;
12 | using UnityEngine.SceneManagement;
13 | using UnityEngine.UI;
14 | using Rect = OpenCVForUnity.CoreModule.Rect;
15 |
16 | namespace FaceSwapperExample
17 | {
18 | ///
19 | /// WebCamTexture FaceChanger Example
20 | ///
21 | [RequireComponent(typeof(WebCamTextureToMatHelper))]
22 | public class WebCamTextureFaceChangerExample : MonoBehaviour
23 | {
/// <summary>
/// Determines if use dlib face detector.
/// </summary>
public bool useDlibFaceDetecter = false;

/// <summary>
/// The use dlib face detecter toggle.
/// </summary>
public Toggle useDlibFaceDetecterToggle;

/// <summary>
/// Determines if filters non frontal faces.
/// </summary>
public bool filterNonFrontalFaces = false;

/// <summary>
/// The filter non frontal faces toggle.
/// </summary>
public Toggle filterNonFrontalFacesToggle;

/// <summary>
/// The frontal face rate lower limit (faces scoring below this are filtered out).
/// </summary>
[Range(0.0f, 1.0f)]
public float frontalFaceRateLowerLimit;

/// <summary>
/// Determines if enables noise filter.
/// </summary>
public bool enableNoiseFilter = true;

/// <summary>
/// The enable noise filter toggle.
/// </summary>
public Toggle enableNoiseFilterToggle;

/// <summary>
/// Determines if displays face rects.
/// </summary>
public bool displayFaceRects = false;

/// <summary>
/// The toggle for switching face rects display state.
/// </summary>
public Toggle displayFaceRectsToggle;

/// <summary>
/// Determines if displays debug face points.
/// </summary>
public bool displayDebugFacePoints = false;

/// <summary>
/// The toggle for switching debug face points display state.
/// </summary>
public Toggle displayDebugFacePointsToggle;

/// <summary>
/// The gray mat (scratch buffer for the Haar cascade detector).
/// </summary>
Mat grayMat;

/// <summary>
/// The output texture the processed camera frame is rendered into.
/// </summary>
Texture2D texture;

/// <summary>
/// The Haar cascade face classifier.
/// </summary>
CascadeClassifier cascade;

/// <summary>
/// The web cam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;

/// <summary>
/// The dlib face landmark detector.
/// </summary>
FaceLandmarkDetector faceLandmarkDetector;

/// <summary>
/// Low-pass landmark filters, keyed by tracked rect id.
/// </summary>
Dictionary<int, LowPassPointsFilter> lowPassFilterDict;

/// <summary>
/// Optical-flow landmark filters, keyed by tracked rect id.
/// </summary>
Dictionary<int, OFPointsFilter> opticalFlowFilterDict;

/// <summary>
/// The face changer.
/// </summary>
DlibFaceChanger faceChanger;

/// <summary>
/// The detection based rectangle tracker.
/// </summary>
RectangleTracker rectangleTracker;

/// <summary>
/// The frontal face checker.
/// </summary>
FrontalFaceChecker frontalFaceChecker;

/// <summary>
/// The face mask texture (source image for face changing), null when unset.
/// </summary>
Texture2D faceMaskTexture;

/// <summary>
/// The face mask mat (CV_8UC4 copy of faceMaskTexture), null when unset.
/// </summary>
Mat faceMaskMat;

/// <summary>
/// The face rect detected inside the mask image; zero-sized until detected.
/// </summary>
UnityEngine.Rect detectedFaceRect;

/// <summary>
/// The haarcascade_frontalface_alt_xml_filepath.
/// </summary>
string haarcascade_frontalface_alt_xml_filepath;

/// <summary>
/// The sp_human_face_68_dat_filepath.
/// </summary>
string sp_human_face_68_dat_filepath;

/// <summary>
/// The FPS monitor (optional; may be absent on the GameObject).
/// </summary>
FpsMonitor fpsMonitor;

#if UNITY_WEBGL
// Coroutine resolving StreamingAssets paths asynchronously (WebGL only).
IEnumerator getFilePath_Coroutine;
#endif
163 |
/// <summary>
/// Use this for initialization: resolves asset file paths and starts the example.
/// On WebGL the paths must be resolved asynchronously via coroutine.
/// </summary>
void Start()
{
    fpsMonitor = GetComponent<FpsMonitor>();

    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_WEBGL
    getFilePath_Coroutine = GetFilePath();
    StartCoroutine(getFilePath_Coroutine);
#else
    haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/haarcascade_frontalface_alt.xml");
    sp_human_face_68_dat_filepath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("DlibFaceLandmarkDetector/sp_human_face_68.dat");
    Run();
#endif
}
180 |
#if UNITY_WEBGL
/// <summary>
/// Resolves the cascade and shape-predictor file paths asynchronously
/// (WebGL cannot read StreamingAssets synchronously), then starts the example.
/// </summary>
private IEnumerator GetFilePath()
{
    yield return OpenCVForUnity.UnityUtils.Utils.getFilePathAsync(
        "DlibFaceLandmarkDetector/haarcascade_frontalface_alt.xml",
        (result) => { haarcascade_frontalface_alt_xml_filepath = result; });

    yield return DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsync(
        "DlibFaceLandmarkDetector/sp_human_face_68.dat",
        (result) => { sp_human_face_68_dat_filepath = result; });

    getFilePath_Coroutine = null;

    Run();
}
#endif
201 |
/// <summary>
/// Creates the tracker, landmark detector, noise filters and face changer,
/// starts webcam initialization, and syncs the UI toggles with the serialized flags.
/// </summary>
private void Run()
{
    rectangleTracker = new RectangleTracker();

    faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);

    // Per-tracked-face noise filters, keyed by the tracked rect id.
    lowPassFilterDict = new Dictionary<int, LowPassPointsFilter>();
    opticalFlowFilterDict = new Dictionary<int, OFPointsFilter>();

    faceChanger = new DlibFaceChanger();
    faceChanger.isShowingDebugFacePoints = displayDebugFacePoints;

    webCamTextureToMatHelper.Initialize();

    // Push the serialized initial values into the UI toggles.
    displayFaceRectsToggle.isOn = displayFaceRects;
    useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
    enableNoiseFilterToggle.isOn = enableNoiseFilter;
    filterNonFrontalFacesToggle.isOn = filterNonFrontalFaces;
    displayDebugFacePointsToggle.isOn = displayDebugFacePoints;
}
222 |
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Allocates the output texture and working mats, sizes the display quad and camera,
/// and loads the Haar cascade.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    if (fpsMonitor != null)
    {
        fpsMonitor.Add("width", webCamTextureMat.width().ToString());
        fpsMonitor.Add("height", webCamTextureMat.height().ToString());
        fpsMonitor.Add("orientation", Screen.orientation.ToString());
    }

    float width = gameObject.transform.localScale.x;
    float height = gameObject.transform.localScale.y;

    // Fit the quad to the screen while preserving the frame's aspect ratio.
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = height / 2;
    }

    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

    grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
    //if (cascade.empty())
    //{
    //    Debug.LogError("cascade file is not loaded. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
    //}

    frontalFaceChecker = new FrontalFaceChecker(width, height);
}
271 |
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// Releases the per-session resources; the noise filters are recreated on the next init.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
    Debug.Log("OnWebCamTextureToMatHelperDisposed");

    // Guard: this event can fire before the Initialized event ever allocated these.
    if (grayMat != null)
        grayMat.Dispose();

    if (texture != null)
    {
        Texture2D.Destroy(texture);
        texture = null;
    }

    rectangleTracker.Reset();

    // Iterate Values directly (avoids a key lookup per entry).
    foreach (var filter in lowPassFilterDict.Values)
    {
        filter.Dispose();
    }
    lowPassFilterDict.Clear();
    foreach (var filter in opticalFlowFilterDict.Values)
    {
        filter.Dispose();
    }
    opticalFlowFilterDict.Clear();

    if (frontalFaceChecker != null)
        frontalFaceChecker.Dispose();
}
302 |
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
    Debug.Log($"OnWebCamTextureToMatHelperErrorOccurred {errorCode}");
}
311 |
/// <summary>
/// Update is called once per frame. Pipeline: detect faces → track rects →
/// manage per-face noise filters → detect/filter landmarks → apply face changing
/// (mask image or between detected faces) → draw overlays → upload to texture.
/// </summary>
void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        // detect faces.
        List<Rect> detectResult;
        DetectFaces(rgbaMat, out detectResult, useDlibFaceDetecter);

        // face tracking.
        List<TrackedRect> trackedRects = new List<TrackedRect>();
        rectangleTracker.UpdateTrackedObjects(detectResult);
        rectangleTracker.GetObjects(trackedRects, true);

        // create/destroy noise filters as faces appear and disappear.
        foreach (var openCVRect in trackedRects)
        {
            if (openCVRect.state == TrackedState.NEW)
            {
                if (!lowPassFilterDict.ContainsKey(openCVRect.id))
                    lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                if (!opticalFlowFilterDict.ContainsKey(openCVRect.id))
                    opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
            }
            else if (openCVRect.state == TrackedState.DELETED)
            {
                if (lowPassFilterDict.ContainsKey(openCVRect.id))
                {
                    lowPassFilterDict[openCVRect.id].Dispose();
                    lowPassFilterDict.Remove(openCVRect.id);
                }
                if (opticalFlowFilterDict.ContainsKey(openCVRect.id))
                {
                    opticalFlowFilterDict[openCVRect.id].Dispose();
                    opticalFlowFilterDict.Remove(openCVRect.id);
                }
            }
        }

        // detect face landmark points.
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
        List<List<Vector2>> landmarkPoints = new List<List<Vector2>>();
        // NOTE(review): landmarks are only collected for rects in the displayed
        // state range, so landmarkPoints indices can drift from trackedRects when
        // some rects are hidden — confirm this matches the tracker's state model.
        foreach (var openCVRect in trackedRects)
        {
            if (openCVRect.state > TrackedState.NEW_DISPLAYED && openCVRect.state < TrackedState.NEW_HIDED)
            {
                UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
                List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                // apply noise filter (optical flow first, then low-pass).
                if (enableNoiseFilter)
                {
                    opticalFlowFilterDict[openCVRect.id].Process(rgbaMat, points, points);
                    lowPassFilterDict[openCVRect.id].Process(rgbaMat, points, points);
                }

                landmarkPoints.Add(points);
            }
        }

        // filter non frontal faces.
        if (filterNonFrontalFaces)
        {
            for (int i = 0; i < landmarkPoints.Count; i++)
            {
                if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
                {
                    trackedRects.RemoveAt(i);
                    landmarkPoints.RemoveAt(i);
                    i--;
                }
            }
        }

        // face changing.
        if (faceMaskTexture != null && landmarkPoints.Count >= 1)
        { // Apply face changing between detected faces and a mask image.

            // Lazily detect the face inside the mask image the first time it is needed.
            if (detectedFaceRect.width == 0.0f || detectedFaceRect.height == 0.0f)
            {
                DetectFaces(faceMaskMat, out detectResult, useDlibFaceDetecter);
                if (detectResult.Count >= 1)
                {
                    detectedFaceRect = new UnityEngine.Rect(detectResult[0].x, detectResult[0].y, detectResult[0].width, detectResult[0].height);
                }
            }

            if (detectedFaceRect.width > 0 || detectedFaceRect.height > 0)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, faceMaskMat);
                List<Vector2> sourceLandmarkPoints = faceLandmarkDetector.DetectLandmark(detectedFaceRect);

                faceChanger.SetTargetImage(rgbaMat);
                for (int i = 0; i < landmarkPoints.Count; i++)
                {
                    faceChanger.AddFaceChangeData(faceMaskMat, sourceLandmarkPoints, landmarkPoints[i], 1);
                }
                faceChanger.ChangeFace();

                if (displayFaceRects)
                {
                    OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, detectedFaceRect, new Scalar(255, 0, 0, 255), 2);
                }
            }
        }
        else if (landmarkPoints.Count >= 2)
        { // Apply face changing between detected faces: face 0 is the source for all others.
            faceChanger.SetTargetImage(rgbaMat);
            for (int i = 1; i < landmarkPoints.Count; i++)
            {
                faceChanger.AddFaceChangeData(rgbaMat, landmarkPoints[0], landmarkPoints[i], 1);
            }
            faceChanger.ChangeFace();
        }

        // draw face rects.
        if (displayFaceRects)
        {
            for (int i = 0; i < trackedRects.Count; i++)
            {
                Rect openCVRect = trackedRects[i];
                UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
                OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
            }
        }

        // display the face mask image at quarter width in the top-right corner.
        if (faceMaskMat != null)
        {
            float scale = (rgbaMat.width() / 4f) / faceMaskMat.width();
            float tx = rgbaMat.width() - faceMaskMat.width() * scale;
            float ty = 0.0f;
            // Affine matrix [scale 0 tx; 0 scale ty]; dispose it each frame so the
            // native allocation does not accumulate until the GC finalizer runs.
            using (Mat trans = new Mat(2, 3, CvType.CV_32F))
            {
                trans.put(0, 0, scale);
                trans.put(0, 1, 0.0f);
                trans.put(0, 2, tx);
                trans.put(1, 0, 0.0f);
                trans.put(1, 1, scale);
                trans.put(1, 2, ty);

                Imgproc.warpAffine(faceMaskMat, rgbaMat, trans, rgbaMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));
            }

            // Restore the mask mat from the pristine texture if overlays were drawn on it.
            if (displayFaceRects || displayDebugFacePoints)
                OpenCVForUnity.UnityUtils.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);
        }

        OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbaMat, texture);
    }
}
470 |
/// <summary>
/// Detects faces in the given image using either the dlib detector or the OpenCV Haar cascade.
/// </summary>
/// <param name="rgbaMat">Input RGBA image.</param>
/// <param name="detectResult">Detected face rectangles in OpenCV coordinates (never null).</param>
/// <param name="useDlibFaceDetecter">If true, uses dlib; otherwise the Haar cascade.</param>
private void DetectFaces(Mat rgbaMat, out List<Rect> detectResult, bool useDlibFaceDetecter)
{
    detectResult = new List<Rect>();

    if (useDlibFaceDetecter)
    {
        OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
        List<UnityEngine.Rect> result = faceLandmarkDetector.Detect();

        foreach (var unityRect in result)
        {
            detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
        }
    }
    else
    {
        // convert image to greyscale.
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        using (Mat equalizeHistMat = new Mat())
        using (MatOfRect faces = new MatOfRect())
        {
            Imgproc.equalizeHist(grayMat, equalizeHistMat);

            // Minimum face size is 15% of the image width; no upper bound.
            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

            detectResult = faces.toList();

            // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
            foreach (Rect r in detectResult)
            {
                r.y += (int)(r.height * 0.1f);
            }
        }
    }
}
507 |
/// <summary>
/// Raises the destroy event. Releases every native/disposable resource this
/// component may have created; all accesses are null-guarded because on WebGL
/// the object can be destroyed before Run() ever executed.
/// </summary>
void OnDestroy()
{
    if (webCamTextureToMatHelper != null)
        webCamTextureToMatHelper.Dispose();

    if (cascade != null)
        cascade.Dispose();

    if (rectangleTracker != null)
        rectangleTracker.Dispose();

    if (faceLandmarkDetector != null)
        faceLandmarkDetector.Dispose();

    if (lowPassFilterDict != null)
    {
        foreach (var filter in lowPassFilterDict.Values)
            filter.Dispose();
        lowPassFilterDict.Clear();
    }
    if (opticalFlowFilterDict != null)
    {
        foreach (var filter in opticalFlowFilterDict.Values)
            filter.Dispose();
        opticalFlowFilterDict.Clear();
    }

    if (faceChanger != null)
        faceChanger.Dispose();

    if (faceMaskMat != null)
        faceMaskMat.Dispose();

#if UNITY_WEBGL
    if (getFilePath_Coroutine != null)
    {
        StopCoroutine(getFilePath_Coroutine);
        ((IDisposable)getFilePath_Coroutine).Dispose();
    }
#endif
}
550 |
/// <summary>
/// Raises the back button click event: returns to the example selection scene.
/// </summary>
public void OnBackButtonClick() => SceneManager.LoadScene("FaceSwapperExample");
558 |
/// <summary>
/// Raises the play button click event.
/// </summary>
public void OnPlayButtonClick() => webCamTextureToMatHelper.Play();
566 |
/// <summary>
/// Raises the pause button click event.
/// </summary>
public void OnPauseButtonClick() => webCamTextureToMatHelper.Pause();
574 |
/// <summary>
/// Raises the stop button click event.
/// </summary>
public void OnStopButtonClick() => webCamTextureToMatHelper.Stop();
582 |
/// <summary>
/// Raises the change camera button click event: flips between front and back cameras.
/// </summary>
public void OnChangeCameraButtonClick() => webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing();
590 |
/// <summary>
/// Raises the use Dlib face detector toggle value changed event.
/// </summary>
public void OnUseDlibFaceDetecterToggleValueChanged()
{
    // Mirror the UI toggle state into the detector-selection flag.
    useDlibFaceDetecter = useDlibFaceDetecterToggle.isOn;
}
605 |
/// <summary>
/// Raises the enable noise filter toggle value changed event.
/// When enabling, resets all filters so stale history does not bleed into new frames.
/// </summary>
public void OnEnableNoiseFilterToggleValueChanged()
{
    enableNoiseFilter = enableNoiseFilterToggle.isOn;

    if (enableNoiseFilter)
    {
        foreach (var filter in lowPassFilterDict.Values)
            filter.Reset();
        foreach (var filter in opticalFlowFilterDict.Values)
            filter.Reset();
    }
}
628 |
/// <summary>
/// Raises the filter non frontal faces toggle value changed event.
/// </summary>
public void OnFilterNonFrontalFacesToggleValueChanged()
{
    // Mirror the UI toggle state into the filtering flag.
    filterNonFrontalFaces = filterNonFrontalFacesToggle.isOn;
}
643 |
/// <summary>
/// Raises the display face rects toggle value changed event.
/// </summary>
public void OnDisplayFaceRectsToggleValueChanged()
{
    // Mirror the UI toggle state into the display flag.
    displayFaceRects = displayFaceRectsToggle.isOn;
}
658 |
/// <summary>
/// Raises the display debug face points toggle value changed event.
/// </summary>
public void OnDisplayDebugFacePointsToggleValueChanged()
{
    displayDebugFacePoints = displayDebugFacePointsToggle.isOn;

    // Keep the changer's debug-point rendering in sync with the UI state.
    if (faceChanger != null)
        faceChanger.isShowingDebugFacePoints = displayDebugFacePoints;
}
675 |
/// <summary>
/// Raises the set face mask button click event.
/// Loads the bundled "face_mask" texture as the change source and clears the
/// cached mask face rect so it is re-detected on the next frame.
/// </summary>
public void OnSetFaceMaskButtonClick()
{
    if (faceMaskMat != null)
    {
        faceMaskMat.Dispose();
    }

    faceMaskTexture = Resources.Load("face_mask") as Texture2D;
    if (faceMaskTexture == null)
    {
        // Guard: a missing resource would otherwise NRE on faceMaskTexture.height below.
        Debug.LogError("Resources/face_mask texture could not be loaded.");
        faceMaskMat = null;
        return;
    }

    faceMaskMat = new Mat(faceMaskTexture.height, faceMaskTexture.width, CvType.CV_8UC4);
    OpenCVForUnity.UnityUtils.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);
    Debug.Log("faceMaskMat ToString " + faceMaskMat.ToString());
    detectedFaceRect = new UnityEngine.Rect();
}
692 |
/// <summary>
/// Raises the reset face mask button click event.
/// Discards the mask so face changing falls back to swapping between detected faces.
/// </summary>
public void OnResetFaceMaskButtonClick()
{
    if (faceMaskTexture == null)
        return;

    faceMaskTexture = null;
    faceMaskMat.Dispose();
    faceMaskMat = null;
}
705 | }
706 | }
--------------------------------------------------------------------------------