├── .gitignore
├── .nuget
│   ├── NuGet.Config
│   ├── NuGet.exe
│   └── NuGet.targets
├── CameraOpenCV
│   ├── Algorithm.cs
│   ├── AlgorithmProperty.cs
│   ├── App.xaml
│   ├── App.xaml.cs
│   ├── Assets
│   │   ├── microsoft-sdk.png
│   │   ├── smalltile-sdk.png
│   │   ├── splash-sdk.png
│   │   ├── squaretile-sdk.png
│   │   ├── storelogo-sdk.png
│   │   ├── tile-sdk.png
│   │   └── windows-sdk.png
│   ├── CameraOpenCV.csproj
│   ├── FrameRenderer.cs
│   ├── MainPage.xaml
│   ├── MainPage.xaml.cs
│   ├── OcvOp.cs
│   ├── Package.appxmanifest
│   ├── Properties
│   │   ├── AssemblyInfo.cs
│   │   └── Default.rd.xml
│   ├── SampleConfiguration.cs
│   ├── Scenario1_ExampleOperations.xaml
│   ├── Scenario1_ExampleOperations.xaml.cs
│   ├── Scenario2_ImageOperations.xaml
│   ├── Scenario2_ImageOperations.xaml.cs
│   └── Styles
│       └── Styles.xaml
├── LICENSE
├── OpenCvSharpSamples.sln
├── OpenCvSharpSamples.sln.DotSettings
├── README.md
├── SampleBase
│   ├── Console
│   │   ├── ConsoleMessagePrinter.cs
│   │   ├── ConsoleTestBase.cs
│   │   └── ConsoleTestManager.cs
│   ├── Data
│   │   ├── Image
│   │   │   ├── 16bit.png
│   │   │   ├── Balloon.png
│   │   │   ├── Calibration
│   │   │   │   ├── 00.JPG
│   │   │   │   ├── 01.JPG
│   │   │   │   ├── 02.JPG
│   │   │   │   ├── left01.jpg
│   │   │   │   ├── left02.jpg
│   │   │   │   ├── left03.jpg
│   │   │   │   ├── left04.jpg
│   │   │   │   ├── left05.jpg
│   │   │   │   ├── left06.jpg
│   │   │   │   ├── left07.jpg
│   │   │   │   ├── left08.jpg
│   │   │   │   ├── left09.jpg
│   │   │   │   ├── left10.jpg
│   │   │   │   ├── left11.jpg
│   │   │   │   ├── left12.jpg
│   │   │   │   ├── left13.jpg
│   │   │   │   ├── right01.jpg
│   │   │   │   ├── right02.jpg
│   │   │   │   ├── right03.jpg
│   │   │   │   ├── right04.jpg
│   │   │   │   ├── right05.jpg
│   │   │   │   ├── right06.jpg
│   │   │   │   ├── right07.jpg
│   │   │   │   ├── right08.jpg
│   │   │   │   ├── right09.jpg
│   │   │   │   ├── right10.jpg
│   │   │   │   ├── right11.jpg
│   │   │   │   ├── right12.jpg
│   │   │   │   └── right13.jpg
│   │   │   ├── Girl.bmp
│   │   │   ├── Mandrill.bmp
│   │   │   ├── Squares
│   │   │   │   ├── pic1.png
│   │   │   │   ├── pic2.png
│   │   │   │   ├── pic3.png
│   │   │   │   ├── pic4.png
│   │   │   │   ├── pic5.png
│   │   │   │   └── pic6.png
│   │   │   ├── aruco_markers_photo.jpg
│   │   │   ├── aruco_markers_source.jpg
│   │   │   ├── asahiyama.jpg
│   │   │   ├── binarization_sample.bmp
│   │   │   ├── box.png
│   │   │   ├── box_in_scene.png
│   │   │   ├── cake.bmp
│   │   │   ├── cat.jpg
│   │   │   ├── fruits.jpg
│   │   │   ├── goryokaku.jpg
│   │   │   ├── hand_p.jpg
│   │   │   ├── lenna.png
│   │   │   ├── lenna511.png
│   │   │   ├── maltese.jpg
│   │   │   ├── match1.png
│   │   │   ├── match2.png
│   │   │   ├── penguin1.png
│   │   │   ├── penguin1b.png
│   │   │   ├── penguin2.png
│   │   │   ├── shapes.png
│   │   │   ├── space_shuttle.jpg
│   │   │   ├── tsukuba_left.png
│   │   │   ├── tsukuba_right.png
│   │   │   ├── very_old_newspaper.png
│   │   │   ├── walkman.jpg
│   │   │   └── yalta.jpg
│   │   ├── Model
│   │   │   └── FSRCNN_x4.pb
│   │   ├── Movie
│   │   │   ├── bach.mp4
│   │   │   └── hara.flv
│   │   └── Text
│   │       ├── agaricus-lepiota.data
│   │       ├── bvlc_googlenet.prototxt
│   │       ├── camera.xml
│   │       ├── cat.xml
│   │       ├── haarcascade_frontalface_alt.xml
│   │       ├── haarcascade_frontalface_default.xml
│   │       ├── lbpcascade_frontalface.xml
│   │       ├── letter-recognition.data
│   │       └── synset_words.txt
│   ├── Interfaces
│   │   ├── IMessagePrinter.cs
│   │   ├── ITestBase.cs
│   │   └── ITestManager.cs
│   ├── MyProcess.cs
│   ├── Path.cs
│   └── SampleBase.csproj
├── SamplesCore.Windows
│   ├── Data
│   │   └── Image
│   │       ├── box_in_scene.png
│   │       └── fruits.jpg
│   ├── FilePath.cs
│   ├── ISample.cs
│   ├── Program.cs
│   ├── Samples
│   │   ├── MatToWriteableBitmap.cs
│   │   └── WindowGUISample.cs
│   └── SamplesCore.Windows.csproj
├── SamplesCore
│   ├── Program.cs
│   ├── Properties
│   │   └── AssemblyInfo.cs
│   ├── Samples
│   │   ├── ArucoSample.cs
│   │   ├── BRISKSample.cs
│   │   ├── BgSubtractorMOG.cs
│   │   ├── BinarizerSample.cs
│   │   ├── CaffeSample.cs
│   │   ├── CameraCaptureSample.cs
│   │   ├── ClaheSample.cs
│   │   ├── ConnectedComponentsSample.cs
│   │   ├── DFT.cs
│   │   ├── DnnSuperresSample.cs
│   │   ├── DrawBestMatch.cs
│   │   ├── FASTSample.cs
│   │   ├── FREAKSample.cs
│   │   ├── FaceDetection.cs
│   │   ├── FaceDetectionDNN.cs
│   │   ├── FlannSample.cs
│   │   ├── HOGSample.cs
│   │   ├── HandPose.cs
│   │   ├── HistSample.cs
│   │   ├── HoughLinesSample.cs
│   │   ├── InpaintSample.cs
│   │   ├── KAZESample.cs
│   │   ├── KAZESample2.cs
│   │   ├── MDS.cs
│   │   ├── MSERSample.cs
│   │   ├── MatOperations.cs
│   │   ├── MergeSplitSample.cs
│   │   ├── MorphologySample.cs
│   │   ├── NormalArrayOperations.cs
│   │   ├── OpenVinoFaceDetection.cs
│   │   ├── PerspectiveTransformSample.cs
│   │   ├── PhotoMethods.cs
│   │   ├── PixelAccess.cs
│   │   ├── Pose.cs
│   │   ├── SVMSample.cs
│   │   ├── SeamlessClone.cs
│   │   ├── SiftSurfSample.cs
│   │   ├── SimpleBlobDetectorSample.cs
│   │   ├── SolveEquation.cs
│   │   ├── StarDetectorSample.cs
│   │   ├── Stitching.cs
│   │   ├── Subdiv2DSample.cs
│   │   ├── SuperResolutionSample.cs
│   │   ├── VideoCaptureSample.cs
│   │   ├── VideoWriterSample.cs
│   │   └── WatershedSample.cs
│   └── SamplesCore.csproj
├── SamplesLegacy
│   ├── App.config
│   ├── Program.cs
│   ├── Properties
│   │   └── AssemblyInfo.cs
│   ├── Samples
│   │   ├── ArucoSample.cs
│   │   ├── BRISKSample.cs
│   │   ├── BgSubtractorMOG.cs
│   │   ├── BinarizerSample.cs
│   │   ├── CaffeSample.cs
│   │   ├── CameraCaptureSample.cs
│   │   ├── ClaheSample.cs
│   │   ├── ConnectedComponentsSample.cs
│   │   ├── DFT.cs
│   │   ├── DnnSuperresSample.cs
│   │   ├── DrawBestMatch.cs
│   │   ├── FASTSample.cs
│   │   ├── FREAKSample.cs
│   │   ├── FaceDetection.cs
│   │   ├── FaceDetectionDNN.cs
│   │   ├── FlannSample.cs
│   │   ├── HOGSample.cs
│   │   ├── HandPose.cs
│   │   ├── HistSample.cs
│   │   ├── HoughLinesSample.cs
│   │   ├── InpaintSample.cs
│   │   ├── KAZESample.cs
│   │   ├── KAZESample2.cs
│   │   ├── MDS.cs
│   │   ├── MSERSample.cs
│   │   ├── MatOperations.cs
│   │   ├── MergeSplitSample.cs
│   │   ├── MorphologySample.cs
│   │   ├── NormalArrayOperations.cs
│   │   ├── OpenVinoFaceDetection.cs
│   │   ├── PerspectiveTransformSample.cs
│   │   ├── PhotoMethods.cs
│   │   ├── PixelAccess.cs
│   │   ├── Pose.cs
│   │   ├── SVMSample.cs
│   │   ├── SeamlessClone.cs
│   │   ├── SiftSurfSample.cs
│   │   ├── SimpleBlobDetectorSample.cs
│   │   ├── SolveEquation.cs
│   │   ├── StarDetectorSample.cs
│   │   ├── Stitching.cs
│   │   ├── Subdiv2DSample.cs
│   │   ├── SuperResolutionSample.cs
│   │   ├── VideoCaptureSample.cs
│   │   ├── VideoWriterSample.cs
│   │   └── WatershedSample.cs
│   ├── SamplesLegacy.csproj
│   └── packages.config
├── SamplesVB
│   ├── Program.vb
│   ├── Samples
│   │   ├── FASTSample.vb
│   │   ├── FlannSample.vb
│   │   ├── HOGSample.vb
│   │   ├── HoughLinesSample.vb
│   │   └── StarDetectorSample.vb
│   └── SamplesVB.vbproj
├── VideoCaptureForm
│   ├── App.config
│   ├── Program.cs
│   ├── Properties
│   │   ├── AssemblyInfo.cs
│   │   ├── Resources.Designer.cs
│   │   ├── Resources.resx
│   │   ├── Settings.Designer.cs
│   │   └── Settings.settings
│   ├── VideoCaptureForm.Designer.cs
│   ├── VideoCaptureForm.cs
│   ├── VideoCaptureForm.csproj
│   ├── VideoCaptureForm.resx
│   ├── haarcascade_frontalface_default.xml
│   └── packages.config
└── VideoCaptureWPF
    ├── App.xaml
    ├── App.xaml.cs
    ├── MainWindow.xaml
    ├── MainWindow.xaml.cs
    ├── VideoCaptureWPF.csproj
    └── haarcascade_frontalface_default.xml
/.nuget/NuGet.Config:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.nuget/NuGet.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/.nuget/NuGet.exe
--------------------------------------------------------------------------------
/CameraOpenCV/App.xaml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/CameraOpenCV/Assets/microsoft-sdk.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/CameraOpenCV/Assets/microsoft-sdk.png
--------------------------------------------------------------------------------
/CameraOpenCV/Assets/smalltile-sdk.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/CameraOpenCV/Assets/smalltile-sdk.png
--------------------------------------------------------------------------------
/CameraOpenCV/Assets/splash-sdk.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/CameraOpenCV/Assets/splash-sdk.png
--------------------------------------------------------------------------------
/CameraOpenCV/Assets/squaretile-sdk.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/CameraOpenCV/Assets/squaretile-sdk.png
--------------------------------------------------------------------------------
/CameraOpenCV/Assets/storelogo-sdk.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/CameraOpenCV/Assets/storelogo-sdk.png
--------------------------------------------------------------------------------
/CameraOpenCV/Assets/tile-sdk.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/CameraOpenCV/Assets/tile-sdk.png
--------------------------------------------------------------------------------
/CameraOpenCV/Assets/windows-sdk.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/CameraOpenCV/Assets/windows-sdk.png
--------------------------------------------------------------------------------
/CameraOpenCV/Package.appxmanifest:
--------------------------------------------------------------------------------
17 | CameraOpenCV C# Sample
18 | Microsoft Corporation
19 | Assets\StoreLogo-sdk.png
--------------------------------------------------------------------------------
/CameraOpenCV/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | using System.Reflection;
2 | using System.Runtime.InteropServices;
3 |
4 | // General Information about an assembly is controlled through the following
5 | // set of attributes. Change these attribute values to modify the information
6 | // associated with an assembly.
7 | [assembly: AssemblyTitle("SDKTemplate")]
8 | [assembly: AssemblyDescription("")]
9 | [assembly: AssemblyConfiguration("")]
10 | [assembly: AssemblyCompany("Microsoft Corporation")]
11 | [assembly: AssemblyProduct("Windows Samples")]
12 | [assembly: AssemblyCopyright("Copyright @Microsoft Corporation")]
13 | [assembly: AssemblyTrademark("")]
14 | [assembly: AssemblyCulture("")]
15 |
16 | // Version information for an assembly consists of the following four values:
17 | //
18 | // Major Version
19 | // Minor Version
20 | // Build Number
21 | // Revision
22 | //
23 | // You can specify all the values or you can default the Build and Revision Numbers
24 | // by using the '*' as shown below:
25 | // [assembly: AssemblyVersion("1.0.*")]
26 | [assembly: AssemblyVersion("1.0.0.0")]
27 | [assembly: AssemblyFileVersion("1.0.0.0")]
28 | [assembly: ComVisible(false)]
29 |
--------------------------------------------------------------------------------
/CameraOpenCV/Properties/Default.rd.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/OpenCvSharpSamples.sln.DotSettings:
--------------------------------------------------------------------------------
1 |
2 | HSV
3 | SVM
4 | <Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" />
5 | True
6 | True
7 | True
8 | True
9 | True
10 | True
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # opencvsharp_samples
2 |
3 | Code samples for https://github.com/shimat/opencvsharp
4 |
5 | - `SamplesCore` C# (.NET Core / .NET Framework) samples
6 | - `SamplesVB` VB.NET samples
7 | - `SampleBase` common library
8 | - `CameraOpenCV` UWP sample
9 |
--------------------------------------------------------------------------------
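
Most of the C# samples in this repository come down to the same OpenCvSharp pattern: load a Mat, process it, and display it in a Window. The sketch below is only for orientation and is not a file from the repository; the image path is an assumption based on the data files under SampleBase/Data/Image.

    // Minimal OpenCvSharp sketch (not part of the repository) showing the
    // load-process-show pattern the samples below follow.
    using OpenCvSharp;

    internal static class MinimalExample
    {
        private static void Main()
        {
            // Load an image in color mode; Mat is IDisposable, so wrap it in using.
            using var src = Cv2.ImRead("Data/Image/lenna.png", ImreadModes.Color);
            if (src.Empty())
                return;

            // Convert to grayscale and show both images until a key is pressed.
            using var gray = src.CvtColor(ColorConversionCodes.BGR2GRAY);
            using (new Window("src", src))
            using (new Window("gray", gray))
            {
                Cv2.WaitKey();
            }
        }
    }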
/SampleBase/Console/ConsoleMessagePrinter.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using SampleBase.Interfaces;
3 |
4 | namespace SampleBase.Console
5 | {
6 | public class ConsoleMessagePrinter : IMessagePrinter
7 | {
8 | public ConsoleMessagePrinter()
9 | {
10 | }
11 |
12 | public void PrintColorInfo(string message, bool newLine = true, ConsoleColor consoleColor = ConsoleColor.White)
13 | {
14 | PrintObject(message, newLine, consoleColor);
15 | }
16 |
17 | public void PrintObject(object obj, bool newLine = true, ConsoleColor consoleColor = ConsoleColor.White)
18 | {
19 | System.Console.ForegroundColor = consoleColor;
20 | if (newLine)
21 | System.Console.WriteLine(obj);
22 | else
23 | System.Console.Write(obj);
24 | System.Console.ResetColor();
25 | }
26 |
27 | public void PrintInfo(string message, bool newLine = true)
28 | {
29 | PrintColorInfo(message, newLine);
30 | }
31 |
32 | public void PrintWarning(string message, bool newLine = true)
33 | {
34 | PrintColorInfo(message, newLine, ConsoleColor.Yellow);
35 | }
36 |
37 | public void PrintError(string message, bool newLine = true)
38 | {
39 | PrintColorInfo(message, newLine, ConsoleColor.DarkRed);
40 | }
41 |
42 | public void PrintSuccess(string message, bool newLine = true)
43 | {
44 | PrintColorInfo(message, newLine, ConsoleColor.DarkGreen);
45 | }
46 |
47 | public void PrintDateTime(DateTime? time, bool newLine = true)
48 | {
49 | if (!time.HasValue)
50 | time = DateTime.Now;
51 | PrintColorInfo(time.Value.ToString("yyyy-MM-dd HH:mm:ss"), newLine);
52 | }
53 |
54 | public void PrintTime(DateTime? time, bool newLine = true)
55 | {
56 | if (!time.HasValue)
57 | time = DateTime.Now;
58 | PrintColorInfo(time.Value.ToString("HH:mm:ss"), newLine);
59 | }
60 |
61 | public void PrintLine(bool newLine = true)
62 | {
63 | PrintColorInfo("----------------------------------------", newLine);
64 | }
65 |
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/SampleBase/Console/ConsoleTestBase.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using SampleBase.Interfaces;
3 |
4 | namespace SampleBase.Console
5 | {
6 | public abstract class ConsoleTestBase : ITestBase
7 | {
8 | private readonly IMessagePrinter msgPrinter;
9 |
10 | public string Name { get; set; }
11 |
12 | protected ConsoleTestBase(string title)
13 | {
14 | Name = title;
15 | msgPrinter = new ConsoleMessagePrinter();
16 | }
17 |
18 | protected ConsoleTestBase()
19 | {
20 | Name = GetType().Name;
21 | msgPrinter = new ConsoleMessagePrinter();
22 | }
23 |
24 | public abstract void RunTest();
25 |
26 | public void PrintInfo(string message, bool newLine = true)
27 | {
28 | msgPrinter.PrintInfo(message, newLine);
29 | }
30 |
31 | public void PrintObject(object obj, bool newLine = true, ConsoleColor consoleColor = ConsoleColor.White)
32 | {
33 | msgPrinter.PrintObject(obj, newLine, consoleColor);
34 | }
35 |
36 | public void PrintWarning(string message, bool newLine = true)
37 | {
38 | msgPrinter.PrintWarning(message, newLine);
39 | }
40 |
41 | public void PrintError(string message, bool newLine = true)
42 | {
43 | msgPrinter.PrintError(message, newLine);
44 | }
45 |
46 | public void PrintSuccess(string message, bool newLine = true)
47 | {
48 | msgPrinter.PrintSuccess(message, newLine);
49 | }
50 |
51 | public IMessagePrinter GetMessagePrinter()
52 | {
53 | return msgPrinter;
54 | }
55 |
56 | public string? WaitToInput()
57 | {
58 | return System.Console.ReadLine();
59 | }
60 |
61 | public void WaitToContinue(string? tip = null)
62 | {
63 | if (tip != null)
64 | System.Console.WriteLine(tip);
65 | System.Console.ReadLine();
66 | }
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
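
Every concrete sample shown later (BRISKSample, ClaheSample, and so on) plugs into this class the same way: derive from ConsoleTestBase and override RunTest. A minimal sketch of that pattern follows; the class name and messages are invented for illustration and are not part of the repository.

    using SampleBase.Console;

    namespace SamplesCore;

    // Hypothetical sample class illustrating the ConsoleTestBase pattern.
    internal class HelloSample : ConsoleTestBase
    {
        public override void RunTest()
        {
            PrintInfo("Running HelloSample...");
            PrintSuccess("Done.");   // dark green output via ConsoleMessagePrinter
            WaitToContinue("Press Enter to return to the menu");
        }
    }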
/SampleBase/Data/Image/16bit.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/16bit.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Balloon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Balloon.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/00.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/00.JPG
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/01.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/01.JPG
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/02.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/02.JPG
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left01.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left01.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left02.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left02.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left03.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left03.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left04.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left04.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left05.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left05.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left06.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left06.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left07.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left07.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left08.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left08.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left09.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left09.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left10.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left10.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left11.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left11.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left12.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left12.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/left13.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/left13.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right01.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right01.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right02.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right02.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right03.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right03.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right04.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right04.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right05.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right05.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right06.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right06.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right07.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right07.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right08.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right08.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right09.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right09.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right10.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right10.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right11.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right11.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right12.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right12.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Calibration/right13.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Calibration/right13.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Girl.bmp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Girl.bmp
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Mandrill.bmp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Mandrill.bmp
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Squares/pic1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Squares/pic1.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Squares/pic2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Squares/pic2.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Squares/pic3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Squares/pic3.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Squares/pic4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Squares/pic4.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Squares/pic5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Squares/pic5.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/Squares/pic6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/Squares/pic6.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/aruco_markers_photo.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/aruco_markers_photo.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/aruco_markers_source.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/aruco_markers_source.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/asahiyama.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/asahiyama.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/binarization_sample.bmp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/binarization_sample.bmp
--------------------------------------------------------------------------------
/SampleBase/Data/Image/box.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/box.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/box_in_scene.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/box_in_scene.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/cake.bmp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/cake.bmp
--------------------------------------------------------------------------------
/SampleBase/Data/Image/cat.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/cat.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/fruits.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/fruits.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/goryokaku.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/goryokaku.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/hand_p.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/hand_p.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/lenna.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/lenna.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/lenna511.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/lenna511.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/maltese.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/maltese.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/match1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/match1.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/match2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/match2.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/penguin1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/penguin1.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/penguin1b.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/penguin1b.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/penguin2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/penguin2.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/shapes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/shapes.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/space_shuttle.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/space_shuttle.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/tsukuba_left.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/tsukuba_left.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/tsukuba_right.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/tsukuba_right.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/very_old_newspaper.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/very_old_newspaper.png
--------------------------------------------------------------------------------
/SampleBase/Data/Image/walkman.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/walkman.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Image/yalta.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Image/yalta.jpg
--------------------------------------------------------------------------------
/SampleBase/Data/Model/FSRCNN_x4.pb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Model/FSRCNN_x4.pb
--------------------------------------------------------------------------------
/SampleBase/Data/Movie/bach.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Movie/bach.mp4
--------------------------------------------------------------------------------
/SampleBase/Data/Movie/hara.flv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Movie/hara.flv
--------------------------------------------------------------------------------
/SampleBase/Data/Text/camera.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 3
5 | 3
6 | f
7 |
8 | 479.56344604 0. 307.93475342 0. 478.32070923 235.81101990 0. 0. 1.
9 |
10 | 1
11 | 3
12 | f
13 |
14 | 3.07101631 0.23497446 0.44556716
15 |
16 | 1
17 | 3
18 | f
19 |
20 | -74.40477753 74.23780060 291.24246216
21 |
22 | 1
23 | 4
24 | f
25 |
26 | -0.23750813 0.13474102 8.73253666e-05 -3.74954601e-04
27 |
28 |
--------------------------------------------------------------------------------
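
The numbers that survive above are standard OpenCV calibration data: a 3x3 intrinsic camera matrix, two 1x3 vectors (most likely rotation and translation), and a 1x4 distortion vector; the surrounding FileStorage element names were stripped from this extract. As a sketch only, such a file can be read with OpenCvSharp's FileStorage as shown below. The node names used here are assumptions and must be adjusted to whatever camera.xml actually contains.

    using System;
    using OpenCvSharp;

    internal static class CalibrationLoader
    {
        // Hypothetical loader for an OpenCV FileStorage file like camera.xml.
        // "camera_matrix" and "distortion" are assumed node names.
        public static (Mat CameraMatrix, Mat DistCoeffs) Load(string path)
        {
            using var fs = new FileStorage(path, FileStorage.Modes.Read);

            // ReadMat() materializes a matrix node: the 3x3 float block is the
            // intrinsic matrix, the 1x4 block the distortion coefficients.
            var cameraMatrix = fs["camera_matrix"]?.ReadMat()
                               ?? throw new InvalidOperationException("camera_matrix node not found");
            var distCoeffs = fs["distortion"]?.ReadMat()
                             ?? throw new InvalidOperationException("distortion node not found");
            return (cameraMatrix, distCoeffs);
        }
    }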
/SampleBase/Data/Text/cat.xml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SampleBase/Data/Text/cat.xml
--------------------------------------------------------------------------------
/SampleBase/Interfaces/IMessagePrinter.cs:
--------------------------------------------------------------------------------
1 | using System;
2 |
3 | namespace SampleBase.Interfaces
4 | {
5 | public interface IMessagePrinter
6 | {
7 | void PrintColorInfo(string message, bool newLine = true, ConsoleColor consoleColor = ConsoleColor.White);
8 |
9 | void PrintObject(object obj, bool newLine = true, ConsoleColor consoleColor = ConsoleColor.White);
10 |
11 | void PrintInfo(string message, bool newLine = true);
12 |
13 | void PrintWarning(string message, bool newLine = true);
14 |
15 | void PrintError(string message, bool newLine = true);
16 |
17 | void PrintSuccess(string message, bool newLine = true);
18 |
19 | void PrintDateTime(DateTime? time, bool newLine = true);
20 |
21 | void PrintTime(DateTime? time, bool newLine = true);
22 |
23 | void PrintLine(bool newLine = true);
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/SampleBase/Interfaces/ITestBase.cs:
--------------------------------------------------------------------------------
1 | using System;
2 |
3 | namespace SampleBase.Interfaces
4 | {
5 | ///
6 | /// Basic interface for test classes
7 | ///
8 | public interface ITestBase
9 | {
10 | ///
11 | /// Test name, which is used to distinguish between different test cases
12 | ///
13 | string Name { get; }
14 |
15 | ///
16 | /// Print normal message
17 | ///
18 | ///
19 | ///
20 | void PrintInfo(string message, bool newLine = true);
21 |
22 | ///
23 | /// Print object info
24 | ///
25 | ///
26 | ///
27 | ///
28 | void PrintObject(object obj, bool newLine = true, ConsoleColor consoleColor = ConsoleColor.White);
29 |
30 | ///
31 | /// Print warning message
32 | ///
33 | ///
34 | ///
35 | void PrintWarning(string message, bool newLine = true);
36 |
37 | ///
38 | /// Print error message
39 | ///
40 | ///
41 | ///
42 | void PrintError(string message, bool newLine = true);
43 |
44 | ///
45 | /// Print success message
46 | ///
47 | ///
48 | ///
49 | void PrintSuccess(string message, bool newLine = true);
50 |
51 | ///
52 | /// Get the message printer of the current test class
53 | ///
54 | ///
55 | IMessagePrinter GetMessagePrinter();
56 |
57 | ///
58 | /// Run current test
59 | ///
60 | void RunTest();
61 |
62 | ///
63 | /// Wait for console input and return the entered string
64 | ///
65 | ///
66 | string? WaitToInput();
67 |
68 | ///
69 | /// Show a tip message and wait until anything is entered
70 | ///
71 | /// Information string to be shown
72 | void WaitToContinue(string? tip = null);
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/SampleBase/Interfaces/ITestManager.cs:
--------------------------------------------------------------------------------
1 | using System.Collections.Generic;
2 |
3 | namespace SampleBase.Interfaces
4 | {
5 | ///
6 | /// Basic interface for a manager of test classes
7 | ///
8 | public interface ITestManager
9 | {
10 | ///
11 | /// Add a test to the management collection
12 | ///
13 | ///
14 | void AddTest(ITestBase test);
15 |
16 | ///
17 | /// Remove a test from the management collection
18 | ///
19 | ///
20 | void RemoveTest(ITestBase test);
21 |
22 | ///
23 | /// Remove all tests from the management collection
24 | ///
25 | void ClearTests();
26 |
27 | ///
28 | /// Show a list of all tests
29 | ///
30 | void ShowTestNames();
31 |
32 | ///
33 | /// Show an access list of all tests
34 | ///
35 | void ShowTestEntrance();
36 |
37 | ///
38 | /// Get a test by its name
39 | ///
40 | ///
41 | ///
42 | ITestBase? GetTest(string testName);
43 |
44 | ///
45 | /// Get all tests in the management collection
46 | ///
47 | ///
48 | IReadOnlyList<ITestBase> GetAllTests();
49 | }
50 |
51 | public static class TestManagerExtensions
52 | {
53 | public static void AddTests(this ITestManager testManager, params ITestBase[] tests)
54 | {
55 | foreach (var test in tests)
56 | {
57 | testManager.AddTest(test);
58 | }
59 | }
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
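
ConsoleTestManager.cs implements this interface but is only listed in the tree above, not reproduced in this extract. The sketch below is a hypothetical minimal implementation of the contract, not the repository's actual ConsoleTestManager.

    using System.Collections.Generic;
    using System.Linq;
    using SampleBase.Interfaces;

    namespace SampleBase.Console
    {
        // Hypothetical ITestManager implementation for illustration only.
        public class SimpleTestManager : ITestManager
        {
            private readonly List<ITestBase> tests = new();

            public void AddTest(ITestBase test) => tests.Add(test);
            public void RemoveTest(ITestBase test) => tests.Remove(test);
            public void ClearTests() => tests.Clear();

            public void ShowTestNames()
            {
                foreach (var t in tests)
                    System.Console.WriteLine(t.Name);
            }

            public void ShowTestEntrance()
            {
                // Print the test names, read one name from the console, run it.
                ShowTestNames();
                var input = System.Console.ReadLine();
                GetTest(input ?? string.Empty)?.RunTest();
            }

            public ITestBase? GetTest(string testName) =>
                tests.FirstOrDefault(t => t.Name == testName);

            public IReadOnlyList<ITestBase> GetAllTests() => tests;
        }
    }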
/SampleBase/MyProcess.cs:
--------------------------------------------------------------------------------
1 | using System.Diagnostics;
2 |
3 | namespace SampleBase
4 | {
5 | ///
6 | ///
7 | ///
8 | public static class MyProcess
9 | {
10 | ///
11 | /// Physical memory usage (working set)
12 | ///
13 | ///
14 | public static long WorkingSet64
15 | {
16 | get
17 | {
18 | using (var proc = GetCurrentProcess())
19 | {
20 | return proc.WorkingSet64;
21 | }
22 | }
23 | }
24 |
25 | ///
26 | /// Virtual memory usage
27 | ///
28 | ///
29 | public static long VirtualMemorySize64
30 | {
31 | get
32 | {
33 | using (var proc = GetCurrentProcess())
34 | {
35 | return proc.VirtualMemorySize64;
36 | }
37 | }
38 | }
39 |
40 | ///
41 | /// Peak paged memory usage
42 | ///
43 | ///
44 | public static long PeakPagedMemorySize64
45 | {
46 | get
47 | {
48 | using (var proc = GetCurrentProcess())
49 | {
50 | return proc.PeakPagedMemorySize64;
51 | }
52 | }
53 | }
54 |
55 | ///
56 | /// Peak virtual memory usage
57 | ///
58 | ///
59 | public static long PeakVirtualMemorySize64
60 | {
61 | get
62 | {
63 | using (var proc = GetCurrentProcess())
64 | {
65 | return proc.PeakVirtualMemorySize64;
66 | }
67 | }
68 | }
69 |
70 | private static Process GetCurrentProcess()
71 | {
72 | var proc = Process.GetCurrentProcess();
73 | proc.Refresh();
74 | return proc;
75 | }
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
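
A hypothetical usage sketch: the properties above let a sample log memory consumption before and after a heavy OpenCV operation.

    using System;
    using SampleBase;

    // Report current and peak memory usage in megabytes.
    Console.WriteLine($"Working set:         {MyProcess.WorkingSet64 / (1024 * 1024)} MB");
    Console.WriteLine($"Virtual memory:      {MyProcess.VirtualMemorySize64 / (1024 * 1024)} MB");
    Console.WriteLine($"Peak paged memory:   {MyProcess.PeakPagedMemorySize64 / (1024 * 1024)} MB");
    Console.WriteLine($"Peak virtual memory: {MyProcess.PeakVirtualMemorySize64 / (1024 * 1024)} MB");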
/SampleBase/SampleBase.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | net48;net8.0;
5 | 11
6 | enable
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 | PreserveNewest
16 |
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/SamplesCore.Windows/Data/Image/box_in_scene.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SamplesCore.Windows/Data/Image/box_in_scene.png
--------------------------------------------------------------------------------
/SamplesCore.Windows/Data/Image/fruits.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shimat/opencvsharp_samples/42322d297b313d9f115cba80773f2496a8c9cd9f/SamplesCore.Windows/Data/Image/fruits.jpg
--------------------------------------------------------------------------------
/SamplesCore.Windows/FilePath.cs:
--------------------------------------------------------------------------------
1 | namespace SamplesCore
2 | {
3 | ///
4 | /// Paths
5 | ///
6 | internal static class FilePath
7 | {
8 | public static class Image
9 | {
10 | public const string Fruits = "Data/Image/fruits.jpg";
11 | public const string SurfBoxinscene = "Data/Image/box_in_scene.png";
12 | }
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/SamplesCore.Windows/ISample.cs:
--------------------------------------------------------------------------------
1 | namespace SamplesCore.Windows
2 | {
3 | interface ISample
4 | {
5 | void Run();
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/SamplesCore.Windows/Program.cs:
--------------------------------------------------------------------------------
1 | using System;
2 |
3 | namespace SamplesCore.Windows
4 | {
5 | class Program
6 | {
7 | [STAThread]
8 | static void Main(string[] args)
9 | {
10 | ISample sample =
11 | new MatToWriteableBitmap();
12 |
13 | sample.Run();
14 | }
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/SamplesCore.Windows/Samples/MatToWriteableBitmap.cs:
--------------------------------------------------------------------------------
1 | using System.Windows;
2 | using OpenCvSharp;
3 | using OpenCvSharp.WpfExtensions;
4 |
5 | namespace SamplesCore.Windows
6 | {
7 | ///
8 | ///
9 | ///
10 | class MatToWriteableBitmap : ISample
11 | {
12 | public void Run()
13 | {
14 | using var mat = new Mat(FilePath.Image.Fruits, ImreadModes.Color); // width % 4 != 0
15 |
16 | var wb = WriteableBitmapConverter.ToWriteableBitmap(mat);
17 |
18 | var image = new System.Windows.Controls.Image
19 | {
20 | Source = wb
21 | };
22 |
23 | var window = new System.Windows.Window
24 | {
25 | Content = image
26 | };
27 |
28 | var app = new Application();
29 | app.Run(window);
30 | }
31 | }
32 | }
--------------------------------------------------------------------------------
/SamplesCore.Windows/SamplesCore.Windows.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | WinExe
5 | net8.0-windows
6 | true
7 | true
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 | PreserveNewest
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 | PreserveNewest
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/SamplesCore/Program.cs:
--------------------------------------------------------------------------------
1 | using SampleBase.Interfaces;
2 | using System;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | public static class Program
8 | {
9 | [STAThread]
10 | public static void Main()
11 | {
12 | Console.WriteLine("Runtime Version = {0}", Environment.Version);
13 |
14 | ITestManager testManager = new ConsoleTestManager();
15 |
16 | testManager.AddTests(
17 | new ArucoSample(),
18 | new BgSubtractorMOG(),
19 | new BinarizerSample(),
20 | new BRISKSample(),
21 | new CaffeSample(),
22 | new CameraCaptureSample(),
23 | new ClaheSample(),
24 | new ConnectedComponentsSample(),
25 | new DFT(),
26 | new DnnSuperresSample(),
27 | new DrawBestMatchRectangle(),
28 | new FaceDetection(),
29 | new FaceDetectionDNN(),
30 | new FASTSample(),
31 | new FlannSample(),
32 | new FREAKSample(),
33 | new HandPose(),
34 | new HistSample(),
35 | new HOGSample(),
36 | new HoughLinesSample(),
37 | new InpaintSample(),
38 | new KAZESample(),
39 | new KAZESample2(),
40 | new MatOperations(),
41 | new MDS(),
42 | new MergeSplitSample(),
43 | new MorphologySample(),
44 | new MSERSample(),
45 | new NormalArrayOperations(),
46 | new OpenVinoFaceDetection(),
47 | new PhotoMethods(),
48 | new PixelAccess(),
49 | new Pose(),
50 | new SeamlessClone(),
51 | new SiftSurfSample(),
52 | new SimpleBlobDetectorSample(),
53 | new SolveEquation(),
54 | new StarDetectorSample(),
55 | new Stitching(),
56 | new Subdiv2DSample(),
57 | new SuperResolutionSample(),
58 | new SVMSample(),
59 | new VideoWriterSample(),
60 | new VideoCaptureSample(),
61 | new WatershedSample());
62 |
63 | testManager.ShowTestEntrance();
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/SamplesCore/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | using System.Reflection;
2 | using System.Runtime.InteropServices;
3 |
4 | // General Information about an assembly is controlled through the following
5 | // set of attributes. Change these attribute values to modify the information
6 | // associated with an assembly.
7 | [assembly: AssemblyConfiguration("")]
8 | [assembly: AssemblyCompany("")]
9 | [assembly: AssemblyProduct("SamplesCore")]
10 | [assembly: AssemblyTrademark("")]
11 |
12 | // Setting ComVisible to false makes the types in this assembly not visible
13 | // to COM components. If you need to access a type in this assembly from
14 | // COM, set the ComVisible attribute to true on that type.
15 | [assembly: ComVisible(false)]
16 |
17 | // The following GUID is for the ID of the typelib if this project is exposed to COM
18 | [assembly: Guid("ad7c47b3-93c8-4e55-ab7e-1efc450abf30")]
19 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/BRISKSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | ///
8 | /// Retrieves keypoints using the BRISK algorithm.
9 | ///
10 | class BRISKSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
15 | var dst = new Mat(ImagePath.Lenna, ImreadModes.Color);
16 |
17 | using var brisk = BRISK.Create();
18 | KeyPoint[] keypoints = brisk.Detect(gray);
19 |
20 | if (keypoints != null)
21 | {
22 | var color = new Scalar(0, 255, 0);
23 | foreach (KeyPoint kpt in keypoints)
24 | {
25 | float r = kpt.Size / 2;
26 | Cv2.Circle(dst, (Point)kpt.Pt, (int)r, color);
27 | Cv2.Line(dst,
28 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y + r),
29 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y - r),
30 | color);
31 | Cv2.Line(dst,
32 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y + r),
33 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y - r),
34 | color);
35 | }
36 | }
37 |
38 | using (new Window("BRISK features", dst))
39 | {
40 | Cv2.WaitKey();
41 | }
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/BgSubtractorMOG.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | class BgSubtractorMOG : ConsoleTestBase
8 | {
9 | public override void RunTest()
10 | {
11 | using var capture = new VideoCapture(MoviePath.Bach);
12 | using var mog = BackgroundSubtractorMOG.Create();
13 | using var windowSrc = new Window("src");
14 | using var windowDst = new Window("dst");
15 |
16 | using var frame = new Mat();
17 | using var fg = new Mat();
18 | while (true)
19 | {
20 | capture.Read(frame);
21 | if (frame.Empty())
22 | break;
23 | mog.Apply(frame, fg, 0.01);
24 |
25 | windowSrc.Image = frame;
26 | windowDst.Image = fg;
27 | Cv2.WaitKey(50);
28 | }
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/BinarizerSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using OpenCvSharp;
4 | using OpenCvSharp.XImgProc;
5 | using SampleBase;
6 | using SampleBase.Console;
7 |
8 | namespace SamplesCore;
9 |
10 | internal class BinarizerSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using var src = Cv2.ImRead(ImagePath.Binarization, ImreadModes.Grayscale);
15 | using var niblack = new Mat();
16 | using var sauvola = new Mat();
17 | using var nick = new Mat();
18 | int kernelSize = 51;
19 |
20 | var sw = new Stopwatch();
21 | sw.Start();
22 | CvXImgProc.NiblackThreshold(src, niblack, 255, ThresholdTypes.Binary, kernelSize, -0.2, LocalBinarizationMethods.Niblack);
23 | sw.Stop();
24 | Console.WriteLine($"Niblack {sw.ElapsedMilliseconds} ms");
25 |
26 | sw.Restart();
27 | CvXImgProc.NiblackThreshold(src, sauvola, 255, ThresholdTypes.Binary, kernelSize, 0.1, LocalBinarizationMethods.Sauvola);
28 | sw.Stop();
29 | Console.WriteLine($"Sauvola {sw.ElapsedMilliseconds} ms");
30 |
31 | sw.Restart();
32 | CvXImgProc.NiblackThreshold(src, nick, 255, ThresholdTypes.Binary, kernelSize, -0.14, LocalBinarizationMethods.Nick);
33 | sw.Stop();
34 | Console.WriteLine($"Nick {sw.ElapsedMilliseconds} ms");
35 |
36 | using (new Window("src", src, WindowFlags.AutoSize))
37 | using (new Window("Niblack", niblack, WindowFlags.AutoSize))
38 | using (new Window("Sauvola", sauvola, WindowFlags.AutoSize))
39 | using (new Window("Nick", nick, WindowFlags.AutoSize))
40 | {
41 | Cv2.WaitKey();
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/CameraCaptureSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase.Console;
3 |
4 | namespace SamplesCore;
5 |
6 | ///
7 | ///
8 | ///
9 | class CameraCaptureSample : ConsoleTestBase
10 | {
11 | public override void RunTest()
12 | {
13 | using var capture = new VideoCapture(0, VideoCaptureAPIs.DSHOW);
14 | if (!capture.IsOpened())
15 | return;
16 |
17 | capture.FrameWidth = 1920;
18 | capture.FrameHeight = 1280;
19 | capture.AutoFocus = true;
20 |
21 | const int sleepTime = 10;
22 |
23 | using var window = new Window("capture");
24 | var image = new Mat();
25 |
26 | while (true)
27 | {
28 | capture.Read(image);
29 | if (image.Empty())
30 | break;
31 |
32 | window.ShowImage(image);
33 | int c = Cv2.WaitKey(sleepTime);
34 | if (c >= 0)
35 | {
36 | break;
37 | }
38 | }
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/ClaheSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | class ClaheSample : ConsoleTestBase
8 | {
9 | public override void RunTest()
10 | {
11 | using var src = new Mat(ImagePath.TsukubaLeft, ImreadModes.Grayscale);
12 | using var dst1 = new Mat();
13 | using var dst2 = new Mat();
14 | using var dst3 = new Mat();
15 |
16 | using (var clahe = Cv2.CreateCLAHE())
17 | {
18 | clahe.ClipLimit = 20;
19 | clahe.Apply(src, dst1);
20 | clahe.ClipLimit = 40;
21 | clahe.Apply(src, dst2);
22 | clahe.TilesGridSize = new Size(4, 4);
23 | clahe.Apply(src, dst3);
24 | }
25 |
26 | Window.ShowImages(
27 | new[] { src, dst1, dst2, dst3 },
28 | new[] { "src", "dst clip20", "dst clip40", "dst tile4x4" });
29 | }
30 | }
31 |
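CLAHE operates on single-channel images, which is why the sample loads the picture as grayscale. For color input a common pattern is to equalize only the lightness channel in Lab space; a minimal sketch under that assumption (ColorConversionCodes.BGR2Lab/Lab2BGR and Cv2.Split/Merge are not used in the sample above):

using var bgr = new Mat(ImagePath.Lenna, ImreadModes.Color);
using var lab = new Mat();
Cv2.CvtColor(bgr, lab, ColorConversionCodes.BGR2Lab);
Cv2.Split(lab, out var channels);              // channels[0] is the L (lightness) plane
using (var clahe = Cv2.CreateCLAHE(2.0, new Size(8, 8)))
{
    clahe.Apply(channels[0], channels[0]);     // equalize only the lightness channel
}
Cv2.Merge(channels, lab);
using var equalized = new Mat();
Cv2.CvtColor(lab, equalized, ColorConversionCodes.Lab2BGR);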
--------------------------------------------------------------------------------
/SamplesCore/Samples/ConnectedComponentsSample.cs:
--------------------------------------------------------------------------------
1 | using System.Linq;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | ///
9 | ///
10 | ///
11 | class ConnectedComponentsSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | using var src = new Mat(ImagePath.Shapes, ImreadModes.Color);
16 | using var gray = src.CvtColor(ColorConversionCodes.BGR2GRAY);
17 | using var binary = gray.Threshold(0, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);
18 | using var labelView = src.EmptyClone();
19 | using var rectView = binary.CvtColor(ColorConversionCodes.GRAY2BGR);
20 |
21 | var cc = Cv2.ConnectedComponentsEx(binary);
22 | if (cc.LabelCount <= 1)
23 | return;
24 |
25 | // draw labels
26 | cc.RenderBlobs(labelView);
27 |
28 |         // draw bounding boxes except background
29 | foreach (var blob in cc.Blobs.Skip(1))
30 | {
31 | rectView.Rectangle(blob.Rect, Scalar.Red);
32 | }
33 |
34 | // filter maximum blob
35 | var maxBlob = cc.GetLargestBlob();
36 | var filtered = new Mat();
37 | cc.FilterByBlob(src, filtered, maxBlob);
38 |
39 | using (new Window("src", src))
40 | using (new Window("binary", binary))
41 | using (new Window("labels", labelView))
42 |         using (new Window("bounding boxes", rectView))
43 | using (new Window("maximum blob", filtered))
44 | {
45 | Cv2.WaitKey();
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/DnnSuperresSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using OpenCvSharp.DnnSuperres;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | class DnnSuperresSample : ConsoleTestBase
9 | {
10 | // https://github.com/Saafke/FSRCNN_Tensorflow/tree/master/models
11 | private const string ModelFileName = "Data/Model/FSRCNN_x4.pb";
12 |
13 | public override void RunTest()
14 | {
15 | using var dnn = new DnnSuperResImpl("fsrcnn", 4);
16 | dnn.ReadModel(ModelFileName);
17 |
18 | using var src = new Mat(ImagePath.Mandrill, ImreadModes.Color);
19 | using var dst = new Mat();
20 | dnn.Upsample(src, dst);
21 |
22 | Window.ShowImages(
23 | new[] { src, dst },
24 | new[] { "src", "dst0" });
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/DrawBestMatch.cs:
--------------------------------------------------------------------------------
1 | using System.Linq;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | /// <summary>
9 | /// https://stackoverflow.com/questions/51606215/how-to-draw-bounding-box-on-best-matches/51607041#51607041
10 | /// </summary>
11 | class DrawBestMatchRectangle : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | using var img1 = new Mat(ImagePath.Match1, ImreadModes.Color);
16 | using var img2 = new Mat(ImagePath.Match2, ImreadModes.Color);
17 |
18 | using var orb = ORB.Create(1000);
19 | using var descriptors1 = new Mat();
20 | using var descriptors2 = new Mat();
21 | orb.DetectAndCompute(img1, null, out var keyPoints1, descriptors1);
22 | orb.DetectAndCompute(img2, null, out var keyPoints2, descriptors2);
23 |
24 | using var bf = new BFMatcher(NormTypes.Hamming, crossCheck: true);
25 | var matches = bf.Match(descriptors1, descriptors2);
26 |
27 | var goodMatches = matches
28 | .OrderBy(x => x.Distance)
29 | .Take(10)
30 | .ToArray();
31 |
32 | var srcPts = goodMatches.Select(m => keyPoints1[m.QueryIdx].Pt).Select(p => new Point2d(p.X, p.Y));
33 | var dstPts = goodMatches.Select(m => keyPoints2[m.TrainIdx].Pt).Select(p => new Point2d(p.X, p.Y));
34 |
35 | using var homography = Cv2.FindHomography(srcPts, dstPts, HomographyMethods.Ransac, 5, null);
36 |
37 | int h = img1.Height, w = img1.Width;
38 | var img2Bounds = new[]
39 | {
40 | new Point2d(0, 0),
41 | new Point2d(0, h-1),
42 | new Point2d(w-1, h-1),
43 | new Point2d(w-1, 0),
44 | };
45 | var img2BoundsTransformed = Cv2.PerspectiveTransform(img2Bounds, homography);
46 |
47 | using var view = img2.Clone();
48 | var drawingPoints = img2BoundsTransformed.Select(p => (Point)p).ToArray();
49 | Cv2.Polylines(view, new[] { drawingPoints }, true, Scalar.Red, 3);
50 |
51 | using (new Window("view", view))
52 | {
53 | Cv2.WaitKey();
54 | }
55 | }
56 | }
57 |
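To inspect the correspondences themselves rather than only the projected outline, Cv2.DrawMatches (not used in the sample above) renders both images side by side with the match lines; a minimal sketch reusing keyPoints1, keyPoints2 and goodMatches:

using var matchView = new Mat();
// Draw the ten best matches between the two images
Cv2.DrawMatches(img1, keyPoints1, img2, keyPoints2, goodMatches, matchView);
using (new Window("matches", matchView))
{
    Cv2.WaitKey();
}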
--------------------------------------------------------------------------------
/SamplesCore/Samples/FASTSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | /// <summary>
8 | /// cv::FAST
9 | /// </summary>
10 | class FASTSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using Mat imgSrc = new Mat(ImagePath.Lenna, ImreadModes.Color);
15 | using Mat imgGray = new Mat();
16 | using Mat imgDst = imgSrc.Clone();
17 | Cv2.CvtColor(imgSrc, imgGray, ColorConversionCodes.BGR2GRAY, 0);
18 |
19 | KeyPoint[] keypoints = Cv2.FAST(imgGray, 50, true);
20 |
21 | foreach (KeyPoint kp in keypoints)
22 | {
23 | imgDst.Circle((Point)kp.Pt, 3, Scalar.Red, -1, LineTypes.AntiAlias, 0);
24 | }
25 |
26 | Cv2.ImShow("FAST", imgDst);
27 | Cv2.WaitKey(0);
28 | Cv2.DestroyAllWindows();
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/FREAKSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using OpenCvSharp.XFeatures2D;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | /// <summary>
9 | /// Retrieves keypoints using the FREAK algorithm.
10 | /// </summary>
11 | class FREAKSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | using var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
16 | using var dst = new Mat(ImagePath.Lenna, ImreadModes.Color);
17 |
18 | // ORB
19 | using var orb = ORB.Create(1000);
20 | KeyPoint[] keypoints = orb.Detect(gray);
21 |
22 | // FREAK
23 | using var freak = FREAK.Create();
24 | Mat freakDescriptors = new Mat();
25 | freak.Compute(gray, ref keypoints, freakDescriptors);
26 |
27 | if (keypoints != null)
28 | {
29 | var color = new Scalar(0, 255, 0);
30 | foreach (KeyPoint kpt in keypoints)
31 | {
32 | float r = kpt.Size / 2;
33 | Cv2.Circle(dst, (Point)kpt.Pt, (int)r, color);
34 | Cv2.Line(dst,
35 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y + r),
36 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y - r),
37 | color);
38 | Cv2.Line(dst,
39 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y + r),
40 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y - r),
41 | color);
42 | }
43 | }
44 |
45 | using (new Window("FREAK", dst))
46 | {
47 | Cv2.WaitKey();
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/FaceDetection.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | /// <summary>
8 | /// Human face detection
9 | /// http://docs.opencv.org/doc/tutorials/objdetect/cascade_classifier/cascade_classifier.html
10 | /// </summary>
11 | class FaceDetection : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | // Load the cascades
16 | using var haarCascade = new CascadeClassifier(TextPath.HaarCascade);
17 | using var lbpCascade = new CascadeClassifier(TextPath.LbpCascade);
18 |
19 | // Detect faces
20 | Mat haarResult = DetectFace(haarCascade);
21 | Mat lbpResult = DetectFace(lbpCascade);
22 |
23 | Cv2.ImShow("Faces by Haar", haarResult);
24 | Cv2.ImShow("Faces by LBP", lbpResult);
25 | Cv2.WaitKey(0);
26 | Cv2.DestroyAllWindows();
27 | }
28 |
29 |     /// <summary>
30 |     /// Detects faces in the sample image with the given cascade and draws an ellipse around each hit.
31 |     /// </summary>
32 |     /// <param name="cascade">Cascade classifier to use for detection</param>
33 |     /// <returns>Copy of the source image with the detected faces rendered</returns>
34 | private Mat DetectFace(CascadeClassifier cascade)
35 | {
36 | Mat result;
37 |
38 | using (var src = new Mat(ImagePath.Yalta, ImreadModes.Color))
39 | using (var gray = new Mat())
40 | {
41 | result = src.Clone();
42 | Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
43 |
44 | // Detect faces
45 | Rect[] faces = cascade.DetectMultiScale(
46 | gray, 1.08, 2, HaarDetectionTypes.ScaleImage, new Size(30, 30));
47 |
48 | // Render all detected faces
49 | foreach (Rect face in faces)
50 | {
51 | var center = new Point
52 | {
53 | X = (int)(face.X + face.Width * 0.5),
54 | Y = (int)(face.Y + face.Height * 0.5)
55 | };
56 | var axes = new Size
57 | {
58 | Width = (int)(face.Width * 0.5),
59 | Height = (int)(face.Height * 0.5)
60 | };
61 | Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
62 | }
63 | }
64 | return result;
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/FaceDetectionDNN.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using OpenCvSharp.Dnn;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | /// <summary>
8 | /// To run this example, first download the face model available here: https://github.com/spmallick/learnopencv/tree/master/FaceDetectionComparison/models
9 | /// Add the files to the bin folder.
10 | /// You should also prepare the input image (faces.jpg) yourself.
11 | /// </summary>
12 | internal class FaceDetectionDNN : ConsoleTestBase
13 | {
14 | const string configFile = "deploy.prototxt";
15 | const string faceModel = "res10_300x300_ssd_iter_140000_fp16.caffemodel";
16 | const string image = "faces.jpg";
17 |
18 | public override void RunTest()
19 | {
20 | // Read sample image
21 | using var frame = Cv2.ImRead(image);
22 | int frameHeight = frame.Rows;
23 | int frameWidth = frame.Cols;
24 | using var faceNet = CvDnn.ReadNetFromCaffe(configFile, faceModel);
25 | using var blob = CvDnn.BlobFromImage(frame, 1.0, new Size(300, 300), new Scalar(104, 117, 123), false, false);
26 | faceNet.SetInput(blob, "data");
27 |
28 | using var detection = faceNet.Forward("detection_out");
29 | using var detectionMat = Mat.FromPixelData(detection.Size(2), detection.Size(3), MatType.CV_32F, detection.Ptr(0));
30 | for (int i = 0; i < detectionMat.Rows; i++)
31 | {
32 |             float confidence = detectionMat.At<float>(i, 2);
33 |
34 | if (confidence > 0.7)
35 | {
36 |                 int x1 = (int)(detectionMat.At<float>(i, 3) * frameWidth);
37 |                 int y1 = (int)(detectionMat.At<float>(i, 4) * frameHeight);
38 |                 int x2 = (int)(detectionMat.At<float>(i, 5) * frameWidth);
39 |                 int y2 = (int)(detectionMat.At<float>(i, 6) * frameHeight);
40 |
41 | Cv2.Rectangle(frame, new Point(x1, y1), new Point(x2, y2), new Scalar(0, 255, 0), 2, LineTypes.Link4);
42 | }
43 | }
44 |
45 | Window.ShowImages(frame);
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/FlannSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using OpenCvSharp.Flann;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | /// <summary>
9 | /// cv::flann
10 | /// </summary>
11 | class FlannSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | Console.WriteLine("===== FlannTest =====");
16 |
17 | // creates data set
18 | using (var features = new Mat(10000, 2, MatType.CV_32FC1))
19 | {
20 | var rand = new Random();
21 | for (int i = 0; i < features.Rows; i++)
22 | {
23 |                 features.Set<float>(i, 0, rand.Next(10000));
24 |                 features.Set<float>(i, 1, rand.Next(10000));
25 | }
26 |
27 | // query
28 | var queryPoint = new Point2f(7777, 7777);
29 | var queries = new Mat(1, 2, MatType.CV_32FC1);
30 |             queries.Set<float>(0, 0, queryPoint.X);
31 |             queries.Set<float>(0, 1, queryPoint.Y);
32 | Console.WriteLine("query:({0}, {1})", queryPoint.X, queryPoint.Y);
33 | Console.WriteLine("-----");
34 |
35 | // knnSearch
36 | using var nnIndex = new OpenCvSharp.Flann.Index(features, new KDTreeIndexParams(4));
37 | const int Knn = 1;
38 | nnIndex.KnnSearch(queries, out int[] indices, out float[] dists, Knn, new SearchParams(32));
39 |
40 | for (int i = 0; i < Knn; i++)
41 | {
42 | int index = indices[i];
43 | float dist = dists[i];
44 |                 var pt = new Point2f(features.Get<float>(index, 0), features.Get<float>(index, 1));
45 | Console.Write("No.{0}\t", i);
46 | Console.Write("index:{0}", index);
47 | Console.Write(" distance:{0}", dist);
48 | Console.Write(" data:({0}, {1})", pt.X, pt.Y);
49 | Console.WriteLine();
50 | }
51 | }
52 | Console.Read();
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/HOGSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using OpenCvSharp;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesCore;
8 |
9 | /// <summary>
10 | /// samples/c/peopledetect.c
11 | /// </summary>
12 | internal class HOGSample : ConsoleTestBase
13 | {
14 | public HOGSample()
15 | {
16 | }
17 |
18 | public override void RunTest()
19 | {
20 | using var img = Cv2.ImRead(ImagePath.Asahiyama, ImreadModes.Color);
21 |
22 | using var hog = new HOGDescriptor();
23 | hog.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
24 |
25 | bool b = hog.CheckDetectorSize();
26 | Console.WriteLine("CheckDetectorSize: {0}", b);
27 |
28 | var watch = Stopwatch.StartNew();
29 |
30 |         // Run the detector with default parameters. To get a higher hit-rate
31 |         // (and more false alarms, respectively), decrease the hitThreshold and
32 |         // groupThreshold (set groupThreshold to 0 to turn off the grouping completely).
33 | Rect[] found = hog.DetectMultiScale(img, 0, new Size(8, 8), new Size(24, 16), 1.05, 2);
34 |
35 | watch.Stop();
36 | Console.WriteLine("Detection time = {0}ms", watch.ElapsedMilliseconds);
37 | Console.WriteLine("{0} region(s) found", found.Length);
38 |
39 | foreach (Rect rect in found)
40 | {
41 | // the HOG detector returns slightly larger rectangles than the real objects.
42 | // so we slightly shrink the rectangles to get a nicer output.
43 | var r = new Rect
44 | {
45 | X = rect.X + (int)Math.Round(rect.Width * 0.1),
46 | Y = rect.Y + (int)Math.Round(rect.Height * 0.1),
47 | Width = (int)Math.Round(rect.Width * 0.8),
48 | Height = (int)Math.Round(rect.Height * 0.8)
49 | };
50 | img.Rectangle(r.TopLeft, r.BottomRight, Scalar.Red, 3);
51 | }
52 |
53 | using var window = new Window("people detector", img, WindowFlags.Normal);
54 | window.SetProperty(WindowPropertyFlags.Fullscreen, 1);
55 | Cv2.WaitKey(0);
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/HistSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | /// <summary>
8 | /// Histogram sample
9 | /// http://book.mynavi.jp/support/pc/opencv2/c3/opencv_img.html
10 | /// </summary>
11 | class HistSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | using var src = Cv2.ImRead(ImagePath.Lenna, ImreadModes.Grayscale);
16 |
17 | // Histogram view
18 | const int Width = 260, Height = 200;
19 | using var render = new Mat(new Size(Width, Height), MatType.CV_8UC3, Scalar.All(255));
20 |
21 | // Calculate histogram
22 | var hist = new Mat();
23 | int[] hdims = { 256 }; // Histogram size for each dimension
24 | Rangef[] ranges = { new Rangef(0, 256), }; // min/max
25 | Cv2.CalcHist(
26 | new Mat[] { src },
27 | new int[] { 0 },
28 | null,
29 | hist,
30 | 1,
31 | hdims,
32 | ranges);
33 |
34 | // Get the max value of histogram
35 | Cv2.MinMaxLoc(hist, out _, out double maxVal);
36 |
37 | var color = Scalar.All(100);
38 | // Scales and draws histogram
39 | hist = hist * (maxVal != 0 ? Height / maxVal : 0.0);
40 | for (int j = 0; j < hdims[0]; ++j)
41 | {
42 | int binW = (int)((double)Width / hdims[0]);
43 | render.Rectangle(
44 |                 new Point(j * binW, render.Rows - (int)hist.Get<float>(j)),
45 | new Point((j + 1) * binW, render.Rows),
46 | color,
47 | -1);
48 | }
49 |
50 | using (new Window("Image", src, WindowFlags.AutoSize | WindowFlags.FreeRatio))
51 | using (new Window("Histogram", render, WindowFlags.AutoSize | WindowFlags.FreeRatio))
52 | {
53 | Cv2.WaitKey();
54 | }
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/HoughLinesSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | /// <summary>
9 | /// Hough Transform Sample (line detection using the Hough transform)
10 | /// </summary>
11 | /// <remarks>http://opencv.jp/sample/special_transforms.html#hough_line</remarks>
12 | class HoughLinesSample : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | SampleCpp();
17 | }
18 |
19 |     /// <summary>
20 |     /// Sample of the new C++-style wrapper
21 |     /// </summary>
22 | private void SampleCpp()
23 | {
24 | // (1) Load the image
25 | using var imgGray = new Mat(ImagePath.Goryokaku, ImreadModes.Grayscale);
26 | using var imgStd = new Mat(ImagePath.Goryokaku, ImreadModes.Color);
27 | using var imgProb = imgStd.Clone();
28 |         // (2) Preprocess (edge detection with Canny)
29 | Cv2.Canny(imgGray, imgGray, 50, 200, 3, false);
30 |
31 | // (3) Run Standard Hough Transform
32 | LineSegmentPolar[] segStd = Cv2.HoughLines(imgGray, 1, Math.PI / 180, 50, 0, 0);
33 | int limit = Math.Min(segStd.Length, 10);
34 | for (int i = 0; i < limit; i++)
35 | {
36 | // Draws result lines
37 | float rho = segStd[i].Rho;
38 | float theta = segStd[i].Theta;
39 | double a = Math.Cos(theta);
40 | double b = Math.Sin(theta);
41 | double x0 = a * rho;
42 | double y0 = b * rho;
43 | Point pt1 = new Point { X = (int)Math.Round(x0 + 1000 * (-b)), Y = (int)Math.Round(y0 + 1000 * (a)) };
44 | Point pt2 = new Point { X = (int)Math.Round(x0 - 1000 * (-b)), Y = (int)Math.Round(y0 - 1000 * (a)) };
45 | imgStd.Line(pt1, pt2, Scalar.Red, 3, LineTypes.AntiAlias, 0);
46 | }
47 |
48 | // (4) Run Probabilistic Hough Transform
49 | LineSegmentPoint[] segProb = Cv2.HoughLinesP(imgGray, 1, Math.PI / 180, 50, 50, 10);
50 | foreach (LineSegmentPoint s in segProb)
51 | {
52 | imgProb.Line(s.P1, s.P2, Scalar.Red, 3, LineTypes.AntiAlias, 0);
53 | }
54 |
55 | // (5) Show results
56 | using (new Window("Hough_line_standard", imgStd, WindowFlags.AutoSize))
57 | using (new Window("Hough_line_probabilistic", imgProb, WindowFlags.AutoSize))
58 | {
59 | Window.WaitKey(0);
60 | }
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/KAZESample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using OpenCvSharp;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesCore;
8 |
9 | /// <summary>
10 | /// Retrieves keypoints using the KAZE and AKAZE algorithms.
11 | /// </summary>
12 | internal class KAZESample : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
17 | var kaze = KAZE.Create();
18 | var akaze = AKAZE.Create();
19 |
20 | var kazeDescriptors = new Mat();
21 | var akazeDescriptors = new Mat();
22 | KeyPoint[] kazeKeyPoints = null, akazeKeyPoints = null;
23 | var kazeTime = MeasureTime(() =>
24 | kaze.DetectAndCompute(gray, null, out kazeKeyPoints, kazeDescriptors));
25 | var akazeTime = MeasureTime(() =>
26 | akaze.DetectAndCompute(gray, null, out akazeKeyPoints, akazeDescriptors));
27 |
28 | var dstKaze = new Mat();
29 | var dstAkaze = new Mat();
30 | Cv2.DrawKeypoints(gray, kazeKeyPoints, dstKaze);
31 | Cv2.DrawKeypoints(gray, akazeKeyPoints, dstAkaze);
32 |
33 | using (new Window(String.Format("KAZE [{0:F2}ms]", kazeTime.TotalMilliseconds), dstKaze))
34 | using (new Window(String.Format("AKAZE [{0:F2}ms]", akazeTime.TotalMilliseconds), dstAkaze))
35 | {
36 | Cv2.WaitKey();
37 | }
38 | }
39 |
40 | private TimeSpan MeasureTime(Action action)
41 | {
42 | var watch = Stopwatch.StartNew();
43 | action();
44 | watch.Stop();
45 | return watch.Elapsed;
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/MSERSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | /// <summary>
8 | /// Maximally Stable Extremal Regions
9 | /// </summary>
10 | class MSERSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using Mat src = new Mat(ImagePath.Distortion, ImreadModes.Color);
15 | using Mat gray = new Mat();
16 | using Mat dst = src.Clone();
17 | Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
18 |
19 | CppStyleMSER(gray, dst); // C++ style
20 |
21 | using (new Window("MSER src", src))
22 | using (new Window("MSER gray", gray))
23 | using (new Window("MSER dst", dst))
24 | {
25 | Cv2.WaitKey();
26 | }
27 | }
28 |
29 |     /// <summary>
30 |     /// Extracts MSER by C++-style code (cv::MSER)
31 |     /// </summary>
32 |     /// <param name="gray">Grayscale input image</param>
33 |     /// <param name="dst">Image on which the detected regions are drawn</param>
34 | private void CppStyleMSER(Mat gray, Mat dst)
35 | {
36 | MSER mser = MSER.Create();
37 | mser.DetectRegions(gray, out Point[][] contours, out _);
38 | foreach (Point[] pts in contours)
39 | {
40 | Scalar color = Scalar.RandomColor();
41 | foreach (Point p in pts)
42 | {
43 | dst.Circle(p, 1, color);
44 | }
45 | }
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/MatOperations.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | ///
9 | ///
10 | ///
11 | class MatOperations : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | SubMat();
16 | RowColRangeOperation();
17 | RowColOperation();
18 | }
19 |
20 |     /// <summary>
21 |     /// Submatrix operations
22 |     /// </summary>
23 | private void SubMat()
24 | {
25 | using var src = Cv2.ImRead(ImagePath.Lenna);
26 |
27 | // Assign small image to mat
28 | using var small = new Mat();
29 | Cv2.Resize(src, small, new Size(100, 100));
30 | src[10, 110, 10, 110] = small;
31 | src[370, 470, 400, 500] = small.T();
32 |         // ↑ This is the same as the following:
33 | //small.T().CopyTo(src[370, 470, 400, 500]);
34 |
35 | // Get partial mat (similar to cvSetImageROI)
36 | Mat part = src[200, 400, 200, 360];
37 | // Invert partial pixel values
38 | Cv2.BitwiseNot(part, part);
39 |
40 |         // Fill the region (rows 50..100, cols 400..450) with color (128, 0, 0)
41 | part = src.SubMat(50, 100, 400, 450);
42 | part.SetTo(128);
43 |
44 | using (new Window("SubMat", src))
45 | {
46 | Cv2.WaitKey();
47 | }
48 |
49 | part.Dispose();
50 | }
51 |
52 |     /// <summary>
53 |     /// Row/column range operations
54 |     /// </summary>
55 | private void RowColRangeOperation()
56 | {
57 | using var src = Cv2.ImRead(ImagePath.Lenna);
58 |
59 | Cv2.GaussianBlur(
60 | src.RowRange(100, 200),
61 | src.RowRange(200, 300),
62 | new Size(7, 7), 20);
63 |
64 | Cv2.GaussianBlur(
65 | src.ColRange(200, 300),
66 | src.ColRange(100, 200),
67 | new Size(7, 7), 20);
68 |
69 | using (new Window("RowColRangeOperation", src))
70 | {
71 | Cv2.WaitKey();
72 | }
73 | }
74 |
75 |     /// <summary>
76 |     /// Submatrix expression operations
77 |     /// </summary>
78 | private void RowColOperation()
79 | {
80 | using var src = Cv2.ImRead(ImagePath.Lenna);
81 |
82 | var rand = new Random();
83 | for (int i = 0; i < 200; i++)
84 | {
85 | int c1 = rand.Next(100, 400);
86 | int c2 = rand.Next(100, 400);
87 | using Mat temp = src.Row(c1).Clone();
88 | src.Row(c2).CopyTo(src.Row(c1));
89 | temp.CopyTo(src.Row(c2));
90 | }
91 |
92 | ((Mat)~src.ColRange(450, 500)).CopyTo(src.ColRange(0, 50));
93 |
94 | src.RowRange(450, 460).SetTo(new Scalar(0, 0, 255));
95 |
96 | using (new Window("RowColOperation", src))
97 | {
98 | Cv2.WaitKey();
99 | }
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/MergeSplitSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | ///
8 | ///
9 | ///
10 | class MergeSplitSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | // Split/Merge Test
15 | {
16 | using var src = new Mat(ImagePath.Lenna, ImreadModes.Color);
17 |
18 | // Split each plane
19 | Cv2.Split(src, out var planes);
20 |
21 | Cv2.ImShow("planes 0", planes[0]);
22 | Cv2.ImShow("planes 1", planes[1]);
23 | Cv2.ImShow("planes 2", planes[2]);
24 | Cv2.WaitKey();
25 | Cv2.DestroyAllWindows();
26 |
27 | // Invert G plane
28 | Cv2.BitwiseNot(planes[1], planes[1]);
29 |
30 | // Merge
31 | using var merged = new Mat();
32 | Cv2.Merge(planes, merged);
33 |
34 | Cv2.ImShow("src", src);
35 | Cv2.ImShow("merged", merged);
36 | Cv2.WaitKey();
37 | Cv2.DestroyAllWindows();
38 | }
39 |
40 | // MixChannels Test
41 | {
42 | using var rgba = new Mat(300, 300, MatType.CV_8UC4, new Scalar(50, 100, 150, 200));
43 | using var bgr = new Mat(rgba.Rows, rgba.Cols, MatType.CV_8UC3);
44 | using var alpha = new Mat(rgba.Rows, rgba.Cols, MatType.CV_8UC1);
45 |
46 | Mat[] input = { rgba };
47 | Mat[] output = { bgr, alpha };
48 | // rgba[0] -> bgr[2], rgba[1] -> bgr[1],
49 | // rgba[2] -> bgr[0], rgba[3] -> alpha[0]
50 | int[] fromTo = { 0, 2, 1, 1, 2, 0, 3, 3 };
51 | Cv2.MixChannels(input, output, fromTo);
52 |
53 | Cv2.ImShow("rgba", rgba);
54 | Cv2.ImShow("bgr", bgr);
55 | Cv2.ImShow("alpha", alpha);
56 | Cv2.WaitKey();
57 | Cv2.DestroyAllWindows();
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/MorphologySample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | ///
8 | ///
9 | ///
10 | class MorphologySample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
15 | using var binary = new Mat();
16 | using var dilate1 = new Mat();
17 | using var dilate2 = new Mat();
18 | byte[] kernelValues = { 0, 1, 0, 1, 1, 1, 0, 1, 0 }; // cross (+)
19 | using var kernel = Mat.FromPixelData(3, 3, MatType.CV_8UC1, kernelValues);
20 |
21 | // Binarize
22 | Cv2.Threshold(gray, binary, 0, 255, ThresholdTypes.Otsu);
23 |
24 | // empty kernel
25 | Cv2.Dilate(binary, dilate1, null);
26 | // + kernel
27 | Cv2.Dilate(binary, dilate2, kernel);
28 |
29 | Cv2.ImShow("binary", binary);
30 | Cv2.ImShow("dilate (kernel = null)", dilate1);
31 | Cv2.ImShow("dilate (kernel = +)", dilate2);
32 | Cv2.WaitKey(0);
33 | Cv2.DestroyAllWindows();
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/NormalArrayOperations.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using OpenCvSharp;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | ///
9 | ///
10 | ///
11 | class NormalArrayOperations : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | Threshold1();
16 | Threshold2();
17 | Threshold3();
18 | }
19 |
20 |     /// <summary>
21 |     /// Run thresholding on a byte array
22 |     /// </summary>
23 | private void Threshold1()
24 | {
25 | const int T = 3;
26 | const int Max = 5;
27 |
28 | byte[] input = { 1, 2, 3, 4, 5, };
29 |         var output = new List<byte>();
30 |
31 | Cv2.Threshold(InputArray.Create(input), OutputArray.Create(output),
32 | T, Max, ThresholdTypes.Binary);
33 |
34 | Console.WriteLine("Threshold: {0}", T);
35 | Console.WriteLine("input: {0}", string.Join(",", input));
36 | Console.WriteLine("output: {0}", string.Join(",", output));
37 | }
38 |
39 |     /// <summary>
40 |     /// Run thresholding on a short array
41 |     /// </summary>
42 | private void Threshold2()
43 | {
44 | const int T = 150;
45 | const int Max = 250;
46 |
47 | short[] input = { 50, 100, 150, 200, 250, };
48 |         var output = new List<short>();
49 |
50 | Cv2.Threshold(InputArray.Create(input), OutputArray.Create(output),
51 | T, Max, ThresholdTypes.Binary);
52 |
53 | Console.WriteLine("Threshold: {0}", T);
54 | Console.WriteLine("input: {0}", string.Join(",", input));
55 | Console.WriteLine("output: {0}", string.Join(",", output));
56 | }
57 |
58 |     /// <summary>
59 |     /// Run thresholding on a struct (Point2f) array
60 |     /// </summary>
61 | private void Threshold3()
62 | {
63 | const double T = 2000;
64 | const double Max = 5000;
65 |
66 | // threshold does not support Point (int)
67 | Point2f[] input = {
68 | new Point2f(1000, 1500),
69 | new Point2f(2000, 2001),
70 | new Point2f(500, 5000),
71 | };
72 |         var output = new List<Point2f>();
73 |
74 | Cv2.Threshold(InputArray.Create(input), OutputArray.Create(output),
75 | T, Max, ThresholdTypes.Binary);
76 |
77 | Console.WriteLine("Threshold: {0}", T);
78 | Console.WriteLine("input: {0}", string.Join(",", input));
79 | Console.WriteLine("output: {0}", string.Join(",", output));
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/OpenVinoFaceDetection.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using OpenCvSharp.Dnn;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | /// <summary>
8 | /// To run this example you first need to build OpenCV with Intel OpenVINO support.
9 | /// Download the face detection model available here: https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/intel/face-detection-adas-0001
10 | /// Add the files to the bin folder.
11 | /// </summary>
12 | internal class OpenVinoFaceDetection : ConsoleTestBase
13 | {
14 | const string modelFace = "face-detection-adas-0001.bin";
15 | const string modelFaceTxt = "face-detection-adas-0001.xml";
16 | const string sampleImage = "sample.jpg";
17 | const string outputLoc = "sample_output.jpg";
18 |
19 | public override void RunTest()
20 | {
21 | using var frame = Cv2.ImRead(sampleImage);
22 | int frameHeight = frame.Rows;
23 | int frameWidth = frame.Cols;
24 |
25 | using var netFace = CvDnn.ReadNet(modelFace, modelFaceTxt);
26 | netFace.SetPreferableBackend(Backend.INFERENCE_ENGINE);
27 | netFace.SetPreferableTarget(Target.CPU);
28 |
29 | using var blob = CvDnn.BlobFromImage(frame, 1.0, new Size(672, 384), new Scalar(0, 0, 0), false, false);
30 | netFace.SetInput(blob);
31 |
32 | using (var detection = netFace.Forward())
33 | {
34 | using var detectionMat = Mat.FromPixelData(detection.Size(2), detection.Size(3), MatType.CV_32F, detection.Ptr(0));
35 |
36 | for (int i = 0; i < detectionMat.Rows; i++)
37 | {
38 |                 float confidence = detectionMat.At<float>(i, 2);
39 |
40 | if (confidence > 0.7)
41 | {
42 |                     int x1 = (int)(detectionMat.At<float>(i, 3) * frameWidth); //xmin
43 |                     int y1 = (int)(detectionMat.At<float>(i, 4) * frameHeight); //ymin
44 |                     int x2 = (int)(detectionMat.At<float>(i, 5) * frameWidth); //xmax
45 |                     int y2 = (int)(detectionMat.At<float>(i, 6) * frameHeight); //ymax
46 |
47 | var roi = new Rect(x1, y1, (x2 - x1), (y2 - y1));
48 | roi = AdjustBoundingBox(roi);
49 | Cv2.Rectangle(frame, roi, new Scalar(0, 255, 0), 2, LineTypes.Link4);
50 | }
51 | }
52 | }
53 |
54 | var finalOutput = outputLoc;
55 | Cv2.ImWrite(finalOutput, frame);
56 | }
57 |
58 | private Rect AdjustBoundingBox(Rect faceRect)
59 | {
60 | int w = faceRect.Width;
61 | int h = faceRect.Height;
62 |
63 | faceRect.X -= (int)(0.067 * w);
64 | faceRect.Y -= (int)(0.028 * h);
65 |
66 | faceRect.Width += (int)(0.15 * w);
67 | faceRect.Height += (int)(0.13 * h);
68 |
69 | if (faceRect.Width < faceRect.Height)
70 | {
71 | var dx = (faceRect.Height - faceRect.Width);
72 | faceRect.X -= dx / 2;
73 | faceRect.Width += dx;
74 | }
75 | else
76 | {
77 | var dy = (faceRect.Width - faceRect.Height);
78 | faceRect.Y -= dy / 2;
79 | faceRect.Height += dy;
80 | }
81 | return faceRect;
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/PerspectiveTransformSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using System;
3 | using System.Collections.Generic;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesCore;
8 |
9 | public class PerspectiveTransformSample : ConsoleTestBase
10 | {
11 |     private readonly List<Point2f> point2Fs = new List<Point2f>();
12 |
13 | private Point2f[] srcPoints = new Point2f[] {
14 | new Point2f(0, 0),
15 | new Point2f(0, 0),
16 | new Point2f(0, 0),
17 | new Point2f(0, 0),
18 | };
19 |
20 | private readonly Point2f[] dstPoints = new Point2f[] {
21 | new Point2f(0, 0),
22 | new Point2f(0, 480),
23 | new Point2f(640, 480),
24 | new Point2f(640, 0),
25 | };
26 |
27 | private Mat OriginalImage;
28 |
29 | public override void RunTest()
30 | {
31 | OriginalImage = new Mat(ImagePath.SurfBoxinscene, ImreadModes.AnyColor);
32 | using var Window = new Window("result", OriginalImage);
33 |
34 | Cv2.SetMouseCallback(Window.Name, CallbackOpenCVAnnotate);
35 | Window.WaitKey();
36 | }
37 |
38 | private void CallbackOpenCVAnnotate(MouseEventTypes e, int x, int y, MouseEventFlags flags, IntPtr userdata)
39 | {
40 | if (e == MouseEventTypes.LButtonDown)
41 | {
42 | point2Fs.Add(new Point2f(x, y));
43 | if (point2Fs.Count == 4)
44 | {
45 | srcPoints = point2Fs.ToArray();
46 | using var matrix = Cv2.GetPerspectiveTransform(srcPoints, dstPoints);
47 | using var dst = new Mat(new Size(640, 480), MatType.CV_8UC3);
48 | Cv2.WarpPerspective(OriginalImage, dst, matrix, dst.Size());
49 | using var dsts = new Window("dst", dst);
50 | point2Fs.Clear();
51 | Window.WaitKey();
52 | }
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/PhotoMethods.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | /// <summary>
8 | /// sample of photo module methods
9 | /// </summary>
10 | class PhotoMethods : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using var src = new Mat(ImagePath.Fruits, ImreadModes.Color);
15 |
16 | using var normconv = new Mat();
17 | using var recursFiltered = new Mat();
18 | Cv2.EdgePreservingFilter(src, normconv, EdgePreservingMethods.NormconvFilter);
19 | Cv2.EdgePreservingFilter(src, recursFiltered, EdgePreservingMethods.RecursFilter);
20 |
21 | using var detailEnhance = new Mat();
22 | Cv2.DetailEnhance(src, detailEnhance);
23 |
24 | using var pencil1 = new Mat();
25 | using var pencil2 = new Mat();
26 | Cv2.PencilSketch(src, pencil1, pencil2);
27 |
28 | using var stylized = new Mat();
29 | Cv2.Stylization(src, stylized);
30 |
31 | using (new Window("src", src))
32 | using (new Window("edgePreservingFilter - NormconvFilter", normconv))
33 | using (new Window("edgePreservingFilter - RecursFilter", recursFiltered))
34 | using (new Window("detailEnhance", detailEnhance))
35 | using (new Window("pencilSketch grayscale", pencil1))
36 | using (new Window("pencilSketch color", pencil2))
37 | using (new Window("stylized", stylized))
38 | {
39 | Cv2.WaitKey();
40 | }
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/PixelAccess.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using OpenCvSharp;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesCore;
8 |
9 | /// <summary>
10 | /// Swaps B for R
11 | /// </summary>
12 | class PixelAccess : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | Console.WriteLine("Get/Set: {0}ms", MeasureTime(GetSet));
17 | Console.WriteLine("GenericIndexer: {0}ms", MeasureTime(GenericIndexer));
18 | Console.WriteLine("TypeSpecificMat: {0}ms", MeasureTime(TypeSpecificMat));
19 | Console.Read();
20 | }
21 |
22 |     /// <summary>
23 |     /// Slow
24 |     /// </summary>
25 | private void GetSet()
26 | {
27 | using var mat = new Mat(ImagePath.Lenna, ImreadModes.Color);
28 | for (int y = 0; y < mat.Height; y++)
29 | {
30 | for (int x = 0; x < mat.Width; x++)
31 | {
32 |                 Vec3b color = mat.Get<Vec3b>(y, x);
33 | Vec3b newColor = new Vec3b(color.Item2, color.Item1, color.Item0);
34 | mat.Set(y, x, newColor);
35 | }
36 | }
37 | //Cv2.ImShow("Slow", mat);
38 | //Cv2.WaitKey(0);
39 | //Cv2.DestroyAllWindows();
40 | }
41 |
42 |     /// <summary>
43 |     /// Reasonably fast
44 |     /// </summary>
45 | private void GenericIndexer()
46 | {
47 | using var mat = new Mat(ImagePath.Lenna, ImreadModes.Color);
48 |         var indexer = mat.GetGenericIndexer<Vec3b>();
49 | for (int y = 0; y < mat.Height; y++)
50 | {
51 | for (int x = 0; x < mat.Width; x++)
52 | {
53 | Vec3b color = indexer[y, x];
54 | Vec3b newColor = new Vec3b(color.Item2, color.Item1, color.Item0);
55 | indexer[y, x] = newColor;
56 | }
57 | }
58 | //Cv2.ImShow("GenericIndexer", mat);
59 | //Cv2.WaitKey(0);
60 | //Cv2.DestroyAllWindows();
61 | }
62 |
63 |     /// <summary>
64 |     /// Faster
65 |     /// </summary>
66 | private void TypeSpecificMat()
67 | {
68 | using var mat = new Mat(ImagePath.Lenna, ImreadModes.Color);
69 |         var mat3 = new Mat<Vec3b>(mat);
70 | var indexer = mat3.GetIndexer();
71 | for (int y = 0; y < mat.Height; y++)
72 | {
73 | for (int x = 0; x < mat.Width; x++)
74 | {
75 | Vec3b color = indexer[y, x];
76 | Vec3b newColor = new Vec3b(color.Item2, color.Item1, color.Item0);
77 | indexer[y, x] = newColor;
78 | }
79 | }
80 | //Cv2.ImShow("TypeSpecificMat", mat);
81 | //Cv2.WaitKey(0);
82 | //Cv2.DestroyAllWindows();
83 | }
84 |
85 | private static long MeasureTime(Action action)
86 | {
87 | var watch = Stopwatch.StartNew();
88 | action();
89 | watch.Stop();
90 | return watch.ElapsedMilliseconds;
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/SeamlessClone.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | /// <summary>
8 | /// cv::seamlessClone
9 | /// </summary>
10 | class SeamlessClone : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | Mat src = new Mat(ImagePath.Girl, ImreadModes.Color);
15 | Mat dst = new Mat(ImagePath.Lenna, ImreadModes.Color);
16 | Mat src0 = src.Resize(dst.Size(), 0, 0, InterpolationFlags.Lanczos4);
17 | Mat mask = Mat.Zeros(src0.Size(), MatType.CV_8UC3);
18 |
19 | mask.Circle(200, 200, 100, Scalar.White, -1);
20 |
21 | Mat blend1 = new Mat();
22 | Mat blend2 = new Mat();
23 | Mat blend3 = new Mat();
24 | Cv2.SeamlessClone(
25 | src0, dst, mask, new Point(260, 270), blend1,
26 | SeamlessCloneMethods.NormalClone);
27 | Cv2.SeamlessClone(
28 | src0, dst, mask, new Point(260, 270), blend2,
29 | SeamlessCloneMethods.MonochromeTransfer);
30 | Cv2.SeamlessClone(
31 | src0, dst, mask, new Point(260, 270), blend3,
32 | SeamlessCloneMethods.MixedClone);
33 |
34 | using (new Window("src", src0))
35 | using (new Window("dst", dst))
36 | using (new Window("mask", mask))
37 | using (new Window("blend NormalClone", blend1))
38 | using (new Window("blend MonochromeTransfer", blend2))
39 | using (new Window("blend MixedClone", blend3))
40 | {
41 | Cv2.WaitKey();
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/SimpleBlobDetectorSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | internal class SimpleBlobDetectorSample : ConsoleTestBase
8 | {
9 | public override void RunTest()
10 | {
11 | using var src = Cv2.ImRead(ImagePath.Shapes);
12 | using var detectedCircles = new Mat();
13 | using var detectedOvals = new Mat();
14 |
15 | // Invert the image. Shapes has a black background and SimpleBlobDetector doesn't seem to work well with that.
16 | Cv2.BitwiseNot(src, src);
17 |
18 | // Parameters tuned to detect only circles
19 | var circleParams = new SimpleBlobDetector.Params
20 | {
21 | MinThreshold = 10,
22 | MaxThreshold = 230,
23 |
24 | // The area is the number of pixels in the blob.
25 | FilterByArea = true,
26 | MinArea = 500,
27 | MaxArea = 50000,
28 |
29 | // Circularity is a ratio of the area to the perimeter. Polygons with more sides are more circular.
30 | FilterByCircularity = true,
31 | MinCircularity = 0.9f,
32 |
33 | // Convexity is the ratio of the area of the blob to the area of its convex hull.
34 | FilterByConvexity = true,
35 | MinConvexity = 0.95f,
36 |
37 | // A circle's inertia ratio is 1. A line's is 0. An oval is between 0 and 1.
38 | FilterByInertia = true,
39 | MinInertiaRatio = 0.95f
40 | };
41 |
42 | // Parameters tuned to find the ovals in the Shapes image.
43 | var ovalParams = new SimpleBlobDetector.Params
44 | {
45 | MinThreshold = 10,
46 | MaxThreshold = 230,
47 | FilterByArea = true,
48 | MinArea = 500,
49 | // The ovals are the smallest blobs in Shapes, so we limit the max area to eliminate the larger blobs.
50 | MaxArea = 10000,
51 | FilterByCircularity = true,
52 | MinCircularity = 0.58f,
53 | FilterByConvexity = true,
54 | MinConvexity = 0.96f,
55 | FilterByInertia = true,
56 | MinInertiaRatio = 0.1f
57 | };
58 |
59 | using var circleDetector = SimpleBlobDetector.Create(circleParams);
60 | var circleKeyPoints = circleDetector.Detect(src);
61 | Cv2.DrawKeypoints(src, circleKeyPoints, detectedCircles, Scalar.HotPink, DrawMatchesFlags.DrawRichKeypoints);
62 |
63 | using var ovalDetector = SimpleBlobDetector.Create(ovalParams);
64 | var ovalKeyPoints = ovalDetector.Detect(src);
65 | Cv2.DrawKeypoints(src, ovalKeyPoints, detectedOvals, Scalar.HotPink, DrawMatchesFlags.DrawRichKeypoints);
66 |
67 | using var w1 = new Window("Detected Circles", detectedCircles);
68 | using var w2 = new Window("Detected Ovals", detectedOvals);
69 |
70 | Cv2.WaitKey();
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/SolveEquation.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using OpenCvSharp;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | ///
9 | ///
10 | ///
11 | class SolveEquation : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | ByMat();
16 | ByNormalArray();
17 |
18 | Console.Read();
19 | }
20 |
21 |     /// <summary>
22 |     /// Solve equation AX = Y
23 |     /// </summary>
24 | private void ByMat()
25 | {
26 | // x + y = 10
27 | // 2x + 3y = 26
28 | // (x=4, y=6)
29 |
30 | double[,] av = {{1, 1},
31 | {2, 3}};
32 | double[] yv = { 10, 26 };
33 |
34 | using var a = Mat.FromPixelData(2, 2, MatType.CV_64FC1, av);
35 | using var y = Mat.FromPixelData(2, 1, MatType.CV_64FC1, yv);
36 | using var x = new Mat();
37 |
38 | Cv2.Solve(a, y, x, DecompTypes.LU);
39 |
40 | Console.WriteLine("ByMat:");
41 |         Console.WriteLine("X1 = {0}, X2 = {1}", x.At<double>(0), x.At<double>(1));
42 | }
43 |
44 |     /// <summary>
45 |     /// Solve equation AX = Y
46 |     /// </summary>
47 | private void ByNormalArray()
48 | {
49 | // x + y = 10
50 | // 2x + 3y = 26
51 | // (x=4, y=6)
52 |
53 | double[,] a = {{1, 1},
54 | {2, 3}};
55 |
56 | double[] y = { 10, 26 };
57 |
58 |         var x = new List<double>();
59 |
60 | Cv2.Solve(
61 | InputArray.Create(a), InputArray.Create(y),
62 | OutputArray.Create(x),
63 | DecompTypes.LU);
64 |
65 | Console.WriteLine("ByNormalArray:");
66 | Console.WriteLine("X1 = {0}, X2 = {1}", x[0], x[1]);
67 | }
68 | }
69 |
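A quick sanity check is to multiply back: A·X should reproduce Y. A minimal sketch continuing from ByMat() above, where a and x are the coefficient matrix and the computed solution:

// Matrix product via Mat operator*; the result should be (10, 26)
using var check = (a * x).ToMat();
Console.WriteLine("A*X = ({0}, {1})", check.At<double>(0), check.At<double>(1));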
--------------------------------------------------------------------------------
/SamplesCore/Samples/StarDetectorSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using OpenCvSharp.XFeatures2D;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | /// <summary>
9 | /// Retrieves keypoints using the StarDetector algorithm.
10 | /// </summary>
11 | class StarDetectorSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | var dst = new Mat(ImagePath.Lenna, ImreadModes.Color);
16 | var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
17 |
18 | StarDetector detector = StarDetector.Create(45);
19 | KeyPoint[] keypoints = detector.Detect(gray);
20 |
21 | if (keypoints != null)
22 | {
23 | var color = new Scalar(0, 255, 0);
24 | foreach (KeyPoint kpt in keypoints)
25 | {
26 | float r = kpt.Size / 2;
27 | Cv2.Circle(dst, (Point)kpt.Pt, (int)r, color);
28 | Cv2.Line(dst,
29 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y + r),
30 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y - r),
31 | color);
32 | Cv2.Line(dst,
33 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y + r),
34 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y - r),
35 | color);
36 | }
37 | }
38 |
39 | using (new Window("StarDetector features", dst))
40 | {
41 | Cv2.WaitKey();
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/Stitching.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using OpenCvSharp;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | class Stitching : ConsoleTestBase
9 | {
10 | public override void RunTest()
11 | {
12 | Mat[] images = SelectStitchingImages(200, 200, 10);
13 |
14 | using var stitcher = Stitcher.Create(Stitcher.Mode.Scans);
15 | using var pano = new Mat();
16 |
17 | Console.Write("Stitching start...");
18 | // TODO: does not work??
19 | var status = stitcher.Stitch(images, pano);
20 | Console.WriteLine(" finish (status:{0})", status);
21 |
22 | Window.ShowImages(pano);
23 |
24 | foreach (var image in images)
25 | {
26 | image.Dispose();
27 | }
28 | }
29 |
30 | private static Mat[] SelectStitchingImages(int width, int height, int count)
31 | {
32 | using var source = new Mat(@"Data\Image\lenna.png", ImreadModes.Color);
33 | using var result = source.Clone();
34 |
35 | var rand = new Random();
36 |         var mats = new List<Mat>();
37 | for (int i = 0; i < count; i++)
38 | {
39 | int x1 = rand.Next(source.Cols - width);
40 | int y1 = rand.Next(source.Rows - height);
41 | int x2 = x1 + width;
42 | int y2 = y1 + height;
43 |
44 | result.Line(new Point(x1, y1), new Point(x1, y2), new Scalar(0, 0, 255));
45 | result.Line(new Point(x1, y2), new Point(x2, y2), new Scalar(0, 0, 255));
46 | result.Line(new Point(x2, y2), new Point(x2, y1), new Scalar(0, 0, 255));
47 | result.Line(new Point(x2, y1), new Point(x1, y1), new Scalar(0, 0, 255));
48 |
49 | using var m = source[new Rect(x1, y1, width, height)];
50 | mats.Add(m.Clone());
51 | }
52 |
53 | using (new Window("stitching", result))
54 | {
55 | Cv2.WaitKey();
56 | }
57 |
58 | return mats.ToArray();
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/Subdiv2DSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Linq;
3 | using OpenCvSharp;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | /// <summary>
9 | /// cv::Subdiv2D test
10 | /// </summary>
11 | class Subdiv2DSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | const int Size = 600;
16 |
17 | // Creates random point list
18 | var rand = new Random();
19 | var points = Enumerable.Range(0, 100).Select(_ =>
20 | new Point2f(rand.Next(0, Size), rand.Next(0, Size))).ToArray();
21 |
22 | using var imgExpr = Mat.Zeros(Size, Size, MatType.CV_8UC3);
23 | using var img = imgExpr.ToMat();
24 | foreach (var p in points)
25 | {
26 | img.Circle((Point)p, 4, Scalar.Red, -1);
27 | }
28 |
29 | // Initializes Subdiv2D
30 | using var subdiv = new Subdiv2D();
31 | subdiv.InitDelaunay(new Rect(0, 0, Size, Size));
32 | subdiv.Insert(points);
33 |
34 | // Draws voronoi diagram
35 | subdiv.GetVoronoiFacetList(null, out var facetList, out var facetCenters);
36 |
37 |         using var voronoi = img.Clone();
38 | foreach (var list in facetList)
39 | {
40 | var before = list.Last();
41 | foreach (var p in list)
42 | {
43 |                 voronoi.Line((Point)before, (Point)p, new Scalar(64, 255, 128), 1);
44 | before = p;
45 | }
46 | }
47 |
48 | // Draws delaunay diagram
49 | Vec4f[] edgeList = subdiv.GetEdgeList();
50 | using var delaunay = img.Clone();
51 | foreach (var edge in edgeList)
52 | {
53 | var p1 = new Point(edge.Item0, edge.Item1);
54 | var p2 = new Point(edge.Item2, edge.Item3);
55 | delaunay.Line(p1, p2, new Scalar(64, 255, 128), 1);
56 | }
57 |
58 |         Cv2.ImShow("voronoi", voronoi);
59 | Cv2.ImShow("delaunay", delaunay);
60 | Cv2.WaitKey();
61 | Cv2.DestroyAllWindows();
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/SuperResolutionSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesCore;
6 |
7 | ///
8 | ///
9 | ///
10 | class SuperResolutionSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | var capture = new VideoCapture();
15 | capture.Set(VideoCaptureProperties.FrameWidth, 640);
16 | capture.Set(VideoCaptureProperties.FrameHeight, 480);
17 | capture.Open(-1);
18 | if (!capture.IsOpened())
19 | throw new Exception("capture initialization failed");
20 |
21 | var fs = FrameSource.CreateFrameSource_Camera(-1);
22 | var sr = SuperResolution.CreateBTVL1();
23 | sr.SetInput(fs);
24 |
25 | using var normalWindow = new Window("normal");
26 | using var srWindow = new Window("super resolution");
27 | var normalFrame = new Mat();
28 | var srFrame = new Mat();
29 | while (true)
30 | {
31 | capture.Read(normalFrame);
32 | sr.NextFrame(srFrame);
33 | if (normalFrame.Empty() || srFrame.Empty())
34 | break;
35 | normalWindow.ShowImage(normalFrame);
36 | srWindow.ShowImage(srFrame);
37 | Cv2.WaitKey(100);
38 | }
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/VideoCaptureSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | ///
9 | ///
10 | ///
11 | class VideoCaptureSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | // Opens MP4 file (ffmpeg is probably needed)
16 | using var capture = new VideoCapture(MoviePath.Bach);
17 | if (!capture.IsOpened())
18 | return;
19 |
20 | int sleepTime = (int)Math.Round(1000 / capture.Fps);
21 |
22 | using var window = new Window("capture");
23 | // Frame image buffer
24 | var image = new Mat();
25 |
26 | // When the movie playback reaches end, Mat.data becomes NULL.
27 | while (true)
28 | {
29 | capture.Read(image); // same as cvQueryFrame
30 | if(image.Empty())
31 | break;
32 |
33 | window.ShowImage(image);
34 | Cv2.WaitKey(sleepTime);
35 | }
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/SamplesCore/Samples/VideoWriterSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | ///
9 | ///
10 | ///
11 | class VideoWriterSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | const string OutVideoFile = "out.avi";
16 |
17 | // Opens MP4 file (ffmpeg is probably needed)
18 | using var capture = new VideoCapture(MoviePath.Bach);
19 |
20 | // Read movie frames and write them to VideoWriter
21 | var dsize = new Size(640, 480);
22 | using (var writer = new VideoWriter(OutVideoFile, -1, capture.Fps, dsize))
23 | {
24 |             Console.WriteLine("Converting each movie frame...");
25 | using var frame = new Mat();
26 | while (true)
27 | {
28 | // Read image
29 | capture.Read(frame);
30 | if (frame.Empty())
31 | break;
32 |
33 | Console.CursorLeft = 0;
34 | Console.Write("{0} / {1}", capture.PosFrames, capture.FrameCount);
35 |
36 | // grayscale -> canny -> resize
37 | using var gray = new Mat();
38 | using var canny = new Mat();
39 | using var dst = new Mat();
40 | Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
41 | Cv2.Canny(gray, canny, 100, 180);
42 | Cv2.Resize(canny, dst, dsize, 0, 0, InterpolationFlags.Linear);
43 | // Write mat to VideoWriter
44 | writer.Write(dst);
45 | }
46 | Console.WriteLine();
47 | }
48 |
49 | // Watch result movie
50 | using (var capture2 = new VideoCapture(OutVideoFile))
51 | using (var window = new Window("result"))
52 | {
53 | int sleepTime = (int)(1000 / capture.Fps);
54 |
55 | using var frame = new Mat();
56 | while (true)
57 | {
58 | capture2.Read(frame);
59 | if (frame.Empty())
60 | break;
61 |
62 | window.ShowImage(frame);
63 | Cv2.WaitKey(sleepTime);
64 | }
65 | }
66 | }
67 |
68 | }
69 |
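Passing -1 as the codec pops up a codec-selection dialog (Windows only); a specific codec can be requested with a FourCC instead. A minimal sketch using VideoWriter.FourCC (not used in the sample above) and assuming the MJPG codec is available on the machine:

// Request the MJPG codec explicitly instead of showing the codec dialog
int fourcc = VideoWriter.FourCC('M', 'J', 'P', 'G');
using var writer = new VideoWriter("out.avi", fourcc, capture.Fps, new Size(640, 480));
if (!writer.IsOpened())
    Console.WriteLine("VideoWriter could not be opened with MJPG");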
--------------------------------------------------------------------------------
/SamplesCore/Samples/WatershedSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesCore;
7 |
8 | /// <summary>
9 | /// Watershed algorithm sample
10 | /// </summary>
11 | /// <remarks>http://opencv.jp/sample/segmentation_and_connection.html#watershed</remarks>
12 | public class WatershedSample : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | using var srcImg = Cv2.ImRead(ImagePath.Lenna, ImreadModes.AnyDepth | ImreadModes.AnyColor);
17 | using var markers = new Mat(srcImg.Size(), MatType.CV_32SC1, Scalar.All(0));
18 |
19 | using (var window = new Window("image", srcImg))
20 | {
21 | using var dspImg = srcImg.Clone();
22 |
23 | // Mouse event
24 | int seedNum = 0;
25 | window.SetMouseCallback((MouseEventTypes ev, int x, int y, MouseEventFlags flags, IntPtr userdata) =>
26 | {
27 | if (ev == MouseEventTypes.LButtonDown)
28 | {
29 | seedNum++;
30 | var pt = new Point(x, y);
31 | markers.Circle(pt, 10, Scalar.All(seedNum), Cv2.FILLED, LineTypes.Link8);
32 | dspImg.Circle(pt, 10, Scalar.White, 3, LineTypes.Link8);
33 | window.Image = dspImg;
34 | }
35 | });
36 | Window.WaitKey();
37 | }
38 |
39 | Cv2.Watershed(srcImg, markers);
40 |
41 | // draws watershed
42 | using var dstImg = srcImg.Clone();
43 | for (int y = 0; y < markers.Height; y++)
44 | {
45 | for (int x = 0; x < markers.Width; x++)
46 | {
47 |                 int idx = markers.Get<int>(y, x);
48 | if (idx == -1)
49 | {
50 | dstImg.Rectangle(new Rect(x, y, 2, 2), Scalar.Red, -1);
51 | }
52 | }
53 | }
54 |
55 | using (new Window("watershed transform", dstImg))
56 | {
57 | Window.WaitKey();
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/SamplesCore/SamplesCore.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | net8.0;net48;
5 | true
6 | SamplesCore
7 | Exe
8 |
11 | SamplesCore
12 | false
13 | false
14 | false
15 | 11
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 | PreserveNewest
41 |
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/SamplesLegacy/App.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/SamplesLegacy/Program.cs:
--------------------------------------------------------------------------------
1 | using SampleBase;
2 | using SampleBase.Interfaces;
3 | using System;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | public static class Program
9 | {
10 | [STAThread]
11 | public static void Main(string[] args)
12 | {
13 | Console.WriteLine("Runtime Version = {0}", Environment.Version);
14 |
15 | ITestManager testManager = new ConsoleTestManager();
16 |
17 | testManager.AddTests(
18 | new ArucoSample(),
19 | new BgSubtractorMOG(),
20 | new BinarizerSample(),
21 | new BRISKSample(),
22 | new CaffeSample(),
23 | new CameraCaptureSample(),
24 | new ClaheSample(),
25 | new ConnectedComponentsSample(),
26 | new DFT(),
27 | new DnnSuperresSample(),
28 | new DrawBestMatchRectangle(),
29 | new FaceDetection(),
30 | new FaceDetectionDNN(),
31 | new FASTSample(),
32 | new FlannSample(),
33 | new FREAKSample(),
34 | new HandPose(),
35 | new HistSample(),
36 | new HOGSample(),
37 | new HoughLinesSample(),
38 | new InpaintSample(),
39 | new KAZESample(),
40 | new KAZESample2(),
41 | new MatOperations(),
42 | new MDS(),
43 | new MergeSplitSample(),
44 | new MorphologySample(),
45 | new MSERSample(),
46 | new NormalArrayOperations(),
47 | new OpenVinoFaceDetection(),
48 | new PhotoMethods(),
49 | new PixelAccess(),
50 | new Pose(),
51 | new SeamlessClone(),
52 | new SiftSurfSample(),
53 | new SimpleBlobDetectorSample(),
54 | new SolveEquation(),
55 | new StarDetectorSample(),
56 | new Stitching(),
57 | new Subdiv2DSample(),
58 | new SuperResolutionSample(),
59 | new SVMSample(),
60 | new VideoWriterSample(),
61 | new VideoCaptureSample(),
62 | new WatershedSample());
63 |
64 | testManager.ShowTestEntrance();
65 | }
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/SamplesLegacy/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | using System.Reflection;
2 | using System.Runtime.InteropServices;
3 |
4 | // General information about an assembly is controlled through the following
5 | // set of attributes. Change these attribute values to modify the information
6 | // associated with an assembly.
7 | [assembly: AssemblyTitle("SamplesLegacy")]
8 | [assembly: AssemblyDescription("")]
9 | [assembly: AssemblyConfiguration("")]
10 | [assembly: AssemblyCompany("")]
11 | [assembly: AssemblyProduct("SamplesLegacy")]
12 | [assembly: AssemblyCopyright("Copyright © 2021")]
13 | [assembly: AssemblyTrademark("")]
14 | [assembly: AssemblyCulture("")]
15 |
16 | // Setting ComVisible to false makes the types in this assembly not visible
17 | // to COM components. If you need to access a type in this assembly from COM,
18 | // set the ComVisible attribute to true on that type.
19 | [assembly: ComVisible(false)]
20 |
21 | // The following GUID is for the ID of the typelib if this project is exposed to COM
22 | [assembly: Guid("6b44a902-63d5-4191-8a0d-e37e75e49599")]
23 |
24 | // Version information for an assembly consists of the following four values:
25 | //
26 | // Major Version
27 | // Minor Version
28 | // Build Number
29 | // Revision
30 | //
31 | // You can specify all the values or you can default the Build and Revision Numbers
32 | // by using the '*' as shown below:
32 | // 既定値にすることができます:
33 | // [assembly: AssemblyVersion("1.0.*")]
34 | [assembly: AssemblyVersion("1.0.0.0")]
35 | [assembly: AssemblyFileVersion("1.0.0.0")]
36 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/BRISKSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | /// <summary>
8 | /// Retrieves keypoints using the BRISK algorithm.
9 | /// </summary>
10 | class BRISKSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
15 | var dst = new Mat(ImagePath.Lenna, ImreadModes.Color);
16 |
17 | using var brisk = BRISK.Create();
18 | KeyPoint[] keypoints = brisk.Detect(gray);
19 |
20 | if (keypoints != null)
21 | {
22 | var color = new Scalar(0, 255, 0);
23 | foreach (KeyPoint kpt in keypoints)
24 | {
25 | float r = kpt.Size / 2;
26 | Cv2.Circle(dst, (Point)kpt.Pt, (int)r, color);
27 | Cv2.Line(dst,
28 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y + r),
29 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y - r),
30 | color);
31 | Cv2.Line(dst,
32 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y + r),
33 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y - r),
34 | color);
35 | }
36 | }
37 |
38 | using (new Window("BRISK features", dst))
39 | {
40 | Cv2.WaitKey();
41 | }
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/BgSubtractorMOG.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | class BgSubtractorMOG : ConsoleTestBase
8 | {
9 | public override void RunTest()
10 | {
11 | using var capture = new VideoCapture(MoviePath.Bach);
12 | using var mog = BackgroundSubtractorMOG.Create();
13 | using var windowSrc = new Window("src");
14 | using var windowDst = new Window("dst");
15 |
16 | using var frame = new Mat();
17 | using var fg = new Mat();
18 | while (true)
19 | {
20 | capture.Read(frame);
21 | if (frame.Empty())
22 | break;
23 | mog.Apply(frame, fg, 0.01);
24 |
25 | windowSrc.Image = frame;
26 | windowDst.Image = fg;
27 | Cv2.WaitKey(50);
28 | }
29 | }
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/BinarizerSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using OpenCvSharp;
4 | using OpenCvSharp.XImgProc;
5 | using SampleBase;
6 | using SampleBase.Console;
7 |
8 | namespace SamplesLegacy
9 | {
10 | internal class BinarizerSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using var src = Cv2.ImRead(ImagePath.Binarization, ImreadModes.Grayscale);
15 | using var niblack = new Mat();
16 | using var sauvola = new Mat();
17 | using var nick = new Mat();
18 | int kernelSize = 51;
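// NiblackThreshold computes a per-pixel threshold from the local mean and standard deviation
// in a kernelSize x kernelSize window; the constant passed before the method enum (k) is
// specific to each variant (Niblack, Sauvola, Nick).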
19 |
20 | var sw = new Stopwatch();
21 | sw.Start();
22 | CvXImgProc.NiblackThreshold(src, niblack, 255, ThresholdTypes.Binary, kernelSize, -0.2, LocalBinarizationMethods.Niblack);
23 | sw.Stop();
24 | Console.WriteLine($"Niblack {sw.ElapsedMilliseconds} ms");
25 |
26 | sw.Restart();
27 | CvXImgProc.NiblackThreshold(src, sauvola, 255, ThresholdTypes.Binary, kernelSize, 0.1, LocalBinarizationMethods.Sauvola);
28 | sw.Stop();
29 | Console.WriteLine($"Sauvola {sw.ElapsedMilliseconds} ms");
30 |
31 | sw.Restart();
32 | CvXImgProc.NiblackThreshold(src, nick, 255, ThresholdTypes.Binary, kernelSize, -0.14, LocalBinarizationMethods.Nick);
33 | sw.Stop();
34 | Console.WriteLine($"Nick {sw.ElapsedMilliseconds} ms");
35 |
36 | using (new Window("src", src, WindowFlags.AutoSize))
37 | using (new Window("Niblack", niblack, WindowFlags.AutoSize))
38 | using (new Window("Sauvola", sauvola, WindowFlags.AutoSize))
39 | using (new Window("Nick", nick, WindowFlags.AutoSize))
40 | {
41 | Cv2.WaitKey();
42 | }
43 | }
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/CameraCaptureSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | /// <summary>
8 | /// Captures frames from the default camera and displays them until a key is pressed.
9 | /// </summary>
10 | class CameraCaptureSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using var capture = new VideoCapture(0, VideoCaptureAPIs.DSHOW);
15 | if (!capture.IsOpened())
16 | return;
17 |
18 | capture.FrameWidth = 1920;
19 | capture.FrameHeight = 1280;
20 | capture.AutoFocus = true;
21 |
22 | const int sleepTime = 10;
23 |
24 | using var window = new Window("capture");
25 | var image = new Mat();
26 |
27 | while (true)
28 | {
29 | capture.Read(image);
30 | if (image.Empty())
31 | break;
32 |
33 | window.ShowImage(image);
34 | int c = Cv2.WaitKey(sleepTime);
35 | if (c >= 0)
36 | {
37 | break;
38 | }
39 | }
40 | }
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/ClaheSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | class ClaheSample : ConsoleTestBase
8 | {
9 | public override void RunTest()
10 | {
11 | using var src = new Mat(ImagePath.TsukubaLeft, ImreadModes.Grayscale);
12 | using var dst1 = new Mat();
13 | using var dst2 = new Mat();
14 | using var dst3 = new Mat();
15 |
16 | using (var clahe = Cv2.CreateCLAHE())
17 | {
18 | clahe.ClipLimit = 20;
19 | clahe.Apply(src, dst1);
20 | clahe.ClipLimit = 40;
21 | clahe.Apply(src, dst2);
22 | clahe.TilesGridSize = new Size(4, 4);
23 | clahe.Apply(src, dst3);
24 | }
25 |
26 | Window.ShowImages(
27 | new[]{src, dst1, dst2, dst3},
28 | new[]{"src", "dst clip20", "dst clip40", "dst tile4x4"});
29 | }
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/ConnectedComponentsSample.cs:
--------------------------------------------------------------------------------
1 | using System.Linq;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | /// <summary>
9 | /// Connected-component labeling sample
10 | /// </summary>
11 | class ConnectedComponentsSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | using var src = new Mat(ImagePath.Shapes, ImreadModes.Color);
16 | using var gray = src.CvtColor(ColorConversionCodes.BGR2GRAY);
17 | using var binary = gray.Threshold(0, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);
18 | using var labelView = src.EmptyClone();
19 | using var rectView = binary.CvtColor(ColorConversionCodes.GRAY2BGR);
20 |
21 | var cc = Cv2.ConnectedComponentsEx(binary);
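// Label 0 is the background component, so LabelCount <= 1 means no foreground blobs were found
// (and Blobs.Skip(1) below skips the background).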
22 | if (cc.LabelCount <= 1)
23 | return;
24 |
25 | // draw labels
26 | cc.RenderBlobs(labelView);
27 |
28 | // draw bounding boxes except the background
29 | foreach (var blob in cc.Blobs.Skip(1))
30 | {
31 | rectView.Rectangle(blob.Rect, Scalar.Red);
32 | }
33 |
34 | // filter maximum blob
35 | var maxBlob = cc.GetLargestBlob();
36 | var filtered = new Mat();
37 | cc.FilterByBlob(src, filtered, maxBlob);
38 |
39 | using (new Window("src", src))
40 | using (new Window("binary", binary))
41 | using (new Window("labels", labelView))
42 | using (new Window("bounding boxes", rectView))
43 | using (new Window("maximum blob", filtered))
44 | {
45 | Cv2.WaitKey();
46 | }
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/DnnSuperresSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using OpenCvSharp.DnnSuperres;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | class DnnSuperresSample : ConsoleTestBase
9 | {
10 | // https://github.com/Saafke/FSRCNN_Tensorflow/tree/master/models
11 | private const string ModelFileName = "Data/Model/FSRCNN_x4.pb";
12 |
13 | public override void RunTest()
14 | {
15 | using var dnn = new DnnSuperResImpl("fsrcnn", 4);
16 | dnn.ReadModel(ModelFileName);
17 |
18 | using var src = new Mat(ImagePath.Mandrill, ImreadModes.Color);
19 | using var dst = new Mat();
20 | dnn.Upsample(src, dst);
21 |
22 | Window.ShowImages(
23 | new[]{src, dst},
24 | new[]{"src", "dst0"});
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/DrawBestMatch.cs:
--------------------------------------------------------------------------------
1 | using System.Linq;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | /// <summary>
9 | /// https://stackoverflow.com/questions/51606215/how-to-draw-bounding-box-on-best-matches/51607041#51607041
10 | /// </summary>
11 | class DrawBestMatchRectangle : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | using var img1 = new Mat(ImagePath.Match1, ImreadModes.Color);
16 | using var img2 = new Mat(ImagePath.Match2, ImreadModes.Color);
17 |
18 | using var orb = ORB.Create(1000);
19 | using var descriptors1 = new Mat();
20 | using var descriptors2 = new Mat();
21 | orb.DetectAndCompute(img1, null, out var keyPoints1, descriptors1);
22 | orb.DetectAndCompute(img2, null, out var keyPoints2, descriptors2);
23 |
24 | using var bf = new BFMatcher(NormTypes.Hamming, crossCheck: true);
25 | var matches = bf.Match(descriptors1, descriptors2);
26 |
27 | var goodMatches = matches
28 | .OrderBy(x => x.Distance)
29 | .Take(10)
30 | .ToArray();
31 |
32 | var srcPts = goodMatches.Select(m => keyPoints1[m.QueryIdx].Pt).Select(p => new Point2d(p.X, p.Y));
33 | var dstPts = goodMatches.Select(m => keyPoints2[m.TrainIdx].Pt).Select(p => new Point2d(p.X, p.Y));
34 |
35 | using var homography = Cv2.FindHomography(srcPts, dstPts, HomographyMethods.Ransac, 5, null);
36 |
37 | int h = img1.Height, w = img1.Width;
38 | var img2Bounds = new[]
39 | {
40 | new Point2d(0, 0),
41 | new Point2d(0, h-1),
42 | new Point2d(w-1, h-1),
43 | new Point2d(w-1, 0),
44 | };
45 | var img2BoundsTransformed = Cv2.PerspectiveTransform(img2Bounds, homography);
46 |
47 | using var view = img2.Clone();
48 | var drawingPoints = img2BoundsTransformed.Select(p => (Point) p).ToArray();
49 | Cv2.Polylines(view, new []{drawingPoints}, true, Scalar.Red, 3);
50 |
51 | using (new Window("view", view))
52 | {
53 | Cv2.WaitKey();
54 | }
55 | }
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/FASTSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | /// <summary>
8 | /// cv::FAST
9 | /// </summary>
10 | class FASTSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using Mat imgSrc = new Mat(ImagePath.Lenna, ImreadModes.Color);
15 | using Mat imgGray = new Mat();
16 | using Mat imgDst = imgSrc.Clone();
17 | Cv2.CvtColor(imgSrc, imgGray, ColorConversionCodes.BGR2GRAY, 0);
18 |
19 | KeyPoint[] keypoints = Cv2.FAST(imgGray, 50, true);
20 |
21 | foreach (KeyPoint kp in keypoints)
22 | {
23 | imgDst.Circle((Point)kp.Pt, 3, Scalar.Red, -1, LineTypes.AntiAlias, 0);
24 | }
25 |
26 | Cv2.ImShow("FAST", imgDst);
27 | Cv2.WaitKey(0);
28 | Cv2.DestroyAllWindows();
29 | }
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/FREAKSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using OpenCvSharp.XFeatures2D;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | /// <summary>
9 | /// Retrieves keypoints using the FREAK algorithm.
10 | /// </summary>
11 | class FREAKSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | using var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
16 | using var dst = new Mat(ImagePath.Lenna, ImreadModes.Color);
17 |
18 | // ORB
19 | using var orb = ORB.Create(1000);
20 | KeyPoint[] keypoints = orb.Detect(gray);
21 |
22 | // FREAK
23 | using var freak = FREAK.Create();
24 | Mat freakDescriptors = new Mat();
25 | freak.Compute(gray, ref keypoints, freakDescriptors);
26 |
27 | if (keypoints != null)
28 | {
29 | var color = new Scalar(0, 255, 0);
30 | foreach (KeyPoint kpt in keypoints)
31 | {
32 | float r = kpt.Size / 2;
33 | Cv2.Circle(dst, (Point)kpt.Pt, (int)r, color);
34 | Cv2.Line(dst,
35 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y + r),
36 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y - r),
37 | color);
38 | Cv2.Line(dst,
39 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y + r),
40 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y - r),
41 | color);
42 | }
43 | }
44 |
45 | using (new Window("FREAK", dst))
46 | {
47 | Cv2.WaitKey();
48 | }
49 | }
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/FaceDetection.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | /// <summary>
8 | /// Human face detection
9 | /// http://docs.opencv.org/doc/tutorials/objdetect/cascade_classifier/cascade_classifier.html
10 | /// </summary>
11 | class FaceDetection : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | // Load the cascades
16 | using var haarCascade = new CascadeClassifier(TextPath.HaarCascade);
17 | using var lbpCascade = new CascadeClassifier(TextPath.LbpCascade);
18 |
19 | // Detect faces
20 | Mat haarResult = DetectFace(haarCascade);
21 | Mat lbpResult = DetectFace(lbpCascade);
22 |
23 | Cv2.ImShow("Faces by Haar", haarResult);
24 | Cv2.ImShow("Faces by LBP", lbpResult);
25 | Cv2.WaitKey(0);
26 | Cv2.DestroyAllWindows();
27 | }
28 |
29 | /// <summary>
30 | /// Detects faces on the sample image with the given cascade and draws an ellipse around each one.
31 | /// </summary>
32 | /// <param name="cascade">Cascade classifier to use for detection</param>
33 | /// <returns>Copy of the source image with detected faces drawn</returns>
34 | private Mat DetectFace(CascadeClassifier cascade)
35 | {
36 | Mat result;
37 |
38 | using (var src = new Mat(ImagePath.Yalta, ImreadModes.Color))
39 | using (var gray = new Mat())
40 | {
41 | result = src.Clone();
42 | Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
43 |
44 | // Detect faces
45 | Rect[] faces = cascade.DetectMultiScale(
46 | gray, 1.08, 2, HaarDetectionTypes.ScaleImage, new Size(30, 30));
47 |
48 | // Render all detected faces
49 | foreach (Rect face in faces)
50 | {
51 | var center = new Point
52 | {
53 | X = (int)(face.X + face.Width * 0.5),
54 | Y = (int)(face.Y + face.Height * 0.5)
55 | };
56 | var axes = new Size
57 | {
58 | Width = (int)(face.Width * 0.5),
59 | Height = (int)(face.Height * 0.5)
60 | };
61 | Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
62 | }
63 | }
64 | return result;
65 | }
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/FaceDetectionDNN.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using OpenCvSharp.Dnn;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | /// <summary>
9 | /// To run this example, first download the face model available here: https://github.com/spmallick/learnopencv/tree/master/FaceDetectionComparison/models
10 | /// Add the files to the bin folder.
11 | /// </summary>
12 | internal class FaceDetectionDNN : ConsoleTestBase
13 | {
14 | const string configFile = "deploy.prototxt";
15 | const string faceModel = "res10_300x300_ssd_iter_140000_fp16.caffemodel";
16 | const string finalOutput = "DetectedFaces.jpg";
17 | const string image = "faces.jpg";
18 |
19 | public override void RunTest()
20 | {
21 | // Read sample image
22 | using var frame = Cv2.ImRead(image);
23 | int frameHeight = frame.Rows;
24 | int frameWidth = frame.Cols;
25 | using var faceNet = CvDnn.ReadNetFromCaffe(configFile, faceModel);
26 | using var blob = CvDnn.BlobFromImage(frame, 1.0, new Size(300, 300),
27 | new Scalar(104, 117, 123), false, false);
28 | faceNet.SetInput(blob, "data");
29 |
30 | using var detection = faceNet.Forward("detection_out");
31 | using var detectionMat = Mat.FromPixelData(detection.Size(2), detection.Size(3), MatType.CV_32F, detection.Ptr(0));
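// Each row of detectionMat is [imageId, classId, confidence, left, top, right, bottom];
// the box coordinates are normalized to [0, 1], hence the scaling by frame size below.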
32 | for (int i = 0; i < detectionMat.Rows; i++)
33 | {
34 | float confidence = detectionMat.At<float>(i, 2);
35 |
36 | if (confidence > 0.7)
37 | {
38 | int x1 = (int)(detectionMat.At<float>(i, 3) * frameWidth);
39 | int y1 = (int)(detectionMat.At<float>(i, 4) * frameHeight);
40 | int x2 = (int)(detectionMat.At<float>(i, 5) * frameWidth);
41 | int y2 = (int)(detectionMat.At<float>(i, 6) * frameHeight);
42 |
43 | Cv2.Rectangle(frame, new Point(x1, y1), new Point(x2, y2), new Scalar(0, 255, 0), 2,
44 | LineTypes.Link4);
45 | }
46 | }
47 |
48 | Cv2.ImWrite(finalOutput, frame);
49 | }
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/FlannSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using OpenCvSharp.Flann;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesLegacy
8 | {
9 | /// <summary>
10 | /// cv::flann
11 | /// </summary>
12 | class FlannSample : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | Console.WriteLine("===== FlannTest =====");
17 |
18 | // creates data set
19 | using (var features = new Mat(10000, 2, MatType.CV_32FC1))
20 | {
21 | var rand = new Random();
22 | for (int i = 0; i < features.Rows; i++)
23 | {
24 | features.Set<float>(i, 0, rand.Next(10000));
25 | features.Set<float>(i, 1, rand.Next(10000));
26 | }
27 |
28 | // query
29 | var queryPoint = new Point2f(7777, 7777);
30 | var queries = new Mat(1, 2, MatType.CV_32FC1);
31 | queries.Set<float>(0, 0, queryPoint.X);
32 | queries.Set<float>(0, 1, queryPoint.Y);
33 | Console.WriteLine("query:({0}, {1})", queryPoint.X, queryPoint.Y);
34 | Console.WriteLine("-----");
35 |
36 | // knnSearch
37 | using var nnIndex = new OpenCvSharp.Flann.Index(features, new KDTreeIndexParams(4));
38 | const int Knn = 1;
39 | nnIndex.KnnSearch(queries, out int[] indices, out float[] dists, Knn, new SearchParams(32));
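// The KD-tree search is approximate; the SearchParams argument (32) is the number of nodes to
// check during the search, trading accuracy for speed.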
40 |
41 | for (int i = 0; i < Knn; i++)
42 | {
43 | int index = indices[i];
44 | float dist = dists[i];
45 | var pt = new Point2f(features.Get<float>(index, 0), features.Get<float>(index, 1));
46 | Console.Write("No.{0}\t", i);
47 | Console.Write("index:{0}", index);
48 | Console.Write(" distance:{0}", dist);
49 | Console.Write(" data:({0}, {1})", pt.X, pt.Y);
50 | Console.WriteLine();
51 | }
52 | }
53 | Console.Read();
54 | }
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/HOGSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using OpenCvSharp;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesLegacy
8 | {
9 | /// <summary>
10 | /// samples/c/peopledetect.c
11 | /// </summary>
12 | internal class HOGSample : ConsoleTestBase
13 | {
14 | public HOGSample()
15 | {
16 | }
17 |
18 | public override void RunTest()
19 | {
20 | using var img = Cv2.ImRead(ImagePath.Asahiyama, ImreadModes.Color);
21 |
22 | using var hog = new HOGDescriptor();
23 | hog.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
24 |
25 | bool b = hog.CheckDetectorSize();
26 | Console.WriteLine("CheckDetectorSize: {0}", b);
27 |
28 | var watch = Stopwatch.StartNew();
29 |
30 | // Run the detector with default parameters. To get a higher hit-rate
31 | // (and more false alarms, respectively), decrease the hitThreshold and
32 | // groupThreshold (set groupThreshold to 0 to turn off the grouping completely).
33 | Rect[] found = hog.DetectMultiScale(img, 0, new Size(8, 8), new Size(24, 16), 1.05, 2);
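// Arguments here: hitThreshold = 0, winStride = 8x8, padding = 24x16, scale = 1.05,
// groupThreshold = 2 (see the tuning note above).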
34 |
35 | watch.Stop();
36 | Console.WriteLine("Detection time = {0}ms", watch.ElapsedMilliseconds);
37 | Console.WriteLine("{0} region(s) found", found.Length);
38 |
39 | foreach (Rect rect in found)
40 | {
41 | // The HOG detector returns slightly larger rectangles than the real objects,
42 | // so we slightly shrink the rectangles to get a nicer output.
43 | var r = new Rect
44 | {
45 | X = rect.X + (int)Math.Round(rect.Width * 0.1),
46 | Y = rect.Y + (int)Math.Round(rect.Height * 0.1),
47 | Width = (int)Math.Round(rect.Width * 0.8),
48 | Height = (int)Math.Round(rect.Height * 0.8)
49 | };
50 | img.Rectangle(r.TopLeft, r.BottomRight, Scalar.Red, 3);
51 | }
52 |
53 | using var window = new Window("people detector", img, WindowFlags.Normal);
54 | window.SetProperty(WindowPropertyFlags.Fullscreen, 1);
55 | Cv2.WaitKey(0);
56 | }
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/HistSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | /// <summary>
8 | /// Histogram sample
9 | /// http://book.mynavi.jp/support/pc/opencv2/c3/opencv_img.html
10 | /// </summary>
11 | class HistSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | using var src = Cv2.ImRead(ImagePath.Lenna, ImreadModes.Grayscale);
16 |
17 | // Histogram view
18 | const int Width = 260, Height = 200;
19 | using var render = new Mat(new Size(Width, Height), MatType.CV_8UC3, Scalar.All(255));
20 |
21 | // Calculate histogram
22 | var hist = new Mat();
23 | int[] hdims = {256}; // Histogram size for each dimension
24 | Rangef[] ranges = { new Rangef(0,256), }; // min/max
25 | Cv2.CalcHist(
26 | new Mat[]{src},
27 | new int[]{0},
28 | null,
29 | hist,
30 | 1,
31 | hdims,
32 | ranges);
33 |
34 | // Get the max value of histogram
35 | Cv2.MinMaxLoc(hist, out _, out double maxVal);
36 |
37 | var color = Scalar.All(100);
38 | // Scales and draws histogram
39 | hist = hist * (maxVal != 0 ? Height / maxVal : 0.0);
40 | for (int j = 0; j < hdims[0]; ++j)
41 | {
42 | int binW = (int)((double)Width / hdims[0]);
43 | render.Rectangle(
44 | new Point(j * binW, render.Rows - (int)hist.Get<float>(j)),
45 | new Point((j + 1) * binW, render.Rows),
46 | color,
47 | -1);
48 | }
49 |
50 | using (new Window("Image", src, WindowFlags.AutoSize | WindowFlags.FreeRatio))
51 | using (new Window("Histogram", render, WindowFlags.AutoSize | WindowFlags.FreeRatio))
52 | {
53 | Cv2.WaitKey();
54 | }
55 | }
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/HoughLinesSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | /// <summary>
9 | /// Hough Transform Sample / line detection using the Hough transform
10 | /// </summary>
11 | /// http://opencv.jp/sample/special_transforms.html#hough_line
12 | class HoughLinesSample : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | SampleCpp();
17 | }
18 |
19 | /// <summary>
20 | /// Sample of the new C++-style wrapper
21 | /// </summary>
22 | private void SampleCpp()
23 | {
24 | // (1) Load the image
25 | using var imgGray = new Mat(ImagePath.Goryokaku, ImreadModes.Grayscale);
26 | using var imgStd = new Mat(ImagePath.Goryokaku, ImreadModes.Color);
27 | using var imgProb = imgStd.Clone();
28 | // (2) Preprocess (edge detection with Canny)
29 | Cv2.Canny(imgGray, imgGray, 50, 200, 3, false);
30 |
31 | // (3) Run Standard Hough Transform
32 | LineSegmentPolar[] segStd = Cv2.HoughLines(imgGray, 1, Math.PI / 180, 50, 0, 0);
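// Each result is a line in polar form (rho, theta): x*cos(theta) + y*sin(theta) = rho.
// Below, (x0, y0) is the point on the line closest to the origin, and the line is drawn by
// stepping +/-1000 px along its direction.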
33 | int limit = Math.Min(segStd.Length, 10);
34 | for (int i = 0; i < limit; i++)
35 | {
36 | // Draws result lines
37 | float rho = segStd[i].Rho;
38 | float theta = segStd[i].Theta;
39 | double a = Math.Cos(theta);
40 | double b = Math.Sin(theta);
41 | double x0 = a * rho;
42 | double y0 = b * rho;
43 | Point pt1 = new Point { X = (int)Math.Round(x0 + 1000 * (-b)), Y = (int)Math.Round(y0 + 1000 * (a)) };
44 | Point pt2 = new Point { X = (int)Math.Round(x0 - 1000 * (-b)), Y = (int)Math.Round(y0 - 1000 * (a)) };
45 | imgStd.Line(pt1, pt2, Scalar.Red, 3, LineTypes.AntiAlias, 0);
46 | }
47 |
48 | // (4) Run Probabilistic Hough Transform
49 | LineSegmentPoint[] segProb = Cv2.HoughLinesP(imgGray, 1, Math.PI / 180, 50, 50, 10);
50 | foreach (LineSegmentPoint s in segProb)
51 | {
52 | imgProb.Line(s.P1, s.P2, Scalar.Red, 3, LineTypes.AntiAlias, 0);
53 | }
54 |
55 | // (5) Show results
56 | using (new Window("Hough_line_standard", imgStd, WindowFlags.AutoSize))
57 | using (new Window("Hough_line_probabilistic", imgProb, WindowFlags.AutoSize))
58 | {
59 | Window.WaitKey(0);
60 | }
61 | }
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/KAZESample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using OpenCvSharp;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesLegacy
8 | {
9 | /// <summary>
10 | /// Retrieves keypoints using the KAZE and AKAZE algorithms.
11 | /// </summary>
12 | internal class KAZESample : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
17 | var kaze = KAZE.Create();
18 | var akaze = AKAZE.Create();
19 |
20 | var kazeDescriptors = new Mat();
21 | var akazeDescriptors = new Mat();
22 | KeyPoint[] kazeKeyPoints = null, akazeKeyPoints = null;
23 | var kazeTime = MeasureTime(() =>
24 | kaze.DetectAndCompute(gray, null, out kazeKeyPoints, kazeDescriptors));
25 | var akazeTime = MeasureTime(() =>
26 | akaze.DetectAndCompute(gray, null, out akazeKeyPoints, akazeDescriptors));
27 |
28 | var dstKaze = new Mat();
29 | var dstAkaze = new Mat();
30 | Cv2.DrawKeypoints(gray, kazeKeyPoints, dstKaze);
31 | Cv2.DrawKeypoints(gray, akazeKeyPoints, dstAkaze);
32 |
33 | using (new Window(String.Format("KAZE [{0:F2}ms]", kazeTime.TotalMilliseconds), dstKaze))
34 | using (new Window(String.Format("AKAZE [{0:F2}ms]", akazeTime.TotalMilliseconds), dstAkaze))
35 | {
36 | Cv2.WaitKey();
37 | }
38 | }
39 |
40 | private TimeSpan MeasureTime(Action action)
41 | {
42 | var watch = Stopwatch.StartNew();
43 | action();
44 | watch.Stop();
45 | return watch.Elapsed;
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/MSERSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | /// <summary>
8 | /// Maximally Stable Extremal Regions
9 | /// </summary>
10 | class MSERSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using Mat src = new Mat(ImagePath.Distortion, ImreadModes.Color);
15 | using Mat gray = new Mat();
16 | using Mat dst = src.Clone();
17 | Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);
18 |
19 | CppStyleMSER(gray, dst); // C++ style
20 |
21 | using (new Window("MSER src", src))
22 | using (new Window("MSER gray", gray))
23 | using (new Window("MSER dst", dst))
24 | {
25 | Cv2.WaitKey();
26 | }
27 | }
28 |
29 | /// <summary>
30 | /// Extracts MSER by C++-style code (cv::MSER)
31 | /// </summary>
32 | /// <param name="gray">Input grayscale image</param>
33 | /// <param name="dst">Output image on which the detected regions are drawn</param>
34 | private void CppStyleMSER(Mat gray, Mat dst)
35 | {
36 | MSER mser = MSER.Create();
37 | mser.DetectRegions(gray, out Point[][] contours, out _);
38 | foreach (Point[] pts in contours)
39 | {
40 | Scalar color = Scalar.RandomColor();
41 | foreach (Point p in pts)
42 | {
43 | dst.Circle(p, 1, color);
44 | }
45 | }
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/MergeSplitSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | /// <summary>
8 | /// Split/Merge and MixChannels sample
9 | /// </summary>
10 | class MergeSplitSample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | // Split/Merge Test
15 | {
16 | using var src = new Mat(ImagePath.Lenna, ImreadModes.Color);
17 |
18 | // Split each plane
19 | Cv2.Split(src, out var planes);
20 |
21 | Cv2.ImShow("planes 0", planes[0]);
22 | Cv2.ImShow("planes 1", planes[1]);
23 | Cv2.ImShow("planes 2", planes[2]);
24 | Cv2.WaitKey();
25 | Cv2.DestroyAllWindows();
26 |
27 | // Invert G plane
28 | Cv2.BitwiseNot(planes[1], planes[1]);
29 |
30 | // Merge
31 | using var merged = new Mat();
32 | Cv2.Merge(planes, merged);
33 |
34 | Cv2.ImShow("src", src);
35 | Cv2.ImShow("merged", merged);
36 | Cv2.WaitKey();
37 | Cv2.DestroyAllWindows();
38 | }
39 |
40 | // MixChannels Test
41 | {
42 | using var rgba = new Mat(300, 300, MatType.CV_8UC4, new Scalar(50, 100, 150, 200));
43 | using var bgr = new Mat(rgba.Rows, rgba.Cols, MatType.CV_8UC3);
44 | using var alpha = new Mat(rgba.Rows, rgba.Cols, MatType.CV_8UC1);
45 |
46 | Mat[] input = { rgba };
47 | Mat[] output = { bgr, alpha };
48 | // rgba[0] -> bgr[2], rgba[1] -> bgr[1],
49 | // rgba[2] -> bgr[0], rgba[3] -> alpha[0]
50 | int[] fromTo = { 0, 2, 1, 1, 2, 0, 3, 3 };
51 | Cv2.MixChannels(input, output, fromTo);
52 |
53 | Cv2.ImShow("rgba", rgba);
54 | Cv2.ImShow("bgr", bgr);
55 | Cv2.ImShow("alpha", alpha);
56 | Cv2.WaitKey();
57 | Cv2.DestroyAllWindows();
58 | }
59 | }
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/MorphologySample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | /// <summary>
8 | /// Morphological dilation sample
9 | /// </summary>
10 | class MorphologySample : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
15 | using var binary = new Mat();
16 | using var dilate1 = new Mat();
17 | using var dilate2 = new Mat();
18 | byte[] kernelValues = { 0, 1, 0, 1, 1, 1, 0, 1, 0 }; // cross (+)
19 | using var kernel = Mat.FromPixelData(3, 3, MatType.CV_8UC1, kernelValues);
20 |
21 | // Binarize
22 | Cv2.Threshold(gray, binary, 0, 255, ThresholdTypes.Otsu);
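// With ThresholdTypes.Otsu the supplied threshold (0) is ignored and the value is computed
// automatically from the image histogram.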
23 |
24 | // empty kernel
25 | Cv2.Dilate(binary, dilate1, null);
26 | // + kernel
27 | Cv2.Dilate(binary, dilate2, kernel);
28 |
29 | Cv2.ImShow("binary", binary);
30 | Cv2.ImShow("dilate (kernel = null)", dilate1);
31 | Cv2.ImShow("dilate (kernel = +)", dilate2);
32 | Cv2.WaitKey(0);
33 | Cv2.DestroyAllWindows();
34 | }
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/NormalArrayOperations.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using OpenCvSharp;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesLegacy
8 | {
9 | /// <summary>
10 | /// Applies Cv2.Threshold to plain .NET arrays (byte, short and struct arrays)
11 | /// </summary>
12 | class NormalArrayOperations : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | Threshold1();
17 | Threshold2();
18 | Threshold3();
19 | }
20 |
21 | /// <summary>
22 | /// Runs thresholding on a byte array
23 | /// </summary>
24 | private void Threshold1()
25 | {
26 | const int T = 3;
27 | const int Max = 5;
28 |
29 | byte[] input = {1, 2, 3, 4, 5, };
30 | var output = new List<byte>();
31 |
32 | Cv2.Threshold(InputArray.Create(input), OutputArray.Create(output),
33 | T, Max, ThresholdTypes.Binary);
34 |
35 | Console.WriteLine("Threshold: {0}", T);
36 | Console.WriteLine("input: {0}", string.Join(",", input));
37 | Console.WriteLine("output: {0}", string.Join(",", output));
38 | }
39 |
40 | /// <summary>
41 | /// Runs thresholding on a short array
42 | /// </summary>
43 | private void Threshold2()
44 | {
45 | const int T = 150;
46 | const int Max = 250;
47 |
48 | short[] input = { 50, 100, 150, 200, 250, };
49 | var output = new List<short>();
50 |
51 | Cv2.Threshold(InputArray.Create(input), OutputArray.Create(output),
52 | T, Max, ThresholdTypes.Binary);
53 |
54 | Console.WriteLine("Threshold: {0}", T);
55 | Console.WriteLine("input: {0}", string.Join(",", input));
56 | Console.WriteLine("output: {0}", string.Join(",", output));
57 | }
58 |
59 | /// <summary>
60 | /// Runs thresholding on a struct (Point2f) array
61 | /// </summary>
62 | private void Threshold3()
63 | {
64 | const double T = 2000;
65 | const double Max = 5000;
66 |
67 | // threshold does not support Point (int)
68 | Point2f[] input = {
69 | new Point2f(1000, 1500),
70 | new Point2f(2000, 2001),
71 | new Point2f(500, 5000),
72 | };
73 | var output = new List<Point2f>();
74 |
75 | Cv2.Threshold(InputArray.Create(input), OutputArray.Create(output),
76 | T, Max, ThresholdTypes.Binary);
77 |
78 | Console.WriteLine("Threshold: {0}", T);
79 | Console.WriteLine("input: {0}", string.Join(",", input));
80 | Console.WriteLine("output: {0}", string.Join(",", output));
81 | }
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/PerspectiveTransformSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using System;
3 | using System.Collections.Generic;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesLegacy
8 | {
9 | public class PerspectiveTransformSample : ConsoleTestBase
10 | {
11 | private readonly List<Point2f> point2Fs = new List<Point2f>();
12 |
13 | private Point2f[] srcPoints = new Point2f[] {
14 | new Point2f(0, 0),
15 | new Point2f(0, 0),
16 | new Point2f(0, 0),
17 | new Point2f(0, 0),
18 | };
19 |
20 | private readonly Point2f[] dstPoints = new Point2f[] {
21 | new Point2f(0, 0),
22 | new Point2f(0, 480),
23 | new Point2f(640, 480),
24 | new Point2f(640, 0),
25 | };
26 |
27 | private Mat OriginalImage;
28 |
29 | public override void RunTest()
30 | {
31 | OriginalImage = new Mat(ImagePath.SurfBoxinscene, ImreadModes.AnyColor);
32 | using var Window = new Window("result", OriginalImage);
33 |
34 | Cv2.SetMouseCallback(Window.Name, CallbackOpenCVAnnotate);
35 | Window.WaitKey();
36 | }
37 |
38 | private void CallbackOpenCVAnnotate(MouseEventTypes e, int x, int y, MouseEventFlags flags, IntPtr userdata)
39 | {
40 | if (e == MouseEventTypes.LButtonDown)
41 | {
42 | point2Fs.Add(new Point2f(x, y));
43 | if (point2Fs.Count == 4)
44 | {
45 | srcPoints = point2Fs.ToArray();
46 | using var matrix = Cv2.GetPerspectiveTransform(srcPoints, dstPoints);
47 | using var dst = new Mat(new Size(640, 480), MatType.CV_8UC3);
48 | Cv2.WarpPerspective(OriginalImage, dst, matrix, dst.Size());
49 | using var dsts = new Window("dst", dst);
50 | point2Fs.Clear();
51 | Window.WaitKey();
52 | }
53 | }
54 | }
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/PhotoMethods.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | /// <summary>
8 | /// sample of photo module methods
9 | /// </summary>
10 | class PhotoMethods : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | using var src = new Mat(ImagePath.Fruits, ImreadModes.Color);
15 |
16 | using var normconv = new Mat();
17 | using var recursFiltered = new Mat();
18 | Cv2.EdgePreservingFilter(src, normconv, EdgePreservingMethods.NormconvFilter);
19 | Cv2.EdgePreservingFilter(src, recursFiltered, EdgePreservingMethods.RecursFilter);
20 |
21 | using var detailEnhance = new Mat();
22 | Cv2.DetailEnhance(src, detailEnhance);
23 |
24 | using var pencil1 = new Mat();
25 | using var pencil2 = new Mat();
26 | Cv2.PencilSketch(src, pencil1, pencil2);
27 |
28 | using var stylized = new Mat();
29 | Cv2.Stylization(src, stylized);
30 |
31 | using (new Window("src", src))
32 | using (new Window("edgePreservingFilter - NormconvFilter", normconv))
33 | using (new Window("edgePreservingFilter - RecursFilter", recursFiltered))
34 | using (new Window("detailEnhance", detailEnhance))
35 | using (new Window("pencilSketch grayscale", pencil1))
36 | using (new Window("pencilSketch color", pencil2))
37 | using (new Window("stylized", stylized))
38 | {
39 | Cv2.WaitKey();
40 | }
41 | }
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/PixelAccess.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using OpenCvSharp;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesLegacy
8 | {
9 | /// <summary>
10 | /// Swaps B for R
11 | /// </summary>
12 | class PixelAccess : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | Console.WriteLine("Get/Set: {0}ms", MeasureTime(GetSet));
17 | Console.WriteLine("GenericIndexer: {0}ms", MeasureTime(GenericIndexer));
18 | Console.WriteLine("TypeSpecificMat: {0}ms", MeasureTime(TypeSpecificMat));
19 | Console.Read();
20 | }
21 |
22 | /// <summary>
23 | /// Slow
24 | /// </summary>
25 | private void GetSet()
26 | {
27 | using var mat = new Mat(ImagePath.Lenna, ImreadModes.Color);
28 | for (int y = 0; y < mat.Height; y++)
29 | {
30 | for (int x = 0; x < mat.Width; x++)
31 | {
32 | Vec3b color = mat.Get<Vec3b>(y, x);
33 | Vec3b newColor = new Vec3b(color.Item2, color.Item1, color.Item0);
34 | mat.Set<Vec3b>(y, x, newColor);
35 | }
36 | }
37 | //Cv2.ImShow("Slow", mat);
38 | //Cv2.WaitKey(0);
39 | //Cv2.DestroyAllWindows();
40 | }
41 |
42 | /// <summary>
43 | /// Reasonably fast
44 | /// </summary>
45 | private void GenericIndexer()
46 | {
47 | using var mat = new Mat(ImagePath.Lenna, ImreadModes.Color);
48 | var indexer = mat.GetGenericIndexer<Vec3b>();
49 | for (int y = 0; y < mat.Height; y++)
50 | {
51 | for (int x = 0; x < mat.Width; x++)
52 | {
53 | Vec3b color = indexer[y, x];
54 | Vec3b newColor = new Vec3b(color.Item2, color.Item1, color.Item0);
55 | indexer[y, x] = newColor;
56 | }
57 | }
58 | //Cv2.ImShow("GenericIndexer", mat);
59 | //Cv2.WaitKey(0);
60 | //Cv2.DestroyAllWindows();
61 | }
62 |
63 | /// <summary>
64 | /// Faster
65 | /// </summary>
66 | private void TypeSpecificMat()
67 | {
68 | using var mat = new Mat(ImagePath.Lenna, ImreadModes.Color);
69 | var mat3 = new Mat<Vec3b>(mat);
70 | var indexer = mat3.GetIndexer();
71 | for (int y = 0; y < mat.Height; y++)
72 | {
73 | for (int x = 0; x < mat.Width; x++)
74 | {
75 | Vec3b color = indexer[y, x];
76 | Vec3b newColor = new Vec3b(color.Item2, color.Item1, color.Item0);
77 | indexer[y, x] = newColor;
78 | }
79 | }
80 | //Cv2.ImShow("TypeSpecificMat", mat);
81 | //Cv2.WaitKey(0);
82 | //Cv2.DestroyAllWindows();
83 | }
84 |
85 | private static long MeasureTime(Action action)
86 | {
87 | var watch = Stopwatch.StartNew();
88 | action();
89 | watch.Stop();
90 | return watch.ElapsedMilliseconds;
91 | }
92 | }
93 | }
94 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/SeamlessClone.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | /// <summary>
8 | /// cv::seamlessClone
9 | /// </summary>
10 | class SeamlessClone : ConsoleTestBase
11 | {
12 | public override void RunTest()
13 | {
14 | Mat src = new Mat(ImagePath.Girl, ImreadModes.Color);
15 | Mat dst = new Mat(ImagePath.Lenna, ImreadModes.Color);
16 | Mat src0 = src.Resize(dst.Size(), 0, 0, InterpolationFlags.Lanczos4);
17 | Mat mask = Mat.Zeros(src0.Size(), MatType.CV_8UC3);
18 |
19 | mask.Circle(200, 200, 100, Scalar.White, -1);
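// The filled white circle (thickness -1) marks the part of src0 that will be cloned into dst,
// centered at the target point (260, 270) passed to SeamlessClone below.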
20 |
21 | Mat blend1 = new Mat();
22 | Mat blend2 = new Mat();
23 | Mat blend3 = new Mat();
24 | Cv2.SeamlessClone(
25 | src0, dst, mask, new Point(260, 270), blend1,
26 | SeamlessCloneMethods.NormalClone);
27 | Cv2.SeamlessClone(
28 | src0, dst, mask, new Point(260, 270), blend2,
29 | SeamlessCloneMethods.MonochromeTransfer);
30 | Cv2.SeamlessClone(
31 | src0, dst, mask, new Point(260, 270), blend3,
32 | SeamlessCloneMethods.MixedClone);
33 |
34 | using (new Window("src", src0))
35 | using (new Window("dst", dst))
36 | using (new Window("mask", mask))
37 | using (new Window("blend NormalClone", blend1))
38 | using (new Window("blend MonochromeTransfer", blend2))
39 | using (new Window("blend MixedClone", blend3))
40 | {
41 | Cv2.WaitKey();
42 | }
43 | }
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/SimpleBlobDetectorSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using SampleBase;
3 | using SampleBase.Console;
4 |
5 | namespace SamplesLegacy
6 | {
7 | internal class SimpleBlobDetectorSample : ConsoleTestBase
8 | {
9 | public override void RunTest()
10 | {
11 | using var src = Cv2.ImRead(ImagePath.Shapes);
12 | using var detectedCircles = new Mat();
13 | using var detectedOvals = new Mat();
14 | // Invert the image. Shapes has a black background and SimpleBlobDetector doesn't seem to work well with that.
15 | Cv2.BitwiseNot(src, src);
16 |
17 | // Parameters tuned to detect only circles
18 | var circleParams = new SimpleBlobDetector.Params
19 | {
20 | MinThreshold = 10,
21 | MaxThreshold = 230,
22 |
23 | // The area is the number of pixels in the blob.
24 | FilterByArea = true,
25 | MinArea = 500,
26 | MaxArea = 50000,
27 |
28 | // Circularity is 4*pi*Area / Perimeter^2: a perfect circle has circularity 1, and polygons with more sides are closer to 1.
29 | FilterByCircularity = true,
30 | MinCircularity = 0.9f,
31 |
32 | // Convexity is the ratio of the area of the blob to the area of its convex hull.
33 | FilterByConvexity = true,
34 | MinConvexity = 0.95f,
35 |
36 | // A circle's inertia ratio is 1. A line's is 0. An oval is between 0 and 1.
37 | FilterByInertia = true,
38 | MinInertiaRatio = 0.95f
39 | };
40 |
41 | // Parameters tuned to find the ovals in the Shapes image.
42 | var ovalParams = new SimpleBlobDetector.Params
43 | {
44 | MinThreshold = 10,
45 | MaxThreshold = 230,
46 | FilterByArea = true,
47 | MinArea = 500,
48 | // The ovals are the smallest blobs in Shapes, so we limit the max area to eliminate the larger blobs.
49 | MaxArea = 10000,
50 | FilterByCircularity = true,
51 | MinCircularity = 0.58f,
52 | FilterByConvexity = true,
53 | MinConvexity = 0.96f,
54 | FilterByInertia = true,
55 | MinInertiaRatio = 0.1f
56 | };
57 |
58 | using var circleDetector = SimpleBlobDetector.Create(circleParams);
59 | using var ovalDetector = SimpleBlobDetector.Create(ovalParams);
60 | var circleKeyPoints = circleDetector.Detect(src);
61 | Cv2.DrawKeypoints(src, circleKeyPoints, detectedCircles, Scalar.HotPink, DrawMatchesFlags.DrawRichKeypoints);
62 |
63 | var ovalKeyPoints = ovalDetector.Detect(src);
64 | Cv2.DrawKeypoints(src, ovalKeyPoints, detectedOvals, Scalar.HotPink, DrawMatchesFlags.DrawRichKeypoints);
65 |
66 | using var w1 = new Window("Detected Circles", detectedCircles);
67 | using var w2 = new Window("Detected Ovals", detectedOvals);
68 | Cv2.WaitKey();
69 | }
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/SolveEquation.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using OpenCvSharp;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesLegacy
8 | {
9 | /// <summary>
10 | /// Solves the linear system AX = Y with Cv2.Solve, using Mat inputs and plain arrays
11 | /// </summary>
12 | class SolveEquation : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | ByMat();
17 | ByNormalArray();
18 |
19 | Console.Read();
20 | }
21 |
22 | /// <summary>
23 | /// Solve equation AX = Y
24 | /// </summary>
25 | private void ByMat()
26 | {
27 | // x + y = 10
28 | // 2x + 3y = 26
29 | // (x=4, y=6)
30 |
31 | double[,] av = {{1, 1},
32 | {2, 3}};
33 | double[] yv = { 10, 26 };
34 |
35 | using var a = Mat.FromPixelData(2, 2, MatType.CV_64FC1, av);
36 | using var y = Mat.FromPixelData(2, 1, MatType.CV_64FC1, yv);
37 | using var x = new Mat();
38 |
39 | Cv2.Solve(a, y, x, DecompTypes.LU);
40 |
41 | Console.WriteLine("ByMat:");
42 | Console.WriteLine("X1 = {0}, X2 = {1}", x.At<double>(0), x.At<double>(1));
43 | }
44 |
45 | /// <summary>
46 | /// Solve equation AX = Y
47 | /// </summary>
48 | private void ByNormalArray()
49 | {
50 | // x + y = 10
51 | // 2x + 3y = 26
52 | // (x=4, y=6)
53 |
54 | double[,] a = {{1, 1},
55 | {2, 3}};
56 |
57 | double[] y = { 10, 26 };
58 |
59 | var x = new List<double>();
60 |
61 | Cv2.Solve(
62 | InputArray.Create(a), InputArray.Create(y),
63 | OutputArray.Create(x),
64 | DecompTypes.LU);
65 |
66 | Console.WriteLine("ByNormalArray:");
67 | Console.WriteLine("X1 = {0}, X2 = {1}", x[0], x[1]);
68 | }
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/StarDetectorSample.cs:
--------------------------------------------------------------------------------
1 | using OpenCvSharp;
2 | using OpenCvSharp.XFeatures2D;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | /// <summary>
9 | /// Retrieves keypoints using the StarDetector algorithm.
10 | /// </summary>
11 | class StarDetectorSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | var dst = new Mat(ImagePath.Lenna, ImreadModes.Color);
16 | var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
17 |
18 | StarDetector detector = StarDetector.Create(45);
19 | KeyPoint[] keypoints = detector.Detect(gray);
20 |
21 | if (keypoints != null)
22 | {
23 | var color = new Scalar(0, 255, 0);
24 | foreach (KeyPoint kpt in keypoints)
25 | {
26 | float r = kpt.Size / 2;
27 | Cv2.Circle(dst, (Point)kpt.Pt, (int)r, color);
28 | Cv2.Line(dst,
29 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y + r),
30 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y - r),
31 | color);
32 | Cv2.Line(dst,
33 | (Point)new Point2f(kpt.Pt.X - r, kpt.Pt.Y + r),
34 | (Point)new Point2f(kpt.Pt.X + r, kpt.Pt.Y - r),
35 | color);
36 | }
37 | }
38 |
39 | using (new Window("StarDetector features", dst))
40 | {
41 | Cv2.WaitKey();
42 | }
43 | }
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/Stitching.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using OpenCvSharp;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesLegacy
8 | {
9 | class Stitching : ConsoleTestBase
10 | {
11 | public override void RunTest()
12 | {
13 | Mat[] images = SelectStitchingImages(200, 200, 10);
14 |
15 | using var stitcher = Stitcher.Create(Stitcher.Mode.Scans);
16 | using var pano = new Mat();
17 |
18 | Console.Write("Stitching start...");
19 | // TODO: does not work??
20 | var status = stitcher.Stitch(images, pano);
21 | Console.WriteLine(" finish (status:{0})", status);
22 |
23 | Window.ShowImages(pano);
24 |
25 | foreach (var image in images)
26 | {
27 | image.Dispose();
28 | }
29 | }
30 |
31 | private static Mat[] SelectStitchingImages(int width, int height, int count)
32 | {
33 | using var source = new Mat(@"Data\Image\lenna.png", ImreadModes.Color);
34 | using var result = source.Clone();
35 |
36 | var rand = new Random();
37 | var mats = new List<Mat>();
38 | for (int i = 0; i < count; i++)
39 | {
40 | int x1 = rand.Next(source.Cols - width);
41 | int y1 = rand.Next(source.Rows - height);
42 | int x2 = x1 + width;
43 | int y2 = y1 + height;
44 |
45 | result.Line(new Point(x1, y1), new Point(x1, y2), new Scalar(0, 0, 255));
46 | result.Line(new Point(x1, y2), new Point(x2, y2), new Scalar(0, 0, 255));
47 | result.Line(new Point(x2, y2), new Point(x2, y1), new Scalar(0, 0, 255));
48 | result.Line(new Point(x2, y1), new Point(x1, y1), new Scalar(0, 0, 255));
49 |
50 | using var m = source[new Rect(x1, y1, width, height)];
51 | mats.Add(m.Clone());
52 | }
53 |
54 | using (new Window("stitching", result))
55 | {
56 | Cv2.WaitKey();
57 | }
58 |
59 | return mats.ToArray();
60 | }
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/Subdiv2DSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Linq;
3 | using OpenCvSharp;
4 | using SampleBase;
5 | using SampleBase.Console;
6 |
7 | namespace SamplesLegacy
8 | {
9 | /// <summary>
10 | /// cv::Subdiv2D test
11 | /// </summary>
12 | class Subdiv2DSample : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | const int Size = 600;
17 |
18 | // Creates random point list
19 | var rand = new Random();
20 | var points = Enumerable.Range(0, 100).Select(_ =>
21 | new Point2f(rand.Next(0, Size), rand.Next(0, Size))).ToArray();
22 |
23 | using var imgExpr = Mat.Zeros(Size, Size, MatType.CV_8UC3);
24 | using var img = imgExpr.ToMat();
25 | foreach (var p in points)
26 | {
27 | img.Circle((Point)p, 4, Scalar.Red, -1);
28 | }
29 |
30 | // Initializes Subdiv2D
31 | using var subdiv = new Subdiv2D();
32 | subdiv.InitDelaunay(new Rect(0, 0, Size, Size));
33 | subdiv.Insert(points);
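// GetVoronoiFacetList returns one Voronoi cell polygon per inserted point (facetCenters holds
// the corresponding points); GetEdgeList further below returns the edges of the dual Delaunay triangulation.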
34 |
35 | // Draws voronoi diagram
36 | subdiv.GetVoronoiFacetList(null, out var facetList, out var facetCenters);
37 |
38 | using var vonoroi = img.Clone();
39 | foreach (var list in facetList)
40 | {
41 | var before = list.Last();
42 | foreach (var p in list)
43 | {
44 | vonoroi.Line((Point)before, (Point)p, new Scalar(64, 255, 128), 1);
45 | before = p;
46 | }
47 | }
48 |
49 | // Draws delaunay diagram
50 | Vec4f[] edgeList = subdiv.GetEdgeList();
51 | using var delaunay = img.Clone();
52 | foreach (var edge in edgeList)
53 | {
54 | var p1 = new Point(edge.Item0, edge.Item1);
55 | var p2 = new Point(edge.Item2, edge.Item3);
56 | delaunay.Line(p1, p2, new Scalar(64, 255, 128), 1);
57 | }
58 |
59 | Cv2.ImShow("voronoi", vonoroi);
60 | Cv2.ImShow("delaunay", delaunay);
61 | Cv2.WaitKey();
62 | Cv2.DestroyAllWindows();
63 | }
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/SuperResolutionSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | /// <summary>
9 | /// Super-resolution (BTV-L1) applied to camera frames
10 | /// </summary>
11 | class SuperResolutionSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | var capture = new VideoCapture();
16 | capture.Set(VideoCaptureProperties.FrameWidth, 640);
17 | capture.Set(VideoCaptureProperties.FrameHeight, 480);
18 | capture.Open(-1);
19 | if (!capture.IsOpened())
20 | throw new Exception("capture initialization failed");
21 |
22 | var fs = FrameSource.CreateFrameSource_Camera(-1);
23 | var sr = SuperResolution.CreateBTVL1();
24 | sr.SetInput(fs);
25 |
26 | using var normalWindow = new Window("normal");
27 | using var srWindow = new Window("super resolution");
28 | var normalFrame = new Mat();
29 | var srFrame = new Mat();
30 | while (true)
31 | {
32 | capture.Read(normalFrame);
33 | sr.NextFrame(srFrame);
34 | if (normalFrame.Empty() || srFrame.Empty())
35 | break;
36 | normalWindow.ShowImage(normalFrame);
37 | srWindow.ShowImage(srFrame);
38 | Cv2.WaitKey(100);
39 | }
40 | }
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/VideoCaptureSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | /// <summary>
9 | /// Plays a movie file with VideoCapture
10 | /// </summary>
11 | class VideoCaptureSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | // Opens MP4 file (ffmpeg is probably needed)
16 | using var capture = new VideoCapture(MoviePath.Bach);
17 | if (!capture.IsOpened())
18 | return;
19 |
20 | int sleepTime = (int)Math.Round(1000 / capture.Fps);
21 |
22 | using var window = new Window("capture");
23 | // Frame image buffer
24 | var image = new Mat();
25 |
26 | // When the movie playback reaches the end, Mat.data becomes NULL.
27 | while (true)
28 | {
29 | capture.Read(image); // same as cvQueryFrame
30 | if(image.Empty())
31 | break;
32 |
33 | window.ShowImage(image);
34 | Cv2.WaitKey(sleepTime);
35 | }
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/VideoWriterSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | /// <summary>
9 | /// Reads a movie, converts each frame (grayscale -> Canny -> resize) and writes the result with VideoWriter
10 | /// </summary>
11 | class VideoWriterSample : ConsoleTestBase
12 | {
13 | public override void RunTest()
14 | {
15 | const string OutVideoFile = "out.avi";
16 |
17 | // Opens MP4 file (ffmpeg is probably needed)
18 | using var capture = new VideoCapture(MoviePath.Bach);
19 |
20 | // Read movie frames and write them to VideoWriter
21 | var dsize = new Size(640, 480);
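// Note: a FourCC of -1 usually makes VideoWriter prompt for a codec via a dialog on Windows;
// passing an explicit FourCC (e.g. MJPG) avoids the prompt.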
22 | using (var writer = new VideoWriter(OutVideoFile, -1, capture.Fps, dsize))
23 | {
24 | Console.WriteLine("Converting each movie frame...");
25 | using var frame = new Mat();
26 | while(true)
27 | {
28 | // Read image
29 | capture.Read(frame);
30 | if(frame.Empty())
31 | break;
32 |
33 | Console.CursorLeft = 0;
34 | Console.Write("{0} / {1}", capture.PosFrames, capture.FrameCount);
35 |
36 | // grayscale -> canny -> resize
37 | using var gray = new Mat();
38 | using var canny = new Mat();
39 | using var dst = new Mat();
40 | Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
41 | Cv2.Canny(gray, canny, 100, 180);
42 | Cv2.Resize(canny, dst, dsize, 0, 0, InterpolationFlags.Linear);
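// Note: dst is single-channel at this point; with the default isColor = true some
// VideoWriter backends expect 3-channel frames, in which case a conversion such as
// Cv2.CvtColor(dst, dst, ColorConversionCodes.GRAY2BGR) may be needed.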
43 | // Write mat to VideoWriter
44 | writer.Write(dst);
45 | }
46 | Console.WriteLine();
47 | }
48 |
49 | // Watch result movie
50 | using (var capture2 = new VideoCapture(OutVideoFile))
51 | using (var window = new Window("result"))
52 | {
53 | int sleepTime = (int)(1000 / capture.Fps);
54 |
55 | using var frame = new Mat();
56 | while (true)
57 | {
58 | capture2.Read(frame);
59 | if(frame.Empty())
60 | break;
61 |
62 | window.ShowImage(frame);
63 | Cv2.WaitKey(sleepTime);
64 | }
65 | }
66 | }
67 |
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/SamplesLegacy/Samples/WatershedSample.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using OpenCvSharp;
3 | using SampleBase;
4 | using SampleBase.Console;
5 |
6 | namespace SamplesLegacy
7 | {
8 | /// <summary>
9 | /// Watershed algorithm sample
10 | /// </summary>
11 | /// <remarks>http://opencv.jp/sample/segmentation_and_connection.html#watershed</remarks>
12 | public class WatershedSample : ConsoleTestBase
13 | {
14 | public override void RunTest()
15 | {
16 | using var srcImg = Cv2.ImRead(ImagePath.Lenna, ImreadModes.AnyDepth | ImreadModes.AnyColor);
17 | using var markers = new Mat(srcImg.Size(), MatType.CV_32SC1, Scalar.All(0));
18 | using var dspImg = srcImg.Clone();
19 | using var window = new Window("image", srcImg);
20 | // Mouse event
21 | int seedNum = 0;
22 | window.SetMouseCallback((MouseEventTypes ev, int x, int y, MouseEventFlags flags, IntPtr userdata) =>
23 | {
24 | if (ev == MouseEventTypes.LButtonDown)
25 | {
26 | seedNum++;
27 | var pt = new Point(x, y);
28 | markers.Circle(pt, 10, Scalar.All(seedNum), Cv2.FILLED, LineTypes.Link8);
29 | dspImg.Circle(pt, 10, Scalar.White, 3, LineTypes.Link8);
30 | window.Image = dspImg;
31 | }
32 | });
33 | Window.WaitKey();
34 |
35 | Cv2.Watershed(srcImg, markers);
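// After Cv2.Watershed, each pixel of markers holds the index of the seed region it was
// assigned to, and -1 marks the boundaries between regions (drawn in red below).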
36 |
37 | // draws watershed
38 | using (var dstImg = srcImg.Clone())
39 | using (new Window("watershed transform", dstImg))
40 | {
41 | for (int y = 0; y < markers.Height; y++)
42 | {
43 | for (int x = 0; x < markers.Width; x++)
44 | {
45 | int idx = markers.Get<int>(y, x);
46 | if (idx == -1)
47 | {
48 | dstImg.Rectangle(new Rect(x, y, 2, 2), Scalar.Red, -1);
49 | }
50 | }
51 | }
52 | Window.WaitKey();
53 | }
54 | }
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/SamplesLegacy/packages.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/SamplesVB/Program.vb:
--------------------------------------------------------------------------------
1 | Imports System
2 | Imports System.Collections.Generic
3 | Imports System.Diagnostics
4 | Imports System.Linq
5 | Imports System.Runtime.InteropServices
6 | Imports System.Text
7 |
8 | Imports OpenCvSharp
9 |
10 | Friend NotInheritable Class Program
11 |
12 | Shared Sub Main()
13 | FASTSample.Start()
14 | FlannSample.Start()
15 | HOGSample.Start()
16 | HoughLinesSample.Start()
17 | StarDetectorSample.Start()
18 | End Sub
19 |
20 | End Class
21 |
--------------------------------------------------------------------------------
/SamplesVB/Samples/FASTSample.vb:
--------------------------------------------------------------------------------
1 | Imports System
2 | Imports System.Collections.Generic
3 | Imports System.Linq
4 | Imports System.Text
5 | Imports OpenCvSharp
6 | Imports SampleBase
7 |
8 | ''' <summary>
9 | ''' cv::FAST
10 | ''' </summary>
11 | Friend Module FASTSample
12 | Public Sub Start()
13 | Using imgSrc As New Mat(ImagePath.Lenna, ImreadModes.Color),
14 | imgGray As New Mat(imgSrc.Size, MatType.CV_8UC1),
15 | imgDst As Mat = imgSrc.Clone()
16 | Cv2.CvtColor(imgSrc, imgGray, ColorConversionCodes.BGR2GRAY, 0)
17 |
18 | Dim keypoints() As KeyPoint = Cv2.FAST(imgGray, 50, True)
19 |
20 | For Each kp As KeyPoint In keypoints
21 | imgDst.Circle(kp.Pt, 3, Scalar.Red, -1, LineTypes.AntiAlias, 0)
22 | Next kp
23 |
24 | Cv2.ImShow("FAST", imgDst)
25 | Cv2.WaitKey(0)
26 | Cv2.DestroyAllWindows()
27 | End Using
28 | End Sub
29 | End Module
30 | ' End Namespace
31 |
--------------------------------------------------------------------------------
/SamplesVB/Samples/FlannSample.vb:
--------------------------------------------------------------------------------
1 | Imports System
2 | Imports System.Collections.Generic
3 | Imports System.Linq
4 | Imports System.Text
5 | Imports OpenCvSharp
6 |
7 | ' Namespace OpenCvSharpSamplesVB
8 | ''' <summary>
9 | ''' cv::flann
10 | ''' </summary>
11 | Friend Module FlannSample
12 | Public Sub Start()
13 | Console.WriteLine(Environment.NewLine & String.Format(("===== FlannTest =====")))
14 |
15 | ' creates data set
16 | Using features As New Mat(10000, 2, MatType.CV_32FC1)
17 | Dim rand As New Random()
18 | For i As Integer = 0 To features.Rows - 1
19 | features.Set(Of Single)(i, 0, rand.Next(10000))
20 | features.Set(Of Single)(i, 1, rand.Next(10000))
21 | Next i
22 |
23 | ' query
24 | Dim queryPoint As New Point2f(7777, 7777)
25 | Dim queries As New Mat(1, 2, MatType.CV_32FC1)
26 | queries.Set(Of Single)(0, 0, queryPoint.X)
27 | queries.Set(Of Single)(0, 1, queryPoint.Y)
28 | Console.WriteLine(Environment.NewLine & String.Format("query:({0}, {1})", queryPoint.X, queryPoint.Y))
29 | Console.WriteLine(Environment.NewLine & String.Format("-----"))
30 |
31 | ' knnSearch
32 | Using nnIndex As New Flann.Index(features, New Flann.KDTreeIndexParams(4))
33 | Dim knn As Integer = 1
34 | Dim indices() As Integer = Nothing
35 | Dim dists() As Single = Nothing
36 | nnIndex.KnnSearch(queries, indices, dists, knn, New Flann.SearchParams(32))
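' KDTreeIndexParams(4) builds four randomized kd-trees, and SearchParams(32) limits
' how many leaves are visited per query (a speed/accuracy trade-off).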
37 |
38 | For i As Integer = 0 To knn - 1
39 | Dim index As Integer = indices(i)
40 | Dim dist As Single = dists(i)
41 | Dim pt As New Point2f(features.Get(Of Single)(index, 0), features.Get(Of Single)(index, 1))
42 | Console.WriteLine(Environment.NewLine & String.Format("No.{0}" & vbTab, i))
43 | Console.WriteLine(Environment.NewLine & String.Format("index:{0}", index))
44 | Console.WriteLine(Environment.NewLine & String.Format(" distance:{0}", dist))
45 | Console.WriteLine(Environment.NewLine & String.Format(" data:({0}, {1})", pt.X, pt.Y))
46 | Console.WriteLine(Environment.NewLine & " ")
47 | Next i
48 | 'Console.Read()
49 | End Using
50 | End Using
51 | End Sub
52 | End Module
53 | ' End Namespace
54 |
--------------------------------------------------------------------------------
/SamplesVB/Samples/HOGSample.vb:
--------------------------------------------------------------------------------
1 | Imports System
2 | Imports System.Collections.Generic
3 | Imports System.Diagnostics
4 | Imports System.Linq
5 | Imports System.Text
6 | Imports OpenCvSharp
7 | Imports SampleBase
8 | 'using GPU = OpenCvSharp.Gpu;
9 |
10 | ' Namespace OpenCvSharpSamplesVB
11 | ''' <summary>
12 | ''' samples/c/peopledetect.c
13 | ''' </summary>
14 | Friend Module HOGSample
15 | Public Sub Start()
16 | Dim img As Mat = Cv2.ImRead(ImagePath.Asahiyama, ImreadModes.Color)
17 |
18 | Dim hog As New HOGDescriptor()
19 | hog.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector())
20 |
21 | Dim b As Boolean = hog.CheckDetectorSize()
22 | b.ToString()
23 |
24 | Dim watch As Stopwatch = Stopwatch.StartNew()
25 |
26 | ' run the detector with default parameters. to get a higher hit-rate
27 | ' (and more false alarms, respectively), decrease the hitThreshold and
28 | ' groupThreshold (set groupThreshold to 0 to turn off the grouping completely).
29 | Dim found() As Rect = hog.DetectMultiScale(img, 0, New Size(8, 8), New Size(24, 16), 1.05, 2)
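' Arguments above: hit threshold 0, window stride 8x8, padding 24x16,
' scale step 1.05, group threshold 2 (see the tuning note above).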
30 |
31 | watch.Stop()
32 | Console.WriteLine(Environment.NewLine & String.Format("Detection time = {0}ms", watch.ElapsedMilliseconds))
33 | Console.WriteLine(Environment.NewLine & String.Format("{0} region(s) found", found.Length))
34 |
35 | For Each rect As Rect In found
36 | ' the HOG detector returns slightly larger rectangles than the real objects.
37 | ' so we slightly shrink the rectangles to get a nicer output.
38 | Dim r As Rect = New Rect With {
39 | .X = rect.X + CInt(Math.Truncate(Math.Round(rect.Width * 0.1))),
40 | .Y = rect.Y + CInt(Math.Truncate(Math.Round(rect.Height * 0.1))),
41 | .Width = CInt(Math.Truncate(Math.Round(rect.Width * 0.8))),
42 | .Height = CInt(Math.Truncate(Math.Round(rect.Height * 0.8)))
43 | }
44 | img.Rectangle(r.TopLeft, r.BottomRight, Scalar.Red, 3, LineTypes.Link8, 0)
45 | Next rect
46 |
47 | Using window As New Window("people detector", img, WindowFlags.Normal)
48 | window.SetProperty(WindowPropertyFlags.Fullscreen, 1)
49 | Cv2.WaitKey(0)
50 | End Using
51 | End Sub
52 | End Module
53 | ' End Namespace
54 |
--------------------------------------------------------------------------------
/SamplesVB/Samples/HoughLinesSample.vb:
--------------------------------------------------------------------------------
1 | Imports OpenCvSharp
2 |
3 | ' Namespace OpenCvSharpSamplesVB
4 | Imports SampleBase
5 |
6 | ''' <summary>
7 | ''' Line detection using the Hough transform
8 | ''' </summary>
9 | ''' <remarks>http://opencv.jp/sample/special_transforms.html#hough_line</remarks>
10 | Friend Module HoughLinesSample
11 | Public Sub Start()
12 |
13 | ' (1) Load the images
14 | Using imgGray As New Mat(ImagePath.Goryokaku, ImreadModes.Grayscale),
15 | imgStd As New Mat(ImagePath.Goryokaku, ImreadModes.Color),
16 | imgProb As Mat = imgStd.Clone()
17 | ' Preprocess
18 | Cv2.Canny(imgGray, imgGray, 50, 200, 3, False)
19 |
20 | ' (3) Detect lines with the standard Hough transform and draw them
21 | Dim segStd() As LineSegmentPolar = Cv2.HoughLines(imgGray, 1, Math.PI / 180, 50, 0, 0)
22 | Dim limit As Integer = Math.Min(segStd.Length, 10)
23 | For i As Integer = 0 To limit - 1
24 | Dim rho As Single = segStd(i).Rho
25 | Dim theta As Single = segStd(i).Theta
26 |
27 | Dim a As Double = Math.Cos(theta)
28 | Dim b As Double = Math.Sin(theta)
29 | Dim x0 As Double = a * rho
30 | Dim y0 As Double = b * rho
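' (x0, y0) = rho * (cos(theta), sin(theta)) is the foot of the perpendicular from the
' origin onto the detected line; the endpoints below extend 1000 px in both directions
' along the line direction (-sin(theta), cos(theta)).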
31 | Dim pt1 As Point = New Point With {.X = Math.Round(x0 + 1000 * (-b)), .Y = Math.Round(y0 + 1000 * (a))}
32 | Dim pt2 As Point = New Point With {.X = Math.Round(x0 - 1000 * (-b)), .Y = Math.Round(y0 - 1000 * (a))}
33 | imgStd.Line(pt1, pt2, New Scalar(0, 0, 255), 3, LineTypes.AntiAlias, 0)
34 | Next i
35 |
36 | ' (4) Detect line segments with the probabilistic Hough transform and draw them
37 | Dim segProb() As LineSegmentPoint = Cv2.HoughLinesP(imgGray, 1, Math.PI / 180, 50, 50, 10)
38 | For Each s As LineSegmentPoint In segProb
39 | imgProb.Line(s.P1, s.P2, New Scalar(0, 0, 255), 3, LineTypes.AntiAlias, 0)
40 | Next s
41 |
42 |
43 | ' (5) Create windows for the detection results and display them
44 | Using tempCvWindow = New Window("Hough_line_standard", imgStd),
45 | TempCvWindowProb = New Window("Hough_line_probabilistic", imgProb)
46 | Window.WaitKey(0)
47 | End Using
48 | End Using
49 | End Sub
50 |
51 | End Module
52 | ' End Namespace
53 |
--------------------------------------------------------------------------------
/SamplesVB/Samples/StarDetectorSample.vb:
--------------------------------------------------------------------------------
1 | Imports System
2 | Imports OpenCvSharp
3 | Imports OpenCvSharp.XFeatures2D
4 |
5 | ' Namespace OpenCvSharpSamplesVB
6 | Imports SampleBase
7 |
8 | ''' <summary>
9 | ''' Retrieves keypoints using the StarDetector algorithm.
10 | ''' </summary>
11 | Friend Module StarDetectorSample
12 | Public Sub Start()
13 | Using src As New Mat(ImagePath.Lenna, ImreadModes.Grayscale),
14 | dst As New Mat()
15 | Cv2.CvtColor(src, dst, ColorConversionCodes.GRAY2BGR)
16 |
17 | CppStyleStarDetector(src, dst) ' C++-style
18 |
19 | Using w1 As New Window("img", src),
20 | w2 As New Window("features", dst)
21 | Cv2.WaitKey()
22 | End Using
23 | End Using
24 | End Sub
25 |
26 | ''' <summary>
27 | ''' Extracts keypoints by C++-style code (cv::StarDetector)
28 | ''' </summary>
29 | ''' <param name="src">Source (grayscale) image</param>
30 | ''' <param name="dst">Destination image on which the keypoints are drawn</param>
31 | Private Sub CppStyleStarDetector(src As Mat, dst As Mat)
32 | Dim detector As StarDetector = StarDetector.Create()
33 | Dim keypoints() As KeyPoint = detector.Detect(src, Nothing)
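' Each detected keypoint is drawn as a green circle of radius Size/2 with an X through its center.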
34 |
35 | If keypoints IsNot Nothing Then
36 | For Each kpt As KeyPoint In keypoints
37 | Dim r As Single = kpt.Size / 2
38 | Dim a = kpt.Pt
39 |
40 | Cv2.Circle(dst, kpt.Pt, Math.Truncate(r), New Scalar(0, 255, 0), 1, LineTypes.Link8, 0)
41 | Cv2.Line(dst, New Point(kpt.Pt.X + r, kpt.Pt.Y + r), New Point(kpt.Pt.X - r, kpt.Pt.Y - r), New Scalar(0, 255, 0), 1, LineTypes.Link8, 0)
42 | Cv2.Line(dst, New Point(kpt.Pt.X - r, kpt.Pt.Y + r), New Point(kpt.Pt.X + r, kpt.Pt.Y - r), New Scalar(0, 255, 0), 1, LineTypes.Link8, 0)
43 | Next kpt
44 | End If
45 |
46 | End Sub
47 | End Module
48 | ' End Namespace
49 |
--------------------------------------------------------------------------------
/SamplesVB/SamplesVB.vbproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | SamplesVB2
6 | net8.0
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/VideoCaptureForm/App.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/VideoCaptureForm/Program.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Windows.Forms;
3 |
4 | namespace VideoCaptureForm
5 | {
6 | static class Program
7 | {
8 | /// <summary>
9 | /// The main entry point for the application.
10 | /// </summary>
11 | [STAThread]
12 | static void Main()
13 | {
14 | Application.EnableVisualStyles();
15 | Application.SetCompatibleTextRenderingDefault(false);
16 | Application.Run(new VideoCaptureForm());
17 | }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/VideoCaptureForm/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | using System.Reflection;
2 | using System.Runtime.InteropServices;
3 |
4 | // General Information about an assembly is controlled through the following
5 | // set of attributes. Change these attribute values to modify the information
6 | // associated with an assembly.
7 | [assembly: AssemblyTitle("VideoCaptureForm")]
8 | [assembly: AssemblyDescription("")]
9 | [assembly: AssemblyConfiguration("")]
10 | [assembly: AssemblyCompany("")]
11 | [assembly: AssemblyProduct("VideoCaptureForm")]
12 | [assembly: AssemblyCopyright("Copyright © 2020")]
13 | [assembly: AssemblyTrademark("")]
14 | [assembly: AssemblyCulture("")]
15 |
16 | // Setting ComVisible to false makes the types in this assembly not visible
17 | // to COM components. If you need to access a type in this assembly from
18 | // COM, set the ComVisible attribute to true on that type.
19 | [assembly: ComVisible(false)]
20 |
21 | // The following GUID is for the ID of the typelib if this project is exposed to COM
22 | [assembly: Guid("9dcc89f3-9d9f-4813-a4a3-36f8457e7f85")]
23 |
24 | // Version information for an assembly consists of the following four values:
25 | //
26 | //      Major Version
27 | //      Minor Version
28 | //      Build Number
29 | //      Revision
30 | //
31 | // You can specify all the values or you can default the Build and Revision Numbers
32 | // by using the '*' as shown below:
33 | // [assembly: AssemblyVersion("1.0.*")]
34 | [assembly: AssemblyVersion("1.0.0.0")]
35 | [assembly: AssemblyFileVersion("1.0.0.0")]
36 |
--------------------------------------------------------------------------------
/VideoCaptureForm/Properties/Resources.Designer.cs:
--------------------------------------------------------------------------------
1 | //------------------------------------------------------------------------------
2 | // <auto-generated>
3 | //     This code was generated by a tool.
4 | //     Runtime Version:4.0.30319.42000
5 | //
6 | //     Changes to this file may cause incorrect behavior and will be lost if
7 | //     the code is regenerated.
8 | // </auto-generated>
9 | //------------------------------------------------------------------------------
10 |
11 | namespace VideoCaptureForm.Properties {
12 | using System;
13 |
14 |
15 | /// <summary>
16 | ///   A strongly-typed resource class, for looking up localized strings, etc.
17 | /// </summary>
18 | // This class was auto-generated by the StronglyTypedResourceBuilder
19 | // class via a tool like ResGen or Visual Studio.
20 | // To add or remove a member, edit your .ResX file then rerun ResGen
21 | // with the /str option, or rebuild your VS project.
22 | [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "17.0.0.0")]
23 | [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
24 | [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
25 | internal class Resources {
26 |
27 | private static global::System.Resources.ResourceManager resourceMan;
28 |
29 | private static global::System.Globalization.CultureInfo resourceCulture;
30 |
31 | [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
32 | internal Resources() {
33 | }
34 |
35 | /// <summary>
36 | ///   Returns the cached ResourceManager instance used by this class.
37 | /// </summary>
38 | [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
39 | internal static global::System.Resources.ResourceManager ResourceManager {
40 | get {
41 | if (object.ReferenceEquals(resourceMan, null)) {
42 | global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("VideoCaptureForm.Properties.Resources", typeof(Resources).Assembly);
43 | resourceMan = temp;
44 | }
45 | return resourceMan;
46 | }
47 | }
48 |
49 | /// <summary>
50 | ///   Overrides the current thread's CurrentUICulture property for all
51 | ///   resource lookups using this strongly typed resource class.
52 | /// </summary>
53 | [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
54 | internal static global::System.Globalization.CultureInfo Culture {
55 | get {
56 | return resourceCulture;
57 | }
58 | set {
59 | resourceCulture = value;
60 | }
61 | }
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/VideoCaptureForm/Properties/Settings.Designer.cs:
--------------------------------------------------------------------------------
1 | //------------------------------------------------------------------------------
2 | // <auto-generated>
3 | //     This code was generated by a tool.
4 | //     Runtime Version:4.0.30319.42000
5 | //
6 | //     Changes to this file may cause incorrect behavior and will be lost if
7 | //     the code is regenerated.
8 | // </auto-generated>
9 | //------------------------------------------------------------------------------
10 |
11 | namespace VideoCaptureForm.Properties {
12 |
13 |
14 | [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
15 | [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "17.2.0.0")]
16 | internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
17 |
18 | private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
19 |
20 | public static Settings Default {
21 | get {
22 | return defaultInstance;
23 | }
24 | }
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/VideoCaptureForm/Properties/Settings.settings:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/VideoCaptureForm/VideoCaptureForm.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.ComponentModel;
3 | using System.Drawing;
4 | using System.Threading;
5 | using System.Windows.Forms;
6 | using OpenCvSharp;
7 | using OpenCvSharp.Extensions;
8 |
9 | namespace VideoCaptureForm
10 | {
11 | public partial class VideoCaptureForm : Form
12 | {
13 | private readonly VideoCapture capture;
14 | private readonly CascadeClassifier cascadeClassifier;
15 |
16 | public VideoCaptureForm()
17 | {
18 | InitializeComponent();
19 |
20 | capture = new VideoCapture();
21 | cascadeClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");
22 | }
23 |
24 | private void VideoCaptureForm_Load(object sender, EventArgs e)
25 | {
26 | capture.Open(0, VideoCaptureAPIs.ANY);
27 | if (!capture.IsOpened())
28 | {
29 | Close();
30 | return;
31 | }
32 |
33 | ClientSize = new System.Drawing.Size(capture.FrameWidth, capture.FrameHeight);
34 |
35 | backgroundWorker1.RunWorkerAsync();
36 | }
37 |
38 | private void VideoCaptureForm_FormClosing(object sender, FormClosingEventArgs e)
39 | {
40 | backgroundWorker1.CancelAsync();
41 | capture.Dispose();
42 | cascadeClassifier.Dispose();
43 | }
44 |
45 | private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
46 | {
47 | var bgWorker = (BackgroundWorker) sender;
48 |
49 | while (!bgWorker.CancellationPending)
50 | {
51 | using (var frameMat = capture.RetrieveMat())
52 | {
53 | var rects = cascadeClassifier.DetectMultiScale(frameMat, 1.1, 5, HaarDetectionTypes.ScaleImage, new OpenCvSharp.Size(30, 30));
54 | if (rects.Length > 0)
55 | {
56 | Cv2.Rectangle(frameMat, rects[0], Scalar.Red);
57 | }
58 |
59 | var frameBitmap = BitmapConverter.ToBitmap(frameMat);
60 | bgWorker.ReportProgress(0, frameBitmap);
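// ReportProgress hands the frame to the UI thread via the ProgressChanged event;
// it requires WorkerReportsProgress = true on backgroundWorker1 (presumably configured
// in the designer file, which is not shown here).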
61 | }
62 | Thread.Sleep(100);
63 | }
64 | }
65 |
66 | private void backgroundWorker1_ProgressChanged(object sender, ProgressChangedEventArgs e)
67 | {
68 | var frameBitmap = (Bitmap)e.UserState;
69 | pictureBox1.Image?.Dispose();
70 | pictureBox1.Image = frameBitmap;
71 | }
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/VideoCaptureForm/packages.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/VideoCaptureWPF/App.xaml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/VideoCaptureWPF/App.xaml.cs:
--------------------------------------------------------------------------------
1 | using System.Windows;
2 |
3 | namespace VideoCaptureWPF
4 | {
5 | /// <summary>
6 | /// Interaction logic for App.xaml
7 | /// </summary>
8 | public partial class App : Application
9 | {
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/VideoCaptureWPF/MainWindow.xaml:
--------------------------------------------------------------------------------
1 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/VideoCaptureWPF/MainWindow.xaml.cs:
--------------------------------------------------------------------------------
1 | using System.ComponentModel;
2 | using System.Threading;
3 |
4 | using OpenCvSharp;
5 | using OpenCvSharp.WpfExtensions;
6 |
7 | namespace VideoCaptureWPF
8 | {
9 | /// <summary>
10 | /// Interaction logic for MainWindow.xaml
11 | /// </summary>
12 | public partial class MainWindow : System.Windows.Window
13 | {
14 | private readonly VideoCapture capture;
15 | private readonly CascadeClassifier cascadeClassifier;
16 |
17 | private readonly BackgroundWorker bkgWorker;
18 |
19 | public MainWindow()
20 | {
21 | InitializeComponent();
22 |
23 | capture = new VideoCapture();
24 | cascadeClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");
25 |
26 | bkgWorker = new BackgroundWorker { WorkerSupportsCancellation = true };
27 | bkgWorker.DoWork += Worker_DoWork;
28 |
29 | Loaded += MainWindow_Loaded;
30 | Closing += MainWindow_Closing;
31 | }
32 |
33 | private void MainWindow_Loaded(object sender, System.Windows.RoutedEventArgs e)
34 | {
35 | capture.Open(0, VideoCaptureAPIs.ANY);
36 | if (!capture.IsOpened())
37 | {
38 | Close();
39 | return;
40 | }
41 |
42 | bkgWorker.RunWorkerAsync();
43 | }
44 |
45 | private void MainWindow_Closing(object sender, CancelEventArgs e)
46 | {
47 | bkgWorker.CancelAsync();
48 |
49 | capture.Dispose();
50 | cascadeClassifier.Dispose();
51 | }
52 |
53 | private void Worker_DoWork(object sender, DoWorkEventArgs e)
54 | {
55 | var worker = (BackgroundWorker)sender;
56 | while (!worker.CancellationPending)
57 | {
58 | using (var frameMat = capture.RetrieveMat())
59 | {
60 | var rects = cascadeClassifier.DetectMultiScale(frameMat, 1.1, 5, HaarDetectionTypes.ScaleImage, new OpenCvSharp.Size(30, 30));
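// Cascade parameters: scale factor 1.1, min neighbors 5, minimum face size 30x30 px.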
61 |
62 | foreach (var rect in rects)
63 | {
64 | Cv2.Rectangle(frameMat, rect, Scalar.Red);
65 | }
66 |
67 | // Must create and use WriteableBitmap in the same thread(UI Thread).
68 | Dispatcher.Invoke(() =>
69 | {
70 | FrameImage.Source = frameMat.ToWriteableBitmap();
71 | });
72 | }
73 |
74 | Thread.Sleep(30);
75 | }
76 | }
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/VideoCaptureWPF/VideoCaptureWPF.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | WinExe
5 | true
6 | net8.0-windows
7 | VideoCaptureWPF
8 | VideoCaptureWPF
9 | 4
10 |
11 |
12 |
13 |
14 | PreserveNewest
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------