├── .gitattributes ├── .gitignore ├── PaddleInferenceSharp.sln ├── PaddleInferenceSharp ├── NativeMethods.cs ├── PaddleInfer.cs └── PaddleInferenceSharp.csproj ├── README.md ├── doc └── image │ ├── paddleinferencesharp.drawio.png │ └── paddle名称.drawio.png ├── paddle_infrer_api ├── dll │ ├── PaddleInferAPI.dll │ ├── PaddleInferAPI.exp │ └── PaddleInferAPI.lib ├── paddle_infrer_api.vcxproj ├── paddle_infrer_api.vcxproj.filters └── src │ └── paddle_infer_cpp_api.cpp └── text_paddle_infer ├── PP-Yoloe.cs ├── Program.cs ├── ResNet50.cs └── text_paddle_infer.csproj /.gitattributes: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Set default behavior to automatically normalize line endings. 3 | ############################################################################### 4 | * text=auto 5 | 6 | ############################################################################### 7 | # Set default behavior for command prompt diff. 8 | # 9 | # This is need for earlier builds of msysgit that does not have it on by 10 | # default for csharp files. 11 | # Note: This is only used by command line 12 | ############################################################################### 13 | #*.cs diff=csharp 14 | 15 | ############################################################################### 16 | # Set the merge driver for project and solution files 17 | # 18 | # Merging from the command prompt will add diff markers to the files if there 19 | # are conflicts (Merging from VS is not affected by the settings below, in VS 20 | # the diff markers are never inserted). Diff markers may cause the following 21 | # file extensions to fail to load in VS. An alternative would be to treat 22 | # these files as binary and thus will always conflict and require user 23 | # intervention with every merge. 
To do so, just uncomment the entries below 24 | ############################################################################### 25 | #*.sln merge=binary 26 | #*.csproj merge=binary 27 | #*.vbproj merge=binary 28 | #*.vcxproj merge=binary 29 | #*.vcproj merge=binary 30 | #*.dbproj merge=binary 31 | #*.fsproj merge=binary 32 | #*.lsproj merge=binary 33 | #*.wixproj merge=binary 34 | #*.modelproj merge=binary 35 | #*.sqlproj merge=binary 36 | #*.wwaproj merge=binary 37 | 38 | ############################################################################### 39 | # behavior for image files 40 | # 41 | # image files are treated as binary by default. 42 | ############################################################################### 43 | #*.jpg binary 44 | #*.png binary 45 | #*.gif binary 46 | 47 | ############################################################################### 48 | # diff behavior for common document formats 49 | # 50 | # Convert binary document formats to text before diffing them. This feature 51 | # is only available from the command line. Turn it on by uncommenting the 52 | # entries below. 53 | ############################################################################### 54 | #*.doc diff=astextplain 55 | #*.DOC diff=astextplain 56 | #*.docx diff=astextplain 57 | #*.DOCX diff=astextplain 58 | #*.dot diff=astextplain 59 | #*.DOT diff=astextplain 60 | #*.pdf diff=astextplain 61 | #*.PDF diff=astextplain 62 | #*.rtf diff=astextplain 63 | #*.RTF diff=astextplain 64 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | ## 4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 5 | 6 | # User-specific files 7 | *.rsuser 8 | *.suo 9 | *.user 10 | *.userosscache 11 | *.sln.docstates 12 | 13 | # User-specific files (MonoDevelop/Xamarin Studio) 14 | *.userprefs 15 | 16 | # Mono auto generated files 17 | mono_crash.* 18 | 19 | # Build results 20 | [Dd]ebug/ 21 | [Dd]ebugPublic/ 22 | [Rr]elease/ 23 | [Rr]eleases/ 24 | x64/ 25 | x86/ 26 | [Ww][Ii][Nn]32/ 27 | [Aa][Rr][Mm]/ 28 | [Aa][Rr][Mm]64/ 29 | bld/ 30 | [Bb]in/ 31 | [Oo]bj/ 32 | [Oo]ut/ 33 | [Ll]og/ 34 | [Ll]ogs/ 35 | 36 | # Visual Studio 2015/2017 cache/options directory 37 | .vs/ 38 | # Uncomment if you have tasks that create the project's static files in wwwroot 39 | #wwwroot/ 40 | 41 | # Visual Studio 2017 auto generated files 42 | Generated\ Files/ 43 | 44 | # MSTest test Results 45 | [Tt]est[Rr]esult*/ 46 | [Bb]uild[Ll]og.* 47 | 48 | # NUnit 49 | *.VisualState.xml 50 | TestResult.xml 51 | nunit-*.xml 52 | 53 | # Build Results of an ATL Project 54 | [Dd]ebugPS/ 55 | [Rr]eleasePS/ 56 | dlldata.c 57 | 58 | # Benchmark Results 59 | BenchmarkDotNet.Artifacts/ 60 | 61 | # .NET Core 62 | project.lock.json 63 | project.fragment.lock.json 64 | artifacts/ 65 | 66 | # ASP.NET Scaffolding 67 | ScaffoldingReadMe.txt 68 | 69 | # StyleCop 70 | StyleCopReport.xml 71 | 72 | # Files built by Visual Studio 73 | *_i.c 74 | *_p.c 75 | *_h.h 76 | *.ilk 77 | *.meta 78 | *.obj 79 | *.iobj 80 | *.pch 81 | *.pdb 82 | *.ipdb 83 | *.pgc 84 | *.pgd 85 | *.rsp 86 | *.sbr 87 | *.tlb 88 | *.tli 89 | *.tlh 90 | *.tmp 91 | *.tmp_proj 92 | *_wpftmp.csproj 93 | *.log 94 | *.vspscc 95 | *.vssscc 96 | .builds 97 | *.pidb 98 | *.svclog 99 | *.scc 100 | 101 | # Chutzpah Test files 102 | _Chutzpah* 103 | 104 | # Visual C++ cache files 105 | ipch/ 106 | *.aps 107 | *.ncb 108 | *.opendb 109 | *.opensdf 110 | *.sdf 111 | *.cachefile 112 | *.VC.db 113 | *.VC.VC.opendb 114 | 115 | # Visual Studio profiler 116 | *.psess 117 | *.vsp 
118 | *.vspx 119 | *.sap 120 | 121 | # Visual Studio Trace Files 122 | *.e2e 123 | 124 | # TFS 2012 Local Workspace 125 | $tf/ 126 | 127 | # Guidance Automation Toolkit 128 | *.gpState 129 | 130 | # ReSharper is a .NET coding add-in 131 | _ReSharper*/ 132 | *.[Rr]e[Ss]harper 133 | *.DotSettings.user 134 | 135 | # TeamCity is a build add-in 136 | _TeamCity* 137 | 138 | # DotCover is a Code Coverage Tool 139 | *.dotCover 140 | 141 | # AxoCover is a Code Coverage Tool 142 | .axoCover/* 143 | !.axoCover/settings.json 144 | 145 | # Coverlet is a free, cross platform Code Coverage Tool 146 | coverage*.json 147 | coverage*.xml 148 | coverage*.info 149 | 150 | # Visual Studio code coverage results 151 | *.coverage 152 | *.coveragexml 153 | 154 | # NCrunch 155 | _NCrunch_* 156 | .*crunch*.local.xml 157 | nCrunchTemp_* 158 | 159 | # MightyMoose 160 | *.mm.* 161 | AutoTest.Net/ 162 | 163 | # Web workbench (sass) 164 | .sass-cache/ 165 | 166 | # Installshield output folder 167 | [Ee]xpress/ 168 | 169 | # DocProject is a documentation generator add-in 170 | DocProject/buildhelp/ 171 | DocProject/Help/*.HxT 172 | DocProject/Help/*.HxC 173 | DocProject/Help/*.hhc 174 | DocProject/Help/*.hhk 175 | DocProject/Help/*.hhp 176 | DocProject/Help/Html2 177 | DocProject/Help/html 178 | 179 | # Click-Once directory 180 | publish/ 181 | 182 | # Publish Web Output 183 | *.[Pp]ublish.xml 184 | *.azurePubxml 185 | # Note: Comment the next line if you want to checkin your web deploy settings, 186 | # but database connection strings (with potential passwords) will be unencrypted 187 | *.pubxml 188 | *.publishproj 189 | 190 | # Microsoft Azure Web App publish settings. 
Comment the next line if you want to 191 | # checkin your Azure Web App publish settings, but sensitive information contained 192 | # in these scripts will be unencrypted 193 | PublishScripts/ 194 | 195 | # NuGet Packages 196 | *.nupkg 197 | # NuGet Symbol Packages 198 | *.snupkg 199 | # The packages folder can be ignored because of Package Restore 200 | **/[Pp]ackages/* 201 | # except build/, which is used as an MSBuild target. 202 | !**/[Pp]ackages/build/ 203 | # Uncomment if necessary however generally it will be regenerated when needed 204 | #!**/[Pp]ackages/repositories.config 205 | # NuGet v3's project.json files produces more ignorable files 206 | *.nuget.props 207 | *.nuget.targets 208 | 209 | # Microsoft Azure Build Output 210 | csx/ 211 | *.build.csdef 212 | 213 | # Microsoft Azure Emulator 214 | ecf/ 215 | rcf/ 216 | 217 | # Windows Store app package directories and files 218 | AppPackages/ 219 | BundleArtifacts/ 220 | Package.StoreAssociation.xml 221 | _pkginfo.txt 222 | *.appx 223 | *.appxbundle 224 | *.appxupload 225 | 226 | # Visual Studio cache files 227 | # files ending in .cache can be ignored 228 | *.[Cc]ache 229 | # but keep track of directories ending in .cache 230 | !?*.[Cc]ache/ 231 | 232 | # Others 233 | ClientBin/ 234 | ~$* 235 | *~ 236 | *.dbmdl 237 | *.dbproj.schemaview 238 | *.jfm 239 | *.pfx 240 | *.publishsettings 241 | orleans.codegen.cs 242 | 243 | # Including strong name files can present a security risk 244 | # (https://github.com/github/gitignore/pull/2483#issue-259490424) 245 | #*.snk 246 | 247 | # Since there are multiple workflows, uncomment next line to ignore bower_components 248 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 249 | #bower_components/ 250 | 251 | # RIA/Silverlight projects 252 | Generated_Code/ 253 | 254 | # Backup & report files from converting an old project file 255 | # to a newer Visual Studio version. 
Backup files are not needed, 256 | # because we have git ;-) 257 | _UpgradeReport_Files/ 258 | Backup*/ 259 | UpgradeLog*.XML 260 | UpgradeLog*.htm 261 | ServiceFabricBackup/ 262 | *.rptproj.bak 263 | 264 | # SQL Server files 265 | *.mdf 266 | *.ldf 267 | *.ndf 268 | 269 | # Business Intelligence projects 270 | *.rdl.data 271 | *.bim.layout 272 | *.bim_*.settings 273 | *.rptproj.rsuser 274 | *- [Bb]ackup.rdl 275 | *- [Bb]ackup ([0-9]).rdl 276 | *- [Bb]ackup ([0-9][0-9]).rdl 277 | 278 | # Microsoft Fakes 279 | FakesAssemblies/ 280 | 281 | # GhostDoc plugin setting file 282 | *.GhostDoc.xml 283 | 284 | # Node.js Tools for Visual Studio 285 | .ntvs_analysis.dat 286 | node_modules/ 287 | 288 | # Visual Studio 6 build log 289 | *.plg 290 | 291 | # Visual Studio 6 workspace options file 292 | *.opt 293 | 294 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 295 | *.vbw 296 | 297 | # Visual Studio LightSwitch build output 298 | **/*.HTMLClient/GeneratedArtifacts 299 | **/*.DesktopClient/GeneratedArtifacts 300 | **/*.DesktopClient/ModelManifest.xml 301 | **/*.Server/GeneratedArtifacts 302 | **/*.Server/ModelManifest.xml 303 | _Pvt_Extensions 304 | 305 | # Paket dependency manager 306 | .paket/paket.exe 307 | paket-files/ 308 | 309 | # FAKE - F# Make 310 | .fake/ 311 | 312 | # CodeRush personal settings 313 | .cr/personal 314 | 315 | # Python Tools for Visual Studio (PTVS) 316 | __pycache__/ 317 | *.pyc 318 | 319 | # Cake - Uncomment if you are using it 320 | # tools/** 321 | # !tools/packages.config 322 | 323 | # Tabs Studio 324 | *.tss 325 | 326 | # Telerik's JustMock configuration file 327 | *.jmconfig 328 | 329 | # BizTalk build output 330 | *.btp.cs 331 | *.btm.cs 332 | *.odx.cs 333 | *.xsd.cs 334 | 335 | # OpenCover UI analysis results 336 | OpenCover/ 337 | 338 | # Azure Stream Analytics local run output 339 | ASALocalRun/ 340 | 341 | # MSBuild Binary and Structured Log 342 | *.binlog 343 | 344 | # NVidia Nsight GPU debugger 
configuration file 345 | *.nvuser 346 | 347 | # MFractors (Xamarin productivity tool) working folder 348 | .mfractor/ 349 | 350 | # Local History for Visual Studio 351 | .localhistory/ 352 | 353 | # BeatPulse healthcheck temp database 354 | healthchecksdb 355 | 356 | # Backup folder for Package Reference Convert tool in Visual Studio 2017 357 | MigrationBackup/ 358 | 359 | # Ionide (cross platform F# VS Code tools) working folder 360 | .ionide/ 361 | 362 | # Fody - auto-generated XML schema 363 | FodyWeavers.xsd -------------------------------------------------------------------------------- /PaddleInferenceSharp.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 17 4 | VisualStudioVersion = 17.2.32602.215 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "paddle_infrer_api", "paddle_infrer_api\paddle_infrer_api.vcxproj", "{CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}" 7 | EndProject 8 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PaddleInferenceSharp", "PaddleInferenceSharp\PaddleInferenceSharp.csproj", "{9C435BE5-E669-4FF4-8375-5B559EF7DA24}" 9 | EndProject 10 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "text_paddle_infer", "text_paddle_infer\text_paddle_infer.csproj", "{4A429A5A-3043-44E9-8A7B-F95D42775B6F}" 11 | EndProject 12 | Global 13 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 14 | Debug|Any CPU = Debug|Any CPU 15 | Debug|x64 = Debug|x64 16 | Debug|x86 = Debug|x86 17 | Release|Any CPU = Release|Any CPU 18 | Release|x64 = Release|x64 19 | Release|x86 = Release|x86 20 | EndGlobalSection 21 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 22 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Debug|Any CPU.ActiveCfg = Debug|x64 23 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Debug|Any CPU.Build.0 = Debug|x64 24 | 
{CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Debug|x64.ActiveCfg = Debug|x64 25 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Debug|x64.Build.0 = Debug|x64 26 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Debug|x86.ActiveCfg = Debug|Win32 27 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Debug|x86.Build.0 = Debug|Win32 28 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Release|Any CPU.ActiveCfg = Release|x64 29 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Release|Any CPU.Build.0 = Release|x64 30 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Release|x64.ActiveCfg = Release|x64 31 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Release|x64.Build.0 = Release|x64 32 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Release|x86.ActiveCfg = Release|Win32 33 | {CD3C1D0F-9F39-4F8D-8230-FDA3D50DEB98}.Release|x86.Build.0 = Release|Win32 34 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 35 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Debug|Any CPU.Build.0 = Debug|Any CPU 36 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Debug|x64.ActiveCfg = Debug|Any CPU 37 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Debug|x64.Build.0 = Debug|Any CPU 38 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Debug|x86.ActiveCfg = Debug|Any CPU 39 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Debug|x86.Build.0 = Debug|Any CPU 40 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Release|Any CPU.ActiveCfg = Release|Any CPU 41 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Release|Any CPU.Build.0 = Release|Any CPU 42 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Release|x64.ActiveCfg = Release|Any CPU 43 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Release|x64.Build.0 = Release|Any CPU 44 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Release|x86.ActiveCfg = Release|Any CPU 45 | {9C435BE5-E669-4FF4-8375-5B559EF7DA24}.Release|x86.Build.0 = Release|Any CPU 46 | {4A429A5A-3043-44E9-8A7B-F95D42775B6F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 47 | {4A429A5A-3043-44E9-8A7B-F95D42775B6F}.Debug|Any CPU.Build.0 = Debug|Any CPU 48 | 
using System;
using System.Runtime.InteropServices;

namespace PaddleInferenceSharp
{
    /// <summary>
    /// P/Invoke bindings for the native <c>PaddleInferAPI.dll</c> wrapper around the
    /// Paddle Inference C++ API. All functions take/return an opaque handle
    /// (<see cref="IntPtr"/>) created by <see cref="paddle_infer_init"/>.
    /// </summary>
    /// <remarks>
    /// Declared <c>static</c> because the type is a pure holder of extern declarations
    /// and is never meant to be instantiated (CA1052).
    /// </remarks>
    public static class NativeMethods
    {
        // NOTE(review): machine-specific absolute path — the DLL will not load on any
        // other machine. Consider shipping PaddleInferAPI.dll next to the managed
        // assembly and using just the file name so default probing rules apply.
        private const string paddle_infer_path = @"E:\Git_space\PaddleInferenceSharp\paddle_infrer_api\dll\PaddleInferAPI.dll";

        /// <summary>Creates the native inference context and returns its opaque handle.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr paddle_infer_init();

        /// <summary>Loads the static-graph model and (optionally) its params file into the context.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr set_model(IntPtr paddle_infer, string model_path, string params_path);

        /// <summary>Configures CPU inference with the given math-library thread count.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr predictor_device_cpu(IntPtr paddle_infer, int cpu_math_library_num_threads);

        /// <summary>Configures GPU inference (initial memory in MB, device index, TensorRT workspace size).</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr predictor_device_gpu(IntPtr paddle_infer, ulong memory_init_size, int device_id, int workspace_size);

        /// <summary>Configures ONNX Runtime inference with the given thread count.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr predictor_device_ONNX_runtime(IntPtr paddle_infer, int cpu_math_library_num_threads);

        /// <summary>Configures oneDNN inference with the given cache capacity.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr predictor_device_oneDNN(IntPtr paddle_infer, int capacity);

        /// <summary>
        /// Returns all input node names packed into one string; per-name lengths are
        /// written into the array whose first element is <paramref name="length"/>.
        /// </summary>
        /// <remarks>
        /// NOTE(review): returning <c>string</c> from a P/Invoke makes the CLR free the
        /// native buffer with CoTaskMemFree. This is only safe if the native side
        /// allocates the string with CoTaskMemAlloc — confirm in paddle_infer_cpp_api.cpp;
        /// otherwise the signature should be IntPtr + Marshal.PtrToStringUni.
        /// </remarks>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern string get_input_names(IntPtr paddle_infer, ref int length);

        /// <summary>Sets the shape of the named input node; <paramref name="input_shape"/> is the first element of the dims array.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr set_input_shape(IntPtr paddle_infer, string input_name, ref int input_shape, int length);

        /// <summary>Loads encoded image bytes into the named input; <paramref name="type"/> selects the native preprocessing mode.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr load_input_image_data(IntPtr paddle_infer, string input_name, ref byte image_data, ulong image_size, int type);

        /// <summary>Loads raw float data into the named input; <paramref name="data"/> is the first element of the array.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr load_input_data(IntPtr paddle_infer, string input_name, ref float data);

        /// <summary>Runs inference on the loaded inputs.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr infer(IntPtr paddle_infer);

        /// <summary>
        /// Returns all output node names packed into one string; see
        /// <see cref="get_input_names"/> for the length protocol and the same
        /// marshaling caveat.
        /// </summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern string get_output_names(IntPtr paddle_infer, ref int length);

        /// <summary>Writes the named node's dims into the array at <paramref name="shape"/> and its rank into <paramref name="dimension"/>.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern void get_node_shape(IntPtr paddle_infer_ptr, string node_name, ref int shape, ref int dimension);

        /// <summary>Copies float32 results of the named output into the caller-allocated array at <paramref name="result"/>.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern void read_result_data_F32(IntPtr paddle_infer, string output_name, ref float result);

        /// <summary>Copies int32 results of the named output into the caller-allocated array at <paramref name="result"/>.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern void read_result_data_I32(IntPtr paddle_infer, string output_name, ref int result);

        /// <summary>Copies int64 results of the named output into the caller-allocated array at <paramref name="result"/>.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern void read_result_data_I64(IntPtr paddle_infer, string output_name, ref long result);

        /// <summary>Releases the native inference context. The handle is invalid afterwards.</summary>
        [DllImport(paddle_infer_path, CharSet = CharSet.Unicode, CallingConvention = CallingConvention.Cdecl)]
        public static extern void dispose(IntPtr paddle_infer);
    }
}
using System;
using System.Collections.Generic;

namespace PaddleInferenceSharp
{
    /// <summary>
    /// Managed wrapper around the native Paddle Inference context: model loading,
    /// device selection, input binding, inference, and result retrieval.
    /// </summary>
    public class PaddleInfer
    {
        // Opaque handle to the native inference context; refreshed after each
        // native call that returns the (possibly re-created) context.
        private IntPtr paddle_infer;

        /// <summary>
        /// Initializes the native context and loads the model.
        /// </summary>
        /// <param name="model_path">Path to the static-graph model file.</param>
        /// <param name="params_path">Path to the params file; defaults to a single space, which the native side treats as "no params file" — TODO confirm against paddle_infer_cpp_api.cpp.</param>
        public PaddleInfer(string model_path, string params_path = " ")
        {
            paddle_infer = NativeMethods.paddle_infer_init();
            paddle_infer = NativeMethods.set_model(paddle_infer, model_path, params_path);
        }

        /// <summary>
        /// Selects the inference device: CPU, GPU, ONNX Runtime, or oneDNN.
        /// </summary>
        /// <param name="divice">Device to use (name kept as-is for API compatibility; "device" is intended).</param>
        /// <param name="num">For CPU/ONNX Runtime: thread count (0 means default 10). For GPU: device index. For oneDNN: cache capacity (0 means default 1).</param>
        /// <param name="memory_init_size">Initial GPU memory allocation; only used with <see cref="Divice.GPU"/>.</param>
        /// <param name="workspace_size">GPU workspace size; only used with <see cref="Divice.GPU"/>.</param>
        public void set_divice(Divice divice, int num = 0, ulong memory_init_size = 500, int workspace_size = 30)
        {
            switch (divice)
            {
                case Divice.CPU:
                    paddle_infer = NativeMethods.predictor_device_cpu(paddle_infer, num == 0 ? 10 : num);
                    break;
                case Divice.ONNX_runtime:
                    paddle_infer = NativeMethods.predictor_device_ONNX_runtime(paddle_infer, num == 0 ? 10 : num);
                    break;
                case Divice.oneDNN:
                    paddle_infer = NativeMethods.predictor_device_oneDNN(paddle_infer, num == 0 ? 1 : num);
                    break;
                case Divice.GPU:
                    paddle_infer = NativeMethods.predictor_device_gpu(paddle_infer, memory_init_size, num, workspace_size);
                    break;
            }
        }

        /// <summary>
        /// Gets the names of the model's input nodes.
        /// </summary>
        /// <returns>List of input node names.</returns>
        public List<string> get_input_names()
        {
            // NOTE(review): assumes the model has at most 5 input nodes — the native
            // side writes one length per node into this array; confirm the contract.
            int[] length = new int[5];
            string packed = NativeMethods.get_input_names(paddle_infer, ref length[0]);
            return split_names(packed, length);
        }

        /// <summary>
        /// Sets the shape of the named input node.
        /// </summary>
        /// <param name="input_shape">Dimension sizes.</param>
        /// <param name="input_name">Input node name.</param>
        public void set_input_shape(int[] input_shape, string input_name)
        {
            paddle_infer = NativeMethods.set_input_shape(paddle_infer, input_name, ref input_shape[0], input_shape.Length);
        }

        /// <summary>
        /// Loads raw float data into the named input node.
        /// </summary>
        /// <param name="input_name">Input node name.</param>
        /// <param name="input_data">Input data; must match the node's shape.</param>
        public void load_input_data(string input_name, float[] input_data)
        {
            paddle_infer = NativeMethods.load_input_data(paddle_infer, input_name, ref input_data[0]);
        }

        /// <summary>
        /// Loads encoded image bytes into the named input node.
        /// </summary>
        /// <param name="input_name">Input node name.</param>
        /// <param name="image_data">Encoded image bytes.</param>
        /// <param name="image_size">Byte length of the image.</param>
        /// <param name="type">Native preprocessing mode: 0 = mean/std normalize + resize, 1 = plain normalize + resize, 2 = mean/std normalize + affine transform.</param>
        public void load_input_data(string input_name, byte[] image_data, ulong image_size, int type)
        {
            paddle_infer = NativeMethods.load_input_image_data(paddle_infer, input_name, ref image_data[0], image_size, type);
        }

        /// <summary>
        /// Runs inference on the currently loaded inputs.
        /// </summary>
        public void infer()
        {
            paddle_infer = NativeMethods.infer(paddle_infer);
        }

        /// <summary>
        /// Gets the names of the model's output nodes.
        /// </summary>
        /// <returns>List of output node names.</returns>
        public List<string> get_output_names()
        {
            // NOTE(review): same 5-node assumption as get_input_names.
            int[] length = new int[5];
            string packed = NativeMethods.get_output_names(paddle_infer, ref length[0]);
            return split_names(packed, length);
        }

        /// <summary>
        /// Gets the shape of the named node.
        /// </summary>
        /// <param name="node_name">Node name.</param>
        /// <returns>Dimension sizes, one entry per dimension.</returns>
        public List<int> get_shape(string node_name)
        {
            // NOTE(review): assumes rank <= 5 — confirm the native contract.
            int[] shape = new int[5];
            int dimension = 0;
            NativeMethods.get_node_shape(paddle_infer, node_name, ref shape[0], ref dimension);
            List<int> shape_out = new List<int>();
            for (int i = 0; i < dimension; i++)
            {
                shape_out.Add(shape[i]);
            }
            return shape_out;
        }

        /// <summary>
        /// Reads inference results for the named output node.
        /// </summary>
        /// <typeparam name="T">Element type: <see cref="int"/>, <see cref="long"/>, or <see cref="float"/>.</typeparam>
        /// <param name="output_name">Output node name.</param>
        /// <param name="data_size">Number of elements to read.</param>
        /// <returns>The result array (empty when <paramref name="data_size"/> is not positive).</returns>
        /// <exception cref="NotSupportedException">Thrown for any other <typeparamref name="T"/>.</exception>
        public T[] read_infer_result<T>(string output_name, int data_size)
        {
            // Previously a non-positive size crashed on buffer[0]; return empty instead.
            if (data_size <= 0)
            {
                return Array.Empty<T>();
            }
            // Dispatch on the actual type instead of comparing type-name strings;
            // unsupported T used to fall into the float path and die with an opaque
            // InvalidCastException from Convert.ChangeType.
            if (typeof(T) == typeof(int))
            {
                int[] buffer = new int[data_size];
                NativeMethods.read_result_data_I32(paddle_infer, output_name, ref buffer[0]);
                return (T[])(object)buffer;
            }
            if (typeof(T) == typeof(long))
            {
                long[] buffer = new long[data_size];
                NativeMethods.read_result_data_I64(paddle_infer, output_name, ref buffer[0]);
                return (T[])(object)buffer;
            }
            if (typeof(T) == typeof(float))
            {
                float[] buffer = new float[data_size];
                NativeMethods.read_result_data_F32(paddle_infer, output_name, ref buffer[0]);
                return (T[])(object)buffer;
            }
            throw new NotSupportedException($"read_infer_result<T> supports int, long and float only; got {typeof(T)}.");
        }

        /// <summary>
        /// Releases the native context. The wrapper must not be used afterwards.
        /// (Name kept as-is for API compatibility; "delete"/IDisposable is intended.)
        /// </summary>
        public void delet()
        {
            NativeMethods.dispose(paddle_infer);
        }

        // Splits the packed name string returned by the native side into individual
        // names using the per-name lengths array; stops once the string is consumed.
        private static List<string> split_names(string packed, int[] lengths)
        {
            List<string> names = new List<string>();
            int offset = 0;
            for (int i = 0; i < lengths.Length; i++)
            {
                names.Add(packed.Substring(offset, lengths[i]));
                offset += lengths[i];
                if (offset >= packed.Length)
                {
                    break;
                }
            }
            return names;
        }
    }

    /// <summary>
    /// Inference device selection (name kept as-is for API compatibility; "Device" is intended).
    /// </summary>
    public enum Divice
    {
        CPU,
        GPU,
        ONNX_runtime,
        oneDNN
    }
}
项目介绍 6 | 7 |   Paddle Inference 是飞桨的原生推理库, 提供服务器端的高性能推理能力,直接基于飞桨的训练算子,因此它支持飞桨训练出的所有模型的推理;Paddle Inference 功能特性丰富,性能优异,针对不同平台不同的应用场景进行了深度的适配优化,做到高吞吐、低时延,保证了飞桨模型在服务器端即训即用,快速部署。 8 | 9 |   但由于Paddle Inference目前只提供了Python、C++、C、Go语言方法接口,C#无法直接使用。近几年以来 ,C#语言快速发展,并跻身于编程语言排行榜前几名的位置,为了能够在C#语言中调用Paddle Inference模型推理库,根据C++动态链接库的原理,推出PaddleInferenceSharp,实现直接在C#平台调用Paddle Inference部署深度学习模型。其实现原理可以参考下图: 10 | 11 | ![paddleinferencesharp.drawio](./doc/image/paddleinferencesharp.drawio.png) 12 | 13 | 14 | 15 | # 2.项目环境配置 16 | 17 |  为了防止复现代码出现问题,列出以下代码开发环境,可以根据自己需求设置。 18 | 19 | - 操作系统:Windows 11 20 | - CUDA:11.4 21 | - cuDNN:8.2.4 22 | - TensorRT:8.4.0.6 23 | - OpenCV:4.5.5 24 | - Visual Studio:2022 25 | - C#框架:.NET 6.0 26 | - OpenCvSharp:OpenCvSharp4 27 | 28 |  此处最重要的是需要安装Paddle Inference C++版,具体安装方式可以参考下面的连接:[Paddle Inference C++ 依赖库安装(Windows)](https://blog.csdn.net/grape_yan/article/details/127322648?spm=1001.2014.3001.5501)。对于其他依赖项安装可以参考以下链接:[NVIDIA TensorR 安装 (Windows C++)](https://blog.csdn.net/grape_yan/article/details/127320959?spm=1001.2014.3001.5501)、[OpenCV C++安装和配置](https://blog.csdn.net/grape_yan/article/details/126954261?spm=1001.2014.3001.5501)。 29 | 30 | # 3.项目下载方式 31 | 32 |  项目所使用的源码均已经在Github和Gitee上开源, 33 | 34 | ```shell 35 | Github: 36 | 37 | git clone https://github.com/guojin-yan/PaddleInferenceSharp.git 38 | 39 | Gitee: 40 | 41 | git clone https://gitee.com/guojin-yan/PaddleInferenceSharp.git 42 | ``` 43 | 44 | 45 | 46 | # 4. 
PaddleInfer类 47 | 48 | ## 4.1 API 方法 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | ## 4.2 枚举 207 | 208 |
序号API参数解释说明
1方法PaddleInfer()构造函数,初始化推理核心,读取本地模型
参数string model_path静态图模型文件
string params_path模型配置文件信息,默认为空
2方法void set_divice()设置推理设备支持 CPU、GPU、ONNX runtime、oneDNN
参数Divice divice设备名称选择
int num 对于CPU、ONNX runtime代表线程数,默认为10;
对于GPU代表显卡编号,默认为0;
对于oneDNN代表cache数量,默认为1
ulong memory_init_size显存分配空间(在使用GPU时作用),默认为500
int workspace_size显存工作空间(在使用GPU时作用),默认为30
3方法List <string> get_input_names()获取输入节点名字
4方法void set_input_shape()设置输入节点形状根据节点维度设置
参数int[] input_shape形状数组
string input_name节点名称
5方法void load_input_data()设置图片/普通输入数据方法重载
参数string input_name输入节点名称
float[] input_data输入数据
参数string input_name输入节点名称
byte[] image_data图片数据
ulong image_size图片长度
int type数据处理类型:
type == 0: 均值方差归一化、直接缩放
type == 1: 普通归一化、直接缩放
type == 2: 均值方差归一化、仿射变换
6方法void infer()模型推理
7方法List <string> get_output_names()获取输出节点名字
8方法List <int> get_shape()获取指定节点形状
参数string node_name节点名称
9方法T[] read_infer_result <T>()读取推理结果数据支持读取Float32、Int32、Int64格式数据
参数string output_name输出节点名
int data_size输出数据长度
10方法void delet()删除内存地址
209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 |  关于上述方法的使用,后续会更新相关的案例教程以及详细的技术文档,敬请期待。 235 | -------------------------------------------------------------------------------- /doc/image/paddleinferencesharp.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/guojin-yan/PaddleInferenceSharp/732b9cc3401bf657264b9d03bb06756a75e38e01/doc/image/paddleinferencesharp.drawio.png -------------------------------------------------------------------------------- /doc/image/paddle名称.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/guojin-yan/PaddleInferenceSharp/732b9cc3401bf657264b9d03bb06756a75e38e01/doc/image/paddle名称.drawio.png -------------------------------------------------------------------------------- /paddle_infrer_api/dll/PaddleInferAPI.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/guojin-yan/PaddleInferenceSharp/732b9cc3401bf657264b9d03bb06756a75e38e01/paddle_infrer_api/dll/PaddleInferAPI.dll -------------------------------------------------------------------------------- /paddle_infrer_api/dll/PaddleInferAPI.exp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/guojin-yan/PaddleInferenceSharp/732b9cc3401bf657264b9d03bb06756a75e38e01/paddle_infrer_api/dll/PaddleInferAPI.exp -------------------------------------------------------------------------------- /paddle_infrer_api/dll/PaddleInferAPI.lib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/guojin-yan/PaddleInferenceSharp/732b9cc3401bf657264b9d03bb06756a75e38e01/paddle_infrer_api/dll/PaddleInferAPI.lib
-------------------------------------------------------------------------------- /paddle_infrer_api/paddle_infrer_api.vcxproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Debug 6 | Win32 7 | 8 | 9 | Release 10 | Win32 11 | 12 | 13 | Debug 14 | x64 15 | 16 | 17 | Release 18 | x64 19 | 20 | 21 | 22 | 16.0 23 | Win32Proj 24 | {cd3c1d0f-9f39-4f8d-8230-fda3d50deb98} 25 | paddleinfrerapi 26 | 10.0 27 | 28 | 29 | 30 | Application 31 | true 32 | v143 33 | Unicode 34 | 35 | 36 | Application 37 | false 38 | v143 39 | true 40 | Unicode 41 | 42 | 43 | Application 44 | true 45 | v143 46 | Unicode 47 | 48 | 49 | DynamicLibrary 50 | false 51 | v143 52 | true 53 | Unicode 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | D:\Program Files\paddle_inference\paddle\include;E:\OpenCV Source\opencv-4.5.5\build\include;E:\OpenCV Source\opencv-4.5.5\build\include\opencv2;$(IncludePath) 75 | D:\Program Files\paddle_inference\paddle\lib;E:\OpenCV Source\opencv-4.5.5\build\x64\vc15\lib;$(LibraryPath) 76 | PaddleInferAPI 77 | dll/ 78 | 79 | 80 | 81 | Level3 82 | true 83 | WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions) 84 | true 85 | 86 | 87 | Console 88 | true 89 | 90 | 91 | 92 | 93 | Level3 94 | true 95 | true 96 | true 97 | WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions) 98 | true 99 | 100 | 101 | Console 102 | true 103 | true 104 | true 105 | 106 | 107 | 108 | 109 | Level3 110 | true 111 | _DEBUG;_CONSOLE;%(PreprocessorDefinitions) 112 | true 113 | 114 | 115 | Console 116 | true 117 | 118 | 119 | 120 | 121 | Level3 122 | true 123 | true 124 | true 125 | NDEBUG;_CONSOLE;%(PreprocessorDefinitions) 126 | true 127 | 128 | 129 | Console 130 | true 131 | true 132 | true 133 | opencv_world455.lib;paddle_inference.lib;%(AdditionalDependencies) 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | -------------------------------------------------------------------------------- 
/paddle_infrer_api/paddle_infrer_api.vcxproj.filters: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | 5 | {4FC737F1-C7A5-4376-A066-2A32D752A2FF} 6 | cpp;c;cc;cxx;c++;cppm;ixx;def;odl;idl;hpj;bat;asm;asmx 7 | 8 | 9 | {93995380-89BD-4b04-88EB-625FBE52EBFB} 10 | h;hh;hpp;hxx;h++;hm;inl;inc;ipp;xsd 11 | 12 | 13 | {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} 14 | rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms 15 | 16 | 17 | 18 | 19 | 源文件 20 | 21 | 22 | -------------------------------------------------------------------------------- /paddle_infrer_api/src/paddle_infer_cpp_api.cpp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/guojin-yan/PaddleInferenceSharp/732b9cc3401bf657264b9d03bb06756a75e38e01/paddle_infrer_api/src/paddle_infer_cpp_api.cpp -------------------------------------------------------------------------------- /text_paddle_infer/PP-Yoloe.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Linq; 4 | using System.Text; 5 | using System.Threading.Tasks; 6 | using PaddleInferenceSharp; 7 | using OpenCvSharp; 8 | 9 | namespace text_paddle_infer 10 | { 11 | internal class PP_Yoloe 12 | { 13 | public static void paddle_deploy_ppyoloe() 14 | { 15 | //----------------------1.模型相关信息----------------------// 16 | string model_path = @"E:\Text_Model\ppyoloe_plus_crn_l_80e_coco\model.pdmodel"; 17 | string params_path = @"E:\Text_Model\ppyoloe_plus_crn_l_80e_coco\model.pdiparams"; 18 | string image_path = @"E:\Text_dataset\YOLOv5\0003.jpg"; 19 | DateTime begin, end; 20 | TimeSpan t0, t1, t2, t3; 21 | //----------------------2. 
创建并配置预测器------------------// 22 | begin = DateTime.Now; 23 | PaddleInfer predictor = new PaddleInfer(model_path, params_path); 24 | // 设置设备类型 25 | //predictor.set_divice(Divice.CPU, 10); // CPU 26 | //predictor.set_divice(Divice.GPU, 0, 500, 30); // GPU 27 | predictor.set_divice(Divice.ONNX_runtime, 10); // ONNX_runtime 28 | //predictor.set_divice(Divice.oneDNN, 1); // oneDNN 29 | // 获取输入节点 30 | List input_name = predictor.get_input_names(); 31 | for (int i = 0; i < input_name.Count; i++) 32 | { 33 | Console.WriteLine("模型输入 {0} {1}", i, input_name[i]); 34 | } 35 | // 设置输入形状 36 | int[] input_size1 = new int[4] { 1, 3, 640, 640 }; 37 | predictor.set_input_shape(input_size1, input_name[0]); 38 | int[] input_size2 = new int[2] { 1,2}; 39 | predictor.set_input_shape(input_size2, input_name[1]); 40 | end = DateTime.Now; 41 | t0 = end - begin; 42 | //----------------------3. 加载推理数据------------------// 43 | begin = DateTime.Now; 44 | Mat image = Cv2.ImRead(image_path); 45 | // 将图片放在矩形背景下 46 | int max_image_length = image.Cols > image.Rows ? image.Cols : image.Rows; 47 | Mat max_image = Mat.Zeros(new Size(max_image_length, max_image_length), MatType.CV_8UC3); 48 | Rect roi = new Rect(0, 0, image.Cols, image.Rows); 49 | image.CopyTo(new Mat(max_image, roi)); 50 | byte[] input_image_data = max_image.ImEncode(".bmp"); 51 | // 数据长度 52 | ulong input_image_length = Convert.ToUInt64(input_image_data.Length); 53 | predictor.load_input_data(input_name[0], input_image_data, input_image_length, 2); 54 | float scale_factor = 640.0f/ max_image_length; 55 | float[] input_scale = new float[] { scale_factor, scale_factor }; 56 | predictor.load_input_data(input_name[1], input_scale); 57 | end = DateTime.Now; 58 | t1 = end - begin; 59 | //----------------------4. 模型推理------------------// 60 | begin = DateTime.Now; 61 | predictor.infer(); 62 | end = DateTime.Now; 63 | t2 = end - begin; 64 | //----------------------5. 
模型推理结果------------------// 65 | begin = DateTime.Now; 66 | int[] leng = new int[4]; 67 | List output_name = predictor.get_output_names(); 68 | for (int i = 0; i < input_name.Count; i++) 69 | { 70 | Console.WriteLine("模型输出 {0} {1}", i, output_name[i]); 71 | } 72 | List output_shape = predictor.get_shape(output_name[0]); 73 | Console.WriteLine("output_shape:{0} × {1}", output_shape[0], output_shape[1]); 74 | int[] output_num = predictor.read_infer_result(output_name[1], 1); 75 | Console.WriteLine(output_num[0]); 76 | float[] output = predictor.read_infer_result(output_name[0], output_num[0] * 6); 77 | List boxes = new List(); 78 | List classes = new List(); 79 | List scores = new List(); 80 | for (int i = 0; i < output_num[0]; i++) 81 | { 82 | if (output[6 * i + 1] > 0.4) 83 | { 84 | scores.Add(output[6 * i + 1]); 85 | classes.Add((int)output[6 * i]); 86 | Rect rect = new Rect((int)output[6 * i + 2], (int)output[6 * i + 3], 87 | (int)(output[6 * i + 4] - output[6 * i + 2]), (int)(output[6 * i + 5] - output[6 * i + 3])); 88 | boxes.Add(rect); 89 | } 90 | 91 | } 92 | end = DateTime.Now; 93 | t3 = end - begin; 94 | Console.WriteLine("模型加载时间:{0}", t0.TotalMilliseconds); 95 | Console.WriteLine("推理数据加载时间:{0}", t1.TotalMilliseconds); 96 | Console.WriteLine("模型推理时间:{0}", t2.TotalMilliseconds); 97 | Console.WriteLine("结果处理时间:{0}", t3.TotalMilliseconds); 98 | 99 | for (int i = 0; i < classes.Count; i++) 100 | { 101 | Cv2.Rectangle(image, boxes[i], new Scalar(0, 0, 255), 1, LineTypes.Link8); 102 | Cv2.PutText(image, scores[i].ToString(), new Point(boxes[i].X, boxes[i].Y - 5), 103 | HersheyFonts.HersheySimplex, 0.5, new Scalar(0, 255, 0)); 104 | } 105 | Cv2.ImShow("result", image); 106 | Cv2.WaitKey(0); 107 | } 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /text_paddle_infer/Program.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using OpenCvSharp; 3 | using 
PaddleInferenceSharp; 4 | namespace text_paddle_infer // Note: actual namespace depends on the project name. 5 | { 6 | internal class Program 7 | { 8 | static void Main(string[] args) 9 | { 10 | Console.WriteLine("Hello World!"); 11 | //ResNet50.paddle_deploy_resnet50(); 12 | PP_Yoloe.paddle_deploy_ppyoloe(); 13 | } 14 | } 15 | } -------------------------------------------------------------------------------- /text_paddle_infer/ResNet50.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Linq; 4 | using System.Text; 5 | using System.Threading.Tasks; 6 | using PaddleInferenceSharp; 7 | using OpenCvSharp; 8 | 9 | namespace text_paddle_infer 10 | { 11 | internal class ResNet50 12 | { 13 | public static void paddle_deploy_resnet50() 14 | { 15 | //----------------------1.模型相关信息----------------------// 16 | string model_path = "E:/Text_Model/flowerclas/inference.pdmodel"; 17 | string params_path = "E:/Text_Model/flowerclas/inference.pdiparams"; 18 | string image_path = "E:/Text_dataset/flowers102/jpg/image_00005.jpg"; 19 | DateTime begin, end; 20 | TimeSpan t0, t1, t2, t3; 21 | //----------------------2. 创建并配置预测器------------------// 22 | begin = DateTime.Now; 23 | PaddleInfer predictor = new PaddleInfer(model_path, params_path); 24 | // 设置设备类型 25 | predictor.set_divice(Divice.CPU, 10); // CPU 26 | //predictor.set_divice(Divice.GPU, 0, 500, 30); // GPU 27 | //predictor.set_divice(Divice.ONNX_runtime, 10); // ONNX_runtime 28 | //predictor.set_divice(Divice.oneDNN, 1); // oneDNN 29 | // 获取输入节点 30 | List input_name = predictor.get_input_names(); 31 | for (int i = 0; i < input_name.Count; i++) 32 | { 33 | Console.WriteLine("模型输入 {0} {1}", i, input_name[i]); 34 | } 35 | // 设置输入形状 36 | int[] input_size = new int[4] { 1, 3, 224, 224 }; 37 | predictor.set_input_shape(input_size, input_name[0]); 38 | end = DateTime.Now; 39 | t0 = end - begin; 40 | //----------------------3. 
加载推理数据------------------// 41 | begin = DateTime.Now; 42 | Mat image = Cv2.ImRead(image_path); 43 | byte[] input_image_data = image.ImEncode(".bmp"); 44 | // 数据长度 45 | ulong input_image_length = Convert.ToUInt64(input_image_data.Length); 46 | predictor.load_input_data(input_name[0], input_image_data, input_image_length, 0); 47 | end = DateTime.Now; 48 | t1 = end - begin; 49 | //----------------------4. 模型推理------------------// 50 | begin = DateTime.Now; 51 | predictor.infer(); 52 | end = DateTime.Now; 53 | t2 = end - begin; 54 | //----------------------5. 模型推理结果------------------// 55 | begin = DateTime.Now; 56 | int[] leng = new int[4]; 57 | List output_name = predictor.get_output_names(); 58 | for (int i = 0; i < input_name.Count; i++) 59 | { 60 | Console.WriteLine("模型输出 {0} {1}", i, output_name[i]); 61 | } 62 | float[] output = predictor.read_infer_result(output_name[0], 102); 63 | float max; 64 | int index = max_indax(output, out max); 65 | end = DateTime.Now; 66 | t3 = end - begin; 67 | Console.WriteLine("最大类别为:{0},分数:{1}。", index, max); 68 | Console.WriteLine("模型加载时间:{0}", t0.TotalMilliseconds); 69 | Console.WriteLine("推理数据加载时间:{0}", t1.TotalMilliseconds); 70 | Console.WriteLine("模型推理时间:{0}", t2.TotalMilliseconds); 71 | Console.WriteLine("结果处理时间:{0}", t3.TotalMilliseconds); 72 | 73 | 74 | } 75 | 76 | static int max_indax(T[] data, out T max) where T : IComparable 77 | { 78 | int index = 0; 79 | max = data[0]; 80 | for (int i = 0; i < data.Length; i++) 81 | { 82 | if (data[i].CompareTo(max) > 0) 83 | { 84 | index = i; 85 | max = data[i]; 86 | } 87 | } 88 | 89 | 90 | 91 | 92 | return index; 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /text_paddle_infer/text_paddle_infer.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | net6.0 6 | enable 7 | enable 8 | true 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 
--------------------------------------------------------------------------------
序号枚举名枚举变量含义
1Divice
设备名称
CPU使用CPU推理
GPU使用GPU推理
ONNX_runtime使用ONNX_runtime推理
oneDNN使用oneDNN推理