├── docs
├── styles
│ ├── main.css
│ ├── main.js
│ └── search-worker.js
├── favicon.ico
├── fonts
│ ├── glyphicons-halflings-regular.eot
│ ├── glyphicons-halflings-regular.ttf
│ ├── glyphicons-halflings-regular.woff
│ └── glyphicons-halflings-regular.woff2
├── README.md
├── Makefile
├── logo.svg
└── search-stopwords.json
├── DotnetCLIVersion.txt
├── BuildToolsVersion.txt
├── DotnetCLIVersion.netcoreapp.latest.txt
├── src
├── Native
│ ├── UnixSal.h
│ ├── LibTorchSharp
│ │ ├── THSAutograd.h
│ │ ├── THSAutograd.cpp
│ │ ├── Utils.cpp
│ │ ├── THSTorch.cpp
│ │ ├── THSTorch.h
│ │ ├── Utils.h
│ │ ├── CMakeLists.txt
│ │ ├── THSJIT.h
│ │ ├── THSJIT.cpp
│ │ ├── cifar10.h
│ │ ├── THSData.h
│ │ ├── cifar10.cpp
│ │ └── THSData.cpp
│ ├── Stdafx.h
│ ├── gen-buildsys-win.bat
│ ├── probe-win.ps1
│ └── build.sh
├── Redist
│ ├── LibTorch.Redist
│ │ ├── libtorch-macos-1.0.1.zip.sha
│ │ ├── libtorch-macos-1.1.0.zip.sha
│ │ ├── mklml_mac_2019.0.5.20190502.tgz.sha
│ │ ├── libtorch-shared-with-deps-1.0.1.zip.sha
│ │ ├── libtorch-shared-with-deps-1.1.0.zip.sha
│ │ ├── libtorch-win-shared-with-deps-1.0.1.zip.sha
│ │ └── libtorch-win-shared-with-deps-1.1.0.zip.sha
│ ├── LibTorch.Cuda.9.0.Redist
│ │ ├── libtorch-shared-with-deps-1.0.1.zip.sha
│ │ └── libtorch-win-shared-with-deps-1.0.1.zip.sha
│ └── build.proj
├── stylecop.json
├── TorchSharp
│ ├── JIT
│ │ └── Type
│ │ │ ├── DynamicType .cs
│ │ │ ├── TensorType.cs
│ │ │ └── Type.cs
│ ├── ExtensionMethods.cs
│ ├── NN
│ │ ├── ProvidedModule.cs
│ │ ├── Parameter.cs
│ │ ├── Conv2D.cs
│ │ ├── FeatureDropout.cs
│ │ ├── LogSoftMax.cs
│ │ ├── AdaptiveAvgPool2D.cs
│ │ ├── ReLu.cs
│ │ ├── AvgPool2D.cs
│ │ ├── Dropout.cs
│ │ ├── MaxPool2D.cs
│ │ ├── FunctionalModule.cs
│ │ ├── Init.cs
│ │ ├── Sequential.cs
│ │ ├── LossFunction.cs
│ │ └── Linear.cs
│ ├── Autograd.cs
│ ├── Torch.cs
│ ├── TorchSharp.csproj
│ ├── Tensor
│ │ └── Types.ttinclude
│ ├── Data
│ │ └── Loader.cs
│ ├── PinnedArray.cs
│ └── Scalar.cs
├── Examples
│ ├── README.md
│ └── Examples.csproj
├── Directory.Build.targets
└── Directory.Build.props
├── docfx
├── toc.yml
├── api
│ ├── index.md
│ ├── TorchSharp.Data.yml
│ ├── TorchSharp.yml
│ ├── TorchSharp.JIT.yml
│ ├── AllocatePinnedArray.yml
│ ├── TorchSharp.NN
│ │ ├── TorchSharp.NN.LossFunction.Loss.yml
│ │ └── TorchSharp.NN.Reduction.yml
│ ├── TorchSharp.JIT
│ │ └── TorchSharp.JIT.DynamicType.yml
│ ├── TorchSharp.Tensor.yml
│ ├── toc.yml
│ └── TorchSharp
│ │ ├── TorchSharp.TorchHandle.yml
│ │ └── TorchSharp.Torch.yml
├── index.md
└── docfx.json
├── .editorconfig
├── pkg
├── TorchSharp
│ ├── TorchSharp.symbols.nupkgproj
│ └── TorchSharp.nupkgproj
├── LibTorch.Redist
│ └── LibTorch.Redist.nupkgproj
├── LibTorch.Cuda.9.0.Redist
│ └── LibTorch.Cuda.9.0.Redist.nupkgproj
├── common
│ └── CommonPackage.props
└── Directory.Build.props
├── ecmadocs
└── en
│ ├── ns-TorchSharp.xml
│ ├── ns-TorchSharp.NN.xml
│ ├── ns-TorchSharp.Data.xml
│ ├── ns-TorchSharp.JIT.xml
│ ├── ns-TorchSharp.Tensor.xml
│ ├── TorchSharp.JIT
│ ├── DynamicType.xml
│ ├── TensorType.xml
│ └── Type.xml
│ ├── TorchSharp.NN
│ ├── ProvidedModule.xml
│ ├── LossFunction+Loss.xml
│ ├── Conv2D.xml
│ ├── Dropout.xml
│ ├── MaxPool2D.xml
│ ├── LogSoftMax.xml
│ ├── AdaptiveAvgPool2D.xml
│ ├── FeatureDropout.xml
│ ├── Reduction.xml
│ ├── ReLU.xml
│ ├── Parameter.xml
│ └── Init.xml
│ ├── AllocatePinnedArray.xml
│ ├── TorchSharp
│ ├── Torch.xml
│ └── AutoGradMode.xml
│ ├── TorchSharp.Data
│ ├── Loader.xml
│ └── DataIterator+HType.xml
│ └── TorchSharp.Tensor
│ └── ATenScalarMapping.xml
├── .gitattributes
├── codecov.yml
├── Makefile
├── test
├── Directory.Build.targets
├── run-tests.proj
├── TorchSharpTest
│ └── TorchSharpTest.csproj
├── Test.csproj
└── Directory.Build.props
├── run.sh
├── tools-local
└── Microsoft.ML.InternalCodeAnalyzer
│ ├── Properties
│ └── AssemblyInfo.cs
│ ├── Microsoft.ML.InternalCodeAnalyzer.csproj
│ ├── TypeParamNameAnalyzer.cs
│ ├── SingleVariableDeclarationAnalyzer.cs
│ ├── ParameterVariableNameAnalyzer.cs
│ ├── Utils.cs
│ ├── BestFriendOnPublicDeclarationsAnalyzer.cs
│ └── InstanceInitializerAnalyzer.cs
├── init-tools.msbuild
├── run.cmd
├── LICENSE
├── Directory.Build.targets
├── README.md
└── dir.traversal.targets
/docs/styles/main.css:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/DotnetCLIVersion.txt:
--------------------------------------------------------------------------------
1 | 2.1.503
--------------------------------------------------------------------------------
/BuildToolsVersion.txt:
--------------------------------------------------------------------------------
1 | 3.0.0-preview1-03801-01
2 |
--------------------------------------------------------------------------------
/DotnetCLIVersion.netcoreapp.latest.txt:
--------------------------------------------------------------------------------
1 | 3.0.100-preview5-011568
--------------------------------------------------------------------------------
/docs/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/interesaaat/TorchSharp/HEAD/docs/favicon.ico
--------------------------------------------------------------------------------
/src/Native/UnixSal.h:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/interesaaat/TorchSharp/HEAD/src/Native/UnixSal.h
--------------------------------------------------------------------------------
/docfx/toc.yml:
--------------------------------------------------------------------------------
1 | - name: Articles
2 | href: articles/
3 | - name: Api Documentation
4 | href: api/
5 | homepage: api/index.md
6 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*.cs]
4 | # Sort using directives with System.* appearing first
5 | dotnet_sort_system_directives_first = true
--------------------------------------------------------------------------------
/docs/fonts/glyphicons-halflings-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/interesaaat/TorchSharp/HEAD/docs/fonts/glyphicons-halflings-regular.eot
--------------------------------------------------------------------------------
/docs/fonts/glyphicons-halflings-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/interesaaat/TorchSharp/HEAD/docs/fonts/glyphicons-halflings-regular.ttf
--------------------------------------------------------------------------------
/docs/fonts/glyphicons-halflings-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/interesaaat/TorchSharp/HEAD/docs/fonts/glyphicons-halflings-regular.woff
--------------------------------------------------------------------------------
/docs/fonts/glyphicons-halflings-regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/interesaaat/TorchSharp/HEAD/docs/fonts/glyphicons-halflings-regular.woff2
--------------------------------------------------------------------------------
/docfx/api/index.md:
--------------------------------------------------------------------------------
1 |
2 | Welcome to the TorchSharp API documentation.
3 |
4 | Expand the node on the left to explore the .NET API for TorchSharp.
5 |
--------------------------------------------------------------------------------
/pkg/TorchSharp/TorchSharp.symbols.nupkgproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/docs/styles/main.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft. All rights reserved. Licensed under the MIT license. See LICENSE file in the project root for full license information.
2 |
--------------------------------------------------------------------------------
/src/Redist/LibTorch.Redist/libtorch-macos-1.0.1.zip.sha:
--------------------------------------------------------------------------------
1 | B0D57D3B99DB40D3A9CC50CB3A77E40159C62E269CEE8F0DC509ABC332DED09EEF712742FBA0BF0569053D2B9CCD742E14AEBE14ED60241EBF93AAD2103B519E
--------------------------------------------------------------------------------
/src/Redist/LibTorch.Redist/libtorch-macos-1.1.0.zip.sha:
--------------------------------------------------------------------------------
1 | F36D37A447F8B8022EDCDE84D6B75273EEC19E5B84B50EC3EC38E4C309625FB98BE5BFC1B00913472CCB89BA20F3161B63F51B1269A6EA4633565A2FE6A7A9CE
--------------------------------------------------------------------------------
/ecmadocs/en/ns-TorchSharp.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | To be added.
4 | To be added.
5 |
6 |
7 |
--------------------------------------------------------------------------------
/ecmadocs/en/ns-TorchSharp.NN.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | To be added.
4 | To be added.
5 |
6 |
7 |
--------------------------------------------------------------------------------
/src/Redist/LibTorch.Redist/mklml_mac_2019.0.5.20190502.tgz.sha:
--------------------------------------------------------------------------------
1 | 92A2FC804446FBF383AF67745BCC2C459A24F7CB0A9A5CE13B4D96C040C1B8D87293B1264E4DD755DCE0E9F32A6AA82F72379FD86CD9D0E656B9B61AF1CA5B76
--------------------------------------------------------------------------------
/ecmadocs/en/ns-TorchSharp.Data.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | To be added.
4 | To be added.
5 |
6 |
7 |
--------------------------------------------------------------------------------
/ecmadocs/en/ns-TorchSharp.JIT.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | To be added.
4 | To be added.
5 |
6 |
7 |
--------------------------------------------------------------------------------
/src/Redist/LibTorch.Redist/libtorch-shared-with-deps-1.0.1.zip.sha:
--------------------------------------------------------------------------------
1 | 0F50F172A6A12606A2F5F401133D56CE1F0FA2B317C4F292C575DD4A6EEF1CEDCBD405024A1BB0304348FD232EB59751D3EC8B415F2C4B8A91845A49CB9FBD57
--------------------------------------------------------------------------------
/src/Redist/LibTorch.Redist/libtorch-shared-with-deps-1.1.0.zip.sha:
--------------------------------------------------------------------------------
1 | B32C1B3FF538C9307742807908C489AF31433285B847B84E2F8A05CBE5826515BEB46C2F289211271A7FA841FECAFA45551F9C77731B9D8035790E24D219DBE8
--------------------------------------------------------------------------------
/src/Redist/LibTorch.Redist/libtorch-win-shared-with-deps-1.0.1.zip.sha:
--------------------------------------------------------------------------------
1 | D4E74FC7D4360E67DB54B934E8CF9C0411710CE56E6E87EE73C049B38B591ED2F56AE5062CDC51593004796AB774DEA36BF834D0D24DAA53D52E506B71C96973
--------------------------------------------------------------------------------
/src/Redist/LibTorch.Redist/libtorch-win-shared-with-deps-1.1.0.zip.sha:
--------------------------------------------------------------------------------
1 | BB3681C883095C3A708CCFE7C4521DEC9C4281466C2D830ECF4CB86CF743276DC9E4B09435DB883DD03DA80714197B85D143D7D35AF06D56379A1AFF30E472E5
--------------------------------------------------------------------------------
/ecmadocs/en/ns-TorchSharp.Tensor.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | To be added.
4 | To be added.
5 |
6 |
7 |
--------------------------------------------------------------------------------
/src/Redist/LibTorch.Cuda.9.0.Redist/libtorch-shared-with-deps-1.0.1.zip.sha:
--------------------------------------------------------------------------------
1 | EB08310AD859F6633014358613D9DB59ACD610BAA11C6997B893FEC280C655D1473DB8FF63342BFFA7B01C1B30BF2F7769063AA9E77C91822284F3F10CF45BB9
--------------------------------------------------------------------------------
/src/Redist/LibTorch.Cuda.9.0.Redist/libtorch-win-shared-with-deps-1.0.1.zip.sha:
--------------------------------------------------------------------------------
1 | 56CAAECE1C9252DECFB2175DE879FA245471E4424FE6A17FA74A2CEA89882DEAC71B27BF6EE730D798A1E547F0EDE48432F79DFD20A580830A1B0790D6AE8434
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | This directory contains the generated documentation from the input files,
2 | if you want to edit the documentation do so in the `docfx` directory
3 | or in the C# source files, and run the top-level Makefile docs target
--------------------------------------------------------------------------------
/src/Redist/build.proj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/src/stylecop.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://raw.githubusercontent.com/DotNetAnalyzers/StyleCopAnalyzers/master/StyleCop.Analyzers/StyleCop.Analyzers/Settings/stylecop.schema.json",
3 | "settings": {
4 | "orderingRules": {
5 | "usingDirectivesPlacement": "outsideNamespace"
6 | }
7 | }
8 | }
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/THSAutograd.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include "../Stdafx.h"
4 |
5 | #include "Utils.h"
6 |
7 | // Returns whether the grad is enabled or not.
8 | EXPORT_API(bool) THSAutograd_isGradEnabled();
9 |
10 | // Enables / disables grad.
11 | EXPORT_API(void) THSAutograd_setGrad(bool enabled);
12 |
--------------------------------------------------------------------------------
/src/TorchSharp/JIT/Type/DynamicType .cs:
--------------------------------------------------------------------------------
1 | using System;
2 |
3 | namespace TorchSharp.JIT
4 | {
5 | public sealed class DynamicType : Type
6 | {
7 | internal DynamicType(IntPtr handle) : base(handle)
8 | {
9 | this.handle = new HType(handle, true);
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/THSAutograd.cpp:
--------------------------------------------------------------------------------
1 | #include "THSAutograd.h"
2 |
3 | #include "torch/torch.h"
4 |
5 | bool THSAutograd_isGradEnabled()
6 | {
7 | bool result = torch::autograd::GradMode::is_enabled();
8 | return result;
9 | }
10 |
11 | void THSAutograd_setGrad(bool enabled)
12 | {
13 | torch::autograd::GradMode::set_enabled(enabled);
14 | }
--------------------------------------------------------------------------------
/src/Examples/README.md:
--------------------------------------------------------------------------------
1 | Examples
2 | ===========
3 |
4 | MNIST
5 | ----------
6 | To run MNIST
7 | you need to download the [dataset](http://yann.lecun.com/exdb/mnist/) and unzip it into `Examples/Data`.
8 | To run AlexNet you need to download the CIFAR10 [binary dataset](https://www.cs.toronto.edu/~kriz/cifar.html) and unzip it into the same `Examples/Data` folder.
9 |
--------------------------------------------------------------------------------
/docfx/index.md:
--------------------------------------------------------------------------------
1 |
2 | TorchSharp are .NET bindings to the Torch library published
3 | here:
4 |
5 | https://pytorch.org/get-started/locally/
6 |
7 | This surfaces the C API as a strongly-typed C# API.
8 |
9 | ## Getting Started
10 |
11 | Check the [GitHub project page](https://github.com/xamarin/TorchSharp) for
12 | TorchSharp.
13 |
14 | ## API documentation
15 |
16 | The [API Documentation](api/TorchSharp.html)
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/Utils.cpp:
--------------------------------------------------------------------------------
1 | #include "Utils.h"
2 |
3 | #include
4 | #include
5 |
6 | thread_local char * torch_last_err = NULL;
7 |
8 | const char * make_sharable_string(const std::string str)
9 | {
10 | size_t size = sizeof(str);
11 | char* result = new char[size];
12 | strncpy(result, str.c_str(), size);
13 | result[size - 1] = '\0';
14 | return result;
15 | }
16 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Set default behavior to automatically normalize line endings.
3 | ###############################################################################
4 | * text=auto
5 |
6 | # Force bash scripts to always use lf line endings so that if a repo is accessed
7 | # in Unix via a file share from Windows, the scripts will work.
8 | *.sh text eol=lf
9 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | # https://docs.codecov.io/docs/codecov-yaml
2 | # https://github.com/codecov/support/wiki/Codecov-Yaml
3 |
4 | coverage:
5 | status:
6 | project:
7 | default: false
8 | patch:
9 | default: false
10 | fixes:
11 | - "build/::/"
12 |
13 | comment:
14 | layout: "diff, flags, files"
15 |
16 | flags:
17 | production:
18 | paths:
19 | - src/
20 | test:
21 | paths:
22 | - test/
23 |
--------------------------------------------------------------------------------
/src/TorchSharp/ExtensionMethods.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 |
5 | namespace TorchSharp
6 | {
7 | internal static class ExtensionMethods
8 | {
9 | internal static void Deconstruct(this IList list, out T head, out IList tail)
10 | {
11 | head = list.FirstOrDefault();
12 | tail = new List(list.Skip(1));
13 | }
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | all:
2 | sh build.sh
3 |
4 | docs: doc-update yaml
5 |
6 | doc-update:
7 | mdoc update -i ./bin/obj/packages/TorchSharp/lib/netstandard2.0/TorchSharp.xml -o ecmadocs/en ./bin/obj/packages/TorchSharp/lib/netstandard2.0/TorchSharp.dll
8 |
9 | yaml:
10 | -rm ecmadocs/en/ns-.xml
11 | mono /cvs/ECMA2Yaml/ECMA2Yaml/ECMA2Yaml/bin/Debug/ECMA2Yaml.exe --source=`pwd`/ecmadocs/en --output=`pwd`/docfx/api
12 | (cd docfx; mono ~/Downloads/docfx/docfx.exe build)
13 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | all:
2 | msbuild /p:Configuration=Release
3 |
4 | docs: doc-update yaml
5 |
6 | doc-update:
7 | mdoc update -i ./TorchSharp/bin/Release/netstandard2.0/TorchSharp.xml -o ecmadocs/en ./TorchSharp/bin/Release/netstandard2.0/TorchSharp.dll
8 |
9 | yaml:
10 | -rm ecmadocs/en/ns-.xml
11 | mono /cvs/ECMA2Yaml/ECMA2Yaml/ECMA2Yaml/bin/Debug/ECMA2Yaml.exe --source=`pwd`/ecmadocs/en --output=`pwd`/docfx/api
12 | (cd docfx; mono ~/Downloads/docfx/docfx.exe build)
13 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/ProvidedModule.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using TorchSharp.Tensor;
4 |
5 | namespace TorchSharp.NN
6 | {
7 | ///
8 | /// This class is used to represent a functional module (e.g., ReLU).
9 | ///
10 | public abstract class ProvidedModule : Module
11 | {
12 | internal ProvidedModule() : base(IntPtr.Zero)
13 | {
14 | }
15 |
16 | internal ProvidedModule(IntPtr handle) : base(handle)
17 | {
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/Parameter.cs:
--------------------------------------------------------------------------------
1 | using TorchSharp.Tensor;
2 |
3 | namespace TorchSharp.NN
4 | {
5 | public struct Parameter
6 | {
7 | public string Name { get; set; }
8 | public TorchTensor Tensor { get; set; }
9 | public bool WithGrad { get; set; }
10 |
11 | public Parameter(string name, TorchTensor parameter, bool? withGrad = null)
12 | {
13 | Name = name;
14 | Tensor = parameter;
15 | WithGrad = withGrad ?? parameter.IsGradRequired;
16 | }
17 | };
18 | }
19 |
--------------------------------------------------------------------------------
/test/Directory.Build.targets:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
8 | $(AllowedReferenceRelatedFileExtensions);.runtimeconfig.json;.runtimeconfig.dev.json;.deps.json
9 |
10 |
11 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/Conv2D.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 | using TorchSharp.Tensor;
4 |
5 | namespace TorchSharp.NN
6 | {
7 | public class Conv2D : ProvidedModule
8 | {
9 | internal Conv2D(IntPtr handle) : base(handle)
10 | {
11 | }
12 |
13 | [DllImport("LibTorchSharp")]
14 | private static extern IntPtr THSNN_conv2DModuleApply(Module.HType module, IntPtr tensor);
15 |
16 | public override TorchTensor Forward(TorchTensor tensor)
17 | {
18 | return new TorchTensor(THSNN_conv2DModuleApply(handle, tensor.Handle));
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/run.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | __scriptpath=$(cd "$(dirname "$0")"; pwd -P)
4 |
5 | # Disable telemetry, first time experience, and global sdk look for the CLI
6 | export DOTNET_CLI_TELEMETRY_OPTOUT=1
7 | export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
8 | export DOTNET_MULTILEVEL_LOOKUP=0
9 |
10 | # Source the init-tools.sh script rather than execute in order to preserve ulimit values in child-processes. https://github.com/dotnet/corefx/issues/19152
11 | . "$__scriptpath/init-tools.sh"
12 |
13 | __toolRuntime=$__scriptpath/Tools
14 | __dotnet=$__toolRuntime/dotnetcli/dotnet
15 |
16 | cd "$__scriptpath"
17 | "$__dotnet" "$__toolRuntime/run.exe" "$__scriptpath/config.json" "$@"
18 | exit $?
19 |
--------------------------------------------------------------------------------
/tools-local/Microsoft.ML.InternalCodeAnalyzer/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | // Licensed to the .NET Foundation under one or more agreements.
2 | // The .NET Foundation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | using System.Runtime.CompilerServices;
6 |
7 | [assembly: InternalsVisibleTo("Microsoft.ML.CodeAnalyzer.Tests, PublicKey=002400000480000094000000060200000024000052534131000400000100010015c01ae1f50e8cc09ba9eac9147cf8fd9fce2cfe9f8dce4f7301c4132ca9fb50ce8cbf1df4dc18dd4d210e4345c744ecb3365ed327efdbc52603faa5e21daa11234c8c4a73e51f03bf192544581ebe107adee3a34928e39d04e524a9ce729d5090bfd7dad9d10c722c0def9ccc08ff0a03790e48bcd1f9b6c476063e1966a1c4")]
8 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.JIT/DynamicType.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | TorchSharp.JIT.Type
10 |
11 |
12 |
13 | To be added.
14 | To be added.
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/src/Native/Stdafx.h:
--------------------------------------------------------------------------------
1 | // Licensed to the .NET Foundation under one or more agreements.
2 | // The .NET Foundation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | #pragma once
6 | #include
7 | #include
8 | #include
9 | #include
10 |
11 | #define UNUSED(x) (void)(x)
12 | #define DEBUG_ONLY(x) (void)(x)
13 |
14 | #ifdef _WIN32
15 | #include
16 |
17 | #define EXPORT_API(ret) extern "C" __declspec(dllexport) ret
18 | #else
19 | #include "UnixSal.h"
20 |
21 | #define EXPORT_API(ret) extern "C" __attribute__((visibility("default"))) ret
22 |
23 | #define __forceinline __attribute__((always_inline)) inline
24 | #endif
--------------------------------------------------------------------------------
/init-tools.msbuild:
--------------------------------------------------------------------------------
1 |
2 |
3 | netcoreapp1.0
4 | false
5 | true
6 | $(MSBuildThisFileDirectory)Tools/$(BuildToolsPackageVersion)
7 | Microsoft.SymbolUploader.Build.Task
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/FeatureDropout.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 | using TorchSharp.Tensor;
4 |
5 | namespace TorchSharp.NN
6 | {
7 | ///
8 | /// This class is used to represent a dropout module for 2d/3d convolutational layers.
9 | ///
10 | public class FeatureDropout : FunctionalModule
11 | {
12 | internal FeatureDropout() : base()
13 | {
14 | }
15 |
16 | [DllImport("LibTorchSharp")]
17 | private static extern IntPtr THSNN_featureDropoutApply(IntPtr tensor);
18 |
19 | public override TorchTensor Forward(TorchTensor tensor)
20 | {
21 | return new TorchTensor(THSNN_featureDropoutApply(tensor.Handle));
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/pkg/TorchSharp/TorchSharp.nupkgproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netstandard2.0
5 | netstandard2.0;netcoreapp3.0
6 | .NET Bindings for Torch.
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/ProvidedModule.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | TorchSharp.NN.Module
10 |
11 |
12 |
13 |
14 | This class is used to represent a functional module (e.g., ReLU).
15 |
16 | To be added.
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/LogSoftMax.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 | using TorchSharp.Tensor;
4 |
5 | namespace TorchSharp.NN
6 | {
7 | ///
8 | /// This class is used to represent a log softmax module.
9 | ///
10 | public class LogSoftMax : FunctionalModule
11 | {
12 | private long _dimension;
13 |
14 | internal LogSoftMax(long dimension) : base()
15 | {
16 | _dimension = dimension;
17 | }
18 |
19 | [DllImport("LibTorchSharp")]
20 | private static extern IntPtr THSNN_logSoftMaxApply(IntPtr tensor, long dimension);
21 |
22 | public override TorchTensor Forward(TorchTensor tensor)
23 | {
24 | return new TorchTensor(THSNN_logSoftMaxApply(tensor.Handle, _dimension));
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/test/run-tests.proj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/pkg/LibTorch.Redist/LibTorch.Redist.nupkgproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Copyright (c) Microsoft Corporation
6 | Microsoft Corporation
7 | netstandard2.0
8 | $(MSBuildProjectName) contains the PyTorch C++ frontend LibTorch library version $(LibTorchVersion)
9 | and MKL-DNN library version $(MklDnnVersion) redistributed as a NuGet package.
10 | true
11 | https://pytorch.org/
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/AdaptiveAvgPool2D.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 | using TorchSharp.Tensor;
4 |
5 | namespace TorchSharp.NN
6 | {
7 | ///
8 | /// This class is used to represent a ReLu module.
9 | ///
10 | public class AdaptiveAvgPool2D : FunctionalModule
11 | {
12 | private long[] _outputSize;
13 |
14 | internal AdaptiveAvgPool2D(params long []outputSize) : base()
15 | {
16 | _outputSize = outputSize;
17 | }
18 |
19 | [DllImport("LibTorchSharp")]
20 | private static extern IntPtr THSNN_adaptiveAvgPool2DApply(IntPtr tensor, int length, long[] outputSize);
21 |
22 | public override TorchTensor Forward(TorchTensor tensor)
23 | {
24 | return new TorchTensor(THSNN_adaptiveAvgPool2DApply(tensor.Handle, _outputSize.Length, _outputSize));
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/ReLu.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 | using TorchSharp.Tensor;
4 |
5 | namespace TorchSharp.NN
6 | {
7 | ///
8 | /// This class is used to represent a ReLU module.
9 | ///
10 | public class ReLU : FunctionalModule
11 | {
12 | private readonly bool _inPlace;
13 |
14 | internal ReLU(bool inPlace = false) : base()
15 | {
16 | _inPlace = inPlace;
17 | }
18 |
19 | [DllImport("LibTorchSharp")]
20 | private static extern IntPtr THSNN_reluApply(IntPtr tensor, bool inPlace);
21 |
22 | public override TorchTensor Forward(TorchTensor tensor)
23 | {
24 | return new TorchTensor(THSNN_reluApply(tensor.Handle, _inPlace));
25 | }
26 |
27 | public override string GetName()
28 | {
29 | return typeof(ReLU).Name;
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/AvgPool2D.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 | using TorchSharp.Tensor;
4 |
5 | namespace TorchSharp.NN
6 | {
7 | public class AvgPool2D : FunctionalModule
8 | {
9 | private readonly long[] _kernelSize;
10 | private readonly long[] _stride;
11 |
12 | internal AvgPool2D(long[] kernelSize, long[] stride) : base()
13 | {
14 | _kernelSize = kernelSize;
15 | _stride = stride ?? new long[0];
16 | }
17 |
18 | [DllImport("LibTorchSharp")]
19 | private static extern IntPtr THSNN_avgPool2DApply(IntPtr tensor, int kernelSizeLength, long[] kernelSize, int strideLength, long[] stride);
20 |
21 | public override TorchTensor Forward(TorchTensor tensor)
22 | {
23 | return new TorchTensor(THSNN_avgPool2DApply(tensor.Handle, _kernelSize.Length, _kernelSize, _stride.Length, _stride));
24 | }
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/run.cmd:
--------------------------------------------------------------------------------
@if not defined _echo @echo off
setlocal

:: Clear the 'Platform' env variable for this session, as it's a per-project setting within the build, and
:: misleading value (such as 'MCD' in HP PCs) may lead to build breakage (corefx issue: #69).
set Platform=

:: Disable telemetry, first time experience, and global sdk look for the CLI
set DOTNET_CLI_TELEMETRY_OPTOUT=1
set DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
set DOTNET_MULTILEVEL_LOOKUP=0

:: Restore the Tools directory
call "%~dp0init-tools.cmd" %*
if NOT [%ERRORLEVEL%]==[0] exit /b 1

:: Location of the bootstrapped build tooling laid down by init-tools.cmd.
set _toolRuntime=%~dp0Tools

:: Use the repo-local dotnet CLI (not any machine-wide install) to run the build driver.
set _dotnet=%_toolRuntime%\dotnetcli\dotnet.exe
set _json=%~dp0config.json

:: run.exe depends on running in the root directory, notably because the config.json specifies
:: a relative path to the binclash logger
:: pushd/popd scope the directory change so the caller's cwd is restored afterwards.
pushd "%~dp0"
call "%_dotnet%" "%_toolRuntime%\run.exe" "%_json%" %*
popd

:: Propagate the build driver's exit code to the caller.
exit /b %ERRORLEVEL%
--------------------------------------------------------------------------------
/src/TorchSharp/NN/Dropout.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 | using TorchSharp.Tensor;
4 |
namespace TorchSharp.NN
{
    /// <summary>
    /// This class is used to represent a dropout module.
    /// </summary>
    // NOTE(review): the generic argument on the base class was stripped by text
    // extraction; reconstructed as FunctionalModule<Dropout>.
    public class Dropout : FunctionalModule<Dropout>
    {
        // Both fields are set only in the constructor, so they are marked readonly.
        private readonly double _probability;
        private readonly bool _isTraining;

        internal Dropout(bool isTraining, double probability = 0.5) : base()
        {
            _probability = probability;
            _isTraining = isTraining;
        }

        [DllImport("LibTorchSharp")]
        private static extern IntPtr THSNN_dropoutModuleApply(IntPtr tensor, double probability, bool isTraining);

        /// <summary>
        /// Applies dropout to the input tensor: elements are zeroed with the configured
        /// probability when the module was created in training mode; otherwise the input
        /// passes through unchanged (native semantics).
        /// </summary>
        /// <param name="tensor">The input tensor.</param>
        /// <returns>The output tensor.</returns>
        public override TorchTensor Forward(TorchTensor tensor)
        {
            return new TorchTensor(THSNN_dropoutModuleApply(tensor.Handle, _probability, _isTraining));
        }
    }
}
30 |
--------------------------------------------------------------------------------
/pkg/LibTorch.Cuda.9.0.Redist/LibTorch.Cuda.9.0.Redist.nupkgproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Copyright (c) Microsoft Corporation
6 | Microsoft Corporation
7 | netstandard2.0
8 | $(MSBuildProjectName) contains the PyTorch C++ frontend LibTorch library version $(LibTorchVersion)
9 | for GPU with Cuda 9.0 redistributed as a NuGet package.
10 | true
11 | https://pytorch.org/
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/MaxPool2D.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 | using TorchSharp.Tensor;
4 |
namespace TorchSharp.NN
{
    /// <summary>
    /// This class is used to represent a 2D max-pooling module.
    /// (The previous comment said "ReLu module" — a copy/paste error.)
    /// </summary>
    // NOTE(review): the generic argument on the base class was stripped by text
    // extraction; reconstructed as FunctionalModule<MaxPool2D>.
    public class MaxPool2D : FunctionalModule<MaxPool2D>
    {
        private readonly long[] _kernelSize;
        private readonly long[] _stride;

        internal MaxPool2D(long[] kernelSize, long[] stride) : base()
        {
            _kernelSize = kernelSize;
            // A null stride is normalized to an empty array; the native side then
            // falls back to its default (stride == kernel size).
            _stride = stride ?? new long[0];
        }

        [DllImport("LibTorchSharp")]
        private static extern IntPtr THSNN_maxPool2DApply(IntPtr tensor, int kernelSizeLength, long[] kernelSize, int strideLength, long[] stride);

        /// <summary>
        /// Applies 2D max pooling over the input tensor.
        /// </summary>
        /// <param name="tensor">The input tensor.</param>
        /// <returns>The pooled output tensor.</returns>
        public override TorchTensor Forward(TorchTensor tensor)
        {
            return new TorchTensor(THSNN_maxPool2DApply(tensor.Handle, _kernelSize.Length, _kernelSize, _stride.Length, _stride));
        }
    }
}
30 |
--------------------------------------------------------------------------------
/tools-local/Microsoft.ML.InternalCodeAnalyzer/Microsoft.ML.InternalCodeAnalyzer.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netstandard1.3
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 | True
16 | True
17 | Descriptions.resx
18 |
19 |
20 |
21 |
22 |
23 | ResXFileCodeGenerator
24 | Descriptions.Designer.cs
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/THSTorch.cpp:
--------------------------------------------------------------------------------
1 | #include "THSTorch.h"
2 |
3 | #include "torch/torch.h"
4 |
// Seeds all libtorch random number generators with the given value.
void THSTorch_seed(const int64_t seed)
{
    torch::manual_seed(seed);
}

// Returns non-zero when a CUDA device is available to libtorch.
int THSTorch_isCudaAvailable()
{
    return torch::cuda::is_available();
}

// Returns the last recorded error message and clears it.
// Ownership of the returned buffer transfers to the caller (it was strdup'd
// when stored); torch_last_err is thread-local, so this is per-thread state.
const char * THSTorch_get_and_reset_last_err()
{
    char *tmp = torch_last_err;
    torch_last_err = nullptr;
    return tmp;
}
21 |
// Wraps a char value in a heap-allocated torch::Scalar.
// The caller owns the result and must release it via THSThorch_dispose_scalar.
Scalar THSTorch_btos(char value)
{
    return new torch::Scalar(value);
}

// Wraps a short value in a heap-allocated torch::Scalar (caller owns).
Scalar THSTorch_stos(short value)
{
    return new torch::Scalar(value);
}

// Wraps an int value in a heap-allocated torch::Scalar (caller owns).
Scalar THSTorch_itos(int value)
{
    return new torch::Scalar(value);
}

// Wraps a long value in a heap-allocated torch::Scalar (caller owns).
// NOTE(review): `long` is 32-bit on Windows (LLP64) while managed callers
// marshal a 64-bit value; the int64_t cast below cannot recover bits already
// truncated at the ABI boundary — consider taking int64_t here and in the
// header. TODO confirm against the managed P/Invoke signature.
Scalar THSTorch_ltos(long value)
{
    return new torch::Scalar(int64_t(value));
}

// Wraps a float value in a heap-allocated torch::Scalar (caller owns).
Scalar THSTorch_ftos(float value)
{
    return new torch::Scalar(value);
}

// Wraps a double value in a heap-allocated torch::Scalar (caller owns).
Scalar THSTorch_dtos(double value)
{
    return new torch::Scalar(value);
}
51 |
// Releases a Scalar previously allocated by one of the THSTorch_*tos factories.
// NOTE(review): "THSThorch" (extra 'h') is a typo, but it is the exported symbol
// declared in THSTorch.h and bound by managed callers — do not rename in isolation.
void THSThorch_dispose_scalar(Scalar scalar)
{
    delete scalar;
}
56 |
--------------------------------------------------------------------------------
/docfx/api/TorchSharp.Data.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:ManagedReference
2 | items:
3 | - uid: TorchSharp.Data
4 | commentId: N:TorchSharp.Data
5 | id: TorchSharp.Data
6 | children:
7 | - TorchSharp.Data.DataIterator
8 | - TorchSharp.Data.DataIterator.HType
9 | - TorchSharp.Data.Loader
10 | langs:
11 | - csharp
12 | name: TorchSharp.Data
13 | nameWithType: TorchSharp.Data
14 | fullName: TorchSharp.Data
15 | type: Namespace
16 | references:
17 | - uid: TorchSharp.Data.DataIterator
18 | parent: TorchSharp.Data
19 | isExternal: false
20 | name: DataIterator
21 | nameWithType: TorchSharp.Data.DataIterator
22 | fullName: TorchSharp.Data.DataIterator
23 | type: class
24 | - uid: TorchSharp.Data.DataIterator.HType
25 | parent: TorchSharp.Data
26 | isExternal: false
27 | name: DataIterator.HType
28 | nameWithType: TorchSharp.Data.DataIterator.HType
29 | fullName: TorchSharp.Data.DataIterator.HType
30 | type: class
31 | - uid: TorchSharp.Data.Loader
32 | parent: TorchSharp.Data
33 | isExternal: false
34 | name: Loader
35 | nameWithType: TorchSharp.Data.Loader
36 | fullName: TorchSharp.Data.Loader
37 | type: class
38 |
--------------------------------------------------------------------------------
/src/TorchSharp/Autograd.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 |
namespace TorchSharp
{
    /// <summary>
    /// RAII-style scope that sets the libtorch autograd (gradient-tracking) mode
    /// for the current thread and restores the previous mode on Dispose.
    /// Typical use: using (var mode = new AutoGradMode(false)) { ... }
    /// </summary>
    public sealed class AutoGradMode : IDisposable
    {
        // The autograd state captured at construction time, restored on Dispose.
        private bool _isPrevGrad;

        [DllImport("LibTorchSharp")]
        private static extern bool THSAutograd_isGradEnabled();

        [DllImport("LibTorchSharp")]
        private static extern void THSAutograd_setGrad(bool enabled);

        /// <summary>
        /// Captures the current autograd mode, then switches it to <paramref name="enabled"/>.
        /// </summary>
        public AutoGradMode(bool enabled)
        {
            _isPrevGrad = THSAutograd_isGradEnabled();
            THSAutograd_setGrad(enabled);
        }

        /// <summary>
        /// Restores the autograd mode that was active before this scope was created.
        /// </summary>
        public void Dispose()
        {
            Dispose(true);
            // NOTE(review): there is no finalizer on this class, so SuppressFinalize
            // is a no-op here; harmless, kept for the conventional dispose pattern.
            GC.SuppressFinalize(this);
        }

        // NOTE(review): the dispose pattern normally makes this private/protected;
        // it is public here, which is part of the type's shipped surface.
        public void Dispose(bool disposing)
        {
            if (disposing)
            {
                THSAutograd_setGrad(_isPrevGrad);
            }
        }

        /// <summary>
        /// Returns whether autograd (gradient tracking) is currently enabled.
        /// </summary>
        public static bool IsAutogradEnabled()
        {
            return THSAutograd_isGradEnabled();
        }
    }
}
42 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Microsoft Corp
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/pkg/common/CommonPackage.props:
--------------------------------------------------------------------------------
1 |
2 |
3 |
7 |
10 |
12 | PreserveNewest
13 | false
14 | %(Filename)%(Extension)
15 |
16 |
18 | PreserveNewest
19 | false
20 | %(Filename)%(Extension)
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/THSTorch.h:
--------------------------------------------------------------------------------
#pragma once

#include "../Stdafx.h"

#include "Utils.h"

// API.

// Sets manually the seed.
EXPORT_API(void) THSTorch_seed(const int64_t seed);

// Returns whether a CUDA device is available (non-zero when it is).
// (The previous comment "Sets manually the seed." was a copy/paste error.)
EXPORT_API(int) THSTorch_isCudaAvailable();

// Returns the latest error. This is thread-local.
EXPORT_API(const char *) THSTorch_get_and_reset_last_err();

// Returns a Scalar object from a char value.
EXPORT_API(Scalar) THSTorch_btos(char value);

// Returns a Scalar object from a short value.
EXPORT_API(Scalar) THSTorch_stos(short value);

// Returns a Scalar object from an int value.
EXPORT_API(Scalar) THSTorch_itos(int value);

// Returns a Scalar object from a long value.
// NOTE(review): `long` is 32-bit on Windows (LLP64); if managed callers pass a
// 64-bit value this truncates at the ABI boundary — consider int64_t (here and
// in the matching definition). TODO confirm against the P/Invoke signature.
EXPORT_API(Scalar) THSTorch_ltos(long value);

// Returns a Scalar object from a float value.
EXPORT_API(Scalar) THSTorch_ftos(float value);

// Returns a Scalar object from a double value.
EXPORT_API(Scalar) THSTorch_dtos(double value);

// Dispose the scalar. (Note: "THSThorch" typo is part of the exported name.)
EXPORT_API(void) THSThorch_dispose_scalar(Scalar scalar);
38 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/LossFunction+Loss.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.Delegate
10 |
11 |
12 |
13 |
14 |
15 |
16 | TorchSharp.Tensor.TorchTensor
17 |
18 |
19 | To be added.
20 | To be added.
21 | To be added.
22 | To be added.
23 | To be added.
24 |
25 |
26 |
--------------------------------------------------------------------------------
/ecmadocs/en/AllocatePinnedArray.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.Delegate
10 |
11 |
12 |
13 | System.Runtime.InteropServices.UnmanagedFunctionPointer(System.Runtime.InteropServices.CallingConvention.Cdecl)
14 |
15 |
16 |
17 |
18 |
19 |
20 | System.IntPtr
21 |
22 |
23 | To be added.
24 | To be added.
25 | To be added.
26 | To be added.
27 |
28 |
29 |
--------------------------------------------------------------------------------
/src/TorchSharp/Torch.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Diagnostics;
3 | using System.Runtime.InteropServices;
4 |
namespace TorchSharp
{
    using Debug = System.Diagnostics.Debug;

    /// <summary>
    /// Static entry points for global libtorch state: RNG seeding, CUDA
    /// availability, and native error retrieval.
    /// </summary>
    public static class Torch
    {
        [DllImport("LibTorchSharp")]
        private static extern void THSTorch_seed(long seed);

        /// <summary>
        /// Seeds all libtorch random number generators.
        /// </summary>
        /// <param name="seed">The seed value.</param>
        public static void SetSeed(long seed)
        {
            THSTorch_seed(seed);
        }

        [DllImport("LibTorchSharp")]
        private static extern bool THSTorch_isCudaAvailable();

        /// <summary>
        /// Returns whether a CUDA device is available to the native library.
        /// </summary>
        public static bool IsCudaAvailable()
        {
            return THSTorch_isCudaAvailable();
        }

        [DllImport("LibTorchSharp")]
        private static extern IntPtr THSTorch_get_and_reset_last_err();

        /// <summary>
        /// Debug-only check that throws if the native layer recorded an error
        /// (the native error slot is cleared by the call).
        /// Compiled out of release builds via [Conditional("DEBUG")].
        /// </summary>
        /// <exception cref="ExternalException">Raised with the native error text.</exception>
        [Conditional("DEBUG")]
        internal static void AssertNoErrors()
        {
            var error = THSTorch_get_and_reset_last_err();

            if (error != IntPtr.Zero)
            {
                throw new ExternalException(Marshal.PtrToStringAnsi(error));
            }
        }
    }
}
42 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/FunctionalModule.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using TorchSharp.Tensor;
4 |
namespace TorchSharp.NN
{
    /// <summary>
    /// This class is used to represent a functional module (e.g., ReLU):
    /// a module with no trainable parameters and no native module handle.
    /// </summary>
    /// <typeparam name="T">
    /// The concrete module type (CRTP); used only to derive the module name.
    /// </typeparam>
    // NOTE(review): the generic parameter was stripped by text extraction —
    // `typeof(T).Name` below proves the class was generic. Reconstructed as
    // FunctionalModule<T>; element types of the IEnumerable returns were
    // reconstructed from the values actually returned.
    public abstract class FunctionalModule<T> : ProvidedModule
    {
        // No native handle: functional modules are implemented purely by
        // stateless native apply functions.
        internal FunctionalModule() : base(IntPtr.Zero)
        {
        }

        // Functional modules own no submodules, so registration is a no-op.
        public override void RegisterModule(Module module)
        {
        }

        // No parameters, hence no gradients to clear.
        public override void ZeroGrad()
        {
        }

        /// <summary>
        /// Returns an empty sequence: functional modules carry no parameters.
        /// </summary>
        public override IEnumerable<(string name, TorchTensor parameter)> NamedParameters()
        {
            return new List<(string, TorchTensor)>();
        }

        /// <summary>
        /// Returns an empty sequence: functional modules carry no parameters.
        /// </summary>
        public override IEnumerable<TorchTensor> Parameters()
        {
            return new List<TorchTensor>();
        }

        /// <summary>
        /// Returns an empty sequence: functional modules have no submodules.
        /// </summary>
        public override IEnumerable<string> GetModules()
        {
            return new string[0];
        }

        /// <summary>
        /// Returns the concrete module type's simple name.
        /// </summary>
        public override string GetName()
        {
            return typeof(T).Name;
        }
    }
}
45 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/Utils.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include
4 |
5 | #include "TH/THGeneral.h"
6 | #include "torch/torch.h"
7 |
8 | extern thread_local char *torch_last_err;
9 |
10 | typedef torch::Tensor *Tensor;
11 | typedef torch::Scalar *Scalar;
12 | typedef std::shared_ptr * NNModule;
13 | typedef std::shared_ptr * Optimizer;
14 | typedef std::shared_ptr * JITModule;
15 | typedef std::shared_ptr * JITType;
16 |
17 | #define THS_API TH_API
18 |
19 | #ifdef DEBUG
20 | #define CATCH(x) \
21 | try { \
22 | x \
23 | } catch (const c10::Error e) { \
24 | torch_last_err = strdup(e.what()); \
25 | }
26 | #else
27 | #define CATCH(x) x
28 | #endif
29 |
30 | // Utility method used to built sharable strings.
31 | const char * make_sharable_string(const std::string str);
32 |
33 | // Method concerting arrays of tensor pointers into arrays of tensors.
34 | template
35 | std::vector toTensors(torch::Tensor ** tensorPtrs, const int length)
36 | {
37 | std::vector tensors;
38 |
39 | for (int i = 0; i < length; i++)
40 | {
41 | tensors.push_back(*tensorPtrs[i]);
42 | }
43 |
44 | return tensors;
45 | }
46 |
--------------------------------------------------------------------------------
/docfx/api/TorchSharp.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:ManagedReference
2 | items:
3 | - uid: TorchSharp
4 | commentId: N:TorchSharp
5 | id: TorchSharp
6 | children:
7 | - TorchSharp.AutoGradMode
8 | - TorchSharp.Scalar
9 | - TorchSharp.ScalarExtensionMethods
10 | - TorchSharp.Torch
11 | langs:
12 | - csharp
13 | name: TorchSharp
14 | nameWithType: TorchSharp
15 | fullName: TorchSharp
16 | type: Namespace
17 | references:
18 | - uid: TorchSharp.AutoGradMode
19 | parent: TorchSharp
20 | isExternal: false
21 | name: AutoGradMode
22 | nameWithType: TorchSharp.AutoGradMode
23 | fullName: TorchSharp.AutoGradMode
24 | type: class
25 | - uid: TorchSharp.Scalar
26 | parent: TorchSharp
27 | isExternal: false
28 | name: Scalar
29 | nameWithType: TorchSharp.Scalar
30 | fullName: TorchSharp.Scalar
31 | type: class
32 | - uid: TorchSharp.ScalarExtensionMethods
33 | parent: TorchSharp
34 | isExternal: false
35 | name: ScalarExtensionMethods
36 | nameWithType: TorchSharp.ScalarExtensionMethods
37 | fullName: TorchSharp.ScalarExtensionMethods
38 | type: class
39 | - uid: TorchSharp.Torch
40 | parent: TorchSharp
41 | isExternal: false
42 | name: Torch
43 | nameWithType: TorchSharp.Torch
44 | fullName: TorchSharp.Torch
45 | type: class
46 |
--------------------------------------------------------------------------------
/src/TorchSharp/TorchSharp.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netstandard2.0
5 | TorchSharp
6 | true
7 | false
8 | false
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 | TextTemplatingFileGenerator
18 | TorchTensorTyped.generated.cs
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 | True
33 | True
34 | TorchTensorTyped.tt
35 |
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/src/Directory.Build.targets:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
7 |
8 |
9 |
11 |
13 |
15 |
16 |
17 |
18 |
20 |
22 |
23 |
24 |
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/docfx/api/TorchSharp.JIT.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:ManagedReference
2 | items:
3 | - uid: TorchSharp.JIT
4 | commentId: N:TorchSharp.JIT
5 | id: TorchSharp.JIT
6 | children:
7 | - TorchSharp.JIT.DynamicType
8 | - TorchSharp.JIT.Module
9 | - TorchSharp.JIT.TensorType
10 | - TorchSharp.JIT.Type
11 | langs:
12 | - csharp
13 | name: TorchSharp.JIT
14 | nameWithType: TorchSharp.JIT
15 | fullName: TorchSharp.JIT
16 | type: Namespace
17 | references:
18 | - uid: TorchSharp.JIT.DynamicType
19 | parent: TorchSharp.JIT
20 | isExternal: false
21 | name: DynamicType
22 | nameWithType: TorchSharp.JIT.DynamicType
23 | fullName: TorchSharp.JIT.DynamicType
24 | type: class
25 | - uid: TorchSharp.JIT.Module
26 | parent: TorchSharp.JIT
27 | isExternal: false
28 | name: Module
29 | nameWithType: TorchSharp.JIT.Module
30 | fullName: TorchSharp.JIT.Module
31 | type: class
32 | - uid: TorchSharp.JIT.TensorType
33 | parent: TorchSharp.JIT
34 | isExternal: false
35 | name: TensorType
36 | nameWithType: TorchSharp.JIT.TensorType
37 | fullName: TorchSharp.JIT.TensorType
38 | type: class
39 | - uid: TorchSharp.JIT.Type
40 | parent: TorchSharp.JIT
41 | isExternal: false
42 | name: Type
43 | nameWithType: TorchSharp.JIT.Type
44 | fullName: TorchSharp.JIT.Type
45 | type: class
46 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/CMakeLists.txt:
--------------------------------------------------------------------------------
project(LibTorchSharp)

# Locate the libtorch CMake package at the path handed in by the outer build.
find_package(Torch REQUIRED PATHS ${LIBTORCH_PATH})

set(SOURCES
    cifar10.h
    THSAutograd.h
    THSData.h
    THSJIT.h
    THSNN.h
    THSTensor.h
    THSTorch.h
    Utils.h
    cifar10.cpp
    THSAutograd.cpp
    THSData.cpp
    THSJIT.cpp
    THSNN.cpp
    THSTensor.cpp
    THSTorch.cpp
    Utils.cpp)

# Non-Windows: compile the generated version file in, and set up rpaths so the
# shared library resolves its libtorch dependencies from its own directory.
if(NOT WIN32)
    list(APPEND SOURCES ${VERSION_FILE_PATH})
    if(NOT APPLE)
        SET(CMAKE_SKIP_BUILD_RPATH FALSE)
        SET(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)
        SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
        # $ORIGIN makes the loader search next to the installed library.
        SET(CMAKE_INSTALL_RPATH "$ORIGIN/")
    else()
        SET(CMAKE_MACOSX_RPATH TRUE)
    endif()
endif()

# Add libTorch bindings
include_directories(${TORCH_INCLUDE_DIRS})

add_library(LibTorchSharp SHARED ${SOURCES} ${RESOURCES})

target_link_libraries(LibTorchSharp ${TORCH_LIBRARIES})
set_property(TARGET LibTorchSharp PROPERTY CXX_STANDARD 14)

# macOS analogue of $ORIGIN: search relative to the loading binary.
if(APPLE)
    set_target_properties(LibTorchSharp PROPERTIES INSTALL_RPATH "@loader_path;@executable_path;")
endif()

# Helper defined by the outer build; installs the library plus debug symbols.
install_library_and_symbols (LibTorchSharp)
48 |
--------------------------------------------------------------------------------
/src/TorchSharp/Tensor/Types.ttinclude:
--------------------------------------------------------------------------------
1 | <#+
// T4 helper describing one tensor element type; drives code generation of the
// typed tensor wrappers (TorchTensorTyped.generated.cs).
public class TorchTypeDef {

    // Name: the generated type's suffix (e.g. "Float" -> FloatTensor).
    public readonly string Name;
    // Storage: the C# storage keyword used in generated signatures.
    public readonly string Storage;
    // CSType: the System.* type name used in marshaling (note Float -> "Single").
    public readonly string CSType;

    // Category flags used by the template to include/exclude generated members.
    public readonly bool IsInt;
    public readonly bool IsFloat;
    public readonly bool IsLong;

    private TorchTypeDef(string name, string storage, string cstype) {
        this.Name = name;
        this.Storage = storage;
        this.CSType = cstype;

        // NOTE(review): "Byte" is deliberately(?) not counted as IsInt here,
        // so byte tensors skip the integer-only generated members — confirm
        // this is intentional before relying on it.
        this.IsInt = name == "Short" || name == "Int" || name == "Long";
        this.IsFloat = name == "Float" || name == "Double";
        this.IsLong = name == "Long";
    }

    // The fixed set of element types the template iterates over.
    public static readonly TorchTypeDef[] Types = {
        new TorchTypeDef("Byte", "byte", "Byte" ),
        new TorchTypeDef("Short", "short", "Short" ),
        new TorchTypeDef("Int", "int", "Int" ),
        new TorchTypeDef("Long", "long", "Long" ),
        new TorchTypeDef("Double", "double", "Double"),
        new TorchTypeDef("Float", "float", "Single"),
    };

    // Spellings used by the template for native pointer-ish parameters.
    public readonly string Ptr = "IntPtr"; // "HType";
    public readonly string PtrDiff = "int /* ptrdiff_t */";
}
34 | #>
35 |
--------------------------------------------------------------------------------
/docs/logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
4 |
26 |
--------------------------------------------------------------------------------
/src/TorchSharp/JIT/Type/TensorType.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 |
namespace TorchSharp.JIT
{
    /// <summary>
    /// Represents the static tensor type of a TorchScript module input/output,
    /// exposing its scalar type, rank, and device.
    /// </summary>
    public sealed class TensorType : Type
    {
        internal TensorType(IntPtr handle) : base(handle)
        {
            // NOTE(review): this re-wraps the same raw pointer the base ctor
            // received; if the base also created an owning handle this risks a
            // double release — verify against Type's constructor.
            this.handle = new HType(handle, true);
        }

        // Takes over the native handle from an untyped Type, neutering the
        // source so its Dispose does not release the shared pointer.
        internal TensorType(Type type) : base()
        {
            handle = type.handle;
            type.handle = new HType(IntPtr.Zero, true);
            type.Dispose();
        }

        [DllImport("LibTorchSharp")]
        private static extern short THSJIT_getScalarFromTensorType(HType handle);

        /// <summary>
        /// Returns the element (scalar) type of the tensor.
        /// </summary>
        public Tensor.ATenScalarMapping GetScalarType()
        {
            return (Tensor.ATenScalarMapping)THSJIT_getScalarFromTensorType(handle);
        }

        [DllImport("LibTorchSharp")]
        private static extern int THSJIT_getTensorTypeDimensions(HType handle);

        /// <summary>
        /// Returns the number of dimensions (rank) of the tensor.
        /// </summary>
        public int GetDimensions()
        {
            return THSJIT_getTensorTypeDimensions(handle);
        }

        // Fixed: declared to return IntPtr rather than string. With a string
        // return, default P/Invoke marshaling assumes the native buffer was
        // allocated with CoTaskMemAlloc and frees it after copying — corrupting
        // the heap for natively-owned strings. We marshal manually instead.
        [DllImport("LibTorchSharp")]
        private static extern IntPtr THSJIT_getTensorDevice(HType handle);

        /// <summary>
        /// Returns the device the tensor lives on (e.g. "cpu", "cuda:0").
        /// </summary>
        public string GetDevice()
        {
            return Marshal.PtrToStringAnsi(THSJIT_getTensorDevice(handle));
        }
    }
}
45 |
--------------------------------------------------------------------------------
/src/Native/gen-buildsys-win.bat:
--------------------------------------------------------------------------------
@if "%_echo%" neq "on" echo off
rem
rem This file invokes cmake and generates the build system for windows.
rem Expects three arguments: the source dir, the VS version, and the target arch.

:: Count the arguments so we can validate the expected arity of three.
set argC=0
for %%x in (%*) do Set /A argC+=1

if NOT %argC%==3 GOTO :USAGE
if %1=="/?" GOTO :USAGE

setlocal
set __sourceDir=%~dp0

set __ExtraCmakeParams=

set __VSString=%2
:: Remove quotes
set __VSString=%__VSString:"=%



:: CMakePath may be pre-set by the caller; otherwise probe for it below.
if defined CMakePath goto DoGen

:: Eval the output from probe-win1.ps1
pushd "%__sourceDir%"
for /f "delims=" %%a in ('powershell -NoProfile -ExecutionPolicy ByPass "& .\probe-win.ps1"') do %%a
popd

:DoGen
:: Set the target architecture to a format cmake understands.
if /i "%3" == "x64" (set __ExtraCmakeParams=%__ExtraCmakeParams% -A x64)
if /i "%3" == "x86" (set __ExtraCmakeParams=%__ExtraCmakeParams% -A Win32)

:: Generate the VS solution: %1 is the source tree, -B. writes into the cwd.
"%CMakePath%" "-DCMAKE_BUILD_TYPE=%CMAKE_BUILD_TYPE%" "-DCMAKE_INSTALL_PREFIX=%__CMakeBinDir%" "-DLIBTORCH_PATH=%LIBTORCH_PATH%" -G "Visual Studio %__VSString%" %__ExtraCmakeParams% -B. -H%1
endlocal
GOTO :DONE

:USAGE
:: NOTE(review): the argument placeholders in these echo lines appear to have
:: been stripped by text extraction (angle-bracket text lost); the strings are
:: runtime output, so they are left as-is here — restore from history.
echo "Usage..."
echo "gen-buildsys-win.bat "
echo "Specify the VSVersion to be used - VS2015, VS2017 or VS2019"
echo "Specify the Target Architecture - x86, or x64."
EXIT /B 1

:DONE
EXIT /B 0
47 |
--------------------------------------------------------------------------------
/docfx/api/AllocatePinnedArray.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:ManagedReference
2 | items:
3 | - uid: AllocatePinnedArray
4 | id: AllocatePinnedArray
5 | langs:
6 | - csharp
7 | name: AllocatePinnedArray
8 | nameWithType: AllocatePinnedArray
9 | fullName: AllocatePinnedArray
10 | type: Delegate
11 | assemblies:
12 | - TorchSharp
13 | namespace: ''
14 | syntax:
15 | content: >-
16 | [System.Runtime.InteropServices.UnmanagedFunctionPointer(System.Runtime.InteropServices.CallingConvention.Cdecl)]
17 |
18 | public delegate IntPtr AllocatePinnedArray(IntPtr length);
19 | parameters:
20 | - id: length
21 | type: System.IntPtr
22 | description: ''
23 | return:
24 | type: System.IntPtr
25 | description: ''
26 | content.csharp: >-
27 | [System.Runtime.InteropServices.UnmanagedFunctionPointer(System.Runtime.InteropServices.CallingConvention.Cdecl)]
28 |
29 | public delegate IntPtr AllocatePinnedArray(IntPtr length);
30 | inheritance:
31 | - System.Delegate
32 | attributes:
33 | - type: System.Runtime.InteropServices.UnmanagedFunctionPointerAttribute
34 | modifiers.csharp:
35 | - public
36 | references:
37 | - uid: System.Delegate
38 | parent: System
39 | isExternal: true
40 | name: Delegate
41 | nameWithType: Delegate
42 | fullName: System.Delegate
43 | - uid: System.IntPtr
44 | parent: System
45 | isExternal: true
46 | name: IntPtr
47 | nameWithType: IntPtr
48 | fullName: System.IntPtr
49 |
--------------------------------------------------------------------------------
/docfx/docfx.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": [
3 | {
4 | "src": [
5 | {
6 | "files": [
7 | "TorchSharp/TorchSharp.csproj"
8 | ],
9 | "exclude": [
10 | "**/obj/**",
11 | "**/bin/**",
12 | "_site/**"
13 | ]
14 | }
15 | ],
16 | "dest": "api"
17 | }
18 | ],
19 | "build": {
20 | "content": [
21 | {
22 | "files": [
23 | "api/**.yml",
24 | "api/index.md"
25 | ]
26 | },
27 | {
28 | "files": [
29 | "articles/**.md",
30 | "articles/**/toc.yml",
31 | "toc.yml",
32 | "*.md"
33 | ],
34 | "exclude": [
35 | "obj/**",
36 | "_site/**"
37 | ]
38 | }
39 | ],
40 | "resource": [
41 | {
42 | "files": [
43 | "images/**"
44 | ],
45 | "exclude": [
46 | "obj/**",
47 | "_site/**"
48 | ]
49 | }
50 | ],
51 | "overwrite": [
52 | {
53 | "files": [
54 | "apidoc/**.md"
55 | ],
56 | "exclude": [
57 | "obj/**",
58 | "_site/**"
59 | ]
60 | }
61 | ],
62 | "dest": "../docs",
63 | "globalMetadataFiles": [],
64 | "fileMetadataFiles": [],
65 | "template": [
66 | "default"
67 | ],
68 | "postProcessors": [],
69 | "noLangKeyword": false,
70 | "keepFileLink": false
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/docfx/api/TorchSharp.NN/TorchSharp.NN.LossFunction.Loss.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:ManagedReference
2 | items:
3 | - uid: TorchSharp.NN.LossFunction.Loss
4 | id: LossFunction.Loss
5 | langs:
6 | - csharp
7 | name: LossFunction.Loss
8 | nameWithType: TorchSharp.NN.LossFunction.Loss
9 | fullName: TorchSharp.NN.LossFunction.Loss
10 | type: Delegate
11 | assemblies:
12 | - TorchSharp
13 | namespace: TorchSharp.NN
14 | syntax:
15 | content: public delegate TorchSharp.Tensor.TorchTensor LossFunction.Loss(TorchTensor source, TorchTensor target);
16 | parameters:
17 | - id: source
18 | type: TorchSharp.Tensor.TorchTensor
19 | description: ''
20 | - id: target
21 | type: TorchSharp.Tensor.TorchTensor
22 | description: ''
23 | return:
24 | type: TorchSharp.Tensor.TorchTensor
25 | description: ''
26 | content.csharp: public delegate TorchSharp.Tensor.TorchTensor LossFunction.Loss(TorchTensor source, TorchTensor target);
27 | inheritance:
28 | - System.Delegate
29 | modifiers.csharp:
30 | - public
31 | references:
32 | - uid: TorchSharp.NN
33 | commentId: N:TorchSharp.NN
34 | isExternal: false
35 | name: TorchSharp.NN
36 | nameWithType: TorchSharp.NN
37 | fullName: TorchSharp.NN
38 | type: namespace
39 | - uid: System.Delegate
40 | parent: System
41 | isExternal: true
42 | name: Delegate
43 | nameWithType: Delegate
44 | fullName: System.Delegate
45 | - uid: TorchSharp.Tensor.TorchTensor
46 | parent: TorchSharp.Tensor
47 | isExternal: false
48 | name: TorchTensor
49 | nameWithType: TorchTensor
50 | fullName: TorchSharp.Tensor.TorchTensor
51 |
--------------------------------------------------------------------------------
/docfx/api/TorchSharp.JIT/TorchSharp.JIT.DynamicType.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:ManagedReference
2 | items:
3 | - uid: TorchSharp.JIT.DynamicType
4 | id: DynamicType
5 | children: []
6 | langs:
7 | - csharp
8 | name: DynamicType
9 | nameWithType: TorchSharp.JIT.DynamicType
10 | fullName: TorchSharp.JIT.DynamicType
11 | type: Class
12 | assemblies:
13 | - TorchSharp
14 | namespace: TorchSharp.JIT
15 | syntax:
16 | content: 'public sealed class DynamicType : TorchSharp.JIT.Type'
17 | content.csharp: 'public sealed class DynamicType : TorchSharp.JIT.Type'
18 | inheritance:
19 | - System.Object
20 | - TorchSharp.JIT.Type
21 | implements: []
22 | inheritedMembers:
23 | - TorchSharp.JIT.Type.Dispose
24 | - TorchSharp.JIT.Type.Dispose(System.Boolean)
25 | modifiers.csharp:
26 | - public
27 | - sealed
28 | references:
29 | - uid: TorchSharp.JIT
30 | commentId: N:TorchSharp.JIT
31 | isExternal: false
32 | name: TorchSharp.JIT
33 | nameWithType: TorchSharp.JIT
34 | fullName: TorchSharp.JIT
35 | type: namespace
36 | - uid: TorchSharp.JIT.Type
37 | parent: TorchSharp.JIT
38 | isExternal: false
39 | name: Type
40 | nameWithType: Type
41 | fullName: TorchSharp.JIT.Type
42 | - uid: TorchSharp.JIT.Type.Dispose
43 | parent: TorchSharp.JIT.Type
44 | isExternal: false
45 | name: Dispose()
46 | nameWithType: Type.Dispose()
47 | fullName: TorchSharp.JIT.Type.Dispose()
48 | type: method
49 | - uid: TorchSharp.JIT.Type.Dispose(System.Boolean)
50 | parent: TorchSharp.JIT.Type
51 | isExternal: false
52 | name: Dispose(Boolean)
53 | nameWithType: Type.Dispose(Boolean)
54 | fullName: TorchSharp.JIT.Type.Dispose(Boolean)
55 | type: method
56 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/THSJIT.h:
--------------------------------------------------------------------------------
#pragma once

#include "../Stdafx.h"

#include "torch/script.h"

#include "Utils.h"

// Copied from libtorch to share the type as an int8_t.
enum TypeKind : int8_t {
#define DEFINE_TYPE(T) T,
    C10_FORALL_TYPES(DEFINE_TYPE)
#undef DEFINE_TYPE
};

// API.

// Loads a TorchScript module from a file.
EXPORT_API(JITModule) THSJIT_loadModule(const char* filename);

// Gets the number of submodules contained in the given module.
// NOTE(review): `long` is 32-bit on Windows (LLP64); verify the managed
// P/Invoke signature marshals a matching width.
EXPORT_API(long) THSJIT_getNumModules(const JITModule module);

// Gets the submodule with the given name from the given module.
EXPORT_API(JITModule) THSJIT_getModuleFromName(const JITModule module, const char* name);

// Returns the number of inputs expected by the given module.
EXPORT_API(int) THSJIT_getNumberOfInputs(const JITModule module);

// Returns the number of outputs generated by the given module.
EXPORT_API(int) THSJIT_getNumberOfOutputs(const JITModule module);

// Returns the type of the nth input.
EXPORT_API(JITType) THSJIT_getInputType(const JITModule module, const int n);

// Returns the type of the nth output.
EXPORT_API(JITType) THSJIT_getOutputType(const JITModule module, const int n);

// Forward pass over the module with `length` input tensors; returns the result tensor.
EXPORT_API(Tensor) THSJIT_forward(const JITModule module, const Tensor * tensorPtrs, const int length);

// Disposes the module.
EXPORT_API(void) THSJIT_moduleDispose(const JITModule module);

// Disposes the type.
EXPORT_API(void) THSJIT_typeDispose(const JITType type);
47 |
--------------------------------------------------------------------------------
/src/TorchSharp/Data/Loader.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 |
4 | namespace TorchSharp.Data
5 | {
6 | public class Loader
7 | {
8 | [DllImport("LibTorchSharp")]
9 | private static extern IntPtr THSData_loaderMNIST(string filename, long batchSize, bool isTrain);
10 |
11 | ///
12 | /// Create an iterator scanning the MNIST dataset.
13 | ///
14 | /// The position of the MNIST dataset
15 | /// The required batch size
16 | /// Wheter the iterator is for training or testing
17 | ///
18 | static public DataIterator MNIST(string filename, long batchSize, bool isTrain = true)
19 | {
20 | return new DataIterator(THSData_loaderMNIST(filename, batchSize, isTrain));
21 | }
22 |
23 | [DllImport("LibTorchSharp")]
24 | private static extern IntPtr THSData_loaderCIFAR10(string path, long batchSize, bool isTrain);
25 |
26 | ///
27 | /// Create an iterator scanning the CIFAR10 dataset.
28 | ///
29 | /// The position of the CIFAR10 dataset
30 | /// The required batch size
31 | /// Wheter the iterator is for training or testing
32 | ///
33 | static public DataIterator CIFAR10(string path, long batchSize, bool isTrain = true)
34 | {
35 | return new DataIterator(THSData_loaderCIFAR10(path, batchSize, isTrain));
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/Conv2D.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | TorchSharp.NN.ProvidedModule
10 |
11 |
12 |
13 | To be added.
14 | To be added.
15 |
16 |
17 |
18 |
19 |
20 | Method
21 |
22 | 1.0.0.0
23 |
24 |
25 | TorchSharp.Tensor.TorchTensor
26 |
27 |
28 |
29 |
30 |
31 | To be added.
32 | To be added.
33 | To be added.
34 | To be added.
35 |
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/Init.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 | using TorchSharp.Tensor;
4 |
5 | namespace TorchSharp.NN
6 | {
7 | public static class Init
8 | {
9 | [DllImport("LibTorchSharp")]
10 | private static extern void THSNN_initUniform(IntPtr src, double low, double high);
11 |
12 | public static void Uniform(TorchTensor tensor, double low = 0, double high = 1)
13 | {
14 | THSNN_initUniform(tensor.Handle, low, high);
15 | }
16 |
17 | [DllImport("LibTorchSharp")]
18 | private static extern void THSNN_initKaimingUniform(IntPtr src, double a);
19 |
20 | public static void KaimingUniform(TorchTensor tensor, double a = 0)
21 | {
22 | THSNN_initKaimingUniform(tensor.Handle, a);
23 | }
24 |
25 | public static (long fanIn, long fanOut) CalculateFanInAndFanOut(TorchTensor tensor)
26 | {
27 | var dimensions = tensor.Dimensions;
28 |
29 | if (dimensions < 2)
30 | {
31 | throw new ArgumentException("Fan in and fan out can not be computed for tensor with fewer than 2 dimensions");
32 | }
33 |
34 | var shape = tensor.Shape;
35 | // Linear
36 | if (dimensions == 2)
37 | {
38 | return (shape[1], shape[0]);
39 | }
40 | else
41 | {
42 | var numInputFMaps = tensor.Shape[1];
43 | var numOutputFMaps = tensor.Shape[0];
44 | var receptiveFieldSize = tensor[0, 0].NumberOfElements;
45 |
46 | return (numInputFMaps * receptiveFieldSize, numOutputFMaps * receptiveFieldSize);
47 | }
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/docs/styles/search-worker.js:
--------------------------------------------------------------------------------
(function () {
    importScripts('lunr.min.js');

    // Build the search index: documents are keyed by 'href' and scored
    // primarily on 'title', secondarily on 'keywords'.
    var lunrIndex = lunr(function () {
        // Drop lunr's default English stop-word filter; a docfx-specific one
        // is installed asynchronously below once search-stopwords.json loads.
        this.pipeline.remove(lunr.stopWordFilter);
        this.ref('href');
        this.field('title', { boost: 50 });
        this.field('keywords', { boost: 20 });
    });
    // NOTE(review): 'seperator' (sic) appears to be the actual property name in
    // the lunr version bundled with docfx — do not "fix" the spelling without
    // verifying against the shipped lunr.min.js.
    lunr.tokenizer.seperator = /[\s\-\.]+/;

    // Fetch the stop-word list and append a filter built from it to the pipeline.
    // NOTE(review): this request races with the index-data request below; if the
    // documents arrive first they are indexed without stop-word filtering —
    // confirm whether that ordering matters for search quality.
    var stopWordsRequest = new XMLHttpRequest();
    stopWordsRequest.open('GET', '../search-stopwords.json');
    stopWordsRequest.onload = function () {
        if (this.status != 200) {
            return;
        }
        var stopWords = JSON.parse(this.responseText);
        var docfxStopWordFilter = lunr.generateStopWordFilter(stopWords);
        lunr.Pipeline.registerFunction(docfxStopWordFilter, 'docfxStopWordFilter');
        lunrIndex.pipeline.add(docfxStopWordFilter);
    }
    stopWordsRequest.send();

    // Fetch the pre-generated search data and add every document to the index.
    var searchData = {};
    var searchDataRequest = new XMLHttpRequest();

    searchDataRequest.open('GET', '../index.json');
    searchDataRequest.onload = function () {
        if (this.status != 200) {
            return;
        }
        searchData = JSON.parse(this.responseText);
        for (var prop in searchData) {
            if (searchData.hasOwnProperty(prop)) {
                lunrIndex.add(searchData[prop]);
            }
        }
        // Tell the UI thread the index is ready to accept queries.
        postMessage({ e: 'index-ready' });
    }
    searchDataRequest.send();

    // Query protocol: receive { q: <query string> }; reply with
    // { e: 'query-ready', q: <query>, d: [{ href, title, keywords }, ...] }.
    onmessage = function (oEvent) {
        var q = oEvent.data.q;
        var hits = lunrIndex.search(q);
        var results = [];
        hits.forEach(function (hit) {
            var item = searchData[hit.ref];
            results.push({ 'href': item.href, 'title': item.title, 'keywords': item.keywords });
        });
        postMessage({ e: 'query-ready', q: q, d: results });
    }
})();
54 |
--------------------------------------------------------------------------------
/docs/search-stopwords.json:
--------------------------------------------------------------------------------
1 | [
2 | "a",
3 | "able",
4 | "about",
5 | "across",
6 | "after",
7 | "all",
8 | "almost",
9 | "also",
10 | "am",
11 | "among",
12 | "an",
13 | "and",
14 | "any",
15 | "are",
16 | "as",
17 | "at",
18 | "be",
19 | "because",
20 | "been",
21 | "but",
22 | "by",
23 | "can",
24 | "cannot",
25 | "could",
26 | "dear",
27 | "did",
28 | "do",
29 | "does",
30 | "either",
31 | "else",
32 | "ever",
33 | "every",
34 | "for",
35 | "from",
36 | "get",
37 | "got",
38 | "had",
39 | "has",
40 | "have",
41 | "he",
42 | "her",
43 | "hers",
44 | "him",
45 | "his",
46 | "how",
47 | "however",
48 | "i",
49 | "if",
50 | "in",
51 | "into",
52 | "is",
53 | "it",
54 | "its",
55 | "just",
56 | "least",
57 | "let",
58 | "like",
59 | "likely",
60 | "may",
61 | "me",
62 | "might",
63 | "most",
64 | "must",
65 | "my",
66 | "neither",
67 | "no",
68 | "nor",
69 | "not",
70 | "of",
71 | "off",
72 | "often",
73 | "on",
74 | "only",
75 | "or",
76 | "other",
77 | "our",
78 | "own",
79 | "rather",
80 | "said",
81 | "say",
82 | "says",
83 | "she",
84 | "should",
85 | "since",
86 | "so",
87 | "some",
88 | "than",
89 | "that",
90 | "the",
91 | "their",
92 | "them",
93 | "then",
94 | "there",
95 | "these",
96 | "they",
97 | "this",
98 | "tis",
99 | "to",
100 | "too",
101 | "twas",
102 | "us",
103 | "wants",
104 | "was",
105 | "we",
106 | "were",
107 | "what",
108 | "when",
109 | "where",
110 | "which",
111 | "while",
112 | "who",
113 | "whom",
114 | "why",
115 | "will",
116 | "with",
117 | "would",
118 | "yet",
119 | "you",
120 | "your"
121 | ]
122 |
--------------------------------------------------------------------------------
/Directory.Build.targets:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
6 |
7 |
8 |
10 |
11 |
12 | lib
13 | .dll
14 | .so
15 | .dylib
16 |
17 |
18 |
19 |
20 | $(NativeOutputPath)$(LibPrefix)%(NativeAssemblyReference.Identity)$(LibExtension)%(NativeAssemblyReference.ExtraExtension)
21 |
22 |
23 |
24 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/THSJIT.cpp:
--------------------------------------------------------------------------------
1 | #include "THSJIT.h"
2 |
// Deserializes a TorchScript module from `filename`.
// The result is allocated with `new` and owned by the caller; release it
// with THSJIT_moduleDispose (which deletes it).
// NOTE(review): the shared_ptr template argument appears to have been lost
// in extraction — restore it from the original source.
JITModule THSJIT_loadModule(const char* filename)
{
    return new std::shared_ptr(torch::jit::load(filename));
}
7 |
8 | long THSJIT_getNumModules(const JITModule module)
9 | {
10 | return (*module)->get_modules().size();
11 | }
12 |
// Gets the submodule with the given name.
// The result is allocated with `new` and owned by the caller; release it
// with THSJIT_moduleDispose.
// NOTE(review): the shared_ptr template argument appears to have been lost
// in extraction — restore it from the original source.
JITModule THSJIT_getModuleFromName(const JITModule module, const char* name)
{
    return new std::shared_ptr((*module)->get_module(name));
}
17 |
18 | int THSJIT_getNumberOfInputs(const JITModule module)
19 | {
20 | auto method = (*module)->find_method("forward");
21 | auto args = method->getSchema().arguments();
22 | return args.size();
23 | }
24 |
25 | int THSJIT_getNumberOfOutputs(const JITModule module)
26 | {
27 | auto method = (*module)->find_method("forward");
28 | auto outputs = method->getSchema().returns();
29 | return outputs.size();
30 | }
31 |
// Returns the type of the nth input of the "forward" method.
// The result is allocated with `new` and owned by the caller; release it
// with THSJIT_typeDispose. `n` is not range-checked here.
// NOTE(review): the shared_ptr template argument appears to have been lost
// in extraction — restore it from the original source.
JITType THSJIT_getInputType(const JITModule module, const int n)
{
    auto method = (*module)->find_method("forward");
    auto args = method->getSchema().arguments();
    auto type = args[n].type();

    return new std::shared_ptr(type);
}
40 |
// Returns the type of the nth output of the "forward" method.
// The result is allocated with `new` and owned by the caller; release it
// with THSJIT_typeDispose. `n` is not range-checked here.
// NOTE(review): the shared_ptr template argument appears to have been lost
// in extraction — restore it from the original source.
JITType THSJIT_getOutputType(const JITModule module, const int n)
{
    auto method = (*module)->find_method("forward");
    auto outputs = method->getSchema().returns();
    auto type = outputs[n].type();

    return new std::shared_ptr(type);
}
49 |
// Runs the module's forward pass over `length` input tensors and returns a
// freshly allocated tensor (caller-owned) holding the single result.
// NOTE(review): the C-style cast strips const from the input array — prefer
// a named cast once the exact Tensor alias is confirmed.
Tensor THSJIT_forward(const JITModule module, const Tensor* tensorPtrs, const int length)
{
    return new torch::Tensor((*module)->forward(toTensors((torch::Tensor**)tensorPtrs, length)).toTensor());
}
54 |
// Releases a module handle previously allocated by THSJIT_loadModule or
// THSJIT_getModuleFromName.
void THSJIT_moduleDispose(const JITModule module)
{
    delete module;
}
59 |
// Releases a type handle previously allocated by THSJIT_getInputType or
// THSJIT_getOutputType.
void THSJIT_typeDispose(const JITType type)
{
    delete type;
}
--------------------------------------------------------------------------------
/src/TorchSharp/PinnedArray.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 |
/// <summary>
/// Allocator of T[] that pins the memory and handles unpinning.
/// (taken from StackOverflow)
/// </summary>
/// <typeparam name="T">The value-type element of the pinned array.</typeparam>
internal sealed class PinnedArray<T> : IDisposable where T : struct
{
    private GCHandle handle;

    /// <summary>The currently pinned array, or null before any CreateArray call.</summary>
    public T[] Array { get; private set; }

    /// <summary>
    /// Allocates and pins a fresh array of <paramref name="length"/> elements,
    /// releasing any previously pinned array first.
    /// </summary>
    /// <returns>The address of the pinned buffer, suitable for native interop.</returns>
    public IntPtr CreateArray(int length)
    {
        FreeHandle();

        Array = new T[length];

        // try... finally trick to be sure that the code isn't interrupted by asynchronous exceptions
        try
        {
        }
        finally
        {
            handle = GCHandle.Alloc(Array, GCHandleType.Pinned);
        }

        return handle.AddrOfPinnedObject();
    }

    /// <summary>
    /// Native-callback-friendly overload: the length arrives as an IntPtr.
    /// </summary>
    public IntPtr CreateArray(IntPtr length)
    {
        return CreateArray((int)length);
    }

    /// <summary>
    /// Pins an existing array, releasing any previously pinned array first.
    /// </summary>
    /// <returns>The address of the pinned buffer, suitable for native interop.</returns>
    public IntPtr CreateArray(T[] array)
    {
        FreeHandle();

        Array = array;

        // try... finally trick to be sure that the code isn't interrupted by asynchronous exceptions
        try
        {
        }
        finally
        {
            handle = GCHandle.Alloc(Array, GCHandleType.Pinned);
        }

        return handle.AddrOfPinnedObject();
    }

    /// <summary>
    /// Disposes any disposable elements, then unpins the array.
    /// Safe to call multiple times, and safe before any CreateArray call.
    /// </summary>
    public void Dispose()
    {
        // Guard: Array is null until the first CreateArray call; the original
        // code would throw NullReferenceException here.
        if (Array != null)
        {
            foreach (var val in Array)
            {
                (val as IDisposable)?.Dispose();
            }
        }
        FreeHandle();
        // Standard dispose pattern: the finalizer only repeats FreeHandle().
        GC.SuppressFinalize(this);
    }

    ~PinnedArray()
    {
        FreeHandle();
    }

    // Unpins the array if it is currently pinned; idempotent.
    private void FreeHandle()
    {
        if (handle.IsAllocated)
        {
            handle.Free();
        }
    }
}
78 |
// Native callback signature letting native code request a pinned managed
// array of `length` elements; the implementation returns the pinned base address.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate IntPtr AllocatePinnedArray(IntPtr length);
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/Dropout.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | TorchSharp.NN.FunctionalModule<TorchSharp.NN.Dropout>
10 |
11 | TorchSharp.NN.Dropout
12 |
13 |
14 |
15 |
16 |
17 | This class is used to represent a dropout module.
18 |
19 | To be added.
20 |
21 |
22 |
23 |
24 |
25 | Method
26 |
27 | 1.0.0.0
28 |
29 |
30 | TorchSharp.Tensor.TorchTensor
31 |
32 |
33 |
34 |
35 |
36 | To be added.
37 | To be added.
38 | To be added.
39 | To be added.
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/MaxPool2D.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | TorchSharp.NN.FunctionalModule<TorchSharp.NN.MaxPool2D>
10 |
11 | TorchSharp.NN.MaxPool2D
12 |
13 |
14 |
15 |
16 |
This class is used to represent a MaxPool2D module.
18 |
19 | To be added.
20 |
21 |
22 |
23 |
24 |
25 | Method
26 |
27 | 1.0.0.0
28 |
29 |
30 | TorchSharp.Tensor.TorchTensor
31 |
32 |
33 |
34 |
35 |
36 | To be added.
37 | To be added.
38 | To be added.
39 | To be added.
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/src/Directory.Build.props:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | true
7 |
11 | $(NoWarn);1591
12 | $(WarningsNotAsErrors);1591
13 |
14 | $(MSBuildThisFileDirectory)\Source.ruleset
15 |
16 |
18 | $(BaseOutputPath)$(TargetArchitecture).Release\Native
19 |
20 | win
21 | linux
22 | osx
23 | $(PackageRid)-$(TargetArchitecture)
24 |
25 |
26 |
27 |
30 | false
31 | Analyzer
32 |
33 |
35 |
36 |
37 |
38 |
39 | stylecop.json
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/LogSoftMax.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | TorchSharp.NN.FunctionalModule<TorchSharp.NN.LogSoftMax>
10 |
11 | TorchSharp.NN.LogSoftMax
12 |
13 |
14 |
15 |
16 |
17 | This class is used to represent a log softmax module.
18 |
19 | To be added.
20 |
21 |
22 |
23 |
24 |
25 | Method
26 |
27 | 1.0.0.0
28 |
29 |
30 | TorchSharp.Tensor.TorchTensor
31 |
32 |
33 |
34 |
35 |
36 | To be added.
37 | To be added.
38 | To be added.
39 | To be added.
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/AdaptiveAvgPool2D.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | TorchSharp.NN.FunctionalModule<TorchSharp.NN.AdaptiveAvgPool2D>
10 |
11 | TorchSharp.NN.AdaptiveAvgPool2D
12 |
13 |
14 |
15 |
16 |
This class is used to represent an AdaptiveAvgPool2D module.
18 |
19 | To be added.
20 |
21 |
22 |
23 |
24 |
25 | Method
26 |
27 | 1.0.0.0
28 |
29 |
30 | TorchSharp.Tensor.TorchTensor
31 |
32 |
33 |
34 |
35 |
36 | To be added.
37 | To be added.
38 | To be added.
39 | To be added.
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/cifar10.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include
4 | #include
5 | #include
6 | #include
7 |
8 | #include "torch/torch.h"
9 |
10 | #include
11 |
12 | #include
13 | #include
14 |
// NOTE(review): several template-argument lists in this header appear to have
// been lost in extraction (e.g. std::pair<...>, Dataset<...>, optional<...>);
// restore them from the original source before building.

// Joins `head` and `tail` into a single filesystem path string.
std::string join_paths(std::string head, const std::string& tail);
// Reads the dataset files under `root`; `train` selects the training split.
std::pair read_dir(const std::string& root, bool train);
// Reads the CIFAR10 binary files at `path` into a pair of tensors.
std::pair read_cifar10(std::string path);

namespace torch {
namespace data {
namespace datasets {
/// The CIFAR10 dataset.
class CIFAR10 : public Dataset {
 public:
  /// The mode in which the dataset is loaded.
  enum class Mode { kTrain, kTest };

  /// Loads the CIFAR10 dataset from the `root` path.
  ///
  /// The supplied `root` path should contain the *content* of the unzipped
  /// CIFAR10 dataset, available from https://www.cs.toronto.edu/~kriz/cifar.html.
  explicit CIFAR10(const std::string& root, Mode mode = Mode::kTrain);

  /// Returns the `Example` at the given `index`.
  Example<> get(size_t index) override;

  /// Returns the size of the dataset.
  optional size() const override;

  /// Returns true if this is the training subset of MNIST.
  bool is_train() const noexcept;

  /// Returns all images stacked into a single tensor.
  const Tensor& images() const;

  /// Returns all targets stacked into a single tensor.
  const Tensor& targets() const;

 private:
  // Backing storage for images()/targets(); filled by the constructor.
  Tensor images_, targets_;
  // True when constructed with Mode::kTrain; reported by is_train().
  bool is_training;
};
} // namespace datasets
} // namespace data
} // namespace torch
56 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/FeatureDropout.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | TorchSharp.NN.FunctionalModule<TorchSharp.NN.FeatureDropout>
10 |
11 | TorchSharp.NN.FeatureDropout
12 |
13 |
14 |
15 |
16 |
This class is used to represent a dropout module for 2d/3d convolutional layers.
18 |
19 | To be added.
20 |
21 |
22 |
23 |
24 |
25 | Method
26 |
27 | 1.0.0.0
28 |
29 |
30 | TorchSharp.Tensor.TorchTensor
31 |
32 |
33 |
34 |
35 |
36 | To be added.
37 | To be added.
38 | To be added.
39 | To be added.
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/Sequential.cs:
--------------------------------------------------------------------------------
1 |
2 | using System;
3 | using System.Collections.Generic;
4 | using System.Linq;
5 | using TorchSharp.Tensor;
6 |
7 | namespace TorchSharp.NN
8 | {
    /// <summary>
    /// A container module that chains its submodules: each module's output is
    /// fed as the input of the next.
    /// NOTE(review): generic type arguments (e.g. on IEnumerable/List) appear
    /// to have been lost in extraction — restore them from the original source.
    /// </summary>
    public class Sequential : ProvidedModule
    {
        /// <summary>
        /// Wraps an existing native module handle.
        /// </summary>
        internal Sequential(IntPtr handle) : base(handle)
        {
        }

        /// <summary>
        /// Creates a Sequential module, registering the given submodules in order.
        /// </summary>
        public Sequential(IEnumerable modules) : base(IntPtr.Zero)
        {
            foreach (var module in modules)
            {
                RegisterModule(module);
            }
        }

        /// <summary>
        /// Runs the input through every registered submodule in order.
        /// Intermediate results are disposed as soon as the next module has
        /// consumed them; only the final result is returned to the caller.
        /// </summary>
        /// <exception cref="ArgumentException">Thrown when no submodules are registered.</exception>
        public override TorchTensor Forward(TorchTensor tensor)
        {
            if (!Modules.Any())
            {
                throw new ArgumentException("Cannot do forward pass over empty Sequence module.");
            }

            // NOTE(review): deconstructing Modules into (head, tail) relies on a
            // Deconstruct extension defined elsewhere in the project — verify
            // against ExtensionMethods.cs.
            var (head, tail) = Modules;
            var result = head.Forward(tensor);

            foreach (var module in tail)
            {
                // Dispose each intermediate tensor once it has been consumed.
                var tmp = module.Forward(result);
                result.Dispose();
                result = tmp;
            }

            return result;
        }

        /// <summary>
        /// Zeroes the gradients of every submodule.
        /// </summary>
        public override void ZeroGrad()
        {
            foreach (var module in Modules)
            {
                module.ZeroGrad();
            }
        }

        /// <summary>
        /// Returns the names of the registered submodules, in registration order.
        /// </summary>
        public override IEnumerable GetModules()
        {
            List result = new List();

            foreach (var module in Modules)
            {
                result.Add(module.GetName());
            }

            return result;
        }

        /// <summary>
        /// Puts every submodule in training mode.
        /// </summary>
        public override void Train()
        {
            foreach (var module in Modules)
            {
                module.Train();
            }
        }

        /// <summary>
        /// Puts every submodule in evaluation mode.
        /// </summary>
        public override void Eval()
        {
            foreach (var module in Modules)
            {
                module.Eval();
            }
        }
    }
79 | }
80 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp/Torch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.Object
10 |
11 |
12 |
13 | To be added.
14 | To be added.
15 |
16 |
17 |
18 |
19 |
20 | Method
21 |
22 | 1.0.0.0
23 |
24 |
25 | System.Boolean
26 |
27 |
28 |
29 | To be added.
30 | To be added.
31 | To be added.
32 |
33 |
34 |
35 |
36 |
37 | Method
38 |
39 | 1.0.0.0
40 |
41 |
42 | System.Void
43 |
44 |
45 |
46 |
47 |
48 | To be added.
49 | To be added.
50 | To be added.
51 |
52 |
53 |
54 |
55 |
--------------------------------------------------------------------------------
/tools-local/Microsoft.ML.InternalCodeAnalyzer/TypeParamNameAnalyzer.cs:
--------------------------------------------------------------------------------
1 | // Licensed to the .NET Foundation under one or more agreements.
2 | // The .NET Foundation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | using System.Collections.Immutable;
6 | using Microsoft.CodeAnalysis;
7 | using Microsoft.CodeAnalysis.CSharp;
8 | using Microsoft.CodeAnalysis.CSharp.Syntax;
9 | using Microsoft.CodeAnalysis.Diagnostics;
10 |
11 | namespace Microsoft.ML.InternalCodeAnalyzer
12 | {
    /// <summary>
    /// Roslyn analyzer enforcing that type parameter names start with 'T'
    /// followed by a PascalCased remainder (e.g. TItem).
    /// </summary>
    [DiagnosticAnalyzer(LanguageNames.CSharp)]
    public sealed class TypeParamNameAnalyzer : DiagnosticAnalyzer
    {
        private const string Category = "Naming";

        internal const string Id = "MSML_TypeParamName";
        private const string Title = "Type parameter name not standard";
        private const string Format = "Type parameter name '{0}' not standard";
        private const string Description =
            "Type parameter names should start with 'T' and the remainder PascalCased.";

        // Single warning-severity rule, enabled by default.
        private static DiagnosticDescriptor Rule =
            new DiagnosticDescriptor(Id, Title, Format, Category,
                DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Description);

        // NOTE(review): the generic argument of ImmutableArray appears to have
        // been lost in extraction (presumably ImmutableArray<DiagnosticDescriptor>).
        public override ImmutableArray SupportedDiagnostics =>
            ImmutableArray.Create(Rule);

        /// <summary>
        /// Registers the analyzer: generated code is skipped, and every type
        /// parameter syntax node is inspected.
        /// </summary>
        public override void Initialize(AnalysisContext context)
        {
            context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);
            context.RegisterSyntaxNodeAction(Analyze, SyntaxKind.TypeParameter);
        }

        // Reports a diagnostic unless the identifier starts with 'T' and the
        // remainder passes Utils.NameIsGood (PascalCase check from offset 1).
        private static void Analyze(SyntaxNodeAnalysisContext context)
        {
            var node = (TypeParameterSyntax)context.Node;
            var identifier = node.Identifier;
            var name = identifier.Text;
            if (name == null || (name.StartsWith("T") && Utils.NameIsGood(name, 1, true)))
                return;
            context.ReportDiagnostic(NameAnalyzer.CreateDiagnostic(Rule, identifier, NameType.TPascalCased));
        }
    }
47 | }
--------------------------------------------------------------------------------
/src/Examples/Examples.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp2.1
6 | false
7 | false
8 | TorchSharp.Examples.AlexNet
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
--------------------------------------------------------------------------------
/test/TorchSharpTest/TorchSharpTest.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | TorchSharpTests
5 | true
6 | false
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/Reduction.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.Enum
10 |
11 |
12 | To be added.
13 | To be added.
14 |
15 |
16 |
17 |
18 |
19 | Field
20 |
21 | 1.0.0.0
22 |
23 |
24 | TorchSharp.NN.Reduction
25 |
26 |
27 | To be added.
28 |
29 |
30 |
31 |
32 |
33 | Field
34 |
35 | 1.0.0.0
36 |
37 |
38 | TorchSharp.NN.Reduction
39 |
40 |
41 | To be added.
42 |
43 |
44 |
45 |
46 |
47 | Field
48 |
49 | 1.0.0.0
50 |
51 |
52 | TorchSharp.NN.Reduction
53 |
54 |
55 | To be added.
56 |
57 |
58 |
59 |
60 |
--------------------------------------------------------------------------------
/src/Native/probe-win.ps1:
--------------------------------------------------------------------------------
1 | # This file probes for the prerequisites for the build system, and outputs commands for eval'ing
2 | # from the cmd scripts to set variables (and exit on error)
3 |
# Returns the registry items under the 32- and 64-bit Kitware keys whose
# names start with "CMake " — one item per registered CMake installation.
# Missing registry keys are silently ignored.
function GetCMakeVersions
{
  $items = @()
  $items += @(Get-ChildItem hklm:\SOFTWARE\Wow6432Node\Kitware -ErrorAction SilentlyContinue)
  $items += @(Get-ChildItem hklm:\SOFTWARE\Kitware -ErrorAction SilentlyContinue)
  return $items | Where-Object { $_.PSChildName.StartsWith("CMake ") }
}
11 |
# Extracts @{version; path} from a CMake registry key, or $null when the key
# name has no parseable version or the cmake.exe it points at is missing.
function GetCMakeInfo($regKey)
{
    # This no longer works for versions 3.5+
    try {
        # Key names look like "CMake <version>"; take the second token.
        $version = [System.Version] $regKey.PSChildName.Split(' ')[1]
    }
    catch {
        return $null
    }
    $cmakeDir = (Get-ItemProperty $regKey.PSPath).'(default)'
    $cmakePath = [System.IO.Path]::Combine($cmakeDir, "bin\cmake.exe")
    if (![System.IO.File]::Exists($cmakePath)) {
        return $null
    }
    return @{'version' = $version; 'path' = $cmakePath}
}
28 |
# Locates cmake.exe: first on PATH, then in the default install directory,
# then the newest version found in the registry. Throws when none is found.
function LocateCMake
{
    $errorMsg = "CMake is a pre-requisite to build this repository but it was not found on the path. Please install CMake from https://www.cmake.org/download/ and ensure it is on your path."
    $inPathPath = (get-command cmake.exe -ErrorAction SilentlyContinue).Path
    # Put $null on the left of comparisons so an array-valued operand cannot
    # change the semantics (PSScriptAnalyzer: PossibleIncorrectComparisonWithNull).
    if ($null -ne $inPathPath) {
        return $inPathPath
    }
    # Check the default installation directory
    $inDefaultDir = [System.IO.Path]::Combine(${Env:ProgramFiles(x86)}, "CMake\bin\cmake.exe")
    if ([System.IO.File]::Exists($inDefaultDir)) {
        return $inDefaultDir
    }
    # Let us hope that CMake keep using their current version scheme
    $validVersions = @()
    foreach ($regKey in GetCMakeVersions) {
        $info = GetCMakeInfo($regKey)
        if ($null -ne $info) {
            $validVersions += @($info)
        }
    }
    # Pick the highest version among the registry-discovered installations.
    $newestCMakePath = ($validVersions |
        Sort-Object -property @{Expression={$_.version}; Ascending=$false} |
        Select-Object -first 1).path
    if ($null -eq $newestCMakePath) {
        Throw $errorMsg
    }
    return $newestCMakePath
}
57 |
# Entry point: emit "set CMakePath=<path>" for the calling cmd script to eval,
# or print the failure message to stderr and emit "exit /b 1" so the caller aborts.
try {
    $cmakePath = LocateCMake
    [System.Console]::WriteLine("set CMakePath=" + $cmakePath)
}
catch {
    [System.Console]::Error.WriteLine($_.Exception.Message)
    [System.Console]::WriteLine("exit /b 1")
}
66 |
--------------------------------------------------------------------------------
/tools-local/Microsoft.ML.InternalCodeAnalyzer/SingleVariableDeclarationAnalyzer.cs:
--------------------------------------------------------------------------------
1 | // Licensed to the .NET Foundation under one or more agreements.
2 | // The .NET Foundation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | using System.Collections.Immutable;
6 | using System.Linq;
7 | using Microsoft.CodeAnalysis;
8 | using Microsoft.CodeAnalysis.CSharp;
9 | using Microsoft.CodeAnalysis.CSharp.Syntax;
10 | using Microsoft.CodeAnalysis.Diagnostics;
11 |
12 | namespace Microsoft.ML.InternalCodeAnalyzer
13 | {
    /// <summary>
    /// Roslyn analyzer enforcing one variable per declaration statement
    /// (declarations inside a `for` initializer are exempt).
    /// </summary>
    [DiagnosticAnalyzer(LanguageNames.CSharp)]
    public sealed class SingleVariableDeclarationAnalyzer : DiagnosticAnalyzer
    {
        private const string Category = "Declaration";
        internal const string DiagnosticId = "MSML_SingleVariableDeclaration";

        private const string Title = "Have only a single variable present per declaration";
        private const string Format = "Variables '{0}' were all part of a single declaration, and should be broken up";
        private const string Description =
            "We prefer to have one variable per declaration.";

        // Single warning-severity rule, enabled by default.
        private static DiagnosticDescriptor Rule =
            new DiagnosticDescriptor(DiagnosticId, Title, Format, Category,
                DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Description);

        // NOTE(review): the generic argument of ImmutableArray appears to have
        // been lost in extraction (presumably ImmutableArray<DiagnosticDescriptor>).
        public override ImmutableArray SupportedDiagnostics =>
            ImmutableArray.Create(Rule);

        /// <summary>
        /// Registers the analyzer: generated code is skipped, and every
        /// variable declaration syntax node is inspected.
        /// </summary>
        public override void Initialize(AnalysisContext context)
        {
            context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);
            context.RegisterSyntaxNodeAction(Analyze, SyntaxKind.VariableDeclaration);
        }

        // Flags declarations containing more than one variable, except inside
        // a `for` statement's initializer, which is conventionally allowed.
        private static void Analyze(SyntaxNodeAnalysisContext context)
        {
            var node = (VariableDeclarationSyntax)context.Node;
            var vars = node.Variables;
            if (vars.Count <= 1 || node.Parent.IsKind(SyntaxKind.ForStatement))
                return;
            string jointVariableNames = string.Join("', '", vars.Select(v => v.Identifier.Text));
            var diagnostic = Diagnostic.Create(Rule, context.Node.GetLocation(), jointVariableNames);
            context.ReportDiagnostic(diagnostic);
        }
    }
49 | }
--------------------------------------------------------------------------------
/test/Test.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netcoreapp2.1
5 |
6 | false
7 |
8 |
9 |
10 | true
11 |
12 |
13 |
14 | x64
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 | TextTemplatingFileGenerator
32 | SNTUnitTestGenerator.cs
33 |
34 |
35 | TextTemplatingFileGenerator
36 | SNTUnitTestGenerator.cs
37 |
38 |
39 | TextTemplatingFileGenerator
40 | SNTUnitTestGenerator.cs
41 |
42 |
43 |
44 |
45 |
46 | True
47 | True
48 | SNTUnitTestGenerator.tt
49 |
50 |
51 | True
52 | True
53 | SNTUnitTestGenerator.tt
54 |
55 |
56 | True
57 | True
58 | SNTUnitTestGenerator.tt
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/LossFunction.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 | using TorchSharp.Tensor;
4 |
5 | namespace TorchSharp.NN
6 | {
    /// <summary>
    /// Class maintaining the supported loss functions.
    /// </summary>
    public class LossFunction
    {
        /// <summary>
        /// A loss function maps a (source, target) tensor pair to a loss tensor.
        /// </summary>
        public delegate TorchTensor Loss(TorchTensor source, TorchTensor target);

        [DllImport("LibTorchSharp")]
        private static extern IntPtr THSNN_lossBCE(IntPtr srct, IntPtr trgt, IntPtr wgt, long reduction);

        /// <summary>
        /// Binary cross entropy loss.
        /// </summary>
        /// <param name="weigths">Optional rescaling weight tensor; null passes no weight.
        /// (Parameter name misspelling kept: renaming would break callers using named arguments.)</param>
        /// <param name="reduction">How the element-wise losses are reduced.</param>
        public static Loss BCE(TorchTensor? weigths = null, Reduction reduction = Reduction.Mean)
        {
            return (TorchTensor src, TorchTensor target) => new TorchTensor(THSNN_lossBCE(src.Handle, target.Handle, weigths?.Handle ?? IntPtr.Zero, (long)reduction));
        }

        [DllImport("LibTorchSharp")]
        private static extern IntPtr THSNN_lossMSE(IntPtr srct, IntPtr trgt, long reduction);

        /// <summary>
        /// Mean squared error loss.
        /// </summary>
        /// <param name="reduction">How the element-wise losses are reduced.</param>
        public static Loss MSE(Reduction reduction = Reduction.Mean)
        {
            return (TorchTensor src, TorchTensor target) => new TorchTensor(THSNN_lossMSE(src.Handle, target.Handle, (long)reduction));
        }

        [DllImport("LibTorchSharp")]
        private static extern IntPtr THSNN_lossNLL(IntPtr srct, IntPtr trgt, IntPtr wgt, long reduction);

        /// <summary>
        /// Negative log likelihood loss.
        /// </summary>
        /// <param name="weigths">Optional rescaling weight tensor; null passes no weight.</param>
        /// <param name="reduction">How the element-wise losses are reduced.</param>
        public static Loss NLL(TorchTensor? weigths = null, Reduction reduction = Reduction.Mean)
        {
            return (TorchTensor src, TorchTensor target) => new TorchTensor(THSNN_lossNLL(src.Handle, target.Handle, weigths?.Handle ?? IntPtr.Zero, (long)reduction));
        }

        [DllImport("LibTorchSharp")]
        private static extern IntPtr THSNN_loss_poisson_nll(IntPtr srct, IntPtr trgt, bool logInput, bool full, float eps, long reduction);

        /// <summary>
        /// Poisson negative log likelihood loss.
        /// </summary>
        /// <param name="logInput">Whether the input is already in log-space.</param>
        /// <param name="full">Whether to include the Stirling approximation term.</param>
        /// <param name="eps">Small constant avoiding log(0).</param>
        /// <param name="reduction">How the element-wise losses are reduced.</param>
        public static Loss PoissonNLL(bool logInput = true, bool full = false, float eps = 1e-8f, Reduction reduction = Reduction.Mean)
        {
            return (TorchTensor src, TorchTensor target) =>
            {
                var tptr = THSNN_loss_poisson_nll(src.Handle, target.Handle, logInput, full, eps, (long)reduction);
                // NOTE(review): only this loss surfaces native errors; the other
                // losses above never call Torch.AssertNoErrors() — confirm whether
                // they should for consistency.
                Torch.AssertNoErrors();
                return new TorchTensor(tptr);
            };
        }
    }
51 |
/// <summary>
/// Specifies how element-wise losses are reduced to the final loss value.
/// The value is cast to long and passed straight to native code, so the numeric
/// codes must stay in sync with the native side (presumably libtorch's
/// reduction codes — TODO confirm).
/// </summary>
public enum Reduction : long
{
    /// <summary>No reduction is applied.</summary>
    None = 0,
    /// <summary>The losses are averaged.</summary>
    Mean = 1,
    /// <summary>The losses are summed.</summary>
    Sum = 2
}
58 | }
59 |
--------------------------------------------------------------------------------
/pkg/Directory.Build.props:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | true
6 | false
7 | false
8 |
9 | true
10 | $(MSBuildProjectName.Replace('.symbols', ''))
11 |
12 |
17 | $(IntermediateOutputRootPath)$(MSBuildProjectName).NupkgProj\
18 | $(IntermediateOutputPath)
19 |
20 |
21 |
22 |
23 | Microsoft
24 | LICENSE
25 | https://github.com/xamarin/TorchSharp
26 |
27 | TorchSharp LibTorch PyTorch Torch DL DNN Deep ML Machine Learning Neural Network
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 | None
52 |
53 |
54 |
55 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/ReLU.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | TorchSharp.NN.FunctionalModule<TorchSharp.NN.ReLU>
10 |
11 | TorchSharp.NN.ReLU
12 |
13 |
14 |
15 |
16 |
17 | This class is used to represent a ReLU module.
18 |
19 | To be added.
20 |
21 |
22 |
23 |
24 |
25 | Method
26 |
27 | 1.0.0.0
28 |
29 |
30 | TorchSharp.Tensor.TorchTensor
31 |
32 |
33 |
34 |
35 |
36 | To be added.
37 | To be added.
38 | To be added.
39 | To be added.
40 |
41 |
42 |
43 |
44 |
45 | Method
46 |
47 | 1.0.0.0
48 |
49 |
50 | System.String
51 |
52 |
53 |
54 | To be added.
55 | To be added.
56 | To be added.
57 |
58 |
59 |
60 |
61 |
--------------------------------------------------------------------------------
/src/TorchSharp/NN/Linear.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Runtime.InteropServices;
4 | using TorchSharp.Tensor;
5 |
6 | namespace TorchSharp.NN
7 | {
/// <summary>
/// A linear (fully-connected) layer backed by a native libtorch linear module.
/// NOTE(review): generic type arguments appear to have been stripped from this dump
/// (e.g. bare IEnumerable/List in Parameters below); compare against the original source.
/// </summary>
public class Linear : ProvidedModule
{
    /// <summary>
    /// Wraps an already-created native linear-module handle.
    /// </summary>
    public Linear(IntPtr handle) : base(handle)
    {
    }

    [DllImport("LibTorchSharp")]
    extern static IntPtr THSNN_linearModule(long input_size, long output_size, bool with_bias);

    /// <summary>
    /// Creates a new native linear module.
    /// </summary>
    /// <param name="inputSize">Number of input features.</param>
    /// <param name="outputSize">Number of output features.</param>
    /// <param name="hasBias">Whether the module carries a learnable bias; defaults to false.</param>
    public Linear(long inputSize, long outputSize, bool hasBias = false) : base()
    {
        handle = new HType(THSNN_linearModule(inputSize, outputSize, hasBias), true);
    }

    [DllImport("LibTorchSharp")]
    extern static bool THSNN_linear_with_bias(Module.HType module);

    /// <summary>
    /// True when the underlying native module has a bias term.
    /// </summary>
    public bool WithBias
    {
        get { return THSNN_linear_with_bias(handle); }
    }

    [DllImport("LibTorchSharp")]
    extern static IntPtr THSNN_linear_get_bias(Module.HType module);

    [DllImport("LibTorchSharp")]
    extern static void THSNN_linear_set_bias(Module.HType module, IntPtr tensor);

    /// <summary>
    /// The bias tensor, or null when the native module has none.
    /// Setting a null value throws <see cref="ArgumentNullException"/>.
    /// </summary>
    public TorchTensor? Bias
    {
        get
        {
            // A zero pointer from native code means "no bias present".
            var bias = THSNN_linear_get_bias(handle);
            return bias == IntPtr.Zero ? (TorchTensor?)null : new TorchTensor(bias);
        }
        set { THSNN_linear_set_bias(handle, value?.Handle ?? throw new ArgumentNullException("bias")); }
    }

    [DllImport("LibTorchSharp")]
    extern static IntPtr THSNN_linear_get_weight(Module.HType module);

    [DllImport("LibTorchSharp")]
    extern static void THSNN_linear_set_weight(Module.HType module, IntPtr tensor);

    /// <summary>
    /// The weight tensor of the linear module.
    /// </summary>
    public TorchTensor Weight
    {
        get
        {
            return new TorchTensor(THSNN_linear_get_weight(handle));
        }
        set { THSNN_linear_set_weight(handle, value.Handle); }
    }

    /// <summary>
    /// Returns the learnable parameters: the weight, plus the bias when present.
    /// </summary>
    public override IEnumerable Parameters()
    {
        var parameters = new List();

        parameters.Add(Weight);

        if (WithBias)
        {
            parameters.Add(Bias.Value);
        }
        return parameters;
    }

    [DllImport("LibTorchSharp")]
    extern static IntPtr THSNN_linearModuleApply(Module.HType module, IntPtr tensor);

    /// <summary>
    /// Applies the linear module to the input tensor via the native apply function.
    /// </summary>
    public override TorchTensor Forward(TorchTensor tensor)
    {
        return new TorchTensor(THSNN_linearModuleApply(handle, tensor.Handle));
    }
}
82 | }
83 |
--------------------------------------------------------------------------------
/docfx/api/TorchSharp.Tensor.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:ManagedReference
2 | items:
3 | - uid: TorchSharp.Tensor
4 | commentId: N:TorchSharp.Tensor
5 | id: TorchSharp.Tensor
6 | children:
7 | - TorchSharp.Tensor.ATenScalarMapping
8 | - TorchSharp.Tensor.ByteTensor
9 | - TorchSharp.Tensor.DoubleTensor
10 | - TorchSharp.Tensor.FloatTensor
11 | - TorchSharp.Tensor.IntTensor
12 | - TorchSharp.Tensor.LongTensor
13 | - TorchSharp.Tensor.ShortTensor
14 | - TorchSharp.Tensor.TensorExtensionMethods
15 | - TorchSharp.Tensor.TorchTensor
16 | langs:
17 | - csharp
18 | name: TorchSharp.Tensor
19 | nameWithType: TorchSharp.Tensor
20 | fullName: TorchSharp.Tensor
21 | type: Namespace
22 | references:
23 | - uid: TorchSharp.Tensor.ATenScalarMapping
24 | parent: TorchSharp.Tensor
25 | isExternal: false
26 | name: ATenScalarMapping
27 | nameWithType: TorchSharp.Tensor.ATenScalarMapping
28 | fullName: TorchSharp.Tensor.ATenScalarMapping
29 | type: enum
30 | - uid: TorchSharp.Tensor.ByteTensor
31 | parent: TorchSharp.Tensor
32 | isExternal: false
33 | name: ByteTensor
34 | nameWithType: TorchSharp.Tensor.ByteTensor
35 | fullName: TorchSharp.Tensor.ByteTensor
36 | type: class
37 | - uid: TorchSharp.Tensor.DoubleTensor
38 | parent: TorchSharp.Tensor
39 | isExternal: false
40 | name: DoubleTensor
41 | nameWithType: TorchSharp.Tensor.DoubleTensor
42 | fullName: TorchSharp.Tensor.DoubleTensor
43 | type: class
44 | - uid: TorchSharp.Tensor.FloatTensor
45 | parent: TorchSharp.Tensor
46 | isExternal: false
47 | name: FloatTensor
48 | nameWithType: TorchSharp.Tensor.FloatTensor
49 | fullName: TorchSharp.Tensor.FloatTensor
50 | type: class
51 | - uid: TorchSharp.Tensor.IntTensor
52 | parent: TorchSharp.Tensor
53 | isExternal: false
54 | name: IntTensor
55 | nameWithType: TorchSharp.Tensor.IntTensor
56 | fullName: TorchSharp.Tensor.IntTensor
57 | type: class
58 | - uid: TorchSharp.Tensor.LongTensor
59 | parent: TorchSharp.Tensor
60 | isExternal: false
61 | name: LongTensor
62 | nameWithType: TorchSharp.Tensor.LongTensor
63 | fullName: TorchSharp.Tensor.LongTensor
64 | type: class
65 | - uid: TorchSharp.Tensor.ShortTensor
66 | parent: TorchSharp.Tensor
67 | isExternal: false
68 | name: ShortTensor
69 | nameWithType: TorchSharp.Tensor.ShortTensor
70 | fullName: TorchSharp.Tensor.ShortTensor
71 | type: class
72 | - uid: TorchSharp.Tensor.TensorExtensionMethods
73 | parent: TorchSharp.Tensor
74 | isExternal: false
75 | name: TensorExtensionMethods
76 | nameWithType: TorchSharp.Tensor.TensorExtensionMethods
77 | fullName: TorchSharp.Tensor.TensorExtensionMethods
78 | type: class
79 | - uid: TorchSharp.Tensor.TorchTensor
80 | parent: TorchSharp.Tensor
81 | isExternal: false
82 | name: TorchTensor
83 | nameWithType: TorchSharp.Tensor.TorchTensor
84 | fullName: TorchSharp.Tensor.TorchTensor
85 | type: struct
86 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.JIT/TensorType.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | TorchSharp.JIT.Type
10 |
11 |
12 |
13 | To be added.
14 | To be added.
15 |
16 |
17 |
18 |
19 |
20 | Method
21 |
22 | 1.0.0.0
23 |
24 |
25 | System.String
26 |
27 |
28 |
29 | To be added.
30 | To be added.
31 | To be added.
32 |
33 |
34 |
35 |
36 |
37 | Method
38 |
39 | 1.0.0.0
40 |
41 |
42 | System.Int32
43 |
44 |
45 |
46 | To be added.
47 | To be added.
48 | To be added.
49 |
50 |
51 |
52 |
53 |
54 | Method
55 |
56 | 1.0.0.0
57 |
58 |
59 | TorchSharp.Tensor.ATenScalarMapping
60 |
61 |
62 |
63 | To be added.
64 | To be added.
65 | To be added.
66 |
67 |
68 |
69 |
70 |
--------------------------------------------------------------------------------
/tools-local/Microsoft.ML.InternalCodeAnalyzer/ParameterVariableNameAnalyzer.cs:
--------------------------------------------------------------------------------
1 | // Licensed to the .NET Foundation under one or more agreements.
2 | // The .NET Foundation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | using System.Collections.Immutable;
6 | using System.Linq;
7 | using Microsoft.CodeAnalysis;
8 | using Microsoft.CodeAnalysis.CSharp;
9 | using Microsoft.CodeAnalysis.CSharp.Syntax;
10 | using Microsoft.CodeAnalysis.Diagnostics;
11 |
12 | namespace Microsoft.ML.InternalCodeAnalyzer
13 | {
/// <summary>
/// Roslyn analyzer that flags parameters and local variables whose names are not
/// lowerCamelCased (delegates the actual name check to Utils.NameIsGood).
/// </summary>
[DiagnosticAnalyzer(LanguageNames.CSharp)]
public sealed class ParameterVariableNameAnalyzer : DiagnosticAnalyzer
{
    private const string Category = "Naming";

    // Diagnostic id surfaced to users (e.g. in suppressions).
    internal const string Id = "MSML_ParameterLocalVarName";
    private const string Title = "Parameter or local variable name not standard";
    // {0} = offending identifier, {1} = "parameter" or "local variable".
    private const string Format = "{1} name '{0}' not standard";
    private const string Description =
        "Parameter and local variable names should be lowerCamelCased.";

    private static DiagnosticDescriptor Rule =
        new DiagnosticDescriptor(Id, Title, Format, Category,
            DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Description);

    // NOTE(review): the generic argument of ImmutableArray appears stripped from
    // this dump (expected ImmutableArray<DiagnosticDescriptor>); confirm against
    // the original source.
    public override ImmutableArray SupportedDiagnostics =>
        ImmutableArray.Create(Rule);

    public override void Initialize(AnalysisContext context)
    {
        // Skip generated code; inspect every parameter and local declaration.
        context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);
        context.RegisterSyntaxNodeAction(AnalyzeParameter, SyntaxKind.Parameter);
        context.RegisterSyntaxNodeAction(AnalyzeLocal, SyntaxKind.LocalDeclarationStatement);
    }

    // Checks a single parameter's identifier.
    private static void AnalyzeParameter(SyntaxNodeAnalysisContext context)
    {
        var node = (ParameterSyntax)context.Node;
        AnalyzeCore(context, node.Identifier, "parameter");
    }

    // Checks every declarator in a local declaration (handles "int a, b;").
    private static void AnalyzeLocal(SyntaxNodeAnalysisContext context)
    {
        var node = (LocalDeclarationStatementSyntax)context.Node;
        foreach (var dec in node.DescendantNodesAndSelf().Where(s => s.IsKind(SyntaxKind.VariableDeclarator)))
            AnalyzeCore(context, ((VariableDeclaratorSyntax)dec).Identifier, "local variable");
    }

    // Reports a diagnostic when the identifier is not acceptably camelCased.
    private static void AnalyzeCore(SyntaxNodeAnalysisContext context, SyntaxToken identifier, string type)
    {
        var name = identifier.Text;
        if (name == null || Utils.NameIsGood(name, 0, false))
            return;
        context.ReportDiagnostic(NameAnalyzer.CreateDiagnostic(Rule, identifier, NameType.CamelCased, type));
    }
}
60 | }
--------------------------------------------------------------------------------
/tools-local/Microsoft.ML.InternalCodeAnalyzer/Utils.cs:
--------------------------------------------------------------------------------
1 | // Licensed to the .NET Foundation under one or more agreements.
2 | // The .NET Foundation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | namespace Microsoft.ML.InternalCodeAnalyzer
6 | {
internal static class Utils
{
    /// <summary>
    /// Checks whether a name is properly camelCased or PascalCased.
    /// Also disallows things like HTMLStream while preferring IOStream.
    /// </summary>
    /// <param name="name">The symbol name to analyze</param>
    /// <param name="min">The position in the name to start</param>
    /// <param name="upper">Whether it should be PascalCased</param>
    /// <returns>Whether this name is good</returns>
    public static bool NameIsGood(string name, int min, bool upper)
    {
        // C# naming guidelines allow initialisms of up to two letters to stay fully
        // upper-cased (IOStream is fine, HTMLStream is not), which — counting the
        // following capitalized letter — permits runs of at most three consecutive
        // upper-case characters inside the name.
        const int maxConsecutive = 3;

        // When a lower-case start is required, begin with a "saturated" run so that
        // an immediate upper-case letter fails the check.
        int upperRun = upper ? 0 : maxConsecutive;

        // Digits are treated like upper-case letters where they appear (one could
        // imagine counterexamples such as d3js, but we keep the simple rule).
        for (int pos = min; pos < name.Length; ++pos)
        {
            char ch = name[pos];

            // Anything other than letters and digits disqualifies the name outright.
            if (!char.IsLetterOrDigit(ch))
                return false;

            if (char.IsDigit(ch))
            {
                // A digit acts as an upper-case character but resets the run count.
                upper = false;
                upperRun = 0;
            }
            else if (char.IsUpper(ch))
            {
                upper = false;
                upperRun++;
                if (upperRun > maxConsecutive)
                    return false;
            }
            else
            {
                // Lower-case letter: illegal while an upper-case start is still expected.
                if (upper)
                    return false;
                upperRun = 0;
            }
        }

        // A name may not end on a full-length upper-case run: IOStream is fine, IOS is not.
        return upperRun < maxConsecutive;
    }
}
58 | }
59 |
--------------------------------------------------------------------------------
/docfx/api/toc.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:TableOfContent
2 | - name: global
3 | items:
4 | - uid: AllocatePinnedArray
5 | name: AllocatePinnedArray
6 | - uid: TorchSharp
7 | name: TorchSharp
8 | items:
9 | - uid: TorchSharp.AutoGradMode
10 | name: AutoGradMode
11 | - uid: TorchSharp.Scalar
12 | name: Scalar
13 | - uid: TorchSharp.ScalarExtensionMethods
14 | name: ScalarExtensionMethods
15 | - uid: TorchSharp.Torch
16 | name: Torch
17 | - uid: TorchSharp.Data
18 | name: TorchSharp.Data
19 | items:
20 | - uid: TorchSharp.Data.DataIterator
21 | name: DataIterator
22 | - uid: TorchSharp.Data.DataIterator.HType
23 | name: DataIterator.HType
24 | - uid: TorchSharp.Data.Loader
25 | name: Loader
26 | - uid: TorchSharp.JIT
27 | name: TorchSharp.JIT
28 | items:
29 | - uid: TorchSharp.JIT.DynamicType
30 | name: DynamicType
31 | - uid: TorchSharp.JIT.Module
32 | name: Module
33 | - uid: TorchSharp.JIT.TensorType
34 | name: TensorType
35 | - uid: TorchSharp.JIT.Type
36 | name: Type
37 | - uid: TorchSharp.NN
38 | name: TorchSharp.NN
39 | items:
40 | - uid: TorchSharp.NN.AdaptiveAvgPool2D
41 | name: AdaptiveAvgPool2D
42 | - uid: TorchSharp.NN.Conv2D
43 | name: Conv2D
44 | - uid: TorchSharp.NN.Dropout
45 | name: Dropout
46 | - uid: TorchSharp.NN.FeatureDropout
47 | name: FeatureDropout
48 | - uid: TorchSharp.NN.FunctionalModule`1
49 | name: FunctionalModule
50 | - uid: TorchSharp.NN.Init
51 | name: Init
52 | - uid: TorchSharp.NN.Linear
53 | name: Linear
54 | - uid: TorchSharp.NN.LogSoftMax
55 | name: LogSoftMax
56 | - uid: TorchSharp.NN.LossFunction
57 | name: LossFunction
58 | - uid: TorchSharp.NN.LossFunction.Loss
59 | name: LossFunction.Loss
60 | - uid: TorchSharp.NN.MaxPool2D
61 | name: MaxPool2D
62 | - uid: TorchSharp.NN.Module
63 | name: Module
64 | - uid: TorchSharp.NN.Optimizer
65 | name: Optimizer
66 | - uid: TorchSharp.NN.Parameter
67 | name: Parameter
68 | - uid: TorchSharp.NN.ProvidedModule
69 | name: ProvidedModule
70 | - uid: TorchSharp.NN.Reduction
71 | name: Reduction
72 | - uid: TorchSharp.NN.ReLU
73 | name: ReLU
74 | - uid: TorchSharp.NN.Sequential
75 | name: Sequential
76 | - uid: TorchSharp.Tensor
77 | name: TorchSharp.Tensor
78 | items:
79 | - uid: TorchSharp.Tensor.ATenScalarMapping
80 | name: ATenScalarMapping
81 | - uid: TorchSharp.Tensor.ByteTensor
82 | name: ByteTensor
83 | - uid: TorchSharp.Tensor.DoubleTensor
84 | name: DoubleTensor
85 | - uid: TorchSharp.Tensor.FloatTensor
86 | name: FloatTensor
87 | - uid: TorchSharp.Tensor.IntTensor
88 | name: IntTensor
89 | - uid: TorchSharp.Tensor.LongTensor
90 | name: LongTensor
91 | - uid: TorchSharp.Tensor.ShortTensor
92 | name: ShortTensor
93 | - uid: TorchSharp.Tensor.TensorExtensionMethods
94 | name: TensorExtensionMethods
95 | - uid: TorchSharp.Tensor.TorchTensor
96 | name: TorchTensor
97 |
--------------------------------------------------------------------------------
/test/Directory.Build.props:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | netcoreapp2.1
6 | net461
7 | netcoreapp3.0
8 | win-x64
9 | false
10 | true
11 |
12 |
19 | $(NoWarn),1573,1591,1712
20 |
21 |
22 |
23 | trx
24 | $(OutputPath)
25 | $(ToolsDir)Test.snk
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 | true
38 | true
39 | true
40 | opencover
41 | $(BaseOutputPath)$(PlatformConfig)\coverage\$(MSBuildProjectName).coverage
42 | [Microsoft.ML.*]*
43 |
46 | [*]Microsoft.ML.*Contracts*,[*]Microsoft.ML.Internal.Utilities*,[*]Microsoft.ML.Data.VBuffer*
47 | Obsolete,ExcludeFromCodeCoverage
48 | $(BaseOutputPath)..\src\Microsoft.ML.OnnxConverter\OnnxMl.cs,$(BaseOutputPath)..\src\Microsoft.ML.TensorFlow\TensorFlow\Buffer.cs,$(BaseOutputPath)..\src\Microsoft.ML.TensorFlow\TensorFlow\Tensor.cs,$(BaseOutputPath)..\src\Microsoft.ML.TensorFlow\TensorFlow\Tensorflow.cs
49 |
50 |
51 |
52 |
--------------------------------------------------------------------------------
/src/TorchSharp/JIT/Type/Type.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 |
4 | namespace TorchSharp.JIT
5 | {
/// <summary>
/// Managed wrapper around a TorchScript (JIT) type object.
/// Owns the native object through a SafeHandle and implements IDisposable.
/// </summary>
public class Type : IDisposable
{
    /// <summary>
    /// Class wrapping PyTorch's type object reference.
    /// </summary>
    internal sealed class HType : SafeHandle
    {
        public HType(IntPtr preexistingHandle, bool ownsHandle) : base(IntPtr.Zero, ownsHandle)
        {
            SetHandle(preexistingHandle);
        }

        public override bool IsInvalid => handle == IntPtr.Zero;

        // This is just for marshalling
        internal HType() : base(IntPtr.Zero, true)
        {
        }

        [DllImport("LibTorchSharp")]
        private static extern void THSJIT_typeDispose(HType handle);

        protected override bool ReleaseHandle()
        {
            // Hands the native type object back to LibTorchSharp for destruction.
            THSJIT_typeDispose(this);
            return true;
        }

        // NOTE(review): overriding SafeHandle.Dispose(bool) without calling
        // base.Dispose(disposing) bypasses SafeHandle's normal release/ref-count
        // machinery; confirm this is intentional.
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                ReleaseHandle();
            }
        }
    }

    internal HType handle;

    internal Type(IntPtr handle)
    {
        this.handle = new HType(handle, true);
    }

    // For use by subclasses that assign 'handle' themselves.
    protected Type()
    {
    }

    ~Type()
    {
        Dispose(false);
    }

    /// <summary>
    /// Releases the storage.
    /// </summary>
    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    /// <summary>
    /// Implements the .NET Dispose pattern.
    /// </summary>
    protected void Dispose(bool disposing)
    {
        if (disposing)
        {
            // Guard against a null handle: the protected constructor leaves
            // 'handle' unassigned until a subclass sets it, so disposing a
            // partially constructed instance must not throw.
            if (handle != null)
            {
                handle.Dispose();
                handle.SetHandleAsInvalid();
            }
        }
    }

    [DllImport("LibTorchSharp")]
    private static extern sbyte THSJIT_typeKind(HType handle);

    /// <summary>
    /// The kind (dynamic vs. tensor) of the underlying native type object.
    /// </summary>
    internal TypeKind Kind
    {
        get { return (TypeKind)THSJIT_typeKind(handle); }
    }

    [DllImport("LibTorchSharp")]
    private static extern IntPtr THSJIT_typeCast(HType module);

    /// <summary>
    /// Casts the native type object and wraps it as a TensorType.
    /// Callers are expected to check <see cref="Kind"/> first.
    /// </summary>
    internal TensorType AsTensorType()
    {
        return new TensorType(THSJIT_typeCast(handle));
    }

    /// <summary>
    /// Casts the native type object and wraps it as a DynamicType.
    /// Callers are expected to check <see cref="Kind"/> first.
    /// </summary>
    internal DynamicType AsDynamicType()
    {
        return new DynamicType(THSJIT_typeCast(handle));
    }

    // Must stay in sync with the native THSJIT_typeKind encoding.
    internal enum TypeKind : sbyte
    {
        DynamicType = 0,
        TensorType = 1
    }
}
107 | }
108 |
--------------------------------------------------------------------------------
/tools-local/Microsoft.ML.InternalCodeAnalyzer/BestFriendOnPublicDeclarationsAnalyzer.cs:
--------------------------------------------------------------------------------
1 | // Licensed to the .NET Foundation under one or more agreements.
2 | // The .NET Foundation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | using System.Collections.Generic;
6 | using System.Collections.Immutable;
7 | using System.Linq;
8 | using Microsoft.CodeAnalysis;
9 | using Microsoft.CodeAnalysis.Diagnostics;
10 |
11 | namespace Microsoft.ML.InternalCodeAnalyzer
12 | {
/// <summary>
/// Roslyn analyzer that warns when a publicly visible symbol carries a
/// BestFriend attribute (which only makes sense on internal members).
/// </summary>
[DiagnosticAnalyzer(LanguageNames.CSharp)]
public sealed class BestFriendOnPublicDeclarationsAnalyzer : DiagnosticAnalyzer
{
    private const string Category = "Access";
    internal const string DiagnosticId = "MSML_BestFriendOnPublicDeclaration";

    private const string Title = "Public declarations should not have " + AttributeName + " attribute.";
    private const string Format = "The " + AttributeName + " should not be applied to publicly visible members.";

    private const string Description =
        "The " + AttributeName + " attribute is not valid on public identifiers.";

    private static DiagnosticDescriptor Rule =
        new DiagnosticDescriptor(DiagnosticId, Title, Format, Category,
            DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Description);

    // Fully-qualified metadata name of the primary attribute being policed.
    private const string AttributeName = "Microsoft.ML.BestFriendAttribute";

    // NOTE(review): the generic argument of ImmutableArray appears stripped from
    // this dump (expected ImmutableArray<DiagnosticDescriptor>); confirm against
    // the original source.
    public override ImmutableArray SupportedDiagnostics =>
        ImmutableArray.Create(Rule);

    public override void Initialize(AnalysisContext context)
    {
        context.EnableConcurrentExecution();
        // Generated code is exempt from this rule.
        context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);

        context.RegisterCompilationStartAction(CompilationStart);
    }

    // Resolves each known BestFriend attribute type and registers a symbol action
    // only for attributes actually present in the current compilation.
    private void CompilationStart(CompilationStartAnalysisContext context)
    {
        var list = new List { AttributeName, "Microsoft.ML.Internal.CpuMath.Core.BestFriendAttribute" };

        foreach (var attributeName in list)
        {
            var attribute = context.Compilation.GetTypeByMetadataName(attributeName);

            // Attribute type not referenced by this compilation - nothing to check.
            if (attribute == null)
                continue;

            context.RegisterSymbolAction(c => AnalyzeCore(c, attribute), SymbolKind.NamedType, SymbolKind.Method, SymbolKind.Field, SymbolKind.Property);
        }
    }

    // Reports when a public symbol carries the given BestFriend attribute.
    private void AnalyzeCore(SymbolAnalysisContext context, INamedTypeSymbol attributeType)
    {
        if (context.Symbol.DeclaredAccessibility != Accessibility.Public)
            return;

        var attribute = context.Symbol.GetAttributes().FirstOrDefault(a => a.AttributeClass == attributeType);
        if (attribute == null)
            return;

        // Point the diagnostic at the attribute application itself, not the symbol.
        var diagnostic = Diagnostic.Create(Rule, attribute.ApplicationSyntaxReference.GetSyntax().GetLocation(), context.Symbol.Name);
        context.ReportDiagnostic(diagnostic);
    }
}
70 | }
--------------------------------------------------------------------------------
/docfx/api/TorchSharp.NN/TorchSharp.NN.Reduction.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:ManagedReference
2 | items:
3 | - uid: TorchSharp.NN.Reduction
4 | id: Reduction
5 | children:
6 | - TorchSharp.NN.Reduction.Mean
7 | - TorchSharp.NN.Reduction.None
8 | - TorchSharp.NN.Reduction.Sum
9 | langs:
10 | - csharp
11 | name: Reduction
12 | nameWithType: TorchSharp.NN.Reduction
13 | fullName: TorchSharp.NN.Reduction
14 | type: Enum
15 | assemblies:
16 | - TorchSharp
17 | namespace: TorchSharp.NN
18 | syntax:
19 | content: public enum Reduction
20 | content.csharp: public enum Reduction
21 | inheritance:
22 | - System.Enum
23 | modifiers.csharp:
24 | - public
25 | - uid: TorchSharp.NN.Reduction.Mean
26 | id: Mean
27 | parent: TorchSharp.NN.Reduction
28 | langs:
29 | - csharp
30 | name: Mean
31 | nameWithType: Reduction.Mean
32 | fullName: TorchSharp.NN.Reduction.Mean
33 | type: Field
34 | assemblies:
35 | - TorchSharp
36 | namespace: TorchSharp.NN
37 | syntax:
38 | content: Mean
39 | return:
40 | type: TorchSharp.NN.Reduction
41 | description: ''
42 | content.csharp: Mean
43 | exceptions: []
44 | - uid: TorchSharp.NN.Reduction.None
45 | id: None
46 | parent: TorchSharp.NN.Reduction
47 | langs:
48 | - csharp
49 | name: None
50 | nameWithType: Reduction.None
51 | fullName: TorchSharp.NN.Reduction.None
52 | type: Field
53 | assemblies:
54 | - TorchSharp
55 | namespace: TorchSharp.NN
56 | syntax:
57 | content: None
58 | return:
59 | type: TorchSharp.NN.Reduction
60 | description: ''
61 | content.csharp: None
62 | exceptions: []
63 | - uid: TorchSharp.NN.Reduction.Sum
64 | id: Sum
65 | parent: TorchSharp.NN.Reduction
66 | langs:
67 | - csharp
68 | name: Sum
69 | nameWithType: Reduction.Sum
70 | fullName: TorchSharp.NN.Reduction.Sum
71 | type: Field
72 | assemblies:
73 | - TorchSharp
74 | namespace: TorchSharp.NN
75 | syntax:
76 | content: Sum
77 | return:
78 | type: TorchSharp.NN.Reduction
79 | description: ''
80 | content.csharp: Sum
81 | exceptions: []
82 | references:
83 | - uid: TorchSharp.NN
84 | commentId: N:TorchSharp.NN
85 | isExternal: false
86 | name: TorchSharp.NN
87 | nameWithType: TorchSharp.NN
88 | fullName: TorchSharp.NN
89 | type: namespace
90 | - uid: System.Enum
91 | parent: System
92 | isExternal: true
93 | name: Enum
94 | nameWithType: Enum
95 | fullName: System.Enum
96 | - uid: TorchSharp.NN.Reduction.Mean
97 | parent: TorchSharp.NN.Reduction
98 | isExternal: false
99 | name: Mean
100 | nameWithType: Reduction.Mean
101 | fullName: TorchSharp.NN.Reduction.Mean
102 | type: field
103 | - uid: TorchSharp.NN.Reduction
104 | parent: TorchSharp.NN
105 | isExternal: false
106 | name: Reduction
107 | nameWithType: Reduction
108 | fullName: TorchSharp.NN.Reduction
109 | - uid: TorchSharp.NN.Reduction.None
110 | parent: TorchSharp.NN.Reduction
111 | isExternal: false
112 | name: None
113 | nameWithType: Reduction.None
114 | fullName: TorchSharp.NN.Reduction.None
115 | type: field
116 | - uid: TorchSharp.NN.Reduction.Sum
117 | parent: TorchSharp.NN.Reduction
118 | isExternal: false
119 | name: Sum
120 | nameWithType: Reduction.Sum
121 | fullName: TorchSharp.NN.Reduction.Sum
122 | type: field
123 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/THSData.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include "../Stdafx.h"
4 |
5 | #include "Utils.h"
6 |
// Inter-op classes.

// Base non-generic iterator class. Used to communicate with C# without
// exposing the concrete iterator's template parameters.
class DatasetIteratorBase
{
public:
    explicit
    DatasetIteratorBase() {}
    // Total size of the dataset (units per THSData_size's comment; presumably
    // bytes — TODO confirm).
    virtual size_t getSize() = 0;
    // Advances the iterator; returns whether another element is available.
    virtual bool moveNext() = 0;
    // Writes the current data/target tensors into the supplied out-pointers.
    virtual void current(Tensor* data, Tensor* target) = 0;
    // Restarts iteration from the beginning.
    virtual void reset() = 0;
    virtual ~DatasetIteratorBase() {}
};
21 |
// Generic version of the iterator class, wrapping a torch::data loader iterator.
// NOTE(review): the template parameter lists and angle-bracket arguments below
// ("template", "Iterator>", "shared_ptr") appear to have been stripped during
// extraction; restore them from the original header.
template
class DatasetIterator : public DatasetIteratorBase
{
public:
    // Takes the loader's current iterator, the dataset size, and a shared
    // pointer that keeps the underlying loader alive for this object's lifetime.
    DatasetIterator(
        torch::data::Iterator> i,
        size_t s,
        std::shared_ptr l) :
        DatasetIteratorBase(),
        currentIter(torch::data::Iterator>(i)),
        size(s),
        loaderPointer(l) {}

    size_t getSize();
    bool moveNext();
    void current(Tensor* data, Tensor* target);
    void reset();

private:
    // Declared before currentIter so it is destroyed after it, keeping the
    // loader alive while the iterator is torn down.
    std::shared_ptr loaderPointer;
    torch::data::Iterator> currentIter;
    size_t size;
};
46 |
// Class-related methods.

// Get the total size of the input dataset, as stored at construction time.
template
inline size_t DatasetIterator::getSize()
{
    return size;
}

// Advance the iterator; returns false once the underlying loader is exhausted.
template
inline bool DatasetIterator::moveNext()
{
    ++currentIter;

    return currentIter != loaderPointer->end();
}

// Get the current example pointed to by the iterator. Allocates new tensor
// copies into the data/target out-pointers; ownership passes to the caller.
template
inline void DatasetIterator::current(Tensor* data, Tensor* target)
{
    data[0] = new torch::Tensor(currentIter->data);
    target[0] = new torch::Tensor(currentIter->target);
}

// Reset the iterator to start from the beginning of the loader.
template
inline void DatasetIterator::reset()
{
    currentIter = loaderPointer->begin();
}
79 |
// API.

// Load an MNIST dataset from a directory.
EXPORT_API(DatasetIteratorBase *) THSData_loaderMNIST(
    const char* filename,
    int64_t batchSize,
    bool isTrain);

// Load a CIFAR10 dataset from a directory.
// (Original comment said MNIST; the function clearly targets CIFAR10.)
EXPORT_API(DatasetIteratorBase *) THSData_loaderCIFAR10(
    const char* filename,
    int64_t batchSize,
    bool isTrain);

// Gets the size in bytes of some dataset wrapped as iterator.
EXPORT_API(size_t) THSData_size(DatasetIteratorBase * iterator);

// Advances the pointer of the target iterator.
EXPORT_API(bool) THSData_moveNext(DatasetIteratorBase * iterator);

// Gets the current data and target tensors pointed to by the iterator.
EXPORT_API(void) THSData_current(DatasetIteratorBase * iterator, Tensor* data, Tensor* target);

// Resets the iterator.
EXPORT_API(void) THSData_reset(DatasetIteratorBase * iterator);

// Disposes the iterator.
EXPORT_API(void) THSData_dispose(DatasetIteratorBase * iterator);
108 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://migueldeicaza.visualstudio.com/TorchSharp/_build/latest?definitionId=5)
2 |
3 | TorchSharp
4 | ==========
5 |
TorchSharp is a .NET library that provides access to the library that powers
PyTorch. It is a work in progress, but it already provides a .NET API that can
be used to perform (1) various operations on ATen tensors; (2) scoring of
TorchScript models; and (3) training of simple neural networks.
10 |
11 | Our current focus is to bind the entire API surfaced by libtorch.
12 |
13 | Things that you can try:
14 |
15 | ```csharp
16 | using AtenSharp;
17 |
18 | var x = new FloatTensor (100); // 1D-tensor with 100 elements
19 | FloatTensor result = new FloatTensor (100);
20 |
21 | FloatTensor.Add (x, 23, result);
22 |
23 | Console.WriteLine (x [12]);
24 | ```
25 |
26 | Discussions
27 | ===========
28 |
29 | We have a chat room on Gitter [](https://gitter.im/xamarin/TorchSharp?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
30 |
31 | Building
32 | ============
33 |
34 |
35 | Windows
36 | -----------------------------
37 |
38 | Requirements:
39 | - Visual Studio
40 | - git
41 | - cmake (tested with 3.14)
42 |
43 | Commands:
44 | - Building: `build.cmd`
45 | - Building from Visual Studio: first build using the command line
46 | - See all configurations: `build.cmd -?`
47 | - Run tests from command line: `build.cmd -runtests`
48 | - Build packages: `build.cmd -buildpackages`
49 |
50 |
51 | Linux/Mac
52 | -----------------------------
53 | Requirements:
54 | - requirements to run .NET Core 2.0
55 | - git
56 | - cmake (tested with 3.14)
57 | - clang 3.9
58 |
59 | Example to fulfill the requirements in Ubuntu 16:
60 | ```
61 | sudo apt-get update
62 | sudo apt-get install git clang cmake libunwind8 curl
63 | sudo apt-get install libssl1.0.0
64 | sudo apt-get install libomp-dev
65 | ```
66 |
67 | Commands:
68 | - Building: `./build.sh`
69 | - Building from Visual Studio: first build using the command line
70 | - See all configurations: `./build.sh -?`
71 | - Run tests from command line: `./build.sh -runtests`
72 | - Build packages: `./build.sh -buildpackages`
73 |
74 | Updating package version for new release
75 | -----------------------------
76 | To change the package version update this [file](https://github.com/xamarin/TorchSharp/blob/master/build/BranchInfo.props).
77 | Everything is currently considered in preview.
78 |
79 | Use the following two MSBuild arguments in order to control the -preview and the build numbers in the name of the nuget packages produced (use one of the two generally):
80 |
81 | |Name | Value| Example Version Output|
82 | |---|---|---|
83 | |StabilizePackageVersion | true | 1.0.0|
84 | |IncludeBuildNumberInPackageVersion | false | 1.0.0-preview|
85 |
86 | Sample command (Windows): `build.cmd -release -buildpackages -- /p:StabilizePackageVersion=true`
87 |
88 | GPU support
89 | ============
90 | For GPU support it is required to install CUDA 9.0 and make it available to the dynamic linker.
91 |
92 | Examples
93 | ===========
94 | Porting of the most famous network architectures to TorchSharp is in progress. For the moment we only support [MNIST](https://github.com/xamarin/TorchSharp/blob/master/src/Examples/MNIST.cs) and [AlexNet](https://github.com/xamarin/TorchSharp/blob/master/src/Examples/AlexNet.cs).
95 |
--------------------------------------------------------------------------------
/dir.traversal.targets:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | $(MSBuildProjectDefaultTargets)
7 |
8 |
9 |
11 |
16 |
17 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 | Clean
31 |
32 |
33 |
35 |
40 |
41 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 | BuildAllProjects;
55 | $(TraversalBuildDependsOn);
56 |
57 |
58 |
59 | CleanAllProjects;
60 | $(TraversalCleanDependsOn);
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.JIT/Type.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.Object
10 |
11 |
12 |
13 | System.IDisposable
14 |
15 |
16 |
17 | To be added.
18 | To be added.
19 |
20 |
21 |
22 |
23 |
24 | Constructor
25 |
26 | 1.0.0.0
27 |
28 |
29 |
30 | To be added.
31 | To be added.
32 |
33 |
34 |
35 |
36 |
37 | Method
38 |
39 | 1.0.0.0
40 |
41 |
42 | System.Void
43 |
44 |
45 |
46 |
47 | Releases the storage.
48 |
49 | To be added.
50 |
51 |
52 |
53 |
54 |
55 | Method
56 |
57 | 1.0.0.0
58 |
59 |
60 | System.Void
61 |
62 |
63 |
64 |
65 |
66 | To be added.
67 |
68 | Implements the .NET Dispose pattern.
69 |
70 | To be added.
71 |
72 |
73 |
74 |
75 |
76 | Method
77 |
78 | 1.0.0.0
79 |
80 |
81 | System.Void
82 |
83 |
84 |
85 | To be added.
86 | To be added.
87 |
88 |
89 |
90 |
91 |
--------------------------------------------------------------------------------
/src/TorchSharp/Scalar.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Runtime.InteropServices;
3 |
4 | namespace TorchSharp
5 | {
6 | public sealed class Scalar : IDisposable
7 | {
8 | internal IntPtr Handle { get; private set; }
9 |
10 | internal Scalar(IntPtr handle)
11 | {
12 | Handle = handle;
13 | }
14 |
15 | public static implicit operator Scalar(byte value)
16 | {
17 | return value.ToScalar();
18 | }
19 |
20 | public static implicit operator Scalar(short value)
21 | {
22 | return value.ToScalar();
23 | }
24 |
25 | public static implicit operator Scalar(int value)
26 | {
27 | return value.ToScalar();
28 | }
29 |
30 | public static implicit operator Scalar(long value)
31 | {
32 | return value.ToScalar();
33 | }
34 |
35 | public static implicit operator Scalar(float value)
36 | {
37 | return value.ToScalar();
38 | }
39 |
40 | public static implicit operator Scalar(double value)
41 | {
42 | return value.ToScalar();
43 | }
44 |
45 | ///
46 | /// Releases the storage.
47 | ///
48 | public void Dispose()
49 | {
50 | Dispose(true);
51 | GC.SuppressFinalize(this);
52 | }
53 |
54 | [DllImport("LibTorchSharp")]
55 | extern static void THSThorch_dispose_scalar(IntPtr handle);
56 |
57 | ///
58 | /// Implements the .NET Dispose pattern.
59 | ///
60 | internal void Dispose(bool disposing)
61 | {
62 | if (disposing)
63 | {
64 | THSThorch_dispose_scalar(Handle);
65 | Handle = IntPtr.Zero;
66 | }
67 | }
68 | }
69 |
70 | public static class ScalarExtensionMethods
71 | {
72 | [DllImport("LibTorchSharp")]
73 | extern static IntPtr THSTorch_btos(byte hanvaluedle);
74 |
75 | public static Scalar ToScalar(this byte value)
76 | {
77 | return new Scalar(THSTorch_btos(value));
78 | }
79 |
80 | [DllImport("LibTorchSharp")]
81 | extern static IntPtr THSTorch_stos(short hanvaluedle);
82 |
83 | public static Scalar ToScalar(this short value)
84 | {
85 | return new Scalar(THSTorch_stos(value));
86 | }
87 |
88 | [DllImport("LibTorchSharp")]
89 | extern static IntPtr THSTorch_itos(int hanvaluedle);
90 |
91 | public static Scalar ToScalar(this int value)
92 | {
93 | return new Scalar(THSTorch_itos(value));
94 | }
95 |
96 | [DllImport("LibTorchSharp")]
97 | extern static IntPtr THSTorch_ltos(long hanvaluedle);
98 |
99 | public static Scalar ToScalar(this long value)
100 | {
101 | return new Scalar(THSTorch_ltos(value));
102 | }
103 |
104 | [DllImport("LibTorchSharp")]
105 | extern static IntPtr THSTorch_ftos(float hanvaluedle);
106 |
107 | public static Scalar ToScalar(this float value)
108 | {
109 | return new Scalar(THSTorch_ftos(value));
110 | }
111 |
112 | [DllImport("LibTorchSharp")]
113 | extern static IntPtr THSTorch_dtos(double hanvaluedle);
114 |
115 | public static Scalar ToScalar(this double value)
116 | {
117 | return new Scalar(THSTorch_dtos(value));
118 | }
119 | }
120 | }
121 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp/AutoGradMode.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.Object
10 |
11 |
12 |
13 | System.IDisposable
14 |
15 |
16 |
17 | To be added.
18 | To be added.
19 |
20 |
21 |
22 |
23 |
24 | Constructor
25 |
26 | 1.0.0.0
27 |
28 |
29 |
30 |
31 |
32 | To be added.
33 | To be added.
34 | To be added.
35 |
36 |
37 |
38 |
39 |
40 | Method
41 |
42 | 1.0.0.0
43 |
44 |
45 | System.Void
46 |
47 |
48 |
49 | To be added.
50 | To be added.
51 |
52 |
53 |
54 |
55 |
56 | Method
57 |
58 | 1.0.0.0
59 |
60 |
61 | System.Void
62 |
63 |
64 |
65 |
66 |
67 | To be added.
68 | To be added.
69 | To be added.
70 |
71 |
72 |
73 |
74 |
75 | Method
76 |
77 | 1.0.0.0
78 |
79 |
80 | System.Boolean
81 |
82 |
83 |
84 | To be added.
85 | To be added.
86 | To be added.
87 |
88 |
89 |
90 |
91 |
--------------------------------------------------------------------------------
/docfx/api/TorchSharp/TorchSharp.TorchHandle.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:ManagedReference
2 | items:
3 | - uid: TorchSharp.TorchHandle
4 | id: TorchHandle
5 | children:
6 | - TorchSharp.TorchHandle.#ctor(System.IntPtr,System.Boolean)
7 | - TorchSharp.TorchHandle.ReleaseHandle
8 | langs:
9 | - csharp
10 | name: TorchHandle
11 | nameWithType: TorchHandle
12 | fullName: TorchSharp.TorchHandle
13 | type: Class
14 | assemblies:
15 | - TorchSharp
16 | namespace: TorchSharp
17 | syntax:
18 | content: 'public sealed class TorchHandle : Microsoft.Win32.SafeHandles.SafeHandleZeroOrMinusOneIsInvalid'
19 | inheritance:
20 | - Microsoft.Win32.SafeHandles.SafeHandleZeroOrMinusOneIsInvalid
21 | implements: []
22 | inheritedMembers: []
23 | - uid: TorchSharp.TorchHandle.#ctor(System.IntPtr,System.Boolean)
24 | id: '#ctor(System.IntPtr,System.Boolean)'
25 | parent: TorchSharp.TorchHandle
26 | langs:
27 | - csharp
28 | name: TorchHandle(IntPtr, Boolean)
29 | nameWithType: TorchHandle.TorchHandle(IntPtr, Boolean)
30 | fullName: TorchHandle.TorchHandle(IntPtr, Boolean)
31 | type: Constructor
32 | assemblies:
33 | - TorchSharp
34 | namespace: TorchSharp
35 | syntax:
36 | content: public TorchHandle (IntPtr preexistingHandle, bool ownsHandle);
37 | parameters:
38 | - id: preexistingHandle
39 | type: System.IntPtr
40 | description: To be added.
41 | - id: ownsHandle
42 | type: System.Boolean
43 | description: To be added.
44 | overload: TorchSharp.TorchHandle.#ctor*
45 | exceptions: []
46 | - uid: TorchSharp.TorchHandle.ReleaseHandle
47 | id: ReleaseHandle
48 | parent: TorchSharp.TorchHandle
49 | langs:
50 | - csharp
51 | name: ReleaseHandle()
52 | nameWithType: TorchHandle.ReleaseHandle()
53 | fullName: TorchHandle.ReleaseHandle()
54 | type: Method
55 | assemblies:
56 | - TorchSharp
57 | namespace: TorchSharp
58 | syntax:
59 | content: protected override bool ReleaseHandle ();
60 | parameters: []
61 | return:
62 | type: System.Boolean
63 | description: To be added.
64 | overload: TorchSharp.TorchHandle.ReleaseHandle*
65 | exceptions: []
66 | references:
67 | - uid: Microsoft.Win32.SafeHandles.SafeHandleZeroOrMinusOneIsInvalid
68 | parent: Microsoft.Win32.SafeHandles
69 | isExternal: true
70 | name: SafeHandleZeroOrMinusOneIsInvalid
71 | nameWithType: SafeHandleZeroOrMinusOneIsInvalid
72 | fullName: Microsoft.Win32.SafeHandles.SafeHandleZeroOrMinusOneIsInvalid
73 | - uid: TorchSharp.TorchHandle.#ctor(System.IntPtr,System.Boolean)
74 | parent: TorchSharp.TorchHandle
75 | isExternal: false
76 | name: TorchHandle(IntPtr, Boolean)
77 | nameWithType: TorchHandle.TorchHandle(IntPtr, Boolean)
78 | fullName: TorchHandle.TorchHandle(IntPtr, Boolean)
79 | - uid: System.IntPtr
80 | parent: System
81 | isExternal: true
82 | name: IntPtr
83 | nameWithType: IntPtr
84 | fullName: System.IntPtr
85 | - uid: System.Boolean
86 | parent: System
87 | isExternal: true
88 | name: Boolean
89 | nameWithType: Boolean
90 | fullName: System.Boolean
91 | - uid: TorchSharp.TorchHandle.ReleaseHandle
92 | parent: TorchSharp.TorchHandle
93 | isExternal: false
94 | name: ReleaseHandle()
95 | nameWithType: TorchHandle.ReleaseHandle()
96 | fullName: TorchHandle.ReleaseHandle()
97 | - uid: TorchSharp.TorchHandle.#ctor*
98 | parent: TorchSharp.TorchHandle
99 | isExternal: false
100 | name: TorchHandle
101 | nameWithType: TorchHandle.TorchHandle
102 | fullName: TorchHandle.TorchHandle
103 | - uid: TorchSharp.TorchHandle.ReleaseHandle*
104 | parent: TorchSharp.TorchHandle
105 | isExternal: false
106 | name: ReleaseHandle
107 | nameWithType: TorchHandle.ReleaseHandle
108 | fullName: TorchHandle.ReleaseHandle
109 |
--------------------------------------------------------------------------------
/docfx/api/TorchSharp/TorchSharp.Torch.yml:
--------------------------------------------------------------------------------
1 | ### YamlMime:ManagedReference
2 | items:
3 | - uid: TorchSharp.Torch
4 | id: Torch
5 | children:
6 | - TorchSharp.Torch.IsCudaAvailable
7 | - TorchSharp.Torch.SetSeed(System.Int64)
8 | langs:
9 | - csharp
10 | name: Torch
11 | nameWithType: TorchSharp.Torch
12 | fullName: TorchSharp.Torch
13 | type: Class
14 | assemblies:
15 | - TorchSharp
16 | namespace: TorchSharp
17 | syntax:
18 | content: public static class Torch
19 | content.csharp: public static class Torch
20 | inheritance:
21 | - System.Object
22 | implements: []
23 | modifiers.csharp:
24 | - public
25 | - static
26 | - uid: TorchSharp.Torch.IsCudaAvailable
27 | id: IsCudaAvailable
28 | parent: TorchSharp.Torch
29 | langs:
30 | - csharp
31 | name: IsCudaAvailable()
32 | nameWithType: Torch.IsCudaAvailable()
33 | fullName: TorchSharp.Torch.IsCudaAvailable()
34 | type: Method
35 | assemblies:
36 | - TorchSharp
37 | namespace: TorchSharp
38 | syntax:
39 | content: public static bool IsCudaAvailable ();
40 | parameters: []
41 | return:
42 | type: System.Boolean
43 | description: ''
44 | content.csharp: public static bool IsCudaAvailable ();
45 | overload: TorchSharp.Torch.IsCudaAvailable*
46 | exceptions: []
47 | modifiers.csharp:
48 | - public
49 | - static
50 | - uid: TorchSharp.Torch.SetSeed(System.Int64)
51 | id: SetSeed(System.Int64)
52 | parent: TorchSharp.Torch
53 | langs:
54 | - csharp
55 | name: SetSeed(Int64)
56 | nameWithType: Torch.SetSeed(Int64)
57 | fullName: TorchSharp.Torch.SetSeed(Int64)
58 | type: Method
59 | assemblies:
60 | - TorchSharp
61 | namespace: TorchSharp
62 | syntax:
63 | content: public static void SetSeed (long seed);
64 | parameters:
65 | - id: seed
66 | type: System.Int64
67 | description: ''
68 | content.csharp: public static void SetSeed (long seed);
69 | overload: TorchSharp.Torch.SetSeed*
70 | exceptions: []
71 | modifiers.csharp:
72 | - public
73 | - static
74 | references:
75 | - uid: TorchSharp
76 | commentId: N:TorchSharp
77 | isExternal: false
78 | name: TorchSharp
79 | nameWithType: TorchSharp
80 | fullName: TorchSharp
81 | type: namespace
82 | - uid: System.Object
83 | parent: System
84 | isExternal: true
85 | name: Object
86 | nameWithType: Object
87 | fullName: System.Object
88 | - uid: TorchSharp.Torch.IsCudaAvailable
89 | parent: TorchSharp.Torch
90 | isExternal: false
91 | name: IsCudaAvailable()
92 | nameWithType: Torch.IsCudaAvailable()
93 | fullName: TorchSharp.Torch.IsCudaAvailable()
94 | type: method
95 | - uid: System.Boolean
96 | parent: System
97 | isExternal: true
98 | name: Boolean
99 | nameWithType: Boolean
100 | fullName: System.Boolean
101 | - uid: TorchSharp.Torch.SetSeed(System.Int64)
102 | parent: TorchSharp.Torch
103 | isExternal: false
104 | name: SetSeed(Int64)
105 | nameWithType: Torch.SetSeed(Int64)
106 | fullName: TorchSharp.Torch.SetSeed(Int64)
107 | type: method
108 | - uid: System.Int64
109 | parent: System
110 | isExternal: true
111 | name: Int64
112 | nameWithType: Int64
113 | fullName: System.Int64
114 | - uid: TorchSharp.Torch.IsCudaAvailable*
115 | commentId: Overload:TorchSharp.Torch.IsCudaAvailable
116 | parent: TorchSharp.Torch
117 | isExternal: false
118 | name: IsCudaAvailable
119 | nameWithType: Torch.IsCudaAvailable
120 | fullName: TorchSharp.Torch.IsCudaAvailable
121 | type: method
122 | - uid: TorchSharp.Torch.SetSeed*
123 | commentId: Overload:TorchSharp.Torch.SetSeed
124 | parent: TorchSharp.Torch
125 | isExternal: false
126 | name: SetSeed
127 | nameWithType: Torch.SetSeed
128 | fullName: TorchSharp.Torch.SetSeed
129 | type: method
130 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/Parameter.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.ValueType
10 |
11 |
12 |
13 | To be added.
14 | To be added.
15 |
16 |
17 |
18 |
19 |
20 | Constructor
21 |
22 | 1.0.0.0
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 | To be added.
31 | To be added.
32 | To be added.
33 | To be added.
34 | To be added.
35 |
36 |
37 |
38 |
39 |
40 | Property
41 |
42 | 1.0.0.0
43 |
44 |
45 | System.String
46 |
47 |
48 | To be added.
49 | To be added.
50 | To be added.
51 |
52 |
53 |
54 |
55 |
56 | Property
57 |
58 | 1.0.0.0
59 |
60 |
61 | TorchSharp.Tensor.TorchTensor
62 |
63 |
64 | To be added.
65 | To be added.
66 | To be added.
67 |
68 |
69 |
70 |
71 |
72 | Property
73 |
74 | 1.0.0.0
75 |
76 |
77 | System.Boolean
78 |
79 |
80 | To be added.
81 | To be added.
82 | To be added.
83 |
84 |
85 |
86 |
87 |
--------------------------------------------------------------------------------
/tools-local/Microsoft.ML.InternalCodeAnalyzer/InstanceInitializerAnalyzer.cs:
--------------------------------------------------------------------------------
1 | // Licensed to the .NET Foundation under one or more agreements.
2 | // The .NET Foundation licenses this file to you under the MIT license.
3 | // See the LICENSE file in the project root for more information.
4 |
5 | using System.Collections.Immutable;
6 | using System.Linq;
7 | using System.Reflection;
8 | using Microsoft.CodeAnalysis;
9 | using Microsoft.CodeAnalysis.CSharp;
10 | using Microsoft.CodeAnalysis.Diagnostics;
11 |
namespace Microsoft.ML.InternalCodeAnalyzer
{
    // Roslyn analyzer enforcing the project convention that instance fields and
    // properties must not carry initializers; instance state is to be set in
    // constructors. Constants, statics, and generated code are exempt.
    [DiagnosticAnalyzer(LanguageNames.CSharp)]
    public sealed class InstanceInitializerAnalyzer : DiagnosticAnalyzer
    {
        private const string Category = "Declaration";
        internal const string DiagnosticId = "MSML_NoInstanceInitializers";

        private const string Title = "No initializers on instance fields or properties";
        private const string Format = "Member {0} has a {1} initializer outside the constructor";

        private static DiagnosticDescriptor Rule =
            new DiagnosticDescriptor(DiagnosticId, Title, Format, Category,
                DiagnosticSeverity.Warning, isEnabledByDefault: true,
                description: Descriptions.InstanceInitializerInConstructor);

        // NOTE(review): the generic argument appears to have been stripped from this
        // copy of the file — presumably ImmutableArray<DiagnosticDescriptor>; confirm
        // against the original source before editing.
        public override ImmutableArray SupportedDiagnostics =>
            ImmutableArray.Create(Rule);

        public override void Initialize(AnalysisContext context)
        {
            // Skip generated code entirely; this is a style rule for hand-written code.
            context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);
            context.RegisterSymbolAction(AnalyzeField, SymbolKind.Field);
            context.RegisterSymbolAction(AnalyzeProperty, SymbolKind.Property);
        }

        // Flags instance fields that carry an initializer.
        private static void AnalyzeField(SymbolAnalysisContext context)
        {
            var symbol = (IFieldSymbol)context.Symbol;
            // Constant or static field initializers are desirable. If implicitly
            // declared, then we can't very well ask the developer to fix.
            if (symbol.IsConst || symbol.IsStatic || symbol.IsImplicitlyDeclared)
                return;
            // Exempt argument attributes from the test. Note that because we cannot
            // depend on the Microsoft.ML source itself, we have to identify this class by name.
            if (symbol.GetAttributes().Any(i => i.AttributeClass.Name == "ArgumentAttribute"))
                return;

            // NOTE(review): reflection over the concrete Roslyn symbol implementation to
            // read an internal "HasInitializer" property. GetDeclaredProperty inspects
            // only the runtime type itself (not its bases), so if the property moves or
            // is renamed in a newer Roslyn the check silently becomes a no-op — fragile,
            // but there is no public API exposing this information.
            var typeInfo = symbol.GetType().GetTypeInfo();
            var hasInitProp = typeInfo.GetDeclaredProperty("HasInitializer");
            if (hasInitProp?.PropertyType != typeof(bool))
                return;
            bool hasInit = (bool)hasInitProp.GetValue(symbol);
            if (!hasInit)
                return;
            var diagnostic = Diagnostic.Create(Rule, symbol.Locations[0], symbol.Name, "field");
            context.ReportDiagnostic(diagnostic);
        }

        // Flags instance properties whose declaration syntax contains an
        // equals-value clause (a property initializer).
        private static void AnalyzeProperty(SymbolAnalysisContext context)
        {
            var symbol = (IPropertySymbol)context.Symbol;
            if (symbol.IsAbstract || symbol.IsImplicitlyDeclared || symbol.IsStatic)
                return;
            var syntaxRefs = symbol.DeclaringSyntaxReferences;
            if (syntaxRefs.IsEmpty)
                return;
            var syntax = syntaxRefs[0].GetSyntax();
            if (!syntax.ChildNodes().Any(s => s.IsKind(SyntaxKind.EqualsValueClause)))
                return;

            var diagnostic = Diagnostic.Create(Rule, symbol.Locations[0], symbol.Name, "property");
            context.ReportDiagnostic(diagnostic);
        }
    }
}
78 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.Data/Loader.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.Object
10 |
11 |
12 |
13 | To be added.
14 | To be added.
15 |
16 |
17 |
18 |
19 |
20 | Constructor
21 |
22 | 1.0.0.0
23 |
24 |
25 |
26 | To be added.
27 | To be added.
28 |
29 |
30 |
31 |
32 |
33 | Method
34 |
35 | 1.0.0.0
36 |
37 |
38 | TorchSharp.Data.DataIterator
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 | The position of the CIFAR10 dataset
47 | The required batch size
48 | Whether the iterator is for training or testing
49 |
50 | Create an iterator scanning the CIFAR10 dataset.
51 |
52 |
53 | To be added.
54 |
55 |
56 |
57 |
58 |
59 | Method
60 |
61 | 1.0.0.0
62 |
63 |
64 | TorchSharp.Data.DataIterator
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 | The position of the MNIST dataset
73 | The required batch size
74 | Whether the iterator is for training or testing
75 |
76 | Create an iterator scanning the MNIST dataset.
77 |
78 |
79 | To be added.
80 |
81 |
82 |
83 |
84 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/cifar10.cpp:
--------------------------------------------------------------------------------
1 | #include "cifar10.h"
2 |
3 | #include
4 | #include
5 |
// File names of the five CIFAR-10 training batches (binary format).
const std::string kTrainImagesTargetsFilename[] = {
    "data_batch_1.bin",
    "data_batch_2.bin",
    "data_batch_3.bin",
    "data_batch_4.bin",
    "data_batch_5.bin" };
// File name of the single CIFAR-10 test batch.
constexpr const char* kTestImagesTargetsFilename = "test_batch.bin";

// CIFAR-10 image geometry: 32x32 pixels, 3 color channels.
constexpr uint32_t kWidth = 32;
constexpr uint32_t kHight = 32;  // NOTE(review): spelling ("kHeight"); renaming would touch every use below.
constexpr uint32_t kChannel = 3;
// Each on-disk record is one label byte followed by the raw image bytes.
constexpr uint64_t kBytesPerImage = kWidth * kHight * kChannel + 1;
// Every CIFAR-10 batch file holds exactly 10,000 records.
constexpr uint32_t kImagesPerFile = 10000;
19 |
// Joins two path segments, inserting a '/' separator when one is needed.
// An empty `head` yields `tail` unchanged (no leading separator is added).
std::string join_paths(std::string head, const std::string& tail) {
    // Guard the empty case: calling back() on an empty string is undefined behavior.
    if (!head.empty() && head.back() != '/') {
        head.push_back('/');
    }
    head += tail;
    return head;
}
27 |
// Reads the CIFAR-10 dataset rooted at `root` and returns an (images, targets) pair.
// train == true : loads all five training batch files and concatenates them along dim 0.
// train == false: loads the single test batch.
// NOTE(review): template arguments appear to have been stripped from this copy of the
// file (e.g. `std::pair`, `std::vector>`) — presumably
// std::pair<torch::Tensor, torch::Tensor> and std::vector<...>; confirm against the
// original source before editing.
std::pair read_dir(const std::string& root, bool train) {
    if (train) {
        std::vector> images_targets;
        std::vector images;
        std::vector targets;
        // Load each training batch file independently.
        for (auto path : kTrainImagesTargetsFilename) {
            const std::string completePath = join_paths(root, path);
            images_targets.push_back(read_cifar10(completePath));
        }

        // Split the per-file (images, targets) pairs into two parallel lists.
        for (auto pair : images_targets) {
            images.push_back(pair.first);
            targets.push_back(pair.second);
        }

        // Concatenate all batches into one images tensor and one targets tensor.
        return std::pair(torch::cat(images, 0), torch::cat(targets, 0));
    }

    auto path = join_paths(root, kTestImagesTargetsFilename);
    return read_cifar10(path);
}
49 |
// Reads one CIFAR-10 binary batch file: kImagesPerFile records, each a label byte
// followed by kWidth*kHight*kChannel pixel bytes. Returns (images, labels) where
// images are float32 in [0, 1] (divided by 255) with shape (N, C, H, W) and labels
// are int64. Aborts via AT_CHECK if the file cannot be opened.
// NOTE(review): template arguments appear stripped here too (the return `std::pair`
// and `reinterpret_cast(` are missing their <...> parts) — confirm against the
// original source.
std::pair read_cifar10(std::string path) {
    std::ifstream data(path, std::ios::binary);
    AT_CHECK(data, "Error opening data file at ", path);
    // One flat byte tensor holding the entire file contents.
    auto content = torch::empty(kImagesPerFile * kBytesPerImage, torch::kByte);
    at::Tensor images = torch::zeros({ kImagesPerFile, kChannel, kHight, kWidth }, torch::kFloat32);
    at::Tensor labels = torch::zeros({ kImagesPerFile, }, torch::kInt64);

    data.read(reinterpret_cast(content.data_ptr()), content.numel());

    // Slice each record out of the flat buffer: byte 0 is the label,
    // the remaining kBytesPerImage - 1 bytes are the image pixels.
    for (int32_t i = 0; i < kImagesPerFile; i++)
    {
        auto offset = kBytesPerImage * i;
        labels.narrow(0, i, 1).copy_(content.narrow(0, offset, 1));
        images.narrow(0, i, 1).copy_(content
            .narrow(0, 1 + offset, kBytesPerImage - 1)
            .view({ 1, kChannel, kHight, kWidth })
            .toType(torch::kFloat32));
    }
    // Normalize pixel values in place to [0, 1].
    return std::pair(images.div_(255.0), labels);
}
70 |
namespace torch {
namespace data {
namespace datasets {
    // Loads the whole CIFAR-10 split (train or test) into memory at construction time.
    CIFAR10::CIFAR10(const std::string& root, Mode mode) {
        is_training = mode == Mode::kTrain;
        auto images_targets = read_dir(root, is_training);
        images_ = images_targets.first;
        targets_ = images_targets.second;
    }

    // Returns the (image, target) example at the given index.
    Example<> CIFAR10::get(size_t index) {
        return { images_[index], targets_[index] };
    }

    // Number of examples in this split (size of dim 0 of the images tensor).
    optional CIFAR10::size() const {
        return images_.size(0);
    }

    // True when the dataset was constructed with Mode::kTrain.
    bool CIFAR10::is_train() const noexcept {
        return is_training;
    }

    // Direct access to the full images tensor.
    const Tensor& CIFAR10::images() const {
        return images_;
    }

    // Direct access to the full targets tensor.
    const Tensor& CIFAR10::targets() const {
        return targets_;
    }
} // namespace datasets
} // namespace data
} // namespace torch
103 |
--------------------------------------------------------------------------------
/src/Native/build.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Builds the LibTorchSharp native components with CMake + clang.
set -e

# Print usage information and exit with a non-zero status.
usage()
{
    echo "Usage: $0 --arch <Architecture>"
    echo ""
    echo "Options:"
    echo "  --arch <Architecture>              Target Architecture (x64, x86)"
    echo "  --configuration <Configuration>    Build Configuration (Debug, Release)"
    echo "  --stripSymbols                     Enable symbol stripping (to external file)"
    echo "  --libtorchpath <Path>              Path to libtorch TorchConfig.cmake"
    exit 1
}

# Resolve the directory containing this script, following symlinks.
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
  DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
  SOURCE="$(readlink "$SOURCE")"
  [[ "$SOURCE" != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
RootRepo="$DIR/../.."

__build_arch=
__strip_argument=
__libtorchpath=
__configuration=Debug
__rootBinPath="$RootRepo/bin"
__baseIntermediateOutputPath="$__rootBinPath/obj"
__versionSourceFile="$__baseIntermediateOutputPath/version.c"

# Parse command-line options (case-insensitive flag names).
while [ "$1" != "" ]; do
    lowerI="$(echo $1 | awk '{print tolower($0)}')"
    case $lowerI in
        -h|--help)
            usage
            exit 1
            ;;
        --arch)
            shift
            __build_arch=$1
            ;;
        --configuration)
            shift
            __configuration=$1
            ;;
        --stripsymbols)
            __strip_argument="-DSTRIP_SYMBOLS=true"
            ;;
        --libtorchpath)
            shift
            __libtorchpath=$1
            ;;
        *)
        echo "Unknown argument to build.sh $1"; usage; exit 1
    esac
    shift
done

# Force the build to be release since libtorch is in release.
__cmake_defines="-DCMAKE_BUILD_TYPE=Release ${__strip_argument} -DLIBTORCH_PATH=${__libtorchpath}"

__IntermediatesDir="$__baseIntermediateOutputPath/$__build_arch.$__configuration/Native"
__BinDir="$__rootBinPath/$__build_arch.$__configuration/Native"

mkdir -p "$__BinDir"
mkdir -p "$__IntermediatesDir"

# Set up the environment to be used for building with clang.
# Prefer specific known-good versions, then fall back to any clang on PATH.
if command -v "clang-3.5" > /dev/null 2>&1; then
    export CC="$(command -v clang-3.5)"
    export CXX="$(command -v clang++-3.5)"
elif command -v "clang-3.6" > /dev/null 2>&1; then
    export CC="$(command -v clang-3.6)"
    export CXX="$(command -v clang++-3.6)"
elif command -v "clang-3.9" > /dev/null 2>&1; then
    export CC="$(command -v clang-3.9)"
    export CXX="$(command -v clang++-3.9)"
elif command -v clang > /dev/null 2>&1; then
    export CC="$(command -v clang)"
    export CXX="$(command -v clang++)"
else
    echo "Unable to find Clang Compiler"
    echo "Install clang-3.5 or clang-3.6 or clang-3.9"
    exit 1
fi

# Specify path to be set for CMAKE_INSTALL_PREFIX.
# This is where all built native libraries will copied to.
export __CMakeBinDir="$__BinDir"

# Generate a placeholder version source file if none exists yet.
# (Quoted to preserve the string verbatim and survive paths with spaces.)
if [ ! -f "$__versionSourceFile" ]; then
    __versionSourceLine="static char sccsid[] __attribute__((used)) = \"@(#)No version information produced\";"
    echo "$__versionSourceLine" > "$__versionSourceFile"
fi

__cmake_defines="${__cmake_defines} -DVERSION_FILE_PATH:STRING=${__versionSourceFile}"

cd "$__IntermediatesDir"

echo "Building Machine Learning native components from $DIR to $(pwd)"
set -x # turn on trace
cmake "$DIR" -G "Unix Makefiles" $__cmake_defines
set +x # turn off trace
make install
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.Data/DataIterator+HType.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.Runtime.InteropServices.SafeHandle
10 |
11 |
12 |
13 |
14 | Class wrapping PyTorch's iterator object reference.
15 |
16 | To be added.
17 |
18 |
19 |
20 |
21 |
22 | Constructor
23 |
24 | 1.0.0.0
25 |
26 |
27 |
28 |
29 |
30 |
31 | To be added.
32 | To be added.
33 | To be added.
34 | To be added.
35 |
36 |
37 |
38 |
39 |
40 | Method
41 |
42 | 1.0.0.0
43 |
44 |
45 | System.Void
46 |
47 |
48 |
49 |
50 |
51 | To be added.
52 | To be added.
53 | To be added.
54 |
55 |
56 |
57 |
58 |
59 | Property
60 |
61 | 1.0.0.0
62 |
63 |
64 | System.Boolean
65 |
66 |
67 | To be added.
68 | To be added.
69 | To be added.
70 |
71 |
72 |
73 |
74 |
75 | Method
76 |
77 | 1.0.0.0
78 |
79 |
80 | System.Boolean
81 |
82 |
83 |
84 | To be added.
85 | To be added.
86 | To be added.
87 |
88 |
89 |
90 |
91 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.NN/Init.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.Object
10 |
11 |
12 |
13 | To be added.
14 | To be added.
15 |
16 |
17 |
18 |
19 |
20 | Method
21 |
22 | 1.0.0.0
23 |
24 |
25 | System.ValueTuple<System.Int64,System.Int64>
26 |
27 |
28 | System.Runtime.CompilerServices.TupleElementNames(Mono.Cecil.CustomAttributeArgument[])
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 | To be added.
40 | To be added.
41 | To be added.
42 | To be added.
43 | To be added.
44 |
45 |
46 |
47 |
48 |
49 | Method
50 |
51 | 1.0.0.0
52 |
53 |
54 | System.Void
55 |
56 |
57 |
58 |
59 |
60 |
61 | To be added.
62 | To be added.
63 | To be added.
64 | To be added.
65 |
66 |
67 |
68 |
69 |
70 | Method
71 |
72 | 1.0.0.0
73 |
74 |
75 | System.Void
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 | To be added.
84 | To be added.
85 | To be added.
86 | To be added.
87 | To be added.
88 |
89 |
90 |
91 |
92 |
--------------------------------------------------------------------------------
/ecmadocs/en/TorchSharp.Tensor/ATenScalarMapping.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | TorchSharp
6 | 1.0.0.0
7 |
8 |
9 | System.Enum
10 |
11 |
12 | To be added.
13 | To be added.
14 |
15 |
16 |
17 |
18 |
19 | Field
20 |
21 | 1.0.0.0
22 |
23 |
24 | TorchSharp.Tensor.ATenScalarMapping
25 |
26 |
27 | To be added.
28 |
29 |
30 |
31 |
32 |
33 | Field
34 |
35 | 1.0.0.0
36 |
37 |
38 | TorchSharp.Tensor.ATenScalarMapping
39 |
40 |
41 | To be added.
42 |
43 |
44 |
45 |
46 |
47 | Field
48 |
49 | 1.0.0.0
50 |
51 |
52 | TorchSharp.Tensor.ATenScalarMapping
53 |
54 |
55 | To be added.
56 |
57 |
58 |
59 |
60 |
61 | Field
62 |
63 | 1.0.0.0
64 |
65 |
66 | TorchSharp.Tensor.ATenScalarMapping
67 |
68 |
69 | To be added.
70 |
71 |
72 |
73 |
74 |
75 | Field
76 |
77 | 1.0.0.0
78 |
79 |
80 | TorchSharp.Tensor.ATenScalarMapping
81 |
82 |
83 | To be added.
84 |
85 |
86 |
87 |
88 |
89 | Field
90 |
91 | 1.0.0.0
92 |
93 |
94 | TorchSharp.Tensor.ATenScalarMapping
95 |
96 |
97 | To be added.
98 |
99 |
100 |
101 |
102 |
--------------------------------------------------------------------------------
/src/Native/LibTorchSharp/THSData.cpp:
--------------------------------------------------------------------------------
1 | #include "THSData.h"
2 | #include "cifar10.h"
3 |
4 | #include
5 |
// Typedefs for the iterators.
// NOTE(review): the template arguments of these typedefs appear to have been
// stripped from this copy of the file (e.g. `StatelessDataLoader>, ...`);
// confirm against the original source before editing. Also note the pairing of
// names and samplers (MNISTTest_t/RandomSampler vs MNISTTrain_t/SequentialSampler)
// looks inverted relative to the usual train=random convention — verify.
typedef torch::data::StatelessDataLoader>, torch::data::transforms::Stack>>, torch::data::samplers::RandomSampler> MNISTTest_t;

typedef torch::data::StatelessDataLoader>, torch::data::transforms::Stack>>, torch::data::samplers::SequentialSampler> MNISTTrain_t;

typedef torch::data::StatelessDataLoader>>, torch::data::samplers::RandomSampler> CIFAR10Test_t;

typedef torch::data::StatelessDataLoader>>, torch::data::samplers::SequentialSampler> CIFAR10Train_t;
14 |
// Load an MNIST dataset from a file.
// filename : path to the MNIST data directory/file.
// batchSize: number of examples per batch.
// isTrain  : selects the training split (and its loader type) vs. the test split.
// Returns a heap-allocated iterator; the caller owns it and must release it via
// THSData_dispose. Batches are normalized with the standard MNIST mean/stddev
// (0.1307 / 0.3081) and stacked into single tensors.
// NOTE(review): template arguments (e.g. on make_data_loader, shared_ptr, and
// DatasetIterator) appear to have been stripped from this copy — confirm against
// the original source before editing.
DatasetIteratorBase * THSData_loaderMNIST(
    const char* filename,
    int64_t batchSize,
    bool isTrain)
{
    torch::data::datasets::MNIST::Mode mode = torch::data::datasets::MNIST::Mode::kTrain;

    if (!isTrain)
    {
        mode = torch::data::datasets::MNIST::Mode::kTest;

    }

    auto dataset = torch::data::datasets::MNIST(filename, mode)
        .map(torch::data::transforms::Normalize<>(0.1307, 0.3081))
        .map(torch::data::transforms::Stack<>());

    // Capture the dataset size before the dataset is moved into the loader.
    size_t size = dataset.size().value();

    if (isTrain)
    {
        auto loader = torch::data::make_data_loader(
            std::move(dataset), batchSize);

        // Keep the loader alive for the iterator's lifetime via shared ownership.
        std::shared_ptr shared = std::move(loader);

        return new DatasetIterator(shared->begin(), size, shared);
    }
    else
    {
        auto loader = torch::data::make_data_loader(
            std::move(dataset), batchSize);

        std::shared_ptr shared = std::move(loader);

        return new DatasetIterator(shared->begin(), size, shared);
    }
}
54 |
// Load a CIFAR10 dataset from a file.
// filename : path to the CIFAR-10 data directory.
// batchSize: number of examples per batch.
// isTrain  : selects the training split (and its loader type) vs. the test split.
// Returns a heap-allocated iterator; the caller owns it and must release it via
// THSData_dispose. Unlike the MNIST loader, no normalization transform is applied
// here (only Stack) — the CIFAR10 dataset already scales pixels to [0, 1].
// NOTE(review): template arguments appear stripped in this copy (make_data_loader,
// shared_ptr, DatasetIterator) — confirm against the original source before editing.
DatasetIteratorBase * THSData_loaderCIFAR10(
    const char* filename,
    int64_t batchSize,
    bool isTrain)
{
    torch::data::datasets::CIFAR10::Mode mode = torch::data::datasets::CIFAR10::Mode::kTrain;

    if (!isTrain)
    {
        mode = torch::data::datasets::CIFAR10::Mode::kTest;

    }

    auto dataset = torch::data::datasets::CIFAR10(filename, mode).map(torch::data::transforms::Stack<>());
    // Capture the dataset size before the dataset is moved into the loader.
    size_t size = dataset.size().value();

    if (isTrain)
    {
        auto loader = torch::data::make_data_loader(std::move(dataset), batchSize);

        // Keep the loader alive for the iterator's lifetime via shared ownership.
        std::shared_ptr shared = std::move(loader);

        return new DatasetIterator(shared->begin(), size, shared);
    }
    else
    {
        auto loader = torch::data::make_data_loader(
            std::move(dataset), batchSize);

        std::shared_ptr shared = std::move(loader);

        return new DatasetIterator(shared->begin(), size, shared);
    }
}
90 |
91 | size_t THSData_size(DatasetIteratorBase * iterator)
92 | {
93 | return iterator->getSize();
94 | }
95 |
96 | bool THSData_moveNext(DatasetIteratorBase * iterator)
97 | {
98 | bool result = iterator->moveNext();
99 | return result;
100 | }
101 |
// Writes the current batch's data and target tensors into the supplied
// output parameters.
void THSData_current(DatasetIteratorBase * iterator, Tensor* data, Tensor* target)
{
    iterator->current(data, target);
}
106 |
// Rewinds the iterator back to the start of the dataset.
void THSData_reset(DatasetIteratorBase * iterator)
{
    iterator->reset();
}
111 |
// Destroys an iterator previously returned by one of the loader functions,
// releasing its shared reference to the underlying data loader.
void THSData_dispose(DatasetIteratorBase * iterator)
{
    delete iterator;
}
--------------------------------------------------------------------------------