├── bin └── .gitignore ├── wrap └── .gitignore ├── example ├── example │ ├── models │ │ └── graph.pb │ ├── graphgen.py │ ├── Properties │ │ └── AssemblyInfo.cs │ ├── example.csproj │ └── Program.cs └── example.sln ├── callback.h ├── callback.cc ├── BUILD ├── deallocator.h ├── README.md ├── tensor_c_api.i ├── deallocator.cc └── LICENSE /bin/.gitignore: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /wrap/.gitignore: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /example/example/models/graph.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/unrealwill/tensorflow-csharp-c-api/HEAD/example/example/models/graph.pb -------------------------------------------------------------------------------- /callback.h: -------------------------------------------------------------------------------- 1 | #ifndef _CALLBACK_H 2 | #define _CALLBACK_H 3 | #include 4 | #include 5 | 6 | class Callback { 7 | public: 8 | virtual ~Callback(); 9 | virtual void run(); 10 | }; 11 | 12 | 13 | #endif 14 | 15 | -------------------------------------------------------------------------------- /callback.cc: -------------------------------------------------------------------------------- 1 | 2 | /* File : callback.cc */ 3 | 4 | #include "callback.h" 5 | 6 | Callback::~Callback() 7 | { 8 | //std::cout << "Callback::~Callback()" << std:: endl; 9 | } 10 | 11 | void Callback::run() 12 | { 13 | // std::cout << "Callback::run()" << std::endl; 14 | } 15 | 16 | -------------------------------------------------------------------------------- /BUILD: -------------------------------------------------------------------------------- 1 | cc_binary( 2 | name = "libtensorflow_c_api.so", 3 | srcs = ["wrapcxx/tensor_c_api_wrap.cc", 4 | "wrapcxx/tensor_c_api_wrap.h", 5 | "deallocator.cc", 6 | "deallocator.h", 7 | "callback.cc", 8 | "callback.h"], 9 | linkshared = 1, 10 | deps = [ 11 | "//tensorflow/core:tensorflow", 12 | ], 13 | ) 14 | 15 | -------------------------------------------------------------------------------- /deallocator.h: -------------------------------------------------------------------------------- 1 | #ifndef DEALLOCATOR_H 2 | #define DEALLOCATOR_H 3 | 4 | #include "tensorflow/core/public/tensor_c_api.h" 5 | #include "callback.h" 6 | 7 | extern TF_Tensor* TF_NewTensorAllocated(TF_DataType dt, long long* dims, int num_dims); 8 | 9 | extern TF_Tensor* TF_NewTensorCB(TF_DataType dt, long long* dims, int num_dims, 10 | void* data, size_t len, 11 | Callback* cb); 12 | 13 | #endif 14 | -------------------------------------------------------------------------------- /example/example/graphgen.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import numpy as np 3 | 4 | with tf.Session() as sess: 5 | a = tf.Variable(np.arange(10).astype(np.float32), name="a") 6 | b = tf.Variable(np.arange(10).astype(np.float32), name="b") 7 | #d = tf.Variable(np.arange(10000000), name='d') 8 | #a = tf.constant(5.0) 9 | #b = tf.constant(6.0) 10 | c = tf.mul(a, b, name="output") 11 | sess.run( tf.initialize_variables(tf.all_variables(),name = 'i')) 12 | 13 | print a.eval() # 5.0 14 | print b.eval() # 6.0 15 | print c.eval() # 30.0 16 | 17 | tf.train.write_graph(sess.graph_def, 'models', 
'graph.pb', as_text=False) 18 | -------------------------------------------------------------------------------- /example/example.sln: -------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 2012 4 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "example", "example\example.csproj", "{C0E9FEF0-4D18-4DE7-BDBD-F76DE367D7DB}" 5 | EndProject 6 | Global 7 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 8 | Debug|x86 = Debug|x86 9 | Release|x86 = Release|x86 10 | EndGlobalSection 11 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 12 | {C0E9FEF0-4D18-4DE7-BDBD-F76DE367D7DB}.Debug|x86.ActiveCfg = Debug|x86 13 | {C0E9FEF0-4D18-4DE7-BDBD-F76DE367D7DB}.Debug|x86.Build.0 = Debug|x86 14 | {C0E9FEF0-4D18-4DE7-BDBD-F76DE367D7DB}.Release|x86.ActiveCfg = Release|x86 15 | {C0E9FEF0-4D18-4DE7-BDBD-F76DE367D7DB}.Release|x86.Build.0 = Release|x86 16 | EndGlobalSection 17 | EndGlobal 18 | -------------------------------------------------------------------------------- /example/example/Properties/AssemblyInfo.cs: -------------------------------------------------------------------------------- 1 | using System.Reflection; 2 | using System.Runtime.CompilerServices; 3 | 4 | // Information about this assembly is defined by the following attributes. 5 | // Change them to the values specific to your project. 6 | 7 | [assembly: AssemblyTitle("example")] 8 | [assembly: AssemblyDescription("")] 9 | [assembly: AssemblyConfiguration("")] 10 | [assembly: AssemblyCompany("")] 11 | [assembly: AssemblyProduct("")] 12 | [assembly: AssemblyCopyright("darkblue")] 13 | [assembly: AssemblyTrademark("")] 14 | [assembly: AssemblyCulture("")] 15 | 16 | // The assembly version has the format "{Major}.{Minor}.{Build}.{Revision}". 17 | // The form "{Major}.{Minor}.*" will automatically update the build and revision, 18 | // and "{Major}.{Minor}.{Build}.*" will update just the revision. 19 | 20 | [assembly: AssemblyVersion("1.0.*")] 21 | 22 | // The following attributes are used to specify the signing key for the assembly, 23 | // if desired. See the Mono documentation for more information about signing. 
24 | 25 | //[assembly: AssemblyDelaySign(false)] 26 | //[assembly: AssemblyKeyFile("")] 27 | 28 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # tensorflow-csharp-c-api 2 | Port of the tensorflow c api to csharp 3 | 4 | Adaptation of this: 5 | https://medium.com/jim-fleming/loading-tensorflow-graphs-via-host-languages-be10fd81876f 6 | to CSharp using SWIG 7 | 8 | Status: 9 | Example Graph Successfully Runs 10 | Still a few TODOs before it is fully functional 11 | 12 | 13 | Prerequisites: 14 | tensorflow (so you should already have swig and g++) 15 | 16 | Install instructions (Ubuntu 14.04): 17 | Clone the tensorflow-csharp-c-api repository 18 | cd into the tensorflow-csharp-c-api repository 19 | Look at and adapt build.sh so that its paths to tensorflow are correct 20 | run build.sh (a hedged sketch of these steps is appended at the end of this document) 21 | 22 | The generated libraries should be in the bin folder 23 | 24 | Add a reference to the generated dll 25 | Build the csharp project 26 | 27 | Create symlinks or copy libtensorflow_c_api.so and libtensorflow.so alongside the binary exe so that the exe can find and load them 28 | 29 | Look at the example project for usage 30 | 31 | 32 | Alternatively you could use Bazel by copying this repository into the tensorflow/tensorflow directory: 33 | generate the wrapper with swig, 34 | compile all the cs files into a dll, 35 | then run bazel build :all to build the shared library 36 | 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /example/example/example.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Debug 5 | x86 6 | {C0E9FEF0-4D18-4DE7-BDBD-F76DE367D7DB} 7 | Exe 8 | example 9 | example 10 | v4.5 11 | 12 | 13 | true 14 | full 15 | false 16 | bin\Debug 17 | DEBUG; 18 | prompt 19 | 4 20 | true 21 | x86 22 | 23 | 24 | full 25 | true 26 | bin\Release 27 | prompt 28 | 4 29 | true 30 | x86 31 | 32 | 33 | 34 | 35 | ..\..\bin\tensor_c_api.dll 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /tensor_c_api.i: -------------------------------------------------------------------------------- 1 | 2 | %module(directors="1") tensorflow_c_api 3 | %{ 4 | #include "tensorflow/core/public/tensor_c_api.h" 5 | #include "deallocator.h" 6 | #include "callback.h" 7 | %} 8 | 9 | %include "carrays.i" 10 | 11 | %include <arrays_csharp.i> 12 | 13 | CSHARP_ARRAYS(char *, string) 14 | %typemap(imtype, inattributes="[In, MarshalAs(UnmanagedType.LPArray, SizeParamIndex=0, ArraySubType=UnmanagedType.LPStr)]") char *INPUT[] "string[]" 15 | 16 | %apply char *INPUT[] { char ** } 17 | 18 | 19 | %apply long long INPUT[] { long long* } 20 | 21 | %array_functions(TF_Tensor*, tensorArray); 22 | 23 | //TODO: use a cleaner csharp array interface 24 | //this is the carrays interface from swig 25 | //It is not as clean as we would like 26 | //But I tried using a managed array approach using CSHARP_ARRAYS, it compiles but crashes upon marshalling 27 | //Because SWIGTYPE_p_TF_Tensor doesn't have a StructLayout attribute, because it is a wrapper generated by swig 28 | //We would like to marshal it as an array containing the pointers: SWIGTYPE_p_TF_Tensor.getCPtr() 29 | 30 | //CSHARP_ARRAYS(TF_Tensor *, SWIGTYPE_p_TF_Tensor) 31 | //%typemap(imtype, inattributes="[In, MarshalAs(UnmanagedType.LPArray, SizeParamIndex=0, ArraySubType=UnmanagedType.ByValArray)]") TF_Tensor *INPUT[] "SWIGTYPE_p_TF_Tensor[]" 32 | //%apply TF_Tensor *INPUT[] {
TF_Tensor** } 33 | 34 | %feature("director") Callback; 35 | 36 | 37 | //Copy pasted from the web 38 | %define %cs_marshal_intptr(TYPE, ARGNAME...) 39 | %typemap(ctype) TYPE ARGNAME "void*" 40 | %typemap(imtype) TYPE ARGNAME "IntPtr" 41 | %typemap(cstype) TYPE ARGNAME "IntPtr" 42 | %typemap(in) TYPE ARGNAME %{ $1 = ($1_ltype)$input; /* IntPtr */ %} 43 | %typemap(csin) TYPE ARGNAME "$csinput" 44 | 45 | %typemap(out) TYPE ARGNAME %{ $result = $1; %} 46 | %typemap(csout, excode=SWIGEXCODE) TYPE ARGNAME { 47 | IntPtr cPtr = $imcall;$excode 48 | return cPtr; 49 | } 50 | %typemap(csvarout, excode=SWIGEXCODE2) TYPE ARGNAME %{ 51 | get { 52 | IntPtr cPtr = $imcall;$excode 53 | return cPtr; 54 | } 55 | %} 56 | 57 | %typemap(ctype) TYPE& ARGNAME "void**" 58 | %typemap(imtype) TYPE& ARGNAME "ref IntPtr" 59 | %typemap(cstype) TYPE& ARGNAME "ref IntPtr" 60 | %typemap(in) TYPE& ARGNAME %{ $1 = ($1_ltype)$input; %} 61 | %typemap(csin) TYPE& ARGNAME "ref $csinput" 62 | %enddef 63 | 64 | %cs_marshal_intptr(void*); 65 | 66 | %include "tensorflow/core/public/tensor_c_api.h" 67 | %include "deallocator.h" 68 | %include "callback.h" 69 | 70 | //tensorflow/tensor_c_api/ 71 | 72 | 73 | -------------------------------------------------------------------------------- /deallocator.cc: -------------------------------------------------------------------------------- 1 | #include "tensorflow/core/public/tensor_c_api.h" 2 | #include <cstdlib> 3 | //#include "tensorflow/core/framework/tensor.h" 4 | #include "tensorflow/tensor_c_api/deallocator.h" 5 | #include <stdint.h> 6 | #include <iostream> 7 | 8 | static void Deallocator(void* data, size_t, void* arg) { 9 | //std::cout<< "Deallocator free" << std::endl; 10 | std::free(data); 11 | //tensorflow::cpu_allocator()->DeallocateRaw(data); 12 | *reinterpret_cast<bool*>(arg) = true; 13 | } 14 | 15 | TF_Tensor* TF_NewTensorAllocated(TF_DataType dt, long long* dims, int num_dims) 16 | { 17 | int nbElem = 1; 18 | for( int i = 0 ; i < num_dims ; i++) 19 | nbElem *= dims[i]; 20 | unsigned int len = 0; 21 | void* data = NULL; 22 | switch( dt ) 23 | { 24 | case TF_FLOAT : 25 | len = nbElem * sizeof(float); 26 | data = std::malloc( len ); 27 | break; 28 | case TF_DOUBLE : 29 | len = nbElem * sizeof(double); 30 | data = std::malloc( len ); 31 | break; 32 | case TF_INT32 : 33 | len = nbElem * sizeof(int32_t); 34 | data = std::malloc( len ); 35 | break; 36 | case TF_UINT8 : 37 | len = nbElem * sizeof(uint8_t); 38 | data = std::malloc( len ); 39 | break; 40 | case TF_INT16 : 41 | len = nbElem * sizeof(int16_t); 42 | data = std::malloc( len ); 43 | break; 44 | case TF_INT8 : 45 | len = nbElem * sizeof(int8_t); 46 | data = std::malloc( len ); 47 | break; 48 | case TF_STRING : 49 | //TODO 50 | break; 51 | case TF_COMPLEX : 52 | //TODO 53 | break; 54 | case TF_INT64 : 55 | len = nbElem * sizeof(int64_t); 56 | data = std::malloc( len ); 57 | break; 58 | case TF_BOOL : 59 | len = nbElem * sizeof(bool); 60 | data = std::malloc( len ); 61 | break; 62 | case TF_QINT8 : 63 | 64 | break; 65 | case TF_QUINT8 : 66 | 67 | break; 68 | case TF_QINT32 : 69 | 70 | break; 71 | case TF_BFLOAT16 : 72 | 73 | break; 74 | case TF_QINT16 : 75 | 76 | break; 77 | case TF_QUINT16 : 78 | 79 | break; 80 | case TF_UINT16 : 81 | len = nbElem * sizeof(uint16_t); 82 | data = std::malloc( len ); 83 | break; 84 | } 85 | 86 | /* 87 | float* values = 88 | reinterpret_cast<float*>(tensorflow::cpu_allocator()->AllocateRaw( 89 | EIGEN_MAX_ALIGN_BYTES, 10 * sizeof(float))); 90 | tensorflow::int64 dims2[] = {10}; 91 | */ 92 | 93 | bool deallocator_called = false; 94 | TF_Tensor*
t = TF_NewTensor(dt, dims,num_dims, data, len, 95 | &Deallocator, &deallocator_called); 96 | 97 | return t; 98 | } 99 | 100 | void callbackDeallocator(void* data, size_t len, 101 | void* arg) 102 | { 103 | //std::cout << "callbackDeallocator " << arg << std::endl; 104 | ((Callback*)arg)->run(); 105 | } 106 | 107 | // For memory managed by something like numpy. 108 | TF_Tensor* TF_NewTensorCB(TF_DataType dt, long long* dims, int num_dims, 109 | void* data, size_t len, 110 | Callback* cb) 111 | { 112 | return TF_NewTensor(dt,dims,num_dims,data,len, callbackDeallocator,cb); 113 | } 114 | -------------------------------------------------------------------------------- /example/example/Program.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Runtime.InteropServices; 3 | using System.IO; 4 | using System.Text; 5 | 6 | namespace example 7 | { 8 | //To create a callback which will be called in csharp upon deallocation 9 | //Just inherit from Callback 10 | public class CSharpCallback : Callback 11 | { 12 | public CSharpCallback() 13 | : base() 14 | { 15 | } 16 | 17 | public override void run() 18 | { 19 | Console.WriteLine("CSharpCallback.run()"); 20 | //In this simple callback we don't release memory 21 | } 22 | } 23 | 24 | class MainClass 25 | { 26 | public static void Main(string[] args) 27 | { 28 | var options = tensorflow_c_api.TF_NewSessionOptions(); 29 | 30 | var status = tensorflow_c_api.TF_NewStatus(); 31 | var session = tensorflow_c_api.TF_NewSession(options, status); 32 | if (tensorflow_c_api.TF_GetCode(status) == TF_Code.TF_OK) 33 | Console.WriteLine("OK"); 34 | else 35 | Console.WriteLine("Not OK"); 36 | 37 | byte[] data = File.ReadAllBytes("../../models/graph.pb"); 38 | 39 | IntPtr ptr = Marshal.AllocHGlobal(data.Length); 40 | Marshal.Copy(data,0,ptr,data.Length); 41 | 42 | 43 | 44 | var tens = new float[10]; 45 | for (int i = 0; i < tens.Length; i++) 46 | tens[i] = (float)(5+i); 47 | 48 | IntPtr ptrTensor = Marshal.AllocHGlobal(tens.Length*sizeof(float) ); 49 | Marshal.Copy(tens,0,ptrTensor,tens.Length); 50 | 51 | try 52 | { 53 | 54 | var dims = new long[]{tens.Length}; 55 | //These callbacks are called upon destruction of the tensor so that we can handle releasing the memory in csharp 56 | //You should make sure this callback object doesn't get garbage collected before it is called 57 | var cb = new CSharpCallback(); 58 | 59 | //We can use two tensors pointing to the same array 60 | var TFtens = tensorflow_c_api.TF_NewTensorCB(TF_DataType.TF_FLOAT, dims, 1,ptrTensor,(uint)(tens.Length*sizeof(float)), cb); 61 | 62 | //Alternatively we can use the library to allocate and release memory directly 63 | //But this means additional copies 64 | var TFtens2 = tensorflow_c_api.TF_NewTensorAllocated(TF_DataType.TF_FLOAT,dims,1); 65 | //The tensor data is allocated but not yet initialized 66 | var tens2IntPtr = tensorflow_c_api.TF_TensorData( TFtens2 ); 67 | Marshal.Copy(tens,0,tens2IntPtr,tens.Length); 68 | 69 | //We construct the graph from the binary proto file 70 | tensorflow_c_api.TF_ExtendGraph( session,ptr,(uint)(data.Length),status ); 71 | 72 | if (tensorflow_c_api.TF_GetCode(status) == TF_Code.TF_OK) 73 | { 74 | Console.WriteLine("Graph Creation OK"); 75 | } 76 | else 77 | { 78 | Console.WriteLine("Graph Creation Not OK"); 79 | return; 80 | } 81 | 82 | //We create an array of tensors 83 | //This array will have to be deleted with 84 | //tensorflow_c_api.delete_tensorArray 85 | var inputs =
tensorflow_c_api.new_tensorArray(2); 86 | tensorflow_c_api.tensorArray_setitem(inputs, 0, TFtens); 87 | //Note that we have two different tensors as input 88 | //using tensorflow_c_api.tensorArray_setitem(inputs, 1, TFtens); 89 | //would result in an error upon TF_Run, which deletes its inputs 90 | tensorflow_c_api.tensorArray_setitem(inputs, 1, TFtens2); 91 | var outputs = tensorflow_c_api.new_tensorArray(1); 92 | 93 | //We run the "i" OpDef to initialize the variables, loading their values into the variable nodes 94 | //This OpDef was created in python with tf.initialize_variables(tf.all_variables(),name = 'i') 95 | 96 | tensorflow_c_api.TF_Run( session, new string[0],inputs,0,new string[0],outputs,0,new string[]{"i"},1,status); 97 | if (tensorflow_c_api.TF_GetCode(status) == TF_Code.TF_OK) 98 | Console.WriteLine("Init Run OK"); 99 | else 100 | { 101 | Console.WriteLine("Init Run Not OK"); 102 | Console.WriteLine( tensorflow_c_api.TF_Message( status) ); 103 | return; 104 | } 105 | 106 | //We run the graph which uses the previously initialized variables 107 | tensorflow_c_api.TF_Run( session, new string[0]{} ,inputs,0,new string[]{"output"},outputs,1,new string[0],0,status); 108 | if (tensorflow_c_api.TF_GetCode(status) == TF_Code.TF_OK) 109 | Console.WriteLine("Run using initialized variable OK"); 110 | else 111 | { 112 | Console.WriteLine("Run using initialized variable Not OK"); 113 | Console.WriteLine( tensorflow_c_api.TF_Message( status) ); 114 | return; 115 | } 116 | 117 | { 118 | var outtensor = tensorflow_c_api.tensorArray_getitem(outputs,0); 119 | var resu = tensorflow_c_api.TF_TensorData( outtensor ); 120 | var resuf = new float[10]; 121 | Marshal.Copy(resu,resuf,0,10); 122 | Console.WriteLine("The result from the execution of the graph is : resu[3] : {0}",resuf[3]); 123 | //We release the tensor, which is now under our responsibility 124 | tensorflow_c_api.TF_DeleteTensor( outtensor ); 125 | } 126 | 127 | //We run the graph feeding it some inputs 128 | //It will call TF_DeleteTensor on the inputs 129 | tensorflow_c_api.TF_Run( session, new string[2]{ "a","b" } ,inputs,2,new string[]{"output"},outputs,1,new string[0],0,status); 130 | if (tensorflow_c_api.TF_GetCode(status) == TF_Code.TF_OK) 131 | Console.WriteLine("Run feeding input variables OK"); 132 | else 133 | { 134 | Console.WriteLine("Run feeding input variables Not OK"); 135 | Console.WriteLine( tensorflow_c_api.TF_Message( status) ); 136 | return; 137 | } 138 | 139 | { 140 | var outtensor = tensorflow_c_api.tensorArray_getitem(outputs,0); 141 | var resu = tensorflow_c_api.TF_TensorData( outtensor ); 142 | var resuf = new float[10]; 143 | Marshal.Copy(resu,resuf,0,10); 144 | Console.WriteLine("The result from the execution of the graph is : resu[3] : {0}",resuf[3]); 145 | tensorflow_c_api.TF_DeleteTensor( outtensor ); 146 | } 147 | 148 | //We delete the tensor arrays 149 | tensorflow_c_api.delete_tensorArray(inputs); 150 | tensorflow_c_api.delete_tensorArray(outputs); 151 | 152 | } 153 | finally 154 | { 155 | //We release the binary protobuf of the graph 156 | Marshal.FreeHGlobal(ptr); 157 | //We release our tensor data 158 | Marshal.FreeHGlobal(ptrTensor); 159 | } 160 | 161 | 162 | 163 | 164 | 165 | } 166 | } 167 | } 168 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND
DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------
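--------------------------------------------------------------------------------
Build sketch (referenced from README.md):
--------------------------------------------------------------------------------
The repository's actual build.sh is not included in this dump, so the following is only a minimal sketch of the steps the README describes: generate the SWIG wrapper, compile the native shared library, and compile the generated C# proxy classes into the bin/tensor_c_api.dll that example.csproj references. TF_ROOT, the directory layout, and the compiler/linker flags are assumptions to adapt to your own tensorflow checkout; the real build.sh may differ.

#!/bin/bash
# Hedged sketch only, not the repository's actual build.sh.
# Assumptions:
#   - TF_ROOT points at a tensorflow source checkout in which libtensorflow.so
#     has already been built with bazel
#   - this repository has been copied to $TF_ROOT/tensorflow/tensor_c_api, so that
#     the include "tensorflow/tensor_c_api/deallocator.h" resolves
set -e
TF_ROOT=${TF_ROOT:-$HOME/tensorflow}

mkdir -p wrapcxx wrap bin

# 1. Generate the C++ wrapper and the C# proxy classes with SWIG.
swig -csharp -c++ -I"$TF_ROOT" -outdir wrap -o wrapcxx/tensor_c_api_wrap.cc tensor_c_api.i

# 2. Build the native shared library (the BUILD file does the same thing via bazel).
g++ -std=c++11 -fPIC -shared -I"$TF_ROOT" \
    wrapcxx/tensor_c_api_wrap.cc deallocator.cc callback.cc \
    -L"$TF_ROOT/bazel-bin/tensorflow" -ltensorflow \
    -o bin/libtensorflow_c_api.so

# 3. Compile the generated C# proxies into the managed assembly referenced by
#    example.csproj (..\..\bin\tensor_c_api.dll).
mcs -target:library -out:bin/tensor_c_api.dll wrap/*.cs

After this, as the README says, copy or symlink bin/libtensorflow_c_api.so and libtensorflow.so next to the example executable so that the runtime can resolve the P/Invoke calls.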