diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/DisposableNamedOnnxValue.cs b/csharp/src/Microsoft.ML.OnnxRuntime/DisposableNamedOnnxValue.cs
index 03bab8e920..3b8cb75787 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/DisposableNamedOnnxValue.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/DisposableNamedOnnxValue.cs
@@ -51,7 +51,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// This class serves as a container for model run output values including
- /// tensors, sequences of tensors, sequences and maps
+ /// tensors, sequences of tensors, sequences and maps.
/// It extends NamedOnnxValue, exposes the OnnxValueType and Tensor type
/// The class must be disposed of.
/// It disposes of _ortValueHolder that owns the underlying Ort output value and
@@ -419,6 +419,10 @@ namespace Microsoft.ML.OnnxRuntime
#region IDisposable Support
+ ///
+ /// IDisposable implementation
+ ///
+ /// true if invoked by Dispose()
protected virtual void Dispose(bool disposing)
{
if(_disposed)
@@ -439,6 +443,9 @@ namespace Microsoft.ML.OnnxRuntime
_disposed = true;
}
+ ///
+ /// IDisposable implementation
+ ///
public void Dispose()
{
// Do not change this code. Put cleanup code in Dispose(bool disposing) above.
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/FixedBufferOnnxValue.cs b/csharp/src/Microsoft.ML.OnnxRuntime/FixedBufferOnnxValue.cs
index 4444ed85e6..77994186d8 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/FixedBufferOnnxValue.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/FixedBufferOnnxValue.cs
@@ -45,6 +45,10 @@ namespace Microsoft.ML.OnnxRuntime
#region IDisposable Support
+ ///
+ /// IDisposable implementation
+ ///
+ /// true if invoked from Dispose()
protected virtual void Dispose(bool disposing)
{
if(_disposed)
@@ -60,6 +64,9 @@ namespace Microsoft.ML.OnnxRuntime
_disposed = true;
}
+ ///
+ /// IDisposable implementation
+ ///
public void Dispose()
{
Dispose(true);
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.cs b/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.cs
index 9e6184eac9..e63efe5ff5 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.cs
@@ -12,11 +12,22 @@ using System.Buffers;
namespace Microsoft.ML.OnnxRuntime
{
///
- /// Represents an Inference Session on an ONNX Model
+ /// Represents an Inference Session on an ONNX Model.
+ /// This is an IDisposable class and it must be disposed of
+ /// using either an explicit call to the Dispose() method or
+ /// a pattern of using() block. If this is a member of another
+ /// class, that class must also become IDisposable and it must
+ /// dispose of InferenceSession in its Dispose() method.
///
public class InferenceSession : IDisposable
{
+ ///
+ /// A pointer to an underlying native instance of OrtSession
+ ///
protected IntPtr _nativeHandle;
+ ///
+ /// Dictionaries that represent input/output/overridableInitializers metadata
+ ///
protected Dictionary _inputMetadata, _outputMetadata, _overridableInitializerMetadata;
private SessionOptions _builtInSessionOptions = null;
private RunOptions _builtInRunOptions = null;
@@ -272,13 +283,12 @@ namespace Microsoft.ML.OnnxRuntime
}
///
- ///
/// Runs the loaded model for the given inputs and outputs.
///
/// Outputs need to be created with correct type and dimension to receive the fetched data.
///
/// Specify a collection of that indicates the input values.
- /// Specify a collection of that indicates the output values.
+ /// Specify a collection of that indicates the output values.
public void Run(
IReadOnlyCollection inputs,
IReadOnlyCollection outputs)
@@ -293,7 +303,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Outputs need to be created with correct type and dimension to receive the fetched data.
///
/// Specify a collection of that indicates the input values.
- /// Specify a collection of that indicates the output values.
+ /// Specify a collection of that indicates the output values.
///
public void Run(
IReadOnlyCollection inputs,
@@ -388,7 +398,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Specify a collection of string that indicates the input names. Should match .
/// Specify a collection of that indicates the input values.
- /// Specify a collection of that indicates the output values.
+ /// Specify a collection of that indicates the output values.
public void Run(
IReadOnlyCollection inputNames,
IReadOnlyCollection inputValues,
@@ -405,7 +415,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Specify a collection of string that indicates the input names. Should match .
/// Specify a collection of that indicates the input values.
- /// Specify a collection of that indicates the output values.
+ /// Specify a collection of that indicates the output values.
///
public void Run(
IReadOnlyCollection inputNames,
@@ -478,6 +488,7 @@ namespace Microsoft.ML.OnnxRuntime
/// call to retrieve output names. They will be paired with the returned OrtValues and combined into DisposbleNamedOnnxValues.
/// Otherwise, the method will retrieve output names from the OrtIoBinding instance.
/// It is an error if you supply a different number of names than the returned outputs
+ /// A disposable collection of DisposableNamedOnnxValue that encapsulate output OrtValues
public IDisposableReadOnlyCollection RunWithBindingAndNames(RunOptions runOptions, OrtIoBinding ioBinding, string[] names = null)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtRunWithBinding(Handle, runOptions.Handle, ioBinding.Handle));
@@ -514,8 +525,9 @@ namespace Microsoft.ML.OnnxRuntime
}
///
- /// Ends profiling for the session. Returns the profile file name.
- ///
+ /// Ends profiling for the session.
+ ///
+ /// Returns the profile file name.
public string EndProfiling()
{
IntPtr nameHandle = IntPtr.Zero;
@@ -642,6 +654,11 @@ namespace Microsoft.ML.OnnxRuntime
return result;
}
+ ///
+ /// This property queries model metadata, constructs
+ /// an instance of ModelMetadata and caches it
+ ///
+ /// Instance of ModelMetadata
public ModelMetadata ModelMetadata
{
get
@@ -931,12 +948,19 @@ namespace Microsoft.ML.OnnxRuntime
Dispose(false);
}
+ ///
+ /// IDisposable implementation
+ ///
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
+ ///
+ /// IDisposable implementation
+ ///
+ /// true if invoked from Dispose() method
protected virtual void Dispose(bool disposing)
{
if(_disposed)
@@ -978,51 +1002,42 @@ namespace Microsoft.ML.OnnxRuntime
///
public class NodeMetadata
{
- private OnnxValueType _onnxValueType;
- private int[] _dimensions;
- private string[] _symbolicDimensions;
- private Type _type;
-
internal NodeMetadata(OnnxValueType onnxValueType, int[] dimensions, string[] symbolicDimensions, Type type)
{
- _onnxValueType = onnxValueType;
- _dimensions = dimensions;
- _symbolicDimensions = symbolicDimensions;
- _type = type;
+ OnnxValueType = onnxValueType;
+ Dimensions = dimensions;
+ SymbolicDimensions = symbolicDimensions;
+ ElementType = type;
}
- public OnnxValueType OnnxValueType
- {
- get
- {
- return _onnxValueType;
- }
- }
+ ///
+ /// Type value of the node
+ ///
+ /// A value of OnnxValueType enum
+ public OnnxValueType OnnxValueType { get; }
- public int[] Dimensions
- {
- get
- {
- return _dimensions;
- }
- }
+ ///
+ /// Shape
+ ///
+ /// Array of dimensions
+ public int[] Dimensions { get; }
- public string[] SymbolicDimensions
- {
- get
- {
- return _symbolicDimensions;
- }
- }
+ ///
+ /// Symbolic dimensions
+ ///
+ /// Array of symbolic dimensions if present.
+ public string[] SymbolicDimensions { get; }
- public System.Type ElementType
- {
- get
- {
- return _type;
- }
- }
+ ///
+ /// .NET type that corresponds to this Node.
+ ///
+ /// System.Type
+ public System.Type ElementType { get; }
+ ///
+ /// Whether it is a Tensor
+ ///
+ /// currently always returns true
public bool IsTensor
{
get
@@ -1033,6 +1048,10 @@ namespace Microsoft.ML.OnnxRuntime
}
+ ///
+ /// A class that queries and caches model metadata and exposes
+ /// it as properties
+ ///
public class ModelMetadata
{
private string _producerName;
@@ -1138,6 +1157,10 @@ namespace Microsoft.ML.OnnxRuntime
}
+ ///
+ /// Producer name string
+ ///
+ /// producer name string
public string ProducerName
{
get
@@ -1146,6 +1169,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
+ ///
+ /// Graph name for this model
+ ///
+ /// graph name string
public string GraphName
{
get
@@ -1154,6 +1181,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
+ ///
+ /// Domain for this model
+ ///
+ /// domain name string
public string Domain
{
get
@@ -1162,6 +1193,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
+ ///
+ /// Unstructured model description
+ ///
+ /// description string
public string Description
{
get
@@ -1170,6 +1205,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
+ ///
+ /// Version number
+ ///
+ /// long version integer
public long Version
{
get
@@ -1178,6 +1217,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
+ ///
+ /// Custom metadata key/value pairs
+ ///
+ /// An instance of a Dictionary
public Dictionary CustomMetadataMap
{
get
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/NamedOnnxValue.cs b/csharp/src/Microsoft.ML.OnnxRuntime/NamedOnnxValue.cs
index d87de4db93..233e5fa9af 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/NamedOnnxValue.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/NamedOnnxValue.cs
@@ -15,9 +15,22 @@ namespace Microsoft.ML.OnnxRuntime
///
public class NamedOnnxValue
{
+ ///
+ /// Managed Tensor, Dictionary or IList
+ ///
protected Object _value;
+ ///
+ /// Name of the instance, model input/output
+ ///
protected string _name;
+ ///
+ /// Constructs an instance of NamedOnnxValue and represents
+ /// a model input to an inference session. It also represents a model output
+ /// when it serves as a base for DisposableNamedOnnxValue
+ ///
+ /// input/output name
+ /// Object that may be a tensor, Dictionary, IList
protected NamedOnnxValue(string name, Object value)
{
_name = name;
@@ -37,7 +50,15 @@ namespace Microsoft.ML.OnnxRuntime
return new NamedOnnxValue(name, value);
}
+ ///
+ /// Exposes the name of the model input/output
+ ///
+ /// name string
public string Name { get { return _name; } set { _name = value; } }
+ ///
+ /// Exposes the underlying managed object
+ ///
+ /// object
public Object Value { get { return _value; } set { _value = value; } }
///
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/OnnxRuntime.cs b/csharp/src/Microsoft.ML.OnnxRuntime/OnnxRuntime.cs
index f2c58a7776..7f0208e007 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/OnnxRuntime.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/OnnxRuntime.cs
@@ -15,13 +15,19 @@ namespace Microsoft.ML.OnnxRuntime
public LogLevel LogLevel { get; set; }
}
+ ///
+ /// Logging level used to specify amount of logging when
+ /// creating environment. The lower the value is the more logging
+ /// will be output. A specific value output includes everything
+ /// that higher values output.
+ ///
public enum LogLevel
{
- Verbose = 0,
- Info = 1,
- Warning = 2,
- Error = 3,
- Fatal = 4
+ Verbose = 0, // Everything
+ Info = 1, // Informational
+ Warning = 2, // Warnings
+ Error = 3, // Errors
+ Fatal = 4 // Results in the termination of the application.
}
///
@@ -82,6 +88,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Returns an instance of OrtEnv
/// It returns the same instance on every call - `OrtEnv` is singleton
///
+ /// Returns a singleton instance of OrtEnv that represents native OrtEnv object
public static OrtEnv Instance() { return _instance.Value; }
///
@@ -104,6 +111,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Queries all the execution providers supported in the native onnxruntime shared library
///
+ /// an array of strings that represent execution provider names
public string[] GetAvailableProviders()
{
IntPtr availableProvidersHandle = IntPtr.Zero;
@@ -134,6 +142,10 @@ namespace Microsoft.ML.OnnxRuntime
#endregion
#region SafeHandle
+ ///
+ /// Overrides SafeHandle.IsInvalid
+ ///
+ /// returns true if handle is equal to Zero
public override bool IsInvalid
{
get
@@ -142,6 +154,11 @@ namespace Microsoft.ML.OnnxRuntime
}
}
+ ///
+ /// Overrides SafeHandle.ReleaseHandle() to properly dispose of
+ /// the native instance of OrtEnv
+ ///
+ /// always returns true
protected override bool ReleaseHandle()
{
NativeMethods.OrtReleaseEnv(handle);
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/OrtAllocator.cs b/csharp/src/Microsoft.ML.OnnxRuntime/OrtAllocator.cs
index 0f610d2a23..e9419da5c2 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/OrtAllocator.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/OrtAllocator.cs
@@ -12,8 +12,8 @@ namespace Microsoft.ML.OnnxRuntime
///
public enum OrtAllocatorType
{
- DeviceAllocator = 0,
- ArenaAllocator = 1
+ DeviceAllocator = 0, // Device specific allocator
+ ArenaAllocator = 1 // Memory arena
}
///
@@ -53,6 +53,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Default CPU based instance
///
+ /// Singleton instance of OrtMemoryInfo for CPU memory
public static OrtMemoryInfo DefaultInstance
{
get
@@ -69,6 +70,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
+ ///
+ /// Overrides SafeHandle.IsInvalid
+ ///
+ /// returns true if handle is equal to Zero
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
///
@@ -84,10 +89,20 @@ namespace Microsoft.ML.OnnxRuntime
_owned = owned;
}
- // Predefined utf8 encoded allocator names. Use them to construct an instance of
- // OrtMemoryInfo to avoid UTF-16 to UTF-8 conversion
+ ///
+ /// Predefined utf8 encoded allocator names. Use them to construct an instance of
+ /// OrtMemoryInfo to avoid UTF-16 to UTF-8 conversion costs.
+ ///
public static readonly byte[] allocatorCPU = Encoding.UTF8.GetBytes("Cpu" + Char.MinValue);
+ ///
+ /// Predefined utf8 encoded allocator names. Use them to construct an instance of
+ /// OrtMemoryInfo to avoid UTF-16 to UTF-8 conversion costs.
+ ///
public static readonly byte[] allocatorCUDA = Encoding.UTF8.GetBytes("Cuda" + Char.MinValue);
+ ///
+ /// Predefined utf8 encoded allocator names. Use them to construct an instance of
+ /// OrtMemoryInfo to avoid UTF-16 to UTF-8 conversion costs.
+ ///
public static readonly byte[] allocatorCUDA_PINNED = Encoding.UTF8.GetBytes("CudaPinned" + Char.MinValue);
///
/// Create an instance of OrtMemoryInfo according to the specification
@@ -142,6 +157,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Returns device ID
///
+ /// returns integer Id value
public int Id
{
get
@@ -157,7 +173,7 @@ namespace Microsoft.ML.OnnxRuntime
/// as names would conflict with the returned type. Also, there are native
/// calls behind them so exposing them as Get() would be appropriate.
///
- ///
+ /// OrtMemoryType for the instance
public OrtMemType GetMemoryType()
{
OrtMemType memoryType = OrtMemType.Default;
@@ -166,9 +182,9 @@ namespace Microsoft.ML.OnnxRuntime
}
///
- /// Returns alloctor type
+ /// Fetches allocator type from the underlying OrtAllocator
///
- ///
+ /// Returns allocator type
public OrtAllocatorType GetAllocatorType()
{
OrtAllocatorType allocatorType = OrtAllocatorType.ArenaAllocator;
@@ -176,6 +192,11 @@ namespace Microsoft.ML.OnnxRuntime
return allocatorType;
}
+ ///
+ /// Overrides System.Object.Equals(object)
+ ///
+ /// object to compare to
+ /// true if obj is an instance of OrtMemoryInfo and is equal to this
public override bool Equals(object obj)
{
var other = obj as OrtMemoryInfo;
@@ -186,6 +207,11 @@ namespace Microsoft.ML.OnnxRuntime
return Equals(other);
}
+ ///
+ /// Compares this instance with another
+ ///
+ /// OrtMemoryInfo to compare to
+ /// true if instances are equal according to OrtCompareMemoryInfo.
public bool Equals(OrtMemoryInfo other)
{
if(this == other)
@@ -197,12 +223,21 @@ namespace Microsoft.ML.OnnxRuntime
return (result == 0);
}
+ ///
+ /// Overrides System.Object.GetHashCode()
+ ///
+ /// integer hash value
public override int GetHashCode()
{
return Pointer.ToInt32();
}
#region SafeHandle
+ ///
+ /// Overrides SafeHandle.ReleaseHandle() to properly dispose of
+ /// the native instance of OrtMemoryInfo
+ ///
+ /// always returns true
protected override bool ReleaseHandle()
{
// If this instance exposes OrtMemoryInfo that belongs
@@ -255,13 +290,22 @@ namespace Microsoft.ML.OnnxRuntime
///
internal IntPtr Pointer { get { return handle; } }
+ ///
+ /// Overrides SafeHandle.IsInvalid
+ ///
+ /// returns true if handle is equal to Zero
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
///
- /// Returns the size of the allocation
+ /// Size of the allocation
///
+ /// uint size of the allocation in bytes
public uint Size { get; private set; }
+ ///
+ /// Memory Information about this allocation
+ ///
+ /// Returns OrtMemoryInfo from the allocator
public OrtMemoryInfo Info
{
get
@@ -270,6 +314,11 @@ namespace Microsoft.ML.OnnxRuntime
}
}
#region SafeHandle
+ ///
+ /// Overrides SafeHandle.ReleaseHandle() to deallocate
+ /// a chunk of memory using the specified allocator.
+ ///
+ /// always returns true
protected override bool ReleaseHandle()
{
_allocator.FreeMemory(handle);
@@ -318,6 +367,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
+ ///
+ /// Overrides SafeHandle.IsInvalid
+ ///
+ /// returns true if handle is equal to Zero
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
///
@@ -349,6 +402,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// OrtMemoryInfo instance owned by the allocator
///
+ /// Instance of OrtMemoryInfo describing this allocator
public OrtMemoryInfo Info
{
get
@@ -363,8 +417,8 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Allocate native memory. Returns a disposable instance of OrtMemoryAllocation
///
- ///
- ///
+ /// number of bytes to allocate
+ /// Instance of OrtMemoryAllocation
public OrtMemoryAllocation Allocate(uint size)
{
IntPtr allocation = IntPtr.Zero;
@@ -373,15 +427,20 @@ namespace Microsoft.ML.OnnxRuntime
}
///
- /// This internal interface is used for freeing memory
+ /// This internal interface is used for freeing memory.
///
- ///
+ /// pointer to a native memory chunk allocated by this allocator instance
internal void FreeMemory(IntPtr allocation)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtAllocatorFree(handle, allocation));
}
#region SafeHandle
+ ///
+ /// Overrides SafeHandle.ReleaseHandle() to properly dispose of
+ /// the native instance of OrtAllocator
+ ///
+ /// always returns true
protected override bool ReleaseHandle()
{
// Singleton default allocator is not owned
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/OrtIoBinding.cs b/csharp/src/Microsoft.ML.OnnxRuntime/OrtIoBinding.cs
index 92ce948251..09c86ad330 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/OrtIoBinding.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/OrtIoBinding.cs
@@ -34,6 +34,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
+ ///
+ /// Overrides SafeHandle.IsInvalid
+ ///
+ /// returns true if handle is equal to Zero
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
///
@@ -238,6 +242,11 @@ namespace Microsoft.ML.OnnxRuntime
}
#region SafeHandle
+ ///
+ /// Overrides SafeHandle.ReleaseHandle() to properly dispose of
+ /// the native instance of OrtIoBinding
+ ///
+ /// always returns true
protected override bool ReleaseHandle()
{
NativeMethods.OrtReleaseIoBinding(handle);
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/OrtValue.cs b/csharp/src/Microsoft.ML.OnnxRuntime/OrtValue.cs
index c7ebcf2820..b75d6df1a8 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/OrtValue.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/OrtValue.cs
@@ -9,26 +9,32 @@ using System.Runtime.InteropServices;
namespace Microsoft.ML.OnnxRuntime
{
+ ///
+ /// A type of data that OrtValue encapsulates.
+ ///
public enum OnnxValueType
{
- ONNX_TYPE_UNKNOWN = 0,
- ONNX_TYPE_TENSOR = 1,
- ONNX_TYPE_SEQUENCE = 2,
- ONNX_TYPE_MAP = 3,
- ONNX_TYPE_OPAQUE = 4,
- ONNX_TYPE_SPARSETENSOR = 5,
+ ONNX_TYPE_UNKNOWN = 0, // Not set
+ ONNX_TYPE_TENSOR = 1, // It's a Tensor
+ ONNX_TYPE_SEQUENCE = 2, // It's an Onnx sequence which may be a sequence of Tensors/Maps/Sequences
+ ONNX_TYPE_MAP = 3, // It's a map
+ ONNX_TYPE_OPAQUE = 4, // It's an experimental Opaque object
+ ONNX_TYPE_SPARSETENSOR = 5, // It's a Sparse Tensor
}
///
- /// Represents a disposable OrtValue
+ /// Represents a disposable OrtValue.
+ /// This class exposes a native instance of OrtValue.
+ /// The class implements IDisposable via SafeHandle and must
+ /// be disposed.
///
public class OrtValue : SafeHandle
{
///
- /// Use factory methods to instantiate
+ /// Use factory methods to instantiate this class
///
- ///
- /// Default true, own the raw handle
+ /// Pointer to a native instance of OrtValue
+ /// Default true, own the raw handle. Otherwise, the handle is owned by another instance
/// However, we use this class to expose OrtValue that is owned by DisposableNamedOnnxValue
///
internal OrtValue(IntPtr handle, bool owned = true)
@@ -39,21 +45,19 @@ namespace Microsoft.ML.OnnxRuntime
internal IntPtr Handle { get { return handle; } }
+ ///
+ /// Overrides SafeHandle.IsInvalid
+ ///
+ /// returns true if handle is equal to Zero
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
#region NamedOnnxValue/DisposableOnnxValue accommodations
- // DisposableOnnxValue class owns Native handle to OrtValue
- // NamedOnnxValue does not own anything but creates a new one
- // which presents a fundamental semantic difference to ToOrtValue interface.
- //
- // We provide a way to relinquish ownership as well as return an instance of
- // OrtValue that is still disposable but does not have ownership
- //
///
/// This internal interface is used to transfer ownership elsewhere.
/// This instance must still be disposed in case there are other native
- /// objects still owned.
+ /// objects still owned. This is a convenience method to ensure that an underlying
+ /// OrtValue is disposed exactly once when an exception is thrown.
///
///
internal IntPtr Disown()
@@ -74,7 +78,7 @@ namespace Microsoft.ML.OnnxRuntime
/// or a piece of pinned managed memory.
///
/// The resulting OrtValue does not own the underlying memory buffer and will not attempt to
- /// deallocated it.
+ /// deallocate it.
///
/// Memory Info. For managed memory it is a default cpu.
/// For Native memory must be obtained from the allocator or OrtMemoryAllocation instance
@@ -117,14 +121,17 @@ namespace Microsoft.ML.OnnxRuntime
///
/// This is a factory method creates a native Onnxruntime OrtValue containing a tensor.
- /// However, it re-uses managed memory if possible.
+ /// The method will attempt to pin managed memory so no copying occurs when data is passed down
+ /// to native code.
///
/// Tensor object
/// For all tensor types but string tensors we endeavor to use managed memory
- /// to avoid additional allocation and copy. This out parameter represents a chunk of pinned memory
+ /// to avoid additional allocation and copy. This out parameter represents a chunk of pinned memory which will need
+ /// to be disposed when no longer needed. The lifespan of memoryHandle should eclipse the lifespan of the corresponding
+ /// OrtValue.
///
/// discovered tensor element type
- ///
+ /// An instance of OrtValue constructed on top of the object
public static OrtValue CreateFromTensorObject(Object value, out MemoryHandle? memoryHandle,
out TensorElementType elementType)
{
@@ -352,6 +359,11 @@ namespace Microsoft.ML.OnnxRuntime
}
#region SafeHandle
+ ///
+ /// Overrides SafeHandle.ReleaseHandle() to properly dispose of
+ /// the native instance of OrtValue
+ ///
+ /// always returns true
protected override bool ReleaseHandle()
{
// We have to surrender ownership to some legacy classes
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/RunOptions.cs b/csharp/src/Microsoft.ML.OnnxRuntime/RunOptions.cs
index ed2e8cebee..b37db212c0 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/RunOptions.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/RunOptions.cs
@@ -5,7 +5,9 @@ using System.Runtime.InteropServices;
namespace Microsoft.ML.OnnxRuntime
{
- /// Sets various runtime options.
+ ///
+ /// Sets various runtime options.
+ ///
public class RunOptions : SafeHandle
{
internal IntPtr Handle
@@ -16,13 +18,19 @@ namespace Microsoft.ML.OnnxRuntime
}
}
-
+ ///
+ /// Default __ctor. Creates default RunOptions
+ ///
public RunOptions()
:base(IntPtr.Zero, true)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtCreateRunOptions(out handle));
}
+ ///
+ /// Overrides SafeHandle.IsInvalid
+ ///
+ /// returns true if handle is equal to Zero
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
///
@@ -85,6 +93,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Sets a flag to terminate all Run() calls that are currently using this RunOptions object
/// Default = false
///
+ /// terminate flag value
public bool Terminate
{
get
@@ -109,7 +118,11 @@ namespace Microsoft.ML.OnnxRuntime
#region SafeHandle
-
+ ///
+ /// Overrides SafeHandle.ReleaseHandle() to properly dispose of
+ /// the native instance of RunOptions
+ ///
+ /// always returns true
protected override bool ReleaseHandle()
{
NativeMethods.OrtReleaseRunOptions(handle);
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.cs b/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.cs
index a84a6b3094..548359bc29 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.cs
@@ -2,6 +2,7 @@
// Licensed under the MIT License.
using System;
+using System.Runtime.InteropServices;
using System.Text;
using System.Runtime.InteropServices;
using System.IO;
@@ -9,7 +10,8 @@ using System.IO;
namespace Microsoft.ML.OnnxRuntime
{
///
- /// TODO Add documentation about which optimizations are enabled for each value.
+ /// Graph optimization level to use with SessionOptions
+ /// [https://github.com/microsoft/onnxruntime/blob/master/docs/ONNX_Runtime_Graph_Optimizations.md]
///
public enum GraphOptimizationLevel
{
@@ -36,7 +38,7 @@ namespace Microsoft.ML.OnnxRuntime
///
public class SessionOptions : SafeHandle
{
- // Delayloaded CUDA or cuDNN DLLs. Currently, delayload is disabled. See cmake/CMakeLists.txt for more information.
+ // Delay-loaded CUDA or cuDNN DLLs. Currently, delayload is disabled. See cmake/CMakeLists.txt for more information.
private static string[] cudaDelayLoadedLibs = { };
#region Constructor and Factory methods
@@ -91,6 +93,10 @@ namespace Microsoft.ML.OnnxRuntime
#endregion
#region ExecutionProviderAppends
+ ///
+ /// Appends CPU EP to a list of available execution providers for the session.
+ ///
+ /// 1 - use arena, 0 - do not use arena
public void AppendExecutionProvider_CPU(int useArena)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_CPU(handle, useArena));
@@ -99,6 +105,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Use only if you have the onnxruntime package specific to this Execution Provider.
///
+ /// 1 - use allocation arena, 0 - otherwise
public void AppendExecutionProvider_Dnnl(int useArena)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Dnnl(handle, useArena));
@@ -107,6 +114,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Use only if you have the onnxruntime package specific to this Execution Provider.
///
+ /// integer device ID
public void AppendExecutionProvider_CUDA(int deviceId)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_CUDA(handle, deviceId));
@@ -115,14 +123,17 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Use only if you have the onnxruntime package specific to this Execution Provider.
///
+ /// device identification
public void AppendExecutionProvider_DML(int deviceId)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_DML(handle, deviceId));
}
+
///
/// Use only if you have the onnxruntime package specific to this Execution Provider.
///
+ /// device identification, default empty string
public void AppendExecutionProvider_OpenVINO(string deviceId = "")
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_OpenVINO(handle, deviceId));
@@ -131,6 +142,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Use only if you have the onnxruntime package specific to this Execution Provider.
///
+ /// device identification
public void AppendExecutionProvider_Tensorrt(int deviceId)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Tensorrt(handle, deviceId));
@@ -139,6 +151,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Use only if you have the onnxruntime package specific to this Execution Provider.
///
+ /// device identification
public void AppendExecutionProvider_MIGraphX(int deviceId)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_MIGraphX(handle, deviceId));
@@ -147,6 +160,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Use only if you have the onnxruntime package specific to this Execution Provider.
///
+ /// nnapi specific flag mask
public void AppendExecutionProvider_Nnapi(uint nnapi_flags)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Nnapi(handle, nnapi_flags));
@@ -155,6 +169,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Use only if you have the onnxruntime package specific to this Execution Provider.
///
+ /// string with Nuphar specific settings
public void AppendExecutionProvider_Nuphar(string settings = "")
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Nuphar(handle, 1, settings));
@@ -167,9 +182,10 @@ namespace Microsoft.ML.OnnxRuntime
/// (Deprecated) Loads a DLL named 'libraryPath' and looks for this entry point:
/// OrtStatus* RegisterCustomOps(OrtSessionOptions* options, const OrtApiBase* api);
/// It then passes in the provided session options to this function along with the api base.
- /// Deprecated in favor of RegisterCustomOpLibraryV2() because it provides users with the library handle
+ /// Deprecated in favor of RegisterCustomOpLibraryV2() because it provides users with the library handle
/// to release when all sessions relying on it are destroyed
///
+ /// path to the custom op library
[ObsoleteAttribute("RegisterCustomOpLibrary(...) is obsolete. Use RegisterCustomOpLibraryV2(...) instead.", false)]
public void RegisterCustomOpLibrary(string libraryPath)
{
@@ -190,6 +206,8 @@ namespace Microsoft.ML.OnnxRuntime
/// session options are destroyed, or if an error occurs and it is non null.
/// Hint: .NET Core 3.1 has a 'NativeLibrary' class that can be used to free the library handle
///
+ /// Custom op library path
+ /// out parameter, library handle
public void RegisterCustomOpLibraryV2(string libraryPath, out IntPtr libraryHandle)
{
var libraryPathPinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(libraryPath), GCHandleType.Pinned);
@@ -204,11 +222,10 @@ namespace Microsoft.ML.OnnxRuntime
/// that is same as the name passed to this API call, ORT will use this initializer instance
/// instead of deserializing one from the model file. This is useful when you want to share
/// the same initializer across sessions.
- /// \param name name of the initializer
- /// \param val OrtValue containing the initializer. Lifetime of 'val' and the underlying initializer buffer must be
- /// managed by the user (created using the CreateTensorWithDataAsOrtValue API) and it must outlive the session object
- /// to which it is added.
///
+ /// name of the initializer
+ /// OrtValue containing the initializer. Lifetime of 'val' and the underlying initializer buffer must be
+ /// managed by the user (created using the CreateTensorWithDataAsOrtValue API) and it must outlive the session object
public void AddInitializer(string name, OrtValue ortValue)
{
var utf8NamePinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(name), GCHandleType.Pinned);
@@ -222,6 +239,8 @@ namespace Microsoft.ML.OnnxRuntime
/// Set a single session configuration entry as a pair of strings
/// If a configuration with same key exists, this will overwrite the configuration with the given configValue
///
+ /// config key name
+ /// config key value
public void AddSessionConfigEntry(string configKey, string configValue)
{
var utf8NameConfigKeyPinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(configKey), GCHandleType.Pinned);
@@ -230,7 +249,7 @@ namespace Microsoft.ML.OnnxRuntime
using (var pinnedConfigKeyName = new PinnedGCHandle(utf8NameConfigKeyPinned))
using (var pinnedConfigValueName = new PinnedGCHandle(utf8NameConfigValuePinned))
{
- NativeApiStatus.VerifySuccess(NativeMethods.OrtAddSessionConfigEntry(handle,
+ NativeApiStatus.VerifySuccess(NativeMethods.OrtAddSessionConfigEntry(handle,
pinnedConfigKeyName.Pointer, pinnedConfigValueName.Pointer));
}
}
@@ -239,6 +258,8 @@ namespace Microsoft.ML.OnnxRuntime
/// Override symbolic dimensions (by specific denotation strings) with actual values if known at session initialization time to enable
/// optimizations that can take advantage of fixed values (such as memory planning, etc)
///
+ /// denotation name
+ /// denotation value
public void AddFreeDimensionOverride(string dimDenotation, long dimValue)
{
var utf8DimDenotationPinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(dimDenotation), GCHandleType.Pinned);
@@ -252,6 +273,8 @@ namespace Microsoft.ML.OnnxRuntime
/// Override symbolic dimensions (by specific name strings) with actual values if known at session initialization time to enable
/// optimizations that can take advantage of fixed values (such as memory planning, etc)
///
+ /// dimension name
+ /// dimension value
public void AddFreeDimensionOverrideByName(string dimName, long dimValue)
{
var utf8DimNamePinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(dimName), GCHandleType.Pinned);
@@ -271,11 +294,16 @@ namespace Microsoft.ML.OnnxRuntime
}
#region Public Properties
+ ///
+ /// Overrides SafeHandle.IsInvalid
+ ///
+ /// returns true if handle is equal to Zero
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
///
/// Enables the use of the memory allocation patterns in the first Run() call for subsequent runs. Default = true.
///
+ /// returns enableMemoryPattern flag value
public bool EnableMemoryPattern
{
get
@@ -311,6 +339,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Enables profiling of InferenceSession.Run() calls. Default is false
///
+ /// returns _enableProfiling flag value
public bool EnableProfiling
{
get
@@ -336,6 +365,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Set filepath to save optimized model after graph level transformations. Default is empty, which implies saving is disabled.
///
+ /// returns _optimizedModelFilePath value
public string OptimizedModelFilePath
{
get
@@ -358,6 +388,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Enables Arena allocator for the CPU memory allocations. Default is true.
///
+ /// returns _enableCpuMemArena flag value
public bool EnableCpuMemArena
{
get
@@ -383,8 +414,8 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Log Id to be used for the session. Default is empty string.
- /// TODO: Should it be named LogTag as in RunOptions?
///
+ /// returns _logId value
public string LogId
{
get
@@ -403,6 +434,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Log Severity Level for the session logs. Default = ORT_LOGGING_LEVEL_WARNING
///
+ /// returns _logSeverityLevel value
public OrtLoggingLevel LogSeverityLevel
{
get
@@ -421,6 +453,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Log Verbosity Level for the session logs. Default = 0. Valid values are >=0.
/// This takes into effect only when the LogSeverityLevel is set to ORT_LOGGING_LEVEL_VERBOSE.
///
+ /// returns _logVerbosityLevel value
public int LogVerbosityLevel
{
get
@@ -440,6 +473,7 @@ namespace Microsoft.ML.OnnxRuntime
// Sets the number of threads used to parallelize the execution within nodes
// A value of 0 means ORT will pick a default
///
+ /// returns _intraOpNumThreads value
public int IntraOpNumThreads
{
get
@@ -459,6 +493,7 @@ namespace Microsoft.ML.OnnxRuntime
// If sequential execution is enabled this value is ignored
// A value of 0 means ORT will pick a default
///
+ /// returns _interOpNumThreads value
public int InterOpNumThreads
{
get
@@ -476,6 +511,7 @@ namespace Microsoft.ML.OnnxRuntime
///
/// Sets the graph optimization level for the session. Default is set to ORT_ENABLE_ALL.
///
+ /// returns _graphOptimizationLevel value
public GraphOptimizationLevel GraphOptimizationLevel
{
get
@@ -494,6 +530,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Sets the execution mode for the session. Default is set to ORT_SEQUENTIAL.
/// See [ONNX_Runtime_Perf_Tuning.md] for more details.
///
+ /// returns _executionMode value
public ExecutionMode ExecutionMode
{
get
@@ -543,7 +580,11 @@ namespace Microsoft.ML.OnnxRuntime
#endregion
#region SafeHandle
-
+ ///
+ /// Overrides SafeHandle.ReleaseHandle() to properly dispose of
+ /// the native instance of SessionOptions
+ ///
+ /// always returns true
protected override bool ReleaseHandle()
{
NativeMethods.OrtReleaseSessionOptions(handle);
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/ArrayTensorExtensions.cs b/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/ArrayTensorExtensions.cs
index 5189ddf71e..b26d215840 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/ArrayTensorExtensions.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/ArrayTensorExtensions.cs
@@ -14,6 +14,9 @@ using System;
namespace Microsoft.ML.OnnxRuntime.Tensors
{
+ ///
+ /// A static class that houses static DenseTensor extension methods
+ ///
public static class ArrayTensorExtensions
{
///
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/DenseTensor.cs b/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/DenseTensor.cs
index d71ff5ddca..5752a3f65c 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/DenseTensor.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/DenseTensor.cs
@@ -113,6 +113,12 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
Buffer.Span[index] = value;
}
+ ///
+ /// Overrides Tensor.CopyTo(). Copies the content of the Tensor
+ /// to the specified array starting with arrayIndex
+ ///
+ /// destination array
+ /// start index
protected override void CopyTo(T[] array, int arrayIndex)
{
if (array == null)
@@ -127,6 +133,11 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
Buffer.Span.CopyTo(array.AsSpan(arrayIndex));
}
+ ///
+ /// Determines the index of a specific item in the Tensor<T>.
+ ///
+ /// Object to locate
+ /// The index of item if found in the tensor; otherwise, -1
protected override int IndexOf(T item)
{
// TODO: use Span.IndexOf when/if it removes the IEquatable type constraint
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/Tensor.cs b/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/Tensor.cs
index eb56c10763..21613a2fab 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/Tensor.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/Tensors/Tensor.cs
@@ -64,6 +64,9 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
///
public struct Float16
{
+ ///
+ /// float16 representation bits
+ ///
public ushort value;
///
/// Ctor
@@ -77,6 +80,7 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
/// Converts to ushort
///
/// instance of Float16
+ /// value member
public static implicit operator ushort (Float16 f) { return f.value; }
///
/// Converts a 16-bit unsigned integer to a Float16.
@@ -143,6 +147,9 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
///
public struct BFloat16
{
+ ///
+ /// bfloat16 representation bits
+ ///
public ushort value;
///
/// Ctor
@@ -156,6 +163,7 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
/// Converts to ushort
///
/// instance of BFloat16
+ /// value member
public static implicit operator ushort(BFloat16 bf) { return bf.value; }
///
/// Converts a 16-bit unsigned integer to a BFloat16.
@@ -219,9 +227,26 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
///
public class TensorTypeInfo
{
+ ///
+ /// TensorElementType enum
+ ///
+ /// type enum value
public TensorElementType ElementType { get; private set; }
+ ///
+ /// Size of the stored primitive type in bytes
+ ///
+ /// size in bytes
public int TypeSize { get; private set; }
+ ///
+ /// Whether the type is a string
+ ///
+ /// true if Tensor element type is a string
public bool IsString { get { return ElementType == TensorElementType.String; } }
+ ///
+ /// Ctor
+ ///
+ /// TensorElementType value
+ /// size of the type in bytes
public TensorTypeInfo(TensorElementType elementType, int typeSize)
{
ElementType = elementType;
@@ -234,9 +259,26 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
///
public class TensorElementTypeInfo
{
+ ///
+ /// Tensor element type
+ ///
+ /// System.Type
public Type TensorType { get; private set; }
+ ///
+ /// Size of the stored primitive type in bytes
+ ///
+ /// size in bytes
public int TypeSize { get; private set; }
+ ///
+ /// Whether the type is a string
+ ///
+ /// true if Tensor element type is a string
public bool IsString { get; private set; }
+ ///
+ /// Ctor
+ ///
+ /// Tensor element type
+ /// type size in bytes
public TensorElementTypeInfo(Type type, int typeSize)
{
TensorType = type;
@@ -281,6 +323,10 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
}
private readonly Type _primitiveType;
+ ///
+ /// Constructs TensorBase
+ ///
+ /// primitive type the deriving class is using
protected TensorBase(Type primitiveType)
{
// Should hold as we rely on this to pass arrays of these
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests/Microsoft.ML.OnnxRuntime.Tests.csproj b/csharp/test/Microsoft.ML.OnnxRuntime.Tests/Microsoft.ML.OnnxRuntime.Tests.csproj
index abdaae0209..896c890d8e 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests/Microsoft.ML.OnnxRuntime.Tests.csproj
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests/Microsoft.ML.OnnxRuntime.Tests.csproj
@@ -65,7 +65,7 @@
-
+
diff --git a/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Microsoft.ML.OnnxRuntime.PerfTool.csproj b/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Microsoft.ML.OnnxRuntime.PerfTool.csproj
index a42af0df09..361e880444 100644
--- a/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Microsoft.ML.OnnxRuntime.PerfTool.csproj
+++ b/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Microsoft.ML.OnnxRuntime.PerfTool.csproj
@@ -58,7 +58,7 @@
-
+
diff --git a/tools/ci_build/github/Doxyfile_csharp.cfg b/tools/ci_build/github/Doxyfile_csharp.cfg
new file mode 100644
index 0000000000..adbea31332
--- /dev/null
+++ b/tools/ci_build/github/Doxyfile_csharp.cfg
@@ -0,0 +1,343 @@
+## Onnxruntime C# API Doxygen configuration file
+# Doxyfile 1.8.20
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+DOXYFILE_ENCODING = UTF-8
+PROJECT_NAME = "Onnxruntime"
+PROJECT_NUMBER =
+PROJECT_BRIEF =
+PROJECT_LOGO =
+OUTPUT_DIRECTORY = $(ORT_DOXY_OUT)\csharp_dox
+CREATE_SUBDIRS = NO
+ALLOW_UNICODE_NAMES = NO
+OUTPUT_LANGUAGE = English
+OUTPUT_TEXT_DIRECTION = None
+BRIEF_MEMBER_DESC = YES
+REPEAT_BRIEF = YES
+ABBREVIATE_BRIEF = "The $name class" \
+ "The $name widget" \
+ "The $name file" \
+ is \
+ provides \
+ specifies \
+ contains \
+ represents \
+ a \
+ an \
+ the
+ALWAYS_DETAILED_SEC = NO
+INLINE_INHERITED_MEMB = NO
+FULL_PATH_NAMES = YES
+STRIP_FROM_PATH =
+STRIP_FROM_INC_PATH =
+SHORT_NAMES = NO
+JAVADOC_AUTOBRIEF = NO
+JAVADOC_BANNER = NO
+QT_AUTOBRIEF = NO
+MULTILINE_CPP_IS_BRIEF = NO
+PYTHON_DOCSTRING = YES
+INHERIT_DOCS = YES
+SEPARATE_MEMBER_PAGES = NO
+TAB_SIZE = 4
+ALIASES =
+OPTIMIZE_OUTPUT_FOR_C = NO
+OPTIMIZE_OUTPUT_JAVA = NO
+OPTIMIZE_FOR_FORTRAN = NO
+OPTIMIZE_OUTPUT_VHDL = NO
+OPTIMIZE_OUTPUT_SLICE = NO
+EXTENSION_MAPPING =
+MARKDOWN_SUPPORT = YES
+TOC_INCLUDE_HEADINGS = 5
+AUTOLINK_SUPPORT = YES
+BUILTIN_STL_SUPPORT = NO
+CPP_CLI_SUPPORT = NO
+SIP_SUPPORT = NO
+IDL_PROPERTY_SUPPORT = YES
+DISTRIBUTE_GROUP_DOC = NO
+GROUP_NESTED_COMPOUNDS = NO
+SUBGROUPING = YES
+INLINE_GROUPED_CLASSES = NO
+INLINE_SIMPLE_STRUCTS = NO
+TYPEDEF_HIDES_STRUCT = NO
+LOOKUP_CACHE_SIZE = 0
+NUM_PROC_THREADS = 1
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+EXTRACT_ALL = NO
+EXTRACT_PRIVATE = NO
+EXTRACT_PRIV_VIRTUAL = NO
+EXTRACT_PACKAGE = NO
+EXTRACT_STATIC = NO
+EXTRACT_LOCAL_CLASSES = YES
+EXTRACT_LOCAL_METHODS = NO
+EXTRACT_ANON_NSPACES = NO
+HIDE_UNDOC_MEMBERS = NO
+HIDE_UNDOC_CLASSES = NO
+HIDE_FRIEND_COMPOUNDS = NO
+HIDE_IN_BODY_DOCS = NO
+INTERNAL_DOCS = NO
+CASE_SENSE_NAMES = NO
+HIDE_SCOPE_NAMES = NO
+HIDE_COMPOUND_REFERENCE= NO
+SHOW_INCLUDE_FILES = YES
+SHOW_GROUPED_MEMB_INC = NO
+FORCE_LOCAL_INCLUDES = NO
+INLINE_INFO = YES
+SORT_MEMBER_DOCS = YES
+SORT_BRIEF_DOCS = NO
+SORT_MEMBERS_CTORS_1ST = NO
+SORT_GROUP_NAMES = NO
+SORT_BY_SCOPE_NAME = NO
+STRICT_PROTO_MATCHING = NO
+GENERATE_TODOLIST = YES
+GENERATE_TESTLIST = YES
+GENERATE_BUGLIST = YES
+GENERATE_DEPRECATEDLIST= YES
+ENABLED_SECTIONS =
+MAX_INITIALIZER_LINES = 30
+SHOW_USED_FILES = NO
+SHOW_FILES = YES
+SHOW_NAMESPACES = YES
+FILE_VERSION_FILTER = "git -C $(ORT_DOXY_SRC) log -n 1 --format=%h -- afile"
+LAYOUT_FILE =
+CITE_BIB_FILES =
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+QUIET = NO
+WARNINGS = YES
+WARN_IF_UNDOCUMENTED = YES
+WARN_IF_DOC_ERROR = YES
+WARN_NO_PARAMDOC = YES
+WARN_AS_ERROR = YES
+WARN_FORMAT = "$file:$line: $text"
+WARN_LOGFILE =
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+INPUT = $(ORT_DOXY_SRC)\csharp\src\Microsoft.ML.OnnxRuntime \
+ $(ORT_DOXY_SRC)\csharp\src\Microsoft.ML.OnnxRuntime\Tensors
+INPUT_ENCODING = UTF-8
+FILE_PATTERNS = *.cs
+RECURSIVE = NO
+EXCLUDE =
+EXCLUDE_SYMLINKS = NO
+EXCLUDE_PATTERNS = Native*.cs
+EXCLUDE_SYMBOLS =
+EXAMPLE_PATH = $(ORT_DOXY_SRC)\csharp\sample\Microsoft.ML.OnnxRuntime.InferenceSample\Program.cs
+EXAMPLE_PATTERNS = *
+EXAMPLE_RECURSIVE = NO
+IMAGE_PATH =
+INPUT_FILTER =
+FILTER_PATTERNS =
+FILTER_SOURCE_FILES = NO
+FILTER_SOURCE_PATTERNS =
+USE_MDFILE_AS_MAINPAGE =
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+SOURCE_BROWSER = NO
+INLINE_SOURCES = NO
+STRIP_CODE_COMMENTS = YES
+REFERENCED_BY_RELATION = NO
+REFERENCES_RELATION = NO
+REFERENCES_LINK_SOURCE = YES
+SOURCE_TOOLTIPS = YES
+USE_HTAGS = NO
+VERBATIM_HEADERS = YES
+CLANG_ASSISTED_PARSING = NO
+CLANG_OPTIONS =
+CLANG_DATABASE_PATH =
+#---------------------------------------------------------------------------
+# Configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+ALPHABETICAL_INDEX = YES
+COLS_IN_ALPHA_INDEX = 5
+IGNORE_PREFIX =
+#---------------------------------------------------------------------------
+# Configuration options related to the HTML output
+#---------------------------------------------------------------------------
+GENERATE_HTML = YES
+HTML_OUTPUT = html
+HTML_FILE_EXTENSION = .html
+HTML_HEADER =
+HTML_FOOTER =
+HTML_STYLESHEET =
+HTML_EXTRA_STYLESHEET =
+HTML_EXTRA_FILES =
+HTML_COLORSTYLE_HUE = 220
+HTML_COLORSTYLE_SAT = 100
+HTML_COLORSTYLE_GAMMA = 80
+HTML_TIMESTAMP = NO
+HTML_DYNAMIC_MENUS = YES
+HTML_DYNAMIC_SECTIONS = NO
+HTML_INDEX_NUM_ENTRIES = 100
+GENERATE_DOCSET = NO
+DOCSET_FEEDNAME = "Doxygen generated docs"
+DOCSET_BUNDLE_ID = org.doxygen.Project
+DOCSET_PUBLISHER_ID = org.doxygen.Publisher
+DOCSET_PUBLISHER_NAME = Publisher
+GENERATE_HTMLHELP = NO
+CHM_FILE =
+HHC_LOCATION =
+GENERATE_CHI = NO
+CHM_INDEX_ENCODING =
+BINARY_TOC = NO
+TOC_EXPAND = NO
+GENERATE_QHP = NO
+QCH_FILE =
+QHP_NAMESPACE = org.doxygen.Project
+QHP_VIRTUAL_FOLDER = doc
+QHP_CUST_FILTER_NAME =
+QHP_CUST_FILTER_ATTRS =
+QHP_SECT_FILTER_ATTRS =
+QHG_LOCATION =
+GENERATE_ECLIPSEHELP = NO
+ECLIPSE_DOC_ID = org.doxygen.Project
+DISABLE_INDEX = NO
+GENERATE_TREEVIEW = NO
+ENUM_VALUES_PER_LINE = 4
+TREEVIEW_WIDTH = 250
+EXT_LINKS_IN_WINDOW = NO
+HTML_FORMULA_FORMAT = png
+FORMULA_FONTSIZE = 10
+FORMULA_TRANSPARENT = YES
+FORMULA_MACROFILE =
+USE_MATHJAX = NO
+MATHJAX_FORMAT = HTML-CSS
+MATHJAX_RELPATH = https://cdn.jsdelivr.net/npm/mathjax@2
+MATHJAX_EXTENSIONS =
+MATHJAX_CODEFILE =
+SEARCHENGINE = YES
+SERVER_BASED_SEARCH = NO
+EXTERNAL_SEARCH = NO
+SEARCHENGINE_URL =
+SEARCHDATA_FILE = searchdata.xml
+EXTERNAL_SEARCH_ID =
+EXTRA_SEARCH_MAPPINGS =
+#---------------------------------------------------------------------------
+# Configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+GENERATE_LATEX = NO
+LATEX_OUTPUT = latex
+LATEX_CMD_NAME =
+MAKEINDEX_CMD_NAME = makeindex
+LATEX_MAKEINDEX_CMD = makeindex
+COMPACT_LATEX = NO
+PAPER_TYPE = a4
+EXTRA_PACKAGES =
+LATEX_HEADER =
+LATEX_FOOTER =
+LATEX_EXTRA_STYLESHEET =
+LATEX_EXTRA_FILES =
+PDF_HYPERLINKS = YES
+USE_PDFLATEX = YES
+LATEX_BATCHMODE = NO
+LATEX_HIDE_INDICES = NO
+LATEX_SOURCE_CODE = NO
+LATEX_BIB_STYLE = plain
+LATEX_TIMESTAMP = NO
+LATEX_EMOJI_DIRECTORY =
+#---------------------------------------------------------------------------
+# Configuration options related to the RTF output
+#---------------------------------------------------------------------------
+GENERATE_RTF = NO
+RTF_OUTPUT = rtf
+COMPACT_RTF = NO
+RTF_HYPERLINKS = NO
+RTF_STYLESHEET_FILE =
+RTF_EXTENSIONS_FILE =
+RTF_SOURCE_CODE = NO
+#---------------------------------------------------------------------------
+# Configuration options related to the man page output
+#---------------------------------------------------------------------------
+GENERATE_MAN = NO
+MAN_OUTPUT = man
+MAN_EXTENSION = .3
+MAN_SUBDIR =
+MAN_LINKS = NO
+#---------------------------------------------------------------------------
+# Configuration options related to the XML output
+#---------------------------------------------------------------------------
+GENERATE_XML = NO
+XML_OUTPUT = xml
+XML_PROGRAMLISTING = YES
+XML_NS_MEMB_FILE_SCOPE = NO
+#---------------------------------------------------------------------------
+# Configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
+GENERATE_DOCBOOK = NO
+DOCBOOK_OUTPUT = docbook
+DOCBOOK_PROGRAMLISTING = NO
+#---------------------------------------------------------------------------
+# Configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+GENERATE_AUTOGEN_DEF = NO
+#---------------------------------------------------------------------------
+# Configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+GENERATE_PERLMOD = NO
+PERLMOD_LATEX = NO
+PERLMOD_PRETTY = YES
+PERLMOD_MAKEVAR_PREFIX =
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+ENABLE_PREPROCESSING = YES
+MACRO_EXPANSION = NO
+EXPAND_ONLY_PREDEF = NO
+SEARCH_INCLUDES = YES
+INCLUDE_PATH =
+INCLUDE_FILE_PATTERNS =
+PREDEFINED =
+EXPAND_AS_DEFINED =
+SKIP_FUNCTION_MACROS = YES
+#---------------------------------------------------------------------------
+# Configuration options related to external references
+#---------------------------------------------------------------------------
+TAGFILES =
+GENERATE_TAGFILE =
+ALLEXTERNALS = NO
+EXTERNAL_GROUPS = YES
+EXTERNAL_PAGES = YES
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+CLASS_DIAGRAMS = YES
+DIA_PATH =
+HIDE_UNDOC_RELATIONS = YES
+HAVE_DOT = NO
+DOT_NUM_THREADS = 0
+DOT_FONTNAME = Helvetica
+DOT_FONTSIZE = 10
+DOT_FONTPATH =
+CLASS_GRAPH = YES
+COLLABORATION_GRAPH = YES
+GROUP_GRAPHS = YES
+UML_LOOK = NO
+UML_LIMIT_NUM_FIELDS = 10
+TEMPLATE_RELATIONS = NO
+INCLUDE_GRAPH = YES
+INCLUDED_BY_GRAPH = YES
+CALL_GRAPH = NO
+CALLER_GRAPH = NO
+GRAPHICAL_HIERARCHY = YES
+DIRECTORY_GRAPH = YES
+DOT_IMAGE_FORMAT = png
+INTERACTIVE_SVG = NO
+DOT_PATH =
+DOTFILE_DIRS =
+MSCFILE_DIRS =
+DIAFILE_DIRS =
+PLANTUML_JAR_PATH =
+PLANTUML_CFG_FILE =
+PLANTUML_INCLUDE_PATH =
+DOT_GRAPH_MAX_NODES = 50
+MAX_DOT_GRAPH_DEPTH = 0
+DOT_TRANSPARENT = NO
+DOT_MULTI_TARGETS = NO
+GENERATE_LEGEND = YES
+DOT_CLEANUP = YES
diff --git a/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml
index 26b1864f80..6d4f02c5c4 100644
--- a/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml
+++ b/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml
@@ -38,6 +38,16 @@ jobs:
modifyEnvironment: true
workingFolder: '$(Build.BinariesDirectory)'
+ - script: |
+ set ORT_DOXY_SRC=$(Build.SourcesDirectory)
+ set ORT_DOXY_OUT=$(Build.BinariesDirectory)\$(BuildConfig)\$(BuildConfig)
+ mkdir %ORT_DOXY_SRC%
+ mkdir %ORT_DOXY_OUT%
+ "C:\Program Files\doxygen\bin\doxygen.exe" $(Build.SourcesDirectory)\tools\ci_build\github\Doxyfile_csharp.cfg
+
+ workingDirectory: '$(Build.SourcesDirectory)'
+ displayName: 'API Documentation Check and generate'
+
- script: |
python -m pip install -q pyopenssl setuptools wheel numpy flake8
workingDirectory: '$(Build.BinariesDirectory)'
@@ -102,7 +112,7 @@ jobs:
projects: '$(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.sln'
configuration: '$(BuildConfig)'
arguments: '--configuration $(BuildConfig) -p:Platform="Any CPU" -p:OrtPackageId=$(OrtPackageId)'
- workingDirectory: '$(Build.SourcesDirectory)\csharp'
+ workingDirectory: '$(Build.SourcesDirectory)\csharp'
- task: DotNetCoreCLI@2
displayName: 'Build C#'