Document all C# API public interfaces (#5853)

Address documentation shortcomings.
 Document all required public interfaces.
 Add pipeline configuration.
Make Doxygen look up env vars for paths.
This commit is contained in:
Dmitri Smirnov 2020-11-20 14:03:55 -08:00 committed by GitHub
parent ab9d4b366b
commit ceedf5630b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
17 changed files with 743 additions and 101 deletions

View file

@ -51,7 +51,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// This class serves as a container for model run output values including
/// tensors, sequences of tensors, sequences and maps
/// tensors, sequences of tensors, sequences and maps.
/// It extends NamedOnnxValue, exposes the OnnxValueType and Tensor type
/// The class must be disposed of.
/// It disposes of _ortValueHolder that owns the underlying Ort output value and
@ -419,6 +419,10 @@ namespace Microsoft.ML.OnnxRuntime
#region IDisposable Support
/// <summary>
/// IDisposable implementation
/// </summary>
/// <param name="disposing">true if invoked by Dispose()</param>
protected virtual void Dispose(bool disposing)
{
if(_disposed)
@ -439,6 +443,9 @@ namespace Microsoft.ML.OnnxRuntime
_disposed = true;
}
/// <summary>
/// IDisposable implementation
/// </summary>
public void Dispose()
{
// Do not change this code. Put cleanup code in Dispose(bool disposing) above.

View file

@ -45,6 +45,10 @@ namespace Microsoft.ML.OnnxRuntime
#region IDisposable Support
/// <summary>
/// IDisposable implementation
/// </summary>
/// <param name="disposing">true if invoked from Dispose()</param>
protected virtual void Dispose(bool disposing)
{
if(_disposed)
@ -60,6 +64,9 @@ namespace Microsoft.ML.OnnxRuntime
_disposed = true;
}
/// <summary>
/// IDisposable implementation
/// </summary>
public void Dispose()
{
Dispose(true);

View file

@ -12,11 +12,22 @@ using System.Buffers;
namespace Microsoft.ML.OnnxRuntime
{
/// <summary>
/// Represents an Inference Session on an ONNX Model
/// Represents an Inference Session on an ONNX Model.
/// This is an IDisposable class and it must be disposed of
/// using either an explicit call to the Dispose() method or
/// a using() block. If this is a member of another
/// class, that class must also become IDisposable and it must
/// dispose of the InferenceSession in its Dispose() method.
/// </summary>
public class InferenceSession : IDisposable
{
/// <summary>
/// A pointer to an underlying native instance of OrtSession
/// </summary>
protected IntPtr _nativeHandle;
/// <summary>
/// Dictionaries that represent input/output/overridableInitializers metadata
/// </summary>
protected Dictionary<string, NodeMetadata> _inputMetadata, _outputMetadata, _overridableInitializerMetadata;
private SessionOptions _builtInSessionOptions = null;
private RunOptions _builtInRunOptions = null;
@ -272,13 +283,12 @@ namespace Microsoft.ML.OnnxRuntime
}
/// <summary>
///
/// Runs the loaded model for the given inputs and outputs.
///
/// Outputs need to be created with correct type and dimension to receive the fetched data.
/// </summary>
/// <param name="inputs">Specify a collection of <see cref="NamedOnnxValue"/> that indicates the input values.</param>
/// <param name="output">Specify a collection of <see cref="NamedOnnxValue"/> that indicates the output values.</param>
/// <param name="outputs">Specify a collection of <see cref="NamedOnnxValue"/> that indicates the output values.</param>
public void Run(
IReadOnlyCollection<NamedOnnxValue> inputs,
IReadOnlyCollection<NamedOnnxValue> outputs)
@ -293,7 +303,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Outputs need to be created with correct type and dimension to receive the fetched data.
/// </summary>
/// <param name="inputs">Specify a collection of <see cref="NamedOnnxValue"/> that indicates the input values.</param>
/// <param name="output">Specify a collection of <see cref="NamedOnnxValue"/> that indicates the output values.</param>
/// <param name="outputs">Specify a collection of <see cref="NamedOnnxValue"/> that indicates the output values.</param>
/// <param name="options"></param>
public void Run(
IReadOnlyCollection<NamedOnnxValue> inputs,
@ -388,7 +398,7 @@ namespace Microsoft.ML.OnnxRuntime
/// </summary>
/// <param name="inputNames">Specify a collection of string that indicates the input names. Should match <paramref name="inputValues"/>.</param>
/// <param name="inputValues">Specify a collection of <see cref="FixedBufferOnnxValue"/> that indicates the input values.</param>
/// <param name="output">Specify a collection of <see cref="NamedOnnxValue"/> that indicates the output values.</param>
/// <param name="outputs">Specify a collection of <see cref="NamedOnnxValue"/> that indicates the output values.</param>
public void Run(
IReadOnlyCollection<string> inputNames,
IReadOnlyCollection<FixedBufferOnnxValue> inputValues,
@ -405,7 +415,7 @@ namespace Microsoft.ML.OnnxRuntime
/// </summary>
/// <param name="inputNames">Specify a collection of string that indicates the input names. Should match <paramref name="inputValues"/>.</param>
/// <param name="inputValues">Specify a collection of <see cref="FixedBufferOnnxValue"/> that indicates the input values.</param>
/// <param name="output">Specify a collection of <see cref="NamedOnnxValue"/> that indicates the output values.</param>
/// <param name="outputs">Specify a collection of <see cref="NamedOnnxValue"/> that indicates the output values.</param>
/// <param name="options"></param>
public void Run(
IReadOnlyCollection<string> inputNames,
@ -478,6 +488,7 @@ namespace Microsoft.ML.OnnxRuntime
/// call to retrieve output names. They will be paired with the returned OrtValues and combined into DisposableNamedOnnxValues.
/// Otherwise, the method will retrieve output names from the OrtIoBinding instance.
/// It is an error if you supply a different number of names than the returned outputs</param>
/// <returns>A disposable collection of DisposableNamedOnnxValue that encapsulate output OrtValues</returns>
public IDisposableReadOnlyCollection<DisposableNamedOnnxValue> RunWithBindingAndNames(RunOptions runOptions, OrtIoBinding ioBinding, string[] names = null)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtRunWithBinding(Handle, runOptions.Handle, ioBinding.Handle));
@ -514,8 +525,9 @@ namespace Microsoft.ML.OnnxRuntime
}
/// <summary>
/// Ends profiling for the session. Returns the profile file name.
///
/// Ends profiling for the session.
/// </summary>
/// <returns> Returns the profile file name.</returns>
public string EndProfiling()
{
IntPtr nameHandle = IntPtr.Zero;
@ -642,6 +654,11 @@ namespace Microsoft.ML.OnnxRuntime
return result;
}
/// <summary>
/// This property queries model metadata, constructs
/// an instance of ModelMetadata and caches it
/// </summary>
/// <returns>Instance of ModelMetadata</returns>
public ModelMetadata ModelMetadata
{
get
@ -931,12 +948,19 @@ namespace Microsoft.ML.OnnxRuntime
Dispose(false);
}
/// <summary>
/// IDisposable implementation
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// IDisposable implementation
/// </summary>
/// <param name="disposing">true if invoked from Dispose() method</param>
protected virtual void Dispose(bool disposing)
{
if(_disposed)
@ -978,51 +1002,42 @@ namespace Microsoft.ML.OnnxRuntime
/// </summary>
public class NodeMetadata
{
private OnnxValueType _onnxValueType;
private int[] _dimensions;
private string[] _symbolicDimensions;
private Type _type;
internal NodeMetadata(OnnxValueType onnxValueType, int[] dimensions, string[] symbolicDimensions, Type type)
{
_onnxValueType = onnxValueType;
_dimensions = dimensions;
_symbolicDimensions = symbolicDimensions;
_type = type;
OnnxValueType = onnxValueType;
Dimensions = dimensions;
SymbolicDimensions = symbolicDimensions;
ElementType = type;
}
public OnnxValueType OnnxValueType
{
get
{
return _onnxValueType;
}
}
/// <summary>
/// Type value of the node
/// </summary>
/// <value>A value of OnnxValueType enum</value>
public OnnxValueType OnnxValueType { get; }
public int[] Dimensions
{
get
{
return _dimensions;
}
}
/// <summary>
/// Shape
/// </summary>
/// <value>Array of dimensions</value>
public int[] Dimensions { get; }
public string[] SymbolicDimensions
{
get
{
return _symbolicDimensions;
}
}
/// <summary>
/// Symbolic dimensions
/// </summary>
/// <value>Array of symbolic dimensions if present.</value>
public string[] SymbolicDimensions { get; }
public System.Type ElementType
{
get
{
return _type;
}
}
/// <summary>
/// .NET type that corresponds to this Node.
/// </summary>
/// <value>System.Type</value>
public System.Type ElementType { get; }
/// <summary>
/// Whether it is a Tensor
/// </summary>
/// <value>currently always returns true</value>
public bool IsTensor
{
get
@ -1033,6 +1048,10 @@ namespace Microsoft.ML.OnnxRuntime
}
/// <summary>
/// A class that queries and caches model metadata and exposes
/// it as properties
/// </summary>
public class ModelMetadata
{
private string _producerName;
@ -1138,6 +1157,10 @@ namespace Microsoft.ML.OnnxRuntime
}
/// <summary>
/// Producer name string
/// </summary>
/// <value>producer name string</value>
public string ProducerName
{
get
@ -1146,6 +1169,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
/// <summary>
/// Graph name for this model
/// </summary>
/// <value>graph name string</value>
public string GraphName
{
get
@ -1154,6 +1181,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
/// <summary>
/// Domain for this model
/// </summary>
/// <value>domain name string</value>
public string Domain
{
get
@ -1162,6 +1193,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
/// <summary>
/// Unstructured model description
/// </summary>
/// <value>description string</value>
public string Description
{
get
@ -1170,6 +1205,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
/// <summary>
/// Version number
/// </summary>
/// <value>long version integer</value>
public long Version
{
get
@ -1178,6 +1217,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
/// <summary>
/// Custom metadata key/value pairs
/// </summary>
/// <value>An instance of a Dictionary&lt;string, string&gt;</value>
public Dictionary<string, string> CustomMetadataMap
{
get

View file

@ -15,9 +15,22 @@ namespace Microsoft.ML.OnnxRuntime
/// </summary>
public class NamedOnnxValue
{
/// <summary>
/// Managed Tensor, Dictionary or IList
/// </summary>
protected Object _value;
/// <summary>
/// Name of the instance, model input/output
/// </summary>
protected string _name;
/// <summary>
/// Constructs an instance of NamedOnnxValue and represents
/// a model input to an inference session. It also represents a model output
/// when it serves as a base for DisposableNamedOnnxValue.
/// </summary>
/// <param name="name">input/output name</param>
/// <param name="value">Object that may be a tensor, Dictionary, IList</param>
protected NamedOnnxValue(string name, Object value)
{
_name = name;
@ -37,7 +50,15 @@ namespace Microsoft.ML.OnnxRuntime
return new NamedOnnxValue(name, value);
}
/// <summary>
/// Exposes the name of the of the model input/output
/// </summary>
/// <value>name string</value>
public string Name { get { return _name; } set { _name = value; } }
/// <summary>
/// Exposes the underlying managed object
/// </summary>
/// <value>object</value>
public Object Value { get { return _value; } set { _value = value; } }
/// <summary>

View file

@ -15,13 +15,19 @@ namespace Microsoft.ML.OnnxRuntime
public LogLevel LogLevel { get; set; }
}
/// <summary>
/// Logging level used to specify amount of logging when
/// creating environment. The lower the value is the more logging
/// will be output. A specific value output includes everything
/// that higher values output.
/// </summary>
public enum LogLevel
{
Verbose = 0,
Info = 1,
Warning = 2,
Error = 3,
Fatal = 4
Verbose = 0, // Everything
Info = 1, // Informational
Warning = 2, // Warnings
Error = 3, // Errors
Fatal = 4 // Results in the termination of the application.
}
/// <summary>
@ -82,6 +88,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Returns an instance of OrtEnv
/// It returns the same instance on every call - `OrtEnv` is singleton
/// </summary>
/// <returns>Returns a singleton instance of OrtEnv that represents native OrtEnv object</returns>
public static OrtEnv Instance() { return _instance.Value; }
/// <summary>
@ -104,6 +111,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Queries all the execution providers supported in the native onnxruntime shared library
/// </summary>
/// <returns>an array of strings that represent execution provider names</returns>
public string[] GetAvailableProviders()
{
IntPtr availableProvidersHandle = IntPtr.Zero;
@ -134,6 +142,10 @@ namespace Microsoft.ML.OnnxRuntime
#endregion
#region SafeHandle
/// <summary>
/// Overrides SafeHandle.IsInvalid
/// </summary>
/// <value>returns true if handle is equal to Zero</value>
public override bool IsInvalid
{
get
@ -142,6 +154,11 @@ namespace Microsoft.ML.OnnxRuntime
}
}
/// <summary>
/// Overrides SafeHandle.ReleaseHandle() to properly dispose of
/// the native instance of OrtEnv
/// </summary>
/// <returns>always returns true</returns>
protected override bool ReleaseHandle()
{
NativeMethods.OrtReleaseEnv(handle);

View file

@ -12,8 +12,8 @@ namespace Microsoft.ML.OnnxRuntime
/// </summary>
public enum OrtAllocatorType
{
DeviceAllocator = 0,
ArenaAllocator = 1
DeviceAllocator = 0, // Device specific allocator
ArenaAllocator = 1 // Memory arena
}
/// <summary>
@ -53,6 +53,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Default CPU based instance
/// </summary>
/// <value>Singleton instance of a CpuMemoryInfo</value>
public static OrtMemoryInfo DefaultInstance
{
get
@ -69,6 +70,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
/// <summary>
/// Overrides SafeHandle.IsInvalid
/// </summary>
/// <value>returns true if handle is equal to Zero</value>
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
/// <summary>
@ -84,10 +89,20 @@ namespace Microsoft.ML.OnnxRuntime
_owned = owned;
}
// Predefined utf8 encoded allocator names. Use them to construct an instance of
// OrtMemoryInfo to avoid UTF-16 to UTF-8 conversion
/// <summary>
/// Predefined utf8 encoded allocator names. Use them to construct an instance of
/// OrtMemoryInfo to avoid UTF-16 to UTF-8 conversion costs.
/// </summary>
public static readonly byte[] allocatorCPU = Encoding.UTF8.GetBytes("Cpu" + Char.MinValue);
/// <summary>
/// Predefined utf8 encoded allocator names. Use them to construct an instance of
/// OrtMemoryInfo to avoid UTF-16 to UTF-8 conversion costs.
/// </summary>
public static readonly byte[] allocatorCUDA = Encoding.UTF8.GetBytes("Cuda" + Char.MinValue);
/// <summary>
/// Predefined utf8 encoded allocator names. Use them to construct an instance of
/// OrtMemoryInfo to avoid UTF-16 to UTF-8 conversion costs.
/// </summary>
public static readonly byte[] allocatorCUDA_PINNED = Encoding.UTF8.GetBytes("CudaPinned" + Char.MinValue);
/// <summary>
/// Create an instance of OrtMemoryInfo according to the specification
@ -142,6 +157,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Returns device ID
/// </summary>
/// <value>returns integer Id value</value>
public int Id
{
get
@ -157,7 +173,7 @@ namespace Microsoft.ML.OnnxRuntime
/// as names would conflict with the returned type. Also, there are native
/// calls behind them so exposing them as Get() would be appropriate.
/// </summary>
/// <returns></returns>
/// <returns>OrtMemType for the instance</returns>
public OrtMemType GetMemoryType()
{
OrtMemType memoryType = OrtMemType.Default;
@ -166,9 +182,9 @@ namespace Microsoft.ML.OnnxRuntime
}
/// <summary>
/// Returns alloctor type
/// Fetches allocator type from the underlying OrtAllocator
/// </summary>
/// <returns></returns>
/// <returns>Returns allocator type</returns>
public OrtAllocatorType GetAllocatorType()
{
OrtAllocatorType allocatorType = OrtAllocatorType.ArenaAllocator;
@ -176,6 +192,11 @@ namespace Microsoft.ML.OnnxRuntime
return allocatorType;
}
/// <summary>
/// Overrides System.Object.Equals(object)
/// </summary>
/// <param name="obj">object to compare to</param>
/// <returns>true if obj is an instance of OrtMemoryInfo and is equal to this</returns>
public override bool Equals(object obj)
{
var other = obj as OrtMemoryInfo;
@ -186,6 +207,11 @@ namespace Microsoft.ML.OnnxRuntime
return Equals(other);
}
/// <summary>
/// Compares this instance with another
/// </summary>
/// <param name="other">OrtMemoryInfo to compare to</param>
/// <returns>true if instances are equal according to OrtCompareMemoryInfo.</returns>
public bool Equals(OrtMemoryInfo other)
{
if(this == other)
@ -197,12 +223,21 @@ namespace Microsoft.ML.OnnxRuntime
return (result == 0);
}
/// <summary>
/// Overrides System.Object.GetHashCode()
/// </summary>
/// <returns>integer hash value</returns>
public override int GetHashCode()
{
return Pointer.ToInt32();
}
#region SafeHandle
/// <summary>
/// Overrides SafeHandle.ReleaseHandle() to properly dispose of
/// the native instance of OrtMemoryInfo
/// </summary>
/// <returns>always returns true</returns>
protected override bool ReleaseHandle()
{
// If this instance exposes OrtMemoryInfo that belongs
@ -255,13 +290,22 @@ namespace Microsoft.ML.OnnxRuntime
/// </summary>
internal IntPtr Pointer { get { return handle; } }
/// <summary>
/// Overrides SafeHandle.IsInvalid
/// </summary>
/// <value>returns true if handle is equal to Zero</value>
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
/// <summary>
/// Returns the size of the allocation
/// Size of the allocation
/// </summary>
/// <value>uint size of the allocation in bytes</value>
public uint Size { get; private set; }
/// <summary>
/// Memory Information about this allocation
/// </summary>
/// <value>Returns OrtMemoryInfo from the allocator</value>
public OrtMemoryInfo Info
{
get
@ -270,6 +314,11 @@ namespace Microsoft.ML.OnnxRuntime
}
}
#region SafeHandle
/// <summary>
/// Overrides SafeHandle.ReleaseHandle() to deallocate
/// a chunk of memory using the specified allocator.
/// </summary>
/// <returns>always returns true</returns>
protected override bool ReleaseHandle()
{
_allocator.FreeMemory(handle);
@ -318,6 +367,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
/// <summary>
/// Overrides SafeHandle.IsInvalid
/// </summary>
/// <value>returns true if handle is equal to Zero</value>
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
/// <summary>
@ -349,6 +402,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// OrtMemoryInfo instance owned by the allocator
/// </summary>
/// <value>Instance of OrtMemoryInfo describing this allocator</value>
public OrtMemoryInfo Info
{
get
@ -363,8 +417,8 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Allocate native memory. Returns a disposable instance of OrtMemoryAllocation
/// </summary>
/// <param name="size"></param>
/// <returns></returns>
/// <param name="size">number of bytes to allocate</param>
/// <returns>Instance of OrtMemoryAllocation</returns>
public OrtMemoryAllocation Allocate(uint size)
{
IntPtr allocation = IntPtr.Zero;
@ -373,15 +427,20 @@ namespace Microsoft.ML.OnnxRuntime
}
/// <summary>
/// This internal interface is used for freeing memory
/// This internal interface is used for freeing memory.
/// </summary>
/// <param name="allocation"></param>
/// <param name="allocation">pointer to a native memory chunk allocated by this allocator instance</param>
internal void FreeMemory(IntPtr allocation)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtAllocatorFree(handle, allocation));
}
#region SafeHandle
/// <summary>
/// Overrides SafeHandle.ReleaseHandle() to properly dispose of
/// the native instance of OrtAllocator
/// </summary>
/// <returns>always returns true</returns>
protected override bool ReleaseHandle()
{
// Singleton default allocator is not owned

View file

@ -34,6 +34,10 @@ namespace Microsoft.ML.OnnxRuntime
}
}
/// <summary>
/// Overrides SafeHandle.IsInvalid
/// </summary>
/// <value>returns true if handle is equal to Zero</value>
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
/// <summary>
@ -238,6 +242,11 @@ namespace Microsoft.ML.OnnxRuntime
}
#region SafeHandle
/// <summary>
/// Overrides SafeHandle.ReleaseHandle() to properly dispose of
/// the native instance of OrtIoBinding
/// </summary>
/// <returns>always returns true</returns>
protected override bool ReleaseHandle()
{
NativeMethods.OrtReleaseIoBinding(handle);

View file

@ -9,26 +9,32 @@ using System.Runtime.InteropServices;
namespace Microsoft.ML.OnnxRuntime
{
/// <summary>
/// A type of data that OrtValue encapsulates.
/// </summary>
public enum OnnxValueType
{
ONNX_TYPE_UNKNOWN = 0,
ONNX_TYPE_TENSOR = 1,
ONNX_TYPE_SEQUENCE = 2,
ONNX_TYPE_MAP = 3,
ONNX_TYPE_OPAQUE = 4,
ONNX_TYPE_SPARSETENSOR = 5,
ONNX_TYPE_UNKNOWN = 0, // Not set
ONNX_TYPE_TENSOR = 1, // It's a Tensor
ONNX_TYPE_SEQUENCE = 2, // It's an Onnx sequence which may be a sequence of Tensors/Maps/Sequences
ONNX_TYPE_MAP = 3, // It's a map
ONNX_TYPE_OPAQUE = 4, // It's an experimental Opaque object
ONNX_TYPE_SPARSETENSOR = 5, // It's a Sparse Tensor
}
/// <summary>
/// Represents a disposable OrtValue
/// Represents a disposable OrtValue.
/// This class exposes a native instance of OrtValue.
/// The class implements IDisposable via SafeHandle and must
/// be disposed.
/// </summary>
public class OrtValue : SafeHandle
{
/// <summary>
/// Use factory methods to instantiate
/// Use factory methods to instantiate this class
/// </summary>
/// <param name="handle"></param>
/// <param name="owned">Default true, own the raw handle
/// <param name="handle">Pointer to a native instance of OrtValue</param>
/// <param name="owned">Default true, own the raw handle. Otherwise, the handle is owned by another instance
/// However, we use this class to expose OrtValue that is owned by DisposableNamedOnnxValue
/// </param>
internal OrtValue(IntPtr handle, bool owned = true)
@ -39,21 +45,19 @@ namespace Microsoft.ML.OnnxRuntime
internal IntPtr Handle { get { return handle; } }
/// <summary>
/// Overrides SafeHandle.IsInvalid
/// </summary>
/// <value>returns true if handle is equal to Zero</value>
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
#region NamedOnnxValue/DisposableOnnxValue accommodations
// DisposableOnnxValue class owns Native handle to OrtValue
// NamedOnnxValue does not own anything but creates a new one
// which presents a fundamental semantic difference to ToOrtValue interface.
//
// We provide a way to relinquish ownership as well as return an instance of
// OrtValue that is still disposable but does not have ownership
//
/// <summary>
/// This internal interface is used to transfer ownership elsewhere.
/// This instance must still be disposed in case there are other native
/// objects still owned.
/// objects still owned. This is a convenience method to ensure that an underlying
/// OrtValue is disposed exactly once when exception is thrown.
/// </summary>
/// <returns></returns>
internal IntPtr Disown()
@ -74,7 +78,7 @@ namespace Microsoft.ML.OnnxRuntime
/// or a piece of pinned managed memory.
///
/// The resulting OrtValue does not own the underlying memory buffer and will not attempt to
/// deallocated it.
/// deallocate it.
/// </summary>
/// <param name="memInfo">Memory Info. For managed memory it is a default cpu.
/// For Native memory must be obtained from the allocator or OrtMemoryAllocation instance</param>
@ -117,14 +121,17 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// This is a factory method creates a native Onnxruntime OrtValue containing a tensor.
/// However, it re-uses managed memory if possible.
/// The method will attempt to pin managed memory so no copying occurs when data is passed down
/// to native code.
/// </summary>
/// <param name="value">Tensor object</param>
/// <param name="memoryHandle">For all tensor types but string tensors we endeavor to use managed memory
/// to avoid additional allocation and copy. This out parameter represents a chunk of pinned memory
/// to avoid additional allocation and copy. This out parameter represents a chunk of pinned memory which will need
/// to be disposed when no longer needed. The lifespan of memoryHandle should eclipse the lifespan of the corresponding
/// OrtValue.
/// </param>
/// <param name="elementType">discovered tensor element type</param>
/// <returns></returns>
/// <returns>An instance of OrtValue constructed on top of the object</returns>
public static OrtValue CreateFromTensorObject(Object value, out MemoryHandle? memoryHandle,
out TensorElementType elementType)
{
@ -352,6 +359,11 @@ namespace Microsoft.ML.OnnxRuntime
}
#region SafeHandle
/// <summary>
/// Overrides SafeHandle.ReleaseHandle() to properly dispose of
/// the native instance of OrtValue
/// </summary>
/// <returns>always returns true</returns>
protected override bool ReleaseHandle()
{
// We have to surrender ownership to some legacy classes

View file

@ -5,7 +5,9 @@ using System.Runtime.InteropServices;
namespace Microsoft.ML.OnnxRuntime
{
/// Sets various runtime options.
/// <summary>
/// Sets various runtime options.
/// </summary>
public class RunOptions : SafeHandle
{
internal IntPtr Handle
@ -16,13 +18,19 @@ namespace Microsoft.ML.OnnxRuntime
}
}
/// <summary>
/// Default constructor. Creates a default RunOptions instance.
/// </summary>
public RunOptions()
:base(IntPtr.Zero, true)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtCreateRunOptions(out handle));
}
/// <summary>
/// Overrides SafeHandle.IsInvalid
/// </summary>
/// <value>returns true if handle is equal to Zero</value>
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
/// <summary>
@ -85,6 +93,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Sets a flag to terminate all Run() calls that are currently using this RunOptions object
/// Default = false
/// </summary>
/// <value>terminate flag value</value>
public bool Terminate
{
get
@ -109,7 +118,11 @@ namespace Microsoft.ML.OnnxRuntime
#region SafeHandle
/// <summary>
/// Overrides SafeHandle.ReleaseHandle() to properly dispose of
/// the native instance of RunOptions
/// </summary>
/// <returns>always returns true</returns>
protected override bool ReleaseHandle()
{
NativeMethods.OrtReleaseRunOptions(handle);

View file

@ -2,6 +2,7 @@
// Licensed under the MIT License.
using System;
using System.Runtime.InteropServices;
using System.Text;
using System.Runtime.InteropServices;
using System.IO;
@ -9,7 +10,8 @@ using System.IO;
namespace Microsoft.ML.OnnxRuntime
{
/// <summary>
/// TODO Add documentation about which optimizations are enabled for each value.
/// Graph optimization level to use with SessionOptions
/// [https://github.com/microsoft/onnxruntime/blob/master/docs/ONNX_Runtime_Graph_Optimizations.md]
/// </summary>
public enum GraphOptimizationLevel
{
@ -36,7 +38,7 @@ namespace Microsoft.ML.OnnxRuntime
/// </summary>
public class SessionOptions : SafeHandle
{
// Delayloaded CUDA or cuDNN DLLs. Currently, delayload is disabled. See cmake/CMakeLists.txt for more information.
// Delay-loaded CUDA or cuDNN DLLs. Currently, delayload is disabled. See cmake/CMakeLists.txt for more information.
private static string[] cudaDelayLoadedLibs = { };
#region Constructor and Factory methods
@ -91,6 +93,10 @@ namespace Microsoft.ML.OnnxRuntime
#endregion
#region ExecutionProviderAppends
/// <summary>
/// Appends CPU EP to a list of available execution providers for the session.
/// </summary>
/// <param name="useArena">1 - use arena, 0 - do not use arena</param>
public void AppendExecutionProvider_CPU(int useArena)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_CPU(handle, useArena));
@ -99,6 +105,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Use only if you have the onnxruntime package specific to this Execution Provider.
/// </summary>
/// <param name="useArena">1 - use allocation arena, 0 - otherwise</param>
public void AppendExecutionProvider_Dnnl(int useArena)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Dnnl(handle, useArena));
@ -107,6 +114,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Use only if you have the onnxruntime package specific to this Execution Provider.
/// </summary>
/// <param name="deviceId">integer device ID</param>
public void AppendExecutionProvider_CUDA(int deviceId)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_CUDA(handle, deviceId));
@ -115,14 +123,17 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Use only if you have the onnxruntime package specific to this Execution Provider.
/// </summary>
/// <param name="deviceId">device identification</param>
public void AppendExecutionProvider_DML(int deviceId)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_DML(handle, deviceId));
}
/// <summary>
/// Use only if you have the onnxruntime package specific to this Execution Provider.
/// </summary>
/// <param name="deviceId">device identification, default empty string</param>
public void AppendExecutionProvider_OpenVINO(string deviceId = "")
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_OpenVINO(handle, deviceId));
@ -131,6 +142,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Use only if you have the onnxruntime package specific to this Execution Provider.
/// </summary>
/// <param name="deviceId">device identification</param>
public void AppendExecutionProvider_Tensorrt(int deviceId)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Tensorrt(handle, deviceId));
@ -139,6 +151,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Use only if you have the onnxruntime package specific to this Execution Provider.
/// </summary>
/// <param name="deviceId">device identification</param>
public void AppendExecutionProvider_MIGraphX(int deviceId)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_MIGraphX(handle, deviceId));
@ -147,6 +160,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Use only if you have the onnxruntime package specific to this Execution Provider.
/// </summary>
/// <param name="nnapi_flags">nnapi specific flag mask</param>
public void AppendExecutionProvider_Nnapi(uint nnapi_flags)
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Nnapi(handle, nnapi_flags));
@ -155,6 +169,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Use only if you have the onnxruntime package specific to this Execution Provider.
/// </summary>
/// <param name="settings">string with Nuphar specific settings</param>
public void AppendExecutionProvider_Nuphar(string settings = "")
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtSessionOptionsAppendExecutionProvider_Nuphar(handle, 1, settings));
@ -167,9 +182,10 @@ namespace Microsoft.ML.OnnxRuntime
/// (Deprecated) Loads a DLL named 'libraryPath' and looks for this entry point:
/// OrtStatus* RegisterCustomOps(OrtSessionOptions* options, const OrtApiBase* api);
/// It then passes in the provided session options to this function along with the api base.
/// Deprecated in favor of RegisterCustomOpLibraryV2() because it provides users with the library handle
/// Deprecated in favor of RegisterCustomOpLibraryV2() because it provides users with the library handle
/// to release when all sessions relying on it are destroyed
/// </summary>
/// <param name="libraryPath">path to the custom op library</param>
[ObsoleteAttribute("RegisterCustomOpLibrary(...) is obsolete. Use RegisterCustomOpLibraryV2(...) instead.", false)]
public void RegisterCustomOpLibrary(string libraryPath)
{
@ -190,6 +206,8 @@ namespace Microsoft.ML.OnnxRuntime
/// session options are destroyed, or if an error occurs and it is non null.
/// Hint: .NET Core 3.1 has a 'NativeLibrary' class that can be used to free the library handle
/// </summary>
/// <param name="libraryPath">Custom op library path</param>
/// <param name="libraryHandle">out parameter, library handle</param>
public void RegisterCustomOpLibraryV2(string libraryPath, out IntPtr libraryHandle)
{
var libraryPathPinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(libraryPath), GCHandleType.Pinned);
@ -204,11 +222,10 @@ namespace Microsoft.ML.OnnxRuntime
/// that is same as the name passed to this API call, ORT will use this initializer instance
/// instead of deserializing one from the model file. This is useful when you want to share
/// the same initializer across sessions.
/// \param name name of the initializer
/// \param val OrtValue containing the initializer. Lifetime of 'val' and the underlying initializer buffer must be
/// managed by the user (created using the CreateTensorWithDataAsOrtValue API) and it must outlive the session object
/// to which it is added.
/// </summary>
/// <param name="name">name of the initializer</param>
/// <param name="ortValue">OrtValue containing the initializer. Lifetime of 'val' and the underlying initializer buffer must be
/// managed by the user (created using the CreateTensorWithDataAsOrtValue API) and it must outlive the session object</param>
public void AddInitializer(string name, OrtValue ortValue)
{
var utf8NamePinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(name), GCHandleType.Pinned);
@ -222,6 +239,8 @@ namespace Microsoft.ML.OnnxRuntime
/// Set a single session configuration entry as a pair of strings
/// If a configuration with same key exists, this will overwrite the configuration with the given configValue
/// </summary>
/// <param name="configKey">config key name</param>
/// <param name="configValue">config key value</param>
public void AddSessionConfigEntry(string configKey, string configValue)
{
var utf8NameConfigKeyPinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(configKey), GCHandleType.Pinned);
@ -230,7 +249,7 @@ namespace Microsoft.ML.OnnxRuntime
using (var pinnedConfigKeyName = new PinnedGCHandle(utf8NameConfigKeyPinned))
using (var pinnedConfigValueName = new PinnedGCHandle(utf8NameConfigValuePinned))
{
NativeApiStatus.VerifySuccess(NativeMethods.OrtAddSessionConfigEntry(handle,
NativeApiStatus.VerifySuccess(NativeMethods.OrtAddSessionConfigEntry(handle,
pinnedConfigKeyName.Pointer, pinnedConfigValueName.Pointer));
}
}
@ -239,6 +258,8 @@ namespace Microsoft.ML.OnnxRuntime
/// Override symbolic dimensions (by specific denotation strings) with actual values if known at session initialization time to enable
/// optimizations that can take advantage of fixed values (such as memory planning, etc)
/// </summary>
/// <param name="dimDenotation">denotation name</param>
/// <param name="dimValue">denotation value</param>
public void AddFreeDimensionOverride(string dimDenotation, long dimValue)
{
var utf8DimDenotationPinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(dimDenotation), GCHandleType.Pinned);
@ -252,6 +273,8 @@ namespace Microsoft.ML.OnnxRuntime
/// Override symbolic dimensions (by specific name strings) with actual values if known at session initialization time to enable
/// optimizations that can take advantage of fixed values (such as memory planning, etc)
/// </summary>
/// <param name="dimName">dimension name</param>
/// <param name="dimValue">dimension value</param>
public void AddFreeDimensionOverrideByName(string dimName, long dimValue)
{
var utf8DimNamePinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(dimName), GCHandleType.Pinned);
@ -271,11 +294,16 @@ namespace Microsoft.ML.OnnxRuntime
}
#region Public Properties
/// <summary>
/// Overrides SafeHandle.IsInvalid
/// </summary>
/// <value>returns true if handle is equal to Zero</value>
public override bool IsInvalid { get { return handle == IntPtr.Zero; } }
/// <summary>
/// Enables the use of the memory allocation patterns in the first Run() call for subsequent runs. Default = true.
/// </summary>
/// <value>returns enableMemoryPattern flag value</value>
public bool EnableMemoryPattern
{
get
@ -311,6 +339,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Enables profiling of InferenceSession.Run() calls. Default is false
/// </summary>
/// <value>returns _enableProfiling flag value</value>
public bool EnableProfiling
{
get
@ -336,6 +365,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Set filepath to save optimized model after graph level transformations. Default is empty, which implies saving is disabled.
/// </summary>
/// <value>returns _optimizedModelFilePath flag value</value>
public string OptimizedModelFilePath
{
get
@ -358,6 +388,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Enables Arena allocator for the CPU memory allocations. Default is true.
/// </summary>
/// <value>returns _enableCpuMemArena flag value</value>
public bool EnableCpuMemArena
{
get
@ -383,8 +414,8 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Log Id to be used for the session. Default is empty string.
/// TODO: Should it be named LogTag as in RunOptions?
/// </summary>
/// <value>returns _logId value</value>
public string LogId
{
get
@ -403,6 +434,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Log Severity Level for the session logs. Default = ORT_LOGGING_LEVEL_WARNING
/// </summary>
/// <value>returns _logSeverityLevel value</value>
public OrtLoggingLevel LogSeverityLevel
{
get
@ -421,6 +453,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Log Verbosity Level for the session logs. Default = 0. Valid values are >=0.
/// This takes into effect only when the LogSeverityLevel is set to ORT_LOGGING_LEVEL_VERBOSE.
/// </summary>
/// <value>returns _logVerbosityLevel value</value>
public int LogVerbosityLevel
{
get
@ -440,6 +473,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Sets the number of threads used to parallelize the execution within nodes
/// A value of 0 means ORT will pick a default
/// </summary>
/// <value>returns _intraOpNumThreads value</value>
public int IntraOpNumThreads
{
get
@ -459,6 +493,7 @@ namespace Microsoft.ML.OnnxRuntime
/// If sequential execution is enabled this value is ignored
/// A value of 0 means ORT will pick a default
/// </summary>
/// <value>returns _interOpNumThreads value</value>
public int InterOpNumThreads
{
get
@ -476,6 +511,7 @@ namespace Microsoft.ML.OnnxRuntime
/// <summary>
/// Sets the graph optimization level for the session. Default is set to ORT_ENABLE_ALL.
/// </summary>
/// <value>returns _graphOptimizationLevel value</value>
public GraphOptimizationLevel GraphOptimizationLevel
{
get
@ -494,6 +530,7 @@ namespace Microsoft.ML.OnnxRuntime
/// Sets the execution mode for the session. Default is set to ORT_SEQUENTIAL.
/// See [ONNX_Runtime_Perf_Tuning.md] for more details.
/// </summary>
/// <value>returns _executionMode value</value>
public ExecutionMode ExecutionMode
{
get
@ -543,7 +580,11 @@ namespace Microsoft.ML.OnnxRuntime
#endregion
#region SafeHandle
/// <summary>
/// Overrides SafeHandle.ReleaseHandle() to properly dispose of
/// the native instance of SessionOptions
/// </summary>
/// <returns>always returns true</returns>
protected override bool ReleaseHandle()
{
NativeMethods.OrtReleaseSessionOptions(handle);

View file

@ -14,6 +14,9 @@ using System;
namespace Microsoft.ML.OnnxRuntime.Tensors
{
/// <summary>
/// A static class that houses static DenseTensor<T> extension methods
/// </summary>
public static class ArrayTensorExtensions
{
/// <summary>

View file

@ -113,6 +113,12 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
Buffer.Span[index] = value;
}
/// <summary>
/// Overrides Tensor.CopyTo(). Copies the content of the Tensor
/// to the specified array starting with arrayIndex
/// </summary>
/// <param name="array">destination array</param>
/// <param name="arrayIndex">start index</param>
protected override void CopyTo(T[] array, int arrayIndex)
{
if (array == null)
@ -127,6 +133,11 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
Buffer.Span.CopyTo(array.AsSpan(arrayIndex));
}
/// <summary>
/// Determines the index of a specific item in the Tensor&lt;T&gt;.
/// </summary>
/// <param name="item">Object to locate</param>
/// <returns>The index of item if found in the tensor; otherwise, -1</returns>
protected override int IndexOf(T item)
{
// TODO: use Span.IndexOf when/if it removes the IEquatable type constraint

View file

@ -64,6 +64,9 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
/// </summary>
public struct Float16
{
/// <summary>
/// float16 representation bits
/// </summary>
public ushort value;
/// <summary>
/// Ctor
@ -77,6 +80,7 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
/// Converts to ushort
/// </summary>
/// <param name="f">instance of Float16</param>
/// <returns>value member</returns>
public static implicit operator ushort (Float16 f) { return f.value; }
/// <summary>
/// Converts a 16-bit unsigned integer to a Float16.
@ -143,6 +147,9 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
/// </summary>
public struct BFloat16
{
/// <summary>
/// bfloat16 representation bits
/// </summary>
public ushort value;
/// <summary>
/// Ctor
@ -156,6 +163,7 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
/// Converts to ushort
/// </summary>
/// <param name="bf">instance of BFloat16</param>
/// <returns>value member</returns>
public static implicit operator ushort(BFloat16 bf) { return bf.value; }
/// <summary>
/// Converts a 16-bit unsigned integer to a BFloat16.
@ -219,9 +227,26 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
/// </summary>
public class TensorTypeInfo
{
/// <summary>
/// TensorElementType enum
/// </summary>
/// <value>type enum value</value>
public TensorElementType ElementType { get; private set; }
/// <summary>
/// Size of the stored primitive type in bytes
/// </summary>
/// <value>size in bytes</value>
public int TypeSize { get; private set; }
/// <summary>
/// Whether the tensor element type is a string
/// </summary>
/// <value>true if Tensor element type is a string</value>
public bool IsString { get { return ElementType == TensorElementType.String; } }
/// <summary>
/// Ctor
/// </summary>
/// <param name="elementType">TensorElementType value</param>
/// <param name="typeSize">size of the type in bytes</param>
public TensorTypeInfo(TensorElementType elementType, int typeSize)
{
ElementType = elementType;
@ -234,9 +259,26 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
/// </summary>
public class TensorElementTypeInfo
{
/// <summary>
/// Tensor element type
/// </summary>
/// <value>System.Type</value>
public Type TensorType { get; private set; }
/// <summary>
/// Size of the stored primitive type in bytes
/// </summary>
/// <value>size in bytes</value>
public int TypeSize { get; private set; }
/// <summary>
/// Whether the tensor element type is a string
/// </summary>
/// <value>true if Tensor element type is a string</value>
public bool IsString { get; private set; }
/// <summary>
/// Ctor
/// </summary>
/// <param name="type">Tensor element type</param>
/// <param name="typeSize">size of the type in bytes</param>
public TensorElementTypeInfo(Type type, int typeSize)
{
TensorType = type;
@ -281,6 +323,10 @@ namespace Microsoft.ML.OnnxRuntime.Tensors
}
private readonly Type _primitiveType;
/// <summary>
/// Constructs TensorBase
/// </summary>
/// <param name="primitiveType">primitive type the deriving class is using</param>
protected TensorBase(Type primitiveType)
{
// Should hold as we rely on this to pass arrays of these

View file

@ -65,7 +65,7 @@
<PackageReference Include="Google.Protobuf" Version="3.11.3" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.2.0" />
<PackageReference Include="xunit" Version="2.4.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.1" />
</ItemGroup>
<ItemGroup>

View file

@ -58,7 +58,7 @@
<ItemGroup>
<ProjectReference Include="$(OnnxRuntimeCSharpRoot)\src\Microsoft.ML.OnnxRuntime\Microsoft.ML.OnnxRuntime.csproj" />
<PackageReference Include="CommandLineParser" Version="2.4.3" />
<PackageReference Include="Google.Protobuf" Version="3.13.0" />
<PackageReference Include="Google.Protobuf" Version="3.13.0" />
</ItemGroup>
<Target Name="ProtoGen" BeforeTargets="Build">

View file

@ -0,0 +1,343 @@
## Onnxruntime C# API Doxygen configuration file
# Doxyfile 1.8.20
#---------------------------------------------------------------------------
# Project related configuration options
#---------------------------------------------------------------------------
DOXYFILE_ENCODING = UTF-8
PROJECT_NAME = "Onnxruntime"
PROJECT_NUMBER =
PROJECT_BRIEF =
PROJECT_LOGO =
OUTPUT_DIRECTORY = $(ORT_DOXY_OUT)\csharp_dox
CREATE_SUBDIRS = NO
ALLOW_UNICODE_NAMES = NO
OUTPUT_LANGUAGE = English
OUTPUT_TEXT_DIRECTION = None
BRIEF_MEMBER_DESC = YES
REPEAT_BRIEF = YES
ABBREVIATE_BRIEF = "The $name class" \
"The $name widget" \
"The $name file" \
is \
provides \
specifies \
contains \
represents \
a \
an \
the
ALWAYS_DETAILED_SEC = NO
INLINE_INHERITED_MEMB = NO
FULL_PATH_NAMES = YES
STRIP_FROM_PATH =
STRIP_FROM_INC_PATH =
SHORT_NAMES = NO
JAVADOC_AUTOBRIEF = NO
JAVADOC_BANNER = NO
QT_AUTOBRIEF = NO
MULTILINE_CPP_IS_BRIEF = NO
PYTHON_DOCSTRING = YES
INHERIT_DOCS = YES
SEPARATE_MEMBER_PAGES = NO
TAB_SIZE = 4
ALIASES =
OPTIMIZE_OUTPUT_FOR_C = NO
OPTIMIZE_OUTPUT_JAVA = NO
OPTIMIZE_FOR_FORTRAN = NO
OPTIMIZE_OUTPUT_VHDL = NO
OPTIMIZE_OUTPUT_SLICE = NO
EXTENSION_MAPPING =
MARKDOWN_SUPPORT = YES
TOC_INCLUDE_HEADINGS = 5
AUTOLINK_SUPPORT = YES
BUILTIN_STL_SUPPORT = NO
CPP_CLI_SUPPORT = NO
SIP_SUPPORT = NO
IDL_PROPERTY_SUPPORT = YES
DISTRIBUTE_GROUP_DOC = NO
GROUP_NESTED_COMPOUNDS = NO
SUBGROUPING = YES
INLINE_GROUPED_CLASSES = NO
INLINE_SIMPLE_STRUCTS = NO
TYPEDEF_HIDES_STRUCT = NO
LOOKUP_CACHE_SIZE = 0
NUM_PROC_THREADS = 1
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
EXTRACT_ALL = NO
EXTRACT_PRIVATE = NO
EXTRACT_PRIV_VIRTUAL = NO
EXTRACT_PACKAGE = NO
EXTRACT_STATIC = NO
EXTRACT_LOCAL_CLASSES = YES
EXTRACT_LOCAL_METHODS = NO
EXTRACT_ANON_NSPACES = NO
HIDE_UNDOC_MEMBERS = NO
HIDE_UNDOC_CLASSES = NO
HIDE_FRIEND_COMPOUNDS = NO
HIDE_IN_BODY_DOCS = NO
INTERNAL_DOCS = NO
CASE_SENSE_NAMES = NO
HIDE_SCOPE_NAMES = NO
HIDE_COMPOUND_REFERENCE= NO
SHOW_INCLUDE_FILES = YES
SHOW_GROUPED_MEMB_INC = NO
FORCE_LOCAL_INCLUDES = NO
INLINE_INFO = YES
SORT_MEMBER_DOCS = YES
SORT_BRIEF_DOCS = NO
SORT_MEMBERS_CTORS_1ST = NO
SORT_GROUP_NAMES = NO
SORT_BY_SCOPE_NAME = NO
STRICT_PROTO_MATCHING = NO
GENERATE_TODOLIST = YES
GENERATE_TESTLIST = YES
GENERATE_BUGLIST = YES
GENERATE_DEPRECATEDLIST= YES
ENABLED_SECTIONS =
MAX_INITIALIZER_LINES = 30
SHOW_USED_FILES = NO
SHOW_FILES = YES
SHOW_NAMESPACES = YES
FILE_VERSION_FILTER = "git -C $(ORT_DOXY_SRC) log -n 1 --format=%h -- afile"
LAYOUT_FILE =
CITE_BIB_FILES =
#---------------------------------------------------------------------------
# Configuration options related to warning and progress messages
#---------------------------------------------------------------------------
QUIET = NO
WARNINGS = YES
WARN_IF_UNDOCUMENTED = YES
WARN_IF_DOC_ERROR = YES
WARN_NO_PARAMDOC = YES
WARN_AS_ERROR = YES
WARN_FORMAT = "$file:$line: $text"
WARN_LOGFILE =
#---------------------------------------------------------------------------
# Configuration options related to the input files
#---------------------------------------------------------------------------
INPUT = $(ORT_DOXY_SRC)\csharp\src\Microsoft.ML.OnnxRuntime \
$(ORT_DOXY_SRC)\csharp\src\Microsoft.ML.OnnxRuntime\Tensors
INPUT_ENCODING = UTF-8
FILE_PATTERNS = *.cs
RECURSIVE = NO
EXCLUDE =
EXCLUDE_SYMLINKS = NO
EXCLUDE_PATTERNS = Native*.cs
EXCLUDE_SYMBOLS =
EXAMPLE_PATH = $(ORT_DOXY_SRC)\csharp\sample\Microsoft.ML.OnnxRuntime.InferenceSample\Program.cs
EXAMPLE_PATTERNS = *
EXAMPLE_RECURSIVE = NO
IMAGE_PATH =
INPUT_FILTER =
FILTER_PATTERNS =
FILTER_SOURCE_FILES = NO
FILTER_SOURCE_PATTERNS =
USE_MDFILE_AS_MAINPAGE =
#---------------------------------------------------------------------------
# Configuration options related to source browsing
#---------------------------------------------------------------------------
SOURCE_BROWSER = NO
INLINE_SOURCES = NO
STRIP_CODE_COMMENTS = YES
REFERENCED_BY_RELATION = NO
REFERENCES_RELATION = NO
REFERENCES_LINK_SOURCE = YES
SOURCE_TOOLTIPS = YES
USE_HTAGS = NO
VERBATIM_HEADERS = YES
CLANG_ASSISTED_PARSING = NO
CLANG_OPTIONS =
CLANG_DATABASE_PATH =
#---------------------------------------------------------------------------
# Configuration options related to the alphabetical class index
#---------------------------------------------------------------------------
ALPHABETICAL_INDEX = YES
COLS_IN_ALPHA_INDEX = 5
IGNORE_PREFIX =
#---------------------------------------------------------------------------
# Configuration options related to the HTML output
#---------------------------------------------------------------------------
GENERATE_HTML = YES
HTML_OUTPUT = html
HTML_FILE_EXTENSION = .html
HTML_HEADER =
HTML_FOOTER =
HTML_STYLESHEET =
HTML_EXTRA_STYLESHEET =
HTML_EXTRA_FILES =
HTML_COLORSTYLE_HUE = 220
HTML_COLORSTYLE_SAT = 100
HTML_COLORSTYLE_GAMMA = 80
HTML_TIMESTAMP = NO
HTML_DYNAMIC_MENUS = YES
HTML_DYNAMIC_SECTIONS = NO
HTML_INDEX_NUM_ENTRIES = 100
GENERATE_DOCSET = NO
DOCSET_FEEDNAME = "Doxygen generated docs"
DOCSET_BUNDLE_ID = org.doxygen.Project
DOCSET_PUBLISHER_ID = org.doxygen.Publisher
DOCSET_PUBLISHER_NAME = Publisher
GENERATE_HTMLHELP = NO
CHM_FILE =
HHC_LOCATION =
GENERATE_CHI = NO
CHM_INDEX_ENCODING =
BINARY_TOC = NO
TOC_EXPAND = NO
GENERATE_QHP = NO
QCH_FILE =
QHP_NAMESPACE = org.doxygen.Project
QHP_VIRTUAL_FOLDER = doc
QHP_CUST_FILTER_NAME =
QHP_CUST_FILTER_ATTRS =
QHP_SECT_FILTER_ATTRS =
QHG_LOCATION =
GENERATE_ECLIPSEHELP = NO
ECLIPSE_DOC_ID = org.doxygen.Project
DISABLE_INDEX = NO
GENERATE_TREEVIEW = NO
ENUM_VALUES_PER_LINE = 4
TREEVIEW_WIDTH = 250
EXT_LINKS_IN_WINDOW = NO
HTML_FORMULA_FORMAT = png
FORMULA_FONTSIZE = 10
FORMULA_TRANSPARENT = YES
FORMULA_MACROFILE =
USE_MATHJAX = NO
MATHJAX_FORMAT = HTML-CSS
MATHJAX_RELPATH = https://cdn.jsdelivr.net/npm/mathjax@2
MATHJAX_EXTENSIONS =
MATHJAX_CODEFILE =
SEARCHENGINE = YES
SERVER_BASED_SEARCH = NO
EXTERNAL_SEARCH = NO
SEARCHENGINE_URL =
SEARCHDATA_FILE = searchdata.xml
EXTERNAL_SEARCH_ID =
EXTRA_SEARCH_MAPPINGS =
#---------------------------------------------------------------------------
# Configuration options related to the LaTeX output
#---------------------------------------------------------------------------
GENERATE_LATEX = NO
LATEX_OUTPUT = latex
LATEX_CMD_NAME =
MAKEINDEX_CMD_NAME = makeindex
LATEX_MAKEINDEX_CMD = makeindex
COMPACT_LATEX = NO
PAPER_TYPE = a4
EXTRA_PACKAGES =
LATEX_HEADER =
LATEX_FOOTER =
LATEX_EXTRA_STYLESHEET =
LATEX_EXTRA_FILES =
PDF_HYPERLINKS = YES
USE_PDFLATEX = YES
LATEX_BATCHMODE = NO
LATEX_HIDE_INDICES = NO
LATEX_SOURCE_CODE = NO
LATEX_BIB_STYLE = plain
LATEX_TIMESTAMP = NO
LATEX_EMOJI_DIRECTORY =
#---------------------------------------------------------------------------
# Configuration options related to the RTF output
#---------------------------------------------------------------------------
GENERATE_RTF = NO
RTF_OUTPUT = rtf
COMPACT_RTF = NO
RTF_HYPERLINKS = NO
RTF_STYLESHEET_FILE =
RTF_EXTENSIONS_FILE =
RTF_SOURCE_CODE = NO
#---------------------------------------------------------------------------
# Configuration options related to the man page output
#---------------------------------------------------------------------------
GENERATE_MAN = NO
MAN_OUTPUT = man
MAN_EXTENSION = .3
MAN_SUBDIR =
MAN_LINKS = NO
#---------------------------------------------------------------------------
# Configuration options related to the XML output
#---------------------------------------------------------------------------
GENERATE_XML = NO
XML_OUTPUT = xml
XML_PROGRAMLISTING = YES
XML_NS_MEMB_FILE_SCOPE = NO
#---------------------------------------------------------------------------
# Configuration options related to the DOCBOOK output
#---------------------------------------------------------------------------
GENERATE_DOCBOOK = NO
DOCBOOK_OUTPUT = docbook
DOCBOOK_PROGRAMLISTING = NO
#---------------------------------------------------------------------------
# Configuration options for the AutoGen Definitions output
#---------------------------------------------------------------------------
GENERATE_AUTOGEN_DEF = NO
#---------------------------------------------------------------------------
# Configuration options related to the Perl module output
#---------------------------------------------------------------------------
GENERATE_PERLMOD = NO
PERLMOD_LATEX = NO
PERLMOD_PRETTY = YES
PERLMOD_MAKEVAR_PREFIX =
#---------------------------------------------------------------------------
# Configuration options related to the preprocessor
#---------------------------------------------------------------------------
ENABLE_PREPROCESSING = YES
MACRO_EXPANSION = NO
EXPAND_ONLY_PREDEF = NO
SEARCH_INCLUDES = YES
INCLUDE_PATH =
INCLUDE_FILE_PATTERNS =
PREDEFINED =
EXPAND_AS_DEFINED =
SKIP_FUNCTION_MACROS = YES
#---------------------------------------------------------------------------
# Configuration options related to external references
#---------------------------------------------------------------------------
TAGFILES =
GENERATE_TAGFILE =
ALLEXTERNALS = NO
EXTERNAL_GROUPS = YES
EXTERNAL_PAGES = YES
#---------------------------------------------------------------------------
# Configuration options related to the dot tool
#---------------------------------------------------------------------------
CLASS_DIAGRAMS = YES
DIA_PATH =
HIDE_UNDOC_RELATIONS = YES
HAVE_DOT = NO
DOT_NUM_THREADS = 0
DOT_FONTNAME = Helvetica
DOT_FONTSIZE = 10
DOT_FONTPATH =
CLASS_GRAPH = YES
COLLABORATION_GRAPH = YES
GROUP_GRAPHS = YES
UML_LOOK = NO
UML_LIMIT_NUM_FIELDS = 10
TEMPLATE_RELATIONS = NO
INCLUDE_GRAPH = YES
INCLUDED_BY_GRAPH = YES
CALL_GRAPH = NO
CALLER_GRAPH = NO
GRAPHICAL_HIERARCHY = YES
DIRECTORY_GRAPH = YES
DOT_IMAGE_FORMAT = png
INTERACTIVE_SVG = NO
DOT_PATH =
DOTFILE_DIRS =
MSCFILE_DIRS =
DIAFILE_DIRS =
PLANTUML_JAR_PATH =
PLANTUML_CFG_FILE =
PLANTUML_INCLUDE_PATH =
DOT_GRAPH_MAX_NODES = 50
MAX_DOT_GRAPH_DEPTH = 0
DOT_TRANSPARENT = NO
DOT_MULTI_TARGETS = NO
GENERATE_LEGEND = YES
DOT_CLEANUP = YES

View file

@ -38,6 +38,16 @@ jobs:
modifyEnvironment: true
workingFolder: '$(Build.BinariesDirectory)'
- script: |
set ORT_DOXY_SRC=$(Build.SourcesDirectory)
set ORT_DOXY_OUT=$(Build.BinariesDirectory)\$(BuildConfig)\$(BuildConfig)
mkdir %ORT_DOXY_SRC%
mkdir %ORT_DOXY_OUT%
"C:\Program Files\doxygen\bin\doxygen.exe" $(Build.SourcesDirectory)\tools\ci_build\github\Doxyfile_csharp.cfg
workingDirectory: '$(Build.SourcesDirectory)'
displayName: 'API Documentation Check and generate'
- script: |
python -m pip install -q pyopenssl setuptools wheel numpy flake8
workingDirectory: '$(Build.BinariesDirectory)'
@ -102,7 +112,7 @@ jobs:
projects: '$(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.sln'
configuration: '$(BuildConfig)'
arguments: '--configuration $(BuildConfig) -p:Platform="Any CPU" -p:OrtPackageId=$(OrtPackageId)'
workingDirectory: '$(Build.SourcesDirectory)\csharp'
workingDirectory: '$(Build.SourcesDirectory)\csharp'
- task: DotNetCoreCLI@2
displayName: 'Build C#'