diff --git a/.gitignore b/.gitignore
index 87424510f2..a3e9f552dd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -31,6 +31,7 @@ onnxruntime_profile*.json
/docs/python/*_LICENSE
/csharp/**/obj/
/csharp/**/bin/
+/csharp/Directory.Build.props
docs/python/*.onnx
*.onnx
onnxprofile_profile_test_*.json
diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.cs b/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.cs
index 9a22b1297e..9aded53148 100644
--- a/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.cs
+++ b/csharp/src/Microsoft.ML.OnnxRuntime/SessionOptions.cs
@@ -50,6 +50,24 @@ namespace Microsoft.ML.OnnxRuntime
return result == 0;
}
+ ///
+ /// Enable Sequential Execution. By default, it is enabled.
+ ///
+ ///
+ public void EnableSequentialExecution()
+ {
+ NativeMethods.OrtEnableSequentialExecution(_nativePtr);
+ }
+
+ ///
+ /// Disable Sequential Execution and enable Parallel Execution.
+ ///
+ ///
+ public void DisableSequentialExecution()
+ {
+ NativeMethods.OrtDisableSequentialExecution(_nativePtr);
+ }
+
///
/// Default instance
///
diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests/InferenceTest.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests/InferenceTest.cs
index af019ecbbe..30141cf5d7 100644
--- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests/InferenceTest.cs
+++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests/InferenceTest.cs
@@ -51,15 +51,18 @@ namespace Microsoft.ML.OnnxRuntime.Tests
}
[Theory]
- [InlineData(0)]
- [InlineData(2)]
- private void CanRunInferenceOnAModel(uint graphOptimizationLevel)
+ [InlineData(0, true)]
+ [InlineData(0, false)]
+ [InlineData(2, true)]
+ [InlineData(2, false)]
+ private void CanRunInferenceOnAModel(uint graphOptimizationLevel, bool disableSequentialExecution)
{
string modelPath = Path.Combine(Directory.GetCurrentDirectory(), "squeezenet.onnx");
// Set the graph optimization level for this session.
SessionOptions options = new SessionOptions();
Assert.True(options.SetSessionGraphOptimizationLevel(graphOptimizationLevel));
+ if(disableSequentialExecution) options.DisableSequentialExecution();
using (var session = new InferenceSession(modelPath, options))
{
@@ -215,11 +218,10 @@ namespace Microsoft.ML.OnnxRuntime.Tests
foreach (var opset in opsets)
{
var modelRoot = new DirectoryInfo(Path.Combine(modelsDir, opset));
- //var cwd = Directory.GetCurrentDirectory();
foreach (var modelDir in modelRoot.EnumerateDirectories())
{
String onnxModelFileName = null;
-
+
if (skipModels.Contains(modelDir.Name))
continue;
diff --git a/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Microsoft.ML.OnnxRuntime.PerfTool.csproj b/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Microsoft.ML.OnnxRuntime.PerfTool.csproj
index 8592e401b9..7d167dd064 100644
--- a/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Microsoft.ML.OnnxRuntime.PerfTool.csproj
+++ b/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Microsoft.ML.OnnxRuntime.PerfTool.csproj
@@ -32,6 +32,7 @@
+
diff --git a/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Program.cs b/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Program.cs
index 7524276549..ee77e5f484 100644
--- a/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Program.cs
+++ b/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/Program.cs
@@ -3,12 +3,9 @@
using System;
using System.Collections.Generic;
-using System.Text;
-using System.IO;
-using Microsoft.ML.OnnxRuntime;
using System.Numerics.Tensors;
using System.Diagnostics;
-
+using CommandLine;
namespace Microsoft.ML.OnnxRuntime.PerfTool
{
@@ -21,38 +18,51 @@ namespace Microsoft.ML.OnnxRuntime.PerfTool
TotalCount = 4
}
+ class CommandOptions
+ {
+ [Option('m', "model_file", Required = true, HelpText = "Model Path.")]
+ public string ModelFile { get; set; }
+
+ [Option('i', "input_file", Required = true, HelpText = "Input path.")]
+ public string InputFile { get; set; }
+
+        [Option('c', "iteration_count", Required = true, HelpText = "Number of iterations to run.")]
+ public int IterationCount { get; set; }
+
+        [Option('p', Required = false, HelpText = "Run with parallel execution. Default is false")]
+ public bool ParallelExecution { get; set; } = false;
+
+ [Option('o', "optimization_level", Required = false, HelpText = "Optimization Level. Default is 1, partial optimization.")]
+ public uint OptimizationLevel { get; set; } = 1;
+ }
+
class Program
{
-
public static void Main(string[] args)
{
- /*
- args[0] = model-file-name
- args[1] = input-file-name
- args[2] = iteration count
- */
-
- if (args.Length < 3)
- {
- PrintUsage();
- Environment.Exit(1);
- }
-
- string modelPath = args[0];
- string inputPath = args[1];
- int iteration = Int32.Parse(args[2]);
- Console.WriteLine("Running model {0} in OnnxRuntime with input {1} for {2} times", modelPath, inputPath, iteration);
+ var cmdOptions = Parser.Default.ParseArguments(args);
+ cmdOptions.WithParsed(
+ options => {
+ Main(options);
+ });
+ }
+ public static void Main(CommandOptions options)
+ {
+ string modelPath = options.ModelFile;
+ string inputPath = options.InputFile;
+ int iteration = options.IterationCount;
+ bool parallelExecution = options.ParallelExecution;
+ uint optLevel = options.OptimizationLevel;
+ Console.WriteLine("Running model {0} in OnnxRuntime:", modelPath);
+ Console.WriteLine("input:{0}", inputPath);
+ Console.WriteLine("iteration count:{0}", iteration);
+ Console.WriteLine("parallel execution:{0}", parallelExecution);
+ Console.WriteLine("optimization level:{0}", optLevel);
DateTime[] timestamps = new DateTime[(int)TimingPoint.TotalCount];
- RunModelOnnxRuntime(modelPath, inputPath, iteration, timestamps);
+ RunModelOnnxRuntime(modelPath, inputPath, iteration, timestamps, parallelExecution, optLevel);
PrintReport(timestamps, iteration);
Console.WriteLine("Done");
-
- Console.WriteLine("Running model {0} in Sonoma with input {1} for {2} times", modelPath, inputPath, iteration);
- RunModelOnnxRuntime(modelPath, inputPath, iteration, timestamps);
- PrintReport(timestamps, iteration);
- Console.WriteLine("Done");
-
}
@@ -74,7 +84,7 @@ namespace Microsoft.ML.OnnxRuntime.PerfTool
return tensorData.ToArray();
}
- static void RunModelOnnxRuntime(string modelPath, string inputPath, int iteration, DateTime[] timestamps)
+ static void RunModelOnnxRuntime(string modelPath, string inputPath, int iteration, DateTime[] timestamps, bool parallelExecution, uint optLevel)
{
if (timestamps.Length != (int)TimingPoint.TotalCount)
{
@@ -82,8 +92,10 @@ namespace Microsoft.ML.OnnxRuntime.PerfTool
}
timestamps[(int)TimingPoint.Start] = DateTime.Now;
-
- using (var session = new InferenceSession(modelPath))
+ SessionOptions options = new SessionOptions();
+ if (parallelExecution) options.DisableSequentialExecution();
+ options.SetSessionGraphOptimizationLevel(optLevel);
+ using (var session = new InferenceSession(modelPath, options))
{
timestamps[(int)TimingPoint.ModelLoaded] = DateTime.Now;
var inputMeta = session.InputMetadata;
diff --git a/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/SonomaRunner.cs b/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/SonomaRunner.cs
deleted file mode 100644
index 92b8763f67..0000000000
--- a/csharp/tools/Microsoft.ML.OnnxRuntime.PerfTool/SonomaRunner.cs
+++ /dev/null
@@ -1,58 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
-using Microsoft.ML.Scoring;
-using System.Diagnostics;
-
-namespace Microsoft.ML.OnnxRuntime.PerfTool
-{
- public class SonomaRunner
- {
- public static void RunModelSonoma(string modelPath, string inputPath, int iteration, DateTime[] timestamps)
- {
- if (timestamps.Length != (int)TimingPoint.TotalCount)
- {
- throw new ArgumentException("Timestamps array must have " + (int)TimingPoint.TotalCount + " size");
- }
-
- timestamps[(int)TimingPoint.Start] = DateTime.Now;
-
- var modelName = "lotusrt_squeezenet";
- using (var modelManager = new ModelManager(modelPath, true))
- {
- modelManager.InitOnnxModel(modelName, int.MaxValue);
- timestamps[(int)TimingPoint.ModelLoaded] = DateTime.Now;
-
- Tensor[] inputs = new Tensor[1];
- var inputShape = new long[] { 1, 3, 224, 224 }; // hardcoded values
-
- float[] inputData0 = Program.LoadTensorFromFile(inputPath);
- inputs[0] = Tensor.Create(inputData0, inputShape);
- string[] inputNames = new string[] {"data_0"};
- string[] outputNames = new string[] { "softmaxout_1" };
-
- timestamps[(int)TimingPoint.InputLoaded] = DateTime.Now;
-
- for (int i = 0; i < iteration; i++)
- {
- var outputs = modelManager.RunModel(
- modelName,
- int.MaxValue,
- inputNames,
- inputs,
- outputNames
- );
- Debug.Assert(outputs != null);
- Debug.Assert(outputs.Length == 1);
- }
-
- timestamps[(int)TimingPoint.RunComplete] = DateTime.Now;
- }
-
-
-
- }
-
-
- }
-}
diff --git a/docs/CSharp_API.md b/docs/CSharp_API.md
index 8f4c309917..97976ae921 100644
--- a/docs/CSharp_API.md
+++ b/docs/CSharp_API.md
@@ -111,6 +111,12 @@ Sets the graph optimization level for the session. Default is set to 1. Availabl
* 1 -> Enable basic optimizations such as redundant node removals and constant folding
* 2 -> Enable all optimizations (includes Level1 and more complex optimizations such as node fusions)
+ EnableSequentialExecution();
+Enable Sequential Execution. By default, it is enabled.
+
+ DisableSequentialExecution();
+Disable Sequential Execution and enable Parallel Execution.
+
AppendExecutionProvider(ExecutionProvider provider);
Appends execution provider to the session. For any operator in the graph the first execution provider that implements the operator will be user. ExecutionProvider is defined as the following enum.
diff --git a/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml
index 2340c150dd..e53eabe3e1 100644
--- a/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml
+++ b/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml
@@ -4,6 +4,10 @@ jobs:
buildDirectory: '$(Build.BinariesDirectory)'
steps:
- template: templates/set-test-data-variables-step.yml
+ - task: NuGetToolInstaller@0
+      displayName: Use NuGet 4.3
+ inputs:
+ versionSpec: 4.3.0
- task: NuGetCommand@2
displayName: 'NuGet restore'
inputs:
diff --git a/tools/ci_build/github/azure-pipelines/win-gpu-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/win-gpu-ci-pipeline.yml
index 0d0c7367e3..3eeff55d18 100644
--- a/tools/ci_build/github/azure-pipelines/win-gpu-ci-pipeline.yml
+++ b/tools/ci_build/github/azure-pipelines/win-gpu-ci-pipeline.yml
@@ -5,6 +5,10 @@ jobs:
CUDA_VERSION: '10.0'
steps:
- template: templates/set-test-data-variables-step.yml
+ - task: NuGetToolInstaller@0
+      displayName: Use NuGet 4.3
+ inputs:
+ versionSpec: 4.3.0
- task: NuGetCommand@2
displayName: 'NuGet restore'
inputs:
diff --git a/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml
index 986e9ca7df..3240058163 100644
--- a/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml
+++ b/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml
@@ -10,6 +10,10 @@ jobs:
steps:
# - template: templates/set-test-data-variables-step.yml
+ - task: NuGetToolInstaller@0
+      displayName: Use NuGet 4.3
+ inputs:
+ versionSpec: 4.3.0
- task: NuGetCommand@2
displayName: 'NuGet restore'
inputs: