// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.IO;
using System.Numerics.Tensors;
using System.Text;
using Microsoft.ML.OnnxRuntime;
namespace CSharpUsage
{
class Program
|
|
|
|
|
{
|
|
|
|
|
public static void Main(string[] args)
|
|
|
|
|
{
|
|
|
|
|
Console.WriteLine("Using API");
|
|
|
|
|
UseApi();
|
|
|
|
|
Console.WriteLine("Done");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
static void UseApi()
|
|
|
|
|
{
|
|
|
|
|
string modelPath = Directory.GetCurrentDirectory() + @"\squeezenet.onnx";
|
|
|
|
|
|
2019-04-03 00:23:14 +00:00
|
|
|
// Optional : Create session options and set the graph optimization level for the session
|
|
|
|
|
SessionOptions options = new SessionOptions();
|
|
|
|
|
options.SetSessionGraphOptimizationLevel(2);
|
2018-11-20 00:48:22 +00:00
|
|
|
|
2019-04-03 00:23:14 +00:00
|
|
|
using (var session = new InferenceSession(modelPath, options))
|
2018-11-20 00:48:22 +00:00
|
|
|
{
|
|
|
|
|
var inputMeta = session.InputMetadata;
|
2018-11-23 04:56:43 +00:00
|
|
|
var container = new List<NamedOnnxValue>();
|
2018-11-20 00:48:22 +00:00
|
|
|
|
2018-11-23 04:56:43 +00:00
|
|
|
float[] inputData = LoadTensorFromFile(@"bench.in"); // this is the data for only one input tensor for this model
|
2018-11-20 00:48:22 +00:00
|
|
|
|
2018-11-23 04:56:43 +00:00
|
|
|
foreach (var name in inputMeta.Keys)
|
|
|
|
|
{
|
|
|
|
|
var tensor = new DenseTensor<float>(inputData, inputMeta[name].Dimensions);
|
2018-11-26 06:46:21 +00:00
|
|
|
container.Add(NamedOnnxValue.CreateFromTensor<float>(name, tensor));
|
2018-11-23 04:56:43 +00:00
|
|
|
}
|
2018-11-20 00:48:22 +00:00
|
|
|
|
|
|
|
|
// Run the inference
|
2019-01-29 05:40:19 +00:00
|
|
|
using (var results = session.Run(container)) // results is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
|
2018-11-20 00:48:22 +00:00
|
|
|
{
|
2019-01-29 05:40:19 +00:00
|
|
|
// dump the results
|
|
|
|
|
foreach (var r in results)
|
|
|
|
|
{
|
|
|
|
|
Console.WriteLine("Output for {0}", r.Name);
|
|
|
|
|
Console.WriteLine(r.AsTensor<float>().GetArrayString());
|
|
|
|
|
}
|
2018-11-20 00:48:22 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-11-23 04:56:43 +00:00
|
|
|
static float[] LoadTensorFromFile(string filename)
|
2018-11-20 00:48:22 +00:00
|
|
|
{
|
2018-11-23 04:56:43 +00:00
|
|
|
var tensorData = new List<float>();
|
2018-11-20 00:48:22 +00:00
|
|
|
|
|
|
|
|
// read data from file
|
2018-11-23 04:56:43 +00:00
|
|
|
using (var inputFile = new System.IO.StreamReader(filename))
|
2018-11-20 00:48:22 +00:00
|
|
|
{
|
|
|
|
|
inputFile.ReadLine(); //skip the input name
|
|
|
|
|
string[] dataStr = inputFile.ReadLine().Split(new char[] { ',', '[', ']' }, StringSplitOptions.RemoveEmptyEntries);
|
|
|
|
|
for (int i = 0; i < dataStr.Length; i++)
|
|
|
|
|
{
|
2018-11-23 04:56:43 +00:00
|
|
|
tensorData.Add(Single.Parse(dataStr[i]));
|
2018-11-20 00:48:22 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-11-23 04:56:43 +00:00
|
|
|
return tensorData.ToArray();
|
2018-11-20 00:48:22 +00:00
|
|
|
}
|
|
|
|
|
|
2018-11-23 04:56:43 +00:00
|
|
|
|
2018-11-20 00:48:22 +00:00
|
|
|
}
}
|