mirror of
https://github.com/saymrwulf/onnxruntime.git
synced 2026-05-16 21:00:14 +00:00
Add Xamarin support to the ORT nuget packages. - Update C# code to support Xamarin builds for iOS and Android - refactor some things to split out common code - include iOS and Android ORT native shared library in native nuget package
51 lines
2 KiB
C#
using System;

using Xamarin.Forms;

namespace Microsoft.ML.OnnxRuntime.InferenceSample.Forms
{
    /// <summary>
    /// Main (and only) page of the inference sample app.
    /// Runs a series of inference demonstrations when the page appears.
    /// </summary>
    public partial class MainPage : ContentPage
    {
        // Shared sample API instance; holds the inference session between runs.
        private readonly InferenceSampleApi inferenceSampleApi;

        public MainPage()
        {
            InitializeComponent();

            // in general create the inference session (which loads and optimizes the model) once and not per inference
            // as it can be expensive and time consuming.
            inferenceSampleApi = new InferenceSampleApi();
        }

        /// <summary>
        /// Runs the sample inferences each time the page becomes visible,
        /// demonstrating a range of usages by recreating the inference session
        /// with different session options.
        /// </summary>
        protected override void OnAppearing()
        {
            base.OnAppearing();

            // First run uses the session created in the constructor (no options).
            RunInference("Using API");

            RunInference("Using API (using default platform-specific session options)",
                         SessionOptionsContainer.Create());

            RunInference("Using API (using named platform-specific session options)",
                         SessionOptionsContainer.Create("ort_with_npu"));

            RunInference(
                "Using API (using default platform-specific session options via ApplyConfiguration extension)",
                new SessionOptions().ApplyConfiguration());

            RunInference(
                "Using API (using named platform-specific session options via ApplyConfiguration extension)",
                new SessionOptions().ApplyConfiguration("ort_with_npu"));
        }

        /// <summary>
        /// Logs <paramref name="description"/>, optionally recreates the inference
        /// session with <paramref name="options"/>, executes inference, and logs completion.
        /// </summary>
        /// <param name="description">Message written to the console before executing.</param>
        /// <param name="options">
        /// Session options to recreate the inference session with, or null to reuse
        /// the existing session.
        /// </param>
        private void RunInference(string description, SessionOptions options = null)
        {
            Console.WriteLine(description);

            if (options != null)
            {
                inferenceSampleApi.CreateInferenceSession(options);
            }

            inferenceSampleApi.Execute();
            Console.WriteLine("Done");
        }
    }
}
|