onnxruntime/csharp/sample/InferenceSample/Microsoft.ML.OnnxRuntime.InferenceSample.Maui/MainPage.xaml.cs
Scott McKay 0e85af6990
Add MAUI csharp\sample\InferenceSample\ project (#12356)
Add csharp\sample\InferenceSample\Microsoft.ML.OnnxRuntime.InferenceSample.Maui so we have an equivalent setup for MAUI as for the other platforms.

This provides a setup to do some basic local testing of using an InferenceSession in a MAUI app.
2022-07-29 07:22:36 +10:00

84 lines
3.3 KiB
C#

namespace Microsoft.ML.OnnxRuntime.InferenceSample.Maui;
//using Microsoft.Maui.Controls;
/// <summary>
/// Demo page that runs a series of ONNX Runtime inference tests via
/// <see cref="InferenceSampleApi"/>, recreating the inference session with a
/// range of session-option configurations. Progress text is appended to the
/// UI label and echoed to the Console.
/// </summary>
public partial class MainPage : ContentPage
{
    // Best Practice: create the inference session (which loads and optimizes the model) once and not per inference
    // as it can be expensive and time consuming.
    private readonly InferenceSampleApi inferenceSampleApi;

    public MainPage()
    {
        InitializeComponent();
        inferenceSampleApi = new InferenceSampleApi();
    }

    /// <summary>Reset the output text each time the page becomes visible.</summary>
    protected override void OnAppearing()
    {
        base.OnAppearing();
        OutputLabel.Text = "Press 'Run Tests'.\n";
    }

    /// <summary>
    /// Runs all inference tests on a background thread (so UI updates are not
    /// blocked), writing progress to <c>OutputLabel</c> and the Console.
    /// Exceptions from any test propagate to the caller via the returned Task.
    /// </summary>
    private async Task ExecuteTests()
    {
        // Marshal label mutations onto the UI thread; the Console write can
        // happen from any thread.
        Action<Label, string> addOutput = (label, text) =>
        {
            Application.Current.Dispatcher.Dispatch(() => { label.Text += text; });
            Console.Write(text);
        };

        OutputLabel.Text = "Testing execution\nComplete output is written to Console in this trivial example.\n\n";

        // run the testing in a background thread so updates to the UI aren't blocked
        await Task.Run(() =>
        {
            // First, exercise the session that was created in the constructor.
            // NOTE(review): this message previously duplicated the one below;
            // this run uses the pre-created session, not a recreated one.
            addOutput(OutputLabel, "Testing using the pre-created inference session... ");
            inferenceSampleApi.Execute();
            addOutput(OutputLabel, "done.\n");
            Thread.Sleep(1000); // artificial delay so the UI updates gradually

            // demonstrate a range of usages by recreating the inference session with different session options.
            addOutput(OutputLabel, "Testing using default platform-specific session options... ");
            inferenceSampleApi.CreateInferenceSession(SessionOptionsContainer.Create());
            inferenceSampleApi.Execute();
            addOutput(OutputLabel, "done.\n");
            Thread.Sleep(1000);

            addOutput(OutputLabel, "Testing using named platform-specific session options... ");
            inferenceSampleApi.CreateInferenceSession(SessionOptionsContainer.Create("ort_with_npu"));
            inferenceSampleApi.Execute();
            addOutput(OutputLabel, "done.\n");
            Thread.Sleep(1000);

            addOutput(OutputLabel, "Testing using default platform-specific session options via ApplyConfiguration extension... ");
            inferenceSampleApi.CreateInferenceSession(new SessionOptions().ApplyConfiguration());
            inferenceSampleApi.Execute();
            addOutput(OutputLabel, "done.\n");
            Thread.Sleep(1000);

            addOutput(OutputLabel, "Testing using named platform-specific session options via ApplyConfiguration extension... ");
            inferenceSampleApi.CreateInferenceSession(new SessionOptions().ApplyConfiguration("ort_with_npu"));
            inferenceSampleApi.Execute();
            addOutput(OutputLabel, "done.\n\n");
            Thread.Sleep(1000);
        });

        addOutput(OutputLabel, "Testing successfully completed! See the Console log for more info.");
    }

    /// <summary>
    /// 'Run Tests' click handler. async void is acceptable here only because
    /// this is a top-level event handler; failures are surfaced as an alert.
    /// </summary>
    private async void Start_Clicked(object sender, EventArgs e)
    {
        try
        {
            await ExecuteTests();
        }
        catch (Exception ex)
        {
            // await rethrows the original exception (not an AggregateException),
            // so ex.Message is the actual failure text. DisplayAlert must run
            // on the UI thread.
            MainThread.BeginInvokeOnMainThread(() => DisplayAlert("Error", ex.Message, "OK"));
        }
    }
}