### Description
See commit
454996d496
for the manual changes (auto-generated formatting changes are excluded).
### Why
The toolset around the old clang-format setup is out of date, which hurts development efficiency:
- The NPM package `clang-format` is already in maintenance mode and has not been updated in two years.
- The VSCode extension for clang-format has not been maintained for a while, and a recent Node.js security update broke it entirely on Windows.
No one in the community seems interested in fixing these issues, so this change switches to Prettier, the most popular TS/JS formatter.
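As a rough illustration only (the exact configuration adopted by this PR is not reproduced here), adopting Prettier usually comes down to a small config file plus a format script. The file name and option values below are assumptions for the sketch, not the settings actually committed:

```js
// Hypothetical prettier.config.js -- option values are illustrative, not the ones used in this PR.
module.exports = {
  printWidth: 120, // assumed line width
  singleQuote: true, // assumed quote style
  tabWidth: 2,
};
```

A `"format": "prettier --write ."` entry in `package.json` (again, an assumption) would then replace the old clang-format invocation.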
### How to merge
This change can easily break the build:
- Be careful of any new commits on main that are not included in this PR.
- Be careful of other PRs that already passed CI and may still merge after this PR lands.
So, make sure there are no new commits on main before merging this one, and invalidate JS PRs that already passed CI, forcing them to update to the latest main before merging.
77 lines · 2.3 KiB · TypeScript
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

import { Backend, InferenceSession, InferenceSessionHandler, SessionHandler } from 'onnxruntime-common';

import { Binding, binding } from './binding';

// Session handler backed by the native Node.js binding.
class OnnxruntimeSessionHandler implements InferenceSessionHandler {
  #inferenceSession: Binding.InferenceSession;

  constructor(pathOrBuffer: string | Uint8Array, options: InferenceSession.SessionOptions) {
    this.#inferenceSession = new binding.InferenceSession();
    if (typeof pathOrBuffer === 'string') {
      // Load the model from a file path.
      this.#inferenceSession.loadModel(pathOrBuffer, options);
    } else {
      // Load the model from an in-memory buffer.
      this.#inferenceSession.loadModel(pathOrBuffer.buffer, pathOrBuffer.byteOffset, pathOrBuffer.byteLength, options);
    }

    this.inputNames = this.#inferenceSession.inputNames;
    this.outputNames = this.#inferenceSession.outputNames;
  }

  async dispose(): Promise<void> {
    this.#inferenceSession.dispose();
  }

  readonly inputNames: string[];
  readonly outputNames: string[];

  startProfiling(): void {
    // TODO: implement profiling
  }
  endProfiling(): void {
    // TODO: implement profiling
  }

  async run(
    feeds: SessionHandler.FeedsType,
    fetches: SessionHandler.FetchesType,
    options: InferenceSession.RunOptions,
  ): Promise<SessionHandler.ReturnType> {
    return new Promise((resolve, reject) => {
      // Defer the synchronous native call so it does not block the current tick.
      setImmediate(() => {
        try {
          resolve(this.#inferenceSession.run(feeds, fetches, options));
        } catch (e) {
          // reject if any error is thrown
          reject(e);
        }
      });
    });
  }
}

// Backend implementation that creates native inference session handlers.
class OnnxruntimeBackend implements Backend {
  async init(): Promise<void> {
    return Promise.resolve();
  }

  async createInferenceSessionHandler(
    pathOrBuffer: string | Uint8Array,
    options?: InferenceSession.SessionOptions,
  ): Promise<InferenceSessionHandler> {
    return new Promise((resolve, reject) => {
      setImmediate(() => {
        try {
          resolve(new OnnxruntimeSessionHandler(pathOrBuffer, options || {}));
        } catch (e) {
          // reject if any error is thrown
          reject(e);
        }
      });
    });
  }
}

export const onnxruntimeBackend = new OnnxruntimeBackend();
export const listSupportedBackends = binding.listSupportedBackends;
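For context, here is a minimal sketch of how this backend is typically consumed. It assumes the `registerBackend`, `InferenceSession`, and `Tensor` exports of onnxruntime-common; the priority value, model path, tensor shape, and input name are placeholders for illustration, and this is not the package's actual entry point:

```ts
// Hypothetical registration/usage sketch (assumptions noted inline).
import { InferenceSession, registerBackend, Tensor } from 'onnxruntime-common';
import { onnxruntimeBackend, listSupportedBackends } from './backend';

// Register the native backend under each name the binding reports.
// Assumes each entry exposes a `name`; the priority (100) is illustrative.
for (const backend of listSupportedBackends()) {
  registerBackend(backend.name, onnxruntimeBackend, 100);
}

async function main() {
  // 'model.onnx' and the [1, 4] float32 input are placeholders for a real model.
  const session = await InferenceSession.create('model.onnx');
  const input = new Tensor('float32', new Float32Array(4), [1, 4]);
  const results = await session.run({ [session.inputNames[0]]: input });
  console.log(results);
}

main();
```

Because `createInferenceSessionHandler` and `run` wrap the synchronous native calls in `setImmediate`, session creation and inference resolve asynchronously even though the underlying binding is blocking.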