const fs = require('fs');
const util = require('util');
const InferenceSession = require('onnxruntime').InferenceSession;

// use an async context to call onnxruntime functions.
async function main() {
    try {
        // session options: please refer to the other example for detailed usage of session options
        const options = { intraOpNumThreads: 1 };

        //
        // create inference session from an ONNX model file path
        //
        const session01 = await InferenceSession.create('./model.onnx');
        const session01_B = await InferenceSession.create('./model.onnx', options); // specify options

        //
        // create inference session from a Node.js Buffer (Uint8Array)
        //
        const buffer02 = await util.promisify(fs.readFile)('./model.onnx'); // buffer is Uint8Array
        const session02 = await InferenceSession.create(buffer02);
        const session02_B = await InferenceSession.create(buffer02, options); // specify options

        //
        // create inference session from an ArrayBuffer
        //
        const arrayBuffer03 = buffer02.buffer;
        const offset03 = buffer02.byteOffset;
        const length03 = buffer02.byteLength;
        const session03 = await InferenceSession.create(arrayBuffer03, offset03, length03);
        const session03_B = await InferenceSession.create(arrayBuffer03, offset03, length03, options); // specify options
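
        //
        // minimal sketch of inspecting a created session, assuming the binding exposes the
        // `inputNames` / `outputNames` properties on InferenceSession (as the onnxruntime
        // Node.js API does); running inference itself is covered by the other examples.
        //
        console.log(`model inputs: [${session01.inputNames}], outputs: [${session01.outputNames}]`);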
    } catch (e) {
        console.error(`failed to create inference session: ${e}.`);
    }
}

main();