mirror of
https://github.com/saymrwulf/onnxruntime.git
synced 2026-05-14 20:48:00 +00:00
* Initial update of readme * Readme updates * Review of consolidated README (#3930) * Proposed updates for readme (#3953) I found some of the information was duplicated within the doc, so attempted to streamline * Fix links * More updates - fix build instructions - nodejs doc reorganization - roadmap update - version fixes * Update ORT Server build instructions * More doc cleanup * fix python dev notes name * Update nodejs and some links * sync eigen version back to master * Minor fixes * add nodejs to sample table of contents * Update README.md * Update README.md * Update README.md * Update README.md * Update README.md * Update README.md * address PR feedback * address PR feedback * nodejs build instruction * Update Java instructions to include gradle * Roadmap refresh Reformat some data, fix link, minor rewording * Clarify Visual C++ runtime req Co-authored-by: Nat Kershaw (MSFT) <nakersha@microsoft.com> Co-authored-by: Prasanth Pulavarthi <prasantp@microsoft.com> Co-authored-by: manashgoswami <magoswam@microsoft.com>
37 lines
1.5 KiB
JavaScript
37 lines
1.5 KiB
JavaScript
const fs = require('fs');
|
|
const util = require('util');
|
|
const InferenceSession = require('onnxruntime').InferenceSession;
|
|
|
|
// use an async context to call onnxruntime functions.
/**
 * Demonstrates the three ways to create an onnxruntime InferenceSession:
 * 1. from an ONNX model file path,
 * 2. from a Node.js Buffer (a Uint8Array view over the file contents),
 * 3. from an ArrayBuffer plus an explicit byte offset and length.
 *
 * Each variant is shown with and without a SessionOptions object.
 * Errors are caught and logged to the console, so the returned
 * promise never rejects.
 *
 * @returns {Promise<void>}
 */
async function main() {
  try {
    // session options: please refer to the other example for detailed usage of session options
    const options = { intraOpNumThreads: 1 };

    //
    // create inference session from an ONNX model file path
    //
    const session01 = await InferenceSession.create('./model.onnx');
    const session01_B = await InferenceSession.create('./model.onnx', options); // specify options

    //
    // create inference session from a Node.js Buffer (Uint8Array)
    //
    const buffer02 = await util.promisify(fs.readFile)('./model.onnx'); // buffer is Uint8Array
    const session02 = await InferenceSession.create(buffer02);
    const session02_B = await InferenceSession.create(buffer02, options); // specify options

    //
    // create inference session from an ArrayBuffer
    //
    const arrayBuffer03 = buffer02.buffer;
    const offset03 = buffer02.byteOffset;
    const length03 = buffer02.byteLength;
    const session03 = await InferenceSession.create(arrayBuffer03, offset03, length03);
    // FIX: the original omitted `options` here even though the trailing comment
    // claimed to specify them; create(buffer, byteOffset, byteLength, options)
    // takes the options object as the fourth argument.
    const session03_B = await InferenceSession.create(arrayBuffer03, offset03, length03, options); // specify options
  } catch (e) {
    console.error(`failed to inference ONNX model: ${e}.`);
  }
}
// Entry point: kick off the async demo. `main` handles all of its own
// errors internally, so the returned promise is deliberately not awaited;
// `void` makes the fire-and-forget intent explicit.
void main();