mirror of
https://github.com/saymrwulf/onnxruntime.git
synced 2026-05-16 21:00:14 +00:00
### Description <!-- Describe your changes. --> - Create `OnnxruntimeJSIHelper` native module to provide two JSI functions - `jsiOnnxruntimeStoreArrayBuffer`: Store buffer in Blob Manager & return blob object (iOS: RCTBlobManager, Android: BlobModule) - `jsiOnnxruntimeResolveArrayBuffer`: Use blob object to get buffer - Part of the implementation references [react-native-blob-jsi-helper](https://github.com/mrousavy/react-native-blob-jsi-helper) - Replace base64 encode/decode - `loadModelFromBlob`: Renamed from `loadModelFromBase64EncodedBuffer` - `run`: Use blob object to replace input.data & results[].data For [this context](https://github.com/microsoft/onnxruntime/issues/16031#issuecomment-1556527812), it saves a lot of time and avoids blocking the JS thread while decoding the return type; it is 3700ms -> 5~20ms for that case. (The resolve function only takes 0.x ms.) ### Motivation and Context <!-- - Why is this change required? What problem does it solve? - If it fixes an open issue, please link to the issue here. --> It’s related to #16031, but not a full implementation of the migration to JSI. It just uses JSI through BlobManager to replace the slow part (base64 encode / decode). Rewriting it entirely in JSI could be complicated, e.g. type conversion and threading. This PR might be considered a minor change. /cc @skottmckay
57 lines
2.4 KiB
Objective-C
57 lines
2.4 KiB
Objective-C
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

#ifndef TensorHelper_h
#define TensorHelper_h

#import <Foundation/Foundation.h>
#import <React/RCTBlobManager.h>

// Note: Using below syntax for including ort c api and ort extensions headers to resolve a compiling error happened
// in an expo react native ios app (a redefinition error happened with multiple object types defined within
// ORT C API header). It's an edge case that the compiler allows both ort c api headers to be included when #include
// syntax doesn't match. For the case when extensions not enabled, it still requires a onnxruntime prefix directory for
// searching paths. Also in general, it's a convention to use #include for C/C++ headers rather than #import. See:
// https://google.github.io/styleguide/objcguide.html#import-and-include
// https://microsoft.github.io/objc-guide/Headers/ImportAndInclude.html
#ifdef ORT_ENABLE_EXTENSIONS
#include "onnxruntime_cxx_api.h"
#else
#include "onnxruntime/onnxruntime_cxx_api.h"
#endif

@interface TensorHelper : NSObject

/**
 * Supported tensor data types (JS-side type-name string constants).
 */
FOUNDATION_EXPORT NSString* const JsTensorTypeBool;
FOUNDATION_EXPORT NSString* const JsTensorTypeUnsignedByte;
FOUNDATION_EXPORT NSString* const JsTensorTypeByte;
FOUNDATION_EXPORT NSString* const JsTensorTypeShort;
FOUNDATION_EXPORT NSString* const JsTensorTypeInt;
FOUNDATION_EXPORT NSString* const JsTensorTypeLong;
FOUNDATION_EXPORT NSString* const JsTensorTypeFloat;
FOUNDATION_EXPORT NSString* const JsTensorTypeDouble;
FOUNDATION_EXPORT NSString* const JsTensorTypeString;

/**
 * Creates an input tensor from a map passed by react native js.
 * 'data' is a blob object whose buffer is stored in RCTBlobManager; it is resolved first and then a tensor is created.
 */
+(Ort::Value)createInputTensor:(RCTBlobManager *)blobManager
                         input:(NSDictionary*)input
                  ortAllocator:(OrtAllocator*)ortAllocator
                   allocations:(std::vector<Ort::MemoryAllocation>&)allocations;

/**
 * Creates an output map from output tensors.
 * Each output's data array is stored in RCTBlobManager (as a blob).
 */
+(NSDictionary*)createOutputTensor:(RCTBlobManager *)blobManager
                       outputNames:(const std::vector<const char*>&)outputNames
                            values:(const std::vector<Ort::Value>&)values;

@end

#endif /* TensorHelper_h */