onnxruntime/js/react_native/ios/TensorHelper.h
Sunghoon 887c3149e3
[js/react_native] Use a mobile ORT instead of a full ORT (#8042)
* Change full ort to mobile ort

* Update Android example to load mobile ort

* Change the format of test models to ort

* update ios to use mobile ort

* revise README

* use onnxruntime-mobile-c CocoaPods in a npm package
2021-06-15 13:36:05 -07:00

41 lines
1.4 KiB
Objective-C

// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef TensorHelper_h
#define TensorHelper_h
#import <Foundation/Foundation.h>
#import <onnxruntime/onnxruntime_cxx_api.h>
/**
 * Helper that converts between the tensor representation passed from
 * react-native JS (an NSDictionary with base64-encoded data) and ONNX
 * Runtime Ort::Value tensors.
 */
@interface TensorHelper : NSObject

/**
 * Supported tensor data types.
 */
FOUNDATION_EXPORT NSString* const JsTensorTypeBool;
FOUNDATION_EXPORT NSString* const JsTensorTypeByte;
FOUNDATION_EXPORT NSString* const JsTensorTypeShort;
FOUNDATION_EXPORT NSString* const JsTensorTypeInt;
FOUNDATION_EXPORT NSString* const JsTensorTypeLong;
FOUNDATION_EXPORT NSString* const JsTensorTypeFloat;
FOUNDATION_EXPORT NSString* const JsTensorTypeDouble;
FOUNDATION_EXPORT NSString* const JsTensorTypeString;

/**
 * It creates an input tensor from a map passed by react native js.
 * 'data' must be a string type as data is encoded as base64. It first decodes it and creates a tensor.
 *
 * @param input tensor map from JS; its 'data' entry is a base64-encoded string
 *        (exact key schema is defined by the implementation — verify there).
 * @param ortAllocator allocator used to obtain the tensor's backing memory.
 * @param allocations out-parameter collecting the memory allocations that back
 *        the returned tensor; presumably the caller must keep this vector alive
 *        for the lifetime of the returned Ort::Value — confirm in the .mm.
 * @return an Ort::Value wrapping the decoded tensor data.
 */
+ (Ort::Value)createInputTensor:(NSDictionary*)input
                   ortAllocator:(OrtAllocator*)ortAllocator
                    allocations:(std::vector<Ort::MemoryAllocation>&)allocations;

/**
 * It creates an output map from an output tensor.
 * a data array is encoded as base64 string.
 *
 * @param outputNames names of the outputs, parallel to 'values'.
 * @param values output tensors produced by a session run.
 * @return a map keyed by output name whose tensor data is base64-encoded.
 */
+ (NSDictionary*)createOutputTensor:(const std::vector<const char*>&)outputNames
                             values:(const std::vector<Ort::Value>&)values;

@end
#endif /* TensorHelper_h */