2021-05-11 17:34:40 +00:00
|
|
|
// Copyright (c) Microsoft Corporation. All rights reserved.
|
|
|
|
|
// Licensed under the MIT License.
|
|
|
|
|
|
|
|
|
|
#import "TensorHelper.h"
|
|
|
|
|
#import <Foundation/Foundation.h>
|
|
|
|
|
|
|
|
|
|
@implementation TensorHelper
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Supported tensor data types
|
|
|
|
|
*/
|
2024-08-14 23:51:22 +00:00
|
|
|
// JS-side tensor element type names. These strings match the `type` field of
// tensors passed across the React Native bridge and are mapped to/from
// ONNXTensorElementDataType via the lookup tables built in +initialize.
NSString* const JsTensorTypeBool = @"bool";
NSString* const JsTensorTypeUnsignedByte = @"uint8";
NSString* const JsTensorTypeByte = @"int8";
NSString* const JsTensorTypeShort = @"int16";
NSString* const JsTensorTypeInt = @"int32";
NSString* const JsTensorTypeLong = @"int64";
NSString* const JsTensorTypeFloat = @"float32";
NSString* const JsTensorTypeDouble = @"float64";
NSString* const JsTensorTypeString = @"string";
|
2021-05-11 17:34:40 +00:00
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* It creates an input tensor from a map passed in by React Native JS.
|
2023-06-16 09:37:02 +00:00
|
|
|
* 'data' is a blob object whose buffer is stored in RCTBlobManager. It first resolves the blob and then creates a tensor.
|
2021-05-11 17:34:40 +00:00
|
|
|
*/
|
2024-08-14 23:51:22 +00:00
|
|
|
/**
 * Creates an `Ort::Value` input tensor from the tensor map passed in from
 * React Native JS.
 *
 * @param blobManager used to resolve the blob referenced by `input[@"data"]`
 *        for non-string tensors.
 * @param input dictionary with keys `dims` (NSArray of NSNumber),
 *        `type` (a JS tensor type string) and `data` (NSArray of NSString for
 *        string tensors; otherwise a blob descriptor with
 *        `blobId`/`offset`/`size`).
 * @param ortAllocator allocator used for the tensor's backing memory.
 * @param allocations receives ownership of the buffer allocated for the
 *        tensor; must outlive the returned tensor.
 */
+ (Ort::Value)createInputTensor:(RCTBlobManager*)blobManager
                          input:(NSDictionary*)input
                   ortAllocator:(OrtAllocator*)ortAllocator
                    allocations:(std::vector<Ort::MemoryAllocation>&)allocations {
  // shape
  NSArray* dimsArray = [input objectForKey:@"dims"];
  std::vector<int64_t> dims;
  dims.reserve(dimsArray.count);
  for (NSNumber* dim in dimsArray) {
    dims.emplace_back([dim longLongValue]);
  }

  // type
  ONNXTensorElementDataType tensorType = [self getOnnxTensorType:[input objectForKey:@"type"]];

  // data
  if (tensorType == ONNX_TENSOR_ELEMENT_DATA_TYPE_STRING) {
    // String tensors carry their values inline as an NSArray of NSString.
    NSArray* values = [input objectForKey:@"data"];
    auto inputTensor =
        Ort::Value::CreateTensor(ortAllocator, dims.data(), dims.size(), ONNX_TENSOR_ELEMENT_DATA_TYPE_STRING);
    size_t index = 0;
    for (NSString* value in values) {
      inputTensor.FillStringTensorElement([value UTF8String], index++);
    }
    return inputTensor;
  } else {
    // Numeric tensors reference a blob; resolve its bytes and copy them into
    // an ORT-allocated buffer (see the typed createInputTensor: overload).
    NSDictionary* data = [input objectForKey:@"data"];
    NSString* blobId = [data objectForKey:@"blobId"];
    long size = [[data objectForKey:@"size"] longValue];
    long offset = [[data objectForKey:@"offset"] longValue];
    auto buffer = [blobManager resolve:blobId offset:offset size:size];
    Ort::Value inputTensor = [self createInputTensor:tensorType
                                                dims:dims
                                              buffer:buffer
                                        ortAllocator:ortAllocator
                                         allocations:allocations];
    // The bytes were copied (memcpy) by the typed overload, so the blob can
    // be released immediately.
    [blobManager remove:blobId];
    return inputTensor;
  }
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* It creates an output map from an output tensor.
|
2023-06-16 09:37:02 +00:00
|
|
|
* The data array is stored in RCTBlobManager.
|
2021-05-11 17:34:40 +00:00
|
|
|
*/
|
2024-08-14 23:51:22 +00:00
|
|
|
/**
 * Creates an output map (output name → tensor dictionary) from session
 * output values.
 *
 * Each tensor dictionary contains `dims`, `type` and `data`. String tensors
 * inline their values as an NSArray of NSString; all other element types
 * store their bytes in RCTBlobManager and reference them via a
 * `blobId`/`offset`/`size` descriptor.
 *
 * @throws NSException if the name/value counts differ or any value is not a
 *         tensor.
 */
+ (NSDictionary*)createOutputTensor:(RCTBlobManager*)blobManager
                        outputNames:(const std::vector<const char*>&)outputNames
                             values:(const std::vector<Ort::Value>&)values {
  if (outputNames.size() != values.size()) {
    NSException* exception = [NSException exceptionWithName:@"create output tensor"
                                                     reason:@"output name and tensor count mismatched"
                                                   userInfo:nil];
    @throw exception;
  }

  NSMutableDictionary* outputTensorMap = [NSMutableDictionary dictionary];

  for (size_t i = 0; i < outputNames.size(); ++i) {
    const auto outputName = outputNames[i];
    const Ort::Value& value = values[i];

    if (!value.IsTensor()) {
      NSException* exception = [NSException exceptionWithName:@"create output tensor"
                                                       reason:@"only tensor type is supported"
                                                     userInfo:nil];
      @throw exception;
    }

    NSMutableDictionary* outputTensor = [NSMutableDictionary dictionary];

    // Query the type/shape info once per value instead of re-fetching it for
    // every field and every loop iteration.
    auto typeAndShapeInfo = value.GetTensorTypeAndShapeInfo();

    // dims
    NSMutableArray* outputDims = [NSMutableArray array];
    auto dims = typeAndShapeInfo.GetShape();
    for (auto dim : dims) {
      [outputDims addObject:[NSNumber numberWithLongLong:dim]];
    }
    outputTensor[@"dims"] = outputDims;

    // type
    outputTensor[@"type"] = [self getJsTensorType:typeAndShapeInfo.GetElementType()];

    // data
    if (typeAndShapeInfo.GetElementType() == ONNX_TENSOR_ELEMENT_DATA_TYPE_STRING) {
      // String tensors: copy every element out into an NSArray of NSString.
      NSMutableArray* buffer = [NSMutableArray array];
      // Hoisted out of the loop condition; also use an unsigned index to
      // avoid a signed/unsigned comparison and shadowing the outer `i`.
      const size_t elementCount = typeAndShapeInfo.GetElementCount();
      for (size_t elementIndex = 0; elementIndex < elementCount; ++elementIndex) {
        size_t elementLength = value.GetStringTensorElementLength(elementIndex);
        std::string element(elementLength, '\0');
        value.GetStringTensorElement(elementLength, elementIndex, (void*)element.data());
        [buffer addObject:[NSString stringWithUTF8String:element.data()]];
      }
      outputTensor[@"data"] = buffer;
    } else {
      // Numeric tensors: hand the raw bytes to RCTBlobManager and reference
      // them by blob id.
      NSData* data = [self createOutputTensor:value];
      NSString* blobId = [blobManager store:data];
      outputTensor[@"data"] = @{
        @"blobId" : blobId,
        @"offset" : @0,
        @"size" : @(data.length),
      };
    }

    outputTensorMap[[NSString stringWithUTF8String:outputName]] = outputTensor;
  }

  return outputTensorMap;
}
|
|
|
|
|
|
|
|
|
|
// Copies `buffer` into a freshly ORT-allocated buffer of element type T and
// wraps it in an Ort::Value. Ownership of the allocation is transferred to
// `allocations`, which must outlive the returned tensor.
template <typename T>
static Ort::Value createInputTensorT(OrtAllocator* ortAllocator, const std::vector<int64_t>& dims, NSData* buffer,
                                     std::vector<Ort::MemoryAllocation>& allocations) {
  const NSUInteger byteLength = buffer.length;
  T* typedData = static_cast<T*>(ortAllocator->Alloc(ortAllocator, byteLength));
  allocations.emplace_back(ortAllocator, typedData, byteLength);
  memcpy(static_cast<void*>(typedData), buffer.bytes, byteLength);
  const size_t elementCount = byteLength / sizeof(T);
  return Ort::Value::CreateTensor<T>(ortAllocator->Info(ortAllocator), typedData, elementCount, dims.data(),
                                     dims.size());
}
|
|
|
|
|
|
|
|
|
|
/**
 * Dispatches on `tensorType` to build a typed input tensor from raw bytes.
 *
 * String tensors are handled by the caller, not here. Unsupported element
 * types (undefined, string, uint16/uint32/uint64, float16/bfloat16, complex)
 * raise an NSException.
 */
+ (Ort::Value)createInputTensor:(ONNXTensorElementDataType)tensorType
                           dims:(const std::vector<int64_t>&)dims
                         buffer:(NSData*)buffer
                   ortAllocator:(OrtAllocator*)ortAllocator
                    allocations:(std::vector<Ort::MemoryAllocation>&)allocations {
  switch (tensorType) {
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT:
      return createInputTensorT<float>(ortAllocator, dims, buffer, allocations);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT8:
      return createInputTensorT<uint8_t>(ortAllocator, dims, buffer, allocations);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT8:
      return createInputTensorT<int8_t>(ortAllocator, dims, buffer, allocations);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT16:
      return createInputTensorT<int16_t>(ortAllocator, dims, buffer, allocations);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT32:
      return createInputTensorT<int32_t>(ortAllocator, dims, buffer, allocations);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT64:
      return createInputTensorT<int64_t>(ortAllocator, dims, buffer, allocations);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_BOOL:
      return createInputTensorT<bool>(ortAllocator, dims, buffer, allocations);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_DOUBLE:
      return createInputTensorT<double_t>(ortAllocator, dims, buffer, allocations);
    // Every other element type falls through to the unsupported-type error.
    default: {
      NSException* exception = [NSException exceptionWithName:@"create input tensor"
                                                       reason:@"unsupported tensor type"
                                                     userInfo:nil];
      @throw exception;
    }
  }
}
|
|
|
|
|
|
2024-08-14 23:51:22 +00:00
|
|
|
// Wraps the tensor's raw element data in an NSData WITHOUT copying.
// NOTE(review): `freeWhenDone:false` means the returned NSData aliases memory
// owned by the Ort::Value — it is only valid while that tensor is alive.
// Presumably the caller (RCTBlobManager store:) copies or finishes with the
// bytes before the tensor is destroyed; confirm against the caller.
template <typename T>
static NSData* createOutputTensorT(const Ort::Value& tensor) {
  const auto data = tensor.GetTensorData<T>();
  return [NSData dataWithBytesNoCopy:(void*)data
                              length:tensor.GetTensorTypeAndShapeInfo().GetElementCount() * sizeof(T)
                        freeWhenDone:false];
}
|
|
|
|
|
|
2024-08-14 23:51:22 +00:00
|
|
|
/**
 * Returns the tensor's contents as an NSData view (no copy; see
 * createOutputTensorT).
 *
 * Unsupported element types (undefined, string, uint16/uint32/uint64,
 * float16/bfloat16, complex) raise an NSException; string tensors are
 * serialized elsewhere.
 */
+ (NSData*)createOutputTensor:(const Ort::Value&)tensor {
  ONNXTensorElementDataType tensorType = tensor.GetTensorTypeAndShapeInfo().GetElementType();

  switch (tensorType) {
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT:
      return createOutputTensorT<float>(tensor);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT8:
      return createOutputTensorT<uint8_t>(tensor);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT8:
      return createOutputTensorT<int8_t>(tensor);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT16:
      return createOutputTensorT<int16_t>(tensor);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT32:
      return createOutputTensorT<int32_t>(tensor);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_INT64:
      return createOutputTensorT<int64_t>(tensor);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_BOOL:
      return createOutputTensorT<bool>(tensor);
    case ONNX_TENSOR_ELEMENT_DATA_TYPE_DOUBLE:
      return createOutputTensorT<double_t>(tensor);
    // Every other element type falls through to the unsupported-type error.
    default: {
      NSException* exception = [NSException exceptionWithName:@"create output tensor"
                                                       reason:@"unsupported tensor type"
                                                     userInfo:nil];
      @throw exception;
    }
  }
}
|
|
|
|
|
|
2024-08-14 23:51:22 +00:00
|
|
|
// Bidirectional lookup tables between JS tensor type strings and
// ONNXTensorElementDataType values; populated once in +initialize.
NSDictionary* JsTensorTypeToOnnxTensorTypeMap;
NSDictionary* OnnxTensorTypeToJsTensorTypeMap;

+ (void)initialize {
  // +initialize also runs for subclasses that don't override it; guard so the
  // maps are built exactly once, for TensorHelper itself.
  if (self != [TensorHelper class]) {
    return;
  }

  JsTensorTypeToOnnxTensorTypeMap = @{
    JsTensorTypeFloat : @(ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT),
    JsTensorTypeUnsignedByte : @(ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT8),
    JsTensorTypeByte : @(ONNX_TENSOR_ELEMENT_DATA_TYPE_INT8),
    JsTensorTypeShort : @(ONNX_TENSOR_ELEMENT_DATA_TYPE_INT16),
    JsTensorTypeInt : @(ONNX_TENSOR_ELEMENT_DATA_TYPE_INT32),
    JsTensorTypeLong : @(ONNX_TENSOR_ELEMENT_DATA_TYPE_INT64),
    JsTensorTypeString : @(ONNX_TENSOR_ELEMENT_DATA_TYPE_STRING),
    JsTensorTypeBool : @(ONNX_TENSOR_ELEMENT_DATA_TYPE_BOOL),
    JsTensorTypeDouble : @(ONNX_TENSOR_ELEMENT_DATA_TYPE_DOUBLE)
  };

  OnnxTensorTypeToJsTensorTypeMap = @{
    @(ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT) : JsTensorTypeFloat,
    @(ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT8) : JsTensorTypeUnsignedByte,
    @(ONNX_TENSOR_ELEMENT_DATA_TYPE_INT8) : JsTensorTypeByte,
    @(ONNX_TENSOR_ELEMENT_DATA_TYPE_INT16) : JsTensorTypeShort,
    @(ONNX_TENSOR_ELEMENT_DATA_TYPE_INT32) : JsTensorTypeInt,
    @(ONNX_TENSOR_ELEMENT_DATA_TYPE_INT64) : JsTensorTypeLong,
    @(ONNX_TENSOR_ELEMENT_DATA_TYPE_STRING) : JsTensorTypeString,
    @(ONNX_TENSOR_ELEMENT_DATA_TYPE_BOOL) : JsTensorTypeBool,
    @(ONNX_TENSOR_ELEMENT_DATA_TYPE_DOUBLE) : JsTensorTypeDouble
  };
}
|
|
|
|
|
|
2024-08-14 23:51:22 +00:00
|
|
|
/// Maps a JS tensor type string (e.g. @"float32") to its
/// ONNXTensorElementDataType, or ONNX_TENSOR_ELEMENT_DATA_TYPE_UNDEFINED when
/// the string is not a supported type.
+ (ONNXTensorElementDataType)getOnnxTensorType:(const NSString*)type {
  NSNumber* onnxType = [JsTensorTypeToOnnxTensorTypeMap objectForKey:type];
  if (onnxType == nil) {
    return ONNX_TENSOR_ELEMENT_DATA_TYPE_UNDEFINED;
  }
  return (ONNXTensorElementDataType)[onnxType intValue];
}
|
|
|
|
|
|
2024-08-14 23:51:22 +00:00
|
|
|
/// Maps an ONNXTensorElementDataType to its JS tensor type string, or
/// @"undefined" when the type has no JS counterpart.
+ (NSString*)getJsTensorType:(ONNXTensorElementDataType)type {
  NSString* jsType = [OnnxTensorTypeToJsTensorTypeMap objectForKey:@(type)];
  return jsType != nil ? jsType : @"undefined";
}
|
|
|
|
|
|
|
|
|
|
@end
|