/**
 * Copyright 2020-2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "src/runtime/kernel/arm/fp16/common_fp16.h"
#include "nnacl/fp16/cast_fp16.h"
#include "include/errorcode.h"

using mindspore::lite::RET_ERROR;
using mindspore::lite::RET_OK;

namespace mindspore::kernel {
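// Returns an fp16 view of the input tensor: an fp32 input is copied into a newly
// allocated fp16 buffer from ctx->allocator (caller is responsible for freeing it);
// an input that is already fp16 is returned as-is without copying.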
float16_t *ConvertInputFp32toFp16(lite::Tensor *input, const lite::InnerContext *ctx) {
  float16_t *fp16_data = nullptr;
  auto data_type = input->data_type();
  if (data_type == kNumberTypeFloat32) {
    auto ele_num = input->ElementsNum();
    fp16_data = reinterpret_cast<float16_t *>(ctx->allocator->Malloc(ele_num * sizeof(float16_t)));
    if (fp16_data == nullptr) {
      MS_LOG(ERROR) << "malloc fp16_data failed.";
      return nullptr;
    }
    auto ori_data = reinterpret_cast<float *>(input->MutableData());
    Float32ToFloat16(ori_data, fp16_data, ele_num);
  } else {
    fp16_data = reinterpret_cast<float16_t *>(input->MutableData());
  }
  return fp16_data;
}

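// Allocates an fp16 staging buffer for an fp32 output tensor from ctx->allocator;
// if the output tensor is already fp16, returns its own data pointer instead of allocating.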
float16_t *MallocOutputFp16(lite::Tensor *output, const lite::InnerContext *ctx) {
  float16_t *fp16_data = nullptr;
  auto data_type = output->data_type();
  if (data_type == kNumberTypeFloat32) {
    auto ele_num = output->ElementsNum();
    fp16_data = reinterpret_cast<float16_t *>(ctx->allocator->Malloc(ele_num * sizeof(float16_t)));
    if (fp16_data == nullptr) {
      MS_LOG(ERROR) << "malloc fp16_data failed.";
      return nullptr;
    }
  } else {
    fp16_data = reinterpret_cast<float16_t *>(output->MutableData());
  }
  return fp16_data;
}

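// Converts an fp32 tensor to fp16 in place: detaches the original fp32 buffer,
// reallocates the tensor's storage as fp16, copies the converted values into it,
// and frees the original fp32 buffer through ctx->allocator.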
int ConvertFp32TensorToFp16(lite::Tensor *tensor, const lite::InnerContext *ctx) {
  if (tensor->data_type() == TypeId::kNumberTypeFloat16) {
    return RET_OK;
  }
  auto fp32_data = tensor->data();
  tensor->set_data(nullptr);
  tensor->set_data_type(TypeId::kNumberTypeFloat16);
  auto ret = tensor->MallocData();
  if (RET_OK != ret) {
    MS_LOG(ERROR) << "malloc data failed";
    return RET_ERROR;
  }
  Float32ToFloat16(static_cast<float *>(fp32_data), static_cast<float16_t *>(tensor->data()), tensor->ElementsNum());
  ctx->allocator->Free(fp32_data);
  return RET_OK;
}
}  // namespace mindspore::kernel