/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/delegates/coreml/builders/activation_layer_builder.h"

#include "tensorflow/lite/builtin_ops.h"
#include "tensorflow/lite/c/builtin_op_data.h"
#include "tensorflow/lite/c/common.h"
#include "tensorflow/lite/delegates/coreml/builders/op_factory.h"
#include "tensorflow/lite/delegates/coreml/builders/threshold_layer_builder.h"

namespace tflite {
namespace delegates {
namespace coreml {

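// The debug name is formatted lazily, on first use, from node_id_.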
const std::string& ActivationLayerBuilder::DebugName() {
  if (debug_name_.empty()) SetDebugName("ActivationLayerBuilder", node_id_);
  return debug_name_;
}

CoreML::Specification::NeuralNetworkLayer* ActivationLayerBuilder::Build() {
  layer_->set_name(DebugName());
  switch (activation_) {
    // ActNone is used for scalar multiplication (linear activation).
    case kTfLiteActNone:
      layer_->mutable_activation()->mutable_linear()->set_alpha(alpha_);
      break;
    case kTfLiteActRelu:
      layer_->mutable_activation()->mutable_relu();
      break;
    // Relu1 and Relu6 layers are fully composed in PopulateSubgraph().
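    // Here only the lower clip bound is emitted: the THRESHOLD unary
    // computes max(x, alpha) = max(x, -1) for Relu1, and the plain ReLU
    // below computes max(x, 0) for Relu6; the upper bound of each clip is
    // attached by the extra layers built in PopulateSubgraph().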
    case kTfLiteActReluN1To1:  // clip(-1, 1)
      layer_->mutable_unary()->set_alpha(-1);
      layer_->mutable_unary()->set_type(
          CoreML::Specification::UnaryFunctionLayerParams::THRESHOLD);
      break;
    case kTfLiteActRelu6:  // clip(0, 6)
      layer_->mutable_activation()->mutable_relu();
      break;
    case kTfLiteActTanh:
      layer_->mutable_activation()->mutable_tanh();
      break;
    case kTfLiteActSigmoid:
      layer_->mutable_activation()->mutable_sigmoid();
      break;
    // TODO(taeheej): signbit is not implemented.
    default:
      fprintf(stderr, "Activation %d is not supported.\n", activation_);
      break;
  }
  return layer_.release();
}

TfLiteStatus ActivationLayerBuilder::PopulateSubgraph(TfLiteContext* context) {
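  // Activations other than Relu1/Relu6 map onto a single Core ML layer, so
  // this builder's own output is the final output of the subgraph.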
  if (!(activation_ == kTfLiteActRelu6 || activation_ == kTfLiteActReluN1To1)) {
    builder_output_ = AddOutput();
    return kTfLiteOk;
  }

  // Relu1: Threshold(-1) -> Threshold(-1) with scale: -1 -> Negation
  // Relu6: ReLU -> Threshold(-6) with scale: -1 -> Negation
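  // The last two stages implement the upper clip bound t: the scaled
  // threshold layer computes max(-x, -t), and the final negation turns
  // that into -max(-x, -t) == min(x, t).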
  const int relu_threshold = activation_ == kTfLiteActRelu6 ? 6 : 1;
  ThresholdLayerBuilder* threshold_builder =
      reinterpret_cast<ThresholdLayerBuilder*>(
          graph_builder_->AddBuilder(CreateThresholdLayerBuilder, nullptr));

  threshold_builder->SetAlpha(-relu_threshold);
  threshold_builder->SetScale(-1);

  threshold_builder->AddInput(AddOutput());

  ActivationLayerBuilder* negation_builder =
      reinterpret_cast<ActivationLayerBuilder*>(
          graph_builder_->AddBuilder(CreateActivationLayerBuilder, nullptr));
  negation_builder->SetActivation(kTfLiteActNone);
  negation_builder->SetAlpha(-1);
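  // kTfLiteActNone builds a linear activation y = alpha * x (see Build()),
  // so alpha = -1 makes this layer a pure negation.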

  negation_builder->AddInput(threshold_builder->AddOutput());
  builder_output_ = negation_builder->AddOutput();
  return kTfLiteOk;
}

TfLiteStatus ActivationLayerBuilder::RegisterInputs(
    const TfLiteIntArray* inputs, TfLiteContext* context) {
  if (inputs->size != 1) {
    TF_LITE_KERNEL_LOG(context, "Activation: Wrong # of inputs!");
    return kTfLiteError;
  }
  AddInput(inputs->data[0]);
  return kTfLiteOk;
}

TfLiteStatus ActivationLayerBuilder::RegisterOutputs(
    const TfLiteIntArray* outputs, TfLiteContext* context) {
  if (outputs->size != 1) {
    TF_LITE_KERNEL_LOG(context, "Activation: Wrong # of outputs!");
    return kTfLiteError;
  }
  graph_builder_->AddTensorWithID(outputs->data[0], GetOutput(context));
  return kTfLiteOk;
}

OpBuilder* CreateActivationLayerBuilder(GraphBuilder* graph_builder) {
  return new ActivationLayerBuilder(graph_builder);
}

OpBuilder* CreateLogisticOpBuilder(GraphBuilder* graph_builder) {
  return new ActivationLayerBuilder(graph_builder, kTfLiteActSigmoid);
}

OpBuilder* CreateReluOpBuilder(GraphBuilder* graph_builder) {
  return new ActivationLayerBuilder(graph_builder, kTfLiteActRelu);
}

OpBuilder* CreateReluN1To1OpBuilder(GraphBuilder* graph_builder) {
  return new ActivationLayerBuilder(graph_builder, kTfLiteActReluN1To1);
}

OpBuilder* CreateRelu6OpBuilder(GraphBuilder* graph_builder) {
  return new ActivationLayerBuilder(graph_builder, kTfLiteActRelu6);
}

OpBuilder* CreateTanhOpBuilder(GraphBuilder* graph_builder) {
  return new ActivationLayerBuilder(graph_builder, kTfLiteActTanh);
}

}  // namespace coreml
}  // namespace delegates
}  // namespace tflite