/*
 * Copyright (c) 2017-2020, 2022 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_TEST_ACTIVATION_LAYER_H
#define ARM_COMPUTE_TEST_ACTIVATION_LAYER_H

#include "tests/SimpleTensor.h"
#include "tests/validation/Helpers.h"

#include <algorithm> // std::min, std::max
#include <cmath>     // std::abs, std::exp, std::log, std::sqrt, std::tanh

namespace arm_compute
{
namespace test
{
namespace validation
{
namespace reference
{
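/** Scalar reference for a single floating-point activation.
 *
 * Applies the activation function @p activation to the input value @p x,
 * where @p a and @p b carry the function's alpha/beta parameters (used only
 * by the parameterised activations such as LINEAR, TANH or LU_BOUNDED_RELU).
 */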
template <typename T>
inline T activate_float(T x, T a, T b, ActivationLayerInfo::ActivationFunction activation)
{
    T ret;

    switch(activation)
    {
        case ActivationLayerInfo::ActivationFunction::ABS:
            ret = std::abs(x);
            break;
        case ActivationLayerInfo::ActivationFunction::LINEAR:
            ret = a * x + b;
            break;
        case ActivationLayerInfo::ActivationFunction::LOGISTIC:
            ret = static_cast<T>(1) / (static_cast<T>(1) + std::exp(-x));
            break;
        case ActivationLayerInfo::ActivationFunction::RELU:
            ret = std::max<T>(static_cast<T>(0), x);
            break;
        case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
            ret = std::min<T>(a, std::max(static_cast<T>(0), x));
            break;
        case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
            ret = std::min<T>(a, std::max<T>(b, x));
            break;
        case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
            ret = (x > 0) ? x : a * x;
            break;
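        // Softplus: log(1 + exp(x)); exp is evaluated in double so that large
        // inputs do not overflow in low-precision types before the log is taken.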
        case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
            ret = std::log(static_cast<T>(1) + std::exp(static_cast<double>(x)));
            break;
        case ActivationLayerInfo::ActivationFunction::ELU:
            ret = (x > 0) ? x : a * (std::exp(x) - static_cast<T>(1));
            break;
        case ActivationLayerInfo::ActivationFunction::SQRT:
            ret = std::sqrt(x);
            break;
        case ActivationLayerInfo::ActivationFunction::SQUARE:
            ret = x * x;
            break;
        case ActivationLayerInfo::ActivationFunction::TANH:
            ret = a * std::tanh(b * x);
            break;
        case ActivationLayerInfo::ActivationFunction::IDENTITY:
            ret = x;
            break;
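        // Hard swish: x * ReLU6(x + 3) / 6, with 0.166666667f approximating 1/6.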
        case ActivationLayerInfo::ActivationFunction::HARD_SWISH:
            ret = x * ((std::min(std::max(static_cast<T>(x + 3), static_cast<T>(0.0f)), static_cast<T>(6.0f))) * 0.166666667f);
            break;
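        // Swish (SiLU): x * sigmoid(a * x).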
        case ActivationLayerInfo::ActivationFunction::SWISH:
            ret = static_cast<T>(x) / (static_cast<T>(1) + std::exp(-a * x));
            break;
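        // GELU, exact (erf-based) form: 0.5 * x * (1 + erf(x / sqrt(2))).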
        case ActivationLayerInfo::ActivationFunction::GELU:
            ret = x * 0.5f * (1 + erf(x / std::sqrt(2.0f)));
            break;
        default:
            ARM_COMPUTE_ERROR("Unsupported activation function");
            break;
    }

    return ret;
}
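
/** Reference implementation of an activation layer over a whole tensor.
 *
 * Applies the activation described by @p info element-wise to @p src and
 * returns the result; @p oq_info optionally overrides the output
 * quantization info for quantized data types.
 */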
template <typename T>
SimpleTensor<T> activation_layer(const SimpleTensor<T> &src, ActivationLayerInfo info, const QuantizationInfo &oq_info = QuantizationInfo());
} // namespace reference
} // namespace validation
} // namespace test
} // namespace arm_compute
#endif /* ARM_COMPUTE_TEST_ACTIVATION_LAYER_H */