1 /*
2 * Copyright (C) 2019 The Android Open Source Project
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * * Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * * Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in
12 * the documentation and/or other materials provided with the
13 * distribution.
14 *
15 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
16 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
17 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
18 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
19 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
21 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
22 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
23 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
25 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 * SUCH DAMAGE.
27 */
28
29 #include <private/bionic_ifuncs.h>
30 #include <stddef.h>
31 #include <sys/auxv.h>
32
__bionic_is_oryon(unsigned long hwcap)33 static inline bool __bionic_is_oryon(unsigned long hwcap) {
34 if (!(hwcap & HWCAP_CPUID)) return false;
35
36 // Extract the implementor and variant bits from MIDR_EL1.
37 // https://www.kernel.org/doc/html/latest/arch/arm64/cpu-feature-registers.html#list-of-registers-with-visible-features
38 unsigned long midr;
39 __asm__ __volatile__("mrs %0, MIDR_EL1" : "=r"(midr));
40 uint16_t cpu = (midr >> 20) & 0xfff;
41
42 auto make_cpu = [](unsigned implementor, unsigned variant) {
43 return (implementor << 4) | variant;
44 };
45
46 // Check for implementor Qualcomm's variants 0x1..0x5 (Oryon).
47 return cpu >= make_cpu('Q', 0x1) && cpu <= make_cpu('Q', 0x5);
48 }
49
50 extern "C" {
51
typedef void* memchr_func(const void*, int, size_t);
// Resolver for memchr: prefer the MTE-compatible implementation whenever
// the Memory Tagging Extension is available.
DEFINE_IFUNC_FOR(memchr) {
  if (!(arg->_hwcap2 & HWCAP2_MTE)) {
    RETURN_FUNC(memchr_func, __memchr_aarch64);
  } else {
    RETURN_FUNC(memchr_func, __memchr_aarch64_mte);
  }
}
60
typedef int memcmp_func(const void*, const void*, size_t);
// Resolver for memcmp: only the generic aarch64 implementation is used.
DEFINE_IFUNC_FOR(memcmp) {
  // TODO: enable the SVE version.
  RETURN_FUNC(memcmp_func, __memcmp_aarch64);
}
66
typedef void* memcpy_func(void*, const void*, size_t);
// Resolver for memcpy. RETURN_FUNC returns from the resolver, so this is a
// simple priority chain: Oryon non-temporal > ASIMD > generic.
DEFINE_IFUNC_FOR(memcpy) {
  // Qualcomm Oryon cores prefer the non-temporal-store implementation.
  if (__bionic_is_oryon(arg->_hwcap)) RETURN_FUNC(memcpy_func, __memcpy_aarch64_nt);
  if (arg->_hwcap & HWCAP_ASIMD) RETURN_FUNC(memcpy_func, __memcpy_aarch64_simd);
  RETURN_FUNC(memcpy_func, __memcpy_aarch64);
}
77
typedef void* memmove_func(void*, const void*, size_t);
// Resolver for memmove: Oryon non-temporal > ASIMD > generic.
DEFINE_IFUNC_FOR(memmove) {
  if (__bionic_is_oryon(arg->_hwcap)) {
    // Fixed: this branch previously passed memcpy_func; use the memmove
    // typedef so the returned pointer is cast to the matching function type.
    RETURN_FUNC(memmove_func, __memmove_aarch64_nt);
  } else if (arg->_hwcap & HWCAP_ASIMD) {
    RETURN_FUNC(memmove_func, __memmove_aarch64_simd);
  } else {
    RETURN_FUNC(memmove_func, __memmove_aarch64);
  }
}
88
// memrchr returns a pointer into the buffer (or nullptr), so the resolver
// typedef's return type must be void*. The previous `int` did not match the
// real __memrchr_aarch64 signature, making the function-type cast incorrect.
typedef void* memrchr_func(const void*, int, size_t);
// Resolver for memrchr: only one implementation, no feature dispatch.
DEFINE_IFUNC_FOR(memrchr) {
  RETURN_FUNC(memrchr_func, __memrchr_aarch64);
}
93
// memset returns its destination pointer, so the typedef's return type is
// void* (the previous `int` did not match the real implementations' type).
typedef void* memset_func(void*, int, size_t);
// Resolver for memset: Qualcomm Oryon cores get the non-temporal-store
// variant, everything else the generic aarch64 implementation.
DEFINE_IFUNC_FOR(memset) {
  if (__bionic_is_oryon(arg->_hwcap)) {
    RETURN_FUNC(memset_func, __memset_aarch64_nt);
  } else {
    RETURN_FUNC(memset_func, __memset_aarch64);
  }
}
102
typedef char* stpcpy_func(char*, const char*, size_t);
// Resolver for stpcpy: only the generic aarch64 implementation is used.
DEFINE_IFUNC_FOR(stpcpy) {
  // TODO: enable the SVE version.
  RETURN_FUNC(stpcpy_func, __stpcpy_aarch64);
}
108
typedef char* strchr_func(const char*, int);
// Resolver for strchr: prefer the MTE-compatible implementation whenever
// the Memory Tagging Extension is available.
DEFINE_IFUNC_FOR(strchr) {
  if (!(arg->_hwcap2 & HWCAP2_MTE)) {
    RETURN_FUNC(strchr_func, __strchr_aarch64);
  } else {
    RETURN_FUNC(strchr_func, __strchr_aarch64_mte);
  }
}
117
typedef char* strchrnul_func(const char*, int);
// Resolver for strchrnul: prefer the MTE-compatible implementation whenever
// the Memory Tagging Extension is available.
DEFINE_IFUNC_FOR(strchrnul) {
  if (!(arg->_hwcap2 & HWCAP2_MTE)) {
    RETURN_FUNC(strchrnul_func, __strchrnul_aarch64);
  } else {
    RETURN_FUNC(strchrnul_func, __strchrnul_aarch64_mte);
  }
}
126
typedef int strcmp_func(const char*, const char*);
// Resolver for strcmp: only the generic aarch64 implementation is used.
DEFINE_IFUNC_FOR(strcmp) {
  // TODO: enable the SVE version.
  RETURN_FUNC(strcmp_func, __strcmp_aarch64);
}
132
typedef char* strcpy_func(char*, const char*);
// Resolver for strcpy: only the generic aarch64 implementation is used.
DEFINE_IFUNC_FOR(strcpy) {
  // TODO: enable the SVE version.
  RETURN_FUNC(strcpy_func, __strcpy_aarch64);
}
138
typedef size_t strlen_func(const char*);
// Resolver for strlen: prefer the MTE-compatible implementation whenever
// the Memory Tagging Extension is available.
DEFINE_IFUNC_FOR(strlen) {
  if (!(arg->_hwcap2 & HWCAP2_MTE)) {
    RETURN_FUNC(strlen_func, __strlen_aarch64);
  } else {
    RETURN_FUNC(strlen_func, __strlen_aarch64_mte);
  }
}
147
typedef int strncmp_func(const char*, const char*, size_t);
// Resolver for strncmp: only the generic aarch64 implementation is used.
DEFINE_IFUNC_FOR(strncmp) {
  // TODO: enable the SVE version.
  RETURN_FUNC(strncmp_func, __strncmp_aarch64);
}
153
typedef size_t strnlen_func(const char*, size_t);
// Resolver for strnlen: only the generic aarch64 implementation is used.
DEFINE_IFUNC_FOR(strnlen) {
  // TODO: enable the SVE version.
  RETURN_FUNC(strnlen_func, __strnlen_aarch64);
}
159
typedef char* strrchr_func(const char*, int);
// Resolver for strrchr: prefer the MTE-compatible implementation whenever
// the Memory Tagging Extension is available.
DEFINE_IFUNC_FOR(strrchr) {
  if (!(arg->_hwcap2 & HWCAP2_MTE)) {
    RETURN_FUNC(strrchr_func, __strrchr_aarch64);
  } else {
    RETURN_FUNC(strrchr_func, __strrchr_aarch64_mte);
  }
}
168
169 } // extern "C"
170