/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_INTRINSICS_H_
#define ART_COMPILER_OPTIMIZING_INTRINSICS_H_

#include "base/macros.h"
#include "code_generator.h"
#include "nodes.h"
#include "optimization.h"
#include "parallel_move_resolver.h"

namespace art HIDDEN {

class DexFile;

// Positive floating-point infinities.
static constexpr uint32_t kPositiveInfinityFloat = 0x7f800000U;
static constexpr uint64_t kPositiveInfinityDouble = UINT64_C(0x7ff0000000000000);

// Quiet NaN bit patterns.
static constexpr uint32_t kNanFloat = 0x7fc00000U;
static constexpr uint64_t kNanDouble = 0x7ff8000000000000;

class IntrinsicVisitor : public ValueObject {
 public:
  virtual ~IntrinsicVisitor() {}

  // Dispatch logic.

  void Dispatch(HInvoke* invoke) {
    switch (invoke->GetIntrinsic()) {
      case Intrinsics::kNone:
        return;
#define OPTIMIZING_INTRINSICS(Name, ...) \
      case Intrinsics::k ## Name: \
        Visit ## Name(invoke);    \
        return;
#include "intrinsics_list.h"
        INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS

      // Do not put a default case. That way the compiler will complain if we missed a case.
    }
  }

  // Define visitor methods.

#define OPTIMIZING_INTRINSICS(Name, ...) \
  virtual void Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
  }
#include "intrinsics_list.h"
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
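
  // For illustration only: the two X-macro blocks above pull every entry of
  // INTRINSICS_LIST out of "intrinsics_list.h". For a hypothetical entry `Foo`,
  // the expansions would look roughly like this (sketch, not generated code):
  //
  //   // In Dispatch():
  //   case Intrinsics::kFoo:
  //     VisitFoo(invoke);
  //     return;
  //
  //   // In the visitor declarations:
  //   virtual void VisitFoo(HInvoke* invoke ATTRIBUTE_UNUSED) {}
  //
  // Concrete visitors (e.g. the per-architecture locations builders and code
  // generators) override only the Visit methods they actually implement.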

  static void MoveArguments(HInvoke* invoke,
                            CodeGenerator* codegen,
                            InvokeDexCallingConventionVisitor* calling_convention_visitor) {
    if (kIsDebugBuild && invoke->IsInvokeStaticOrDirect()) {
      HInvokeStaticOrDirect* invoke_static_or_direct = invoke->AsInvokeStaticOrDirect();
      // Explicit clinit checks triggered by static invokes must have been
      // pruned by art::PrepareForRegisterAllocation.
      DCHECK(!invoke_static_or_direct->IsStaticWithExplicitClinitCheck());
    }

    if (invoke->GetNumberOfArguments() == 0) {
      // No argument to move.
      return;
    }

    LocationSummary* locations = invoke->GetLocations();

    // We're moving potentially two or more locations to locations that could overlap, so we need
    // a parallel move resolver.
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());

    for (size_t i = 0; i < invoke->GetNumberOfArguments(); i++) {
      HInstruction* input = invoke->InputAt(i);
      Location cc_loc = calling_convention_visitor->GetNextLocation(input->GetType());
      Location actual_loc = locations->InAt(i);

      parallel_move.AddMove(actual_loc, cc_loc, input->GetType(), nullptr);
    }

    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
  }

  static void ComputeIntegerValueOfLocations(HInvoke* invoke,
                                             CodeGenerator* codegen,
                                             Location return_location,
                                             Location first_argument_location);

  // Temporary data structure for holding Integer.valueOf data for generating code.
  // We only use it if the boot image contains the IntegerCache objects.
  struct IntegerValueOfInfo {
    static constexpr uint32_t kInvalidReference = static_cast<uint32_t>(-1);

    IntegerValueOfInfo();

    // Offset of the Integer.value field for initializing a newly allocated instance.
    uint32_t value_offset;
    // The low value in the cache.
    int32_t low;
    // The length of the cache array.
    uint32_t length;

    // This union contains references to the boot image. For app AOT or JIT compilation,
    // these are the boot image offsets of the target. For boot image compilation, the
    // location shall be known only at link time, so we encode a symbolic reference using
    // IntrinsicObjects::EncodePatch().
    union {
      // The target value for a constant input in the cache range. If the constant input
      // is out of range (use `low` and `length` to check), this value is bogus (set to
      // kInvalidReference) and the code must allocate a new Integer.
      uint32_t value_boot_image_reference;

      // The cache array data used for a non-constant input in the cache range.
      // If the input is out of range, the code must allocate a new Integer.
      uint32_t array_data_boot_image_reference;
    };
  };

  static IntegerValueOfInfo ComputeIntegerValueOfInfo(
      HInvoke* invoke, const CompilerOptions& compiler_options);
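
  // Rough sketch of how a backend consumes this data (illustrative pseudocode only;
  // actual code generators emit target-specific instructions):
  //
  //   IntegerValueOfInfo info = ComputeIntegerValueOfInfo(invoke, compiler_options);
  //   if (/* input is a constant */) {
  //     if (info.value_boot_image_reference != IntegerValueOfInfo::kInvalidReference) {
  //       // Constant lies in [info.low, info.low + info.length): load the cached
  //       // boxed Integer through the boot image reference.
  //     } else {
  //       // Out of the cache range: allocate a new Integer and store the constant
  //       // at info.value_offset.
  //     }
  //   } else {
  //     // Compare (input - info.low) against info.length; on a hit, index the cache
  //     // array via info.array_data_boot_image_reference, otherwise allocate a new
  //     // Integer.
  //   }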

  static MemberOffset GetReferenceDisableIntrinsicOffset();
  static MemberOffset GetReferenceSlowPathEnabledOffset();
  static void CreateReferenceGetReferentLocations(HInvoke* invoke, CodeGenerator* codegen);
  static void CreateReferenceRefersToLocations(HInvoke* invoke);

 protected:
  IntrinsicVisitor() {}

  static void AssertNonMovableStringClass();

 private:
  DISALLOW_COPY_AND_ASSIGN(IntrinsicVisitor);
};

#define GENERIC_OPTIMIZATION(name, bit)                \
public:                                                \
void Set##name() { SetBit(k##name); }                  \
bool Get##name() const { return IsBitSet(k##name); }   \
private:                                               \
static constexpr size_t k##name = bit
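
// For illustration only: `GENERIC_OPTIMIZATION(DoesNotNeedEnvironment, 0)` below expands
// to roughly the following members (sketch of the preprocessor output):
//
//  public:
//   void SetDoesNotNeedEnvironment() { SetBit(kDoesNotNeedEnvironment); }
//   bool GetDoesNotNeedEnvironment() const { return IsBitSet(kDoesNotNeedEnvironment); }
//  private:
//   static constexpr size_t kDoesNotNeedEnvironment = 0;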

class IntrinsicOptimizations : public ValueObject {
 public:
  explicit IntrinsicOptimizations(HInvoke* invoke)
      : value_(invoke->GetIntrinsicOptimizations()) {}
  explicit IntrinsicOptimizations(const HInvoke& invoke)
      : value_(invoke.GetIntrinsicOptimizations()) {}

  static constexpr int kNumberOfGenericOptimizations = 1;
  GENERIC_OPTIMIZATION(DoesNotNeedEnvironment, 0);

 protected:
  bool IsBitSet(uint32_t bit) const {
    DCHECK_LT(bit, sizeof(uint32_t) * kBitsPerByte);
    return (*value_ & (1 << bit)) != 0u;
  }

  void SetBit(uint32_t bit) {
    DCHECK_LT(bit, sizeof(uint32_t) * kBitsPerByte);
    *(const_cast<uint32_t* const>(value_)) |= (1 << bit);
  }

 private:
  const uint32_t* const value_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicOptimizations);
};

#undef GENERIC_OPTIMIZATION

#define INTRINSIC_OPTIMIZATION(name, bit)                             \
public:                                                               \
void Set##name() { SetBit(k##name); }                                 \
bool Get##name() const { return IsBitSet(k##name); }                  \
private:                                                              \
static constexpr size_t k##name = (bit) + kNumberOfGenericOptimizations

class StringEqualsOptimizations : public IntrinsicOptimizations {
 public:
  explicit StringEqualsOptimizations(HInvoke* invoke) : IntrinsicOptimizations(invoke) {}

  INTRINSIC_OPTIMIZATION(ArgumentNotNull, 0);
  INTRINSIC_OPTIMIZATION(ArgumentIsString, 1);

 private:
  DISALLOW_COPY_AND_ASSIGN(StringEqualsOptimizations);
};
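
// For illustration only: a pass that can prove facts about a recognized String.equals
// call records them through this wrapper, and the architecture-specific code generator
// reads them back to skip the corresponding runtime checks. A hypothetical sketch:
//
//   StringEqualsOptimizations optimizations(invoke);
//   if (/* the argument is known to be a non-null String */) {
//     optimizations.SetArgumentNotNull();
//     optimizations.SetArgumentIsString();
//   }
//   ...
//   // Later, in the code generator:
//   if (optimizations.GetArgumentNotNull()) { /* omit the null check */ }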

class SystemArrayCopyOptimizations : public IntrinsicOptimizations {
 public:
  explicit SystemArrayCopyOptimizations(HInvoke* invoke) : IntrinsicOptimizations(invoke) {}

  INTRINSIC_OPTIMIZATION(SourceIsNotNull, 0);
  INTRINSIC_OPTIMIZATION(DestinationIsNotNull, 1);
  INTRINSIC_OPTIMIZATION(DestinationIsSource, 2);
  INTRINSIC_OPTIMIZATION(CountIsSourceLength, 3);
  INTRINSIC_OPTIMIZATION(CountIsDestinationLength, 4);
  INTRINSIC_OPTIMIZATION(DoesNotNeedTypeCheck, 5);
  INTRINSIC_OPTIMIZATION(DestinationIsTypedObjectArray, 6);
  INTRINSIC_OPTIMIZATION(DestinationIsNonPrimitiveArray, 7);
  INTRINSIC_OPTIMIZATION(DestinationIsPrimitiveArray, 8);
  INTRINSIC_OPTIMIZATION(SourceIsNonPrimitiveArray, 9);
  INTRINSIC_OPTIMIZATION(SourceIsPrimitiveArray, 10);

 private:
  DISALLOW_COPY_AND_ASSIGN(SystemArrayCopyOptimizations);
};

class VarHandleOptimizations : public IntrinsicOptimizations {
 public:
  explicit VarHandleOptimizations(HInvoke* invoke) : IntrinsicOptimizations(invoke) {}

  INTRINSIC_OPTIMIZATION(DoNotIntrinsify, 0);  // One of the checks is statically known to fail.
  INTRINSIC_OPTIMIZATION(SkipObjectNullCheck, 1);  // Not applicable for static fields.

  // Use a known `VarHandle` from the boot image. To apply this optimization, the following
  // `VarHandle` checks must pass based on static analysis:
  //   - `VarHandle` type check (must match the coordinate count),
  //   - access mode check,
  //   - var type check (including assignability for reference types),
  //   - object type check (except for static field VarHandles that do not take an object).
  // Note that the object null check is controlled by the flag `SkipObjectNullCheck` above,
  // and that arrays and byte array views (which always need a range check and sometimes also
  // an array type check) are currently unsupported.
  INTRINSIC_OPTIMIZATION(UseKnownBootImageVarHandle, 2);
};

#undef INTRINSIC_OPTIMIZATION

//
// Macros for use in the intrinsics code generators.
//

// Defines an unimplemented intrinsic: that is, a method call that is recognized as an
// intrinsic (e.g. to exploit the absence of side effects or exceptions) but is not otherwise
// handled by this architecture-specific intrinsics code generator. It is eventually compiled
// as a regular method call.
#define UNIMPLEMENTED_INTRINSIC(Arch, Name)                                               \
void IntrinsicLocationsBuilder ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                         \
void IntrinsicCodeGenerator ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}
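
// For illustration only: an architecture-specific intrinsics file that does not handle a
// particular entry would use the macro like this (hypothetical arch/intrinsic pair):
//
//   UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
//
// which emits empty locations-builder and code-generator visitors, so the invoke falls
// back to a regular call.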

// Defines a list of unreached intrinsics: that is, method calls that are recognized as
// intrinsics but are always converted into HIR instructions before they reach any
// architecture-specific intrinsics code generator. This only applies to non-baseline
// compilation.
#define UNREACHABLE_INTRINSIC(Arch, Name)                                \
void IntrinsicLocationsBuilder ## Arch::Visit ## Name(HInvoke* invoke) { \
  if (Runtime::Current()->IsAotCompiler() &&                             \
      !codegen_->GetCompilerOptions().IsBaseline()) {                    \
    LOG(FATAL) << "Unreachable: intrinsic " << invoke->GetIntrinsic()    \
               << " should have been converted to HIR";                  \
  }                                                                      \
}                                                                        \
void IntrinsicCodeGenerator ## Arch::Visit ## Name(HInvoke* invoke) {    \
  LOG(FATAL) << "Unreachable: intrinsic " << invoke->GetIntrinsic()      \
             << " should have been converted to HIR";                    \
}
#define UNREACHABLE_INTRINSICS(Arch)                            \
UNREACHABLE_INTRINSIC(Arch, MathMinIntInt)                      \
UNREACHABLE_INTRINSIC(Arch, MathMinLongLong)                    \
UNREACHABLE_INTRINSIC(Arch, MathMinFloatFloat)                  \
UNREACHABLE_INTRINSIC(Arch, MathMinDoubleDouble)                \
UNREACHABLE_INTRINSIC(Arch, MathMaxIntInt)                      \
UNREACHABLE_INTRINSIC(Arch, MathMaxLongLong)                    \
UNREACHABLE_INTRINSIC(Arch, MathMaxFloatFloat)                  \
UNREACHABLE_INTRINSIC(Arch, MathMaxDoubleDouble)                \
UNREACHABLE_INTRINSIC(Arch, MathAbsInt)                         \
UNREACHABLE_INTRINSIC(Arch, MathAbsLong)                        \
UNREACHABLE_INTRINSIC(Arch, MathAbsFloat)                       \
UNREACHABLE_INTRINSIC(Arch, MathAbsDouble)                      \
UNREACHABLE_INTRINSIC(Arch, FloatFloatToIntBits)                \
UNREACHABLE_INTRINSIC(Arch, DoubleDoubleToLongBits)             \
UNREACHABLE_INTRINSIC(Arch, FloatIsNaN)                         \
UNREACHABLE_INTRINSIC(Arch, DoubleIsNaN)                        \
UNREACHABLE_INTRINSIC(Arch, IntegerRotateLeft)                  \
UNREACHABLE_INTRINSIC(Arch, LongRotateLeft)                     \
UNREACHABLE_INTRINSIC(Arch, IntegerRotateRight)                 \
UNREACHABLE_INTRINSIC(Arch, LongRotateRight)                    \
UNREACHABLE_INTRINSIC(Arch, IntegerCompare)                     \
UNREACHABLE_INTRINSIC(Arch, LongCompare)                        \
UNREACHABLE_INTRINSIC(Arch, IntegerSignum)                      \
UNREACHABLE_INTRINSIC(Arch, LongSignum)                         \
UNREACHABLE_INTRINSIC(Arch, StringCharAt)                       \
UNREACHABLE_INTRINSIC(Arch, StringIsEmpty)                      \
UNREACHABLE_INTRINSIC(Arch, StringLength)                       \
UNREACHABLE_INTRINSIC(Arch, UnsafeLoadFence)                    \
UNREACHABLE_INTRINSIC(Arch, UnsafeStoreFence)                   \
UNREACHABLE_INTRINSIC(Arch, UnsafeFullFence)                    \
UNREACHABLE_INTRINSIC(Arch, JdkUnsafeLoadFence)                 \
UNREACHABLE_INTRINSIC(Arch, JdkUnsafeStoreFence)                \
UNREACHABLE_INTRINSIC(Arch, JdkUnsafeFullFence)                 \
UNREACHABLE_INTRINSIC(Arch, VarHandleFullFence)                 \
UNREACHABLE_INTRINSIC(Arch, VarHandleAcquireFence)              \
UNREACHABLE_INTRINSIC(Arch, VarHandleReleaseFence)              \
UNREACHABLE_INTRINSIC(Arch, VarHandleLoadLoadFence)             \
UNREACHABLE_INTRINSIC(Arch, VarHandleStoreStoreFence)

template <typename IntrinsicLocationsBuilder, typename Codegenerator>
bool IsCallFreeIntrinsic(HInvoke* invoke, Codegenerator* codegen) {
  if (invoke->GetIntrinsic() != Intrinsics::kNone) {
    // This invoke may have intrinsic code generation defined. However, we must
    // now also determine if this code generation is truly there and call-free
    // (not unimplemented, no bail on instruction features, or call on slow path).
    // This is done by actually calling the locations builder on the instruction
    // and clearing out the locations once the result is known. We assume this
    // call only has creating locations as a side effect!
    // TODO: Avoid wasting Arena memory.
    IntrinsicLocationsBuilder builder(codegen);
    bool success = builder.TryDispatch(invoke) && !invoke->GetLocations()->CanCall();
    invoke->SetLocations(nullptr);
    return success;
  }
  return false;
}
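
// For illustration only: a pass that wants to know whether an invoke can be expanded
// without any runtime call might use this helper roughly as follows (hypothetical
// caller; the builder type name is an example, the codegen type is deduced):
//
//   if (IsCallFreeIntrinsic<IntrinsicLocationsBuilderARM64>(invoke, codegen)) {
//     // Safe to treat the invoke as a leaf operation, e.g. when deciding that no
//     // environment is needed.
//   }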

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_INTRINSICS_H_