1 /*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "Dalvik.h"
18 #include "libdex/DexOpcodes.h"
19 #include "libdex/DexCatch.h"
20 #include "interp/Jit.h"
21 #include "CompilerInternals.h"
22 #include "Dataflow.h"
23
24 static inline bool contentIsInsn(const u2 *codePtr) {
25 u2 instr = *codePtr;
26 Opcode opcode = (Opcode)(instr & 0xff);
27
28 /*
29 * Since the low 8 bits of a data payload may look like OP_NOP, check both
30 * the low byte and the whole code unit to determine whether it is code or data.
31 */
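/*
 * For example, the switch/fill-array-data payloads begin with the signatures
 * 0x0100, 0x0200, and 0x0300: the low byte equals OP_NOP but the whole code
 * unit is non-zero, so they are classified as data here.
 */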
32 return (opcode != OP_NOP || instr == 0);
33 }
34
35 /*
36 * Parse an instruction and return its length in 16-bit code units
37 */
38 static inline int parseInsn(const u2 *codePtr, DecodedInstruction *decInsn,
39 bool printMe)
40 {
41 // Don't parse instruction data
42 if (!contentIsInsn(codePtr)) {
43 return 0;
44 }
45
46 u2 instr = *codePtr;
47 Opcode opcode = dexOpcodeFromCodeUnit(instr);
48
49 dexDecodeInstruction(codePtr, decInsn);
50 if (printMe) {
51 char *decodedString = dvmCompilerGetDalvikDisassembly(decInsn, NULL);
52 LOGD("%p: %#06x %s", codePtr, opcode, decodedString);
53 }
54 return dexGetWidthFromOpcode(opcode);
55 }
56
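/* Marker for block-ending instructions whose branch target is not known at compile time */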
57 #define UNKNOWN_TARGET 0xffffffff
58
59 /*
60 * Identify block-ending instructions and collect supplemental information
61 * about them (branch target, invoke status, and resolved callee).
62 */
63 static inline bool findBlockBoundary(const Method *caller, MIR *insn,
64 unsigned int curOffset,
65 unsigned int *target, bool *isInvoke,
66 const Method **callee)
67 {
68 switch (insn->dalvikInsn.opcode) {
69 /* Target is not compile-time constant */
70 case OP_RETURN_VOID:
71 case OP_RETURN:
72 case OP_RETURN_WIDE:
73 case OP_RETURN_OBJECT:
74 case OP_THROW:
75 *target = UNKNOWN_TARGET;
76 break;
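/*
 * Virtual and interface calls dispatch through the receiver at runtime, so
 * no compile-time target is recorded here; only the invoke flag is set.
 */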
77 case OP_INVOKE_VIRTUAL:
78 case OP_INVOKE_VIRTUAL_RANGE:
79 case OP_INVOKE_VIRTUAL_JUMBO:
80 case OP_INVOKE_INTERFACE:
81 case OP_INVOKE_INTERFACE_RANGE:
82 case OP_INVOKE_INTERFACE_JUMBO:
83 case OP_INVOKE_VIRTUAL_QUICK:
84 case OP_INVOKE_VIRTUAL_QUICK_RANGE:
85 *isInvoke = true;
86 break;
87 case OP_INVOKE_SUPER:
88 case OP_INVOKE_SUPER_RANGE:
89 case OP_INVOKE_SUPER_JUMBO: {
90 int mIndex = caller->clazz->pDvmDex->
91 pResMethods[insn->dalvikInsn.vB]->methodIndex;
92 const Method *calleeMethod =
93 caller->clazz->super->vtable[mIndex];
94
95 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
96 *target = (unsigned int) calleeMethod->insns;
97 }
98 *isInvoke = true;
99 *callee = calleeMethod;
100 break;
101 }
102 case OP_INVOKE_STATIC:
103 case OP_INVOKE_STATIC_RANGE:
104 case OP_INVOKE_STATIC_JUMBO: {
105 const Method *calleeMethod =
106 caller->clazz->pDvmDex->pResMethods[insn->dalvikInsn.vB];
107
108 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
109 *target = (unsigned int) calleeMethod->insns;
110 }
111 *isInvoke = true;
112 *callee = calleeMethod;
113 break;
114 }
115 case OP_INVOKE_SUPER_QUICK:
116 case OP_INVOKE_SUPER_QUICK_RANGE: {
117 const Method *calleeMethod =
118 caller->clazz->super->vtable[insn->dalvikInsn.vB];
119
120 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
121 *target = (unsigned int) calleeMethod->insns;
122 }
123 *isInvoke = true;
124 *callee = calleeMethod;
125 break;
126 }
127 case OP_INVOKE_DIRECT:
128 case OP_INVOKE_DIRECT_RANGE:
129 case OP_INVOKE_DIRECT_JUMBO: {
130 const Method *calleeMethod =
131 caller->clazz->pDvmDex->pResMethods[insn->dalvikInsn.vB];
132 if (calleeMethod && !dvmIsNativeMethod(calleeMethod)) {
133 *target = (unsigned int) calleeMethod->insns;
134 }
135 *isInvoke = true;
136 *callee = calleeMethod;
137 break;
138 }
139 case OP_GOTO:
140 case OP_GOTO_16:
141 case OP_GOTO_32:
142 *target = curOffset + (int) insn->dalvikInsn.vA;
143 break;
144
145 case OP_IF_EQ:
146 case OP_IF_NE:
147 case OP_IF_LT:
148 case OP_IF_GE:
149 case OP_IF_GT:
150 case OP_IF_LE:
151 *target = curOffset + (int) insn->dalvikInsn.vC;
152 break;
153
154 case OP_IF_EQZ:
155 case OP_IF_NEZ:
156 case OP_IF_LTZ:
157 case OP_IF_GEZ:
158 case OP_IF_GTZ:
159 case OP_IF_LEZ:
160 *target = curOffset + (int) insn->dalvikInsn.vB;
161 break;
162
163 default:
164 return false;
165 }
166 return true;
167 }
168
169 static inline bool isGoto(MIR *insn)
170 {
171 switch (insn->dalvikInsn.opcode) {
172 case OP_GOTO:
173 case OP_GOTO_16:
174 case OP_GOTO_32:
175 return true;
176 default:
177 return false;
178 }
179 }
180
181 /*
182 * Identify unconditional branch instructions
183 */
184 static inline bool isUnconditionalBranch(MIR *insn)
185 {
186 switch (insn->dalvikInsn.opcode) {
187 case OP_RETURN_VOID:
188 case OP_RETURN:
189 case OP_RETURN_WIDE:
190 case OP_RETURN_OBJECT:
191 return true;
192 default:
193 return isGoto(insn);
194 }
195 }
196
197 /*
198 * dvmHashTableLookup() callback
199 */
200 static int compareMethod(const CompilerMethodStats *m1,
201 const CompilerMethodStats *m2)
202 {
203 return (int) m1->method - (int) m2->method;
204 }
205
206 /*
207 * Analyze the body of the method to collect high-level information regarding
208 * inlining:
209 * - is empty method?
210 * - is getter/setter?
211 * - can throw exception?
212 *
213 * Currently the inliner only handles getters and setters. As it becomes
214 * more sophisticated, more information will be retrieved here.
215 */
216 static int analyzeInlineTarget(DecodedInstruction *dalvikInsn, int attributes,
217 int offset)
218 {
219 int flags = dexGetFlagsFromOpcode(dalvikInsn->opcode);
220 int dalvikOpcode = dalvikInsn->opcode;
221
222 if (flags & kInstrInvoke) {
223 attributes &= ~METHOD_IS_LEAF;
224 }
225
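/*
 * Other than the final return, every instruction must carry the
 * DF_IS_GETTER/DF_IS_SETTER dataflow property for the method to keep its
 * getter/setter attribute.
 */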
226 if (!(flags & kInstrCanReturn)) {
227 if (!(dvmCompilerDataFlowAttributes[dalvikOpcode] &
228 DF_IS_GETTER)) {
229 attributes &= ~METHOD_IS_GETTER;
230 }
231 if (!(dvmCompilerDataFlowAttributes[dalvikOpcode] &
232 DF_IS_SETTER)) {
233 attributes &= ~METHOD_IS_SETTER;
234 }
235 }
236
237 /*
238 * In the expected instruction sequence a setter never returns a value while
239 * a getter always does. Clear the corresponding bit if the opposite
240 * behavior is observed.
241 */
242 if (flags & kInstrCanReturn) {
243 if (dalvikOpcode == OP_RETURN_VOID) {
244 attributes &= ~METHOD_IS_GETTER;
245 }
246 else {
247 attributes &= ~METHOD_IS_SETTER;
248 }
249 }
250
251 if (flags & kInstrCanThrow) {
252 attributes &= ~METHOD_IS_THROW_FREE;
253 }
254
255 if (offset == 0 && dalvikOpcode == OP_RETURN_VOID) {
256 attributes |= METHOD_IS_EMPTY;
257 }
258
259 /*
260 * Check if this opcode is selected for single stepping.
261 * If so, don't inline the callee as there is no stack frame for the
262 * interpreter to single-step through the instruction.
263 */
264 if (SINGLE_STEP_OP(dalvikOpcode)) {
265 attributes &= ~(METHOD_IS_GETTER | METHOD_IS_SETTER);
266 }
267
268 return attributes;
269 }
270
271 /*
272 * Analyze each method whose traces are ever compiled. Collect a variety of
273 * statistics like the ratio of exercised vs overall code and code bloat
274 * ratios. If isCallee is true, also analyze each instruction in more detail
275 * to see if the method is suitable for inlining.
276 */
277 CompilerMethodStats *dvmCompilerAnalyzeMethodBody(const Method *method,
278 bool isCallee)
279 {
280 const DexCode *dexCode = dvmGetMethodCode(method);
281 const u2 *codePtr = dexCode->insns;
282 const u2 *codeEnd = dexCode->insns + dexCode->insnsSize;
283 int insnSize = 0;
284 int hashValue = dvmComputeUtf8Hash(method->name);
285
286 CompilerMethodStats dummyMethodEntry; // For hash table lookup
287 CompilerMethodStats *realMethodEntry; // For hash table storage
288
289 /* For lookup only */
290 dummyMethodEntry.method = method;
291 realMethodEntry = (CompilerMethodStats *)
292 dvmHashTableLookup(gDvmJit.methodStatsTable,
293 hashValue,
294 &dummyMethodEntry,
295 (HashCompareFunc) compareMethod,
296 false);
297
298 /* This method has never been analyzed before - create an entry */
299 if (realMethodEntry == NULL) {
300 realMethodEntry =
301 (CompilerMethodStats *) calloc(1, sizeof(CompilerMethodStats));
302 realMethodEntry->method = method;
303
304 dvmHashTableLookup(gDvmJit.methodStatsTable, hashValue,
305 realMethodEntry,
306 (HashCompareFunc) compareMethod,
307 true);
308 }
309
310 /* This method is invoked as a callee and has been analyzed - just return */
311 if ((isCallee == true) && (realMethodEntry->attributes & METHOD_IS_CALLEE))
312 return realMethodEntry;
313
314 /*
315 * Similarly, return if this method has been compiled before as a hot
316 * method already.
317 */
318 if ((isCallee == false) &&
319 (realMethodEntry->attributes & METHOD_IS_HOT))
320 return realMethodEntry;
321
322 int attributes;
323
324 /* Method hasn't been analyzed for the desired purpose yet */
325 if (isCallee) {
326 /* Aggressively set the attributes until proven otherwise */
327 attributes = METHOD_IS_LEAF | METHOD_IS_THROW_FREE | METHOD_IS_CALLEE |
328 METHOD_IS_GETTER | METHOD_IS_SETTER;
329 } else {
330 attributes = METHOD_IS_HOT;
331 }
332
333 /* Count the number of instructions */
334 while (codePtr < codeEnd) {
335 DecodedInstruction dalvikInsn;
336 int width = parseInsn(codePtr, &dalvikInsn, false);
337
338 /* Terminate when the data section is seen */
339 if (width == 0)
340 break;
341
342 if (isCallee) {
343 attributes = analyzeInlineTarget(&dalvikInsn, attributes, insnSize);
344 }
345
346 insnSize += width;
347 codePtr += width;
348 }
349
350 /*
351 * Only handle simple getters/setters with one instruction followed by
352 * return
353 */
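/*
 * For example, an iget/iput takes two 16-bit code units and the trailing
 * return one more, so a simple accessor is exactly three code units long.
 */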
354 if ((attributes & (METHOD_IS_GETTER | METHOD_IS_SETTER)) &&
355 (insnSize != 3)) {
356 attributes &= ~(METHOD_IS_GETTER | METHOD_IS_SETTER);
357 }
358
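/* insnSize counts 16-bit code units, so the Dalvik size in bytes is twice that */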
359 realMethodEntry->dalvikSize = insnSize * 2;
360 realMethodEntry->attributes |= attributes;
361
362 #if 0
363 /* Uncomment the following to explore various callee patterns */
364 if (attributes & METHOD_IS_THROW_FREE) {
365 LOGE("%s%s is inlinable%s", method->clazz->descriptor, method->name,
366 (attributes & METHOD_IS_EMPTY) ? " empty" : "");
367 }
368
369 if (attributes & METHOD_IS_LEAF) {
370 LOGE("%s%s is leaf %d%s", method->clazz->descriptor, method->name,
371 insnSize, insnSize < 5 ? " (small)" : "");
372 }
373
374 if (attributes & (METHOD_IS_GETTER | METHOD_IS_SETTER)) {
375 LOGE("%s%s is %s", method->clazz->descriptor, method->name,
376 attributes & METHOD_IS_GETTER ? "getter": "setter");
377 }
378 if (attributes ==
379 (METHOD_IS_LEAF | METHOD_IS_THROW_FREE | METHOD_IS_CALLEE)) {
380 LOGE("%s%s is inlinable non setter/getter", method->clazz->descriptor,
381 method->name);
382 }
383 #endif
384
385 return realMethodEntry;
386 }
387
388 /*
389 * Crawl the stack of the thread that requested compilation to see if any of the
390 * ancestors are on the blacklist.
391 */
392 static bool filterMethodByCallGraph(Thread *thread, const char *curMethodName)
393 {
394 /* Crawl the Dalvik stack frames and compare the method names */
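/*
 * Each frame pointer is immediately preceded by its StackSaveArea, so
 * subtracting one from the frame pointer yields the save area; the walk stops
 * at the sentinel produced by a NULL previous frame.
 */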
395 StackSaveArea *ssaPtr = ((StackSaveArea *) thread->interpSave.curFrame) - 1;
396 while (ssaPtr != ((StackSaveArea *) NULL) - 1) {
397 const Method *method = ssaPtr->method;
398 if (method) {
399 int hashValue = dvmComputeUtf8Hash(method->name);
400 bool found =
401 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
402 (char *) method->name,
403 (HashCompareFunc) strcmp, false) !=
404 NULL;
405 if (found) {
406 LOGD("Method %s (--> %s) found on the JIT %s list",
407 method->name, curMethodName,
408 gDvmJit.includeSelectedMethod ? "white" : "black");
409 return true;
410 }
411
412 }
413 ssaPtr = ((StackSaveArea *) ssaPtr->prevFrame) - 1;
414 };
415 return false;
416 }
417
418 /*
419 * Since we are including instructions from possibly a cold method into the
420 * current trace, we need to make sure that all the information associated
421 * with the callee is properly initialized. If not, we punt on this inline
422 * target.
423 *
424 * TODO: volatile instructions will be handled later.
425 */
426 bool dvmCompilerCanIncludeThisInstruction(const Method *method,
427 const DecodedInstruction *insn)
428 {
429 switch (insn->opcode) {
430 case OP_NEW_INSTANCE:
431 case OP_NEW_INSTANCE_JUMBO:
432 case OP_CHECK_CAST:
433 case OP_CHECK_CAST_JUMBO: {
434 ClassObject *classPtr = (ClassObject *)(void*)
435 (method->clazz->pDvmDex->pResClasses[insn->vB]);
436
437 /* Class hasn't been initialized yet */
438 if (classPtr == NULL) {
439 return false;
440 }
441 return true;
442 }
443 case OP_SGET:
444 case OP_SGET_JUMBO:
445 case OP_SGET_WIDE:
446 case OP_SGET_WIDE_JUMBO:
447 case OP_SGET_OBJECT:
448 case OP_SGET_OBJECT_JUMBO:
449 case OP_SGET_BOOLEAN:
450 case OP_SGET_BOOLEAN_JUMBO:
451 case OP_SGET_BYTE:
452 case OP_SGET_BYTE_JUMBO:
453 case OP_SGET_CHAR:
454 case OP_SGET_CHAR_JUMBO:
455 case OP_SGET_SHORT:
456 case OP_SGET_SHORT_JUMBO:
457 case OP_SPUT:
458 case OP_SPUT_JUMBO:
459 case OP_SPUT_WIDE:
460 case OP_SPUT_WIDE_JUMBO:
461 case OP_SPUT_OBJECT:
462 case OP_SPUT_OBJECT_JUMBO:
463 case OP_SPUT_BOOLEAN:
464 case OP_SPUT_BOOLEAN_JUMBO:
465 case OP_SPUT_BYTE:
466 case OP_SPUT_BYTE_JUMBO:
467 case OP_SPUT_CHAR:
468 case OP_SPUT_CHAR_JUMBO:
469 case OP_SPUT_SHORT:
470 case OP_SPUT_SHORT_JUMBO: {
471 void *fieldPtr = (void*)
472 (method->clazz->pDvmDex->pResFields[insn->vB]);
473
474 if (fieldPtr == NULL) {
475 return false;
476 }
477 return true;
478 }
479 case OP_INVOKE_SUPER:
480 case OP_INVOKE_SUPER_RANGE:
481 case OP_INVOKE_SUPER_JUMBO: {
482 int mIndex = method->clazz->pDvmDex->
483 pResMethods[insn->vB]->methodIndex;
484 const Method *calleeMethod = method->clazz->super->vtable[mIndex];
485 if (calleeMethod == NULL) {
486 return false;
487 }
488 return true;
489 }
490 case OP_INVOKE_SUPER_QUICK:
491 case OP_INVOKE_SUPER_QUICK_RANGE: {
492 const Method *calleeMethod = method->clazz->super->vtable[insn->vB];
493 if (calleeMethod == NULL) {
494 return false;
495 }
496 return true;
497 }
498 case OP_INVOKE_STATIC:
499 case OP_INVOKE_STATIC_RANGE:
500 case OP_INVOKE_STATIC_JUMBO:
501 case OP_INVOKE_DIRECT:
502 case OP_INVOKE_DIRECT_RANGE:
503 case OP_INVOKE_DIRECT_JUMBO: {
504 const Method *calleeMethod =
505 method->clazz->pDvmDex->pResMethods[insn->vB];
506 if (calleeMethod == NULL) {
507 return false;
508 }
509 return true;
510 }
511 case OP_CONST_CLASS:
512 case OP_CONST_CLASS_JUMBO: {
513 void *classPtr = (void*)
514 (method->clazz->pDvmDex->pResClasses[insn->vB]);
515
516 if (classPtr == NULL) {
517 return false;
518 }
519 return true;
520 }
521 case OP_CONST_STRING_JUMBO:
522 case OP_CONST_STRING: {
523 void *strPtr = (void*)
524 (method->clazz->pDvmDex->pResStrings[insn->vB]);
525
526 if (strPtr == NULL) {
527 return false;
528 }
529 return true;
530 }
531 default:
532 return true;
533 }
534 }
535
536 /* Split an existing block at the specified code offset into two */
537 static BasicBlock *splitBlock(CompilationUnit *cUnit,
538 unsigned int codeOffset,
539 BasicBlock *origBlock)
540 {
541 MIR *insn = origBlock->firstMIRInsn;
542 while (insn) {
543 if (insn->offset == codeOffset) break;
544 insn = insn->next;
545 }
546 if (insn == NULL) {
547 LOGE("Break split failed");
548 dvmAbort();
549 }
550 BasicBlock *bottomBlock = dvmCompilerNewBB(kDalvikByteCode,
551 cUnit->numBlocks++);
552 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bottomBlock);
553
554 bottomBlock->startOffset = codeOffset;
555 bottomBlock->firstMIRInsn = insn;
556 bottomBlock->lastMIRInsn = origBlock->lastMIRInsn;
557
558 /* Handle the taken path */
559 bottomBlock->taken = origBlock->taken;
560 if (bottomBlock->taken) {
561 origBlock->taken = NULL;
562 dvmCompilerClearBit(bottomBlock->taken->predecessors, origBlock->id);
563 dvmCompilerSetBit(bottomBlock->taken->predecessors, bottomBlock->id);
564 }
565
566 /* Handle the fallthrough path */
567 bottomBlock->needFallThroughBranch = origBlock->needFallThroughBranch;
568 bottomBlock->fallThrough = origBlock->fallThrough;
569 origBlock->fallThrough = bottomBlock;
570 origBlock->needFallThroughBranch = true;
571 dvmCompilerSetBit(bottomBlock->predecessors, origBlock->id);
572 if (bottomBlock->fallThrough) {
573 dvmCompilerClearBit(bottomBlock->fallThrough->predecessors,
574 origBlock->id);
575 dvmCompilerSetBit(bottomBlock->fallThrough->predecessors,
576 bottomBlock->id);
577 }
578
579 /* Handle the successor list */
580 if (origBlock->successorBlockList.blockListType != kNotUsed) {
581 bottomBlock->successorBlockList = origBlock->successorBlockList;
582 origBlock->successorBlockList.blockListType = kNotUsed;
583 GrowableListIterator iterator;
584
585 dvmGrowableListIteratorInit(&bottomBlock->successorBlockList.blocks,
586 &iterator);
587 while (true) {
588 SuccessorBlockInfo *successorBlockInfo =
589 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator);
590 if (successorBlockInfo == NULL) break;
591 BasicBlock *bb = successorBlockInfo->block;
592 dvmCompilerClearBit(bb->predecessors, origBlock->id);
593 dvmCompilerSetBit(bb->predecessors, bottomBlock->id);
594 }
595 }
596
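/* Truncate the original block just before the split point and sever the MIR chain */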
597 origBlock->lastMIRInsn = insn->prev;
598
599 insn->prev->next = NULL;
600 insn->prev = NULL;
601 return bottomBlock;
602 }
603
604 /*
605 * Given a code offset, find the block that starts at it. If the offset
606 * is in the middle of an existing block, split it into two.
607 */
608 static BasicBlock *findBlock(CompilationUnit *cUnit,
609 unsigned int codeOffset,
610 bool split, bool create)
611 {
612 GrowableList *blockList = &cUnit->blockList;
613 BasicBlock *bb;
614 unsigned int i;
615
616 for (i = 0; i < blockList->numUsed; i++) {
617 bb = (BasicBlock *) blockList->elemList[i];
618 if (bb->blockType != kDalvikByteCode) continue;
619 if (bb->startOffset == codeOffset) return bb;
620 /* Check if a branch jumps into the middle of an existing block */
621 if ((split == true) && (codeOffset > bb->startOffset) &&
622 (bb->lastMIRInsn != NULL) &&
623 (codeOffset <= bb->lastMIRInsn->offset)) {
624 BasicBlock *newBB = splitBlock(cUnit, codeOffset, bb);
625 return newBB;
626 }
627 }
628 if (create) {
629 bb = dvmCompilerNewBB(kDalvikByteCode, cUnit->numBlocks++);
630 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
631 bb->startOffset = codeOffset;
632 return bb;
633 }
634 return NULL;
635 }
636
637 /* Dump the CFG into a DOT graph */
638 void dvmDumpCFG(CompilationUnit *cUnit, const char *dirPrefix)
639 {
640 const Method *method = cUnit->method;
641 FILE *file;
642 char *signature = dexProtoCopyMethodDescriptor(&method->prototype);
643 char startOffset[80];
644 sprintf(startOffset, "_%x", cUnit->entryBlock->fallThrough->startOffset);
645 char *fileName = (char *) dvmCompilerNew(
646 strlen(dirPrefix) +
647 strlen(method->clazz->descriptor) +
648 strlen(method->name) +
649 strlen(signature) +
650 strlen(startOffset) +
651 strlen(".dot") + 1, true);
652 sprintf(fileName, "%s%s%s%s%s.dot", dirPrefix,
653 method->clazz->descriptor, method->name, signature, startOffset);
654 free(signature);
655
656 /*
657 * Convert the special characters into a filesystem- and shell-friendly
658 * format.
659 */
660 int i;
661 for (i = strlen(dirPrefix); fileName[i]; i++) {
662 if (fileName[i] == '/') {
663 fileName[i] = '_';
664 } else if (fileName[i] == ';') {
665 fileName[i] = '#';
666 } else if (fileName[i] == '$') {
667 fileName[i] = '+';
668 } else if (fileName[i] == '(' || fileName[i] == ')') {
669 fileName[i] = '@';
670 } else if (fileName[i] == '<' || fileName[i] == '>') {
671 fileName[i] = '=';
672 }
673 }
674 file = fopen(fileName, "w");
675 if (file == NULL) {
676 return;
677 }
678 fprintf(file, "digraph G {\n");
679
680 fprintf(file, " rankdir=TB\n");
681
682 int numReachableBlocks = cUnit->numReachableBlocks;
683 int idx;
684 const GrowableList *blockList = &cUnit->blockList;
685
686 for (idx = 0; idx < numReachableBlocks; idx++) {
687 int blockIdx = cUnit->dfsOrder.elemList[idx];
688 BasicBlock *bb = (BasicBlock *) dvmGrowableListGetElement(blockList,
689 blockIdx);
690 if (bb == NULL) break;
691 if (bb->blockType == kEntryBlock) {
692 fprintf(file, " entry [shape=Mdiamond];\n");
693 } else if (bb->blockType == kExitBlock) {
694 fprintf(file, " exit [shape=Mdiamond];\n");
695 } else if (bb->blockType == kDalvikByteCode) {
696 fprintf(file, " block%04x [shape=record,label = \"{ \\\n",
697 bb->startOffset);
698 const MIR *mir;
699 fprintf(file, " {block id %d\\l}%s\\\n", bb->id,
700 bb->firstMIRInsn ? " | " : " ");
701 for (mir = bb->firstMIRInsn; mir; mir = mir->next) {
702 fprintf(file, " {%04x %s\\l}%s\\\n", mir->offset,
703 mir->ssaRep ?
704 dvmCompilerFullDisassembler(cUnit, mir) :
705 dexGetOpcodeName(mir->dalvikInsn.opcode),
706 mir->next ? " | " : " ");
707 }
708 fprintf(file, " }\"];\n\n");
709 } else if (bb->blockType == kExceptionHandling) {
710 char blockName[BLOCK_NAME_LEN];
711
712 dvmGetBlockName(bb, blockName);
713 fprintf(file, " %s [shape=invhouse];\n", blockName);
714 }
715
716 char blockName1[BLOCK_NAME_LEN], blockName2[BLOCK_NAME_LEN];
717
718 if (bb->taken) {
719 dvmGetBlockName(bb, blockName1);
720 dvmGetBlockName(bb->taken, blockName2);
721 fprintf(file, " %s:s -> %s:n [style=dotted]\n",
722 blockName1, blockName2);
723 }
724 if (bb->fallThrough) {
725 dvmGetBlockName(bb, blockName1);
726 dvmGetBlockName(bb->fallThrough, blockName2);
727 fprintf(file, " %s:s -> %s:n\n", blockName1, blockName2);
728 }
729
730 if (bb->successorBlockList.blockListType != kNotUsed) {
731 fprintf(file, " succ%04x [shape=%s,label = \"{ \\\n",
732 bb->startOffset,
733 (bb->successorBlockList.blockListType == kCatch) ?
734 "Mrecord" : "record");
735 GrowableListIterator iterator;
736 dvmGrowableListIteratorInit(&bb->successorBlockList.blocks,
737 &iterator);
738 SuccessorBlockInfo *successorBlockInfo =
739 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator);
740
741 int succId = 0;
742 while (true) {
743 if (successorBlockInfo == NULL) break;
744
745 BasicBlock *destBlock = successorBlockInfo->block;
746 SuccessorBlockInfo *nextSuccessorBlockInfo =
747 (SuccessorBlockInfo *) dvmGrowableListIteratorNext(&iterator);
748
749 fprintf(file, " {<f%d> %04x: %04x\\l}%s\\\n",
750 succId++,
751 successorBlockInfo->key,
752 destBlock->startOffset,
753 (nextSuccessorBlockInfo != NULL) ? " | " : " ");
754
755 successorBlockInfo = nextSuccessorBlockInfo;
756 }
757 fprintf(file, " }\"];\n\n");
758
759 dvmGetBlockName(bb, blockName1);
760 fprintf(file, " %s:s -> succ%04x:n [style=dashed]\n",
761 blockName1, bb->startOffset);
762
763 if (bb->successorBlockList.blockListType == kPackedSwitch ||
764 bb->successorBlockList.blockListType == kSparseSwitch) {
765
766 dvmGrowableListIteratorInit(&bb->successorBlockList.blocks,
767 &iterator);
768
769 succId = 0;
770 while (true) {
771 SuccessorBlockInfo *successorBlockInfo =
772 (SuccessorBlockInfo *)
773 dvmGrowableListIteratorNext(&iterator);
774 if (successorBlockInfo == NULL) break;
775
776 BasicBlock *destBlock = successorBlockInfo->block;
777
778 dvmGetBlockName(destBlock, blockName2);
779 fprintf(file, " succ%04x:f%d:e -> %s:n\n",
780 bb->startOffset, succId++,
781 blockName2);
782 }
783 }
784 }
785 fprintf(file, "\n");
786
787 /*
788 * The following dumps the dominator-tree edges; set the #if below to 0 to skip it
789 */
790 #if 1
791 dvmGetBlockName(bb, blockName1);
792 fprintf(file, " cfg%s [label=\"%s\", shape=none];\n",
793 blockName1, blockName1);
794 if (bb->iDom) {
795 dvmGetBlockName(bb->iDom, blockName2);
796 fprintf(file, " cfg%s:s -> cfg%s:n\n\n",
797 blockName2, blockName1);
798 }
799 #endif
800 }
801 fprintf(file, "}\n");
802 fclose(file);
803 }
804
805 /* Verify that this block is listed as a successor of each of its claimed predecessors */
806 static bool verifyPredInfo(CompilationUnit *cUnit, BasicBlock *bb)
807 {
808 BitVectorIterator bvIterator;
809
810 dvmBitVectorIteratorInit(bb->predecessors, &bvIterator);
811 while (true) {
812 int blockIdx = dvmBitVectorIteratorNext(&bvIterator);
813 if (blockIdx == -1) break;
814 BasicBlock *predBB = (BasicBlock *)
815 dvmGrowableListGetElement(&cUnit->blockList, blockIdx);
816 bool found = false;
817 if (predBB->taken == bb) {
818 found = true;
819 } else if (predBB->fallThrough == bb) {
820 found = true;
821 } else if (predBB->successorBlockList.blockListType != kNotUsed) {
822 GrowableListIterator iterator;
823 dvmGrowableListIteratorInit(&predBB->successorBlockList.blocks,
824 &iterator);
825 while (true) {
826 SuccessorBlockInfo *successorBlockInfo =
827 (SuccessorBlockInfo *)
828 dvmGrowableListIteratorNext(&iterator);
829 if (successorBlockInfo == NULL) break;
830 BasicBlock *succBB = successorBlockInfo->block;
831 if (succBB == bb) {
832 found = true;
833 break;
834 }
835 }
836 }
837 if (found == false) {
838 char blockName1[BLOCK_NAME_LEN], blockName2[BLOCK_NAME_LEN];
839 dvmGetBlockName(bb, blockName1);
840 dvmGetBlockName(predBB, blockName2);
841 dvmDumpCFG(cUnit, "/sdcard/cfg/");
842 LOGE("Successor %s not found from %s",
843 blockName1, blockName2);
844 dvmAbort();
845 }
846 }
847 return true;
848 }
849
850 /* Identify code range in try blocks and set up the empty catch blocks */
851 static void processTryCatchBlocks(CompilationUnit *cUnit)
852 {
853 const Method *meth = cUnit->method;
854 const DexCode *pCode = dvmGetMethodCode(meth);
855 int triesSize = pCode->triesSize;
856 int i;
857 int offset;
858
859 if (triesSize == 0) {
860 return;
861 }
862
863 const DexTry *pTries = dexGetTries(pCode);
864 BitVector *tryBlockAddr = cUnit->tryBlockAddr;
865
866 /* Mark all the insn offsets in Try blocks */
867 for (i = 0; i < triesSize; i++) {
868 const DexTry* pTry = &pTries[i];
869 /* all in 16-bit units */
870 int startOffset = pTry->startAddr;
871 int endOffset = startOffset + pTry->insnCount;
872
873 for (offset = startOffset; offset < endOffset; offset++) {
874 dvmCompilerSetBit(tryBlockAddr, offset);
875 }
876 }
877
878 /* Iterate over each of the handlers to enqueue the empty Catch blocks */
879 offset = dexGetFirstHandlerOffset(pCode);
880 int handlersSize = dexGetHandlersSize(pCode);
881
882 for (i = 0; i < handlersSize; i++) {
883 DexCatchIterator iterator;
884 dexCatchIteratorInit(&iterator, pCode, offset);
885
886 for (;;) {
887 DexCatchHandler* handler = dexCatchIteratorNext(&iterator);
888
889 if (handler == NULL) {
890 break;
891 }
892
893 /*
894 * Create dummy catch blocks first. Since these are created before
895 * other blocks are processed, "split" is specified as false.
896 */
897 findBlock(cUnit, handler->address,
898 /* split */
899 false,
900 /* create */
901 true);
902 }
903
904 offset = dexCatchIteratorGetEndOffset(&iterator, pCode);
905 }
906 }
907
908 /* Process instructions with the kInstrCanBranch flag */
909 static void processCanBranch(CompilationUnit *cUnit, BasicBlock *curBlock,
910 MIR *insn, int curOffset, int width, int flags,
911 const u2* codePtr, const u2* codeEnd)
912 {
913 int target = curOffset;
914 switch (insn->dalvikInsn.opcode) {
915 case OP_GOTO:
916 case OP_GOTO_16:
917 case OP_GOTO_32:
918 target += (int) insn->dalvikInsn.vA;
919 break;
920 case OP_IF_EQ:
921 case OP_IF_NE:
922 case OP_IF_LT:
923 case OP_IF_GE:
924 case OP_IF_GT:
925 case OP_IF_LE:
926 target += (int) insn->dalvikInsn.vC;
927 break;
928 case OP_IF_EQZ:
929 case OP_IF_NEZ:
930 case OP_IF_LTZ:
931 case OP_IF_GEZ:
932 case OP_IF_GTZ:
933 case OP_IF_LEZ:
934 target += (int) insn->dalvikInsn.vB;
935 break;
936 default:
937 LOGE("Unexpected opcode(%d) with kInstrCanBranch set",
938 insn->dalvikInsn.opcode);
939 dvmAbort();
940 }
941 BasicBlock *takenBlock = findBlock(cUnit, target,
942 /* split */
943 true,
944 /* create */
945 true);
946 curBlock->taken = takenBlock;
947 dvmCompilerSetBit(takenBlock->predecessors, curBlock->id);
948
949 /* Always terminate the current block for conditional branches */
950 if (flags & kInstrCanContinue) {
951 BasicBlock *fallthroughBlock = findBlock(cUnit,
952 curOffset + width,
953 /*
954 * If the method is processed
955 * in sequential order from the
956 * beginning, we don't need to
957 * specify split for continue
958 * blocks. However, this
959 * routine can be called by
960 * compileLoop, which starts
961 * parsing the method from an
962 * arbitrary address in the
963 * method body.
964 */
965 true,
966 /* create */
967 true);
968 curBlock->fallThrough = fallthroughBlock;
969 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id);
970 } else if (codePtr < codeEnd) {
971 /* Create a fallthrough block for real instructions (incl. OP_NOP) */
972 if (contentIsInsn(codePtr)) {
973 findBlock(cUnit, curOffset + width,
974 /* split */
975 false,
976 /* create */
977 true);
978 }
979 }
980 }
981
982 /* Process instructions with the kInstrCanSwitch flag */
983 static void processCanSwitch(CompilationUnit *cUnit, BasicBlock *curBlock,
984 MIR *insn, int curOffset, int width, int flags)
985 {
986 u2 *switchData= (u2 *) (cUnit->method->insns + curOffset +
987 insn->dalvikInsn.vB);
988 int size;
989 int *keyTable;
990 int *targetTable;
991 int i;
992 int firstKey;
993
994 /*
995 * Packed switch data format:
996 * ushort ident = 0x0100 magic value
997 * ushort size number of entries in the table
998 * int first_key first (and lowest) switch case value
999 * int targets[size] branch targets, relative to switch opcode
1000 *
1001 * Total size is (4+size*2) 16-bit code units.
1002 */
1003 if (insn->dalvikInsn.opcode == OP_PACKED_SWITCH) {
1004 assert(switchData[0] == kPackedSwitchSignature);
1005 size = switchData[1];
1006 firstKey = switchData[2] | (switchData[3] << 16);
1007 targetTable = (int *) &switchData[4];
1008 keyTable = NULL; // Make the compiler happy
1009 /*
1010 * Sparse switch data format:
1011 * ushort ident = 0x0200 magic value
1012 * ushort size number of entries in the table; > 0
1013 * int keys[size] keys, sorted low-to-high; 32-bit aligned
1014 * int targets[size] branch targets, relative to switch opcode
1015 *
1016 * Total size is (2+size*4) 16-bit code units.
1017 */
1018 } else {
1019 assert(switchData[0] == kSparseSwitchSignature);
1020 size = switchData[1];
1021 keyTable = (int *) &switchData[2];
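/* The keys occupy size*2 code units starting at switchData[2]; the targets follow */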
1022 targetTable = (int *) &switchData[2 + size*2];
1023 firstKey = 0; // To make the compiler happy
1024 }
1025
1026 if (curBlock->successorBlockList.blockListType != kNotUsed) {
1027 LOGE("Successor block list already in use: %d",
1028 curBlock->successorBlockList.blockListType);
1029 dvmAbort();
1030 }
1031 curBlock->successorBlockList.blockListType =
1032 (insn->dalvikInsn.opcode == OP_PACKED_SWITCH) ?
1033 kPackedSwitch : kSparseSwitch;
1034 dvmInitGrowableList(&curBlock->successorBlockList.blocks, size);
1035
1036 for (i = 0; i < size; i++) {
1037 BasicBlock *caseBlock = findBlock(cUnit, curOffset + targetTable[i],
1038 /* split */
1039 true,
1040 /* create */
1041 true);
1042 SuccessorBlockInfo *successorBlockInfo =
1043 (SuccessorBlockInfo *) dvmCompilerNew(sizeof(SuccessorBlockInfo),
1044 false);
1045 successorBlockInfo->block = caseBlock;
1046 successorBlockInfo->key = (insn->dalvikInsn.opcode == OP_PACKED_SWITCH)?
1047 firstKey + i : keyTable[i];
1048 dvmInsertGrowableList(&curBlock->successorBlockList.blocks,
1049 (intptr_t) successorBlockInfo);
1050 dvmCompilerSetBit(caseBlock->predecessors, curBlock->id);
1051 }
1052
1053 /* Fall-through case */
1054 BasicBlock *fallthroughBlock = findBlock(cUnit,
1055 curOffset + width,
1056 /* split */
1057 false,
1058 /* create */
1059 true);
1060 curBlock->fallThrough = fallthroughBlock;
1061 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id);
1062 }
1063
1064 /* Process instructions with the kInstrCanThrow flag */
1065 static void processCanThrow(CompilationUnit *cUnit, BasicBlock *curBlock,
1066 MIR *insn, int curOffset, int width, int flags,
1067 BitVector *tryBlockAddr, const u2 *codePtr,
1068 const u2* codeEnd)
1069 {
1070 const Method *method = cUnit->method;
1071 const DexCode *dexCode = dvmGetMethodCode(method);
1072
1073 /* In try block */
1074 if (dvmIsBitSet(tryBlockAddr, curOffset)) {
1075 DexCatchIterator iterator;
1076
1077 if (!dexFindCatchHandler(&iterator, dexCode, curOffset)) {
1078 LOGE("Catch block not found in dexfile for insn %x in %s",
1079 curOffset, method->name);
1080 dvmAbort();
1081
1082 }
1083 if (curBlock->successorBlockList.blockListType != kNotUsed) {
1084 LOGE("Successor block list already in use: %d",
1085 curBlock->successorBlockList.blockListType);
1086 dvmAbort();
1087 }
1088 curBlock->successorBlockList.blockListType = kCatch;
1089 dvmInitGrowableList(&curBlock->successorBlockList.blocks, 2);
1090
1091 for (;;) {
1092 DexCatchHandler* handler = dexCatchIteratorNext(&iterator);
1093
1094 if (handler == NULL) {
1095 break;
1096 }
1097
1098 BasicBlock *catchBlock = findBlock(cUnit, handler->address,
1099 /* split */
1100 false,
1101 /* create */
1102 false);
1103
1104 SuccessorBlockInfo *successorBlockInfo =
1105 (SuccessorBlockInfo *) dvmCompilerNew(sizeof(SuccessorBlockInfo),
1106 false);
1107 successorBlockInfo->block = catchBlock;
1108 successorBlockInfo->key = handler->typeIdx;
1109 dvmInsertGrowableList(&curBlock->successorBlockList.blocks,
1110 (intptr_t) successorBlockInfo);
1111 dvmCompilerSetBit(catchBlock->predecessors, curBlock->id);
1112 }
1113 } else {
1114 BasicBlock *ehBlock = dvmCompilerNewBB(kExceptionHandling,
1115 cUnit->numBlocks++);
1116 curBlock->taken = ehBlock;
1117 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) ehBlock);
1118 ehBlock->startOffset = curOffset;
1119 dvmCompilerSetBit(ehBlock->predecessors, curBlock->id);
1120 }
1121
1122 /*
1123 * Force the current block to terminate.
1124 *
1125 * Data may be present before codeEnd, so we need to parse it to know
1126 * whether it is code or data.
1127 */
1128 if (codePtr < codeEnd) {
1129 /* Create a fallthrough block for real instructions (incl. OP_NOP) */
1130 if (contentIsInsn(codePtr)) {
1131 BasicBlock *fallthroughBlock = findBlock(cUnit,
1132 curOffset + width,
1133 /* split */
1134 false,
1135 /* create */
1136 true);
1137 /*
1138 * OP_THROW and OP_THROW_VERIFICATION_ERROR are unconditional
1139 * branches.
1140 */
1141 if (insn->dalvikInsn.opcode != OP_THROW_VERIFICATION_ERROR &&
1142 insn->dalvikInsn.opcode != OP_THROW) {
1143 curBlock->fallThrough = fallthroughBlock;
1144 dvmCompilerSetBit(fallthroughBlock->predecessors, curBlock->id);
1145 }
1146 }
1147 }
1148 }
1149
1150 /*
1151 * Similar to dvmCompileTrace, but the entity processed here is the whole
1152 * method.
1153 *
1154 * TODO: implementation will be revisited when the trace builder can provide
1155 * whole-method traces.
1156 */
1157 bool dvmCompileMethod(const Method *method, JitTranslationInfo *info)
1158 {
1159 CompilationUnit cUnit;
1160 const DexCode *dexCode = dvmGetMethodCode(method);
1161 const u2 *codePtr = dexCode->insns;
1162 const u2 *codeEnd = dexCode->insns + dexCode->insnsSize;
1163 int numBlocks = 0;
1164 unsigned int curOffset = 0;
1165
1166 /* Method already compiled */
1167 if (dvmJitGetMethodAddr(codePtr)) {
1168 info->codeAddress = NULL;
1169 return false;
1170 }
1171
1172 memset(&cUnit, 0, sizeof(cUnit));
1173 cUnit.method = method;
1174
1175 cUnit.jitMode = kJitMethod;
1176
1177 /* Initialize the block list */
1178 dvmInitGrowableList(&cUnit.blockList, 4);
1179
1180 /*
1181 * FIXME - PC reconstruction list won't be needed after the codegen routines
1182 * are enhanced to true method mode.
1183 */
1184 /* Initialize the PC reconstruction list */
1185 dvmInitGrowableList(&cUnit.pcReconstructionList, 8);
1186
1187 /* Allocate the bit-vector to track the beginning of basic blocks */
1188 BitVector *tryBlockAddr = dvmCompilerAllocBitVector(dexCode->insnsSize,
1189 true /* expandable */);
1190 cUnit.tryBlockAddr = tryBlockAddr;
1191
1192 /* Create the default entry and exit blocks and enter them to the list */
1193 BasicBlock *entryBlock = dvmCompilerNewBB(kEntryBlock, numBlocks++);
1194 BasicBlock *exitBlock = dvmCompilerNewBB(kExitBlock, numBlocks++);
1195
1196 cUnit.entryBlock = entryBlock;
1197 cUnit.exitBlock = exitBlock;
1198
1199 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) entryBlock);
1200 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) exitBlock);
1201
1202 /* Current block to record parsed instructions */
1203 BasicBlock *curBlock = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1204 curBlock->startOffset = 0;
1205 dvmInsertGrowableList(&cUnit.blockList, (intptr_t) curBlock);
1206 entryBlock->fallThrough = curBlock;
1207 dvmCompilerSetBit(curBlock->predecessors, entryBlock->id);
1208
1209 /*
1210 * Store back the number of blocks since new blocks may be created by code
1211 * that accesses cUnit.
1212 */
1213 cUnit.numBlocks = numBlocks;
1214
1215 /* Identify code range in try blocks and set up the empty catch blocks */
1216 processTryCatchBlocks(&cUnit);
1217
1218 /* Parse all instructions and put them into containing basic blocks */
1219 while (codePtr < codeEnd) {
1220 MIR *insn = (MIR *) dvmCompilerNew(sizeof(MIR), true);
1221 insn->offset = curOffset;
1222 int width = parseInsn(codePtr, &insn->dalvikInsn, false);
1223 insn->width = width;
1224
1225 /* Terminate when the data section is seen */
1226 if (width == 0)
1227 break;
1228
1229 dvmCompilerAppendMIR(curBlock, insn);
1230
1231 codePtr += width;
1232 int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);
1233
1234 if (flags & kInstrCanBranch) {
1235 processCanBranch(&cUnit, curBlock, insn, curOffset, width, flags,
1236 codePtr, codeEnd);
1237 } else if (flags & kInstrCanReturn) {
1238 curBlock->fallThrough = exitBlock;
1239 dvmCompilerSetBit(exitBlock->predecessors, curBlock->id);
1240 /*
1241 * Terminate the current block if there are instructions
1242 * afterwards.
1243 */
1244 if (codePtr < codeEnd) {
1245 /*
1246 * Create a fallthrough block for real instructions
1247 * (incl. OP_NOP).
1248 */
1249 if (contentIsInsn(codePtr)) {
1250 findBlock(&cUnit, curOffset + width,
1251 /* split */
1252 false,
1253 /* create */
1254 true);
1255 }
1256 }
1257 } else if (flags & kInstrCanThrow) {
1258 processCanThrow(&cUnit, curBlock, insn, curOffset, width, flags,
1259 tryBlockAddr, codePtr, codeEnd);
1260 } else if (flags & kInstrCanSwitch) {
1261 processCanSwitch(&cUnit, curBlock, insn, curOffset, width, flags);
1262 }
1263 curOffset += width;
1264 BasicBlock *nextBlock = findBlock(&cUnit, curOffset,
1265 /* split */
1266 false,
1267 /* create */
1268 false);
1269 if (nextBlock) {
1270 /*
1271 * The next instruction could be the target of a previously parsed
1272 * forward branch so a block is already created. If the current
1273 * instruction is not an unconditional branch, connect them through
1274 * the fall-through link.
1275 */
1276 assert(curBlock->fallThrough == NULL ||
1277 curBlock->fallThrough == nextBlock ||
1278 curBlock->fallThrough == exitBlock);
1279
1280 if ((curBlock->fallThrough == NULL) &&
1281 (flags & kInstrCanContinue)) {
1282 curBlock->fallThrough = nextBlock;
1283 dvmCompilerSetBit(nextBlock->predecessors, curBlock->id);
1284 }
1285 curBlock = nextBlock;
1286 }
1287 }
1288
1289 if (cUnit.printMe) {
1290 dvmCompilerDumpCompilationUnit(&cUnit);
1291 }
1292
1293 /* Adjust this value accordingly once inlining is performed */
1294 cUnit.numDalvikRegisters = cUnit.method->registersSize;
1295
1296 /* Verify if all blocks are connected as claimed */
1297 /* FIXME - to be disabled in the future */
1298 dvmCompilerDataFlowAnalysisDispatcher(&cUnit, verifyPredInfo,
1299 kAllNodes,
1300 false /* isIterative */);
1301
1302
1303 /* Perform SSA transformation for the whole method */
1304 dvmCompilerMethodSSATransformation(&cUnit);
1305
1306 dvmCompilerInitializeRegAlloc(&cUnit); // Needs to happen after SSA naming
1307
1308 /* Allocate Registers using simple local allocation scheme */
1309 dvmCompilerLocalRegAlloc(&cUnit);
1310
1311 /* Convert MIR to LIR, etc. */
1312 dvmCompilerMethodMIR2LIR(&cUnit);
1313
1314 // Debugging only
1315 //dvmDumpCFG(&cUnit, "/sdcard/cfg/");
1316
1317 /* Method is not empty */
1318 if (cUnit.firstLIRInsn) {
1319 /* Convert LIR into machine code. Loop for recoverable retries */
1320 do {
1321 dvmCompilerAssembleLIR(&cUnit, info);
1322 cUnit.assemblerRetries++;
1323 if (cUnit.printMe && cUnit.assemblerStatus != kSuccess)
1324 LOGD("Assembler abort #%d on %d",cUnit.assemblerRetries,
1325 cUnit.assemblerStatus);
1326 } while (cUnit.assemblerStatus == kRetryAll);
1327
1328 if (cUnit.printMe) {
1329 dvmCompilerCodegenDump(&cUnit);
1330 }
1331
1332 if (info->codeAddress) {
1333 dvmJitSetCodeAddr(dexCode->insns, info->codeAddress,
1334 info->instructionSet, true, 0);
1335 /*
1336 * Clear the codeAddress for the enclosing trace to reuse the info
1337 */
1338 info->codeAddress = NULL;
1339 }
1340 }
1341
1342 return false;
1343 }
1344
1345 /* Extend the trace by crawling the code starting from curBlock */
1346 static bool exhaustTrace(CompilationUnit *cUnit, BasicBlock *curBlock)
1347 {
1348 unsigned int curOffset = curBlock->startOffset;
1349 const u2 *codePtr = cUnit->method->insns + curOffset;
1350
1351 if (curBlock->visited == true) return false;
1352
1353 curBlock->visited = true;
1354
1355 if (curBlock->blockType == kEntryBlock ||
1356 curBlock->blockType == kExitBlock) {
1357 return false;
1358 }
1359
1360 /*
1361 * Block has been parsed - check the taken/fallThrough in case it is a split
1362 * block.
1363 */
1364 if (curBlock->firstMIRInsn != NULL) {
1365 bool changed = false;
1366 if (curBlock->taken)
1367 changed |= exhaustTrace(cUnit, curBlock->taken);
1368 if (curBlock->fallThrough)
1369 changed |= exhaustTrace(cUnit, curBlock->fallThrough);
1370 return changed;
1371 }
1372 while (true) {
1373 MIR *insn = (MIR *) dvmCompilerNew(sizeof(MIR), true);
1374 insn->offset = curOffset;
1375 int width = parseInsn(codePtr, &insn->dalvikInsn, false);
1376 insn->width = width;
1377
1378 /* Terminate when the data section is seen */
1379 if (width == 0)
1380 break;
1381
1382 dvmCompilerAppendMIR(curBlock, insn);
1383
1384 codePtr += width;
1385 int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);
1386
1387 /* Stop extending the trace after seeing these instructions */
1388 if (flags & (kInstrCanReturn | kInstrCanSwitch | kInstrInvoke)) {
1389 curBlock->fallThrough = cUnit->exitBlock;
1390 dvmCompilerSetBit(cUnit->exitBlock->predecessors, curBlock->id);
1391 break;
1392 } else if (flags & kInstrCanBranch) {
1393 processCanBranch(cUnit, curBlock, insn, curOffset, width, flags,
1394 codePtr, NULL);
1395 if (curBlock->taken) {
1396 exhaustTrace(cUnit, curBlock->taken);
1397 }
1398 if (curBlock->fallThrough) {
1399 exhaustTrace(cUnit, curBlock->fallThrough);
1400 }
1401 break;
1402 }
1403 curOffset += width;
1404 BasicBlock *nextBlock = findBlock(cUnit, curOffset,
1405 /* split */
1406 false,
1407 /* create */
1408 false);
1409 if (nextBlock) {
1410 /*
1411 * The next instruction could be the target of a previously parsed
1412 * forward branch so a block is already created. If the current
1413 * instruction is not an unconditional branch, connect them through
1414 * the fall-through link.
1415 */
1416 assert(curBlock->fallThrough == NULL ||
1417 curBlock->fallThrough == nextBlock ||
1418 curBlock->fallThrough == cUnit->exitBlock);
1419
1420 if ((curBlock->fallThrough == NULL) &&
1421 (flags & kInstrCanContinue)) {
1422 curBlock->needFallThroughBranch = true;
1423 curBlock->fallThrough = nextBlock;
1424 dvmCompilerSetBit(nextBlock->predecessors, curBlock->id);
1425 }
1426 /* Block has been visited - no more parsing needed */
1427 if (nextBlock->visited == true) {
1428 return true;
1429 }
1430 curBlock = nextBlock;
1431 }
1432 }
1433 return true;
1434 }
1435
1436 /* Compile a loop */
1437 static bool compileLoop(CompilationUnit *cUnit, unsigned int startOffset,
1438 JitTraceDescription *desc, int numMaxInsts,
1439 JitTranslationInfo *info, jmp_buf *bailPtr,
1440 int optHints)
1441 {
1442 int numBlocks = 0;
1443 unsigned int curOffset = startOffset;
1444 bool changed;
1445 BasicBlock *bb;
1446 #if defined(WITH_JIT_TUNING)
1447 CompilerMethodStats *methodStats;
1448 #endif
1449
1450 cUnit->jitMode = kJitLoop;
1451
1452 /* Initialize the block list */
1453 dvmInitGrowableList(&cUnit->blockList, 4);
1454
1455 /* Initialize the PC reconstruction list */
1456 dvmInitGrowableList(&cUnit->pcReconstructionList, 8);
1457
1458 /* Create the default entry and exit blocks and enter them to the list */
1459 BasicBlock *entryBlock = dvmCompilerNewBB(kEntryBlock, numBlocks++);
1460 entryBlock->startOffset = curOffset;
1461 BasicBlock *exitBlock = dvmCompilerNewBB(kExitBlock, numBlocks++);
1462
1463 cUnit->entryBlock = entryBlock;
1464 cUnit->exitBlock = exitBlock;
1465
1466 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) entryBlock);
1467 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) exitBlock);
1468
1469 /* Current block to record parsed instructions */
1470 BasicBlock *curBlock = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1471 curBlock->startOffset = curOffset;
1472
1473 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) curBlock);
1474 entryBlock->fallThrough = curBlock;
1475 dvmCompilerSetBit(curBlock->predecessors, entryBlock->id);
1476
1477 /*
1478 * Store back the number of blocks since new blocks may be created by code
1479 * that accesses cUnit.
1480 */
1481 cUnit->numBlocks = numBlocks;
1482
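/*
 * Crawl the code from the first bytecode block repeatedly, clearing the
 * visited flags before each pass, until a pass reports no further change.
 */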
1483 do {
1484 dvmCompilerDataFlowAnalysisDispatcher(cUnit,
1485 dvmCompilerClearVisitedFlag,
1486 kAllNodes,
1487 false /* isIterative */);
1488 changed = exhaustTrace(cUnit, curBlock);
1489 } while (changed);
1490
1491 /* Backward chaining block */
1492 bb = dvmCompilerNewBB(kChainingCellBackwardBranch, cUnit->numBlocks++);
1493 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
1494 cUnit->backChainBlock = bb;
1495
1496 /* A special block to host PC reconstruction code */
1497 bb = dvmCompilerNewBB(kPCReconstruction, cUnit->numBlocks++);
1498 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
1499
1500 /* And one final block that publishes the PC and raises the exception */
1501 bb = dvmCompilerNewBB(kExceptionHandling, cUnit->numBlocks++);
1502 dvmInsertGrowableList(&cUnit->blockList, (intptr_t) bb);
1503 cUnit->puntBlock = bb;
1504
1505 cUnit->numDalvikRegisters = cUnit->method->registersSize;
1506
1507 /* Verify if all blocks are connected as claimed */
1508 /* FIXME - to be disabled in the future */
1509 dvmCompilerDataFlowAnalysisDispatcher(cUnit, verifyPredInfo,
1510 kAllNodes,
1511 false /* isIterative */);
1512
1513
1514 /* Try to identify a loop */
1515 if (!dvmCompilerBuildLoop(cUnit))
1516 goto bail;
1517
1518 dvmCompilerLoopOpt(cUnit);
1519
1520 /*
1521 * Change the backward branch to the backward chaining cell after dataflow
1522 * analysis/optimizations are done.
1523 */
1524 dvmCompilerInsertBackwardChaining(cUnit);
1525
1526 dvmCompilerInitializeRegAlloc(cUnit);
1527
1528 /* Allocate Registers using simple local allocation scheme */
1529 dvmCompilerLocalRegAlloc(cUnit);
1530
1531 /* Convert MIR to LIR, etc. */
1532 dvmCompilerMIR2LIR(cUnit);
1533
1534 /* Loop contains never-executed blocks / heavy instructions */
1535 if (cUnit->quitLoopMode) {
1536 if (cUnit->printMe || gDvmJit.receivedSIGUSR2) {
1537 LOGD("Loop trace @ offset %04x aborted due to unresolved code info",
1538 cUnit->entryBlock->startOffset);
1539 }
1540 goto bail;
1541 }
1542
1543 /* Convert LIR into machine code. Loop for recoverable retries */
1544 do {
1545 dvmCompilerAssembleLIR(cUnit, info);
1546 cUnit->assemblerRetries++;
1547 if (cUnit->printMe && cUnit->assemblerStatus != kSuccess)
1548 LOGD("Assembler abort #%d on %d", cUnit->assemblerRetries,
1549 cUnit->assemblerStatus);
1550 } while (cUnit->assemblerStatus == kRetryAll);
1551
1552 /* Loop is too big - bail out */
1553 if (cUnit->assemblerStatus == kRetryHalve) {
1554 goto bail;
1555 }
1556
1557 if (cUnit->printMe || gDvmJit.receivedSIGUSR2) {
1558 LOGD("Loop trace @ offset %04x", cUnit->entryBlock->startOffset);
1559 dvmCompilerCodegenDump(cUnit);
1560 }
1561
1562 /*
1563 * If this trace uses class objects as constants,
1564 * dvmJitInstallClassObjectPointers will switch the thread state
1565 * to running and look up the class pointers using the descriptor/loader
1566 * tuple stored in the callsite info structure. We need to make this window
1567 * as short as possible since it is blocking GC.
1568 */
1569 if (cUnit->hasClassLiterals && info->codeAddress) {
1570 dvmJitInstallClassObjectPointers(cUnit, (char *) info->codeAddress);
1571 }
1572
1573 /*
1574 * Since callsiteinfo is allocated from the arena, delay the reset until
1575 * class pointers are resolved.
1576 */
1577 dvmCompilerArenaReset();
1578
1579 assert(cUnit->assemblerStatus == kSuccess);
1580 #if defined(WITH_JIT_TUNING)
1581 /* Locate the entry to store compilation statistics for this method */
1582 methodStats = dvmCompilerAnalyzeMethodBody(desc->method, false);
1583 methodStats->nativeSize += cUnit->totalSize;
1584 #endif
1585 return info->codeAddress != NULL;
1586
1587 bail:
1588 /* Retry the original trace with JIT_OPT_NO_LOOP disabled */
1589 dvmCompilerArenaReset();
1590 return dvmCompileTrace(desc, numMaxInsts, info, bailPtr,
1591 optHints | JIT_OPT_NO_LOOP);
1592 }
1593
1594 /*
1595 * Main entry point to start trace compilation. Basic blocks are constructed
1596 * first and they will be passed to the codegen routines to convert Dalvik
1597 * bytecode into machine code.
1598 */
1599 bool dvmCompileTrace(JitTraceDescription *desc, int numMaxInsts,
1600 JitTranslationInfo *info, jmp_buf *bailPtr,
1601 int optHints)
1602 {
1603 const DexCode *dexCode = dvmGetMethodCode(desc->method);
1604 const JitTraceRun* currRun = &desc->trace[0];
1605 unsigned int curOffset = currRun->info.frag.startOffset;
1606 unsigned int startOffset = curOffset;
1607 unsigned int numInsts = currRun->info.frag.numInsts;
1608 const u2 *codePtr = dexCode->insns + curOffset;
1609 int traceSize = 0; // # of half-words
1610 const u2 *startCodePtr = codePtr;
1611 BasicBlock *curBB, *entryCodeBB;
1612 int numBlocks = 0;
1613 static int compilationId;
1614 CompilationUnit cUnit;
1615 GrowableList *blockList;
1616 #if defined(WITH_JIT_TUNING)
1617 CompilerMethodStats *methodStats;
1618 #endif
1619
1620 /* If we've already compiled this trace, just return success */
1621 if (dvmJitGetTraceAddr(startCodePtr) && !info->discardResult) {
1622 /*
1623 * Make sure the codeAddress is NULL so that it won't clobber the
1624 * existing entry.
1625 */
1626 info->codeAddress = NULL;
1627 return true;
1628 }
1629
1630 /* If the work order is stale, discard it */
1631 if (info->cacheVersion != gDvmJit.cacheVersion) {
1632 return false;
1633 }
1634
1635 compilationId++;
1636 memset(&cUnit, 0, sizeof(CompilationUnit));
1637
1638 #if defined(WITH_JIT_TUNING)
1639 /* Locate the entry to store compilation statistics for this method */
1640 methodStats = dvmCompilerAnalyzeMethodBody(desc->method, false);
1641 #endif
1642
1643 /* Set the recover buffer pointer */
1644 cUnit.bailPtr = bailPtr;
1645
1646 /* Initialize the printMe flag */
1647 cUnit.printMe = gDvmJit.printMe;
1648
1649 /* Setup the method */
1650 cUnit.method = desc->method;
1651
1652 /* Store the trace descriptor and set the initial mode */
1653 cUnit.traceDesc = desc;
1654 cUnit.jitMode = kJitTrace;
1655
1656 /* Initialize the PC reconstruction list */
1657 dvmInitGrowableList(&cUnit.pcReconstructionList, 8);
1658
1659 /* Initialize the basic block list */
1660 blockList = &cUnit.blockList;
1661 dvmInitGrowableList(blockList, 8);
1662
1663 /* Identify traces that we don't want to compile */
1664 if (gDvmJit.methodTable) {
1665 int len = strlen(desc->method->clazz->descriptor) +
1666 strlen(desc->method->name) + 1;
1667 char *fullSignature = (char *)dvmCompilerNew(len, true);
1668 strcpy(fullSignature, desc->method->clazz->descriptor);
1669 strcat(fullSignature, desc->method->name);
1670
1671 int hashValue = dvmComputeUtf8Hash(fullSignature);
1672
1673 /*
1674 * Doing three levels of screening to see whether we want to skip
1675 * compiling this method
1676 */
1677
1678 /* First, check the full "class;method" signature */
1679 bool methodFound =
1680 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
1681 fullSignature, (HashCompareFunc) strcmp,
1682 false) !=
1683 NULL;
1684
1685 /* Full signature not found - check the enclosing class */
1686 if (methodFound == false) {
1687 int hashValue = dvmComputeUtf8Hash(desc->method->clazz->descriptor);
1688 methodFound =
1689 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
1690 (char *) desc->method->clazz->descriptor,
1691 (HashCompareFunc) strcmp, false) !=
1692 NULL;
1693 /* Enclosing class not found - check the method name */
1694 if (methodFound == false) {
1695 int hashValue = dvmComputeUtf8Hash(desc->method->name);
1696 methodFound =
1697 dvmHashTableLookup(gDvmJit.methodTable, hashValue,
1698 (char *) desc->method->name,
1699 (HashCompareFunc) strcmp, false) !=
1700 NULL;
1701
1702 /*
1703 * Debug by call-graph is enabled. Check if the debug list
1704 * covers any methods on the VM stack.
1705 */
1706 if (methodFound == false && gDvmJit.checkCallGraph == true) {
1707 methodFound =
1708 filterMethodByCallGraph(info->requestingThread,
1709 desc->method->name);
1710 }
1711 }
1712 }
1713
1714 /*
1715 * Under the following conditions, the trace will be *conservatively*
1716 * compiled, containing only single-step transitions to and from the
1717 * interpreter.
1718 * 1) If includeSelectedMethod == false, the method matches the full or
1719 * partial signature stored in the hash table.
1720 *
1721 * 2) If includeSelectedMethod == true, the method does not match the
1722 * full and partial signature stored in the hash table.
1723 */
1724 if (gDvmJit.includeSelectedMethod != methodFound) {
1725 cUnit.allSingleStep = true;
1726 } else {
1727 /* Compile the trace as normal */
1728
1729 /* Print the method we cherry picked */
1730 if (gDvmJit.includeSelectedMethod == true) {
1731 cUnit.printMe = true;
1732 }
1733 }
1734 }
1735
1736 /* Allocate the entry block */
1737 curBB = dvmCompilerNewBB(kEntryBlock, numBlocks++);
1738 dvmInsertGrowableList(blockList, (intptr_t) curBB);
1739 curBB->startOffset = curOffset;
1740
1741 entryCodeBB = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
1742 dvmInsertGrowableList(blockList, (intptr_t) entryCodeBB);
1743 entryCodeBB->startOffset = curOffset;
1744 curBB->fallThrough = entryCodeBB;
1745 curBB = entryCodeBB;
1746
1747 if (cUnit.printMe) {
1748 LOGD("--------\nCompiler: Building trace for %s, offset %#x",
1749 desc->method->name, curOffset);
1750 }
1751
1752 /*
1753 * Analyze the trace descriptor and include up to the maximum number
1754 * of Dalvik instructions in the IR.
1755 */
1756 while (1) {
1757 MIR *insn;
1758 int width;
1759 insn = (MIR *)dvmCompilerNew(sizeof(MIR), true);
1760 insn->offset = curOffset;
1761 width = parseInsn(codePtr, &insn->dalvikInsn, cUnit.printMe);
1762
        /* The trace should never include instruction data */
        assert(width);
        insn->width = width;
        traceSize += width;
        dvmCompilerAppendMIR(curBB, insn);
        cUnit.numInsts++;

        int flags = dexGetFlagsFromOpcode(insn->dalvikInsn.opcode);

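        /*
         * For invokes, the trace description carries extra meta entries
         * (callee method, class descriptor, and class loader). Record them in
         * a CallsiteInfo so the descriptor/loader pair can later be used to
         * install class object pointers into the compiled code (see below).
         */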
        if (flags & kInstrInvoke) {
            const Method *calleeMethod = (const Method *)
                currRun[JIT_TRACE_CUR_METHOD].info.meta;
            assert(numInsts == 1);
            CallsiteInfo *callsiteInfo =
                (CallsiteInfo *)dvmCompilerNew(sizeof(CallsiteInfo), true);
            callsiteInfo->classDescriptor = (const char *)
                currRun[JIT_TRACE_CLASS_DESC].info.meta;
            callsiteInfo->classLoader = (Object *)
                currRun[JIT_TRACE_CLASS_LOADER].info.meta;
            callsiteInfo->method = calleeMethod;
            insn->meta.callsiteInfo = callsiteInfo;
        }

        /* Instruction limit reached - terminate the trace here */
        if (cUnit.numInsts >= numMaxInsts) {
            break;
        }
        if (--numInsts == 0) {
            if (currRun->info.frag.runEnd) {
                break;
            } else {
                /* Advance to the next trace description (i.e. non-meta info) */
                do {
                    currRun++;
                } while (!currRun->isCode);

                /* Dummy end-of-run marker seen */
                if (currRun->info.frag.numInsts == 0) {
                    break;
                }

                curBB = dvmCompilerNewBB(kDalvikByteCode, numBlocks++);
                dvmInsertGrowableList(blockList, (intptr_t) curBB);
                curOffset = currRun->info.frag.startOffset;
                numInsts = currRun->info.frag.numInsts;
                curBB->startOffset = curOffset;
                codePtr = dexCode->insns + curOffset;
            }
        } else {
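            /* Still inside the current run - step to the next instruction */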
            curOffset += width;
            codePtr += width;
        }
    }

#if defined(WITH_JIT_TUNING)
    /* Convert # of half-words to bytes */
    methodStats->compiledDalvikSize += traceSize * 2;
#endif

    /*
     * Now scan basic blocks containing real code to connect the
     * taken/fallthrough links. Also create chaining cells for code not included
     * in the trace.
     */
    size_t blockId;
    for (blockId = 0; blockId < blockList->numUsed; blockId++) {
        curBB = (BasicBlock *) dvmGrowableListGetElement(blockList, blockId);
        MIR *lastInsn = curBB->lastMIRInsn;
        /* Skip empty blocks */
        if (lastInsn == NULL) {
            continue;
        }
        curOffset = lastInsn->offset;
        unsigned int targetOffset = curOffset;
        unsigned int fallThroughOffset = curOffset + lastInsn->width;
        bool isInvoke = false;
        const Method *callee = NULL;

        findBlockBoundary(desc->method, curBB->lastMIRInsn, curOffset,
                          &targetOffset, &isInvoke, &callee);

        /* Link the taken and fallthrough blocks */
        BasicBlock *searchBB;

        int flags = dexGetFlagsFromOpcode(lastInsn->dalvikInsn.opcode);

        if (flags & kInstrInvoke) {
            cUnit.hasInvoke = true;
        }

        /* Backward branch seen */
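        /*
         * A branch whose target precedes the current offset closes a loop;
         * throw away the blocks built so far and recompile this trace with
         * the loop compiler, unless loop optimization is disabled.
         */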
        if (isInvoke == false &&
            (flags & kInstrCanBranch) != 0 &&
            targetOffset < curOffset &&
            (optHints & JIT_OPT_NO_LOOP) == 0) {
            dvmCompilerArenaReset();
            return compileLoop(&cUnit, startOffset, desc, numMaxInsts,
                               info, bailPtr, optHints);
        }

        /* No backward branch in the trace - start searching the next BB */
        size_t searchBlockId;
        for (searchBlockId = blockId+1; searchBlockId < blockList->numUsed;
             searchBlockId++) {
            searchBB = (BasicBlock *) dvmGrowableListGetElement(blockList,
                                                                searchBlockId);
            if (targetOffset == searchBB->startOffset) {
                curBB->taken = searchBB;
                dvmCompilerSetBit(searchBB->predecessors, curBB->id);
            }
            if (fallThroughOffset == searchBB->startOffset) {
                curBB->fallThrough = searchBB;
                dvmCompilerSetBit(searchBB->predecessors, curBB->id);

                /*
                 * Fallthrough block of an invoke instruction needs to be
                 * aligned to 4-byte boundary (alignment instruction to be
                 * inserted later).
                 */
                if (flags & kInstrInvoke) {
                    searchBB->isFallThroughFromInvoke = true;
                }
            }
        }

        /*
         * Some blocks are ended by non-control-flow-change instructions,
         * currently only due to trace length constraint. In this case we need
         * to generate an explicit branch at the end of the block to jump to
         * the chaining cell.
         */
        curBB->needFallThroughBranch =
            ((flags & (kInstrCanBranch | kInstrCanSwitch | kInstrCanReturn |
                       kInstrInvoke)) == 0);
        if (lastInsn->dalvikInsn.opcode == OP_PACKED_SWITCH ||
            lastInsn->dalvikInsn.opcode == OP_SPARSE_SWITCH) {
            int i;
            const u2 *switchData = desc->method->insns + lastInsn->offset +
                lastInsn->dalvikInsn.vB;
            int size = switchData[1];
            int maxChains = MIN(size, MAX_CHAINED_SWITCH_CASES);

            /*
             * Generate the landing pad for cases whose ranks are higher than
             * MAX_CHAINED_SWITCH_CASES. The code will re-enter the interpreter
             * through the NoChain point.
             */
            if (maxChains != size) {
                cUnit.switchOverflowPad =
                    desc->method->insns + lastInsn->offset;
            }

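            /*
             * Locate the case target table in the switch payload: code unit 0
             * is the ident, code unit 1 is the case count; packed-switch data
             * then has a 2-unit first_key before the targets, while
             * sparse-switch data has size*2 units of keys before the targets.
             */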
            s4 *targets = (s4 *) (switchData + 2 +
                (lastInsn->dalvikInsn.opcode == OP_PACKED_SWITCH ?
                 2 : size * 2));

            /* One chaining cell for the first MAX_CHAINED_SWITCH_CASES cases */
            for (i = 0; i < maxChains; i++) {
                BasicBlock *caseChain = dvmCompilerNewBB(kChainingCellNormal,
                                                         numBlocks++);
                dvmInsertGrowableList(blockList, (intptr_t) caseChain);
                caseChain->startOffset = lastInsn->offset + targets[i];
            }

            /* One more chaining cell for the default case */
            BasicBlock *caseChain = dvmCompilerNewBB(kChainingCellNormal,
                                                     numBlocks++);
            dvmInsertGrowableList(blockList, (intptr_t) caseChain);
            caseChain->startOffset = lastInsn->offset + lastInsn->width;
        /* Fallthrough block not included in the trace */
        } else if (!isUnconditionalBranch(lastInsn) &&
                   curBB->fallThrough == NULL) {
            BasicBlock *fallThroughBB;
            /*
             * If the chaining cell is after an invoke or an instruction that
             * cannot change the control flow, request a hot chaining cell.
             */
            if (isInvoke || curBB->needFallThroughBranch) {
                fallThroughBB = dvmCompilerNewBB(kChainingCellHot, numBlocks++);
            } else {
                fallThroughBB = dvmCompilerNewBB(kChainingCellNormal,
                                                 numBlocks++);
            }
            dvmInsertGrowableList(blockList, (intptr_t) fallThroughBB);
            fallThroughBB->startOffset = fallThroughOffset;
            curBB->fallThrough = fallThroughBB;
            dvmCompilerSetBit(fallThroughBB->predecessors, curBB->id);
        }
        /* Target block not included in the trace */
        if (curBB->taken == NULL &&
            (isGoto(lastInsn) || isInvoke ||
             (targetOffset != UNKNOWN_TARGET && targetOffset != curOffset))) {
            BasicBlock *newBB = NULL;
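            /*
             * Pick the chaining cell type: a singleton cell when the callee
             * is known at compile time, a predicted cell when it must be
             * resolved at run time, and a normal/hot cell for branches.
             */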
            if (isInvoke) {
                /* Monomorphic callee */
                if (callee) {
                    /* JNI call doesn't need a chaining cell */
                    if (!dvmIsNativeMethod(callee)) {
                        newBB = dvmCompilerNewBB(kChainingCellInvokeSingleton,
                                                 numBlocks++);
                        newBB->startOffset = 0;
                        newBB->containingMethod = callee;
                    }
                /* Will resolve at runtime */
                } else {
                    newBB = dvmCompilerNewBB(kChainingCellInvokePredicted,
                                             numBlocks++);
                    newBB->startOffset = 0;
                }
            /* For unconditional branches, request a hot chaining cell */
            } else {
#if !defined(WITH_SELF_VERIFICATION)
                newBB = dvmCompilerNewBB(dexIsGoto(flags) ?
                                             kChainingCellHot :
                                             kChainingCellNormal,
                                         numBlocks++);
                newBB->startOffset = targetOffset;
#else
                /* Handle branches that branch back into the block */
                if (targetOffset >= curBB->firstMIRInsn->offset &&
                    targetOffset <= curBB->lastMIRInsn->offset) {
                    newBB = dvmCompilerNewBB(kChainingCellBackwardBranch,
                                             numBlocks++);
                } else {
                    newBB = dvmCompilerNewBB(dexIsGoto(flags) ?
                                                 kChainingCellHot :
                                                 kChainingCellNormal,
                                             numBlocks++);
                }
                newBB->startOffset = targetOffset;
#endif
            }
            if (newBB) {
                curBB->taken = newBB;
                dvmCompilerSetBit(newBB->predecessors, curBB->id);
                dvmInsertGrowableList(blockList, (intptr_t) newBB);
            }
        }
    }

    /* Now create a special block to host PC reconstruction code */
    curBB = dvmCompilerNewBB(kPCReconstruction, numBlocks++);
    dvmInsertGrowableList(blockList, (intptr_t) curBB);

    /* And one final block that publishes the PC and raises the exception */
    curBB = dvmCompilerNewBB(kExceptionHandling, numBlocks++);
    dvmInsertGrowableList(blockList, (intptr_t) curBB);
    cUnit.puntBlock = curBB;

    if (cUnit.printMe) {
        char* signature =
            dexProtoCopyMethodDescriptor(&desc->method->prototype);
        LOGD("TRACEINFO (%d): 0x%08x %s%s.%s %#x %d of %d, %d blocks",
             compilationId,
             (intptr_t) desc->method->insns,
             desc->method->clazz->descriptor,
             desc->method->name,
             signature,
             desc->trace[0].info.frag.startOffset,
             traceSize,
             dexCode->insnsSize,
             numBlocks);
        free(signature);
    }

    cUnit.numBlocks = numBlocks;

    /* Set the instruction set to use (NOTE: later components may change it) */
    cUnit.instructionSet = dvmCompilerInstructionSet();

    /* Inline transformation @ the MIR level */
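    /*
     * Only worth attempting when the trace actually contains an invoke;
     * dvmCompilerInlineMIR works on the MIR form and typically only small
     * callees (e.g. simple getters/setters) are inlined.
     */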
    if (cUnit.hasInvoke && !(gDvmJit.disableOpt & (1 << kMethodInlining))) {
        dvmCompilerInlineMIR(&cUnit, info);
    }

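    /* The Dalvik register (vreg) count drives the SSA renaming below */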
    cUnit.numDalvikRegisters = cUnit.method->registersSize;

    /* Preparation for SSA conversion */
    dvmInitializeSSAConversion(&cUnit);

    dvmCompilerNonLoopAnalysis(&cUnit);

    dvmCompilerInitializeRegAlloc(&cUnit);  // Needs to happen after SSA naming

    if (cUnit.printMe) {
        dvmCompilerDumpCompilationUnit(&cUnit);
    }

    /* Allocate registers using a simple local allocation scheme */
    dvmCompilerLocalRegAlloc(&cUnit);

    /* Convert MIR to LIR, etc. */
    dvmCompilerMIR2LIR(&cUnit);

    /* Convert LIR into machine code. Loop for recoverable retries */
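    /*
     * kRetryAll asks for the same LIR to be reassembled (e.g. after branch
     * fixups); kRetryHalve, handled further below, means the trace is too
     * large and must be rebuilt with half the instruction budget.
     */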
    do {
        dvmCompilerAssembleLIR(&cUnit, info);
        cUnit.assemblerRetries++;
        if (cUnit.printMe && cUnit.assemblerStatus != kSuccess)
            LOGD("Assembler abort #%d on %d", cUnit.assemblerRetries,
                 cUnit.assemblerStatus);
    } while (cUnit.assemblerStatus == kRetryAll);

    if (cUnit.printMe) {
        LOGD("Trace Dalvik PC: %p", startCodePtr);
        dvmCompilerCodegenDump(&cUnit);
        LOGD("End %s%s, %d Dalvik instructions",
             desc->method->clazz->descriptor, desc->method->name,
             cUnit.numInsts);
    }

    if (cUnit.assemblerStatus == kRetryHalve) {
        /* Reset the compiler resource pool before retry */
        dvmCompilerArenaReset();

        /* Halve the instruction count and start from the top */
        return dvmCompileTrace(desc, cUnit.numInsts / 2, info, bailPtr,
                               optHints);
    }

    /*
     * If this trace uses class objects as constants,
     * dvmJitInstallClassObjectPointers will switch the thread state
     * to running and look up the class pointers using the descriptor/loader
     * tuple stored in the callsite info structure. We need to make this window
     * as short as possible since it blocks GC.
     */
    if (cUnit.hasClassLiterals && info->codeAddress) {
        dvmJitInstallClassObjectPointers(&cUnit, (char *) info->codeAddress);
    }

    /*
     * Since the callsite info is allocated from the arena, delay the reset
     * until the class pointers are resolved.
     */
    dvmCompilerArenaReset();

    assert(cUnit.assemblerStatus == kSuccess);
#if defined(WITH_JIT_TUNING)
    methodStats->nativeSize += cUnit.totalSize;
#endif

    return info->codeAddress != NULL;
}
