1 /*
2 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 *
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
14 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
15 * its contributors may be used to endorse or promote products derived
16 * from this software without specific prior written permission.
17 *
18 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
19 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
22 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
23 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
24 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
25 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 */
29
30 #include "config.h"
31 #include "BytecodeGenerator.h"
32
33 #include "BatchedTransitionOptimizer.h"
34 #include "JSFunction.h"
35 #include "Interpreter.h"
36 #include "UString.h"
37
38 using namespace std;
39
40 namespace JSC {
41
42 /*
43 The layout of a register frame looks like this:
44
45 For
46
47 function f(x, y) {
48 var v1;
49 function g() { }
50 var v2;
51 return (x) * (y);
52 }
53
54 assuming (x) and (y) generated temporaries t1 and t2, you would have
55
56 ------------------------------------
57 | x | y | g | v2 | v1 | t1 | t2 | <-- value held
58 ------------------------------------
59 | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
60 ------------------------------------
61 | params->|<-locals | temps->
62
63 Because temporary registers are allocated in a stack-like fashion, we
64 can reclaim them with a simple popping algorithm. The same goes for labels.
65 (We never reclaim parameter or local registers, because parameters and
66 locals are DontDelete.)
67
68 The register layout before a function call looks like this:
69
70 For
71
72 function f(x, y)
73 {
74 }
75
76 f(1);
77
78 >                        <------------------------------
79 <                        >  reserved: call frame  |  1 | <-- value held
80 >         >snip<         <------------------------------
81 <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
82 >                        <------------------------------
83 | params->|<-locals      | temps->
84
85 The call instruction fills in the "call frame" registers. It also pads
86 missing arguments at the end of the call:
87
88 >                        <-----------------------------------
89 <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
90 >         >snip<         <-----------------------------------
91 <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
92 >                        <-----------------------------------
93 | params->|<-locals      | temps->
94
95 After filling in missing arguments, the call instruction sets up the new
96 stack frame to overlap the end of the old stack frame:
97
98 |---------------------------------->                        <
99 |  reserved: call frame   |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
100 |---------------------------------->         >snip<         <
101 | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
102 |---------------------------------->                        <
103 |                                   | params->|<-locals     | temps->
104
105 That way, arguments are "copied" into the callee's stack frame for free.
106
107 If the caller supplies too many arguments, this trick doesn't work. The
108 extra arguments protrude into space reserved for locals and temporaries.
109 In that case, the call instruction makes a real copy of the call frame header,
110 along with just the arguments expected by the callee, leaving the original
111 call frame header and arguments behind. (The call instruction can't just discard
112 extra arguments, because the "arguments" object may access them later.)
113 This copying strategy ensures that all named values will be at the indices
114 expected by the callee.
115 */
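// Note: the diagrams above are simplified. In the code below, parameter
// registers also account for the call frame header and the implicit "this"
// argument: addParameter() starts handing out indices at
// -RegisterFile::CallFrameHeaderSize - parameterCount - 1, with "this"
// occupying the first of those slots, while addVar() allocates locals and
// temporaries from index 0 upward in m_calleeRegisters.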
116
117 #ifndef NDEBUG
118 static bool s_dumpsGeneratedCode = false;
119 #endif
120
121 void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
122 {
123 #ifndef NDEBUG
124 s_dumpsGeneratedCode = dumpsGeneratedCode;
125 #else
126 UNUSED_PARAM(dumpsGeneratedCode);
127 #endif
128 }
129
130 bool BytecodeGenerator::dumpsGeneratedCode()
131 {
132 #ifndef NDEBUG
133 return s_dumpsGeneratedCode;
134 #else
135 return false;
136 #endif
137 }
138
139 void BytecodeGenerator::generate()
140 {
141 m_codeBlock->setThisRegister(m_thisRegister.index());
142
143 m_scopeNode->emitBytecode(*this);
144
145 #ifndef NDEBUG
146 m_codeBlock->setInstructionCount(m_codeBlock->instructions().size());
147
148 if (s_dumpsGeneratedCode)
149 m_codeBlock->dump(m_scopeChain->globalObject()->globalExec());
150 #endif
151
152 if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
153 symbolTable().clear();
154
155 m_codeBlock->setIsNumericCompareFunction(instructions() == m_globalData->numericCompareFunction(m_scopeChain->globalObject()->globalExec()));
156
157 #if !ENABLE(OPCODE_SAMPLING)
158 if (!m_regeneratingForExceptionInfo && (m_codeType == FunctionCode || m_codeType == EvalCode))
159 m_codeBlock->clearExceptionInfo();
160 #endif
161
162 m_codeBlock->shrinkToFit();
163 }
164
165 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
166 {
167 int index = m_calleeRegisters.size();
168 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
169 pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);
170
171 if (!result.second) {
172 r0 = &registerFor(result.first->second.getIndex());
173 return false;
174 }
175
176 ++m_codeBlock->m_numVars;
177 r0 = newRegister();
178 return true;
179 }
180
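// Global variables grow downward: each new global takes the next negative index
// (m_nextGlobalIndex counts down) and is recorded in m_globals offset by
// m_globalVarStorageOffset. Returns false if the identifier was already
// registered, in which case r0 refers to the existing slot.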
181 bool BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
182 {
183 int index = m_nextGlobalIndex;
184 SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
185 pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);
186
187 if (!result.second)
188 index = result.first->second.getIndex();
189 else {
190 --m_nextGlobalIndex;
191 m_globals.append(index + m_globalVarStorageOffset);
192 }
193
194 r0 = &registerFor(index);
195 return result.second;
196 }
197
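// Reserves 'count' registers at the current end of the callee register area so
// that constants occupy a contiguous, predictable range; addConstant(JSValuePtr)
// hands these registers out one at a time via m_nextConstantIndex.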
198 void BytecodeGenerator::allocateConstants(size_t count)
199 {
200 m_codeBlock->m_numConstants = count;
201 if (!count)
202 return;
203
204 m_nextConstantIndex = m_calleeRegisters.size();
205
206 for (size_t i = 0; i < count; ++i)
207 newRegister();
208 m_lastConstant = &m_calleeRegisters.last();
209 }
210
211 BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock)
212 : m_shouldEmitDebugHooks(!!debugger)
213 , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
214 , m_scopeChain(&scopeChain)
215 , m_symbolTable(symbolTable)
216 , m_scopeNode(programNode)
217 , m_codeBlock(codeBlock)
218 , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
219 , m_finallyDepth(0)
220 , m_dynamicScopeDepth(0)
221 , m_baseScopeDepth(0)
222 , m_codeType(GlobalCode)
223 , m_nextGlobalIndex(-1)
224 , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
225 , m_lastOpcodeID(op_end)
226 , m_emitNodeDepth(0)
227 , m_regeneratingForExceptionInfo(false)
228 , m_codeBlockBeingRegeneratedFrom(0)
229 {
230 if (m_shouldEmitDebugHooks)
231 m_codeBlock->setNeedsFullScopeChain(true);
232
233 emitOpcode(op_enter);
234 codeBlock->setGlobalData(m_globalData);
235
236 // FIXME: Move code that modifies the global object to Interpreter::execute.
237
238 m_codeBlock->m_numParameters = 1; // Allocate space for "this"
239
240 JSGlobalObject* globalObject = scopeChain.globalObject();
241 ExecState* exec = globalObject->globalExec();
242 RegisterFile* registerFile = &exec->globalData().interpreter->registerFile();
243
244 // Shift register indexes in generated code to elide registers allocated by intermediate stack frames.
245 m_globalVarStorageOffset = -RegisterFile::CallFrameHeaderSize - m_codeBlock->m_numParameters - registerFile->size();
246
247 // Add previously defined symbols to bookkeeping.
248 m_globals.grow(symbolTable->size());
249 SymbolTable::iterator end = symbolTable->end();
250 for (SymbolTable::iterator it = symbolTable->begin(); it != end; ++it)
251 registerFor(it->second.getIndex()).setIndex(it->second.getIndex() + m_globalVarStorageOffset);
252
253 BatchedTransitionOptimizer optimizer(globalObject);
254
255 const VarStack& varStack = programNode->varStack();
256 const FunctionStack& functionStack = programNode->functionStack();
257 bool canOptimizeNewGlobals = symbolTable->size() + functionStack.size() + varStack.size() < registerFile->maxGlobals();
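// Two strategies for program-level declarations: if everything fits within the
// register file's reserved global area, declare functions and vars as indexed
// global registers, so later lookups can use indexed access
// (op_get_global_var / op_put_global_var); otherwise fall back to storing them
// as ordinary properties on the global object.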
258 if (canOptimizeNewGlobals) {
259 // Shift new symbols so they get stored prior to existing symbols.
260 m_nextGlobalIndex -= symbolTable->size();
261
262 for (size_t i = 0; i < functionStack.size(); ++i) {
263 FuncDeclNode* funcDecl = functionStack[i].get();
264 globalObject->removeDirect(funcDecl->m_ident); // Make sure our new function is not shadowed by an old property.
265 emitNewFunction(addGlobalVar(funcDecl->m_ident, false), funcDecl);
266 }
267
268 Vector<RegisterID*, 32> newVars;
269 for (size_t i = 0; i < varStack.size(); ++i)
270 if (!globalObject->hasProperty(exec, varStack[i].first))
271 newVars.append(addGlobalVar(varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant));
272
273 allocateConstants(programNode->neededConstants());
274
275 for (size_t i = 0; i < newVars.size(); ++i)
276 emitLoad(newVars[i], jsUndefined());
277 } else {
278 for (size_t i = 0; i < functionStack.size(); ++i) {
279 FuncDeclNode* funcDecl = functionStack[i].get();
280 globalObject->putWithAttributes(exec, funcDecl->m_ident, funcDecl->makeFunction(exec, scopeChain.node()), DontDelete);
281 }
282 for (size_t i = 0; i < varStack.size(); ++i) {
283 if (globalObject->hasProperty(exec, varStack[i].first))
284 continue;
285 int attributes = DontDelete;
286 if (varStack[i].second & DeclarationStacks::IsConstant)
287 attributes |= ReadOnly;
288 globalObject->putWithAttributes(exec, varStack[i].first, jsUndefined(), attributes);
289 }
290
291 allocateConstants(programNode->neededConstants());
292 }
293 }
294
295 BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock)
296 : m_shouldEmitDebugHooks(!!debugger)
297 , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
298 , m_scopeChain(&scopeChain)
299 , m_symbolTable(symbolTable)
300 , m_scopeNode(functionBody)
301 , m_codeBlock(codeBlock)
302 , m_finallyDepth(0)
303 , m_dynamicScopeDepth(0)
304 , m_baseScopeDepth(0)
305 , m_codeType(FunctionCode)
306 , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
307 , m_lastOpcodeID(op_end)
308 , m_emitNodeDepth(0)
309 , m_regeneratingForExceptionInfo(false)
310 , m_codeBlockBeingRegeneratedFrom(0)
311 {
312 if (m_shouldEmitDebugHooks)
313 m_codeBlock->setNeedsFullScopeChain(true);
314
315 codeBlock->setGlobalData(m_globalData);
316
317 bool usesArguments = functionBody->usesArguments();
318 codeBlock->setUsesArguments(usesArguments);
319 if (usesArguments) {
320 m_argumentsRegister.setIndex(RegisterFile::OptionalCalleeArguments);
321 addVar(propertyNames().arguments, false);
322 }
323
324 if (m_codeBlock->needsFullScopeChain()) {
325 ++m_codeBlock->m_numVars;
326 m_activationRegisterIndex = newRegister()->index();
327 emitOpcode(op_enter_with_activation);
328 instructions().append(m_activationRegisterIndex);
329 } else
330 emitOpcode(op_enter);
331
332 if (usesArguments)
333 emitOpcode(op_create_arguments);
334
335 const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
336 for (size_t i = 0; i < functionStack.size(); ++i) {
337 FuncDeclNode* funcDecl = functionStack[i].get();
338 const Identifier& ident = funcDecl->m_ident;
339 m_functions.add(ident.ustring().rep());
340 emitNewFunction(addVar(ident, false), funcDecl);
341 }
342
343 const DeclarationStacks::VarStack& varStack = functionBody->varStack();
344 for (size_t i = 0; i < varStack.size(); ++i)
345 addVar(varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);
346
347 const Identifier* parameters = functionBody->parameters();
348 size_t parameterCount = functionBody->parameterCount();
349 m_nextParameterIndex = -RegisterFile::CallFrameHeaderSize - parameterCount - 1;
350 m_parameters.grow(1 + parameterCount); // reserve space for "this"
351
352 // Add "this" as a parameter
353 m_thisRegister.setIndex(m_nextParameterIndex);
354 ++m_nextParameterIndex;
355 ++m_codeBlock->m_numParameters;
356
357 if (functionBody->usesThis() || m_shouldEmitDebugHooks) {
358 emitOpcode(op_convert_this);
359 instructions().append(m_thisRegister.index());
360 }
361
362 for (size_t i = 0; i < parameterCount; ++i)
363 addParameter(parameters[i]);
364
365 allocateConstants(functionBody->neededConstants());
366 }
367
368 BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock)
369 : m_shouldEmitDebugHooks(!!debugger)
370 , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
371 , m_scopeChain(&scopeChain)
372 , m_symbolTable(symbolTable)
373 , m_scopeNode(evalNode)
374 , m_codeBlock(codeBlock)
375 , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
376 , m_finallyDepth(0)
377 , m_dynamicScopeDepth(0)
378 , m_baseScopeDepth(codeBlock->baseScopeDepth())
379 , m_codeType(EvalCode)
380 , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
381 , m_lastOpcodeID(op_end)
382 , m_emitNodeDepth(0)
383 , m_regeneratingForExceptionInfo(false)
384 , m_codeBlockBeingRegeneratedFrom(0)
385 {
386 if (m_shouldEmitDebugHooks || m_baseScopeDepth)
387 m_codeBlock->setNeedsFullScopeChain(true);
388
389 emitOpcode(op_enter);
390 codeBlock->setGlobalData(m_globalData);
391 m_codeBlock->m_numParameters = 1; // Allocate space for "this"
392
393 allocateConstants(evalNode->neededConstants());
394 }
395
396 RegisterID* BytecodeGenerator::addParameter(const Identifier& ident)
397 {
398 // Parameters overwrite var declarations, but not function declarations.
399 RegisterID* result = 0;
400 UString::Rep* rep = ident.ustring().rep();
401 if (!m_functions.contains(rep)) {
402 symbolTable().set(rep, m_nextParameterIndex);
403 RegisterID& parameter = registerFor(m_nextParameterIndex);
404 parameter.setIndex(m_nextParameterIndex);
405 result = &parameter;
406 }
407
408 // To maintain the calling convention, we have to allocate unique space for
409 // each parameter, even if the parameter doesn't make it into the symbol table.
410 ++m_nextParameterIndex;
411 ++m_codeBlock->m_numParameters;
412 return result;
413 }
414
415 RegisterID* BytecodeGenerator::registerFor(const Identifier& ident)
416 {
417 if (ident == propertyNames().thisIdentifier)
418 return &m_thisRegister;
419
420 if (!shouldOptimizeLocals())
421 return 0;
422
423 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
424 if (entry.isNull())
425 return 0;
426
427 return &registerFor(entry.getIndex());
428 }
429
430 RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident)
431 {
432 if (m_codeType == EvalCode)
433 return 0;
434
435 SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
436 ASSERT(!entry.isNull());
437
438 return &registerFor(entry.getIndex());
439 }
440
441 bool BytecodeGenerator::isLocal(const Identifier& ident)
442 {
443 if (ident == propertyNames().thisIdentifier)
444 return true;
445
446 return shouldOptimizeLocals() && symbolTable().contains(ident.ustring().rep());
447 }
448
449 bool BytecodeGenerator::isLocalConstant(const Identifier& ident)
450 {
451 return symbolTable().get(ident.ustring().rep()).isReadOnly();
452 }
453
454 RegisterID* BytecodeGenerator::newRegister()
455 {
456 m_calleeRegisters.append(m_calleeRegisters.size());
457 m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
458 return &m_calleeRegisters.last();
459 }
460
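// Temporaries are reclaimed in the stack-like fashion described in the comment
// at the top of this file: any registers at the end of m_calleeRegisters whose
// ref count has dropped to zero are popped before a new one is allocated.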
461 RegisterID* BytecodeGenerator::newTemporary()
462 {
463 // Reclaim free register IDs.
464 while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
465 m_calleeRegisters.removeLast();
466
467 RegisterID* result = newRegister();
468 result->setTemporary();
469 return result;
470 }
471
472 RegisterID* BytecodeGenerator::highestUsedRegister()
473 {
474 size_t count = m_codeBlock->m_numCalleeRegisters;
475 while (m_calleeRegisters.size() < count)
476 newRegister();
477 return &m_calleeRegisters.last();
478 }
479
480 PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
481 {
482 // Reclaim free label scopes.
483 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
484 m_labelScopes.removeLast();
485
486 // Allocate new label scope.
487 LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : 0); // Only loops have continue targets.
488 m_labelScopes.append(scope);
489 return &m_labelScopes.last();
490 }
491
492 PassRefPtr<Label> BytecodeGenerator::newLabel()
493 {
494 // Reclaim free label IDs.
495 while (m_labels.size() && !m_labels.last().refCount())
496 m_labels.removeLast();
497
498 // Allocate new label ID.
499 m_labels.append(m_codeBlock);
500 return &m_labels.last();
501 }
502
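// Binds 'l0' to the current instruction offset and records it as a jump target.
// Resetting m_lastOpcodeID to op_end below keeps the peephole optimizations in
// emitJumpIfTrue/emitJumpIfFalse/emitEqualityOp from fusing across a label,
// since the preceding instruction may also be reached by a jump.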
503 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
504 {
505 unsigned newLabelIndex = instructions().size();
506 l0->setLocation(newLabelIndex);
507
508 if (m_codeBlock->numberOfJumpTargets()) {
509 unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
510 ASSERT(lastLabelIndex <= newLabelIndex);
511 if (newLabelIndex == lastLabelIndex) {
512 // Peephole optimizations have already been disabled by emitting the last label
513 return l0;
514 }
515 }
516
517 m_codeBlock->addJumpTarget(newLabelIndex);
518
519 // This disables peephole optimizations when an instruction is a jump target
520 m_lastOpcodeID = op_end;
521 return l0;
522 }
523
524 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
525 {
526 instructions().append(globalData()->interpreter->getOpcode(opcodeID));
527 m_lastOpcodeID = opcodeID;
528 }
529
530 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
531 {
532 ASSERT(instructions().size() >= 4);
533 size_t size = instructions().size();
534 dstIndex = instructions().at(size - 3).u.operand;
535 src1Index = instructions().at(size - 2).u.operand;
536 src2Index = instructions().at(size - 1).u.operand;
537 }
538
539 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
540 {
541 ASSERT(instructions().size() >= 3);
542 size_t size = instructions().size();
543 dstIndex = instructions().at(size - 2).u.operand;
544 srcIndex = instructions().at(size - 1).u.operand;
545 }
546
547 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
548 {
549 ASSERT(instructions().size() >= 4);
550 instructions().shrink(instructions().size() - 4);
551 }
552
553 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
554 {
555 ASSERT(instructions().size() >= 3);
556 instructions().shrink(instructions().size() - 3);
557 }
558
559 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
560 {
561 emitOpcode(target->isForward() ? op_jmp : op_loop);
562 instructions().append(target->offsetFrom(instructions().size()));
563 return target;
564 }
565
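// Peephole optimization: if the condition was just produced by op_less,
// op_lesseq, op_eq_null or op_neq_null into a dead temporary, rewind that
// instruction and fuse the comparison with the jump (e.g. the "i < length"
// test of a loop becomes a single op_loop_if_less). op_less/op_lesseq fuse
// only with backward (loop) jumps; op_eq_null/op_neq_null only with forward jumps.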
566 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
567 {
568 if (m_lastOpcodeID == op_less && !target->isForward()) {
569 int dstIndex;
570 int src1Index;
571 int src2Index;
572
573 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
574
575 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
576 rewindBinaryOp();
577 emitOpcode(op_loop_if_less);
578 instructions().append(src1Index);
579 instructions().append(src2Index);
580 instructions().append(target->offsetFrom(instructions().size()));
581 return target;
582 }
583 } else if (m_lastOpcodeID == op_lesseq && !target->isForward()) {
584 int dstIndex;
585 int src1Index;
586 int src2Index;
587
588 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
589
590 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
591 rewindBinaryOp();
592 emitOpcode(op_loop_if_lesseq);
593 instructions().append(src1Index);
594 instructions().append(src2Index);
595 instructions().append(target->offsetFrom(instructions().size()));
596 return target;
597 }
598 } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
599 int dstIndex;
600 int srcIndex;
601
602 retrieveLastUnaryOp(dstIndex, srcIndex);
603
604 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
605 rewindUnaryOp();
606 emitOpcode(op_jeq_null);
607 instructions().append(srcIndex);
608 instructions().append(target->offsetFrom(instructions().size()));
609 return target;
610 }
611 } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
612 int dstIndex;
613 int srcIndex;
614
615 retrieveLastUnaryOp(dstIndex, srcIndex);
616
617 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
618 rewindUnaryOp();
619 emitOpcode(op_jneq_null);
620 instructions().append(srcIndex);
621 instructions().append(target->offsetFrom(instructions().size()));
622 return target;
623 }
624 }
625
626 emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
627 instructions().append(cond->index());
628 instructions().append(target->offsetFrom(instructions().size()));
629 return target;
630 }
631
632 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
633 {
634 ASSERT(target->isForward());
635
636 if (m_lastOpcodeID == op_less) {
637 int dstIndex;
638 int src1Index;
639 int src2Index;
640
641 retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
642
643 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
644 rewindBinaryOp();
645 emitOpcode(op_jnless);
646 instructions().append(src1Index);
647 instructions().append(src2Index);
648 instructions().append(target->offsetFrom(instructions().size()));
649 return target;
650 }
651 } else if (m_lastOpcodeID == op_not) {
652 int dstIndex;
653 int srcIndex;
654
655 retrieveLastUnaryOp(dstIndex, srcIndex);
656
657 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
658 rewindUnaryOp();
659 emitOpcode(op_jtrue);
660 instructions().append(srcIndex);
661 instructions().append(target->offsetFrom(instructions().size()));
662 return target;
663 }
664 } else if (m_lastOpcodeID == op_eq_null) {
665 int dstIndex;
666 int srcIndex;
667
668 retrieveLastUnaryOp(dstIndex, srcIndex);
669
670 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
671 rewindUnaryOp();
672 emitOpcode(op_jneq_null);
673 instructions().append(srcIndex);
674 instructions().append(target->offsetFrom(instructions().size()));
675 return target;
676 }
677 } else if (m_lastOpcodeID == op_neq_null) {
678 int dstIndex;
679 int srcIndex;
680
681 retrieveLastUnaryOp(dstIndex, srcIndex);
682
683 if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
684 rewindUnaryOp();
685 emitOpcode(op_jeq_null);
686 instructions().append(srcIndex);
687 instructions().append(target->offsetFrom(instructions().size()));
688 return target;
689 }
690 }
691
692 emitOpcode(op_jfalse);
693 instructions().append(cond->index());
694 instructions().append(target->offsetFrom(instructions().size()));
695 return target;
696 }
697
698 unsigned BytecodeGenerator::addConstant(FuncDeclNode* n)
699 {
700 // No need to explicitly unique function body nodes -- they're unique already.
701 return m_codeBlock->addFunction(n);
702 }
703
704 unsigned BytecodeGenerator::addConstant(FuncExprNode* n)
705 {
706 // No need to explicitly unique function expression nodes -- they're unique already.
707 return m_codeBlock->addFunctionExpression(n);
708 }
709
710 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
711 {
712 UString::Rep* rep = ident.ustring().rep();
713 pair<IdentifierMap::iterator, bool> result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
714 if (result.second) // new entry
715 m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
716
717 return result.first->second;
718 }
719
720 RegisterID* BytecodeGenerator::addConstant(JSValuePtr v)
721 {
722 pair<JSValueMap::iterator, bool> result = m_jsValueMap.add(JSValuePtr::encode(v), m_nextConstantIndex);
723 if (result.second) {
724 RegisterID& constant = m_calleeRegisters[m_nextConstantIndex];
725
726 ++m_nextConstantIndex;
727
728 m_codeBlock->addConstantRegister(JSValuePtr(v));
729 return &constant;
730 }
731
732 return &registerFor(result.first->second);
733 }
734
735 unsigned BytecodeGenerator::addUnexpectedConstant(JSValuePtr v)
736 {
737 return m_codeBlock->addUnexpectedConstant(v);
738 }
739
740 unsigned BytecodeGenerator::addRegExp(RegExp* r)
741 {
742 return m_codeBlock->addRegExp(r);
743 }
744
745 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
746 {
747 emitOpcode(op_mov);
748 instructions().append(dst->index());
749 instructions().append(src->index());
750 return dst;
751 }
752
753 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
754 {
755 emitOpcode(opcodeID);
756 instructions().append(dst->index());
757 instructions().append(src->index());
758 return dst;
759 }
760
761 RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
762 {
763 emitOpcode(op_pre_inc);
764 instructions().append(srcDst->index());
765 return srcDst;
766 }
767
768 RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
769 {
770 emitOpcode(op_pre_dec);
771 instructions().append(srcDst->index());
772 return srcDst;
773 }
774
775 RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
776 {
777 emitOpcode(op_post_inc);
778 instructions().append(dst->index());
779 instructions().append(srcDst->index());
780 return dst;
781 }
782
783 RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
784 {
785 emitOpcode(op_post_dec);
786 instructions().append(dst->index());
787 instructions().append(srcDst->index());
788 return dst;
789 }
790
791 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
792 {
793 emitOpcode(opcodeID);
794 instructions().append(dst->index());
795 instructions().append(src1->index());
796 instructions().append(src2->index());
797
798 if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
799 opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub) {
800 instructions().append(types.toInt());
801 }
802
803 return dst;
804 }
805
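// Peephole optimization: a typeof comparison against a string literal, e.g.
//     typeof x == "undefined"
// is collapsed into a single type-check opcode (op_is_undefined, op_is_boolean,
// op_is_number, op_is_string, op_is_object or op_is_function) instead of
// emitting op_typeof followed by an equality op.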
806 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
807 {
808 if (m_lastOpcodeID == op_typeof) {
809 int dstIndex;
810 int srcIndex;
811
812 retrieveLastUnaryOp(dstIndex, srcIndex);
813
814 if (src1->index() == dstIndex
815 && src1->isTemporary()
816 && m_codeBlock->isConstantRegisterIndex(src2->index())
817 && m_codeBlock->constantRegister(src2->index() - m_codeBlock->m_numVars).jsValue(m_scopeChain->globalObject()->globalExec()).isString()) {
818 const UString& value = asString(m_codeBlock->constantRegister(src2->index() - m_codeBlock->m_numVars).jsValue(m_scopeChain->globalObject()->globalExec()))->value();
819 if (value == "undefined") {
820 rewindUnaryOp();
821 emitOpcode(op_is_undefined);
822 instructions().append(dst->index());
823 instructions().append(srcIndex);
824 return dst;
825 }
826 if (value == "boolean") {
827 rewindUnaryOp();
828 emitOpcode(op_is_boolean);
829 instructions().append(dst->index());
830 instructions().append(srcIndex);
831 return dst;
832 }
833 if (value == "number") {
834 rewindUnaryOp();
835 emitOpcode(op_is_number);
836 instructions().append(dst->index());
837 instructions().append(srcIndex);
838 return dst;
839 }
840 if (value == "string") {
841 rewindUnaryOp();
842 emitOpcode(op_is_string);
843 instructions().append(dst->index());
844 instructions().append(srcIndex);
845 return dst;
846 }
847 if (value == "object") {
848 rewindUnaryOp();
849 emitOpcode(op_is_object);
850 instructions().append(dst->index());
851 instructions().append(srcIndex);
852 return dst;
853 }
854 if (value == "function") {
855 rewindUnaryOp();
856 emitOpcode(op_is_function);
857 instructions().append(dst->index());
858 instructions().append(srcIndex);
859 return dst;
860 }
861 }
862 }
863
864 emitOpcode(opcodeID);
865 instructions().append(dst->index());
866 instructions().append(src1->index());
867 instructions().append(src2->index());
868 return dst;
869 }
870
871 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
872 {
873 return emitLoad(dst, jsBoolean(b));
874 }
875
876 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
877 {
878 // FIXME: Our hash tables won't hold infinity, so we make a new JSNumberCell each time.
879 // Later we can do the extra work to handle that like the other cases.
880 if (number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
881 return emitLoad(dst, jsNumber(globalData(), number));
882 JSValuePtr& valueInMap = m_numberMap.add(number, noValue()).first->second;
883 if (!valueInMap)
884 valueInMap = jsNumber(globalData(), number);
885 return emitLoad(dst, valueInMap);
886 }
887
888 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
889 {
890 JSString*& stringInMap = m_stringMap.add(identifier.ustring().rep(), 0).first->second;
891 if (!stringInMap)
892 stringInMap = jsOwnedString(globalData(), identifier.ustring());
893 return emitLoad(dst, JSValuePtr(stringInMap));
894 }
895
896 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValuePtr v)
897 {
898 RegisterID* constantID = addConstant(v);
899 if (dst)
900 return emitMove(dst, constantID);
901 return constantID;
902 }
903
904 RegisterID* BytecodeGenerator::emitUnexpectedLoad(RegisterID* dst, bool b)
905 {
906 emitOpcode(op_unexpected_load);
907 instructions().append(dst->index());
908 instructions().append(addUnexpectedConstant(jsBoolean(b)));
909 return dst;
910 }
911
912 RegisterID* BytecodeGenerator::emitUnexpectedLoad(RegisterID* dst, double d)
913 {
914 emitOpcode(op_unexpected_load);
915 instructions().append(dst->index());
916 instructions().append(addUnexpectedConstant(jsNumber(globalData(), d)));
917 return dst;
918 }
919
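// Attempts to resolve 'property' statically against the scope chain. Returns
// true with 'index' set to the variable's register offset and 'stackDepth' set
// to the number of scopes to skip; 'globalObject' is additionally set when
// resolution lands on the outermost (global) scope. A true return with
// index == missingSymbolMarker() means only a partial scope skip is possible.
// Returns false when the lookup cannot be optimized at all (e.g. for
// "arguments", when canOptimizeNonLocals() is false, or when writing to a
// read-only entry).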
920 bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, JSObject*& globalObject)
921 {
922 // Cases where we cannot statically optimize the lookup.
923 if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
924 stackDepth = 0;
925 index = missingSymbolMarker();
926
927 if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
928 ScopeChainIterator iter = m_scopeChain->begin();
929 globalObject = *iter;
930 ASSERT((++iter) == m_scopeChain->end());
931 }
932 return false;
933 }
934
935 size_t depth = 0;
936
937 ScopeChainIterator iter = m_scopeChain->begin();
938 ScopeChainIterator end = m_scopeChain->end();
939 for (; iter != end; ++iter, ++depth) {
940 JSObject* currentScope = *iter;
941 if (!currentScope->isVariableObject())
942 break;
943 JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
944 SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.ustring().rep());
945
946 // Found the property
947 if (!entry.isNull()) {
948 if (entry.isReadOnly() && forWriting) {
949 stackDepth = 0;
950 index = missingSymbolMarker();
951 if (++iter == end)
952 globalObject = currentVariableObject;
953 return false;
954 }
955 stackDepth = depth;
956 index = entry.getIndex();
957 if (++iter == end)
958 globalObject = currentVariableObject;
959 return true;
960 }
961 if (currentVariableObject->isDynamicScope())
962 break;
963 }
964
965 // Can't locate the property but we're able to avoid a few lookups.
966 stackDepth = depth;
967 index = missingSymbolMarker();
968 JSObject* scope = *iter;
969 if (++iter == end)
970 globalObject = scope;
971 return true;
972 }
973
974 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
975 {
976 emitOpcode(op_instanceof);
977 instructions().append(dst->index());
978 instructions().append(value->index());
979 instructions().append(base->index());
980 instructions().append(basePrototype->index());
981 return dst;
982 }
983
984 RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
985 {
986 size_t depth = 0;
987 int index = 0;
988 JSObject* globalObject = 0;
989 if (!findScopedProperty(property, index, depth, false, globalObject) && !globalObject) {
990 // We can't optimise at all :-(
991 emitOpcode(op_resolve);
992 instructions().append(dst->index());
993 instructions().append(addConstant(property));
994 return dst;
995 }
996
997 if (globalObject) {
998 bool forceGlobalResolve = false;
999 if (m_regeneratingForExceptionInfo) {
1000 #if ENABLE(JIT)
1001 forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
1002 #else
1003 forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
1004 #endif
1005 }
1006
1007 if (index != missingSymbolMarker() && !forceGlobalResolve) {
1008 // Directly index the property lookup across multiple scopes.
1009 return emitGetScopedVar(dst, depth, index, globalObject);
1010 }
1011
1012 #if ENABLE(JIT)
1013 m_codeBlock->addGlobalResolveInfo(instructions().size());
1014 #else
1015 m_codeBlock->addGlobalResolveInstruction(instructions().size());
1016 #endif
1017 emitOpcode(op_resolve_global);
1018 instructions().append(dst->index());
1019 instructions().append(globalObject);
1020 instructions().append(addConstant(property));
1021 instructions().append(0);
1022 instructions().append(0);
1023 return dst;
1024 }
1025
1026 if (index != missingSymbolMarker()) {
1027 // Directly index the property lookup across multiple scopes.
1028 return emitGetScopedVar(dst, depth, index, globalObject);
1029 }
1030
1031 // In this case we are at least able to drop a few scope chains from the
1032 // lookup chain, although we still need to hash from then on.
1033 emitOpcode(op_resolve_skip);
1034 instructions().append(dst->index());
1035 instructions().append(addConstant(property));
1036 instructions().append(depth);
1037 return dst;
1038 }
1039
1040 RegisterID* BytecodeGenerator::emitGetScopedVar(RegisterID* dst, size_t depth, int index, JSValuePtr globalObject)
1041 {
1042 if (globalObject) {
1043 emitOpcode(op_get_global_var);
1044 instructions().append(dst->index());
1045 instructions().append(asCell(globalObject));
1046 instructions().append(index);
1047 return dst;
1048 }
1049
1050 emitOpcode(op_get_scoped_var);
1051 instructions().append(dst->index());
1052 instructions().append(index);
1053 instructions().append(depth);
1054 return dst;
1055 }
1056
1057 RegisterID* BytecodeGenerator::emitPutScopedVar(size_t depth, int index, RegisterID* value, JSValuePtr globalObject)
1058 {
1059 if (globalObject) {
1060 emitOpcode(op_put_global_var);
1061 instructions().append(asCell(globalObject));
1062 instructions().append(index);
1063 instructions().append(value->index());
1064 return value;
1065 }
1066 emitOpcode(op_put_scoped_var);
1067 instructions().append(index);
1068 instructions().append(depth);
1069 instructions().append(value->index());
1070 return value;
1071 }
1072
1073 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const Identifier& property)
1074 {
1075 emitOpcode(op_resolve_base);
1076 instructions().append(dst->index());
1077 instructions().append(addConstant(property));
1078 return dst;
1079 }
1080
1081 RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
1082 {
1083 emitOpcode(op_resolve_with_base);
1084 instructions().append(baseDst->index());
1085 instructions().append(propDst->index());
1086 instructions().append(addConstant(property));
1087 return baseDst;
1088 }
1089
1090 RegisterID* BytecodeGenerator::emitResolveFunction(RegisterID* baseDst, RegisterID* funcDst, const Identifier& property)
1091 {
1092 emitOpcode(op_resolve_func);
1093 instructions().append(baseDst->index());
1094 instructions().append(funcDst->index());
1095 instructions().append(addConstant(property));
1096 return baseDst;
1097 }
1098
1099 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1100 {
1101 #if ENABLE(JIT)
1102 m_codeBlock->addStructureStubInfo(StructureStubInfo(op_get_by_id));
1103 #else
1104 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1105 #endif
1106
1107 emitOpcode(op_get_by_id);
1108 instructions().append(dst->index());
1109 instructions().append(base->index());
1110 instructions().append(addConstant(property));
1111 instructions().append(0);
1112 instructions().append(0);
1113 instructions().append(0);
1114 instructions().append(0);
1115 return dst;
1116 }
1117
1118 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1119 {
1120 #if ENABLE(JIT)
1121 m_codeBlock->addStructureStubInfo(StructureStubInfo(op_put_by_id));
1122 #else
1123 m_codeBlock->addPropertyAccessInstruction(instructions().size());
1124 #endif
1125
1126 emitOpcode(op_put_by_id);
1127 instructions().append(base->index());
1128 instructions().append(addConstant(property));
1129 instructions().append(value->index());
1130 instructions().append(0);
1131 instructions().append(0);
1132 instructions().append(0);
1133 instructions().append(0);
1134 return value;
1135 }
1136
1137 RegisterID* BytecodeGenerator::emitPutGetter(RegisterID* base, const Identifier& property, RegisterID* value)
1138 {
1139 emitOpcode(op_put_getter);
1140 instructions().append(base->index());
1141 instructions().append(addConstant(property));
1142 instructions().append(value->index());
1143 return value;
1144 }
1145
1146 RegisterID* BytecodeGenerator::emitPutSetter(RegisterID* base, const Identifier& property, RegisterID* value)
1147 {
1148 emitOpcode(op_put_setter);
1149 instructions().append(base->index());
1150 instructions().append(addConstant(property));
1151 instructions().append(value->index());
1152 return value;
1153 }
1154
1155 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1156 {
1157 emitOpcode(op_del_by_id);
1158 instructions().append(dst->index());
1159 instructions().append(base->index());
1160 instructions().append(addConstant(property));
1161 return dst;
1162 }
1163
1164 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1165 {
1166 emitOpcode(op_get_by_val);
1167 instructions().append(dst->index());
1168 instructions().append(base->index());
1169 instructions().append(property->index());
1170 return dst;
1171 }
1172
1173 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1174 {
1175 emitOpcode(op_put_by_val);
1176 instructions().append(base->index());
1177 instructions().append(property->index());
1178 instructions().append(value->index());
1179 return value;
1180 }
1181
1182 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1183 {
1184 emitOpcode(op_del_by_val);
1185 instructions().append(dst->index());
1186 instructions().append(base->index());
1187 instructions().append(property->index());
1188 return dst;
1189 }
1190
1191 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1192 {
1193 emitOpcode(op_put_by_index);
1194 instructions().append(base->index());
1195 instructions().append(index);
1196 instructions().append(value->index());
1197 return value;
1198 }
1199
1200 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1201 {
1202 emitOpcode(op_new_object);
1203 instructions().append(dst->index());
1204 return dst;
1205 }
1206
1207 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements)
1208 {
1209 Vector<RefPtr<RegisterID>, 16> argv;
1210 for (ElementNode* n = elements; n; n = n->next()) {
1211 if (n->elision())
1212 break;
1213 argv.append(newTemporary());
1214 // op_new_array requires the initial values to be a sequential range of registers
1215 ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1216 emitNode(argv.last().get(), n->value());
1217 }
1218 emitOpcode(op_new_array);
1219 instructions().append(dst->index());
1220 instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1221 instructions().append(argv.size()); // argc
1222 return dst;
1223 }
1224
1225 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FuncDeclNode* n)
1226 {
1227 emitOpcode(op_new_func);
1228 instructions().append(dst->index());
1229 instructions().append(addConstant(n));
1230 return dst;
1231 }
1232
1233 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1234 {
1235 emitOpcode(op_new_regexp);
1236 instructions().append(dst->index());
1237 instructions().append(addRegExp(regExp));
1238 return dst;
1239 }
1240
1241
1242 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1243 {
1244 emitOpcode(op_new_func_exp);
1245 instructions().append(r0->index());
1246 instructions().append(addConstant(n));
1247 return r0;
1248 }
1249
1250 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
1251 {
1252 return emitCall(op_call, dst, func, thisRegister, argumentsNode, divot, startOffset, endOffset);
1253 }
1254
1255 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
1256 {
1257 return emitCall(op_call_eval, dst, func, thisRegister, argumentsNode, divot, startOffset, endOffset);
1258 }
1259
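// Call arguments must occupy a contiguous run of registers: argv[0] holds the
// 'this' value, followed by each argument in order, followed by
// CallFrameHeaderSize temporaries reserved for the callee's call frame header.
// The registerOffset operand (argv[0] + argument count + CallFrameHeaderSize)
// tells the callee where its frame begins, letting it overlap the caller's
// frame as described in the comment at the top of this file.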
1260 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
1261 {
1262 ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1263 ASSERT(func->refCount());
1264 ASSERT(thisRegister->refCount());
1265
1266 RegisterID* originalFunc = func;
1267 if (m_shouldEmitProfileHooks) {
1268 // If codegen decided to recycle func as this call's destination register,
1269 // we need to undo that optimization here so that func will still be around
1270 // for the sake of op_profile_did_call.
1271 if (dst == func) {
1272 RefPtr<RegisterID> movedThisRegister = emitMove(newTemporary(), thisRegister);
1273 RefPtr<RegisterID> movedFunc = emitMove(thisRegister, func);
1274
1275 thisRegister = movedThisRegister.release().releaseRef();
1276 func = movedFunc.release().releaseRef();
1277 }
1278 }
1279
1280 // Generate code for arguments.
1281 Vector<RefPtr<RegisterID>, 16> argv;
1282 argv.append(thisRegister);
1283 for (ArgumentListNode* n = argumentsNode->m_listNode.get(); n; n = n->m_next.get()) {
1284 argv.append(newTemporary());
1285 // op_call requires the arguments to be a sequential range of registers
1286 ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1287 emitNode(argv.last().get(), n);
1288 }
1289
1290 // Reserve space for call frame.
1291 Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
1292 for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
1293 callFrame.append(newTemporary());
1294
1295 if (m_shouldEmitProfileHooks) {
1296 emitOpcode(op_profile_will_call);
1297 instructions().append(func->index());
1298
1299 #if ENABLE(JIT)
1300 m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
1301 #endif
1302 }
1303
1304 emitExpressionInfo(divot, startOffset, endOffset);
1305
1306 #if ENABLE(JIT)
1307 m_codeBlock->addCallLinkInfo();
1308 #endif
1309
1310 // Emit call.
1311 emitOpcode(opcodeID);
1312 instructions().append(dst->index()); // dst
1313 instructions().append(func->index()); // func
1314 instructions().append(argv.size()); // argCount
1315 instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset
1316
1317 if (m_shouldEmitProfileHooks) {
1318 emitOpcode(op_profile_did_call);
1319 instructions().append(func->index());
1320
1321 if (dst == originalFunc) {
1322 thisRegister->deref();
1323 func->deref();
1324 }
1325 }
1326
1327 return dst;
1328 }
1329
1330 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1331 {
1332 if (m_codeBlock->needsFullScopeChain()) {
1333 emitOpcode(op_tear_off_activation);
1334 instructions().append(m_activationRegisterIndex);
1335 } else if (m_codeBlock->usesArguments() && m_codeBlock->m_numParameters > 1)
1336 emitOpcode(op_tear_off_arguments);
1337
1338 return emitUnaryNoDstOp(op_ret, src);
1339 }
1340
1341 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1342 {
1343 emitOpcode(opcodeID);
1344 instructions().append(src->index());
1345 return src;
1346 }
1347
1348 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset)
1349 {
1350 ASSERT(func->refCount());
1351
1352 RegisterID* originalFunc = func;
1353 if (m_shouldEmitProfileHooks) {
1354 // If codegen decided to recycle func as this call's destination register,
1355 // we need to undo that optimization here so that func will still be around
1356 // for the sake of op_profile_did_call.
1357 if (dst == func) {
1358 RefPtr<RegisterID> movedFunc = emitMove(newTemporary(), func);
1359 func = movedFunc.release().releaseRef();
1360 }
1361 }
1362
1363 RefPtr<RegisterID> funcProto = newTemporary();
1364
1365 // Generate code for arguments.
1366 Vector<RefPtr<RegisterID>, 16> argv;
1367 argv.append(newTemporary()); // reserve space for "this"
1368 for (ArgumentListNode* n = argumentsNode ? argumentsNode->m_listNode.get() : 0; n; n = n->m_next.get()) {
1369 argv.append(newTemporary());
1370 // op_construct requires the arguments to be a sequential range of registers
1371 ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1372 emitNode(argv.last().get(), n);
1373 }
1374
1375 if (m_shouldEmitProfileHooks) {
1376 emitOpcode(op_profile_will_call);
1377 instructions().append(func->index());
1378 }
1379
1380 // Load prototype.
1381 emitExpressionInfo(divot, startOffset, endOffset);
1382 emitGetByIdExceptionInfo(op_construct);
1383 emitGetById(funcProto.get(), func, globalData()->propertyNames->prototype);
1384
1385 // Reserve space for call frame.
1386 Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
1387 for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
1388 callFrame.append(newTemporary());
1389
1390 emitExpressionInfo(divot, startOffset, endOffset);
1391
1392 #if ENABLE(JIT)
1393 m_codeBlock->addCallLinkInfo();
1394 #endif
1395
1396 emitOpcode(op_construct);
1397 instructions().append(dst->index()); // dst
1398 instructions().append(func->index()); // func
1399 instructions().append(argv.size()); // argCount
1400 instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset
1401 instructions().append(funcProto->index()); // proto
1402 instructions().append(argv[0]->index()); // thisRegister
1403
1404 emitOpcode(op_construct_verify);
1405 instructions().append(dst->index());
1406 instructions().append(argv[0]->index());
1407
1408 if (m_shouldEmitProfileHooks) {
1409 emitOpcode(op_profile_did_call);
1410 instructions().append(func->index());
1411
1412 if (dst == originalFunc)
1413 func->deref();
1414 }
1415
1416 return dst;
1417 }
1418
1419 RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
1420 {
1421 ASSERT(scope->isTemporary());
1422 ControlFlowContext context;
1423 context.isFinallyBlock = false;
1424 m_scopeContextStack.append(context);
1425 m_dynamicScopeDepth++;
1426
1427 return emitUnaryNoDstOp(op_push_scope, scope);
1428 }
1429
1430 void BytecodeGenerator::emitPopScope()
1431 {
1432 ASSERT(m_scopeContextStack.size());
1433 ASSERT(!m_scopeContextStack.last().isFinallyBlock);
1434
1435 emitOpcode(op_pop_scope);
1436
1437 m_scopeContextStack.removeLast();
1438 m_dynamicScopeDepth--;
1439 }
1440
1441 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
1442 {
1443 if (!m_shouldEmitDebugHooks)
1444 return;
1445 emitOpcode(op_debug);
1446 instructions().append(debugHookID);
1447 instructions().append(firstLine);
1448 instructions().append(lastLine);
1449 }
1450
1451 void BytecodeGenerator::pushFinallyContext(Label* target, RegisterID* retAddrDst)
1452 {
1453 ControlFlowContext scope;
1454 scope.isFinallyBlock = true;
1455 FinallyContext context = { target, retAddrDst };
1456 scope.finallyContext = context;
1457 m_scopeContextStack.append(scope);
1458 m_finallyDepth++;
1459 }
1460
1461 void BytecodeGenerator::popFinallyContext()
1462 {
1463 ASSERT(m_scopeContextStack.size());
1464 ASSERT(m_scopeContextStack.last().isFinallyBlock);
1465 ASSERT(m_finallyDepth > 0);
1466 m_scopeContextStack.removeLast();
1467 m_finallyDepth--;
1468 }
1469
1470 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
1471 {
1472 // Reclaim free label scopes.
1473 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
1474 m_labelScopes.removeLast();
1475
1476 if (!m_labelScopes.size())
1477 return 0;
1478
1479 // We special-case the following, which is a syntax error in Firefox:
1480 // label:
1481 // break;
1482 if (name.isEmpty()) {
1483 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1484 LabelScope* scope = &m_labelScopes[i];
1485 if (scope->type() != LabelScope::NamedLabel) {
1486 ASSERT(scope->breakTarget());
1487 return scope;
1488 }
1489 }
1490 return 0;
1491 }
1492
1493 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1494 LabelScope* scope = &m_labelScopes[i];
1495 if (scope->name() && *scope->name() == name) {
1496 ASSERT(scope->breakTarget());
1497 return scope;
1498 }
1499 }
1500 return 0;
1501 }
1502
1503 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
1504 {
1505 // Reclaim free label scopes.
1506 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
1507 m_labelScopes.removeLast();
1508
1509 if (!m_labelScopes.size())
1510 return 0;
1511
1512 if (name.isEmpty()) {
1513 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1514 LabelScope* scope = &m_labelScopes[i];
1515 if (scope->type() == LabelScope::Loop) {
1516 ASSERT(scope->continueTarget());
1517 return scope;
1518 }
1519 }
1520 return 0;
1521 }
1522
1523 // Continue to the loop nested nearest to the label scope that matches
1524 // 'name'.
1525 LabelScope* result = 0;
1526 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1527 LabelScope* scope = &m_labelScopes[i];
1528 if (scope->type() == LabelScope::Loop) {
1529 ASSERT(scope->continueTarget());
1530 result = scope;
1531 }
1532 if (scope->name() && *scope->name() == name)
1533 return result; // may be 0
1534 }
1535 return 0;
1536 }
1537
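// Emits a jump that has to leave one or more dynamic scopes and/or finally
// blocks. The loop below alternates between two steps: op_jmp_scopes pops a
// run of ordinary dynamic scopes, and op_jsr runs each intervening finally
// block as a subroutine. An illustrative case:
//
//     for (;;) {
//         try { with (o) { break; } } finally { cleanup(); }
//     }
//
// Here the break must pop the "with" scope, run the finally block, and only
// then jump out of the loop.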
PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            // We need to remove a number of dynamic scopes to get to the next
            // finally block
            emitOpcode(op_jmp_scopes);
            instructions().append(nNormalScopes);

            // If topScope == bottomScope then there isn't actually a finally block
            // left to emit, so make the jmp_scopes jump directly to the target label
            if (topScope == bottomScope) {
                instructions().append(target->offsetFrom(instructions().size()));
                return target;
            }

            // Otherwise we just use jmp_scopes to pop a group of scopes and go
            // to the next instruction
            RefPtr<Label> nextInsn = newLabel();
            instructions().append(nextInsn->offsetFrom(instructions().size()));
            emitLabel(nextInsn.get());
        }

        // To get here there must be at least one finally block present
        do {
            ASSERT(topScope->isFinallyBlock);
            emitJumpSubroutine(topScope->finallyContext.retAddrDst, topScope->finallyContext.finallyAddr);
            --topScope;
            if (!topScope->isFinallyBlock)
                break;
        } while (topScope > bottomScope);
    }
    return emitJump(target);
}

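// Emits a jump to 'target' that also unwinds the scope chain to
// 'targetScopeDepth'. A single op_jmp_scopes suffices unless finally blocks
// are involved, in which case the general emitComplexJumpScopes path is used.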
PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);
    ASSERT(target->isForward());

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return emitJump(target);

    if (m_finallyDepth)
        return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);

    emitOpcode(op_jmp_scopes);
    instructions().append(scopeDelta);
    instructions().append(target->offsetFrom(instructions().size()));
    return target;
}

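// Emits the iteration step of a for-in loop: op_next_pname writes the next
// enumerable property name into 'dst' and jumps to 'target' while names
// remain, falling through once the iterator in 'iter' is exhausted.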
RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* iter, Label* target)
{
    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(iter->index());
    instructions().append(target->offsetFrom(instructions().size()));
    return dst;
}

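// Registers an exception handler for the bytecode range between 'start' and
// 'end' and emits op_catch, which stores the thrown value into
// 'targetRegister' when the handler is entered.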
RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
{
#if ENABLE(JIT)
    HandlerInfo info = { start->offsetFrom(0), end->offsetFrom(0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, 0 };
#else
    HandlerInfo info = { start->offsetFrom(0), end->offsetFrom(0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
#endif

    m_codeBlock->addExceptionHandler(info);
    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}

RegisterID* BytecodeGenerator::emitNewError(RegisterID* dst, ErrorType type, JSValuePtr message)
{
    emitOpcode(op_new_error);
    instructions().append(dst->index());
    instructions().append(static_cast<int>(type));
    instructions().append(addUnexpectedConstant(message));
    return dst;
}

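// op_jsr and op_sret implement finally blocks as bytecode subroutines: op_jsr
// saves a return address in 'retAddrDst' and jumps to the finally code, and
// the op_sret emitted by emitSubroutineReturn() jumps back to that address.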
PassRefPtr<Label> BytecodeGenerator::emitJumpSubroutine(RegisterID* retAddrDst, Label* finally)
{
    emitOpcode(op_jsr);
    instructions().append(retAddrDst->index());
    instructions().append(finally->offsetFrom(instructions().size()));
    return finally;
}

void BytecodeGenerator::emitSubroutineReturn(RegisterID* retAddrSrc)
{
    emitOpcode(op_sret);
    instructions().append(retAddrSrc->index());
}

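// Pushes a new dynamic scope binding 'property' to 'value' -- used, for
// example, to give a catch block's exception variable its own scope.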
void BytecodeGenerator::emitPushNewScope(RegisterID* dst, Identifier& property, RegisterID* value)
{
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    emitOpcode(op_push_new_scope);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
}

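// Starts a switch statement. Dense integer and single-character clause values
// are compiled to jump tables (op_switch_imm / op_switch_char); string clauses
// use a table keyed by the clause text (op_switch_string). The table index and
// default-target operands are emitted as placeholders here and patched in
// endSwitch().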
void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
{
    SwitchInfo info = { instructions().size(), type };
    switch (type) {
        case SwitchInfo::SwitchImmediate:
            emitOpcode(op_switch_imm);
            break;
        case SwitchInfo::SwitchCharacter:
            emitOpcode(op_switch_char);
            break;
        case SwitchInfo::SwitchString:
            emitOpcode(op_switch_string);
            break;
        default:
            ASSERT_NOT_REACHED();
    }

    instructions().append(0); // place holder for table index
    instructions().append(0); // place holder for default target
    instructions().append(scrutineeRegister->index());
    m_switchContextStack.append(info);
}

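// Maps an integer clause value to its zero-based slot in the dense branch
// offset table, i.e. (value - min).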
static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isNumber());
    double value = static_cast<NumberNode*>(node)->value();
    int32_t key = static_cast<int32_t>(value);
    ASSERT(JSValuePtr::makeInt32Fast(key) && (JSValuePtr::makeInt32Fast(key).getInt32Fast() == value));
    ASSERT(key == value);
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}

static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->offsetFrom(switchAddress));
    }
}

static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isString());
    UString::Rep* clause = static_cast<StringNode*>(node)->value().ustring().rep();
    ASSERT(clause->size() == 1);

    int32_t key = clause->data()[0];
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}

static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->offsetFrom(switchAddress));
    }
}

static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
{
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());

        ASSERT(nodes[i]->isString());
        UString::Rep* clause = static_cast<StringNode*>(nodes[i])->value().ustring().rep();
        OffsetLocation location;
        location.branchOffset = labels[i]->offsetFrom(switchAddress);
#if ENABLE(JIT)
        location.ctiOffset = 0;
#endif
        jumpTable.offsetTable.add(clause, location);
    }
}

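// Completes the switch started by beginSwitch(): patches the placeholder
// operands emitted there (the jump table index at bytecodeOffset + 1 and the
// default target at bytecodeOffset + 2) and builds the clause jump table.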
void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
{
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();
    if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->offsetFrom(switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
        prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset + 3, clauseCount, labels, nodes, min, max);
    } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->offsetFrom(switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
        prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset + 3, clauseCount, labels, nodes, min, max);
    } else {
        ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->offsetFrom(switchInfo.bytecodeOffset + 3);

        StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset + 3, clauseCount, labels, nodes);
    }
}

RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    emitExpressionInfo(0, 0, 0);
    RegisterID* exception = emitNewError(newTemporary(), SyntaxError, jsString(globalData(), "Expression too deep"));
    emitThrow(exception);
    return exception;
}

} // namespace JSC