/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "BytecodeGenerator.h"

#include "BatchedTransitionOptimizer.h"
#include "PrototypeFunction.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "UString.h"

using namespace std;

namespace JSC {

/*
    The layout of a register frame looks like this:

    For

    function f(x, y) {
        var v1;
        function g() { }
        var v2;
        return (x) * (y);
    }

    assuming (x) and (y) generated temporaries t1 and t2, you would have

    ------------------------------------
    |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
    ------------------------------------
    | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
    ------------------------------------
    | params->|<-locals      | temps->

    Because temporary registers are allocated in a stack-like fashion, we
    can reclaim them with a simple popping algorithm. The same goes for labels.
    (We never reclaim parameter or local registers, because parameters and
    locals are DontDelete.)

    The register layout before a function call looks like this:

    For

    function f(x, y)
    {
    }

    f(1);

    >                        <------------------------------
    <                        >  reserved: call frame  |  1 | <-- value held
    >         >snip<         <------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
    >                        <------------------------------
    | params->|<-locals      | temps->

    The call instruction fills in the "call frame" registers. It also pads
    missing arguments at the end of the call:

    >                        <-----------------------------------
    <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
    >         >snip<         <-----------------------------------
    <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
    >                        <-----------------------------------
    | params->|<-locals      | temps->

    After filling in missing arguments, the call instruction sets up the new
    stack frame to overlap the end of the old stack frame:

                             |---------------------------------->                        <
                             |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
                             |---------------------------------->         >snip<         <
                             | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
                             |---------------------------------->                        <
                             |                        | params->|<-locals       | temps->

    That way, arguments are "copied" into the callee's stack frame for free.

    If the caller supplies too many arguments, this trick doesn't work. The
    extra arguments protrude into space reserved for locals and temporaries.
    In that case, the call instruction makes a real copy of the call frame header,
    along with just the arguments expected by the callee, leaving the original
    call frame header and arguments behind. (The call instruction can't just discard
    extra arguments, because the "arguments" object may access them later.)
    This copying strategy ensures that all named values will be at the indices
    expected by the callee.
*/

#ifndef NDEBUG
static bool s_dumpsGeneratedCode = false;
#endif

void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
{
#ifndef NDEBUG
    s_dumpsGeneratedCode = dumpsGeneratedCode;
#else
    UNUSED_PARAM(dumpsGeneratedCode);
#endif
}

bool BytecodeGenerator::dumpsGeneratedCode()
{
#ifndef NDEBUG
    return s_dumpsGeneratedCode;
#else
    return false;
#endif
}

void BytecodeGenerator::generate()
{
    m_codeBlock->setThisRegister(m_thisRegister.index());

    m_scopeNode->emitBytecode(*this);

#ifndef NDEBUG
    m_codeBlock->setInstructionCount(m_codeBlock->instructions().size());

    if (s_dumpsGeneratedCode)
        m_codeBlock->dump(m_scopeChain->globalObject()->globalExec());
#endif

    if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
        symbolTable().clear();

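    // Mark the code block if its bytecode is identical to that of the shared numeric comparison function.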
    m_codeBlock->setIsNumericCompareFunction(instructions() == m_globalData->numericCompareFunction(m_scopeChain->globalObject()->globalExec()));

#if !ENABLE(OPCODE_SAMPLING)
    if (!m_regeneratingForExceptionInfo && (m_codeType == FunctionCode || m_codeType == EvalCode))
        m_codeBlock->clearExceptionInfo();
#endif

    m_codeBlock->shrinkToFit();
}

bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_calleeRegisters.size();
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);

    if (!result.second) {
        r0 = &registerFor(result.first->second.getIndex());
        return false;
    }

    ++m_codeBlock->m_numVars;
    r0 = newRegister();
    return true;
}

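// New global variables are allocated at successively more negative indices, counting down from -1.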
bool BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_nextGlobalIndex;
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.ustring().rep(), newEntry);

    if (!result.second)
        index = result.first->second.getIndex();
    else {
        --m_nextGlobalIndex;
        m_globals.append(index + m_globalVarStorageOffset);
    }

    r0 = &registerFor(index);
    return result.second;
}

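// Record the index at which constant registers will begin and remember the last variable register allocated so far.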
void BytecodeGenerator::preserveLastVar()
{
    if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
        m_lastVar = &m_calleeRegisters.last();
}

BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(programNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextGlobalIndex(-1)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);

    // FIXME: Move code that modifies the global object to Interpreter::execute.

    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    JSGlobalObject* globalObject = scopeChain.globalObject();
    ExecState* exec = globalObject->globalExec();
    RegisterFile* registerFile = &exec->globalData().interpreter->registerFile();

    // Shift register indexes in generated code to elide registers allocated by intermediate stack frames.
    m_globalVarStorageOffset = -RegisterFile::CallFrameHeaderSize - m_codeBlock->m_numParameters - registerFile->size();

    // Add previously defined symbols to bookkeeping.
    m_globals.grow(symbolTable->size());
    SymbolTable::iterator end = symbolTable->end();
    for (SymbolTable::iterator it = symbolTable->begin(); it != end; ++it)
        registerFor(it->second.getIndex()).setIndex(it->second.getIndex() + m_globalVarStorageOffset);

    BatchedTransitionOptimizer optimizer(globalObject);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();
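    // When every declared function and variable fits within the register file's global region, new
    // globals can be stored directly in registers; otherwise they become ordinary properties of the
    // global object.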
    bool canOptimizeNewGlobals = symbolTable->size() + functionStack.size() + varStack.size() < registerFile->maxGlobals();
    if (canOptimizeNewGlobals) {
        // Shift new symbols so they get stored prior to existing symbols.
        m_nextGlobalIndex -= symbolTable->size();

        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->removeDirect(function->ident()); // Make sure our new function is not shadowed by an old property.
            emitNewFunction(addGlobalVar(function->ident(), false), function);
        }

        Vector<RegisterID*, 32> newVars;
        for (size_t i = 0; i < varStack.size(); ++i)
            if (!globalObject->hasProperty(exec, *varStack[i].first))
                newVars.append(addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant));

        preserveLastVar();

        for (size_t i = 0; i < newVars.size(); ++i)
            emitLoad(newVars[i], jsUndefined());
    } else {
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->putWithAttributes(exec, function->ident(), new (exec) JSFunction(exec, makeFunction(exec, function), scopeChain.node()), DontDelete);
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            if (globalObject->hasProperty(exec, *varStack[i].first))
                continue;
            int attributes = DontDelete;
            if (varStack[i].second & DeclarationStacks::IsConstant)
                attributes |= ReadOnly;
            globalObject->putWithAttributes(exec, *varStack[i].first, jsUndefined(), attributes);
        }

        preserveLastVar();
    }
}

BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(functionBody)
    , m_codeBlock(codeBlock)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    codeBlock->setGlobalData(m_globalData);

    bool usesArguments = functionBody->usesArguments();
    codeBlock->setUsesArguments(usesArguments);
    if (usesArguments) {
        m_argumentsRegister.setIndex(RegisterFile::OptionalCalleeArguments);
        addVar(propertyNames().arguments, false);
    }

    if (m_codeBlock->needsFullScopeChain()) {
        ++m_codeBlock->m_numVars;
        m_activationRegisterIndex = newRegister()->index();
        emitOpcode(op_enter_with_activation);
        instructions().append(m_activationRegisterIndex);
    } else
        emitOpcode(op_enter);

    if (usesArguments) {
        emitOpcode(op_init_arguments);

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame.  In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks)
            emitOpcode(op_create_arguments);
    }

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        m_functions.add(ident.ustring().rep());
        emitNewFunction(addVar(ident, false), function);
    }

    const DeclarationStacks::VarStack& varStack = functionBody->varStack();
    for (size_t i = 0; i < varStack.size(); ++i)
        addVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);

    FunctionParameters& parameters = *functionBody->parameters();
    size_t parameterCount = parameters.size();
    m_nextParameterIndex = -RegisterFile::CallFrameHeaderSize - parameterCount - 1;
    m_parameters.grow(1 + parameterCount); // reserve space for "this"

    // Add "this" as a parameter
    m_thisRegister.setIndex(m_nextParameterIndex);
    ++m_nextParameterIndex;
    ++m_codeBlock->m_numParameters;

    if (functionBody->usesThis() || m_shouldEmitDebugHooks) {
        emitOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
    }

    for (size_t i = 0; i < parameterCount; ++i)
        addParameter(parameters[i]);

    preserveLastVar();
}

BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, const Debugger* debugger, const ScopeChain& scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(!!debugger)
    , m_shouldEmitProfileHooks(scopeChain.globalObject()->supportsProfiling())
    , m_scopeChain(&scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(evalNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(codeBlock->baseScopeDepth())
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_globalData(&scopeChain.globalObject()->globalExec()->globalData())
    , m_lastOpcodeID(op_end)
    , m_emitNodeDepth(0)
    , m_regeneratingForExceptionInfo(false)
    , m_codeBlockBeingRegeneratedFrom(0)
{
    if (m_shouldEmitDebugHooks || m_baseScopeDepth)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);
    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));

    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);

    preserveLastVar();
}

RegisterID* BytecodeGenerator::addParameter(const Identifier& ident)
{
    // Parameters overwrite var declarations, but not function declarations.
    RegisterID* result = 0;
    UString::Rep* rep = ident.ustring().rep();
    if (!m_functions.contains(rep)) {
        symbolTable().set(rep, m_nextParameterIndex);
        RegisterID& parameter = registerFor(m_nextParameterIndex);
        parameter.setIndex(m_nextParameterIndex);
        result = &parameter;
    }

    // To maintain the calling convention, we have to allocate unique space for
    // each parameter, even if the parameter doesn't make it into the symbol table.
    ++m_nextParameterIndex;
    ++m_codeBlock->m_numParameters;
    return result;
}

RegisterID* BytecodeGenerator::registerFor(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return &m_thisRegister;

    if (!shouldOptimizeLocals())
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
    if (entry.isNull())
        return 0;

    if (ident == propertyNames().arguments)
        createArgumentsIfNecessary();

    return &registerFor(entry.getIndex());
}

bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
{
    if (ident != propertyNames().arguments)
        return false;

    if (!shouldOptimizeLocals())
        return false;

    SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
    if (entry.isNull())
        return false;

    if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
        return true;

    return false;
}

RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
{
    ASSERT(willResolveToArguments(propertyNames().arguments));

    SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.ustring().rep());
    ASSERT(!entry.isNull());
    return &registerFor(entry.getIndex());
}

RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident)
{
    if (m_codeType == EvalCode)
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.ustring().rep());
    if (entry.isNull())
        return 0;

    return &registerFor(entry.getIndex());
}

bool BytecodeGenerator::isLocal(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return true;

    return shouldOptimizeLocals() && symbolTable().contains(ident.ustring().rep());
}

bool BytecodeGenerator::isLocalConstant(const Identifier& ident)
{
    return symbolTable().get(ident.ustring().rep()).isReadOnly();
}

RegisterID* BytecodeGenerator::newRegister()
{
    m_calleeRegisters.append(m_calleeRegisters.size());
    m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
    return &m_calleeRegisters.last();
}

RegisterID* BytecodeGenerator::newTemporary()
{
    // Reclaim free register IDs.
    while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
        m_calleeRegisters.removeLast();

    RegisterID* result = newRegister();
    result->setTemporary();
    return result;
}

RegisterID* BytecodeGenerator::highestUsedRegister()
{
    size_t count = m_codeBlock->m_numCalleeRegisters;
    while (m_calleeRegisters.size() < count)
        newRegister();
    return &m_calleeRegisters.last();
}

PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return &m_labelScopes.last();
}

PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(m_codeBlock);
    return &m_labels.last();
}

PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
{
    unsigned newLabelIndex = instructions().size();
    l0->setLocation(newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return l0;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
    return l0;
}

void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
    instructions().append(globalData()->interpreter->getOpcode(opcodeID));
    m_lastOpcodeID = opcodeID;
}

void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
{
    ASSERT(instructions().size() >= 4);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 3).u.operand;
    src1Index = instructions().at(size - 2).u.operand;
    src2Index = instructions().at(size - 1).u.operand;
}

void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
{
    ASSERT(instructions().size() >= 3);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 2).u.operand;
    srcIndex = instructions().at(size - 1).u.operand;
}

void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
}

void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
}

PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
{
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jmp : op_loop);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

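// Peephole optimization: if the condition was produced by the immediately preceding compare or
// null-test into a dead temporary, rewind that instruction and emit a fused compare-and-jump
// opcode instead.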
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && !target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_loop_if_lesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();

    emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

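// Same peephole as emitJumpIfTrue, but with the jump sense inverted (a preceding op_not is folded
// away by jumping on the un-negated operand instead).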
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(m_scopeChain->globalObject()->d()->callFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(m_scopeChain->globalObject()->d()->applyFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

unsigned BytecodeGenerator::addConstant(const Identifier& ident)
{
    UString::Rep* rep = ident.ustring().rep();
    pair<IdentifierMap::iterator, bool> result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
    if (result.second) // new entry
        m_codeBlock->addIdentifier(Identifier(m_globalData, rep));

    return result.first->second;
}

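// Return the constant-pool register for a JSValue, adding it to the pool only if it has not been
// seen before.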
RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
{
    int index = m_nextConstantOffset;

    pair<JSValueMap::iterator, bool> result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
    if (result.second) {
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstantRegister(JSValue(v));
    } else
        index = result.first->second;

    return &m_constantPoolRegisters[index];
}

unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    return m_codeBlock->addRegExp(r);
}

RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
{
    emitOpcode(op_pre_inc);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
{
    emitOpcode(op_pre_dec);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_inc);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_dec);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());

    if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
        opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
        instructions().append(types.toInt());

    return dst;
}

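// Peephole optimization: fold a preceding op_typeof that is compared against a string literal
// ("undefined", "boolean", "number", "string", "object", "function") into a single op_is_*
// instruction.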
RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    if (m_lastOpcodeID == op_typeof) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (src1->index() == dstIndex
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).jsValue().isString()) {
            const UString& value = asString(m_codeBlock->constantRegister(src2->index()).jsValue())->tryGetValue();
            if (value == "undefined") {
                rewindUnaryOp();
                emitOpcode(op_is_undefined);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "boolean") {
                rewindUnaryOp();
                emitOpcode(op_is_boolean);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "number") {
                rewindUnaryOp();
                emitOpcode(op_is_number);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "string") {
                rewindUnaryOp();
                emitOpcode(op_is_string);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "object") {
                rewindUnaryOp();
                emitOpcode(op_is_object);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "function") {
                rewindUnaryOp();
                emitOpcode(op_is_function);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
        }
    }

    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    return emitLoad(dst, jsBoolean(b));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
{
    // FIXME: Our hash tables won't hold infinity, so we make a new JSNumberCell each time.
    // Later we can do the extra work to handle that like the other cases.
    if (number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
        return emitLoad(dst, jsNumber(globalData(), number));
    JSValue& valueInMap = m_numberMap.add(number, JSValue()).first->second;
    if (!valueInMap)
        valueInMap = jsNumber(globalData(), number);
    return emitLoad(dst, valueInMap);
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.ustring().rep(), 0).first->second;
    if (!stringInMap)
        stringInMap = jsOwnedString(globalData(), identifier.ustring());
    return emitLoad(dst, JSValue(stringInMap));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
{
    RegisterID* constantID = addConstantValue(v);
    if (dst)
        return emitMove(dst, constantID);
    return constantID;
}

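// Attempt to resolve a property against the static scope chain. On success, reports the scope
// depth and register index (index is missingSymbolMarker() when only some scopes could be
// skipped); on failure, a fully dynamic lookup is required. The global object is reported when
// the walk reaches the end of the chain.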
bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, JSObject*& globalObject)
{
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
        stackDepth = 0;
        index = missingSymbolMarker();

        if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
            ScopeChainIterator iter = m_scopeChain->begin();
            globalObject = *iter;
            ASSERT((++iter) == m_scopeChain->end());
        }
        return false;
    }

    size_t depth = 0;

    ScopeChainIterator iter = m_scopeChain->begin();
    ScopeChainIterator end = m_scopeChain->end();
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = *iter;
        if (!currentScope->isVariableObject())
            break;
        JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.ustring().rep());

        // Found the property
        if (!entry.isNull()) {
            if (entry.isReadOnly() && forWriting) {
                stackDepth = 0;
                index = missingSymbolMarker();
                if (++iter == end)
                    globalObject = currentVariableObject;
                return false;
            }
            stackDepth = depth;
            index = entry.getIndex();
            if (++iter == end)
                globalObject = currentVariableObject;
            return true;
        }
        if (currentVariableObject->isDynamicScope())
            break;
    }

    // Can't locate the property but we're able to avoid a few lookups.
    stackDepth = depth;
    index = missingSymbolMarker();
    JSObject* scope = *iter;
    if (++iter == end)
        globalObject = scope;
    return true;
}

RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
{
    emitOpcode(op_instanceof);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(base->index());
    instructions().append(basePrototype->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    if (!findScopedProperty(property, index, depth, false, globalObject) && !globalObject) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (globalObject) {
        bool forceGlobalResolve = false;
        if (m_regeneratingForExceptionInfo) {
#if ENABLE(JIT)
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(instructions().size());
#else
            forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size());
#endif
        }

        if (index != missingSymbolMarker() && !forceGlobalResolve) {
            // Directly index the property lookup across multiple scopes.
            return emitGetScopedVar(dst, depth, index, globalObject);
        }

#if ENABLE(JIT)
        m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
        m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
        emitOpcode(op_resolve_global);
        instructions().append(dst->index());
        instructions().append(globalObject);
        instructions().append(addConstant(property));
        instructions().append(0);
        instructions().append(0);
        return dst;
    }

    if (index != missingSymbolMarker()) {
        // Directly index the property lookup across multiple scopes.
        return emitGetScopedVar(dst, depth, index, globalObject);
    }

    // In this case we are at least able to drop a few scope chains from the
    // lookup chain, although we still need to hash from then on.
    emitOpcode(op_resolve_skip);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(depth);
    return dst;
}

RegisterID* BytecodeGenerator::emitGetScopedVar(RegisterID* dst, size_t depth, int index, JSValue globalObject)
{
    if (globalObject) {
        emitOpcode(op_get_global_var);
        instructions().append(dst->index());
        instructions().append(asCell(globalObject));
        instructions().append(index);
        return dst;
    }

    emitOpcode(op_get_scoped_var);
    instructions().append(dst->index());
    instructions().append(index);
    instructions().append(depth);
    return dst;
}

RegisterID* BytecodeGenerator::emitPutScopedVar(size_t depth, int index, RegisterID* value, JSValue globalObject)
{
    if (globalObject) {
        emitOpcode(op_put_global_var);
        instructions().append(asCell(globalObject));
        instructions().append(index);
        instructions().append(value->index());
        return value;
    }
    emitOpcode(op_put_scoped_var);
    instructions().append(index);
    instructions().append(depth);
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    findScopedProperty(property, index, depth, false, globalObject);
    if (!globalObject) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_base);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    // Global object is the base
    return emitLoad(dst, JSValue(globalObject));
}
| 1163 |  | 
| 1164 | RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property) | 
| 1165 | { | 
| 1166 |     size_t depth = 0; | 
| 1167 |     int index = 0; | 
| 1168 |     JSObject* globalObject = 0; | 
| 1169 |     if (!findScopedProperty(property, index, stackDepth&: depth, forWriting: false, globalObject) || !globalObject) { | 
| 1170 |         // We can't optimise at all :-( | 
| 1171 |         emitOpcode(opcodeID: op_resolve_with_base); | 
| 1172 |         instructions().append(val: baseDst->index()); | 
| 1173 |         instructions().append(val: propDst->index()); | 
| 1174 |         instructions().append(val: addConstant(ident: property)); | 
| 1175 |         return baseDst; | 
| 1176 |     } | 
| 1177 |  | 
| 1178 |     bool forceGlobalResolve = false; | 
| 1179 |     if (m_regeneratingForExceptionInfo) { | 
| 1180 | #if ENABLE(JIT) | 
| 1181 |         forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInfoAtBytecodeOffset(bytecodeOffset: instructions().size()); | 
| 1182 | #else | 
| 1183 |         forceGlobalResolve = m_codeBlockBeingRegeneratedFrom->hasGlobalResolveInstructionAtBytecodeOffset(instructions().size()); | 
| 1184 | #endif | 
| 1185 |     } | 
| 1186 |  | 
| 1187 |     // Global object is the base | 
    emitLoad(baseDst, JSValue(globalObject));
| 1189 |  | 
| 1190 |     if (index != missingSymbolMarker() && !forceGlobalResolve) { | 
| 1191 |         // Directly index the property lookup across multiple scopes. | 
        emitGetScopedVar(propDst, depth, index, globalObject);
| 1193 |         return baseDst; | 
| 1194 |     } | 
| 1195 |  | 
| 1196 | #if ENABLE(JIT) | 
    m_codeBlock->addGlobalResolveInfo(instructions().size());
| 1198 | #else | 
| 1199 |     m_codeBlock->addGlobalResolveInstruction(instructions().size()); | 
| 1200 | #endif | 
    emitOpcode(op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(globalObject);
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
| 1207 |     return baseDst; | 
| 1208 | } | 
| 1209 |  | 
| 1210 | void BytecodeGenerator::emitMethodCheck() | 
| 1211 | { | 
    emitOpcode(op_method_check);
| 1213 | } | 
| 1214 |  | 
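// The four trailing zero operands of op_get_by_id are placeholders that are filled in at run
// time to cache the property access; addStructureStubInfo() (JIT) or
// addPropertyAccessInstruction() (interpreter) records this instruction for that purpose.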
| 1215 | RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property) | 
| 1216 | { | 
| 1217 | #if ENABLE(JIT) | 
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_get_by_id));
| 1219 | #else | 
| 1220 |     m_codeBlock->addPropertyAccessInstruction(instructions().size()); | 
| 1221 | #endif | 
| 1222 |  | 
    emitOpcode(op_get_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
| 1231 |     return dst; | 
| 1232 | } | 
| 1233 |  | 
| 1234 | RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value) | 
| 1235 | { | 
| 1236 | #if ENABLE(JIT) | 
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
| 1238 | #else | 
| 1239 |     m_codeBlock->addPropertyAccessInstruction(instructions().size()); | 
| 1240 | #endif | 
| 1241 |  | 
    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
| 1250 |     return value; | 
| 1251 | } | 
| 1252 |  | 
| 1253 | RegisterID* BytecodeGenerator::emitPutGetter(RegisterID* base, const Identifier& property, RegisterID* value) | 
| 1254 | { | 
    emitOpcode(op_put_getter);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
| 1259 |     return value; | 
| 1260 | } | 
| 1261 |  | 
| 1262 | RegisterID* BytecodeGenerator::emitPutSetter(RegisterID* base, const Identifier& property, RegisterID* value) | 
| 1263 | { | 
    emitOpcode(op_put_setter);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
| 1268 |     return value; | 
| 1269 | } | 
| 1270 |  | 
| 1271 | RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property) | 
| 1272 | { | 
    emitOpcode(op_del_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
| 1277 |     return dst; | 
| 1278 | } | 
| 1279 |  | 
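// If 'property' is the property-name register of an enclosing for..in loop, we can emit
// op_get_by_pname and reuse the loop's iterator state for a faster lookup; otherwise we fall
// back to a generic op_get_by_val.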
| 1280 | RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property) | 
| 1281 | { | 
| 1282 |     for (size_t i = m_forInContextStack.size(); i > 0; i--) { | 
| 1283 |         ForInContext& context = m_forInContextStack[i - 1]; | 
| 1284 |         if (context.propertyRegister == property) { | 
            emitOpcode(op_get_by_pname);
            instructions().append(dst->index());
            instructions().append(base->index());
            instructions().append(property->index());
            instructions().append(context.expectedSubscriptRegister->index());
            instructions().append(context.iterRegister->index());
            instructions().append(context.indexRegister->index());
| 1292 |             return dst; | 
| 1293 |         } | 
| 1294 |     } | 
    emitOpcode(op_get_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
| 1299 |     return dst; | 
| 1300 | } | 
| 1301 |  | 
| 1302 | RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value) | 
| 1303 | { | 
    emitOpcode(op_put_by_val);
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(value->index());
| 1308 |     return value; | 
| 1309 | } | 
| 1310 |  | 
| 1311 | RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property) | 
| 1312 | { | 
    emitOpcode(op_del_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
| 1317 |     return dst; | 
| 1318 | } | 
| 1319 |  | 
| 1320 | RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value) | 
| 1321 | { | 
    emitOpcode(op_put_by_index);
    instructions().append(base->index());
    instructions().append(index);
    instructions().append(value->index());
| 1326 |     return value; | 
| 1327 | } | 
| 1328 |  | 
| 1329 | RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst) | 
| 1330 | { | 
    emitOpcode(op_new_object);
    instructions().append(dst->index());
| 1333 |     return dst; | 
| 1334 | } | 
| 1335 |  | 
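// Evaluates the array literal's elements into a sequential run of temporaries and emits
// op_new_array over that range. The loop stops at the first elision, leaving any remaining
// elements to be stored by the caller after the array has been created.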
| 1336 | RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements) | 
| 1337 | { | 
| 1338 |     Vector<RefPtr<RegisterID>, 16> argv; | 
| 1339 |     for (ElementNode* n = elements; n; n = n->next()) { | 
| 1340 |         if (n->elision()) | 
| 1341 |             break; | 
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n->value());
    }
    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
| 1351 |     return dst; | 
| 1352 | } | 
| 1353 |  | 
| 1354 | RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function) | 
| 1355 | { | 
    unsigned index = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));

    emitOpcode(op_new_func);
    instructions().append(dst->index());
    instructions().append(index);
| 1361 |     return dst; | 
| 1362 | } | 
| 1363 |  | 
| 1364 | RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp) | 
| 1365 | { | 
    emitOpcode(op_new_regexp);
    instructions().append(dst->index());
    instructions().append(addRegExp(regExp));
| 1369 |     return dst; | 
| 1370 | } | 
| 1371 |  | 
| 1372 |  | 
| 1373 | RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n) | 
| 1374 | { | 
| 1375 |     FunctionBodyNode* function = n->body(); | 
    unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));

    emitOpcode(op_new_func_exp);
    instructions().append(r0->index());
    instructions().append(index);
| 1381 |     return r0; | 
| 1382 | } | 
| 1383 |  | 
| 1384 | RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset) | 
| 1385 | { | 
| 1386 |     return emitCall(op_call, dst, func, thisRegister, argumentsNode, divot, startOffset, endOffset); | 
| 1387 | } | 
| 1388 |  | 
| 1389 | void BytecodeGenerator::createArgumentsIfNecessary() | 
| 1390 | { | 
| 1391 |     if (m_codeBlock->usesArguments() && m_codeType == FunctionCode) | 
        emitOpcode(op_create_arguments);
| 1393 | } | 
| 1394 |  | 
| 1395 | RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset) | 
| 1396 | { | 
| 1397 |     createArgumentsIfNecessary(); | 
| 1398 |     return emitCall(op_call_eval, dst, func, thisRegister, argumentsNode, divot, startOffset, endOffset); | 
| 1399 | } | 
| 1400 |  | 
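// Shared implementation for op_call and op_call_eval. The arguments (including 'this') are
// evaluated into a contiguous block of temporaries, followed by a block of temporaries
// reserved for the call frame header, so the callee's register frame lines up directly
// after the caller's.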
| 1401 | RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, RegisterID* thisRegister, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset) | 
| 1402 | { | 
| 1403 |     ASSERT(opcodeID == op_call || opcodeID == op_call_eval); | 
| 1404 |     ASSERT(func->refCount()); | 
| 1405 |     ASSERT(thisRegister->refCount()); | 
| 1406 |  | 
| 1407 |     RegisterID* originalFunc = func; | 
| 1408 |     if (m_shouldEmitProfileHooks) { | 
| 1409 |         // If codegen decided to recycle func as this call's destination register, | 
| 1410 |         // we need to undo that optimization here so that func will still be around | 
| 1411 |         // for the sake of op_profile_did_call. | 
| 1412 |         if (dst == func) { | 
            RefPtr<RegisterID> movedThisRegister = emitMove(newTemporary(), thisRegister);
            RefPtr<RegisterID> movedFunc = emitMove(thisRegister, func);
| 1415 |              | 
| 1416 |             thisRegister = movedThisRegister.release().releaseRef(); | 
| 1417 |             func = movedFunc.release().releaseRef(); | 
| 1418 |         } | 
| 1419 |     } | 
| 1420 |  | 
| 1421 |     // Generate code for arguments. | 
| 1422 |     Vector<RefPtr<RegisterID>, 16> argv; | 
    argv.append(thisRegister);
    for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next) {
        argv.append(newTemporary());
        // op_call requires the arguments to be a sequential range of registers
        ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n);
| 1429 |     } | 
| 1430 |  | 
| 1431 |     // Reserve space for call frame. | 
| 1432 |     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame; | 
| 1433 |     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i) | 
        callFrame.append(newTemporary());
| 1435 |  | 
| 1436 |     if (m_shouldEmitProfileHooks) { | 
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());
| 1439 |  | 
| 1440 | #if ENABLE(JIT) | 
        m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
| 1442 | #endif | 
| 1443 |     } | 
| 1444 |  | 
| 1445 |     emitExpressionInfo(divot, startOffset, endOffset); | 
| 1446 |  | 
| 1447 | #if ENABLE(JIT) | 
| 1448 |     m_codeBlock->addCallLinkInfo(); | 
| 1449 | #endif | 
| 1450 |  | 
| 1451 |     // Emit call. | 
| 1452 |     emitOpcode(opcodeID); | 
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argv.size()); // argCount
    instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset
| 1457 |  | 
| 1458 |     if (m_shouldEmitProfileHooks) { | 
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());
| 1461 |  | 
| 1462 |         if (dst == originalFunc) { | 
| 1463 |             thisRegister->deref(); | 
| 1464 |             func->deref(); | 
| 1465 |         } | 
| 1466 |     } | 
| 1467 |  | 
| 1468 |     return dst; | 
| 1469 | } | 
| 1470 |  | 
| 1471 | RegisterID* BytecodeGenerator::emitLoadVarargs(RegisterID* argCountDst, RegisterID* arguments) | 
| 1472 | { | 
| 1473 |     ASSERT(argCountDst->index() < arguments->index()); | 
    emitOpcode(op_load_varargs);
    instructions().append(argCountDst->index());
    instructions().append(arguments->index());
| 1477 |     return argCountDst; | 
| 1478 | } | 
| 1479 |  | 
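// Emits op_call_varargs, which takes its argument count from a register rather than a
// compile-time constant; profiler hooks are emitted around the call when enabled, just as
// for an ordinary call.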
| 1480 | RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* argCountRegister, unsigned divot, unsigned startOffset, unsigned endOffset) | 
| 1481 | { | 
| 1482 |     ASSERT(func->refCount()); | 
| 1483 |     ASSERT(thisRegister->refCount()); | 
| 1484 |     ASSERT(dst != func); | 
| 1485 |     if (m_shouldEmitProfileHooks) { | 
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());
| 1488 |          | 
| 1489 | #if ENABLE(JIT) | 
        m_codeBlock->addFunctionRegisterInfo(instructions().size(), func->index());
| 1491 | #endif | 
| 1492 |     } | 
| 1493 |      | 
| 1494 |     emitExpressionInfo(divot, startOffset, endOffset); | 
| 1495 |      | 
| 1496 |     // Emit call. | 
    emitOpcode(op_call_varargs);
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argCountRegister->index()); // arg count
    instructions().append(thisRegister->index() + RegisterFile::CallFrameHeaderSize); // initial registerOffset
| 1502 |     if (m_shouldEmitProfileHooks) { | 
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());
| 1505 |     } | 
| 1506 |     return dst; | 
| 1507 | } | 
| 1508 |  | 
| 1509 | RegisterID* BytecodeGenerator::emitReturn(RegisterID* src) | 
| 1510 | { | 
| 1511 |     if (m_codeBlock->needsFullScopeChain()) { | 
        emitOpcode(op_tear_off_activation);
        instructions().append(m_activationRegisterIndex);
    } else if (m_codeBlock->usesArguments() && m_codeBlock->m_numParameters > 1)
        emitOpcode(op_tear_off_arguments);
| 1516 |  | 
| 1517 |     return emitUnaryNoDstOp(op_ret, src); | 
| 1518 | } | 
| 1519 |  | 
| 1520 | RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src) | 
| 1521 | { | 
| 1522 |     emitOpcode(opcodeID); | 
    instructions().append(src->index());
| 1524 |     return src; | 
| 1525 | } | 
| 1526 |  | 
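// Emits a 'new' expression: the constructor's "prototype" property is loaded into a
// temporary, the arguments are laid out as for a call (with a slot reserved for the newly
// created 'this'), and op_construct / op_construct_verify perform the construction and
// check the value the constructor produced.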
| 1527 | RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ArgumentsNode* argumentsNode, unsigned divot, unsigned startOffset, unsigned endOffset) | 
| 1528 | { | 
| 1529 |     ASSERT(func->refCount()); | 
| 1530 |  | 
| 1531 |     RegisterID* originalFunc = func; | 
| 1532 |     if (m_shouldEmitProfileHooks) { | 
| 1533 |         // If codegen decided to recycle func as this call's destination register, | 
| 1534 |         // we need to undo that optimization here so that func will still be around | 
| 1535 |         // for the sake of op_profile_did_call. | 
| 1536 |         if (dst == func) { | 
            RefPtr<RegisterID> movedFunc = emitMove(newTemporary(), func);
| 1538 |             func = movedFunc.release().releaseRef(); | 
| 1539 |         } | 
| 1540 |     } | 
| 1541 |  | 
| 1542 |     RefPtr<RegisterID> funcProto = newTemporary(); | 
| 1543 |  | 
| 1544 |     // Generate code for arguments. | 
| 1545 |     Vector<RefPtr<RegisterID>, 16> argv; | 
    argv.append(newTemporary()); // reserve space for "this"
    for (ArgumentListNode* n = argumentsNode ? argumentsNode->m_listNode : 0; n; n = n->m_next) {
        argv.append(newTemporary());
        // op_construct requires the arguments to be a sequential range of registers
        ASSERT(argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n);
| 1552 |     } | 
| 1553 |  | 
| 1554 |     if (m_shouldEmitProfileHooks) { | 
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());
| 1557 |     } | 
| 1558 |  | 
| 1559 |     // Load prototype. | 
| 1560 |     emitExpressionInfo(divot, startOffset, endOffset); | 
    emitGetByIdExceptionInfo(op_construct);
    emitGetById(funcProto.get(), func, globalData()->propertyNames->prototype);
| 1563 |  | 
| 1564 |     // Reserve space for call frame. | 
| 1565 |     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame; | 
| 1566 |     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i) | 
        callFrame.append(newTemporary());
| 1568 |  | 
| 1569 |     emitExpressionInfo(divot, startOffset, endOffset); | 
| 1570 |  | 
| 1571 | #if ENABLE(JIT) | 
| 1572 |     m_codeBlock->addCallLinkInfo(); | 
| 1573 | #endif | 
| 1574 |  | 
    emitOpcode(op_construct);
    instructions().append(dst->index()); // dst
    instructions().append(func->index()); // func
    instructions().append(argv.size()); // argCount
    instructions().append(argv[0]->index() + argv.size() + RegisterFile::CallFrameHeaderSize); // registerOffset
    instructions().append(funcProto->index()); // proto
    instructions().append(argv[0]->index()); // thisRegister

    emitOpcode(op_construct_verify);
    instructions().append(dst->index());
    instructions().append(argv[0]->index());
| 1586 |  | 
| 1587 |     if (m_shouldEmitProfileHooks) { | 
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());
| 1590 |          | 
| 1591 |         if (dst == originalFunc) | 
| 1592 |             func->deref(); | 
| 1593 |     } | 
| 1594 |  | 
| 1595 |     return dst; | 
| 1596 | } | 
| 1597 |  | 
| 1598 | RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count) | 
| 1599 | { | 
    emitOpcode(op_strcat);
    instructions().append(dst->index());
    instructions().append(src->index());
    instructions().append(count);
| 1604 |  | 
| 1605 |     return dst; | 
| 1606 | } | 
| 1607 |  | 
| 1608 | void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src) | 
| 1609 | { | 
    emitOpcode(op_to_primitive);
    instructions().append(dst->index());
    instructions().append(src->index());
| 1613 | } | 
| 1614 |  | 
| 1615 | RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope) | 
| 1616 | { | 
| 1617 |     ASSERT(scope->isTemporary()); | 
| 1618 |     ControlFlowContext context; | 
| 1619 |     context.isFinallyBlock = false; | 
    m_scopeContextStack.append(context);
| 1621 |     m_dynamicScopeDepth++; | 
| 1622 |     createArgumentsIfNecessary(); | 
| 1623 |  | 
    return emitUnaryNoDstOp(op_push_scope, scope);
| 1625 | } | 
| 1626 |  | 
| 1627 | void BytecodeGenerator::emitPopScope() | 
| 1628 | { | 
| 1629 |     ASSERT(m_scopeContextStack.size()); | 
| 1630 |     ASSERT(!m_scopeContextStack.last().isFinallyBlock); | 
| 1631 |  | 
    emitOpcode(op_pop_scope);
| 1633 |  | 
| 1634 |     m_scopeContextStack.removeLast(); | 
| 1635 |     m_dynamicScopeDepth--; | 
| 1636 | } | 
| 1637 |  | 
| 1638 | void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine) | 
| 1639 | { | 
| 1640 |     if (!m_shouldEmitDebugHooks) | 
| 1641 |         return; | 
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(firstLine);
    instructions().append(lastLine);
| 1646 | } | 
| 1647 |  | 
| 1648 | void BytecodeGenerator::pushFinallyContext(Label* target, RegisterID* retAddrDst) | 
| 1649 | { | 
| 1650 |     ControlFlowContext scope; | 
| 1651 |     scope.isFinallyBlock = true; | 
    FinallyContext context = { target, retAddrDst };
    scope.finallyContext = context;
    m_scopeContextStack.append(scope);
| 1655 |     m_finallyDepth++; | 
| 1656 | } | 
| 1657 |  | 
| 1658 | void BytecodeGenerator::popFinallyContext() | 
| 1659 | { | 
| 1660 |     ASSERT(m_scopeContextStack.size()); | 
| 1661 |     ASSERT(m_scopeContextStack.last().isFinallyBlock); | 
| 1662 |     ASSERT(m_finallyDepth > 0); | 
| 1663 |     m_scopeContextStack.removeLast(); | 
| 1664 |     m_finallyDepth--; | 
| 1665 | } | 
| 1666 |  | 
| 1667 | LabelScope* BytecodeGenerator::breakTarget(const Identifier& name) | 
| 1668 | { | 
| 1669 |     // Reclaim free label scopes. | 
| 1670 |     // | 
    // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
    // size 0, leading to segfaulty badness. We have yet to identify a cause within our code that would
    // make the GCC codegen misbehave in this fashion, so the following refactoring of the loop
    // condition is a workaround.
| 1676 |     while (m_labelScopes.size()) { | 
        if (m_labelScopes.last().refCount())
| 1678 |             break; | 
| 1679 |         m_labelScopes.removeLast(); | 
| 1680 |     } | 
| 1681 |  | 
| 1682 |     if (!m_labelScopes.size()) | 
| 1683 |         return 0; | 
| 1684 |  | 
| 1685 |     // We special-case the following, which is a syntax error in Firefox: | 
| 1686 |     // label: | 
| 1687 |     //     break; | 
| 1688 |     if (name.isEmpty()) { | 
| 1689 |         for (int i = m_labelScopes.size() - 1; i >= 0; --i) { | 
| 1690 |             LabelScope* scope = &m_labelScopes[i]; | 
| 1691 |             if (scope->type() != LabelScope::NamedLabel) { | 
| 1692 |                 ASSERT(scope->breakTarget()); | 
| 1693 |                 return scope; | 
| 1694 |             } | 
| 1695 |         } | 
| 1696 |         return 0; | 
| 1697 |     } | 
| 1698 |  | 
| 1699 |     for (int i = m_labelScopes.size() - 1; i >= 0; --i) { | 
| 1700 |         LabelScope* scope = &m_labelScopes[i]; | 
| 1701 |         if (scope->name() && *scope->name() == name) { | 
| 1702 |             ASSERT(scope->breakTarget()); | 
| 1703 |             return scope; | 
| 1704 |         } | 
| 1705 |     } | 
| 1706 |     return 0; | 
| 1707 | } | 
| 1708 |  | 
| 1709 | LabelScope* BytecodeGenerator::continueTarget(const Identifier& name) | 
| 1710 | { | 
| 1711 |     // Reclaim free label scopes. | 
| 1712 |     while (m_labelScopes.size() && !m_labelScopes.last().refCount()) | 
| 1713 |         m_labelScopes.removeLast(); | 
| 1714 |  | 
| 1715 |     if (!m_labelScopes.size()) | 
| 1716 |         return 0; | 
| 1717 |  | 
| 1718 |     if (name.isEmpty()) { | 
| 1719 |         for (int i = m_labelScopes.size() - 1; i >= 0; --i) { | 
| 1720 |             LabelScope* scope = &m_labelScopes[i]; | 
| 1721 |             if (scope->type() == LabelScope::Loop) { | 
| 1722 |                 ASSERT(scope->continueTarget()); | 
| 1723 |                 return scope; | 
| 1724 |             } | 
| 1725 |         } | 
| 1726 |         return 0; | 
| 1727 |     } | 
| 1728 |  | 
| 1729 |     // Continue to the loop nested nearest to the label scope that matches | 
| 1730 |     // 'name'. | 
| 1731 |     LabelScope* result = 0; | 
| 1732 |     for (int i = m_labelScopes.size() - 1; i >= 0; --i) { | 
| 1733 |         LabelScope* scope = &m_labelScopes[i]; | 
| 1734 |         if (scope->type() == LabelScope::Loop) { | 
| 1735 |             ASSERT(scope->continueTarget()); | 
| 1736 |             result = scope; | 
| 1737 |         } | 
| 1738 |         if (scope->name() && *scope->name() == name) | 
| 1739 |             return result; // may be 0 | 
| 1740 |     } | 
| 1741 |     return 0; | 
| 1742 | } | 
| 1743 |  | 
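// Emits a jump that must leave one or more dynamic scopes and run intervening finally blocks
// on the way to 'target': groups of plain scopes are popped with op_jmp_scopes, and each
// finally block is entered via emitJumpSubroutine() before continuing outwards.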
| 1744 | PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope) | 
| 1745 | { | 
| 1746 |     while (topScope > bottomScope) { | 
| 1747 |         // First we count the number of dynamic scopes we need to remove to get | 
| 1748 |         // to a finally block. | 
| 1749 |         int nNormalScopes = 0; | 
| 1750 |         while (topScope > bottomScope) { | 
| 1751 |             if (topScope->isFinallyBlock) | 
| 1752 |                 break; | 
| 1753 |             ++nNormalScopes; | 
| 1754 |             --topScope; | 
| 1755 |         } | 
| 1756 |  | 
| 1757 |         if (nNormalScopes) { | 
| 1758 |             size_t begin = instructions().size(); | 
| 1759 |  | 
| 1760 |             // We need to remove a number of dynamic scopes to get to the next | 
| 1761 |             // finally block | 
            emitOpcode(op_jmp_scopes);
            instructions().append(nNormalScopes);
| 1764 |  | 
| 1765 |             // If topScope == bottomScope then there isn't actually a finally block | 
| 1766 |             // left to emit, so make the jmp_scopes jump directly to the target label | 
| 1767 |             if (topScope == bottomScope) { | 
                instructions().append(target->bind(begin, instructions().size()));
| 1769 |                 return target; | 
| 1770 |             } | 
| 1771 |  | 
| 1772 |             // Otherwise we just use jmp_scopes to pop a group of scopes and go | 
| 1773 |             // to the next instruction | 
| 1774 |             RefPtr<Label> nextInsn = newLabel(); | 
            instructions().append(nextInsn->bind(begin, instructions().size()));
            emitLabel(nextInsn.get());
| 1777 |         } | 
| 1778 |  | 
| 1779 |         while (topScope > bottomScope && topScope->isFinallyBlock) { | 
            emitJumpSubroutine(topScope->finallyContext.retAddrDst, topScope->finallyContext.finallyAddr);
| 1781 |             --topScope; | 
| 1782 |         } | 
| 1783 |     } | 
| 1784 |     return emitJump(target); | 
| 1785 | } | 
| 1786 |  | 
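// Emits a forward jump to 'target', popping dynamic scopes down to targetScopeDepth. The
// simple case is a single op_jmp_scopes; if any finally blocks are in the way, the slower
// emitComplexJumpScopes() path is used instead.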
| 1787 | PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth) | 
| 1788 | { | 
| 1789 |     ASSERT(scopeDepth() - targetScopeDepth >= 0); | 
| 1790 |     ASSERT(target->isForward()); | 
| 1791 |  | 
| 1792 |     size_t scopeDelta = scopeDepth() - targetScopeDepth; | 
| 1793 |     ASSERT(scopeDelta <= m_scopeContextStack.size()); | 
| 1794 |     if (!scopeDelta) | 
| 1795 |         return emitJump(target); | 
| 1796 |  | 
| 1797 |     if (m_finallyDepth) | 
        return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
| 1799 |  | 
| 1800 |     size_t begin = instructions().size(); | 
| 1801 |  | 
    emitOpcode(op_jmp_scopes);
    instructions().append(scopeDelta);
    instructions().append(target->bind(begin, instructions().size()));
| 1805 |     return target; | 
| 1806 | } | 
| 1807 |  | 
| 1808 | RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget) | 
| 1809 | { | 
| 1810 |     size_t begin = instructions().size(); | 
| 1811 |  | 
    emitOpcode(op_get_pnames);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(breakTarget->bind(begin, instructions().size()));
| 1818 |     return dst; | 
| 1819 | } | 
| 1820 |  | 
| 1821 | RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target) | 
| 1822 | { | 
| 1823 |     size_t begin = instructions().size(); | 
| 1824 |  | 
    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(iter->index());
    instructions().append(target->bind(begin, instructions().size()));
| 1832 |     return dst; | 
| 1833 | } | 
| 1834 |  | 
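// Registers an exception handler for the bytecode between the start and end labels and emits
// op_catch at the handler's target; the thrown value is delivered in targetRegister.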
| 1835 | RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end) | 
| 1836 | { | 
| 1837 | #if ENABLE(JIT) | 
| 1838 |     HandlerInfo info = { | 
        static_cast<uint32_t>(start->bind(0, 0)),
        static_cast<uint32_t>(end->bind(0, 0)),
        static_cast<uint32_t>(instructions().size()),
        static_cast<uint32_t>(m_dynamicScopeDepth + m_baseScopeDepth),
        CodeLocationLabel()
| 1844 |     }; | 
| 1845 | #else | 
| 1846 |     HandlerInfo info = { | 
| 1847 |         static_cast<uint32_t>(start->bind(0, 0)), | 
| 1848 |         static_cast<uint32_t>(end->bind(0, 0)), | 
| 1849 |         static_cast<uint32_t>(instructions().size()), | 
| 1850 |         static_cast<uint32_t>(m_dynamicScopeDepth + m_baseScopeDepth) | 
| 1851 |     }; | 
| 1852 | #endif | 
| 1853 |  | 
    m_codeBlock->addExceptionHandler(info);
    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
| 1857 |     return targetRegister; | 
| 1858 | } | 
| 1859 |  | 
| 1860 | RegisterID* BytecodeGenerator::emitNewError(RegisterID* dst, ErrorType type, JSValue message) | 
| 1861 | { | 
    emitOpcode(op_new_error);
    instructions().append(dst->index());
    instructions().append(static_cast<int>(type));
    instructions().append(addConstantValue(message)->index());
| 1866 |     return dst; | 
| 1867 | } | 
| 1868 |  | 
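// Emits op_jsr to enter a finally block as a subroutine: the return address is stored in
// retAddrDst, and op_sret (see emitSubroutineReturn) later jumps back to the instruction
// following the jsr.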
| 1869 | PassRefPtr<Label> BytecodeGenerator::emitJumpSubroutine(RegisterID* retAddrDst, Label* finally) | 
| 1870 | { | 
| 1871 |     size_t begin = instructions().size(); | 
| 1872 |  | 
    emitOpcode(op_jsr);
    instructions().append(retAddrDst->index());
    instructions().append(finally->bind(begin, instructions().size()));
    emitLabel(newLabel().get()); // Record the fact that the next instruction is implicitly labeled, because op_sret will return to it.
| 1877 |     return finally; | 
| 1878 | } | 
| 1879 |  | 
| 1880 | void BytecodeGenerator::emitSubroutineReturn(RegisterID* retAddrSrc) | 
| 1881 | { | 
    emitOpcode(op_sret);
    instructions().append(retAddrSrc->index());
| 1884 | } | 
| 1885 |  | 
| 1886 | void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value) | 
| 1887 | { | 
| 1888 |     ControlFlowContext context; | 
| 1889 |     context.isFinallyBlock = false; | 
    m_scopeContextStack.append(context);
| 1891 |     m_dynamicScopeDepth++; | 
| 1892 |      | 
| 1893 |     createArgumentsIfNecessary(); | 
| 1894 |  | 
    emitOpcode(op_push_new_scope);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
| 1899 | } | 
| 1900 |  | 
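// Starts a switch statement of the given type. The opcode is emitted with placeholder
// operands for the jump table index and the default target; endSwitch() patches them once
// all clause labels are known.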
| 1901 | void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type) | 
| 1902 | { | 
    SwitchInfo info = { static_cast<uint32_t>(instructions().size()), type };
| 1904 |     switch (type) { | 
| 1905 |         case SwitchInfo::SwitchImmediate: | 
            emitOpcode(op_switch_imm);
            break;
        case SwitchInfo::SwitchCharacter:
            emitOpcode(op_switch_char);
            break;
        case SwitchInfo::SwitchString:
            emitOpcode(op_switch_string);
| 1913 |             break; | 
| 1914 |         default: | 
| 1915 |             ASSERT_NOT_REACHED(); | 
| 1916 |     } | 
| 1917 |  | 
    instructions().append(0); // place holder for table index
    instructions().append(0); // place holder for default target
    instructions().append(scrutineeRegister->index());
    m_switchContextStack.append(info);
| 1922 | } | 
| 1923 |  | 
| 1924 | static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max) | 
| 1925 | { | 
| 1926 |     UNUSED_PARAM(max); | 
| 1927 |     ASSERT(node->isNumber()); | 
| 1928 |     double value = static_cast<NumberNode*>(node)->value(); | 
| 1929 |     int32_t key = static_cast<int32_t>(value); | 
| 1930 |     ASSERT(key == value); | 
| 1931 |     ASSERT(key >= min); | 
| 1932 |     ASSERT(key <= max); | 
| 1933 |     return key - min; | 
| 1934 | } | 
| 1935 |  | 
| 1936 | static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max) | 
| 1937 | { | 
| 1938 |     jumpTable.min = min; | 
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
| 1941 |     for (uint32_t i = 0; i < clauseCount; ++i) { | 
| 1942 |         // We're emitting this after the clause labels should have been fixed, so  | 
| 1943 |         // the labels should not be "forward" references | 
| 1944 |         ASSERT(!labels[i]->isForward()); | 
        jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
| 1946 |     } | 
| 1947 | } | 
| 1948 |  | 
| 1949 | static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max) | 
| 1950 | { | 
| 1951 |     UNUSED_PARAM(max); | 
| 1952 |     ASSERT(node->isString()); | 
| 1953 |     UString::Rep* clause = static_cast<StringNode*>(node)->value().ustring().rep(); | 
| 1954 |     ASSERT(clause->size() == 1); | 
| 1955 |      | 
| 1956 |     int32_t key = clause->data()[0]; | 
| 1957 |     ASSERT(key >= min); | 
| 1958 |     ASSERT(key <= max); | 
| 1959 |     return key - min; | 
| 1960 | } | 
| 1961 |  | 
| 1962 | static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max) | 
| 1963 | { | 
| 1964 |     jumpTable.min = min; | 
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
| 1967 |     for (uint32_t i = 0; i < clauseCount; ++i) { | 
| 1968 |         // We're emitting this after the clause labels should have been fixed, so  | 
| 1969 |         // the labels should not be "forward" references | 
| 1970 |         ASSERT(!labels[i]->isForward()); | 
        jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
| 1972 |     } | 
| 1973 | } | 
| 1974 |  | 
| 1975 | static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes) | 
| 1976 | { | 
| 1977 |     for (uint32_t i = 0; i < clauseCount; ++i) { | 
| 1978 |         // We're emitting this after the clause labels should have been fixed, so  | 
| 1979 |         // the labels should not be "forward" references | 
| 1980 |         ASSERT(!labels[i]->isForward()); | 
| 1981 |          | 
| 1982 |         ASSERT(nodes[i]->isString()); | 
| 1983 |         UString::Rep* clause = static_cast<StringNode*>(nodes[i])->value().ustring().rep(); | 
| 1984 |         OffsetLocation location; | 
        location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
        jumpTable.offsetTable.add(clause, location);
| 1987 |     } | 
| 1988 | } | 
| 1989 |  | 
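// Completes the switch begun by beginSwitch(): builds the appropriate jump table (immediate,
// character or string), stores its index back into the switch instruction, and binds the
// default target.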
| 1990 | void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max) | 
| 1991 | { | 
| 1992 |     SwitchInfo switchInfo = m_switchContextStack.last(); | 
| 1993 |     m_switchContextStack.removeLast(); | 
| 1994 |     if (switchInfo.switchType == SwitchInfo::SwitchImmediate) { | 
| 1995 |         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables(); | 
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
        prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
        prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else {
        ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
| 2013 |     } | 
| 2014 | } | 
| 2015 |  | 
| 2016 | RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException() | 
| 2017 | { | 
| 2018 |     // It would be nice to do an even better job of identifying exactly where the expression is. | 
| 2019 |     // And we could make the caller pass the node pointer in, if there was some way of getting | 
| 2020 |     // that from an arbitrary node. However, calling emitExpressionInfo without any useful data | 
| 2021 |     // is still good enough to get us an accurate line number. | 
    emitExpressionInfo(0, 0, 0);
    RegisterID* exception = emitNewError(newTemporary(), SyntaxError, jsString(globalData(), "Expression too deep"));
    emitThrow(exception);
| 2025 |     return exception; | 
| 2026 | } | 
| 2027 |  | 
| 2028 | } // namespace JSC | 
| 2029 |  |