/*
 * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGByteCodeParser.h"

#if ENABLE(DFG_JIT)

#include "ArrayConstructor.h"
#include "CallLinkStatus.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGArrayMode.h"
#include "DFGCapabilities.h"
#include "DFGJITCode.h"
#include "GetByIdStatus.h"
#include "Heap.h"
#include "JSActivation.h"
#include "JSCInlines.h"
#include "PreciseJumpTargets.h"
#include "PutByIdStatus.h"
#include "StackAlignment.h"
#include "StringConstructor.h"
#include <wtf/CommaPrinter.h>
#include <wtf/HashMap.h>
#include <wtf/MathExtras.h>
#include <wtf/StdLibExtras.h>

namespace JSC { namespace DFG {

class ConstantBufferKey {
public:
    ConstantBufferKey()
        : m_codeBlock(0)
        , m_index(0)
    {
    }

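    // Hash-table sentinels: the default-constructed empty value above is
    // (null, 0) and the deleted value below is (null, 1); real keys carry a
    // non-null CodeBlock*, so neither sentinel can collide with one.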
    ConstantBufferKey(WTF::HashTableDeletedValueType)
        : m_codeBlock(0)
        , m_index(1)
    {
    }
    
    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
        : m_codeBlock(codeBlock)
        , m_index(index)
    {
    }
    
    bool operator==(const ConstantBufferKey& other) const
    {
        return m_codeBlock == other.m_codeBlock
            && m_index == other.m_index;
    }
    
    unsigned hash() const
    {
        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
    }
    
    bool isHashTableDeletedValue() const
    {
        return !m_codeBlock && m_index;
    }
    
    CodeBlock* codeBlock() const { return m_codeBlock; }
    unsigned index() const { return m_index; }
    
private:
    CodeBlock* m_codeBlock;
    unsigned m_index;
};

struct ConstantBufferKeyHash {
    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
    {
        return a == b;
    }
    
    static const bool safeToCompareToEmptyOrDeleted = true;
};

} } // namespace JSC::DFG

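// The WTF trait specializations below allow ConstantBufferKey to be used
// directly as a HashMap key (see m_constantBufferCache in ByteCodeParser).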
namespace WTF {

template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
    typedef JSC::DFG::ConstantBufferKeyHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };

} // namespace WTF

namespace JSC { namespace DFG {

// === ByteCodeParser ===
//
// This class is used to compile the dataflow graph from a CodeBlock.
class ByteCodeParser {
public:
    ByteCodeParser(Graph& graph)
        : m_vm(&graph.m_vm)
        , m_codeBlock(graph.m_codeBlock)
        , m_profiledBlock(graph.m_profiledBlock)
        , m_graph(graph)
        , m_currentBlock(0)
        , m_currentIndex(0)
        , m_constantUndefined(UINT_MAX)
        , m_constantNull(UINT_MAX)
        , m_constantNaN(UINT_MAX)
        , m_constant1(UINT_MAX)
        , m_constants(m_codeBlock->numberOfConstantRegisters())
        , m_numArguments(m_codeBlock->numParameters())
        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
        , m_parameterSlots(0)
        , m_numPassedVarArgs(0)
        , m_inlineStackTop(0)
        , m_haveBuiltOperandMaps(false)
        , m_emptyJSValueIndex(UINT_MAX)
        , m_currentInstruction(0)
    {
        ASSERT(m_profiledBlock);
    }
    
    // Parse a full CodeBlock of bytecode.
    bool parse();
    
private:
    struct InlineStackEntry;
    
    // Just parse from m_currentIndex to the end of the current CodeBlock.
    void parseCodeBlock();
    
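    // Note: growth must be propagated to every block created so far, since each
    // BasicBlock sizes its per-local variable tracking eagerly.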
    void ensureLocals(unsigned newNumLocals)
    {
        if (newNumLocals <= m_numLocals)
            return;
        m_numLocals = newNumLocals;
        for (size_t i = 0; i < m_graph.numBlocks(); ++i)
            m_graph.block(i)->ensureLocals(newNumLocals);
    }

    // Helper for min and max.
    bool handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
    
    // Handle calls. This resolves issues surrounding inlining and intrinsics.
    void handleCall(int result, NodeType op, CodeSpecializationKind, unsigned instructionSize, int callee, int argCount, int registerOffset);
    void handleCall(Instruction* pc, NodeType op, CodeSpecializationKind);
    void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
    bool handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
    bool handleIntrinsic(int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
    bool handleTypedArrayConstructor(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, TypedArrayType);
    bool handleConstantInternalFunction(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
    Node* handlePutByOffset(Node* base, unsigned identifier, PropertyOffset, Node* value);
    Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
    void handleGetByOffset(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        PropertyOffset);
    void handleGetById(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        const GetByIdStatus&);
    void emitPutById(
        Node* base, unsigned identifierNumber, Node* value, const PutByIdStatus&, bool isDirect);
    void handlePutById(
        Node* base, unsigned identifierNumber, Node* value, const PutByIdStatus&,
        bool isDirect);
    Node* emitPrototypeChecks(Structure*, IntendedStructureChain*);
    
    Node* getScope(bool skipTop, unsigned skipCount);
    
    // Prepare to parse a block.
    void prepareToParseBlock();
    // Parse a single basic block of bytecode instructions.
    bool parseBlock(unsigned limit);
    // Link block successors.
    void linkBlock(BasicBlock*, Vector<BasicBlock*>& possibleTargets);
    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets);
    
    VariableAccessData* newVariableAccessData(VirtualRegister operand, bool isCaptured)
    {
        ASSERT(!operand.isConstant());
        
        m_graph.m_variableAccessData.append(VariableAccessData(operand, isCaptured));
        return &m_graph.m_variableAccessData.last();
    }

    // Get/Set the operands/result of a bytecode instruction.
    Node* getDirect(VirtualRegister operand)
    {
        // Is this a constant?
        if (operand.isConstant()) {
            unsigned constant = operand.toConstantIndex();
            ASSERT(constant < m_constants.size());
            return getJSConstant(constant);
        }
        
        // Is this an argument?
        if (operand.isArgument())
            return getArgument(operand);
        
        // Must be a local.
        return getLocal(operand);
    }

    Node* get(VirtualRegister operand)
    {
        if (inlineCallFrame()) {
            if (!inlineCallFrame()->isClosureCall) {
                JSFunction* callee = inlineCallFrame()->calleeConstant();
                if (operand.offset() == JSStack::Callee)
                    return cellConstant(callee);
                if (operand.offset() == JSStack::ScopeChain)
                    return cellConstant(callee->scope());
            }
        } else if (operand.offset() == JSStack::Callee)
            return addToGraph(GetCallee);
        else if (operand.offset() == JSStack::ScopeChain)
            return addToGraph(GetMyScope);
        
        return getDirect(m_inlineStackTop->remapOperand(operand));
    }

    enum SetMode {
        // A normal set which follows a two-phase commit that spans code origins. During
        // the current code origin it issues a MovHint, and at the start of the next
        // code origin there will be a SetLocal. If the local needs flushing, the second
        // SetLocal will be preceded with a Flush.
        NormalSet,
        
        // A set where the SetLocal happens immediately and there is still a Flush. This
        // is relevant when assigning to a local in tricky situations for the delayed
        // SetLocal logic but where we know that we have not performed any side effects
        // within this code origin. This is a safe replacement for NormalSet anytime we
        // know that we have not yet performed side effects in this code origin.
        ImmediateSetWithFlush,
        
        // A set where the SetLocal happens immediately and we do not Flush it even if
        // this is a local that is marked as needing it. This is relevant when
        // initializing locals at the top of a function.
        ImmediateNakedSet
    };
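    // For example, a NormalSet of loc1 at bytecode index K yields:
    //     MovHint(@value, loc1)     // emitted now, at origin K
    //     Flush(loc1)               // at the start of the next origin, if needed
    //     SetLocal(@value, loc1)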
    Node* setDirect(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        addToGraph(MovHint, OpInfo(operand.offset()), value);
        
        DelayedSetLocal delayed = DelayedSetLocal(operand, value);
        
        if (setMode == NormalSet) {
            m_setLocalQueue.append(delayed);
            return 0;
        }
        
        return delayed.execute(this, setMode);
    }

    Node* set(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        return setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
    }

    Node* injectLazyOperandSpeculation(Node* node)
    {
        ASSERT(node->op() == GetLocal);
        ASSERT(node->origin.semantic.bytecodeIndex == m_currentIndex);
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        LazyOperandValueProfileKey key(m_currentIndex, node->local());
        SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key);
        node->variableAccessData()->predict(prediction);
        return node;
    }

    // Used in implementing get/set, above, where the operand is a local variable.
    Node* getLocal(VirtualRegister operand)
    {
        unsigned local = operand.toLocal();
        
        if (local < m_localWatchpoints.size()) {
            if (VariableWatchpointSet* set = m_localWatchpoints[local]) {
                if (JSValue value = set->inferredValue()) {
                    addToGraph(FunctionReentryWatchpoint, OpInfo(m_codeBlock->symbolTable()));
                    addToGraph(VariableWatchpoint, OpInfo(set));
                    // Note: this is very special from an OSR exit standpoint. We wouldn't be
                    // able to do this for most locals, but it works here because we're dealing
                    // with a flushed local. For most locals we would need to issue a GetLocal
                    // here and ensure that we have uses in DFG IR wherever there would have
                    // been uses in bytecode. Clearly this optimization does not do this. But
                    // that's fine, because we don't need to track liveness for captured
                    // locals, and this optimization only kicks in for captured locals.
                    return inferredConstant(value);
                }
            }
        }
        
        Node* node = m_currentBlock->variablesAtTail.local(local);
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        // This has two goals: 1) link together variable access datas, and 2)
        // try to avoid creating redundant GetLocals. (1) is required for
        // correctness - no other phase will ensure that block-local variable
        // access data unification is done correctly. (2) is purely opportunistic
        // and is meant as a compile-time optimization only.
        
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
            
            if (!isCaptured) {
                switch (node->op()) {
                case GetLocal:
                    return node;
                case SetLocal:
                    return node->child1().node();
                default:
                    break;
                }
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.local(local) = node;
        return node;
    }

    Node* setLocal(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned local = operand.toLocal();
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        if (setMode != ImmediateNakedSet) {
            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
            if (isCaptured || argumentPosition)
                flushDirect(operand, argumentPosition);
        }
        
        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
            || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCacheWatchpoint));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.local(local) = node;
        return node;
    }

    // Used in implementing get/set, above, where the operand is an argument.
    Node* getArgument(VirtualRegister operand)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);
        
        Node* node = m_currentBlock->variablesAtTail.argument(argument);
        bool isCaptured = m_codeBlock->isCaptured(operand);
        
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
            
            switch (node->op()) {
            case GetLocal:
                return node;
            case SetLocal:
                return node->child1().node();
            default:
                break;
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }
    Node* setArgument(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);
        
        bool isCaptured = m_codeBlock->isCaptured(operand);
        
        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        
        // Always flush arguments, except for 'this'. If 'this' is created by us,
        // then make sure that it's never unboxed.
        if (argument) {
            if (setMode != ImmediateNakedSet)
                flushDirect(operand);
        } else if (m_codeBlock->specializationKind() == CodeForConstruct)
            variableAccessData->mergeShouldNeverUnbox(true);
        
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
            || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCacheWatchpoint));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }

    ArgumentPosition* findArgumentPositionForArgument(int argument)
    {
        InlineStackEntry* stack = m_inlineStackTop;
        while (stack->m_inlineCallFrame)
            stack = stack->m_caller;
        return stack->m_argumentPositions[argument];
    }
    
    ArgumentPosition* findArgumentPositionForLocal(VirtualRegister operand)
    {
        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
            if (!inlineCallFrame)
                break;
            if (operand.offset() < static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize))
                continue;
            if (operand.offset() == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
                continue;
            if (operand.offset() >= static_cast<int>(inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset() + inlineCallFrame->arguments.size()))
                continue;
            int argument = VirtualRegister(operand.offset() - inlineCallFrame->stackOffset).toArgument();
            return stack->m_argumentPositions[argument];
        }
        return 0;
    }

    ArgumentPosition* findArgumentPosition(VirtualRegister operand)
    {
        if (operand.isArgument())
            return findArgumentPositionForArgument(operand.toArgument());
        return findArgumentPositionForLocal(operand);
    }
    
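    // Note: constants can be added from the concurrent compiler thread; the slot
    // is created empty here, and the lazy write barrier recorded in the plan
    // installs the value (with a proper barrier) when compilation is finalized.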
    void addConstant(JSValue value)
    {
        unsigned constantIndex = m_codeBlock->addConstantLazily();
        initializeLazyWriteBarrierForConstant(
            m_graph.m_plan.writeBarriers,
            m_codeBlock->constants()[constantIndex],
            m_codeBlock,
            constantIndex,
            m_codeBlock->ownerExecutable(),
            value);
    }

    void flush(VirtualRegister operand)
    {
        flushDirect(m_inlineStackTop->remapOperand(operand));
    }
    
    void flushDirect(VirtualRegister operand)
    {
        flushDirect(operand, findArgumentPosition(operand));
    }
    
    void flushDirect(VirtualRegister operand, ArgumentPosition* argumentPosition)
    {
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        ASSERT(!operand.isConstant());
        
        Node* node = m_currentBlock->variablesAtTail.operand(operand);
        
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = addToGraph(Flush, OpInfo(variable));
        m_currentBlock->variablesAtTail.operand(operand) = node;
        if (argumentPosition)
            argumentPosition->addVariable(variable);
    }

    void flush(InlineStackEntry* inlineStackEntry)
    {
        int numArguments;
        if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame) {
            numArguments = inlineCallFrame->arguments.size();
            if (inlineCallFrame->isClosureCall) {
                flushDirect(inlineStackEntry->remapOperand(VirtualRegister(JSStack::Callee)));
                flushDirect(inlineStackEntry->remapOperand(VirtualRegister(JSStack::ScopeChain)));
            }
        } else
            numArguments = inlineStackEntry->m_codeBlock->numParameters();
        for (unsigned argument = numArguments; argument-- > 1;)
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForArgument(argument)));
        for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
            if (!inlineStackEntry->m_codeBlock->isCaptured(virtualRegisterForLocal(local)))
                continue;
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForLocal(local)));
        }
    }
    
    void flushForTerminal()
    {
        for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
            flush(inlineStackEntry);
    }
    
    void flushForReturn()
    {
        flush(m_inlineStackTop);
    }
    void flushIfTerminal(SwitchData& data)
    {
        if (data.fallThrough.bytecodeIndex() > m_currentIndex)
            return;
        
        for (unsigned i = data.cases.size(); i--;) {
            if (data.cases[i].target.bytecodeIndex() > m_currentIndex)
                return;
        }
        
        flushForTerminal();
    }

    // NOTE: Only use this to construct constants that arise from non-speculative
    // constant folding. I.e. creating constants using this if we had constant
    // field inference would be a bad idea, since the bytecode parser's folding
    // doesn't handle liveness preservation.
    Node* getJSConstantForValue(JSValue constantValue)
    {
        unsigned constantIndex;
        if (!m_codeBlock->findConstant(constantValue, constantIndex)) {
            addConstant(constantValue);
            m_constants.append(ConstantRecord());
        }
        
        ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        
        return getJSConstant(constantIndex);
    }
    
    Node* getJSConstant(unsigned constant)
    {
        Node* node = m_constants[constant].asJSValue;
        if (node)
            return node;
        
        Node* result = addToGraph(JSConstant, OpInfo(constant));
        m_constants[constant].asJSValue = result;
        return result;
    }

    // Helper functions to get/set the this value.
    Node* getThis()
    {
        return get(m_inlineStackTop->m_codeBlock->thisRegister());
    }
    
    void setThis(Node* value)
    {
        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
    }
    
    // Convenience methods for checking nodes for constants.
    bool isJSConstant(Node* node)
    {
        return node->op() == JSConstant;
    }
    bool isInt32Constant(Node* node)
    {
        return isJSConstant(node) && valueOfJSConstant(node).isInt32();
    }
    // Convenience methods for getting constant values.
    JSValue valueOfJSConstant(Node* node)
    {
        ASSERT(isJSConstant(node));
        return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
    }
    int32_t valueOfInt32Constant(Node* node)
    {
        ASSERT(isInt32Constant(node));
        return valueOfJSConstant(node).asInt32();
    }

    // This method returns a JSConstant with the value 'undefined'.
    Node* constantUndefined()
    {
        // Has m_constantUndefined been set up yet?
        if (m_constantUndefined == UINT_MAX) {
            // Search the constant pool for undefined; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
                if (testMe.isUndefined())
                    return getJSConstant(m_constantUndefined);
            }
            
            // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsUndefined());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }
        
        // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
        return getJSConstant(m_constantUndefined);
    }

    // This method returns a JSConstant with the value 'null'.
    Node* constantNull()
    {
        // Has m_constantNull been set up yet?
        if (m_constantNull == UINT_MAX) {
            // Search the constant pool for null; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
                if (testMe.isNull())
                    return getJSConstant(m_constantNull);
            }
            
            // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsNull());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }
        
        // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
        return getJSConstant(m_constantNull);
    }

    // This method returns a JSConstant with the integer value 1.
    Node* one()
    {
        // Has m_constant1 been set up yet?
        if (m_constant1 == UINT_MAX) {
            // Search the constant pool for the value 1; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
                if (testMe.isInt32() && testMe.asInt32() == 1)
                    return getJSConstant(m_constant1);
            }
            
            // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsNumber(1));
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }
        
        // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
        return getJSConstant(m_constant1);
    }

    // This method returns a JSConstant with the value NaN.
    Node* constantNaN()
    {
        JSValue nan = jsNaN();
        
        // Has m_constantNaN been set up yet?
        if (m_constantNaN == UINT_MAX) {
            // Search the constant pool for the value NaN; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
                if (JSValue::encode(testMe) == JSValue::encode(nan))
                    return getJSConstant(m_constantNaN);
            }
            
            // Add the value NaN to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(nan);
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }
        
        // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value NaN.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
        ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
        return getJSConstant(m_constantNaN);
    }
    
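    // cellConstant() gives each referenced cell a single cached WeakJSConstant
    // node rather than an entry in the CodeBlock constant pool, so the graph
    // references the cell weakly.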
    Node* cellConstant(JSCell* cell)
    {
        HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, nullptr);
        if (result.isNewEntry) {
            ASSERT(!Heap::isZombified(cell));
            result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
        }
        
        return result.iterator->value;
    }
    
    Node* inferredConstant(JSValue value)
    {
        if (value.isCell())
            return cellConstant(value.asCell());
        return getJSConstantForValue(value);
    }
    
    InlineCallFrame* inlineCallFrame()
    {
        return m_inlineStackTop->m_inlineCallFrame;
    }
    
    CodeOrigin currentCodeOrigin()
    {
        return CodeOrigin(m_currentIndex, inlineCallFrame());
    }

    BranchData* branchData(unsigned taken, unsigned notTaken)
    {
        // We assume that branches originating from bytecode always have a fall-through. We
        // use this assumption to avoid checking for the creation of terminal blocks.
        ASSERT((taken > m_currentIndex) || (notTaken > m_currentIndex));
        BranchData* data = m_graph.m_branchData.add();
        *data = BranchData::withBytecodeIndices(taken, notTaken);
        return data;
    }

    Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, NodeOrigin(currentCodeOrigin()), Edge(child1), Edge(child2),
            Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
    {
        Node* result = m_graph.addNode(
            SpecNone, op, NodeOrigin(currentCodeOrigin()), child1, child2, child3);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, NodeOrigin(currentCodeOrigin()), info, Edge(child1), Edge(child2),
            Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, NodeOrigin(currentCodeOrigin()), info1, info2,
            Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    
    Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
    {
        Node* result = m_graph.addNode(
            SpecNone, Node::VarArg, op, NodeOrigin(currentCodeOrigin()), info1, info2,
            m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        
        m_numPassedVarArgs = 0;
        
        return result;
    }

    void addVarArgChild(Node* child)
    {
        m_graph.m_varArgChildren.append(Edge(child));
        m_numPassedVarArgs++;
    }
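
    // Note: besides emitting the varargs call node, addCall grows m_parameterSlots
    // to the high-water mark of outgoing-argument space: the header slots this
    // frame initializes for the callee (minus the callee-written CallerFrame/PC
    // slots) plus the argument count.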
    Node* addCall(int result, NodeType op, int callee, int argCount, int registerOffset)
    {
        SpeculatedType prediction = getPrediction();
        
        addVarArgChild(get(VirtualRegister(callee)));
        size_t parameterSlots = JSStack::CallFrameHeaderSize - JSStack::CallerFrameAndPCSize + argCount;
        if (parameterSlots > m_parameterSlots)
            m_parameterSlots = parameterSlots;
        
        int dummyThisArgument = op == Call ? 0 : 1;
        for (int i = 0 + dummyThisArgument; i < argCount; ++i)
            addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));
        
        Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
        set(VirtualRegister(result), call);
        return call;
    }
81345200 832 Node* cellConstantWithStructureCheck(JSCell* object, Structure* structure)
6fe7ccc8 833 {
93a37866 834 Node* objectNode = cellConstant(object);
93a37866 835 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
93a37866
A
836 return objectNode;
837 }
838
81345200 839 Node* cellConstantWithStructureCheck(JSCell* object)
93a37866 840 {
81345200 841 return cellConstantWithStructureCheck(object, object->structure());
93a37866 842 }

    SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(locker, bytecodeIndex);
    }
    
    SpeculatedType getPrediction(unsigned bytecodeIndex)
    {
        SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);
        
        if (prediction == SpecNone) {
            // We have no information about what values this node generates. Give up
            // on executing this code, since we're likely to do more damage than good.
            addToGraph(ForceOSRExit);
        }
        
        return prediction;
    }
    
    SpeculatedType getPredictionWithoutOSRExit()
    {
        return getPredictionWithoutOSRExit(m_currentIndex);
    }
    
    SpeculatedType getPrediction()
    {
        return getPrediction(m_currentIndex);
    }

    ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
        return ArrayMode::fromObserved(locker, profile, action, false);
    }
    
    ArrayMode getArrayMode(ArrayProfile* profile)
    {
        return getArrayMode(profile, Array::Read);
    }

    ArrayMode getArrayModeConsideringSlowPath(ArrayProfile* profile, Array::Action action)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        
        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
        
        bool makeSafe =
            m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
            || profile->outOfBounds(locker);
        
        ArrayMode result = ArrayMode::fromObserved(locker, profile, action, makeSafe);
        
        return result;
    }

    Node* makeSafe(Node* node)
    {
        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
            node->mergeFlags(NodeMayOverflowInDFG);
        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            node->mergeFlags(NodeMayNegZeroInDFG);
        
        if (!isX86() && node->op() == ArithMod)
            return node;
        
        if (!m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex))
            return node;
        
        switch (node->op()) {
        case UInt32ToNumber:
        case ArithAdd:
        case ArithSub:
        case ValueAdd:
        case ArithMod: // for ArithMod "MayOverflow" means we tried to divide by zero, or we saw a double.
            node->mergeFlags(NodeMayOverflowInBaseline);
            break;
            
        case ArithNegate:
            // Currently we can't tell the difference between a negation overflowing
            // (i.e. -(1 << 31)) or generating negative zero (i.e. -0). If it took the
            // slow path then we assume that it did both of those things.
            node->mergeFlags(NodeMayOverflowInBaseline);
            node->mergeFlags(NodeMayNegZeroInBaseline);
            break;
            
        case ArithMul:
            // FIXME: We should detect cases where we only overflowed but never created
            // negative zero.
            // https://bugs.webkit.org/show_bug.cgi?id=132470
            if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
                node->mergeFlags(NodeMayOverflowInBaseline | NodeMayNegZeroInBaseline);
            else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
                node->mergeFlags(NodeMayNegZeroInBaseline);
            break;
            
        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
        
        return node;
    }

    Node* makeDivSafe(Node* node)
    {
        ASSERT(node->op() == ArithDiv);
        
        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
            node->mergeFlags(NodeMayOverflowInDFG);
        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            node->mergeFlags(NodeMayNegZeroInDFG);
        
        // The main slow case counter for op_div in the old JIT counts only when
        // the operands are not numbers. We don't care about that since we already
        // have speculations in place that take care of that separately. We only
        // care about when the outcome of the division is not an integer, which
        // is what the special fast case counter tells us.
        
        if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex))
            return node;
        
        // FIXME: It might be possible to make this more granular.
        node->mergeFlags(NodeMayOverflowInBaseline | NodeMayNegZeroInBaseline);
        
        return node;
    }
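    
    // For a non-direct put, verify the prototype chain recorded when the stub was
    // created: each structure's stored prototype must still have the structure
    // cached at the next position in the chain.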
    bool structureChainIsStillValid(bool direct, Structure* previousStructure, StructureChain* chain)
    {
        if (direct)
            return true;
        
        if (!previousStructure->storedPrototype().isNull() && previousStructure->storedPrototype().asCell()->structure() != chain->head()->get())
            return false;
        
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
            if (!(*it)->storedPrototype().isNull() && (*it)->storedPrototype().asCell()->structure() != it[1].get())
                return false;
        }
        
        return true;
    }
    
    void buildOperandMapsIfNecessary();
    
    VM* m_vm;
    CodeBlock* m_codeBlock;
    CodeBlock* m_profiledBlock;
    Graph& m_graph;
    
    // The current block being generated.
    BasicBlock* m_currentBlock;
    // The bytecode index of the current instruction being generated.
    unsigned m_currentIndex;
    
    // We use these values during code generation, and to avoid the need for
    // special handling we make sure they are available as constants in the
    // CodeBlock's constant pool. These variables are initialized to
    // UINT_MAX, and lazily updated to hold an index into the CodeBlock's
    // constant pool, as necessary.
    unsigned m_constantUndefined;
    unsigned m_constantNull;
    unsigned m_constantNaN;
    unsigned m_constant1;
    HashMap<JSCell*, unsigned> m_cellConstants;
    HashMap<JSCell*, Node*> m_cellConstantNodes;
    
    // A constant in the constant pool may be represented by more than one
    // node in the graph, depending on the context in which it is being used.
    struct ConstantRecord {
        ConstantRecord()
            : asInt32(0)
            , asNumeric(0)
            , asJSValue(0)
        {
        }
        
        Node* asInt32;
        Node* asNumeric;
        Node* asJSValue;
    };
    
    // One ConstantRecord per entry in the CodeBlock's constant pool, tracking the
    // node(s) that currently materialize that constant.
    Vector<ConstantRecord, 16> m_constants;
    
    // The number of arguments passed to the function.
    unsigned m_numArguments;
    // The number of locals (vars + temporaries) used in the function.
    unsigned m_numLocals;
    // The number of slots (in units of sizeof(Register)) that we need to
    // preallocate for arguments to outgoing calls from this frame. This
    // number includes the CallFrame slots that we initialize for the callee
    // (but not the callee-initialized CallerFrame and ReturnPC slots).
    // This number is 0 if and only if this function is a leaf.
    unsigned m_parameterSlots;
    // The number of var args passed to the next var arg node.
    unsigned m_numPassedVarArgs;
    
    HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;
    
    Vector<VariableWatchpointSet*, 16> m_localWatchpoints;
    
    struct InlineStackEntry {
        ByteCodeParser* m_byteCodeParser;
        
        CodeBlock* m_codeBlock;
        CodeBlock* m_profiledBlock;
        InlineCallFrame* m_inlineCallFrame;
        
        ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }
        
        QueryableExitProfile m_exitProfile;
        
        // Remapping of identifier and constant numbers from the code block being
        // inlined (inline callee) to the code block that we're inlining into
        // (the machine code block, which is the transitive, though not necessarily
        // direct, caller).
        Vector<unsigned> m_identifierRemap;
        Vector<unsigned> m_constantRemap;
        Vector<unsigned> m_constantBufferRemap;
        Vector<unsigned> m_switchRemap;
        
        // Blocks introduced by this code block, which need successor linking.
        // May include up to one basic block that includes the continuation after
        // the callsite in the caller. These must be appended in the order that they
        // are created, but their bytecodeBegin values need not be in order as they
        // are ignored.
        Vector<UnlinkedBlock> m_unlinkedBlocks;
        
        // Potential block linking targets. Must be sorted by bytecodeBegin, and
        // cannot have two blocks that have the same bytecodeBegin. For this very
        // reason, this is not equivalent to m_unlinkedBlocks above.
        Vector<BasicBlock*> m_blockLinkingTargets;
        
        // If the callsite's basic block was split into two, then this will be
        // the head of the callsite block. It needs its successors linked to the
        // m_unlinkedBlocks, but not the other way around: there's no way for
        // any blocks in m_unlinkedBlocks to jump back into this block.
        BasicBlock* m_callsiteBlockHead;
        
        // Does the callsite block head need linking? This is typically true
        // but will be false for the machine code block's inline stack entry
        // (since that one is not inlined) and for cases where an inline callee
        // did the linking for us.
        bool m_callsiteBlockHeadNeedsLinking;
        
        VirtualRegister m_returnValue;
        
        // Speculations about variable types collected from the profiled code block,
        // which are based on OSR exit profiles that past DFG compilations of this
        // code block had gathered.
        LazyOperandValueProfileParser m_lazyOperands;
        
        CallLinkInfoMap m_callLinkInfos;
        StubInfoMap m_stubInfos;
        
        // Did we see any returns? We need to handle the (uncommon but necessary)
        // case where a procedure that does not return was inlined.
        bool m_didReturn;
        
        // Did we have any early returns?
        bool m_didEarlyReturn;
        
        // Pointers to the argument position trackers for this slice of code.
        Vector<ArgumentPosition*> m_argumentPositions;
        
        InlineStackEntry* m_caller;
        
        InlineStackEntry(
            ByteCodeParser*,
            CodeBlock*,
            CodeBlock* profiledBlock,
            BasicBlock* callsiteBlockHead,
            JSFunction* callee, // Null if this is a closure call.
            VirtualRegister returnValueVR,
            VirtualRegister inlineCallFrameStart,
            int argumentCountIncludingThis,
            CodeSpecializationKind);
        
        ~InlineStackEntry()
        {
            m_byteCodeParser->m_inlineStackTop = m_caller;
        }
        
        VirtualRegister remapOperand(VirtualRegister operand) const
        {
            if (!m_inlineCallFrame)
                return operand;
            
            if (operand.isConstant()) {
                VirtualRegister result = VirtualRegister(m_constantRemap[operand.toConstantIndex()]);
                ASSERT(result.isConstant());
                return result;
            }
            
            return VirtualRegister(operand.offset() + m_inlineCallFrame->stackOffset);
        }
    };
    
    InlineStackEntry* m_inlineStackTop;
    
    struct DelayedSetLocal {
        VirtualRegister m_operand;
        Node* m_value;
        
        DelayedSetLocal() { }
        DelayedSetLocal(VirtualRegister operand, Node* value)
            : m_operand(operand)
            , m_value(value)
        {
        }
        
        Node* execute(ByteCodeParser* parser, SetMode setMode = NormalSet)
        {
            if (m_operand.isArgument())
                return parser->setArgument(m_operand, m_value, setMode);
            return parser->setLocal(m_operand, m_value, setMode);
        }
    };
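    // setDirect() queues one of these for every NormalSet; the queue is drained,
    // executing the pending SetLocals, before the parser starts the next bytecode
    // instruction, which is what gives NormalSet its two-phase behavior.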

    Vector<DelayedSetLocal, 2> m_setLocalQueue;
    
    // Have we built operand maps? We initialize them lazily, and only when doing
    // inlining.
    bool m_haveBuiltOperandMaps;
    // Mapping between identifier names and numbers.
    BorrowedIdentifierMap m_identifierMap;
    // Mapping between values and constant numbers.
    JSValueMap m_jsValueMap;
    // Index of the empty value, or UINT_MAX if there is no mapping. This is a horrible
    // work-around for the fact that JSValueMap can't handle "empty" values.
    unsigned m_emptyJSValueIndex;
    
    CodeBlock* m_dfgCodeBlock;
    CallLinkStatus::ContextMap m_callContextMap;
    StubInfoMap m_dfgStubInfos;
    
    Instruction* m_currentInstruction;
};

#define NEXT_OPCODE(name) \
    m_currentIndex += OPCODE_LENGTH(name); \
    continue

#define LAST_OPCODE(name) \
    m_currentIndex += OPCODE_LENGTH(name); \
    return shouldContinueParsing

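// Both macros assume they expand inside parseBlock's opcode dispatch loop:
// NEXT_OPCODE advances past the current instruction and re-enters the loop,
// while LAST_OPCODE advances and then returns out of parseBlock, reporting
// whether parsing should continue with the next block.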
void ByteCodeParser::handleCall(Instruction* pc, NodeType op, CodeSpecializationKind kind)
{
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
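    // op_call and op_construct share this operand layout: pc[1] is the result,
    // pc[2] the callee, pc[3] the argument count including 'this', and pc[4] the
    // register offset, which is negated when passed along.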
    handleCall(
        pc[1].u.operand, op, kind, OPCODE_LENGTH(op_call),
        pc[2].u.operand, pc[3].u.operand, -pc[4].u.operand);
}

void ByteCodeParser::handleCall(
    int result, NodeType op, CodeSpecializationKind kind, unsigned instructionSize,
    int callee, int argumentCountIncludingThis, int registerOffset)
{
    ASSERT(registerOffset <= 0);
    
    Node* callTarget = get(VirtualRegister(callee));
    
    CallLinkStatus callLinkStatus;
    
    if (m_graph.isConstant(callTarget)) {
        callLinkStatus = CallLinkStatus(
            m_graph.valueOfJSConstant(callTarget)).setIsProved(true);
    } else {
        callLinkStatus = CallLinkStatus::computeFor(
            m_inlineStackTop->m_profiledBlock, currentCodeOrigin(),
            m_inlineStackTop->m_callLinkInfos, m_callContextMap);
    }
    
    if (!callLinkStatus.canOptimize()) {
        // Oddly, this conflates calls that haven't executed with calls that behaved sufficiently polymorphically
        // that we cannot optimize them.
        
        addCall(result, op, callee, argumentCountIncludingThis, registerOffset);
        return;
    }
    
    unsigned nextOffset = m_currentIndex + instructionSize;
    SpeculatedType prediction = getPrediction();
    
    if (InternalFunction* function = callLinkStatus.internalFunction()) {
        if (handleConstantInternalFunction(result, function, registerOffset, argumentCountIncludingThis, prediction, kind)) {
            // This phantoming has to be *after* the code for the intrinsic, to signify that
            // the inputs must be kept alive whatever exits the intrinsic may do.
            addToGraph(Phantom, callTarget);
            emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
            return;
        }
        
        // Can only handle this using the generic call handler.
        addCall(result, op, callee, argumentCountIncludingThis, registerOffset);
        return;
    }
    
    Intrinsic intrinsic = callLinkStatus.intrinsicFor(kind);
    if (intrinsic != NoIntrinsic) {
        emitFunctionChecks(callLinkStatus, callTarget, registerOffset, kind);
        
        if (handleIntrinsic(result, intrinsic, registerOffset, argumentCountIncludingThis, prediction)) {
            // This phantoming has to be *after* the code for the intrinsic, to signify that
            // the inputs must be kept alive whatever exits the intrinsic may do.
            addToGraph(Phantom, callTarget);
            emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
            if (m_graph.compilation())
                m_graph.compilation()->noticeInlinedCall();
            return;
        }
    } else if (handleInlining(callTarget, result, callLinkStatus, registerOffset, argumentCountIncludingThis, nextOffset, kind)) {
        if (m_graph.compilation())
            m_graph.compilation()->noticeInlinedCall();
        return;
    }
    
    addCall(result, op, callee, argumentCountIncludingThis, registerOffset);
}

void ByteCodeParser::emitFunctionChecks(const CallLinkStatus& callLinkStatus, Node* callTarget, int registerOffset, CodeSpecializationKind kind)
{
    Node* thisArgument;
    if (kind == CodeForCall)
        thisArgument = get(virtualRegisterForArgument(0, registerOffset));
    else
        thisArgument = 0;
    
    if (callLinkStatus.isProved()) {
        addToGraph(Phantom, callTarget, thisArgument);
        return;
    }
    
    ASSERT(callLinkStatus.canOptimize());
    
    if (JSFunction* function = callLinkStatus.function())
        addToGraph(CheckFunction, OpInfo(function), callTarget, thisArgument);
    else {
        ASSERT(callLinkStatus.structure());
        ASSERT(callLinkStatus.executable());
        
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(callLinkStatus.structure())), callTarget);
        addToGraph(CheckExecutable, OpInfo(callLinkStatus.executable()), callTarget, thisArgument);
    }
}
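
// emitArgumentPhantoms keeps Phantom uses of the arguments alive (e.g. for OSR
// exit) when an intrinsic or constant internal function was handled inline and
// otherwise consumed them without real graph uses.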
void ByteCodeParser::emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind kind)
{
    for (int i = kind == CodeForCall ? 0 : 1; i < argumentCountIncludingThis; ++i)
        addToGraph(Phantom, get(virtualRegisterForArgument(i, registerOffset)));
}
1301
81345200 1302bool ByteCodeParser::handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus& callLinkStatus, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind kind)
6fe7ccc8 1303{
81345200
A
1304 static const bool verbose = false;
1305
1306 if (verbose)
1307 dataLog("Considering inlining ", callLinkStatus, " into ", currentCodeOrigin(), "\n");
1308
6fe7ccc8 1309 // First, the really simple checks: do we have an actual JS function?
81345200
A
1310 if (!callLinkStatus.executable()) {
1311 if (verbose)
1312 dataLog(" Failing because there is no executable.\n");
6fe7ccc8 1313 return false;
81345200
A
1314 }
1315 if (callLinkStatus.executable()->isHostFunction()) {
1316 if (verbose)
1317 dataLog(" Failing because it's a host function.\n");
6fe7ccc8 1318 return false;
81345200 1319 }
6fe7ccc8 1320
93a37866 1321 FunctionExecutable* executable = jsCast<FunctionExecutable*>(callLinkStatus.executable());
6fe7ccc8 1322
93a37866
A
1323 // Does the number of arguments we're passing match the arity of the target? We currently
1324 // inline only if the number of arguments passed is greater than or equal to the number
1325 // arguments expected.
81345200
A
1326 if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis) {
1327 if (verbose)
1328 dataLog(" Failing because of arity mismatch.\n");
6fe7ccc8 1329 return false;
6fe7ccc8
A
1330 }
1331
93a37866
A
1332 // Do we have a code block, and does the code block's size match the heuristics/requirements for
1333 // being an inline candidate? We might not have a code block if code was thrown away or if we
1334 // simply hadn't actually made this call yet. We could still theoretically attempt to inline it
1335 // if we had a static proof of what was being called; this might happen for example if you call a
1336 // global function, where watchpointing gives us static information. Overall, it's a rare case
1337 // because we expect that any hot callees would have already been compiled.
1338 CodeBlock* codeBlock = executable->baselineCodeBlockFor(kind);
81345200
A
1339 if (!codeBlock) {
1340 if (verbose)
1341 dataLog(" Failing because no code block available.\n");
6fe7ccc8 1342 return false;
81345200
A
1343 }
1344 CapabilityLevel capabilityLevel = inlineFunctionForCapabilityLevel(
1345 codeBlock, kind, callLinkStatus.isClosureCall());
1346 if (!canInline(capabilityLevel)) {
1347 if (verbose)
1348 dataLog(" Failing because the function is not inlineable.\n");
93a37866 1349 return false;
81345200 1350 }
6fe7ccc8 1351
81345200
A
1352 // Check if the caller is already too large. We do this check here because that's just
1353 // where we happen to also have the callee's code block, and we want that for the
1354 // purpose of unsetting SABI.
1355 if (!isSmallEnoughToInlineCodeInto(m_codeBlock)) {
1356 codeBlock->m_shouldAlwaysBeInlined = false;
1357 if (verbose)
1358 dataLog(" Failing because the caller is too large.\n");
1359 return false;
1360 }
    
    // FIXME: this should be better at predicting how much bloat we will introduce by inlining
    // this function.
    // https://bugs.webkit.org/show_bug.cgi?id=127627
    
    // Have we exceeded inline stack depth, or are we trying to inline a recursive call to
    // too many levels? If either of these is detected, then don't inline. We adjust our
    // heuristics if we are dealing with a function that cannot otherwise be compiled.
    
    unsigned depth = 0;
    unsigned recursion = 0;
    
    for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
        ++depth;
        if (depth >= Options::maximumInliningDepth()) {
            if (verbose)
                dataLog("    Failing because depth exceeded.\n");
            return false;
        }
        
        if (entry->executable() == executable) {
            ++recursion;
            if (recursion >= Options::maximumInliningRecursion()) {
                if (verbose)
                    dataLog("    Failing because recursion detected.\n");
                return false;
            }
        }
    }
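    // Note that depth counts every enclosing inline frame, while recursion counts
    // only the frames running this same executable, so a self-recursive callee can
    // be rejected even when the general depth limit is still far away.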
    
    if (verbose)
        dataLog("    Committing to inlining.\n");
    
    // Now we know without a doubt that we are committed to inlining. So begin the process
    // by checking the callee (if necessary) and making sure that arguments and the callee
    // are flushed.
    emitFunctionChecks(callLinkStatus, callTargetNode, registerOffset, kind);
    
    // FIXME: Don't flush constants!
    
    int inlineCallFrameStart = m_inlineStackTop->remapOperand(VirtualRegister(registerOffset)).offset() + JSStack::CallFrameHeaderSize;
    
    ensureLocals(
        VirtualRegister(inlineCallFrameStart).toLocal() + 1 +
        JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters);
    
    size_t argumentPositionStart = m_graph.m_argumentPositions.size();
    
    InlineStackEntry inlineStackEntry(
        this, codeBlock, codeBlock, m_graph.lastBlock(), callLinkStatus.function(),
        m_inlineStackTop->remapOperand(VirtualRegister(resultOperand)),
        (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
    
    // This is where the actual inlining really happens.
    unsigned oldIndex = m_currentIndex;
    m_currentIndex = 0;
    
    InlineVariableData inlineVariableData;
    inlineVariableData.inlineCallFrame = m_inlineStackTop->m_inlineCallFrame;
    inlineVariableData.argumentPositionStart = argumentPositionStart;
    inlineVariableData.calleeVariable = 0;
    
    RELEASE_ASSERT(
        m_inlineStackTop->m_inlineCallFrame->isClosureCall
        == callLinkStatus.isClosureCall());
    if (callLinkStatus.isClosureCall()) {
        VariableAccessData* calleeVariable =
            set(VirtualRegister(JSStack::Callee), callTargetNode, ImmediateNakedSet)->variableAccessData();
        VariableAccessData* scopeVariable =
            set(VirtualRegister(JSStack::ScopeChain), addToGraph(GetScope, callTargetNode), ImmediateNakedSet)->variableAccessData();
        
        calleeVariable->mergeShouldNeverUnbox(true);
        scopeVariable->mergeShouldNeverUnbox(true);
        
        inlineVariableData.calleeVariable = calleeVariable;
    }
    
    m_graph.m_inlineVariableData.append(inlineVariableData);
    
    parseCodeBlock();
    
    m_currentIndex = oldIndex;
    
    // If the inlined code created some new basic blocks, then we have linking to do.
    if (inlineStackEntry.m_callsiteBlockHead != m_graph.lastBlock()) {
        
        ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
        if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
            linkBlock(inlineStackEntry.m_callsiteBlockHead, inlineStackEntry.m_blockLinkingTargets);
        else
            ASSERT(inlineStackEntry.m_callsiteBlockHead->isLinked);
        
        // It's possible that the callsite block head is not owned by the caller.
        if (!inlineStackEntry.m_caller->m_unlinkedBlocks.isEmpty()) {
            // It's definitely owned by the caller, because the caller created new blocks.
            // Assert that this all adds up.
            ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_block == inlineStackEntry.m_callsiteBlockHead);
            ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking);
            inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking = false;
        } else {
            // It's definitely not owned by the caller. Tell the caller that it does not
            // need to link its callsite block head, because we did it for it.
            ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking);
            ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHead == inlineStackEntry.m_callsiteBlockHead);
            inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking = false;
        }
        
        linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
    } else
        ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
    
    BasicBlock* lastBlock = m_graph.lastBlock();
    // If there was a return, but no early returns, then we're done. We allow parsing of
    // the caller to continue in whatever basic block we're in right now.
    if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
        ASSERT(lastBlock->isEmpty() || !lastBlock->last()->isTerminal());
        
        // If we created new blocks then the last block needs linking, but in the
        // caller. It doesn't need to be linked to, but it needs outgoing links.
        if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
            // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
            // for release builds because this block will never serve as a potential target
            // in the linker's binary search.
            lastBlock->bytecodeBegin = m_currentIndex;
            m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.lastBlock()));
        }
        
        m_currentBlock = m_graph.lastBlock();
        return true;
    }
    
    // If we get to this point then all blocks must end in some sort of terminal.
    ASSERT(lastBlock->last()->isTerminal());
    
    // Need to create a new basic block for the continuation at the caller.
    RefPtr<BasicBlock> block = adoptRef(new BasicBlock(nextOffset, m_numArguments, m_numLocals, PNaN));
    
    // Link the early returns to the basic block we're about to create.
    for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
        if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
            continue;
        BasicBlock* blockToLink = inlineStackEntry.m_unlinkedBlocks[i].m_block;
        ASSERT(!blockToLink->isLinked);
        Node* node = blockToLink->last();
        ASSERT(node->op() == Jump);
        ASSERT(!node->targetBlock());
        node->targetBlock() = block.get();
        inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
#if !ASSERT_DISABLED
        blockToLink->isLinked = true;
#endif
    }
    
    m_currentBlock = block.get();
    ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_caller->m_blockLinkingTargets.last()->bytecodeBegin < nextOffset);
    m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(block.get()));
    m_inlineStackTop->m_caller->m_blockLinkingTargets.append(block.get());
    m_graph.appendBlock(block);
    prepareToParseBlock();
    
    // At this point we return and continue to generate code for the caller, but
    // in the new basic block.
    return true;
}

bool ByteCodeParser::handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis)
{
    if (argumentCountIncludingThis == 1) { // Math.min()
        set(VirtualRegister(resultOperand), constantNaN());
        return true;
    }
    
    if (argumentCountIncludingThis == 2) { // Math.min(x)
        Node* result = get(VirtualRegister(virtualRegisterForArgument(1, registerOffset)));
        addToGraph(Phantom, Edge(result, NumberUse));
        set(VirtualRegister(resultOperand), result);
        return true;
    }
    
    if (argumentCountIncludingThis == 3) { // Math.min(x, y)
        set(VirtualRegister(resultOperand), addToGraph(op, get(virtualRegisterForArgument(1, registerOffset)), get(virtualRegisterForArgument(2, registerOffset))));
        return true;
    }
    
    // Don't handle >=3 arguments for now.
    return false;
}
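
// In other words, taking Math.min as the example: zero arguments fold to NaN,
// one argument only needs a number check (the Phantom with a NumberUse edge),
// and two arguments become a single ArithMin/ArithMax node; longer argument
// lists fall back to the generic call.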

bool ByteCodeParser::handleIntrinsic(int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction)
{
    switch (intrinsic) {
    case AbsIntrinsic: {
        if (argumentCountIncludingThis == 1) { // Math.abs()
            set(VirtualRegister(resultOperand), constantNaN());
            return true;
        }
        
        if (!MacroAssembler::supportsFloatingPointAbs())
            return false;
        
        Node* node = addToGraph(ArithAbs, get(virtualRegisterForArgument(1, registerOffset)));
        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
            node->mergeFlags(NodeMayOverflowInDFG);
        set(VirtualRegister(resultOperand), node);
        return true;
    }
    
    case MinIntrinsic:
        return handleMinMax(resultOperand, ArithMin, registerOffset, argumentCountIncludingThis);
        
    case MaxIntrinsic:
        return handleMinMax(resultOperand, ArithMax, registerOffset, argumentCountIncludingThis);
    
    case SqrtIntrinsic:
    case CosIntrinsic:
    case SinIntrinsic: {
        if (argumentCountIncludingThis == 1) {
            set(VirtualRegister(resultOperand), constantNaN());
            return true;
        }
        
        switch (intrinsic) {
        case SqrtIntrinsic:
            if (!MacroAssembler::supportsFloatingPointSqrt())
                return false;
            
            set(VirtualRegister(resultOperand), addToGraph(ArithSqrt, get(virtualRegisterForArgument(1, registerOffset))));
            return true;
            
        case CosIntrinsic:
            set(VirtualRegister(resultOperand), addToGraph(ArithCos, get(virtualRegisterForArgument(1, registerOffset))));
            return true;
            
        case SinIntrinsic:
            set(VirtualRegister(resultOperand), addToGraph(ArithSin, get(virtualRegisterForArgument(1, registerOffset))));
            return true;
            
        default:
            RELEASE_ASSERT_NOT_REACHED();
            return false;
        }
    }
    
    case ArrayPushIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        
        ArrayMode arrayMode = getArrayMode(m_currentInstruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile);
        if (!arrayMode.isJSArray())
            return false;
        switch (arrayMode.type()) {
        case Array::Undecided:
        case Array::Int32:
        case Array::Double:
        case Array::Contiguous:
        case Array::ArrayStorage: {
            Node* arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
            set(VirtualRegister(resultOperand), arrayPush);
            
            return true;
        }
            
        default:
            return false;
        }
    }
    
    case ArrayPopIntrinsic: {
        if (argumentCountIncludingThis != 1)
            return false;
        
        ArrayMode arrayMode = getArrayMode(m_currentInstruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile);
        if (!arrayMode.isJSArray())
            return false;
        switch (arrayMode.type()) {
        case Array::Int32:
        case Array::Double:
        case Array::Contiguous:
        case Array::ArrayStorage: {
            Node* arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)));
            set(VirtualRegister(resultOperand), arrayPop);
            return true;
        }
            
        default:
            return false;
        }
    }
    
    case CharCodeAtIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        
        VirtualRegister thisOperand = virtualRegisterForArgument(0, registerOffset);
        VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
        Node* charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), get(indexOperand));
        
        set(VirtualRegister(resultOperand), charCode);
        return true;
    }
    
    case CharAtIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        
        VirtualRegister thisOperand = virtualRegisterForArgument(0, registerOffset);
        VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
        Node* charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), get(indexOperand));
        
        set(VirtualRegister(resultOperand), charCode);
        return true;
    }
    case FromCharCodeIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        
        VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
        Node* charCode = addToGraph(StringFromCharCode, get(indexOperand));
        
        set(VirtualRegister(resultOperand), charCode);
        
        return true;
    }
    
    case RegExpExecIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        
        Node* regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
        set(VirtualRegister(resultOperand), regExpExec);
        
        return true;
    }
    
    case RegExpTestIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        
        Node* regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
        set(VirtualRegister(resultOperand), regExpExec);
        
        return true;
    }
    
    case IMulIntrinsic: {
        if (argumentCountIncludingThis != 3)
            return false;
        VirtualRegister leftOperand = virtualRegisterForArgument(1, registerOffset);
        VirtualRegister rightOperand = virtualRegisterForArgument(2, registerOffset);
        Node* left = get(leftOperand);
        Node* right = get(rightOperand);
        set(VirtualRegister(resultOperand), addToGraph(ArithIMul, left, right));
        return true;
    }
    
    case FRoundIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        VirtualRegister operand = virtualRegisterForArgument(1, registerOffset);
        set(VirtualRegister(resultOperand), addToGraph(ArithFRound, get(operand)));
        return true;
    }
    
    case DFGTrueIntrinsic: {
        set(VirtualRegister(resultOperand), getJSConstantForValue(jsBoolean(true)));
        return true;
    }
    
    case OSRExitIntrinsic: {
        addToGraph(ForceOSRExit);
        set(VirtualRegister(resultOperand), constantUndefined());
        return true;
    }
    
    case IsFinalTierIntrinsic: {
        set(VirtualRegister(resultOperand),
            getJSConstantForValue(jsBoolean(Options::useFTLJIT() ? isFTL(m_graph.m_plan.mode) : true)));
        return true;
    }
    
    case SetInt32HeapPredictionIntrinsic: {
        for (int i = 1; i < argumentCountIncludingThis; ++i) {
            Node* node = get(virtualRegisterForArgument(i, registerOffset));
            if (node->hasHeapPrediction())
                node->setHeapPrediction(SpecInt32);
        }
        set(VirtualRegister(resultOperand), constantUndefined());
        return true;
    }
    
    case FiatInt52Intrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        VirtualRegister operand = virtualRegisterForArgument(1, registerOffset);
        if (enableInt52())
            set(VirtualRegister(resultOperand), addToGraph(FiatInt52, get(operand)));
        else
            set(VirtualRegister(resultOperand), get(operand));
        return true;
    }
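    // (FiatInt52 above is only interesting when Int52 is supported; on other
    // platforms the intrinsic degenerates to a plain pass-through of its
    // argument, so it is always safe to emit.)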
        
    default:
        return false;
    }
}

bool ByteCodeParser::handleTypedArrayConstructor(
    int resultOperand, InternalFunction* function, int registerOffset,
    int argumentCountIncludingThis, TypedArrayType type)
{
    if (!isTypedView(type))
        return false;
    
    if (function->classInfo() != constructorClassInfoForType(type))
        return false;
    
    if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
        return false;
    
    // We only have an intrinsic for the case where you say:
    //
    // new FooArray(blah);
    //
    // Of course, 'blah' could be any of the following:
    //
    // - Integer, indicating that you want to allocate an array of that length.
    //   This is the thing we're hoping for, and what we can actually do meaningful
    //   optimizations for.
    //
    // - Array buffer, indicating that you want to create a view onto that _entire_
    //   buffer.
    //
    // - Non-buffer object, indicating that you want to create a copy of that
    //   object by pretending that it quacks like an array.
    //
    // - Anything else, indicating that you want to have an exception thrown at
    //   you.
    //
    // The intrinsic, NewTypedArray, will behave as if it could do any of these
    // things up until we do Fixup. Thereafter, if child1 (i.e. 'blah') is
    // predicted Int32, then we lock it in as a normal typed array allocation.
    // Otherwise, NewTypedArray turns into a totally opaque function call that
    // may clobber the world - by virtue of it accessing properties on what could
    // be an object.
    //
    // Note that although the generic form of NewTypedArray sounds sort of awful,
    // it is actually quite likely to be more efficient than a fully generic
    // Construct. So, we might want to think about making NewTypedArray variadic,
    // or else making Construct not super slow.
    
    if (argumentCountIncludingThis != 2)
        return false;
    
    set(VirtualRegister(resultOperand),
        addToGraph(NewTypedArray, OpInfo(type), get(virtualRegisterForArgument(1, registerOffset))));
    return true;
}
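
// Concretely, in illustrative JS (not from the original source):
//
//     new Int8Array(100);         // length: the case we can optimize
//     new Int8Array(someBuffer);  // view onto an entire ArrayBuffer
//     new Int8Array(someObject);  // copy of an array-like object
//
// All of these shapes flow through the same NewTypedArray node until Fixup
// commits to one of them (or leaves it as the opaque generic form).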

bool ByteCodeParser::handleConstantInternalFunction(
    int resultOperand, InternalFunction* function, int registerOffset,
    int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind kind)
{
    // If we ever find that we have a lot of internal functions that we specialize for,
    // then we should probably have some sort of hashtable dispatch, or maybe even
    // dispatch straight through the MethodTable of the InternalFunction. But for now,
    // it seems that this case is hit infrequently enough, and the number of functions
    // we know about is small enough, that having just a linear cascade of if statements
    // is good enough.
    
    UNUSED_PARAM(prediction); // Remove this once we do more things.
    
    if (function->classInfo() == ArrayConstructor::info()) {
        if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
            return false;
        
        if (argumentCountIncludingThis == 2) {
            set(VirtualRegister(resultOperand),
                addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(virtualRegisterForArgument(1, registerOffset))));
            return true;
        }
        
        for (int i = 1; i < argumentCountIncludingThis; ++i)
            addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));
        set(VirtualRegister(resultOperand),
            addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
        return true;
    }
    
    if (function->classInfo() == StringConstructor::info()) {
        Node* result;
        
        if (argumentCountIncludingThis <= 1)
            result = cellConstant(m_vm->smallStrings.emptyString());
        else
            result = addToGraph(ToString, get(virtualRegisterForArgument(1, registerOffset)));
        
        if (kind == CodeForConstruct)
            result = addToGraph(NewStringObject, OpInfo(function->globalObject()->stringObjectStructure()), result);
        
        set(VirtualRegister(resultOperand), result);
        return true;
    }
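    
    // (So String(x) compiles to a ToString node, while new String(x) additionally
    // wraps the result in a NewStringObject.)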
    
    for (unsigned typeIndex = 0; typeIndex < NUMBER_OF_TYPED_ARRAY_TYPES; ++typeIndex) {
        bool result = handleTypedArrayConstructor(
            resultOperand, function, registerOffset, argumentCountIncludingThis,
            indexToTypedArrayType(typeIndex));
        if (result)
            return true;
    }
    
    return false;
}

Node* ByteCodeParser::handleGetByOffset(SpeculatedType prediction, Node* base, unsigned identifierNumber, PropertyOffset offset)
{
    Node* propertyStorage;
    if (isInlineOffset(offset))
        propertyStorage = base;
    else
        propertyStorage = addToGraph(GetButterfly, base);
    Node* getByOffset = addToGraph(GetByOffset, OpInfo(m_graph.m_storageAccessData.size()), OpInfo(prediction), propertyStorage, base);
    
    StorageAccessData storageAccessData;
    storageAccessData.offset = offset;
    storageAccessData.identifierNumber = identifierNumber;
    m_graph.m_storageAccessData.append(storageAccessData);
    
    return getByOffset;
}
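
// Inline offsets live in the object cell itself, so the base node doubles as the
// property storage; out-of-line offsets live in the butterfly, which has to be
// loaded first via GetButterfly. handlePutByOffset below makes the same split.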

void ByteCodeParser::handleGetByOffset(
    int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
    PropertyOffset offset)
{
    set(VirtualRegister(destinationOperand), handleGetByOffset(prediction, base, identifierNumber, offset));
}

Node* ByteCodeParser::handlePutByOffset(Node* base, unsigned identifier, PropertyOffset offset, Node* value)
{
    Node* propertyStorage;
    if (isInlineOffset(offset))
        propertyStorage = base;
    else
        propertyStorage = addToGraph(GetButterfly, base);
    Node* result = addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, base, value);
    
    StorageAccessData storageAccessData;
    storageAccessData.offset = offset;
    storageAccessData.identifierNumber = identifier;
    m_graph.m_storageAccessData.append(storageAccessData);
    
    return result;
}

Node* ByteCodeParser::emitPrototypeChecks(
    Structure* structure, IntendedStructureChain* chain)
{
    Node* base = 0;
    m_graph.chains().addLazily(chain);
    Structure* currentStructure = structure;
    JSObject* currentObject = 0;
    for (unsigned i = 0; i < chain->size(); ++i) {
        currentObject = asObject(currentStructure->prototypeForLookup(m_inlineStackTop->m_codeBlock));
        currentStructure = chain->at(i);
        base = cellConstantWithStructureCheck(currentObject, currentStructure);
    }
    return base;
}
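
// This walks the prototype chain implied by `chain`, planting a structure check
// against each prototype object along the way, and returns the last prototype
// visited, i.e. the object that the cached access actually reads from.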

void ByteCodeParser::handleGetById(
    int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
    const GetByIdStatus& getByIdStatus)
{
    if (!getByIdStatus.isSimple() || !Options::enableAccessInlining()) {
        set(VirtualRegister(destinationOperand),
            addToGraph(
                getByIdStatus.makesCalls() ? GetByIdFlush : GetById,
                OpInfo(identifierNumber), OpInfo(prediction), base));
        return;
    }
    
    if (getByIdStatus.numVariants() > 1) {
        if (!isFTL(m_graph.m_plan.mode) || !Options::enablePolymorphicAccessInlining()) {
            set(VirtualRegister(destinationOperand),
                addToGraph(GetById, OpInfo(identifierNumber), OpInfo(prediction), base));
            return;
        }
        
        if (m_graph.compilation())
            m_graph.compilation()->noticeInlinedGetById();
        
        // 1) Emit prototype structure checks for all chains. This could sort of maybe not be
        //    optimal, if there is some rarely executed case in the chain that requires a lot
        //    of checks and those checks are not watchpointable.
        for (unsigned variantIndex = getByIdStatus.numVariants(); variantIndex--;) {
            if (getByIdStatus[variantIndex].chain()) {
                emitPrototypeChecks(
                    getByIdStatus[variantIndex].structureSet().singletonStructure(),
                    getByIdStatus[variantIndex].chain());
            }
        }
        
        // 2) Emit a MultiGetByOffset
        MultiGetByOffsetData* data = m_graph.m_multiGetByOffsetData.add();
        data->variants = getByIdStatus.variants();
        data->identifierNumber = identifierNumber;
        set(VirtualRegister(destinationOperand),
            addToGraph(MultiGetByOffset, OpInfo(data), OpInfo(prediction), base));
        return;
    }
    
    ASSERT(getByIdStatus.numVariants() == 1);
    GetByIdVariant variant = getByIdStatus[0];
    
    if (m_graph.compilation())
        m_graph.compilation()->noticeInlinedGetById();
    
    Node* originalBaseForBaselineJIT = base;
    
    addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(variant.structureSet())), base);
    
    if (variant.chain()) {
        base = emitPrototypeChecks(
            variant.structureSet().singletonStructure(), variant.chain());
    }
    
    // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
    // ensure that the base of the original get_by_id is kept alive until we're done with
    // all of the speculations. We only insert the Phantom if there had been a CheckStructure
    // on something other than the base following the CheckStructure on base, or if the
    // access was compiled to a WeakJSConstant specific value, in which case we might not
    // have any explicit use of the base at all.
    if (variant.specificValue() || originalBaseForBaselineJIT != base)
        addToGraph(Phantom, originalBaseForBaselineJIT);
    
    if (variant.specificValue()) {
        ASSERT(variant.specificValue().isCell());
        
        set(VirtualRegister(destinationOperand), cellConstant(variant.specificValue().asCell()));
        return;
    }
    
    handleGetByOffset(
        destinationOperand, prediction, base, identifierNumber, variant.offset());
}
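
// The monomorphic fast path therefore boils down to: a CheckStructure on the
// base, optional prototype-chain checks, and then either a constant (when the
// access resolved to a cached specific value) or a plain GetByOffset load.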

void ByteCodeParser::emitPutById(
    Node* base, unsigned identifierNumber, Node* value, const PutByIdStatus& putByIdStatus, bool isDirect)
{
    if (isDirect)
        addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
    else
        addToGraph(putByIdStatus.makesCalls() ? PutByIdFlush : PutById, OpInfo(identifierNumber), base, value);
}

void ByteCodeParser::handlePutById(
    Node* base, unsigned identifierNumber, Node* value,
    const PutByIdStatus& putByIdStatus, bool isDirect)
{
    if (!putByIdStatus.isSimple() || !Options::enableAccessInlining()) {
        if (!putByIdStatus.isSet())
            addToGraph(ForceOSRExit);
        emitPutById(base, identifierNumber, value, putByIdStatus, isDirect);
        return;
    }
    
    if (putByIdStatus.numVariants() > 1) {
        if (!isFTL(m_graph.m_plan.mode) || putByIdStatus.makesCalls()
            || !Options::enablePolymorphicAccessInlining()) {
            emitPutById(base, identifierNumber, value, putByIdStatus, isDirect);
            return;
        }
        
        if (m_graph.compilation())
            m_graph.compilation()->noticeInlinedPutById();
        
        if (!isDirect) {
            for (unsigned variantIndex = putByIdStatus.numVariants(); variantIndex--;) {
                if (putByIdStatus[variantIndex].kind() != PutByIdVariant::Transition)
                    continue;
                if (!putByIdStatus[variantIndex].structureChain())
                    continue;
                emitPrototypeChecks(
                    putByIdStatus[variantIndex].oldStructure(),
                    putByIdStatus[variantIndex].structureChain());
            }
        }
        
        MultiPutByOffsetData* data = m_graph.m_multiPutByOffsetData.add();
        data->variants = putByIdStatus.variants();
        data->identifierNumber = identifierNumber;
        addToGraph(MultiPutByOffset, OpInfo(data), base, value);
        return;
    }
    
    ASSERT(putByIdStatus.numVariants() == 1);
    const PutByIdVariant& variant = putByIdStatus[0];
    
    if (variant.kind() == PutByIdVariant::Replace) {
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(variant.structure())), base);
        handlePutByOffset(base, identifierNumber, variant.offset(), value);
        if (m_graph.compilation())
            m_graph.compilation()->noticeInlinedPutById();
        return;
    }
    
    if (variant.kind() != PutByIdVariant::Transition) {
        emitPutById(base, identifierNumber, value, putByIdStatus, isDirect);
        return;
    }
    
    if (variant.structureChain() && !variant.structureChain()->isStillValid()) {
        emitPutById(base, identifierNumber, value, putByIdStatus, isDirect);
        return;
    }
    
    m_graph.chains().addLazily(variant.structureChain());
    
    addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(variant.oldStructure())), base);
    if (!isDirect)
        emitPrototypeChecks(variant.oldStructure(), variant.structureChain());
    
    ASSERT(variant.oldStructure()->transitionWatchpointSetHasBeenInvalidated());
    
    Node* propertyStorage;
    StructureTransitionData* transitionData = m_graph.addStructureTransitionData(
        StructureTransitionData(variant.oldStructure(), variant.newStructure()));
    
    if (variant.oldStructure()->outOfLineCapacity()
        != variant.newStructure()->outOfLineCapacity()) {
        
        // If we're growing the property storage then it must be because we're
        // storing into the out-of-line storage.
        ASSERT(!isInlineOffset(variant.offset()));
        
        if (!variant.oldStructure()->outOfLineCapacity()) {
            propertyStorage = addToGraph(
                AllocatePropertyStorage, OpInfo(transitionData), base);
        } else {
            propertyStorage = addToGraph(
                ReallocatePropertyStorage, OpInfo(transitionData),
                base, addToGraph(GetButterfly, base));
        }
    } else {
        if (isInlineOffset(variant.offset()))
            propertyStorage = base;
        else
            propertyStorage = addToGraph(GetButterfly, base);
    }
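    
    // In other words: a transition that grows out-of-line capacity allocates a
    // fresh butterfly if the old structure had none, and otherwise reallocates
    // the existing one, which must be loaded first so its contents can be copied.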
    
    addToGraph(PutStructure, OpInfo(transitionData), base);
    
    addToGraph(
        PutByOffset,
        OpInfo(m_graph.m_storageAccessData.size()),
        propertyStorage,
        base,
        value);
    
    StorageAccessData storageAccessData;
    storageAccessData.offset = variant.offset();
    storageAccessData.identifierNumber = identifierNumber;
    m_graph.m_storageAccessData.append(storageAccessData);
    
    if (m_graph.compilation())
        m_graph.compilation()->noticeInlinedPutById();
}

void ByteCodeParser::prepareToParseBlock()
{
    for (unsigned i = 0; i < m_constants.size(); ++i)
        m_constants[i] = ConstantRecord();
    m_cellConstantNodes.clear();
}

Node* ByteCodeParser::getScope(bool skipTop, unsigned skipCount)
{
    Node* localBase = get(VirtualRegister(JSStack::ScopeChain));
    if (skipTop) {
        ASSERT(!inlineCallFrame());
        localBase = addToGraph(SkipTopScope, localBase);
    }
    for (unsigned n = skipCount; n--;)
        localBase = addToGraph(SkipScope, localBase);
    return localBase;
}
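
// For example, getScope(false, 2) reads the scope chain register and then hops
// two scopes outward, producing SkipScope(SkipScope(scope)).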

bool ByteCodeParser::parseBlock(unsigned limit)
{
    bool shouldContinueParsing = true;
    
    Interpreter* interpreter = m_vm->interpreter;
    Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
    unsigned blockBegin = m_currentIndex;
    
    // If we are the first basic block, introduce markers for arguments. This allows
    // us to track if a use of an argument may use the actual argument passed, as
    // opposed to using a value we set explicitly.
    if (m_currentBlock == m_graph.block(0) && !inlineCallFrame()) {
        m_graph.m_arguments.resize(m_numArguments);
        for (unsigned argument = 0; argument < m_numArguments; ++argument) {
            VariableAccessData* variable = newVariableAccessData(
                virtualRegisterForArgument(argument), m_codeBlock->isCaptured(virtualRegisterForArgument(argument)));
            variable->mergeStructureCheckHoistingFailed(
                m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCacheWatchpoint));
            variable->mergeCheckArrayHoistingFailed(
                m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
            
            Node* setArgument = addToGraph(SetArgument, OpInfo(variable));
            m_graph.m_arguments[argument] = setArgument;
            m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
        }
    }
    
    while (true) {
        for (unsigned i = 0; i < m_setLocalQueue.size(); ++i)
            m_setLocalQueue[i].execute(this);
        m_setLocalQueue.resize(0);
        
        // Don't extend over jump destinations.
        if (m_currentIndex == limit) {
            // Ordinarily we want to plant a jump. But refuse to do this if the block is
            // empty. This is a special case for inlining, which might otherwise create
            // some empty blocks in some cases. When parseBlock() returns with an empty
            // block, it will get repurposed instead of creating a new one. Note that this
            // logic relies on every bytecode resulting in one or more nodes, which would
            // be true anyway except for op_loop_hint, which emits a Phantom to force this
            // to be true.
            if (!m_currentBlock->isEmpty())
                addToGraph(Jump, OpInfo(m_currentIndex));
            return shouldContinueParsing;
        }
        
        // Switch on the current bytecode opcode.
        Instruction* currentInstruction = instructionsBegin + m_currentIndex;
        m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
        OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
        
        if (Options::verboseDFGByteCodeParsing())
            dataLog("    parsing ", currentCodeOrigin(), "\n");
        
        if (m_graph.compilation()) {
            addToGraph(CountExecution, OpInfo(m_graph.compilation()->executionCounterFor(
                Profiler::OriginStack(*m_vm->m_perBytecodeProfiler, m_codeBlock, currentCodeOrigin()))));
        }
        
        switch (opcodeID) {

        // === Function entry opcodes ===

        case op_enter:
            // Initialize all locals to undefined.
            for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
                set(virtualRegisterForLocal(i), constantUndefined(), ImmediateNakedSet);
            if (m_inlineStackTop->m_codeBlock->specializationKind() == CodeForConstruct)
                set(virtualRegisterForArgument(0), constantUndefined(), ImmediateNakedSet);
            NEXT_OPCODE(op_enter);
            
        case op_touch_entry:
            if (m_inlineStackTop->m_codeBlock->symbolTable()->m_functionEnteredOnce.isStillValid())
                addToGraph(ForceOSRExit);
            NEXT_OPCODE(op_touch_entry);
            
        case op_to_this: {
            Node* op1 = getThis();
            if (op1->op() != ToThis) {
                Structure* cachedStructure = currentInstruction[2].u.structure.get();
                if (!cachedStructure
                    || cachedStructure->classInfo()->methodTable.toThis != JSObject::info()->methodTable.toThis
                    || m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
                    || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
                    || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCacheWatchpoint)
                    || (op1->op() == GetLocal && op1->variableAccessData()->structureCheckHoistingFailed())) {
                    setThis(addToGraph(ToThis, op1));
                } else {
                    addToGraph(
                        CheckStructure,
                        OpInfo(m_graph.addStructureSet(cachedStructure)),
                        op1);
                }
            }
            NEXT_OPCODE(op_to_this);
        }

        case op_create_this: {
            int calleeOperand = currentInstruction[2].u.operand;
            Node* callee = get(VirtualRegister(calleeOperand));
            bool alreadyEmitted = false;
            if (callee->op() == WeakJSConstant) {
                JSCell* cell = callee->weakConstant();
                ASSERT(cell->inherits(JSFunction::info()));
                
                JSFunction* function = jsCast<JSFunction*>(cell);
                if (Structure* structure = function->allocationStructure()) {
                    addToGraph(AllocationProfileWatchpoint, OpInfo(function));
                    // The callee is still live up to this point.
                    addToGraph(Phantom, callee);
                    set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewObject, OpInfo(structure)));
                    alreadyEmitted = true;
                }
            }
            if (!alreadyEmitted) {
                set(VirtualRegister(currentInstruction[1].u.operand),
                    addToGraph(CreateThis, OpInfo(currentInstruction[3].u.operand), callee));
            }
            NEXT_OPCODE(op_create_this);
        }
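
        // (When the callee is a compile-time-known function with a cached
        // allocation structure, the `this` object is allocated directly with
        // NewObject, guarded by an AllocationProfileWatchpoint; otherwise we
        // fall back to the generic CreateThis.)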

        case op_new_object: {
            set(VirtualRegister(currentInstruction[1].u.operand),
                addToGraph(NewObject,
                    OpInfo(currentInstruction[3].u.objectAllocationProfile->structure())));
            NEXT_OPCODE(op_new_object);
        }
        
        case op_new_array: {
            int startOperand = currentInstruction[2].u.operand;
            int numOperands = currentInstruction[3].u.operand;
            ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
            for (int operandIdx = startOperand; operandIdx > startOperand - numOperands; --operandIdx)
                addVarArgChild(get(VirtualRegister(operandIdx)));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
            NEXT_OPCODE(op_new_array);
        }
        
        case op_new_array_with_size: {
            int lengthOperand = currentInstruction[2].u.operand;
            ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(VirtualRegister(lengthOperand))));
            NEXT_OPCODE(op_new_array_with_size);
        }
        
        case op_new_array_buffer: {
            int startConstant = currentInstruction[2].u.operand;
            int numConstants = currentInstruction[3].u.operand;
            ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
            NewArrayBufferData data;
            data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
            data.numConstants = numConstants;
            data.indexingType = profile->selectIndexingType();

            // If this statement has never executed, we'll have the wrong indexing type in the profile.
            for (int i = 0; i < numConstants; ++i) {
                data.indexingType =
                    leastUpperBoundOfIndexingTypeAndValue(
                        data.indexingType,
                        m_codeBlock->constantBuffer(data.startConstant)[i]);
            }
            
            m_graph.m_newArrayBufferData.append(data);
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
            NEXT_OPCODE(op_new_array_buffer);
        }
        
        case op_new_regexp: {
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
            NEXT_OPCODE(op_new_regexp);
        }
        
        case op_get_callee: {
            JSCell* cachedFunction = currentInstruction[2].u.jsCell.get();
            if (!cachedFunction
                || m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadFunction)) {
                set(VirtualRegister(currentInstruction[1].u.operand), get(VirtualRegister(JSStack::Callee)));
            } else {
                ASSERT(cachedFunction->inherits(JSFunction::info()));
                Node* actualCallee = get(VirtualRegister(JSStack::Callee));
                addToGraph(CheckFunction, OpInfo(cachedFunction), actualCallee);
                set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(WeakJSConstant, OpInfo(cachedFunction)));
            }
            NEXT_OPCODE(op_get_callee);
        }
2337
14957cd0
A
2338 // === Bitwise operations ===
2339
2340 case op_bitand: {
81345200
A
2341 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2342 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2343 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitAnd, op1, op2));
14957cd0
A
2344 NEXT_OPCODE(op_bitand);
2345 }
2346
2347 case op_bitor: {
81345200
A
2348 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2349 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2350 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitOr, op1, op2));
14957cd0
A
2351 NEXT_OPCODE(op_bitor);
2352 }
2353
2354 case op_bitxor: {
81345200
A
2355 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2356 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2357 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitXor, op1, op2));
14957cd0
A
2358 NEXT_OPCODE(op_bitxor);
2359 }
2360
2361 case op_rshift: {
81345200
A
2362 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2363 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2364 set(VirtualRegister(currentInstruction[1].u.operand),
2365 addToGraph(BitRShift, op1, op2));
14957cd0
A
2366 NEXT_OPCODE(op_rshift);
2367 }
2368
2369 case op_lshift: {
81345200
A
2370 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2371 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2372 set(VirtualRegister(currentInstruction[1].u.operand),
2373 addToGraph(BitLShift, op1, op2));
14957cd0
A
2374 NEXT_OPCODE(op_lshift);
2375 }
2376
2377 case op_urshift: {
81345200
A
2378 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2379 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2380 set(VirtualRegister(currentInstruction[1].u.operand),
2381 addToGraph(BitURShift, op1, op2));
14957cd0
A
2382 NEXT_OPCODE(op_urshift);
2383 }
81345200
A
2384
2385 case op_unsigned: {
2386 set(VirtualRegister(currentInstruction[1].u.operand),
2387 makeSafe(addToGraph(UInt32ToNumber, get(VirtualRegister(currentInstruction[2].u.operand)))));
2388 NEXT_OPCODE(op_unsigned);
2389 }
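
        // (op_unsigned reinterprets an int32 shift result as an unsigned value;
        // for example, x >>> 0 on a negative x yields a large positive number
        // that only fits in a double. makeSafe consults profiling to decide
        // whether UInt32ToNumber needs that overflow-to-double path.)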

        // === Increment/Decrement opcodes ===
        
        case op_inc: {
            int srcDst = currentInstruction[1].u.operand;
            VirtualRegister srcDstVirtualRegister = VirtualRegister(srcDst);
            Node* op = get(srcDstVirtualRegister);
            set(srcDstVirtualRegister, makeSafe(addToGraph(ArithAdd, op, one())));
            NEXT_OPCODE(op_inc);
        }
        
        case op_dec: {
            int srcDst = currentInstruction[1].u.operand;
            VirtualRegister srcDstVirtualRegister = VirtualRegister(srcDst);
            Node* op = get(srcDstVirtualRegister);
            set(srcDstVirtualRegister, makeSafe(addToGraph(ArithSub, op, one())));
            NEXT_OPCODE(op_dec);
        }
        
        // === Arithmetic operations ===
        
        case op_add: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            if (op1->hasNumberResult() && op2->hasNumberResult())
                set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithAdd, op1, op2)));
            else
                set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ValueAdd, op1, op2)));
            NEXT_OPCODE(op_add);
        }
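
        // (ArithAdd applies only when both inputs are statically known to produce
        // numbers; ValueAdd also has to handle string concatenation and other
        // non-numeric cases, so it is the conservative fallback.)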
        
        case op_sub: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithSub, op1, op2)));
            NEXT_OPCODE(op_sub);
        }
        
        case op_negate: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithNegate, op1)));
            NEXT_OPCODE(op_negate);
        }
        
        case op_mul: {
            // Multiply requires that the inputs are not truncated, unfortunately.
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithMul, op1, op2)));
            NEXT_OPCODE(op_mul);
        }
        
        case op_mod: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithMod, op1, op2)));
            NEXT_OPCODE(op_mod);
        }
        
        case op_div: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), makeDivSafe(addToGraph(ArithDiv, op1, op2)));
            NEXT_OPCODE(op_div);
        }
        
        // === Misc operations ===
        
        case op_debug:
            addToGraph(Breakpoint);
            NEXT_OPCODE(op_debug);
            
        case op_profile_will_call: {
            addToGraph(ProfileWillCall);
            NEXT_OPCODE(op_profile_will_call);
        }
        
        case op_profile_did_call: {
            addToGraph(ProfileDidCall);
            NEXT_OPCODE(op_profile_did_call);
        }
        
        case op_mov: {
            Node* op = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), op);
            NEXT_OPCODE(op_mov);
        }
        
        case op_captured_mov: {
            Node* op = get(VirtualRegister(currentInstruction[2].u.operand));
            if (VariableWatchpointSet* set = currentInstruction[3].u.watchpointSet) {
                if (set->state() != IsInvalidated)
                    addToGraph(NotifyWrite, OpInfo(set), op);
            }
            set(VirtualRegister(currentInstruction[1].u.operand), op);
            NEXT_OPCODE(op_captured_mov);
        }
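
        // (A captured mov stores to a variable that closures can observe. NotifyWrite
        // fires the associated watchpoint set, unless it is already invalidated, so
        // that code specialized on the old value can be discarded.)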

        case op_check_has_instance:
            addToGraph(CheckHasInstance, get(VirtualRegister(currentInstruction[3].u.operand)));
            NEXT_OPCODE(op_check_has_instance);

        case op_instanceof: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* prototype = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(InstanceOf, value, prototype));
            NEXT_OPCODE(op_instanceof);
        }
        
        case op_is_undefined: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsUndefined, value));
            NEXT_OPCODE(op_is_undefined);
        }
        
        case op_is_boolean: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsBoolean, value));
            NEXT_OPCODE(op_is_boolean);
        }
        
        case op_is_number: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsNumber, value));
            NEXT_OPCODE(op_is_number);
        }
        
        case op_is_string: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsString, value));
            NEXT_OPCODE(op_is_string);
        }
        
        case op_is_object: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsObject, value));
            NEXT_OPCODE(op_is_object);
        }
        
        case op_is_function: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsFunction, value));
            NEXT_OPCODE(op_is_function);
        }

        case op_not: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, value));
            NEXT_OPCODE(op_not);
        }
        
        case op_to_primitive: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(ToPrimitive, value));
            NEXT_OPCODE(op_to_primitive);
        }
        
        case op_strcat: {
            int startOperand = currentInstruction[2].u.operand;
            int numOperands = currentInstruction[3].u.operand;
#if CPU(X86)
            // X86 doesn't have enough registers to compile MakeRope with three arguments.
            // Rather than try to be clever, we just make MakeRope dumber on this processor.
            const unsigned maxRopeArguments = 2;
#else
            const unsigned maxRopeArguments = 3;
#endif
            auto toStringNodes = std::make_unique<Node*[]>(numOperands);
            for (int i = 0; i < numOperands; i++)
                toStringNodes[i] = addToGraph(ToString, get(VirtualRegister(startOperand - i)));
            
            for (int i = 0; i < numOperands; i++)
                addToGraph(Phantom, toStringNodes[i]);
            
            Node* operands[AdjacencyList::Size];
            unsigned indexInOperands = 0;
            for (unsigned i = 0; i < AdjacencyList::Size; ++i)
                operands[i] = 0;
            for (int operandIdx = 0; operandIdx < numOperands; ++operandIdx) {
                if (indexInOperands == maxRopeArguments) {
                    operands[0] = addToGraph(MakeRope, operands[0], operands[1], operands[2]);
                    for (unsigned i = 1; i < AdjacencyList::Size; ++i)
                        operands[i] = 0;
                    indexInOperands = 1;
                }
                
                ASSERT(indexInOperands < AdjacencyList::Size);
                ASSERT(indexInOperands < maxRopeArguments);
                operands[indexInOperands++] = toStringNodes[operandIdx];
            }
            set(VirtualRegister(currentInstruction[1].u.operand),
                addToGraph(MakeRope, operands[0], operands[1], operands[2]));
            NEXT_OPCODE(op_strcat);
        }
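
        // (So with maxRopeArguments == 3, a five-way concatenation a + b + c + d + e
        // folds left into MakeRope(MakeRope(a, b, c), d, e).)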

        case op_less: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareLess, op1, op2));
            NEXT_OPCODE(op_less);
        }
        
        case op_lesseq: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareLessEq, op1, op2));
            NEXT_OPCODE(op_lesseq);
        }
        
        case op_greater: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareGreater, op1, op2));
            NEXT_OPCODE(op_greater);
        }
        
        case op_greatereq: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareGreaterEq, op1, op2));
            NEXT_OPCODE(op_greatereq);
        }
        
        case op_eq: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareEq, op1, op2));
            NEXT_OPCODE(op_eq);
        }
        
        case op_eq_null: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareEqConstant, value, constantNull()));
            NEXT_OPCODE(op_eq_null);
        }
        
        case op_stricteq: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareStrictEq, op1, op2));
            NEXT_OPCODE(op_stricteq);
        }
        
        case op_neq: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
            NEXT_OPCODE(op_neq);
        }
        
        case op_neq_null: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, addToGraph(CompareEqConstant, value, constantNull())));
            NEXT_OPCODE(op_neq_null);
        }
        
        case op_nstricteq: {
            Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
            Node* invertedResult;
            invertedResult = addToGraph(CompareStrictEq, op1, op2);
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, invertedResult));
            NEXT_OPCODE(op_nstricteq);
        }
        
        // === Property access operations ===
        
        case op_get_by_val: {
            SpeculatedType prediction = getPrediction();
            
            Node* base = get(VirtualRegister(currentInstruction[2].u.operand));
            ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Read);
            Node* property = get(VirtualRegister(currentInstruction[3].u.operand));
            Node* getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
            set(VirtualRegister(currentInstruction[1].u.operand), getByVal);
            
            NEXT_OPCODE(op_get_by_val);
        }
        
        case op_put_by_val_direct:
        case op_put_by_val: {
            Node* base = get(VirtualRegister(currentInstruction[1].u.operand));
            
            ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Write);
            
            Node* property = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* value = get(VirtualRegister(currentInstruction[3].u.operand));
            
            addVarArgChild(base);
            addVarArgChild(property);
            addVarArgChild(value);
            addVarArgChild(0); // Leave room for property storage.
            addVarArgChild(0); // Leave room for length.
            addToGraph(Node::VarArg, opcodeID == op_put_by_val_direct ? PutByValDirect : PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));
            
            NEXT_OPCODE(op_put_by_val);
        }
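
        // (PutByVal takes its operands as vararg children: base, property, and value,
        // plus two empty slots that later phases may fill in with the property
        // storage and the array length.)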
6fe7ccc8 2687
93a37866
A
2688 case op_get_by_id:
2689 case op_get_by_id_out_of_line:
2690 case op_get_array_length: {
2691 SpeculatedType prediction = getPrediction();
6fe7ccc8 2692
81345200 2693 Node* base = get(VirtualRegister(currentInstruction[2].u.operand));
6fe7ccc8
A
2694 unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
2695
81345200 2696 StringImpl* uid = m_graph.identifiers()[identifierNumber];
            GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
                m_inlineStackTop->m_profiledBlock, m_dfgCodeBlock,
                m_inlineStackTop->m_stubInfos, m_dfgStubInfos,
                currentCodeOrigin(), uid);

            handleGetById(
                currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);

            NEXT_OPCODE(op_get_by_id);
        }
        case op_put_by_id:
        case op_put_by_id_out_of_line:
        case op_put_by_id_transition_direct:
        case op_put_by_id_transition_normal:
        case op_put_by_id_transition_direct_out_of_line:
        case op_put_by_id_transition_normal_out_of_line: {
            Node* value = get(VirtualRegister(currentInstruction[3].u.operand));
            Node* base = get(VirtualRegister(currentInstruction[1].u.operand));
            unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
            bool direct = currentInstruction[8].u.operand;

            PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
                m_inlineStackTop->m_profiledBlock, m_dfgCodeBlock,
                m_inlineStackTop->m_stubInfos, m_dfgStubInfos,
                currentCodeOrigin(), m_graph.identifiers()[identifierNumber]);

            handlePutById(base, identifierNumber, value, putByIdStatus, direct);
            NEXT_OPCODE(op_put_by_id);
        }

        case op_init_global_const_nop: {
            NEXT_OPCODE(op_init_global_const_nop);
        }

        case op_init_global_const: {
            Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
            addToGraph(
                PutGlobalVar,
                OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
                value);
            NEXT_OPCODE(op_init_global_const);
        }

        // === Block terminators. ===

        case op_jmp: {
            int relativeOffset = currentInstruction[1].u.operand;
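            // A non-positive offset is a backward branch (e.g. a loop back-edge), so
            // flush state as if this block were terminal.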
            if (relativeOffset <= 0)
                flushForTerminal();
            addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
            LAST_OPCODE(op_jmp);
        }

        case op_jtrue: {
            unsigned relativeOffset = currentInstruction[2].u.operand;
            Node* condition = get(VirtualRegister(currentInstruction[1].u.operand));
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jtrue))), condition);
            LAST_OPCODE(op_jtrue);
        }

        case op_jfalse: {
            unsigned relativeOffset = currentInstruction[2].u.operand;
            Node* condition = get(VirtualRegister(currentInstruction[1].u.operand));
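            // Same shape as op_jtrue, but with taken and not-taken swapped: we fall
            // through when the condition is true.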
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jfalse), m_currentIndex + relativeOffset)), condition);
            LAST_OPCODE(op_jfalse);
        }

        case op_jeq_null: {
            unsigned relativeOffset = currentInstruction[2].u.operand;
            Node* value = get(VirtualRegister(currentInstruction[1].u.operand));
            Node* condition = addToGraph(CompareEqConstant, value, constantNull());
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jeq_null))), condition);
            LAST_OPCODE(op_jeq_null);
        }

        case op_jneq_null: {
            unsigned relativeOffset = currentInstruction[2].u.operand;
            Node* value = get(VirtualRegister(currentInstruction[1].u.operand));
            Node* condition = addToGraph(CompareEqConstant, value, constantNull());
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jneq_null), m_currentIndex + relativeOffset)), condition);
            LAST_OPCODE(op_jneq_null);
        }

        case op_jless: {
            unsigned relativeOffset = currentInstruction[3].u.operand;
            Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* condition = addToGraph(CompareLess, op1, op2);
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jless))), condition);
            LAST_OPCODE(op_jless);
        }

        case op_jlesseq: {
            unsigned relativeOffset = currentInstruction[3].u.operand;
            Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* condition = addToGraph(CompareLessEq, op1, op2);
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jlesseq))), condition);
            LAST_OPCODE(op_jlesseq);
        }

        case op_jgreater: {
            unsigned relativeOffset = currentInstruction[3].u.operand;
            Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* condition = addToGraph(CompareGreater, op1, op2);
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jgreater))), condition);
            LAST_OPCODE(op_jgreater);
        }

        case op_jgreatereq: {
            unsigned relativeOffset = currentInstruction[3].u.operand;
            Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* condition = addToGraph(CompareGreaterEq, op1, op2);
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jgreatereq))), condition);
            LAST_OPCODE(op_jgreatereq);
        }

        case op_jnless: {
            unsigned relativeOffset = currentInstruction[3].u.operand;
            Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* condition = addToGraph(CompareLess, op1, op2);
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jnless), m_currentIndex + relativeOffset)), condition);
            LAST_OPCODE(op_jnless);
        }

        case op_jnlesseq: {
            unsigned relativeOffset = currentInstruction[3].u.operand;
            Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* condition = addToGraph(CompareLessEq, op1, op2);
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jnlesseq), m_currentIndex + relativeOffset)), condition);
            LAST_OPCODE(op_jnlesseq);
        }

        case op_jngreater: {
            unsigned relativeOffset = currentInstruction[3].u.operand;
            Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* condition = addToGraph(CompareGreater, op1, op2);
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jngreater), m_currentIndex + relativeOffset)), condition);
            LAST_OPCODE(op_jngreater);
        }

        case op_jngreatereq: {
            unsigned relativeOffset = currentInstruction[3].u.operand;
            Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
            Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* condition = addToGraph(CompareGreaterEq, op1, op2);
            addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jngreatereq), m_currentIndex + relativeOffset)), condition);
            LAST_OPCODE(op_jngreatereq);
        }

        case op_switch_imm: {
            SwitchData& data = *m_graph.m_switchData.add();
            data.kind = SwitchImm;
            data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
            data.fallThrough.setBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
            SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
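            // Zero offsets are holes in the jump table; cases that branch to the
            // fall-through target are redundant. Both are skipped below.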
            for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
                if (!table.branchOffsets[i])
                    continue;
                unsigned target = m_currentIndex + table.branchOffsets[i];
                if (target == data.fallThrough.bytecodeIndex())
                    continue;
                data.cases.append(SwitchCase::withBytecodeIndex(jsNumber(static_cast<int32_t>(table.min + i)), target));
            }
            flushIfTerminal(data);
            addToGraph(Switch, OpInfo(&data), get(VirtualRegister(currentInstruction[3].u.operand)));
            LAST_OPCODE(op_switch_imm);
        }

        case op_switch_char: {
            SwitchData& data = *m_graph.m_switchData.add();
            data.kind = SwitchChar;
            data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
            data.fallThrough.setBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
            SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
            for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
                if (!table.branchOffsets[i])
                    continue;
                unsigned target = m_currentIndex + table.branchOffsets[i];
                if (target == data.fallThrough.bytecodeIndex())
                    continue;
                data.cases.append(
                    SwitchCase::withBytecodeIndex(LazyJSValue::singleCharacterString(table.min + i), target));
            }
            flushIfTerminal(data);
            addToGraph(Switch, OpInfo(&data), get(VirtualRegister(currentInstruction[3].u.operand)));
            LAST_OPCODE(op_switch_char);
        }

        case op_switch_string: {
            SwitchData& data = *m_graph.m_switchData.add();
            data.kind = SwitchString;
            data.switchTableIndex = currentInstruction[1].u.operand;
            data.fallThrough.setBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
            StringJumpTable& table = m_codeBlock->stringSwitchJumpTable(data.switchTableIndex);
            StringJumpTable::StringOffsetTable::iterator iter;
            StringJumpTable::StringOffsetTable::iterator end = table.offsetTable.end();
            for (iter = table.offsetTable.begin(); iter != end; ++iter) {
                unsigned target = m_currentIndex + iter->value.branchOffset;
                if (target == data.fallThrough.bytecodeIndex())
                    continue;
                data.cases.append(
                    SwitchCase::withBytecodeIndex(LazyJSValue::knownStringImpl(iter->key.get()), target));
            }
            flushIfTerminal(data);
            addToGraph(Switch, OpInfo(&data), get(VirtualRegister(currentInstruction[3].u.operand)));
            LAST_OPCODE(op_switch_string);
        }

        case op_ret:
            flushForReturn();
            if (inlineCallFrame()) {
                ASSERT(m_inlineStackTop->m_returnValue.isValid());
                setDirect(m_inlineStackTop->m_returnValue, get(VirtualRegister(currentInstruction[1].u.operand)), ImmediateSetWithFlush);
                m_inlineStackTop->m_didReturn = true;
                if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
                    // If we're returning from the first block, then we're done parsing.
                    ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.lastBlock());
                    shouldContinueParsing = false;
                    LAST_OPCODE(op_ret);
                } else {
                    // If inlining created blocks, and we're doing a return, then we need some
                    // special linking.
                    ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_block == m_graph.lastBlock());
                    m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
                }
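                // A return that isn't the last opcode of the inlined code block is an early
                // return: plant an unlinked Jump to the continuation and link it up later.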
                if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
                    ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
                    addToGraph(Jump, OpInfo(0));
                    m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
                    m_inlineStackTop->m_didEarlyReturn = true;
                }
                LAST_OPCODE(op_ret);
            }
            addToGraph(Return, get(VirtualRegister(currentInstruction[1].u.operand)));
            LAST_OPCODE(op_ret);

        case op_end:
            flushForReturn();
            ASSERT(!inlineCallFrame());
            addToGraph(Return, get(VirtualRegister(currentInstruction[1].u.operand)));
            LAST_OPCODE(op_end);

        case op_throw:
            addToGraph(Throw, get(VirtualRegister(currentInstruction[1].u.operand)));
            flushForTerminal();
            addToGraph(Unreachable);
            LAST_OPCODE(op_throw);

        case op_throw_static_error:
            addToGraph(ThrowReferenceError);
            flushForTerminal();
            addToGraph(Unreachable);
            LAST_OPCODE(op_throw_static_error);

        case op_call:
            handleCall(currentInstruction, Call, CodeForCall);
            NEXT_OPCODE(op_call);

        case op_construct:
            handleCall(currentInstruction, Construct, CodeForConstruct);
            NEXT_OPCODE(op_construct);

        case op_call_varargs: {
            int result = currentInstruction[1].u.operand;
            int callee = currentInstruction[2].u.operand;
            int thisReg = currentInstruction[3].u.operand;
            int arguments = currentInstruction[4].u.operand;
            int firstFreeReg = currentInstruction[5].u.operand;

            ASSERT(inlineCallFrame());
            ASSERT_UNUSED(arguments, arguments == m_inlineStackTop->m_codeBlock->argumentsRegister().offset());
            ASSERT(!m_inlineStackTop->m_codeBlock->symbolTable()->slowArguments());

            addToGraph(CheckArgumentsNotCreated);

            unsigned argCount = inlineCallFrame()->arguments.size();

            // Let's compute the register offset. We start with the last used register, and
            // then adjust for the things we want in the call frame.
            int registerOffset = firstFreeReg + 1;
            registerOffset -= argCount; // We will be passing some arguments.
            registerOffset -= JSStack::CallFrameHeaderSize; // We will pretend to have a call frame header.

            // Get the alignment right.
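            // registerOffset is negative, so negate it, round up to the stack
            // alignment, and negate again; this rounds the offset down to an aligned value.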
            registerOffset = -WTF::roundUpToMultipleOf(
                stackAlignmentRegisters(),
                -registerOffset);

            ensureLocals(
                m_inlineStackTop->remapOperand(
                    VirtualRegister(registerOffset)).toLocal());

            // The bytecode wouldn't have set up the arguments. But we'll do it and make it
            // look like the bytecode had done it.
            int nextRegister = registerOffset + JSStack::CallFrameHeaderSize;
            set(VirtualRegister(nextRegister++), get(VirtualRegister(thisReg)), ImmediateNakedSet);
            for (unsigned argument = 1; argument < argCount; ++argument)
                set(VirtualRegister(nextRegister++), get(virtualRegisterForArgument(argument)), ImmediateNakedSet);

            handleCall(
                result, Call, CodeForCall, OPCODE_LENGTH(op_call_varargs),
                callee, argCount, registerOffset);
            NEXT_OPCODE(op_call_varargs);
        }

        case op_jneq_ptr:
            // Statically speculate for now. It makes sense to let speculate-only jneq_ptr
            // support simmer for a while before making it more general, since it's
            // already gnarly enough as it is.
            ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
            addToGraph(
                CheckFunction,
                OpInfo(actualPointerFor(m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)),
                get(VirtualRegister(currentInstruction[1].u.operand)));
            addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
            LAST_OPCODE(op_jneq_ptr);

        case op_resolve_scope: {
            int dst = currentInstruction[1].u.operand;
            ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
            unsigned depth = currentInstruction[4].u.operand;

            // get_from_scope and put_to_scope depend on this watchpoint forcing OSR exit, so they don't add their own watchpoints.
            if (needsVarInjectionChecks(resolveType))
                addToGraph(VarInjectionWatchpoint);

            switch (resolveType) {
            case GlobalProperty:
            case GlobalVar:
            case GlobalPropertyWithVarInjectionChecks:
            case GlobalVarWithVarInjectionChecks:
                set(VirtualRegister(dst), cellConstant(m_inlineStackTop->m_codeBlock->globalObject()));
                break;
            case ClosureVar:
            case ClosureVarWithVarInjectionChecks: {
                JSActivation* activation = currentInstruction[5].u.activation.get();
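                // If the activation is known and its function has only been entered once so
                // far, fold the scope to a constant guarded by a function-reentry watchpoint.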
                if (activation
                    && activation->symbolTable()->m_functionEnteredOnce.isStillValid()) {
                    addToGraph(FunctionReentryWatchpoint, OpInfo(activation->symbolTable()));
                    set(VirtualRegister(dst), cellConstant(activation));
                    break;
                }
                set(VirtualRegister(dst),
                    getScope(m_inlineStackTop->m_codeBlock->needsActivation(), depth));
                break;
            }
            case Dynamic:
                RELEASE_ASSERT_NOT_REACHED();
                break;
            }
            NEXT_OPCODE(op_resolve_scope);
        }

        case op_get_from_scope: {
            int dst = currentInstruction[1].u.operand;
            int scope = currentInstruction[2].u.operand;
            unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
            StringImpl* uid = m_graph.identifiers()[identifierNumber];
            ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();

            Structure* structure = 0;
            WatchpointSet* watchpoints = 0;
            uintptr_t operand;
            {
                ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
                if (resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks)
                    watchpoints = currentInstruction[5].u.watchpointSet;
                else
                    structure = currentInstruction[5].u.structure.get();
                operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
            }

            UNUSED_PARAM(watchpoints); // We will use this in the future. For now we set it as a way of documenting the fact that that's what index 5 is in GlobalVar mode.

            SpeculatedType prediction = getPrediction();
            JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();

            switch (resolveType) {
            case GlobalProperty:
            case GlobalPropertyWithVarInjectionChecks: {
                GetByIdStatus status = GetByIdStatus::computeFor(*m_vm, structure, uid);
                if (status.state() != GetByIdStatus::Simple || status.numVariants() != 1) {
                    set(VirtualRegister(dst), addToGraph(GetByIdFlush, OpInfo(identifierNumber), OpInfo(prediction), get(VirtualRegister(scope))));
                    break;
                }
                Node* base = cellConstantWithStructureCheck(globalObject, status[0].structureSet().singletonStructure());
                addToGraph(Phantom, get(VirtualRegister(scope)));
                if (JSValue specificValue = status[0].specificValue())
                    set(VirtualRegister(dst), cellConstant(specificValue.asCell()));
                else
                    set(VirtualRegister(dst), handleGetByOffset(prediction, base, identifierNumber, operand));
                break;
            }
            case GlobalVar:
            case GlobalVarWithVarInjectionChecks: {
                addToGraph(Phantom, get(VirtualRegister(scope)));
                SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
                VariableWatchpointSet* watchpointSet = entry.watchpointSet();
                JSValue specificValue =
                    watchpointSet ? watchpointSet->inferredValue() : JSValue();
                if (!specificValue) {
                    set(VirtualRegister(dst), addToGraph(GetGlobalVar, OpInfo(operand), OpInfo(prediction)));
                    break;
                }

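                // The global variable currently holds a single inferred value: plant a
                // watchpoint on it and fold the load to a constant.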
                addToGraph(VariableWatchpoint, OpInfo(watchpointSet));
                set(VirtualRegister(dst), inferredConstant(specificValue));
                break;
            }
            case ClosureVar:
            case ClosureVarWithVarInjectionChecks: {
                Node* scopeNode = get(VirtualRegister(scope));
                if (JSActivation* activation = m_graph.tryGetActivation(scopeNode)) {
                    SymbolTable* symbolTable = activation->symbolTable();
                    ConcurrentJITLocker locker(symbolTable->m_lock);
                    SymbolTable::Map::iterator iter = symbolTable->find(locker, uid);
                    ASSERT(iter != symbolTable->end(locker));
                    VariableWatchpointSet* watchpointSet = iter->value.watchpointSet();
                    if (watchpointSet) {
                        if (JSValue value = watchpointSet->inferredValue()) {
                            addToGraph(Phantom, scopeNode);
                            addToGraph(VariableWatchpoint, OpInfo(watchpointSet));
                            set(VirtualRegister(dst), inferredConstant(value));
                            break;
                        }
                    }
                }
                set(VirtualRegister(dst),
                    addToGraph(GetClosureVar, OpInfo(operand), OpInfo(prediction),
                        addToGraph(GetClosureRegisters, scopeNode)));
                break;
            }
            case Dynamic:
                RELEASE_ASSERT_NOT_REACHED();
                break;
            }
            NEXT_OPCODE(op_get_from_scope);
        }

        case op_put_to_scope: {
            unsigned scope = currentInstruction[1].u.operand;
            unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
            unsigned value = currentInstruction[3].u.operand;
            ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
            StringImpl* uid = m_graph.identifiers()[identifierNumber];

            Structure* structure = 0;
            VariableWatchpointSet* watchpoints = 0;
            uintptr_t operand;
            {
                ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
                if (resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks)
                    watchpoints = currentInstruction[5].u.watchpointSet;
                else
                    structure = currentInstruction[5].u.structure.get();
                operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
            }

            JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();

            switch (resolveType) {
            case GlobalProperty:
            case GlobalPropertyWithVarInjectionChecks: {
                PutByIdStatus status = PutByIdStatus::computeFor(*m_vm, globalObject, structure, uid, false);
                if (status.numVariants() != 1 || status[0].kind() != PutByIdVariant::Replace) {
                    addToGraph(PutById, OpInfo(identifierNumber), get(VirtualRegister(scope)), get(VirtualRegister(value)));
                    break;
                }
                Node* base = cellConstantWithStructureCheck(globalObject, status[0].structure());
                addToGraph(Phantom, get(VirtualRegister(scope)));
                handlePutByOffset(base, identifierNumber, static_cast<PropertyOffset>(operand), get(VirtualRegister(value)));
                // Keep scope alive until after put.
                addToGraph(Phantom, get(VirtualRegister(scope)));
                break;
            }
            case GlobalVar:
            case GlobalVarWithVarInjectionChecks: {
                SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
                ASSERT(watchpoints == entry.watchpointSet());
                Node* valueNode = get(VirtualRegister(value));
                addToGraph(PutGlobalVar, OpInfo(operand), valueNode);
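                // If the set is not yet invalidated, the store must notify it so that
                // any code relying on the inferred value gets invalidated.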
                if (watchpoints->state() != IsInvalidated)
                    addToGraph(NotifyWrite, OpInfo(watchpoints), valueNode);
                // Keep scope alive until after put.
                addToGraph(Phantom, get(VirtualRegister(scope)));
                break;
            }
            case ClosureVar:
            case ClosureVarWithVarInjectionChecks: {
                Node* scopeNode = get(VirtualRegister(scope));
                Node* scopeRegisters = addToGraph(GetClosureRegisters, scopeNode);
                addToGraph(PutClosureVar, OpInfo(operand), scopeNode, scopeRegisters, get(VirtualRegister(value)));
                break;
            }
            case Dynamic:
                RELEASE_ASSERT_NOT_REACHED();
                break;
            }
            NEXT_OPCODE(op_put_to_scope);
        }

        case op_loop_hint: {
            // Baseline->DFG OSR jumps between loop hints. The DFG assumes that Baseline->DFG
            // OSR can only happen at basic block boundaries. Assert that these two statements
            // are compatible.
            RELEASE_ASSERT(m_currentIndex == blockBegin);

            // We never do OSR into an inlined code block. That could not happen, since OSR
            // looks up the code block that is the replacement for the baseline JIT code
            // block. Hence, machine code block = true code block = not inline code block.
            if (!m_inlineStackTop->m_caller)
                m_currentBlock->isOSRTarget = true;

            addToGraph(LoopHint);

            if (m_vm->watchdog && m_vm->watchdog->isEnabled())
                addToGraph(CheckWatchdogTimer);

            NEXT_OPCODE(op_loop_hint);
        }

        case op_init_lazy_reg: {
            set(VirtualRegister(currentInstruction[1].u.operand), getJSConstantForValue(JSValue()));
            ASSERT(operandIsLocal(currentInstruction[1].u.operand));
            m_graph.m_lazyVars.set(VirtualRegister(currentInstruction[1].u.operand).toLocal());
            NEXT_OPCODE(op_init_lazy_reg);
        }

        case op_create_activation: {
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CreateActivation, get(VirtualRegister(currentInstruction[1].u.operand))));
            NEXT_OPCODE(op_create_activation);
        }

        case op_create_arguments: {
            m_graph.m_hasArguments = true;
            Node* createArguments = addToGraph(CreateArguments, get(VirtualRegister(currentInstruction[1].u.operand)));
            set(VirtualRegister(currentInstruction[1].u.operand), createArguments);
            set(unmodifiedArgumentsRegister(VirtualRegister(currentInstruction[1].u.operand)), createArguments);
            NEXT_OPCODE(op_create_arguments);
        }

        case op_tear_off_activation: {
            addToGraph(TearOffActivation, get(VirtualRegister(currentInstruction[1].u.operand)));
            NEXT_OPCODE(op_tear_off_activation);
        }

        case op_tear_off_arguments: {
            m_graph.m_hasArguments = true;
            addToGraph(TearOffArguments, get(unmodifiedArgumentsRegister(VirtualRegister(currentInstruction[1].u.operand))), get(VirtualRegister(currentInstruction[2].u.operand)));
            NEXT_OPCODE(op_tear_off_arguments);
        }

        case op_get_arguments_length: {
            m_graph.m_hasArguments = true;
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(GetMyArgumentsLengthSafe));
            NEXT_OPCODE(op_get_arguments_length);
        }

        case op_get_argument_by_val: {
            m_graph.m_hasArguments = true;
            set(VirtualRegister(currentInstruction[1].u.operand),
                addToGraph(
                    GetMyArgumentByValSafe, OpInfo(0), OpInfo(getPrediction()),
                    get(VirtualRegister(currentInstruction[3].u.operand))));
            NEXT_OPCODE(op_get_argument_by_val);
        }

        case op_new_func: {
            if (!currentInstruction[3].u.operand) {
                set(VirtualRegister(currentInstruction[1].u.operand),
                    addToGraph(NewFunctionNoCheck, OpInfo(currentInstruction[2].u.operand)));
            } else {
                set(VirtualRegister(currentInstruction[1].u.operand),
                    addToGraph(
                        NewFunction,
                        OpInfo(currentInstruction[2].u.operand),
                        get(VirtualRegister(currentInstruction[1].u.operand))));
            }
            NEXT_OPCODE(op_new_func);
        }

        case op_new_captured_func: {
            Node* function = addToGraph(
                NewFunctionNoCheck, OpInfo(currentInstruction[2].u.operand));
            if (VariableWatchpointSet* set = currentInstruction[3].u.watchpointSet)
                addToGraph(NotifyWrite, OpInfo(set), function);
            set(VirtualRegister(currentInstruction[1].u.operand), function);
            NEXT_OPCODE(op_new_captured_func);
        }

        case op_new_func_exp: {
            set(VirtualRegister(currentInstruction[1].u.operand),
                addToGraph(NewFunctionExpression, OpInfo(currentInstruction[2].u.operand)));
            NEXT_OPCODE(op_new_func_exp);
        }

        case op_typeof: {
            set(VirtualRegister(currentInstruction[1].u.operand),
                addToGraph(TypeOf, get(VirtualRegister(currentInstruction[2].u.operand))));
            NEXT_OPCODE(op_typeof);
        }

        case op_to_number: {
            Node* node = get(VirtualRegister(currentInstruction[2].u.operand));
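            // A Phantom with a NumberUse edge forces a number speculation on the value
            // without emitting an actual conversion; the node itself is forwarded as the result.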
            addToGraph(Phantom, Edge(node, NumberUse));
            set(VirtualRegister(currentInstruction[1].u.operand), node);
            NEXT_OPCODE(op_to_number);
        }

        case op_in: {
            set(VirtualRegister(currentInstruction[1].u.operand),
                addToGraph(In, get(VirtualRegister(currentInstruction[2].u.operand)), get(VirtualRegister(currentInstruction[3].u.operand))));
            NEXT_OPCODE(op_in);
        }

        default:
            // Parse failed! This should not happen because the capabilities checker
            // should have caught it.
            RELEASE_ASSERT_NOT_REACHED();
            return false;
        }
    }
}

void ByteCodeParser::linkBlock(BasicBlock* block, Vector<BasicBlock*>& possibleTargets)
{
    ASSERT(!block->isLinked);
    ASSERT(!block->isEmpty());
    Node* node = block->last();
    ASSERT(node->isTerminal());

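    // During parsing, terminals record bytecode offsets rather than block pointers;
    // resolve each recorded offset to the basic block that starts there.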
    switch (node->op()) {
    case Jump:
        node->targetBlock() = blockForBytecodeOffset(possibleTargets, node->targetBytecodeOffsetDuringParsing());
        break;

    case Branch: {
        BranchData* data = node->branchData();
        data->taken.block = blockForBytecodeOffset(possibleTargets, data->takenBytecodeIndex());
        data->notTaken.block = blockForBytecodeOffset(possibleTargets, data->notTakenBytecodeIndex());
        break;
    }

    case Switch: {
        SwitchData* data = node->switchData();
        for (unsigned i = node->switchData()->cases.size(); i--;)
            data->cases[i].target.block = blockForBytecodeOffset(possibleTargets, data->cases[i].target.bytecodeIndex());
        data->fallThrough.block = blockForBytecodeOffset(possibleTargets, data->fallThrough.bytecodeIndex());
        break;
    }

    default:
        break;
    }

#if !ASSERT_DISABLED
    block->isLinked = true;
#endif
}

void ByteCodeParser::linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets)
{
    for (size_t i = 0; i < unlinkedBlocks.size(); ++i) {
        if (unlinkedBlocks[i].m_needsNormalLinking) {
            linkBlock(unlinkedBlocks[i].m_block, possibleTargets);
            unlinkedBlocks[i].m_needsNormalLinking = false;
        }
    }
}

void ByteCodeParser::buildOperandMapsIfNecessary()
{
    if (m_haveBuiltOperandMaps)
        return;

    for (size_t i = 0; i < m_codeBlock->numberOfIdentifiers(); ++i)
        m_identifierMap.add(m_codeBlock->identifier(i).impl(), i);
    for (size_t i = 0; i < m_codeBlock->numberOfConstantRegisters(); ++i) {
        JSValue value = m_codeBlock->getConstant(i + FirstConstantRegisterIndex);
        if (!value)
            m_emptyJSValueIndex = i + FirstConstantRegisterIndex;
        else
            m_jsValueMap.add(JSValue::encode(value), i + FirstConstantRegisterIndex);
    }

    m_haveBuiltOperandMaps = true;
}

ByteCodeParser::InlineStackEntry::InlineStackEntry(
    ByteCodeParser* byteCodeParser,
    CodeBlock* codeBlock,
    CodeBlock* profiledBlock,
    BasicBlock* callsiteBlockHead,
    JSFunction* callee, // Null if this is a closure call.
    VirtualRegister returnValueVR,
    VirtualRegister inlineCallFrameStart,
    int argumentCountIncludingThis,
    CodeSpecializationKind kind)
    : m_byteCodeParser(byteCodeParser)
    , m_codeBlock(codeBlock)
    , m_profiledBlock(profiledBlock)
    , m_callsiteBlockHead(callsiteBlockHead)
    , m_returnValue(returnValueVR)
    , m_didReturn(false)
    , m_didEarlyReturn(false)
    , m_caller(byteCodeParser->m_inlineStackTop)
{
    {
        ConcurrentJITLocker locker(m_profiledBlock->m_lock);
        m_lazyOperands.initialize(locker, m_profiledBlock->lazyOperandValueProfiles());
        m_exitProfile.initialize(locker, profiledBlock->exitProfile());

        // We do this while holding the lock because we want to encourage StructureStubInfo's
        // to be potentially added to operations and because the profiled block could be in the
        // middle of LLInt->JIT tier-up in which case we would be adding the info's right now.
        if (m_profiledBlock->hasBaselineJITProfiling()) {
            m_profiledBlock->getStubInfoMap(locker, m_stubInfos);
            m_profiledBlock->getCallLinkInfoMap(locker, m_callLinkInfos);
        }
    }

    m_argumentPositions.resize(argumentCountIncludingThis);
    for (int i = 0; i < argumentCountIncludingThis; ++i) {
        byteCodeParser->m_graph.m_argumentPositions.append(ArgumentPosition());
        ArgumentPosition* argumentPosition = &byteCodeParser->m_graph.m_argumentPositions.last();
        m_argumentPositions[i] = argumentPosition;
    }

    // Track the code-block-global exit sites.
    if (m_exitProfile.hasExitSite(ArgumentsEscaped)) {
        byteCodeParser->m_graph.m_executablesWhoseArgumentsEscaped.add(
            codeBlock->ownerExecutable());
    }

    if (m_caller) {
        // Inline case.
        ASSERT(codeBlock != byteCodeParser->m_codeBlock);
        ASSERT(inlineCallFrameStart.isValid());
        ASSERT(callsiteBlockHead);

        m_inlineCallFrame = byteCodeParser->m_graph.m_plan.inlineCallFrames->add();
        initializeLazyWriteBarrierForInlineCallFrameExecutable(
            byteCodeParser->m_graph.m_plan.writeBarriers,
            m_inlineCallFrame->executable,
            byteCodeParser->m_codeBlock,
            m_inlineCallFrame,
            byteCodeParser->m_codeBlock->ownerExecutable(),
            codeBlock->ownerExecutable());
        m_inlineCallFrame->stackOffset = inlineCallFrameStart.offset() - JSStack::CallFrameHeaderSize;
        if (callee) {
            m_inlineCallFrame->calleeRecovery = ValueRecovery::constant(callee);
            m_inlineCallFrame->isClosureCall = false;
        } else
            m_inlineCallFrame->isClosureCall = true;
        m_inlineCallFrame->caller = byteCodeParser->currentCodeOrigin();
        m_inlineCallFrame->arguments.resize(argumentCountIncludingThis); // Set the number of arguments including this, but don't configure the value recoveries, yet.
        m_inlineCallFrame->isCall = isCall(kind);

        if (m_inlineCallFrame->caller.inlineCallFrame)
            m_inlineCallFrame->capturedVars = m_inlineCallFrame->caller.inlineCallFrame->capturedVars;
        else {
            for (int i = byteCodeParser->m_codeBlock->m_numVars; i--;) {
                if (byteCodeParser->m_codeBlock->isCaptured(virtualRegisterForLocal(i)))
                    m_inlineCallFrame->capturedVars.set(i);
            }
        }

        for (int i = argumentCountIncludingThis; i--;) {
            VirtualRegister argument = virtualRegisterForArgument(i);
            if (codeBlock->isCaptured(argument))
                m_inlineCallFrame->capturedVars.set(VirtualRegister(argument.offset() + m_inlineCallFrame->stackOffset).toLocal());
        }
        for (size_t i = codeBlock->m_numVars; i--;) {
            VirtualRegister local = virtualRegisterForLocal(i);
            if (codeBlock->isCaptured(local))
                m_inlineCallFrame->capturedVars.set(VirtualRegister(local.offset() + m_inlineCallFrame->stackOffset).toLocal());
        }

        byteCodeParser->buildOperandMapsIfNecessary();

        m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
        m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
        m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
        m_switchRemap.resize(codeBlock->numberOfSwitchJumpTables());

        for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i) {
            StringImpl* rep = codeBlock->identifier(i).impl();
            BorrowedIdentifierMap::AddResult result = byteCodeParser->m_identifierMap.add(rep, byteCodeParser->m_graph.identifiers().numberOfIdentifiers());
            if (result.isNewEntry)
                byteCodeParser->m_graph.identifiers().addLazily(rep);
            m_identifierRemap[i] = result.iterator->value;
        }
        for (size_t i = 0; i < codeBlock->numberOfConstantRegisters(); ++i) {
            JSValue value = codeBlock->getConstant(i + FirstConstantRegisterIndex);
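            // The empty JSValue cannot be used as a hash key in m_jsValueMap, so it
            // gets one dedicated constant slot, remembered in m_emptyJSValueIndex.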
            if (!value) {
                if (byteCodeParser->m_emptyJSValueIndex == UINT_MAX) {
                    byteCodeParser->m_emptyJSValueIndex = byteCodeParser->m_codeBlock->numberOfConstantRegisters() + FirstConstantRegisterIndex;
                    byteCodeParser->addConstant(JSValue());
                    byteCodeParser->m_constants.append(ConstantRecord());
                }
                m_constantRemap[i] = byteCodeParser->m_emptyJSValueIndex;
                continue;
            }
            JSValueMap::AddResult result = byteCodeParser->m_jsValueMap.add(JSValue::encode(value), byteCodeParser->m_codeBlock->numberOfConstantRegisters() + FirstConstantRegisterIndex);
            if (result.isNewEntry) {
                byteCodeParser->addConstant(value);
                byteCodeParser->m_constants.append(ConstantRecord());
            }
            m_constantRemap[i] = result.iterator->value;
        }
        for (unsigned i = 0; i < codeBlock->numberOfConstantBuffers(); ++i) {
            // If we inline the same code block multiple times, we don't want to needlessly
            // duplicate its constant buffers.
            HashMap<ConstantBufferKey, unsigned>::iterator iter =
                byteCodeParser->m_constantBufferCache.find(ConstantBufferKey(codeBlock, i));
            if (iter != byteCodeParser->m_constantBufferCache.end()) {
                m_constantBufferRemap[i] = iter->value;
                continue;
            }
            Vector<JSValue>& buffer = codeBlock->constantBufferAsVector(i);
            unsigned newIndex = byteCodeParser->m_codeBlock->addConstantBuffer(buffer);
            m_constantBufferRemap[i] = newIndex;
            byteCodeParser->m_constantBufferCache.add(ConstantBufferKey(codeBlock, i), newIndex);
        }
        for (unsigned i = 0; i < codeBlock->numberOfSwitchJumpTables(); ++i) {
            m_switchRemap[i] = byteCodeParser->m_codeBlock->numberOfSwitchJumpTables();
            byteCodeParser->m_codeBlock->addSwitchJumpTable() = codeBlock->switchJumpTable(i);
        }
        m_callsiteBlockHeadNeedsLinking = true;
    } else {
        // Machine code block case.
        ASSERT(codeBlock == byteCodeParser->m_codeBlock);
        ASSERT(!callee);
        ASSERT(!returnValueVR.isValid());
        ASSERT(!inlineCallFrameStart.isValid());
        ASSERT(!callsiteBlockHead);

        m_inlineCallFrame = 0;

        m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
        m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
        m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
        m_switchRemap.resize(codeBlock->numberOfSwitchJumpTables());
        for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i)
            m_identifierRemap[i] = i;
        for (size_t i = 0; i < codeBlock->numberOfConstantRegisters(); ++i)
            m_constantRemap[i] = i + FirstConstantRegisterIndex;
        for (size_t i = 0; i < codeBlock->numberOfConstantBuffers(); ++i)
            m_constantBufferRemap[i] = i;
        for (size_t i = 0; i < codeBlock->numberOfSwitchJumpTables(); ++i)
            m_switchRemap[i] = i;
        m_callsiteBlockHeadNeedsLinking = false;
    }

    for (size_t i = 0; i < m_constantRemap.size(); ++i)
        ASSERT(m_constantRemap[i] >= static_cast<unsigned>(FirstConstantRegisterIndex));

    byteCodeParser->m_inlineStackTop = this;
}

void ByteCodeParser::parseCodeBlock()
{
    CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;

    if (m_graph.compilation()) {
        m_graph.compilation()->addProfiledBytecodes(
            *m_vm->m_perBytecodeProfiler, m_inlineStackTop->m_profiledBlock);
    }

    bool shouldDumpBytecode = Options::dumpBytecodeAtDFGTime();
    if (shouldDumpBytecode) {
        dataLog("Parsing ", *codeBlock);
        if (inlineCallFrame()) {
            dataLog(
                " for inlining at ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT),
                " ", inlineCallFrame()->caller);
        }
        dataLog(
            ": captureCount = ", codeBlock->symbolTable() ? codeBlock->symbolTable()->captureCount() : 0,
            ", needsActivation = ", codeBlock->needsActivation(),
            ", isStrictMode = ", codeBlock->ownerExecutable()->isStrictMode(), "\n");
        codeBlock->baselineVersion()->dumpBytecode();
    }

    Vector<unsigned, 32> jumpTargets;
    computePreciseJumpTargets(codeBlock, jumpTargets);
    if (Options::dumpBytecodeAtDFGTime()) {
        dataLog("Jump targets: ");
        CommaPrinter comma;
        for (unsigned i = 0; i < jumpTargets.size(); ++i)
            dataLog(comma, jumpTargets[i]);
        dataLog("\n");
    }

    for (unsigned jumpTargetIndex = 0; jumpTargetIndex <= jumpTargets.size(); ++jumpTargetIndex) {
        // The maximum bytecode offset to go into the current basic block is either the next jump target, or the end of the instructions.
        unsigned limit = jumpTargetIndex < jumpTargets.size() ? jumpTargets[jumpTargetIndex] : codeBlock->instructions().size();
        ASSERT(m_currentIndex < limit);

        // Loop until we reach the current limit (i.e. next jump target).
        do {
            if (!m_currentBlock) {
                // Check if we can use the last block.
                if (m_graph.numBlocks() && m_graph.lastBlock()->isEmpty()) {
                    // This must be a block belonging to us.
                    ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_block == m_graph.lastBlock());
                    // Either the block is linkable or it isn't. If it's linkable then it's the last
                    // block in the blockLinkingTargets list. If it's not then the last block will
                    // have a lower bytecode index than the one we're about to give to this block.
                    if (m_inlineStackTop->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_blockLinkingTargets.last()->bytecodeBegin != m_currentIndex) {
                        // Make the block linkable.
                        ASSERT(m_inlineStackTop->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_blockLinkingTargets.last()->bytecodeBegin < m_currentIndex);
                        m_inlineStackTop->m_blockLinkingTargets.append(m_graph.lastBlock());
                    }
                    // Change its bytecode begin and continue.
                    m_currentBlock = m_graph.lastBlock();
                    m_currentBlock->bytecodeBegin = m_currentIndex;
                } else {
                    RefPtr<BasicBlock> block = adoptRef(new BasicBlock(m_currentIndex, m_numArguments, m_numLocals, PNaN));
                    m_currentBlock = block.get();
                    // This assertion checks two things:
                    // 1) If the bytecodeBegin is greater than currentIndex, then something has gone
                    //    horribly wrong. So, we're probably generating incorrect code.
                    // 2) If the bytecodeBegin is equal to the currentIndex, then we failed to do
                    //    a peephole coalescing of this block in the if statement above. So, we're
                    //    generating suboptimal code and leaving more work for the CFG simplifier.
                    ASSERT(m_inlineStackTop->m_unlinkedBlocks.isEmpty() || m_inlineStackTop->m_unlinkedBlocks.last().m_block->bytecodeBegin < m_currentIndex);
                    m_inlineStackTop->m_unlinkedBlocks.append(UnlinkedBlock(block.get()));
                    m_inlineStackTop->m_blockLinkingTargets.append(block.get());
                    // The first block is definitely an OSR target.
                    if (!m_graph.numBlocks())
                        block->isOSRTarget = true;
                    m_graph.appendBlock(block);
                    prepareToParseBlock();
                }
            }

            bool shouldContinueParsing = parseBlock(limit);

            // We should not have gone beyond the limit.
            ASSERT(m_currentIndex <= limit);

            // We should have planted a terminal, or we just gave up because
            // we realized that the jump target information is imprecise, or we
            // are at the end of an inline function, or we realized that we
            // should stop parsing because there was a return in the first
            // basic block.
            ASSERT(m_currentBlock->isEmpty() || m_currentBlock->last()->isTerminal() || (m_currentIndex == codeBlock->instructions().size() && inlineCallFrame()) || !shouldContinueParsing);

            if (!shouldContinueParsing)
                return;

            m_currentBlock = 0;
        } while (m_currentIndex < limit);
    }

    // Should have reached the end of the instructions.
    ASSERT(m_currentIndex == codeBlock->instructions().size());
}

bool ByteCodeParser::parse()
{
    // Set during construction.
    ASSERT(!m_currentIndex);

    if (Options::verboseDFGByteCodeParsing())
        dataLog("Parsing ", *m_codeBlock, "\n");

    m_dfgCodeBlock = m_graph.m_plan.profiledDFGCodeBlock.get();
    if (isFTL(m_graph.m_plan.mode) && m_dfgCodeBlock
        && Options::enablePolyvariantDevirtualization()) {
        if (Options::enablePolyvariantCallInlining())
            CallLinkStatus::computeDFGStatuses(m_dfgCodeBlock, m_callContextMap);
        if (Options::enablePolyvariantByIdInlining())
            m_dfgCodeBlock->getStubInfoMap(m_dfgStubInfos);
    }

    if (m_codeBlock->captureCount()) {
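        // Index each captured local's watchpoint set by local number so the parser
        // can consult it when it sees accesses to those locals.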
        SymbolTable* symbolTable = m_codeBlock->symbolTable();
        ConcurrentJITLocker locker(symbolTable->m_lock);
        SymbolTable::Map::iterator iter = symbolTable->begin(locker);
        SymbolTable::Map::iterator end = symbolTable->end(locker);
        for (; iter != end; ++iter) {
            VariableWatchpointSet* set = iter->value.watchpointSet();
            if (!set)
                continue;
            size_t index = static_cast<size_t>(VirtualRegister(iter->value.getIndex()).toLocal());
            while (m_localWatchpoints.size() <= index)
                m_localWatchpoints.append(nullptr);
            m_localWatchpoints[index] = set;
        }
    }

    InlineStackEntry inlineStackEntry(
        this, m_codeBlock, m_profiledBlock, 0, 0, VirtualRegister(), VirtualRegister(),
        m_codeBlock->numParameters(), CodeForCall);

    parseCodeBlock();

    linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
    m_graph.determineReachability();
    m_graph.killUnreachableBlocks();

    for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
        BasicBlock* block = m_graph.block(blockIndex);
        if (!block)
            continue;
        ASSERT(block->variablesAtHead.numberOfLocals() == m_graph.block(0)->variablesAtHead.numberOfLocals());
        ASSERT(block->variablesAtHead.numberOfArguments() == m_graph.block(0)->variablesAtHead.numberOfArguments());
        ASSERT(block->variablesAtTail.numberOfLocals() == m_graph.block(0)->variablesAtHead.numberOfLocals());
        ASSERT(block->variablesAtTail.numberOfArguments() == m_graph.block(0)->variablesAtHead.numberOfArguments());
    }

    m_graph.m_localVars = m_numLocals;
    m_graph.m_parameterSlots = m_parameterSlots;

    return true;
}

bool parse(Graph& graph)
{
    SamplingRegion samplingRegion("DFG Parsing");
    return ByteCodeParser(graph).parse();
}

} } // namespace JSC::DFG

#endif