/*
 * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGByteCodeParser.h"

#if ENABLE(DFG_JIT)

#include "ArrayConstructor.h"
#include "BasicBlockLocation.h"
#include "CallLinkStatus.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGArrayMode.h"
#include "DFGCapabilities.h"
#include "DFGGraph.h"
#include "DFGJITCode.h"
#include "GetByIdStatus.h"
#include "Heap.h"
#include "JSLexicalEnvironment.h"
#include "JSCInlines.h"
#include "PreciseJumpTargets.h"
#include "PutByIdStatus.h"
#include "StackAlignment.h"
#include "StringConstructor.h"
#include <wtf/CommaPrinter.h>
#include <wtf/HashMap.h>
#include <wtf/MathExtras.h>
#include <wtf/StdLibExtras.h>

namespace JSC { namespace DFG {

static const bool verbose = false;

class ConstantBufferKey {
public:
    ConstantBufferKey()
        : m_codeBlock(0)
        , m_index(0)
    {
    }

    ConstantBufferKey(WTF::HashTableDeletedValueType)
        : m_codeBlock(0)
        , m_index(1)
    {
    }

    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
        : m_codeBlock(codeBlock)
        , m_index(index)
    {
    }

    bool operator==(const ConstantBufferKey& other) const
    {
        return m_codeBlock == other.m_codeBlock
            && m_index == other.m_index;
    }

    unsigned hash() const
    {
        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
    }

    bool isHashTableDeletedValue() const
    {
        return !m_codeBlock && m_index;
    }

    CodeBlock* codeBlock() const { return m_codeBlock; }
    unsigned index() const { return m_index; }

private:
    CodeBlock* m_codeBlock;
    unsigned m_index;
};

struct ConstantBufferKeyHash {
    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
    {
        return a == b;
    }

    static const bool safeToCompareToEmptyOrDeleted = true;
};
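
// Hedged usage sketch (not part of the original file): ConstantBufferKey is the
// key type of m_constantBufferCache below, pairing a CodeBlock with a constant
// buffer index so buffers from different (e.g. inlined) code blocks don't collide:
//
//     HashMap<ConstantBufferKey, unsigned> cache;
//     cache.add(ConstantBufferKey(codeBlock, bufferIndex), remappedIndex);
//
// The (codeBlock == 0, index == 1) state is reserved as the hash-table deleted
// value, per isHashTableDeletedValue() above.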

} } // namespace JSC::DFG

namespace WTF {

template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
    typedef JSC::DFG::ConstantBufferKeyHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };

} // namespace WTF

namespace JSC { namespace DFG {

// === ByteCodeParser ===
//
// This class is used to compile the dataflow graph from a CodeBlock.
class ByteCodeParser {
public:
    ByteCodeParser(Graph& graph)
        : m_vm(&graph.m_vm)
        , m_codeBlock(graph.m_codeBlock)
        , m_profiledBlock(graph.m_profiledBlock)
        , m_graph(graph)
        , m_currentBlock(0)
        , m_currentIndex(0)
        , m_constantUndefined(graph.freeze(jsUndefined()))
        , m_constantNull(graph.freeze(jsNull()))
        , m_constantNaN(graph.freeze(jsNumber(PNaN)))
        , m_constantOne(graph.freeze(jsNumber(1)))
        , m_numArguments(m_codeBlock->numParameters())
        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
        , m_parameterSlots(0)
        , m_numPassedVarArgs(0)
        , m_inlineStackTop(0)
        , m_haveBuiltOperandMaps(false)
        , m_currentInstruction(0)
        , m_hasDebuggerEnabled(graph.hasDebuggerEnabled())
    {
        ASSERT(m_profiledBlock);
    }

    // Parse a full CodeBlock of bytecode.
    bool parse();

private:
    struct InlineStackEntry;

    // Just parse from m_currentIndex to the end of the current CodeBlock.
    void parseCodeBlock();

    void ensureLocals(unsigned newNumLocals)
    {
        if (newNumLocals <= m_numLocals)
            return;
        m_numLocals = newNumLocals;
        for (size_t i = 0; i < m_graph.numBlocks(); ++i)
            m_graph.block(i)->ensureLocals(newNumLocals);
    }

    // Helper for min and max.
    template<typename ChecksFunctor>
    bool handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis, const ChecksFunctor& insertChecks);

    // Handle calls. This resolves issues surrounding inlining and intrinsics.
    void handleCall(
        int result, NodeType op, InlineCallFrame::Kind, unsigned instructionSize,
        Node* callTarget, int argCount, int registerOffset, CallLinkStatus,
        SpeculatedType prediction);
    void handleCall(
        int result, NodeType op, InlineCallFrame::Kind, unsigned instructionSize,
        Node* callTarget, int argCount, int registerOffset, CallLinkStatus);
    void handleCall(int result, NodeType op, CodeSpecializationKind, unsigned instructionSize, int callee, int argCount, int registerOffset);
    void handleCall(Instruction* pc, NodeType op, CodeSpecializationKind);
    void handleVarargsCall(Instruction* pc, NodeType op, CodeSpecializationKind);
    void emitFunctionChecks(CallVariant, Node* callTarget, VirtualRegister thisArgument);
    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis);
    unsigned inliningCost(CallVariant, int argumentCountIncludingThis, CodeSpecializationKind); // Returns UINT_MAX if it's not an inlining candidate. By convention, intrinsics have a cost of 1.
    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
    bool handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, VirtualRegister thisArgument, VirtualRegister argumentsArgument, unsigned argumentsOffset, int argumentCountIncludingThis, unsigned nextOffset, NodeType callOp, InlineCallFrame::Kind, SpeculatedType prediction);
    enum CallerLinkability { CallerDoesNormalLinking, CallerLinksManually };
    template<typename ChecksFunctor>
    bool attemptToInlineCall(Node* callTargetNode, int resultOperand, CallVariant, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, InlineCallFrame::Kind, CallerLinkability, SpeculatedType prediction, unsigned& inliningBalance, const ChecksFunctor& insertChecks);
    template<typename ChecksFunctor>
    void inlineCall(Node* callTargetNode, int resultOperand, CallVariant, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, InlineCallFrame::Kind, CallerLinkability, const ChecksFunctor& insertChecks);
    void cancelLinkingForBlock(InlineStackEntry*, BasicBlock*); // Only works when the given block is the last one to have been added for that inline stack entry.
    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
    template<typename ChecksFunctor>
    bool handleIntrinsic(int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, const ChecksFunctor& insertChecks);
    template<typename ChecksFunctor>
    bool handleTypedArrayConstructor(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, TypedArrayType, const ChecksFunctor& insertChecks);
    template<typename ChecksFunctor>
    bool handleConstantInternalFunction(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind, const ChecksFunctor& insertChecks);
    Node* handlePutByOffset(Node* base, unsigned identifier, PropertyOffset, Node* value);
    Node* handleGetByOffset(SpeculatedType, Node* base, const StructureSet&, unsigned identifierNumber, PropertyOffset, NodeType op = GetByOffset);
    void handleGetById(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        const GetByIdStatus&);
    void emitPutById(
        Node* base, unsigned identifierNumber, Node* value, const PutByIdStatus&, bool isDirect);
    void handlePutById(
        Node* base, unsigned identifierNumber, Node* value, const PutByIdStatus&,
        bool isDirect);
    void emitChecks(const ConstantStructureCheckVector&);

    void prepareToParseBlock();
    void clearCaches();

    // Parse a single basic block of bytecode instructions.
    bool parseBlock(unsigned limit);
    // Link block successors.
    void linkBlock(BasicBlock*, Vector<BasicBlock*>& possibleTargets);
    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets);

    VariableAccessData* newVariableAccessData(VirtualRegister operand)
    {
        ASSERT(!operand.isConstant());

        m_graph.m_variableAccessData.append(VariableAccessData(operand));
        return &m_graph.m_variableAccessData.last();
    }

    // Get/Set the operands/result of a bytecode instruction.
    Node* getDirect(VirtualRegister operand)
    {
        ASSERT(!operand.isConstant());

        // Is this an argument?
        if (operand.isArgument())
            return getArgument(operand);

        // Must be a local.
        return getLocal(operand);
    }

    Node* get(VirtualRegister operand)
    {
        if (operand.isConstant()) {
            unsigned constantIndex = operand.toConstantIndex();
            unsigned oldSize = m_constants.size();
            if (constantIndex >= oldSize || !m_constants[constantIndex]) {
                const CodeBlock& codeBlock = *m_inlineStackTop->m_codeBlock;
                JSValue value = codeBlock.getConstant(operand.offset());
                SourceCodeRepresentation sourceCodeRepresentation = codeBlock.constantSourceCodeRepresentation(operand.offset());
                if (constantIndex >= oldSize) {
                    m_constants.grow(constantIndex + 1);
                    for (unsigned i = oldSize; i < m_constants.size(); ++i)
                        m_constants[i] = nullptr;
                }

                Node* constantNode = nullptr;
                if (sourceCodeRepresentation == SourceCodeRepresentation::Double)
                    constantNode = addToGraph(DoubleConstant, OpInfo(m_graph.freezeStrong(jsDoubleNumber(value.asNumber()))));
                else
                    constantNode = addToGraph(JSConstant, OpInfo(m_graph.freezeStrong(value)));
                m_constants[constantIndex] = constantNode;
            }
            ASSERT(m_constants[constantIndex]);
            return m_constants[constantIndex];
        }

        if (inlineCallFrame()) {
            if (!inlineCallFrame()->isClosureCall) {
                JSFunction* callee = inlineCallFrame()->calleeConstant();
                if (operand.offset() == JSStack::Callee)
                    return weakJSConstant(callee);
            }
        } else if (operand.offset() == JSStack::Callee) {
            // We have to do some constant-folding here because this enables CreateThis folding. Note
            // that we don't have such watchpoint-based folding for inlined uses of Callee, since in that
            // case if the function is a singleton then we already know it.
            if (FunctionExecutable* executable = jsDynamicCast<FunctionExecutable*>(m_codeBlock->ownerExecutable())) {
                InferredValue* singleton = executable->singletonFunction();
                if (JSValue value = singleton->inferredValue()) {
                    m_graph.watchpoints().addLazily(singleton);
                    JSFunction* function = jsCast<JSFunction*>(value);
                    return weakJSConstant(function);
                }
            }
            return addToGraph(GetCallee);
        }

        return getDirect(m_inlineStackTop->remapOperand(operand));
    }
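
    // Hedged usage sketch (not in the original file): a typical bytecode case in
    // parseBlock() reads its operands with get() and writes its result with set().
    // For a hypothetical binary op "op dst, lhs, rhs" the pattern would be:
    //
    //     Node* lhs = get(VirtualRegister(currentInstruction[2].u.operand));
    //     Node* rhs = get(VirtualRegister(currentInstruction[3].u.operand));
    //     set(VirtualRegister(currentInstruction[1].u.operand),
    //         addToGraph(ArithAdd, lhs, rhs));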

    enum SetMode {
        // A normal set which follows a two-phase commit that spans code origins. During
        // the current code origin it issues a MovHint, and at the start of the next
        // code origin there will be a SetLocal. If the local needs flushing, the second
        // SetLocal will be preceded by a Flush.
        NormalSet,

        // A set where the SetLocal happens immediately and there is still a Flush. This
        // is relevant when assigning to a local in tricky situations for the delayed
        // SetLocal logic but where we know that we have not performed any side effects
        // within this code origin. This is a safe replacement for NormalSet anytime we
        // know that we have not yet performed side effects in this code origin.
        ImmediateSetWithFlush,

        // A set where the SetLocal happens immediately and we do not Flush it even if
        // this is a local that is marked as needing it. This is relevant when
        // initializing locals at the top of a function.
        ImmediateNakedSet
    };
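
    // Hedged sketch (illustrative, not in the original file) of the node sequences
    // each mode produces for "set(loc1, value)" parsed at code origin bc#10:
    //
    //     NormalSet:             bc#10: MovHint(loc1); next origin: Flush(loc1)?, SetLocal(loc1)
    //     ImmediateSetWithFlush: bc#10: MovHint(loc1), Flush(loc1)?, SetLocal(loc1)
    //     ImmediateNakedSet:     bc#10: MovHint(loc1), SetLocal(loc1)
    //
    // "Flush(loc1)?" appears only when setLocal() decides the local must be flushed
    // (argument positions, or the scope register when the debugger is enabled).
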
    Node* setDirect(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        addToGraph(MovHint, OpInfo(operand.offset()), value);

        DelayedSetLocal delayed(currentCodeOrigin(), operand, value);

        if (setMode == NormalSet) {
            m_setLocalQueue.append(delayed);
            return 0;
        }

        return delayed.execute(this, setMode);
    }

    void processSetLocalQueue()
    {
        for (unsigned i = 0; i < m_setLocalQueue.size(); ++i)
            m_setLocalQueue[i].execute(this);
        m_setLocalQueue.resize(0);
    }

    Node* set(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        return setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
    }

    Node* injectLazyOperandSpeculation(Node* node)
    {
        ASSERT(node->op() == GetLocal);
        ASSERT(node->origin.semantic.bytecodeIndex == m_currentIndex);
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        LazyOperandValueProfileKey key(m_currentIndex, node->local());
        SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key);
        node->variableAccessData()->predict(prediction);
        return node;
    }

    // Used in implementing get/set, above, where the operand is a local variable.
    Node* getLocal(VirtualRegister operand)
    {
        unsigned local = operand.toLocal();

        Node* node = m_currentBlock->variablesAtTail.local(local);

        // This has two goals: 1) link together variable access datas, and 2)
        // try to avoid creating redundant GetLocals. (1) is required for
        // correctness - no other phase will ensure that block-local variable
        // access data unification is done correctly. (2) is purely opportunistic
        // and is meant as a compile-time optimization only.

        VariableAccessData* variable;

        if (node) {
            variable = node->variableAccessData();

            switch (node->op()) {
            case GetLocal:
                return node;
            case SetLocal:
                return node->child1().node();
            default:
                break;
            }
        } else
            variable = newVariableAccessData(operand);

        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.local(local) = node;
        return node;
    }
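
    // Hedged example (not in the original file) of the redundancy avoidance above:
    // if the block already ends with SetLocal(loc1, v), a subsequent getLocal(loc1)
    // in the same block returns v directly instead of emitting a new GetLocal, and
    // two back-to-back getLocal(loc1) calls share a single GetLocal node.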

    Node* setLocal(const CodeOrigin& semanticOrigin, VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        CodeOrigin oldSemanticOrigin = m_currentSemanticOrigin;
        m_currentSemanticOrigin = semanticOrigin;

        unsigned local = operand.toLocal();

        if (setMode != ImmediateNakedSet) {
            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
            if (argumentPosition)
                flushDirect(operand, argumentPosition);
            else if (m_hasDebuggerEnabled && operand == m_codeBlock->scopeRegister())
                flush(operand);
        }

        VariableAccessData* variableAccessData = newVariableAccessData(operand);
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(semanticOrigin.bytecodeIndex, BadCache));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(semanticOrigin.bytecodeIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.local(local) = node;

        m_currentSemanticOrigin = oldSemanticOrigin;
        return node;
    }

    // Used in implementing get/set, above, where the operand is an argument.
    Node* getArgument(VirtualRegister operand)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);

        Node* node = m_currentBlock->variablesAtTail.argument(argument);

        VariableAccessData* variable;

        if (node) {
            variable = node->variableAccessData();

            switch (node->op()) {
            case GetLocal:
                return node;
            case SetLocal:
                return node->child1().node();
            default:
                break;
            }
        } else
            variable = newVariableAccessData(operand);

        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }

    Node* setArgument(const CodeOrigin& semanticOrigin, VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        CodeOrigin oldSemanticOrigin = m_currentSemanticOrigin;
        m_currentSemanticOrigin = semanticOrigin;

        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);

        VariableAccessData* variableAccessData = newVariableAccessData(operand);

        // Always flush arguments, except for 'this'. If 'this' is created by us,
        // then make sure that it's never unboxed.
        if (argument) {
            if (setMode != ImmediateNakedSet)
                flushDirect(operand);
        } else if (m_codeBlock->specializationKind() == CodeForConstruct)
            variableAccessData->mergeShouldNeverUnbox(true);

        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(semanticOrigin.bytecodeIndex, BadCache));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(semanticOrigin.bytecodeIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.argument(argument) = node;

        m_currentSemanticOrigin = oldSemanticOrigin;
        return node;
    }

    ArgumentPosition* findArgumentPositionForArgument(int argument)
    {
        InlineStackEntry* stack = m_inlineStackTop;
        while (stack->m_inlineCallFrame)
            stack = stack->m_caller;
        return stack->m_argumentPositions[argument];
    }

    ArgumentPosition* findArgumentPositionForLocal(VirtualRegister operand)
    {
        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
            if (!inlineCallFrame)
                break;
            if (operand.offset() < static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize))
                continue;
            if (operand.offset() == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
                continue;
            if (operand.offset() >= static_cast<int>(inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset() + inlineCallFrame->arguments.size()))
                continue;
            int argument = VirtualRegister(operand.offset() - inlineCallFrame->stackOffset).toArgument();
            return stack->m_argumentPositions[argument];
        }
        return 0;
    }

    ArgumentPosition* findArgumentPosition(VirtualRegister operand)
    {
        if (operand.isArgument())
            return findArgumentPositionForArgument(operand.toArgument());
        return findArgumentPositionForLocal(operand);
    }

    void flush(VirtualRegister operand)
    {
        flushDirect(m_inlineStackTop->remapOperand(operand));
    }

    void flushDirect(VirtualRegister operand)
    {
        flushDirect(operand, findArgumentPosition(operand));
    }

    void flushDirect(VirtualRegister operand, ArgumentPosition* argumentPosition)
    {
        ASSERT(!operand.isConstant());

        Node* node = m_currentBlock->variablesAtTail.operand(operand);

        VariableAccessData* variable;

        if (node)
            variable = node->variableAccessData();
        else
            variable = newVariableAccessData(operand);

        node = addToGraph(Flush, OpInfo(variable));
        m_currentBlock->variablesAtTail.operand(operand) = node;
        if (argumentPosition)
            argumentPosition->addVariable(variable);
    }

    void flush(InlineStackEntry* inlineStackEntry)
    {
        int numArguments;
        if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame) {
            ASSERT(!m_hasDebuggerEnabled);
            numArguments = inlineCallFrame->arguments.size();
            if (inlineCallFrame->isClosureCall)
                flushDirect(inlineStackEntry->remapOperand(VirtualRegister(JSStack::Callee)));
            if (inlineCallFrame->isVarargs())
                flushDirect(inlineStackEntry->remapOperand(VirtualRegister(JSStack::ArgumentCount)));
        } else
            numArguments = inlineStackEntry->m_codeBlock->numParameters();
        for (unsigned argument = numArguments; argument-- > 1;)
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForArgument(argument)));
        if (m_hasDebuggerEnabled)
            flush(m_codeBlock->scopeRegister());
    }

    void flushForTerminal()
    {
        for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
            flush(inlineStackEntry);
    }

    void flushForReturn()
    {
        flush(m_inlineStackTop);
    }

    void flushIfTerminal(SwitchData& data)
    {
        if (data.fallThrough.bytecodeIndex() > m_currentIndex)
            return;

        for (unsigned i = data.cases.size(); i--;) {
            if (data.cases[i].target.bytecodeIndex() > m_currentIndex)
                return;
        }

        flushForTerminal();
    }

    // Assumes that the constant should be strongly marked.
    Node* jsConstant(JSValue constantValue)
    {
        return addToGraph(JSConstant, OpInfo(m_graph.freezeStrong(constantValue)));
    }

    Node* weakJSConstant(JSValue constantValue)
    {
        return addToGraph(JSConstant, OpInfo(m_graph.freeze(constantValue)));
    }
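
    // Hedged note (not part of the original file): the distinction is the marking
    // strength of the FrozenValue. freezeStrong() keeps the constant alive for at
    // least the lifetime of the compilation, while freeze() is allowed to hold it
    // weakly, which is why get() above uses weakJSConstant() for a proven callee
    // JSFunction.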

    // Helper functions to get/set the this value.
    Node* getThis()
    {
        return get(m_inlineStackTop->m_codeBlock->thisRegister());
    }

    void setThis(Node* value)
    {
        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
    }

    InlineCallFrame* inlineCallFrame()
    {
        return m_inlineStackTop->m_inlineCallFrame;
    }

    CodeOrigin currentCodeOrigin()
    {
        return CodeOrigin(m_currentIndex, inlineCallFrame());
    }

    NodeOrigin currentNodeOrigin()
    {
        // FIXME: We should set the forExit origin only on those nodes that can exit.
        // https://bugs.webkit.org/show_bug.cgi?id=145204
        if (m_currentSemanticOrigin.isSet())
            return NodeOrigin(m_currentSemanticOrigin, currentCodeOrigin());
        return NodeOrigin(currentCodeOrigin());
    }

    BranchData* branchData(unsigned taken, unsigned notTaken)
    {
        // We assume that branches originating from bytecode always have a fall-through. We
        // use this assumption to avoid checking for the creation of terminal blocks.
        ASSERT((taken > m_currentIndex) || (notTaken > m_currentIndex));
        BranchData* data = m_graph.m_branchData.add();
        *data = BranchData::withBytecodeIndices(taken, notTaken);
        return data;
    }
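
    // Hedged usage sketch (not in the original file): a conditional jump case in
    // parseBlock() would pair this with a Branch node roughly as follows, where
    // relativeOffset comes from the jump's bytecode operand:
    //
    //     addToGraph(Branch,
    //         OpInfo(branchData(
    //             m_currentIndex + relativeOffset,
    //             m_currentIndex + OPCODE_LENGTH(op_jtrue))),
    //         condition);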

    Node* addToGraph(Node* node)
    {
        if (Options::verboseDFGByteCodeParsing())
            dataLog(" appended ", node, " ", Graph::opName(node->op()), "\n");
        m_currentBlock->append(node);
        return node;
    }

    Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentNodeOrigin(), Edge(child1), Edge(child2),
            Edge(child3));
        return addToGraph(result);
    }
    Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentNodeOrigin(), child1, child2, child3);
        return addToGraph(result);
    }
    Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentNodeOrigin(), info, Edge(child1), Edge(child2),
            Edge(child3));
        return addToGraph(result);
    }
    Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentNodeOrigin(), info1, info2,
            Edge(child1), Edge(child2), Edge(child3));
        return addToGraph(result);
    }

    Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
    {
        Node* result = m_graph.addNode(
            SpecNone, Node::VarArg, op, currentNodeOrigin(), info1, info2,
            m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
        addToGraph(result);

        m_numPassedVarArgs = 0;

        return result;
    }

    void addVarArgChild(Node* child)
    {
        m_graph.m_varArgChildren.append(Edge(child));
        m_numPassedVarArgs++;
    }

    Node* addCallWithoutSettingResult(
        NodeType op, OpInfo opInfo, Node* callee, int argCount, int registerOffset,
        SpeculatedType prediction)
    {
        addVarArgChild(callee);
        size_t parameterSlots = JSStack::CallFrameHeaderSize - JSStack::CallerFrameAndPCSize + argCount;
        if (parameterSlots > m_parameterSlots)
            m_parameterSlots = parameterSlots;

        for (int i = 0; i < argCount; ++i)
            addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));

        return addToGraph(Node::VarArg, op, opInfo, OpInfo(prediction));
    }

    Node* addCall(
        int result, NodeType op, OpInfo opInfo, Node* callee, int argCount, int registerOffset,
        SpeculatedType prediction)
    {
        Node* call = addCallWithoutSettingResult(
            op, opInfo, callee, argCount, registerOffset, prediction);
        VirtualRegister resultReg(result);
        if (resultReg.isValid())
            set(resultReg, call);
        return call;
    }
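
    // Hedged example (not in the original file): for a call "r0 = f(x)" parsed
    // from op_call, argCount is 2 (the 'this' argument at index 0 plus one actual
    // argument), so the resulting vararg node's children are, in order:
    //
    //     child 0: the callee node for f
    //     child 1: get(virtualRegisterForArgument(0, registerOffset))  // this
    //     child 2: get(virtualRegisterForArgument(1, registerOffset))  // x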

    Node* cellConstantWithStructureCheck(JSCell* object, Structure* structure)
    {
        Node* objectNode = weakJSConstant(object);
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
        return objectNode;
    }

    SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(locker, bytecodeIndex);
    }

    SpeculatedType getPrediction(unsigned bytecodeIndex)
    {
        SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);

        if (prediction == SpecNone) {
            // We have no information about what values this node generates. Give up
            // on executing this code, since we're likely to do more damage than good.
            addToGraph(ForceOSRExit);
        }

        return prediction;
    }
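
    // Hedged example (not in the original file): if the baseline value profile for
    // this bytecode only ever saw int32 results, the returned prediction would be
    // SpecInt32 and downstream nodes can speculate accordingly; SpecNone means the
    // instruction never produced a profiled value, hence the ForceOSRExit above.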

    SpeculatedType getPredictionWithoutOSRExit()
    {
        return getPredictionWithoutOSRExit(m_currentIndex);
    }

    SpeculatedType getPrediction()
    {
        return getPrediction(m_currentIndex);
    }

    ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
        bool makeSafe = profile->outOfBounds(locker);
        return ArrayMode::fromObserved(locker, profile, action, makeSafe);
    }

    ArrayMode getArrayMode(ArrayProfile* profile)
    {
        return getArrayMode(profile, Array::Read);
    }

    Node* makeSafe(Node* node)
    {
        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
            node->mergeFlags(NodeMayOverflowInDFG);
        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            node->mergeFlags(NodeMayNegZeroInDFG);

        if (!isX86() && node->op() == ArithMod)
            return node;

        if (!m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex))
            return node;

        switch (node->op()) {
        case UInt32ToNumber:
        case ArithAdd:
        case ArithSub:
        case ValueAdd:
        case ArithMod: // for ArithMod "MayOverflow" means we tried to divide by zero, or we saw double.
            node->mergeFlags(NodeMayOverflowInBaseline);
            break;

        case ArithNegate:
            // Currently we can't tell the difference between a negation overflowing
            // (i.e. -(1 << 31)) and one generating negative zero (i.e. -0). If it took
            // the slow path then we assume that it did both of those things.
            node->mergeFlags(NodeMayOverflowInBaseline);
            node->mergeFlags(NodeMayNegZeroInBaseline);
            break;

        case ArithMul:
            // FIXME: We should detect cases where we only overflowed but never created
            // negative zero.
            // https://bugs.webkit.org/show_bug.cgi?id=132470
            if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
                node->mergeFlags(NodeMayOverflowInBaseline | NodeMayNegZeroInBaseline);
            else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
                node->mergeFlags(NodeMayNegZeroInBaseline);
            break;

        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }

        return node;
    }
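
    // Hedged example (not in the original file): if baseline profiling saw
    // 0x40000000 + 0x40000000 take the int32 overflow slow path at this bytecode,
    // the ArithAdd gets NodeMayOverflowInBaseline, and later phases can plan for a
    // double result (or an overflow check) instead of speculating pure int32 math.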

    Node* makeDivSafe(Node* node)
    {
        ASSERT(node->op() == ArithDiv);

        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
            node->mergeFlags(NodeMayOverflowInDFG);
        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            node->mergeFlags(NodeMayNegZeroInDFG);

        // The main slow case counter for op_div in the old JIT counts only when
        // the operands are not numbers. We don't care about that since we already
        // have speculations in place that take care of that separately. We only
        // care about when the outcome of the division is not an integer, which
        // is what the special fast case counter tells us.

        if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex))
            return node;

        // FIXME: It might be possible to make this more granular.
        node->mergeFlags(NodeMayOverflowInBaseline | NodeMayNegZeroInBaseline);

        return node;
    }
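
    // Hedged example (not in the original file): a division like 3 / 2 produces
    // 1.5, bumping the baseline special fast case counter; that is what flags this
    // ArithDiv as unable to speculate a clean int32 quotient.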

    void noticeArgumentsUse()
    {
        // All of the arguments in this function need to be formatted as JSValues because we will
        // load from them in a random-access fashion and we don't want to have to switch on
        // format.

        for (ArgumentPosition* argument : m_inlineStackTop->m_argumentPositions)
            argument->mergeShouldNeverUnbox(true);
    }

    void buildOperandMapsIfNecessary();

    VM* m_vm;
    CodeBlock* m_codeBlock;
    CodeBlock* m_profiledBlock;
    Graph& m_graph;

    // The current block being generated.
    BasicBlock* m_currentBlock;
    // The bytecode index of the current instruction being generated.
    unsigned m_currentIndex;
    // The semantic origin of the current node if different from the current index.
    CodeOrigin m_currentSemanticOrigin;

    FrozenValue* m_constantUndefined;
    FrozenValue* m_constantNull;
    FrozenValue* m_constantNaN;
    FrozenValue* m_constantOne;
    Vector<Node*, 16> m_constants;

    // The number of arguments passed to the function.
    unsigned m_numArguments;
    // The number of locals (vars + temporaries) used in the function.
    unsigned m_numLocals;
    // The number of slots (in units of sizeof(Register)) that we need to
    // preallocate for arguments to outgoing calls from this frame. This
    // number includes the CallFrame slots that we initialize for the callee
    // (but not the callee-initialized CallerFrame and ReturnPC slots).
    // This number is 0 if and only if this function is a leaf.
    unsigned m_parameterSlots;
    // The number of var args passed to the next var arg node.
    unsigned m_numPassedVarArgs;

    HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;

    struct InlineStackEntry {
        ByteCodeParser* m_byteCodeParser;

        CodeBlock* m_codeBlock;
        CodeBlock* m_profiledBlock;
        InlineCallFrame* m_inlineCallFrame;

        ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }

        QueryableExitProfile m_exitProfile;

        // Remapping of identifier and constant numbers from the code block being
        // inlined (inline callee) to the code block that we're inlining into
        // (the machine code block, which is the transitive, though not necessarily
        // direct, caller).
        Vector<unsigned> m_identifierRemap;
        Vector<unsigned> m_constantBufferRemap;
        Vector<unsigned> m_switchRemap;

        // Blocks introduced by this code block, which need successor linking.
        // May include up to one basic block that includes the continuation after
        // the callsite in the caller. These must be appended in the order that they
        // are created, but their bytecodeBegin values need not be in order as they
        // are ignored.
        Vector<UnlinkedBlock> m_unlinkedBlocks;

        // Potential block linking targets. Must be sorted by bytecodeBegin, and
        // cannot have two blocks that have the same bytecodeBegin.
        Vector<BasicBlock*> m_blockLinkingTargets;

        // If the callsite's basic block was split into two, then this will be
        // the head of the callsite block. It needs its successors linked to the
        // m_unlinkedBlocks, but not the other way around: there's no way for
        // any blocks in m_unlinkedBlocks to jump back into this block.
        BasicBlock* m_callsiteBlockHead;

        // Does the callsite block head need linking? This is typically true
        // but will be false for the machine code block's inline stack entry
        // (since that one is not inlined) and for cases where an inline callee
        // did the linking for us.
        bool m_callsiteBlockHeadNeedsLinking;

        VirtualRegister m_returnValue;

        // Speculations about variable types collected from the profiled code block,
        // which are based on OSR exit profiles that past DFG compilations of this
        // code block had gathered.
        LazyOperandValueProfileParser m_lazyOperands;

        CallLinkInfoMap m_callLinkInfos;
        StubInfoMap m_stubInfos;

        // Did we see any returns? We need to handle the (uncommon but necessary)
        // case where a procedure that does not return was inlined.
        bool m_didReturn;

        // Did we have any early returns?
        bool m_didEarlyReturn;

        // Pointers to the argument position trackers for this slice of code.
        Vector<ArgumentPosition*> m_argumentPositions;

        InlineStackEntry* m_caller;

        InlineStackEntry(
            ByteCodeParser*,
            CodeBlock*,
            CodeBlock* profiledBlock,
            BasicBlock* callsiteBlockHead,
            JSFunction* callee, // Null if this is a closure call.
            VirtualRegister returnValueVR,
            VirtualRegister inlineCallFrameStart,
            int argumentCountIncludingThis,
            InlineCallFrame::Kind);

        ~InlineStackEntry()
        {
            m_byteCodeParser->m_inlineStackTop = m_caller;
        }

        VirtualRegister remapOperand(VirtualRegister operand) const
        {
            if (!m_inlineCallFrame)
                return operand;

            ASSERT(!operand.isConstant());

            return VirtualRegister(operand.offset() + m_inlineCallFrame->stackOffset);
        }
    };

    InlineStackEntry* m_inlineStackTop;

    struct DelayedSetLocal {
        CodeOrigin m_origin;
        VirtualRegister m_operand;
        Node* m_value;

        DelayedSetLocal() { }
        DelayedSetLocal(const CodeOrigin& origin, VirtualRegister operand, Node* value)
            : m_origin(origin)
            , m_operand(operand)
            , m_value(value)
        {
        }

        Node* execute(ByteCodeParser* parser, SetMode setMode = NormalSet)
        {
            if (m_operand.isArgument())
                return parser->setArgument(m_origin, m_operand, m_value, setMode);
            return parser->setLocal(m_origin, m_operand, m_value, setMode);
        }
    };

    Vector<DelayedSetLocal, 2> m_setLocalQueue;

    // Have we built operand maps? We initialize them lazily, and only when doing
    // inlining.
    bool m_haveBuiltOperandMaps;
    // Mapping between identifier names and numbers.
    BorrowedIdentifierMap m_identifierMap;

    CodeBlock* m_dfgCodeBlock;
    CallLinkStatus::ContextMap m_callContextMap;
    StubInfoMap m_dfgStubInfos;

    Instruction* m_currentInstruction;
    bool m_hasDebuggerEnabled;
};

#define NEXT_OPCODE(name) \
    m_currentIndex += OPCODE_LENGTH(name); \
    continue

#define LAST_OPCODE(name) \
    m_currentIndex += OPCODE_LENGTH(name); \
    return shouldContinueParsing
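
// Hedged usage sketch (not in the original file): these macros assume the shape
// of the dispatch loop in parseBlock(), roughly:
//
//     while (true) {
//         switch (opcodeID) {
//         case op_mov: { ...; NEXT_OPCODE(op_mov); }  // advance, keep parsing
//         case op_ret: { ...; LAST_OPCODE(op_ret); }  // advance, end the block
//         }
//     }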

void ByteCodeParser::handleCall(Instruction* pc, NodeType op, CodeSpecializationKind kind)
{
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    handleCall(
        pc[1].u.operand, op, kind, OPCODE_LENGTH(op_call),
        pc[2].u.operand, pc[3].u.operand, -pc[4].u.operand);
}

void ByteCodeParser::handleCall(
    int result, NodeType op, CodeSpecializationKind kind, unsigned instructionSize,
    int callee, int argumentCountIncludingThis, int registerOffset)
{
    Node* callTarget = get(VirtualRegister(callee));

    CallLinkStatus callLinkStatus = CallLinkStatus::computeFor(
        m_inlineStackTop->m_profiledBlock, currentCodeOrigin(),
        m_inlineStackTop->m_callLinkInfos, m_callContextMap);

    handleCall(
        result, op, InlineCallFrame::kindFor(kind), instructionSize, callTarget,
        argumentCountIncludingThis, registerOffset, callLinkStatus);
}

void ByteCodeParser::handleCall(
    int result, NodeType op, InlineCallFrame::Kind kind, unsigned instructionSize,
    Node* callTarget, int argumentCountIncludingThis, int registerOffset,
    CallLinkStatus callLinkStatus)
{
    handleCall(
        result, op, kind, instructionSize, callTarget, argumentCountIncludingThis,
        registerOffset, callLinkStatus, getPrediction());
}

void ByteCodeParser::handleCall(
    int result, NodeType op, InlineCallFrame::Kind kind, unsigned instructionSize,
    Node* callTarget, int argumentCountIncludingThis, int registerOffset,
    CallLinkStatus callLinkStatus, SpeculatedType prediction)
{
    ASSERT(registerOffset <= 0);

    if (callTarget->isCellConstant())
        callLinkStatus.setProvenConstantCallee(CallVariant(callTarget->asCell()));

    if (Options::verboseDFGByteCodeParsing())
        dataLog(" Handling call at ", currentCodeOrigin(), ": ", callLinkStatus, "\n");

    if (!callLinkStatus.canOptimize()) {
        // Oddly, this conflates calls that haven't executed with calls that behaved sufficiently polymorphically
        // that we cannot optimize them.

        addCall(result, op, OpInfo(), callTarget, argumentCountIncludingThis, registerOffset, prediction);
        return;
    }

    unsigned nextOffset = m_currentIndex + instructionSize;

    OpInfo callOpInfo;

    if (handleInlining(callTarget, result, callLinkStatus, registerOffset, virtualRegisterForArgument(0, registerOffset), VirtualRegister(), 0, argumentCountIncludingThis, nextOffset, op, kind, prediction)) {
        if (m_graph.compilation())
            m_graph.compilation()->noticeInlinedCall();
        return;
    }

#if ENABLE(FTL_NATIVE_CALL_INLINING)
    if (isFTL(m_graph.m_plan.mode) && Options::optimizeNativeCalls() && callLinkStatus.size() == 1 && !callLinkStatus.couldTakeSlowPath()) {
        CallVariant callee = callLinkStatus[0];
        JSFunction* function = callee.function();
        CodeSpecializationKind specializationKind = InlineCallFrame::specializationKindFor(kind);
        if (function && function->isHostFunction()) {
            emitFunctionChecks(callee, callTarget, virtualRegisterForArgument(0, registerOffset));
            callOpInfo = OpInfo(m_graph.freeze(function));

            if (op == Call)
                op = NativeCall;
            else {
                ASSERT(op == Construct);
                op = NativeConstruct;
            }
        }
    }
#endif

    addCall(result, op, callOpInfo, callTarget, argumentCountIncludingThis, registerOffset, prediction);
}

void ByteCodeParser::handleVarargsCall(Instruction* pc, NodeType op, CodeSpecializationKind kind)
{
    ASSERT(OPCODE_LENGTH(op_call_varargs) == OPCODE_LENGTH(op_construct_varargs));

    int result = pc[1].u.operand;
    int callee = pc[2].u.operand;
    int thisReg = pc[3].u.operand;
    int arguments = pc[4].u.operand;
    int firstFreeReg = pc[5].u.operand;
    int firstVarArgOffset = pc[6].u.operand;

    SpeculatedType prediction = getPrediction();

    Node* callTarget = get(VirtualRegister(callee));

    CallLinkStatus callLinkStatus = CallLinkStatus::computeFor(
        m_inlineStackTop->m_profiledBlock, currentCodeOrigin(),
        m_inlineStackTop->m_callLinkInfos, m_callContextMap);
    if (callTarget->isCellConstant())
        callLinkStatus.setProvenConstantCallee(CallVariant(callTarget->asCell()));

    if (Options::verboseDFGByteCodeParsing())
        dataLog(" Varargs call link status at ", currentCodeOrigin(), ": ", callLinkStatus, "\n");

    if (callLinkStatus.canOptimize()
        && handleInlining(callTarget, result, callLinkStatus, firstFreeReg, VirtualRegister(thisReg), VirtualRegister(arguments), firstVarArgOffset, 0, m_currentIndex + OPCODE_LENGTH(op_call_varargs), op, InlineCallFrame::varargsKindFor(kind), prediction)) {
        if (m_graph.compilation())
            m_graph.compilation()->noticeInlinedCall();
        return;
    }

    CallVarargsData* data = m_graph.m_callVarargsData.add();
    data->firstVarArgOffset = firstVarArgOffset;

    Node* thisChild = get(VirtualRegister(thisReg));

    Node* call = addToGraph(op, OpInfo(data), OpInfo(prediction), callTarget, get(VirtualRegister(arguments)), thisChild);
    VirtualRegister resultReg(result);
    if (resultReg.isValid())
        set(resultReg, call);
}

void ByteCodeParser::emitFunctionChecks(CallVariant callee, Node* callTarget, VirtualRegister thisArgumentReg)
{
    Node* thisArgument;
    if (thisArgumentReg.isValid())
        thisArgument = get(thisArgumentReg);
    else
        thisArgument = 0;

    JSCell* calleeCell;
    Node* callTargetForCheck;
    if (callee.isClosureCall()) {
        calleeCell = callee.executable();
        callTargetForCheck = addToGraph(GetExecutable, callTarget);
    } else {
        calleeCell = callee.nonExecutableCallee();
        callTargetForCheck = callTarget;
    }

    ASSERT(calleeCell);
    addToGraph(CheckCell, OpInfo(m_graph.freeze(calleeCell)), callTargetForCheck, thisArgument);
}

void ByteCodeParser::emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis)
{
    for (int i = 0; i < argumentCountIncludingThis; ++i)
        addToGraph(Phantom, get(virtualRegisterForArgument(i, registerOffset)));
}

unsigned ByteCodeParser::inliningCost(CallVariant callee, int argumentCountIncludingThis, CodeSpecializationKind kind)
{
    if (verbose)
        dataLog("Considering inlining ", callee, " into ", currentCodeOrigin(), "\n");

    if (m_hasDebuggerEnabled) {
        if (verbose)
            dataLog(" Failing because the debugger is in use.\n");
        return UINT_MAX;
    }

    FunctionExecutable* executable = callee.functionExecutable();
    if (!executable) {
        if (verbose)
            dataLog(" Failing because there is no function executable.\n");
        return UINT_MAX;
    }

    // Does the number of arguments we're passing match the arity of the target? We currently
    // inline only if the number of arguments passed is greater than or equal to the number of
    // arguments expected.
    if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis) {
        if (verbose)
            dataLog(" Failing because of arity mismatch.\n");
        return UINT_MAX;
    }
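
    // Hedged example (not in the original file): for "function f(a, b)",
    // parameterCount() is 2, so argumentCountIncludingThis must be at least 3
    // (this, a, b). f(1, 2) and f(1, 2, 3) pass this check; f(1) does not.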

    // Do we have a code block, and does the code block's size match the heuristics/requirements for
    // being an inline candidate? We might not have a code block (1) if code was thrown away,
    // (2) if we simply hadn't actually made this call yet or (3) code is a builtin function and
    // specialization kind is construct. In the former 2 cases, we could still theoretically attempt
    // to inline it if we had a static proof of what was being called; this might happen for example
    // if you call a global function, where watchpointing gives us static information. Overall,
    // it's a rare case because we expect that any hot callees would have already been compiled.
    CodeBlock* codeBlock = executable->baselineCodeBlockFor(kind);
    if (!codeBlock) {
        if (verbose)
            dataLog(" Failing because no code block available.\n");
        return UINT_MAX;
    }
    CapabilityLevel capabilityLevel = inlineFunctionForCapabilityLevel(
        codeBlock, kind, callee.isClosureCall());
    if (verbose) {
        dataLog(" Kind: ", kind, "\n");
        dataLog(" Is closure call: ", callee.isClosureCall(), "\n");
        dataLog(" Capability level: ", capabilityLevel, "\n");
        dataLog(" Might inline function: ", mightInlineFunctionFor(codeBlock, kind), "\n");
        dataLog(" Might compile function: ", mightCompileFunctionFor(codeBlock, kind), "\n");
        dataLog(" Is supported for inlining: ", isSupportedForInlining(codeBlock), "\n");
        dataLog(" Needs activation: ", codeBlock->ownerExecutable()->needsActivation(), "\n");
        dataLog(" Is inlining candidate: ", codeBlock->ownerExecutable()->isInliningCandidate(), "\n");
    }
    if (!canInline(capabilityLevel)) {
        if (verbose)
            dataLog(" Failing because the function is not inlineable.\n");
        return UINT_MAX;
    }

    // Check if the caller is already too large. We do this check here because that's just
    // where we happen to also have the callee's code block, and we want that for the
    // purpose of unsetting SABI (the "should always be inlined" bit).
    if (!isSmallEnoughToInlineCodeInto(m_codeBlock)) {
        codeBlock->m_shouldAlwaysBeInlined = false;
        if (verbose)
            dataLog(" Failing because the caller is too large.\n");
        return UINT_MAX;
    }

    // FIXME: this should be better at predicting how much bloat we will introduce by inlining
    // this function.
    // https://bugs.webkit.org/show_bug.cgi?id=127627

    // FIXME: We currently inline functions that have run in LLInt but not in Baseline. These
    // functions have very low fidelity profiling, and presumably they weren't very hot if they
    // haven't gotten to Baseline yet. Consider not inlining these functions.
    // https://bugs.webkit.org/show_bug.cgi?id=145503

    // Have we exceeded inline stack depth, or are we trying to inline a recursive call to
    // too many levels? If either of these are detected, then don't inline. We adjust our
    // heuristics if we are dealing with a function that cannot otherwise be compiled.

    unsigned depth = 0;
    unsigned recursion = 0;

    for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
        ++depth;
        if (depth >= Options::maximumInliningDepth()) {
            if (verbose)
                dataLog(" Failing because depth exceeded.\n");
            return UINT_MAX;
        }

        if (entry->executable() == executable) {
            ++recursion;
            if (recursion >= Options::maximumInliningRecursion()) {
                if (verbose)
                    dataLog(" Failing because recursion detected.\n");
                return UINT_MAX;
            }
        }
    }

    if (verbose)
        dataLog(" Inlining should be possible.\n");

    // It might be possible to inline.
    return codeBlock->instructionCount();
}
1278
1279template<typename ChecksFunctor>
1280void ByteCodeParser::inlineCall(Node* callTargetNode, int resultOperand, CallVariant callee, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, InlineCallFrame::Kind kind, CallerLinkability callerLinkability, const ChecksFunctor& insertChecks)
1281{
1282 CodeSpecializationKind specializationKind = InlineCallFrame::specializationKindFor(kind);
1283
1284 ASSERT(inliningCost(callee, argumentCountIncludingThis, specializationKind) != UINT_MAX);
6fe7ccc8 1285
ed1e77d3
A
1286 CodeBlock* codeBlock = callee.functionExecutable()->baselineCodeBlockFor(specializationKind);
1287 insertChecks(codeBlock);
1288
6fe7ccc8
A
1289 // FIXME: Don't flush constants!
1290
81345200 1291 int inlineCallFrameStart = m_inlineStackTop->remapOperand(VirtualRegister(registerOffset)).offset() + JSStack::CallFrameHeaderSize;
6fe7ccc8 1292
81345200
A
1293 ensureLocals(
1294 VirtualRegister(inlineCallFrameStart).toLocal() + 1 +
1295 JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters);
6fe7ccc8 1296
93a37866
A
1297 size_t argumentPositionStart = m_graph.m_argumentPositions.size();
1298
ed1e77d3
A
1299 VirtualRegister resultReg(resultOperand);
1300 if (resultReg.isValid())
1301 resultReg = m_inlineStackTop->remapOperand(resultReg);
1302
93a37866 1303 InlineStackEntry inlineStackEntry(
ed1e77d3 1304 this, codeBlock, codeBlock, m_graph.lastBlock(), callee.function(), resultReg,
93a37866 1305 (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
6fe7ccc8
A
1306
1307 // This is where the actual inlining really happens.
1308 unsigned oldIndex = m_currentIndex;
6fe7ccc8 1309 m_currentIndex = 0;
6fe7ccc8 1310
81345200
A
1311 InlineVariableData inlineVariableData;
1312 inlineVariableData.inlineCallFrame = m_inlineStackTop->m_inlineCallFrame;
1313 inlineVariableData.argumentPositionStart = argumentPositionStart;
1314 inlineVariableData.calleeVariable = 0;
1315
1316 RELEASE_ASSERT(
1317 m_inlineStackTop->m_inlineCallFrame->isClosureCall
ed1e77d3
A
1318 == callee.isClosureCall());
1319 if (callee.isClosureCall()) {
81345200
A
1320 VariableAccessData* calleeVariable =
1321 set(VirtualRegister(JSStack::Callee), callTargetNode, ImmediateNakedSet)->variableAccessData();
81345200
A
1322
1323 calleeVariable->mergeShouldNeverUnbox(true);
81345200
A
1324
1325 inlineVariableData.calleeVariable = calleeVariable;
93a37866 1326 }
6fe7ccc8 1327
81345200
A
1328 m_graph.m_inlineVariableData.append(inlineVariableData);
1329
6fe7ccc8 1330 parseCodeBlock();
ed1e77d3 1331 clearCaches(); // Reset our state now that we're back to the outer code.
6fe7ccc8
A
1332
1333 m_currentIndex = oldIndex;
6fe7ccc8
A
1334
1335 // If the inlined code created some new basic blocks, then we have linking to do.
81345200 1336 if (inlineStackEntry.m_callsiteBlockHead != m_graph.lastBlock()) {
6fe7ccc8
A
1337
1338 ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
1339 if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
81345200 1340 linkBlock(inlineStackEntry.m_callsiteBlockHead, inlineStackEntry.m_blockLinkingTargets);
6fe7ccc8 1341 else
81345200 1342 ASSERT(inlineStackEntry.m_callsiteBlockHead->isLinked);
6fe7ccc8 1343
ed1e77d3
A
1344 if (callerLinkability == CallerDoesNormalLinking)
1345 cancelLinkingForBlock(inlineStackEntry.m_caller, inlineStackEntry.m_callsiteBlockHead);
6fe7ccc8
A
1346
1347 linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
1348 } else
1349 ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
1350
81345200 1351 BasicBlock* lastBlock = m_graph.lastBlock();
6fe7ccc8
A
1352 // If there was a return, but no early returns, then we're done. We allow parsing of
1353 // the caller to continue in whatever basic block we're in right now.
1354 if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
ed1e77d3
A
1355 if (Options::verboseDFGByteCodeParsing())
1356 dataLog(" Allowing parsing to continue in last inlined block.\n");
1357
1358 ASSERT(lastBlock->isEmpty() || !lastBlock->terminal());
6fe7ccc8
A
1359
1360 // If we created new blocks then the last block needs linking, but in the
1361 // caller. It doesn't need to be linked to, but it needs outgoing links.
1362 if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
6fe7ccc8
A
1363 // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
1364 // for release builds because this block will never serve as a potential target
1365 // in the linker's binary search.
ed1e77d3
A
1366 if (Options::verboseDFGByteCodeParsing())
1367 dataLog(" Repurposing last block from ", lastBlock->bytecodeBegin, " to ", m_currentIndex, "\n");
6fe7ccc8 1368 lastBlock->bytecodeBegin = m_currentIndex;
ed1e77d3
A
1369 if (callerLinkability == CallerDoesNormalLinking) {
1370 if (verbose)
1371 dataLog("Adding unlinked block ", RawPointer(m_graph.lastBlock()), " (one return)\n");
1372 m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.lastBlock()));
1373 }
6fe7ccc8
A
1374 }
1375
81345200 1376 m_currentBlock = m_graph.lastBlock();
ed1e77d3 1377 return;
6fe7ccc8
A
1378 }
1379
ed1e77d3
A
1380 if (Options::verboseDFGByteCodeParsing())
1381 dataLog(" Creating new block after inlining.\n");
1382
6fe7ccc8 1383 // If we get to this point then all blocks must end in some sort of terminal.
ed1e77d3 1384 ASSERT(lastBlock->terminal());
81345200
A
1385
1386 // Need to create a new basic block for the continuation at the caller.
1387 RefPtr<BasicBlock> block = adoptRef(new BasicBlock(nextOffset, m_numArguments, m_numLocals, PNaN));
1388
6fe7ccc8
A
1389 // Link the early returns to the basic block we're about to create.
1390 for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
1391 if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
1392 continue;
81345200
A
1393 BasicBlock* blockToLink = inlineStackEntry.m_unlinkedBlocks[i].m_block;
1394 ASSERT(!blockToLink->isLinked);
ed1e77d3 1395 Node* node = blockToLink->terminal();
93a37866 1396 ASSERT(node->op() == Jump);
81345200
A
1397 ASSERT(!node->targetBlock());
1398 node->targetBlock() = block.get();
6fe7ccc8 1399 inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
ed1e77d3
A
1400 if (verbose)
1401 dataLog("Marking ", RawPointer(blockToLink), " as linked (jumps to return)\n");
1402 blockToLink->didLink();
6fe7ccc8
A
1403 }
1404
6fe7ccc8 1405 m_currentBlock = block.get();
81345200 1406 ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_caller->m_blockLinkingTargets.last()->bytecodeBegin < nextOffset);
ed1e77d3
A
1407 if (verbose)
1408 dataLog("Adding unlinked block ", RawPointer(block.get()), " (many returns)\n");
1409 if (callerLinkability == CallerDoesNormalLinking) {
1410 m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(block.get()));
1411 m_inlineStackTop->m_caller->m_blockLinkingTargets.append(block.get());
1412 }
81345200 1413 m_graph.appendBlock(block);
6fe7ccc8 1414 prepareToParseBlock();
ed1e77d3
A
1415}
1416
1417void ByteCodeParser::cancelLinkingForBlock(InlineStackEntry* inlineStackEntry, BasicBlock* block)
1418{
1419 // It's possible that the callsite block head is not owned by the caller.
1420 if (!inlineStackEntry->m_unlinkedBlocks.isEmpty()) {
1421 // It's definitely owned by the caller, because the caller created new blocks.
1422 // Assert that this all adds up.
1423 ASSERT_UNUSED(block, inlineStackEntry->m_unlinkedBlocks.last().m_block == block);
1424 ASSERT(inlineStackEntry->m_unlinkedBlocks.last().m_needsNormalLinking);
1425 inlineStackEntry->m_unlinkedBlocks.last().m_needsNormalLinking = false;
1426 } else {
1427 // It's definitely not owned by the caller. Tell the caller that it does not
1428 // need to link its callsite block head, because we already did that linking here.
1429 ASSERT(inlineStackEntry->m_callsiteBlockHeadNeedsLinking);
1430 ASSERT_UNUSED(block, inlineStackEntry->m_callsiteBlockHead == block);
1431 inlineStackEntry->m_callsiteBlockHeadNeedsLinking = false;
1432 }
1433}
1434
1435template<typename ChecksFunctor>
1436bool ByteCodeParser::attemptToInlineCall(Node* callTargetNode, int resultOperand, CallVariant callee, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, InlineCallFrame::Kind kind, CallerLinkability callerLinkability, SpeculatedType prediction, unsigned& inliningBalance, const ChecksFunctor& insertChecks)
1437{
1438 CodeSpecializationKind specializationKind = InlineCallFrame::specializationKindFor(kind);
1439
1440 if (!inliningBalance)
1441 return false;
1442
1443 bool didInsertChecks = false;
1444 auto insertChecksWithAccounting = [&] () {
1445 insertChecks(nullptr);
1446 didInsertChecks = true;
1447 };
1448
1449 if (verbose)
1450 dataLog(" Considering callee ", callee, "\n");
1451
1452 // Intrinsics and internal functions can only be inlined if we're not doing varargs. This is because
1453 // we currently don't have any way of getting profiling information for arguments to non-JS varargs
1454 // calls. The prediction propagator won't be of any help because LoadVarargs obscures the data flow,
1455 // and there are no callsite value profiles and native functions won't have callee value profiles for
1456 // those arguments. Even worse, if the intrinsic decides to exit, it won't really have anywhere to
1457 // exit to: LoadVarargs is effectful and it's part of the op_call_varargs, so we can't exit without
1458 // calling LoadVarargs twice.
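    // Concretely (an illustrative case): Math.max.apply(null, xs) reaches us as a varargs
    // call, so MaxIntrinsic is never considered on this path; only ordinary JS callees can
    // be inlined for varargs kinds.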
1459 if (!InlineCallFrame::isVarargs(kind)) {
1460 if (InternalFunction* function = callee.internalFunction()) {
1461 if (handleConstantInternalFunction(resultOperand, function, registerOffset, argumentCountIncludingThis, specializationKind, insertChecksWithAccounting)) {
1462 RELEASE_ASSERT(didInsertChecks);
1463 addToGraph(Phantom, callTargetNode);
1464 emitArgumentPhantoms(registerOffset, argumentCountIncludingThis);
1465 inliningBalance--;
1466 return true;
1467 }
1468 RELEASE_ASSERT(!didInsertChecks);
1469 return false;
1470 }
1471
1472 Intrinsic intrinsic = callee.intrinsicFor(specializationKind);
1473 if (intrinsic != NoIntrinsic) {
1474 if (handleIntrinsic(resultOperand, intrinsic, registerOffset, argumentCountIncludingThis, prediction, insertChecksWithAccounting)) {
1475 RELEASE_ASSERT(didInsertChecks);
1476 addToGraph(Phantom, callTargetNode);
1477 emitArgumentPhantoms(registerOffset, argumentCountIncludingThis);
1478 inliningBalance--;
1479 return true;
1480 }
1481 RELEASE_ASSERT(!didInsertChecks);
1482 return false;
1483 }
1484 }
6fe7ccc8 1485
ed1e77d3
A
1486 unsigned myInliningCost = inliningCost(callee, argumentCountIncludingThis, specializationKind);
1487 if (myInliningCost > inliningBalance)
1488 return false;
1489
1490 Instruction* savedCurrentInstruction = m_currentInstruction;
1491 inlineCall(callTargetNode, resultOperand, callee, registerOffset, argumentCountIncludingThis, nextOffset, kind, callerLinkability, insertChecks);
1492 inliningBalance -= myInliningCost;
1493 m_currentInstruction = savedCurrentInstruction;
6fe7ccc8
A
1494 return true;
1495}
1496
ed1e77d3
A
1497bool ByteCodeParser::handleInlining(
1498 Node* callTargetNode, int resultOperand, const CallLinkStatus& callLinkStatus,
1499 int registerOffsetOrFirstFreeReg, VirtualRegister thisArgument,
1500 VirtualRegister argumentsArgument, unsigned argumentsOffset, int argumentCountIncludingThis,
1501 unsigned nextOffset, NodeType callOp, InlineCallFrame::Kind kind, SpeculatedType prediction)
1502{
1503 if (verbose) {
1504 dataLog("Handling inlining...\n");
1505 dataLog("Stack: ", currentCodeOrigin(), "\n");
1506 }
1507 CodeSpecializationKind specializationKind = InlineCallFrame::specializationKindFor(kind);
1508
1509 if (!callLinkStatus.size()) {
1510 if (verbose)
1511 dataLog("Bailing inlining.\n");
1512 return false;
1513 }
1514
1515 if (InlineCallFrame::isVarargs(kind)
1516 && callLinkStatus.maxNumArguments() > Options::maximumVarargsForInlining()) {
1517 if (verbose)
1518 dataLog("Bailing inlining because of varargs.\n");
1519 return false;
1520 }
1521
1522 unsigned inliningBalance = Options::maximumFunctionForCallInlineCandidateInstructionCount();
1523 if (specializationKind == CodeForConstruct)
1524 inliningBalance = std::min(inliningBalance, Options::maximumFunctionForConstructInlineCandidateInstructionCount());
1525 if (callLinkStatus.isClosureCall())
1526 inliningBalance = std::min(inliningBalance, Options::maximumFunctionForClosureCallInlineCandidateInstructionCount());
1527
1528 // First check if we can avoid creating control flow. Our inliner does some CFG
1529 // simplification on the fly and this helps reduce compile times, but we can only leverage
1530 // this in cases where we don't need control flow diamonds to check the callee.
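    // For instance (illustrative): a call site whose link status contains exactly one
    // callee and never took the slow path gets that callee's bytecode spliced straight
    // into the current block; no Switch over the callee and no merge block are needed.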
1531 if (!callLinkStatus.couldTakeSlowPath() && callLinkStatus.size() == 1) {
1532 int registerOffset;
1533
1534 // Only used for varargs calls.
1535 unsigned mandatoryMinimum = 0;
1536 unsigned maxNumArguments = 0;
1537
1538 if (InlineCallFrame::isVarargs(kind)) {
1539 if (FunctionExecutable* functionExecutable = callLinkStatus[0].functionExecutable())
1540 mandatoryMinimum = functionExecutable->parameterCount();
1541 else
1542 mandatoryMinimum = 0;
1543
1544 // includes "this"
1545 maxNumArguments = std::max(
1546 callLinkStatus.maxNumArguments(),
1547 mandatoryMinimum + 1);
1548
1549 // We sort of pretend that this *is* the number of arguments that were passed.
1550 argumentCountIncludingThis = maxNumArguments;
1551
1552 registerOffset = registerOffsetOrFirstFreeReg + 1;
1553 registerOffset -= maxNumArguments; // includes "this"
1554 registerOffset -= JSStack::CallFrameHeaderSize;
1555 registerOffset = -WTF::roundUpToMultipleOf(
1556 stackAlignmentRegisters(),
1557 -registerOffset);
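            // Worked example (hypothetical numbers): with registerOffsetOrFirstFreeReg = -20,
            // maxNumArguments = 3, and a 5-register call frame header: -20 + 1 - 3 - 5 = -27.
            // Negating around roundUpToMultipleOf rounds the negative offset away from zero,
            // so a 2-register stack alignment yields -28.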
1558 } else
1559 registerOffset = registerOffsetOrFirstFreeReg;
1560
1561 bool result = attemptToInlineCall(
1562 callTargetNode, resultOperand, callLinkStatus[0], registerOffset,
1563 argumentCountIncludingThis, nextOffset, kind, CallerDoesNormalLinking, prediction,
1564 inliningBalance, [&] (CodeBlock* codeBlock) {
1565 emitFunctionChecks(callLinkStatus[0], callTargetNode, thisArgument);
1566
1567 // If we have a varargs call, we want to extract the arguments right now.
1568 if (InlineCallFrame::isVarargs(kind)) {
1569 int remappedRegisterOffset =
1570 m_inlineStackTop->remapOperand(VirtualRegister(registerOffset)).offset();
1571
1572 ensureLocals(VirtualRegister(remappedRegisterOffset).toLocal());
1573
1574 int argumentStart = registerOffset + JSStack::CallFrameHeaderSize;
1575 int remappedArgumentStart =
1576 m_inlineStackTop->remapOperand(VirtualRegister(argumentStart)).offset();
1577
1578 LoadVarargsData* data = m_graph.m_loadVarargsData.add();
1579 data->start = VirtualRegister(remappedArgumentStart + 1);
1580 data->count = VirtualRegister(remappedRegisterOffset + JSStack::ArgumentCount);
1581 data->offset = argumentsOffset;
1582 data->limit = maxNumArguments;
1583 data->mandatoryMinimum = mandatoryMinimum;
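                // Illustrative values (hypothetical): if profiling saw at most 4 arguments
                // including 'this' and the callee declares 2 parameters, then limit = 4 and
                // mandatoryMinimum = 2; LoadVarargs fills the slots starting at data->start
                // and records the actual count at data->count.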
1584
1585 addToGraph(LoadVarargs, OpInfo(data), get(argumentsArgument));
1586
1587 // LoadVarargs may OSR exit. Hence, we need to keep alive callTargetNode, thisArgument
1588 // and argumentsArgument for the baseline JIT. However, we only need a Phantom for
1589 // callTargetNode because the other 2 are still in use and alive at this point.
1590 addToGraph(Phantom, callTargetNode);
1591
1592 // In DFG IR before SSA, we cannot insert control flow between the
1593 // LoadVarargs and the last SetArgument. This isn't a problem once we get to DFG
1594 // SSA. Fortunately, we also have other reasons for not inserting control flow
1595 // before SSA.
1596
1597 VariableAccessData* countVariable = newVariableAccessData(
1598 VirtualRegister(remappedRegisterOffset + JSStack::ArgumentCount));
1599 // This is pretty lame, but it will force the count to be flushed as an int. This doesn't
1600 // matter very much, since our use of a SetArgument and Flushes for this local slot is
1601 // mostly just a formality.
1602 countVariable->predict(SpecInt32);
1603 countVariable->mergeIsProfitableToUnbox(true);
1604 Node* setArgumentCount = addToGraph(SetArgument, OpInfo(countVariable));
1605 m_currentBlock->variablesAtTail.setOperand(countVariable->local(), setArgumentCount);
1606
1607 set(VirtualRegister(argumentStart), get(thisArgument), ImmediateNakedSet);
1608 for (unsigned argument = 1; argument < maxNumArguments; ++argument) {
1609 VariableAccessData* variable = newVariableAccessData(
1610 VirtualRegister(remappedArgumentStart + argument));
1611 variable->mergeShouldNeverUnbox(true); // We currently have nowhere to put the type check on the LoadVarargs. LoadVarargs is effectful, so after it finishes, we cannot exit.
1612
1613 // For a while it had been my intention to do things like this inside the
1614 // prediction injection phase. But in this case it's really best to do it here,
1615 // because it's here that we have access to the variable access datas for the
1616 // inlining we're about to do.
1617 //
1618 // Something else that's interesting here is that we'd really love to get
1619 // predictions from the arguments loaded at the callsite, rather than the
1620 // arguments received inside the callee. But that probably won't matter for most
1621 // calls.
1622 if (codeBlock && argument < static_cast<unsigned>(codeBlock->numParameters())) {
1623 ConcurrentJITLocker locker(codeBlock->m_lock);
1624 if (ValueProfile* profile = codeBlock->valueProfileForArgument(argument))
1625 variable->predict(profile->computeUpdatedPrediction(locker));
1626 }
1627
1628 Node* setArgument = addToGraph(SetArgument, OpInfo(variable));
1629 m_currentBlock->variablesAtTail.setOperand(variable->local(), setArgument);
1630 }
1631 }
1632 });
1633 if (verbose) {
1634 dataLog("Done inlining (simple).\n");
1635 dataLog("Stack: ", currentCodeOrigin(), "\n");
1636 dataLog("Result: ", result, "\n");
1637 }
1638 return result;
1639 }
1640
1641 // We need to create some kind of switch over callee. For now we only do this if we believe that
1642 // we're in the top tier. We have two reasons for this: first, it provides us an opportunity to
1643 // do more detailed polyvariant/polymorphic profiling; and second, it reduces compile times in
1644 // the DFG. And by polyvariant profiling we mean polyvariant profiling of *this* call. Note that
1645 // we could improve that aspect by doing polymorphic inlining while still gathering the
1646 // polyvariant profiling.
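    // Sketch of the control flow emitted below for a two-way polymorphic site:
    //
    //     Switch (callee, or GetExecutable(callee) for closure calls)
    //       case variant #0: inlined body #0, Jump to continuation
    //       case variant #1: inlined body #1, Jump to continuation
    //       default: slow-path Call (or CheckBadCell if no slow path was seen), Jump to continuation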
1647 if (!isFTL(m_graph.m_plan.mode) || !Options::enablePolymorphicCallInlining()
1648 || InlineCallFrame::isVarargs(kind)) {
1649 if (verbose) {
1650 dataLog("Bailing inlining (hard).\n");
1651 dataLog("Stack: ", currentCodeOrigin(), "\n");
1652 }
1653 return false;
1654 }
1655
1656 unsigned oldOffset = m_currentIndex;
1657
1658 bool allAreClosureCalls = true;
1659 bool allAreDirectCalls = true;
1660 for (unsigned i = callLinkStatus.size(); i--;) {
1661 if (callLinkStatus[i].isClosureCall())
1662 allAreDirectCalls = false;
1663 else
1664 allAreClosureCalls = false;
1665 }
1666
1667 Node* thingToSwitchOn;
1668 if (allAreDirectCalls)
1669 thingToSwitchOn = callTargetNode;
1670 else if (allAreClosureCalls)
1671 thingToSwitchOn = addToGraph(GetExecutable, callTargetNode);
1672 else {
1673 // FIXME: We should be able to handle this case, but it's tricky and we don't know of cases
1674 // where it would be beneficial. It might be best to handle these cases as if all calls were
1675 // closure calls.
1676 // https://bugs.webkit.org/show_bug.cgi?id=136020
1677 if (verbose) {
1678 dataLog("Bailing inlining (mix).\n");
1679 dataLog("Stack: ", currentCodeOrigin(), "\n");
1680 }
1681 return false;
1682 }
1683
1684 if (verbose) {
1685 dataLog("Doing hard inlining...\n");
1686 dataLog("Stack: ", currentCodeOrigin(), "\n");
1687 }
1688
1689 int registerOffset = registerOffsetOrFirstFreeReg;
1690
1691 // This makes me wish that we were in SSA all the time. We need to pick a variable into which to
1692 // store the callee so that it will be accessible to all of the blocks we're about to create. We
1693 // get away with doing an immediate-set here because we wouldn't have performed any side effects
1694 // yet.
1695 if (verbose)
1696 dataLog("Register offset: ", registerOffset);
1697 VirtualRegister calleeReg(registerOffset + JSStack::Callee);
1698 calleeReg = m_inlineStackTop->remapOperand(calleeReg);
1699 if (verbose)
1700 dataLog("Callee is going to be ", calleeReg, "\n");
1701 setDirect(calleeReg, callTargetNode, ImmediateSetWithFlush);
1702
1703 SwitchData& data = *m_graph.m_switchData.add();
1704 data.kind = SwitchCell;
1705 addToGraph(Switch, OpInfo(&data), thingToSwitchOn);
1706
1707 BasicBlock* originBlock = m_currentBlock;
1708 if (verbose)
1709 dataLog("Marking ", RawPointer(originBlock), " as linked (origin of poly inline)\n");
1710 originBlock->didLink();
1711 cancelLinkingForBlock(m_inlineStackTop, originBlock);
1712
1713 // Each inlined callee will have a landing block that it returns to. They should all have jumps
1714 // to the continuation block, which we create last.
1715 Vector<BasicBlock*> landingBlocks;
1716
1717 // We may force this true if we give up on inlining any of the edges.
1718 bool couldTakeSlowPath = callLinkStatus.couldTakeSlowPath();
1719
1720 if (verbose)
1721 dataLog("About to loop over functions at ", currentCodeOrigin(), ".\n");
1722
1723 for (unsigned i = 0; i < callLinkStatus.size(); ++i) {
1724 m_currentIndex = oldOffset;
1725 RefPtr<BasicBlock> block = adoptRef(new BasicBlock(UINT_MAX, m_numArguments, m_numLocals, PNaN));
1726 m_currentBlock = block.get();
1727 m_graph.appendBlock(block);
1728 prepareToParseBlock();
1729
1730 Node* myCallTargetNode = getDirect(calleeReg);
1731
1732 bool inliningResult = attemptToInlineCall(
1733 myCallTargetNode, resultOperand, callLinkStatus[i], registerOffset,
1734 argumentCountIncludingThis, nextOffset, kind, CallerLinksManually, prediction,
1735 inliningBalance, [&] (CodeBlock*) { });
1736
1737 if (!inliningResult) {
1738 // That failed so we let the block die. Nothing interesting should have been added to
1739 // the block. We also give up on inlining any of the (less frequent) callees.
1740 ASSERT(m_currentBlock == block.get());
1741 ASSERT(m_graph.m_blocks.last() == block);
1742 m_graph.killBlockAndItsContents(block.get());
1743 m_graph.m_blocks.removeLast();
1744
1745 // The fact that inlining failed means we need a slow path.
1746 couldTakeSlowPath = true;
1747 break;
1748 }
1749
1750 JSCell* thingToCaseOn;
1751 if (allAreDirectCalls)
1752 thingToCaseOn = callLinkStatus[i].nonExecutableCallee();
1753 else {
1754 ASSERT(allAreClosureCalls);
1755 thingToCaseOn = callLinkStatus[i].executable();
1756 }
1757 data.cases.append(SwitchCase(m_graph.freeze(thingToCaseOn), block.get()));
1758 m_currentIndex = nextOffset;
1759 processSetLocalQueue(); // This only comes into play for intrinsics, since normal inlined code will leave an empty queue.
1760 addToGraph(Jump);
1761 if (verbose)
1762 dataLog("Marking ", RawPointer(m_currentBlock), " as linked (tail of poly inlinee)\n");
1763 m_currentBlock->didLink();
1764 landingBlocks.append(m_currentBlock);
1765
1766 if (verbose)
1767 dataLog("Finished inlining ", callLinkStatus[i], " at ", currentCodeOrigin(), ".\n");
1768 }
1769
1770 RefPtr<BasicBlock> slowPathBlock = adoptRef(
1771 new BasicBlock(UINT_MAX, m_numArguments, m_numLocals, PNaN));
1772 m_currentIndex = oldOffset;
1773 data.fallThrough = BranchTarget(slowPathBlock.get());
1774 m_graph.appendBlock(slowPathBlock);
1775 if (verbose)
1776 dataLog("Marking ", RawPointer(slowPathBlock.get()), " as linked (slow path block)\n");
1777 slowPathBlock->didLink();
1778 prepareToParseBlock();
1779 m_currentBlock = slowPathBlock.get();
1780 Node* myCallTargetNode = getDirect(calleeReg);
1781 if (couldTakeSlowPath) {
1782 addCall(
1783 resultOperand, callOp, OpInfo(), myCallTargetNode, argumentCountIncludingThis,
1784 registerOffset, prediction);
1785 } else {
1786 addToGraph(CheckBadCell);
1787 addToGraph(Phantom, myCallTargetNode);
1788 emitArgumentPhantoms(registerOffset, argumentCountIncludingThis);
1789
1790 set(VirtualRegister(resultOperand), addToGraph(BottomValue));
1791 }
1792
1793 m_currentIndex = nextOffset;
1794 processSetLocalQueue();
1795 addToGraph(Jump);
1796 landingBlocks.append(m_currentBlock);
1797
1798 RefPtr<BasicBlock> continuationBlock = adoptRef(
1799 new BasicBlock(UINT_MAX, m_numArguments, m_numLocals, PNaN));
1800 m_graph.appendBlock(continuationBlock);
1801 if (verbose)
1802 dataLog("Adding unlinked block ", RawPointer(continuationBlock.get()), " (continuation)\n");
1803 m_inlineStackTop->m_unlinkedBlocks.append(UnlinkedBlock(continuationBlock.get()));
1804 prepareToParseBlock();
1805 m_currentBlock = continuationBlock.get();
1806
1807 for (unsigned i = landingBlocks.size(); i--;)
1808 landingBlocks[i]->terminal()->targetBlock() = continuationBlock.get();
1809
1810 m_currentIndex = oldOffset;
1811
1812 if (verbose) {
1813 dataLog("Done inlining (hard).\n");
1814 dataLog("Stack: ", currentCodeOrigin(), "\n");
1815 }
1816 return true;
1817}
1818
1819template<typename ChecksFunctor>
1820bool ByteCodeParser::handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis, const ChecksFunctor& insertChecks)
6fe7ccc8
A
1821{
1822 if (argumentCountIncludingThis == 1) { // Math.min() is +Infinity, Math.max() is -Infinity.
ed1e77d3
A
1823 insertChecks();
1824 set(VirtualRegister(resultOperand), jsConstant(jsDoubleNumber(op == ArithMax ? -std::numeric_limits<double>::infinity() : std::numeric_limits<double>::infinity())));
6fe7ccc8
A
1825 return true;
1826 }
1827
1828 if (argumentCountIncludingThis == 2) { // Math.min(x)
ed1e77d3 1829 insertChecks();
81345200 1830 Node* result = get(VirtualRegister(virtualRegisterForArgument(1, registerOffset)));
93a37866 1831 addToGraph(Phantom, Edge(result, NumberUse));
81345200 1832 set(VirtualRegister(resultOperand), result);
6fe7ccc8
A
1833 return true;
1834 }
1835
1836 if (argumentCountIncludingThis == 3) { // Math.min(x, y)
ed1e77d3 1837 insertChecks();
81345200 1838 set(VirtualRegister(resultOperand), addToGraph(op, get(virtualRegisterForArgument(1, registerOffset)), get(virtualRegisterForArgument(2, registerOffset))));
6fe7ccc8
A
1839 return true;
1840 }
1841
1842 // Don't handle >=3 arguments for now.
1843 return false;
1844}
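// E.g., Math.min(a, b) becomes a single ArithMin node; with three or more actual arguments
// we return false above and the site is compiled as an ordinary call.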
1845
ed1e77d3
A
1846template<typename ChecksFunctor>
1847bool ByteCodeParser::handleIntrinsic(int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, const ChecksFunctor& insertChecks)
6fe7ccc8
A
1848{
1849 switch (intrinsic) {
1850 case AbsIntrinsic: {
1851 if (argumentCountIncludingThis == 1) { // Math.abs()
ed1e77d3
A
1852 insertChecks();
1853 set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantNaN)));
6fe7ccc8
A
1854 return true;
1855 }
1856
1857 if (!MacroAssembler::supportsFloatingPointAbs())
1858 return false;
1859
ed1e77d3 1860 insertChecks();
81345200 1861 Node* node = addToGraph(ArithAbs, get(virtualRegisterForArgument(1, registerOffset)));
6fe7ccc8 1862 if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
81345200
A
1863 node->mergeFlags(NodeMayOverflowInDFG);
1864 set(VirtualRegister(resultOperand), node);
6fe7ccc8
A
1865 return true;
1866 }
1867
1868 case MinIntrinsic:
ed1e77d3 1869 return handleMinMax(resultOperand, ArithMin, registerOffset, argumentCountIncludingThis, insertChecks);
6fe7ccc8
A
1870
1871 case MaxIntrinsic:
ed1e77d3
A
1872 return handleMinMax(resultOperand, ArithMax, registerOffset, argumentCountIncludingThis, insertChecks);
1873
81345200
A
1874 case SqrtIntrinsic:
1875 case CosIntrinsic:
ed1e77d3
A
1876 case SinIntrinsic:
1877 case LogIntrinsic: {
81345200 1878 if (argumentCountIncludingThis == 1) {
ed1e77d3
A
1879 insertChecks();
1880 set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantNaN)));
6fe7ccc8
A
1881 return true;
1882 }
1883
81345200
A
1884 switch (intrinsic) {
1885 case SqrtIntrinsic:
ed1e77d3 1886 insertChecks();
81345200
A
1887 set(VirtualRegister(resultOperand), addToGraph(ArithSqrt, get(virtualRegisterForArgument(1, registerOffset))));
1888 return true;
1889
1890 case CosIntrinsic:
ed1e77d3 1891 insertChecks();
81345200
A
1892 set(VirtualRegister(resultOperand), addToGraph(ArithCos, get(virtualRegisterForArgument(1, registerOffset))));
1893 return true;
1894
1895 case SinIntrinsic:
ed1e77d3 1896 insertChecks();
81345200
A
1897 set(VirtualRegister(resultOperand), addToGraph(ArithSin, get(virtualRegisterForArgument(1, registerOffset))));
1898 return true;
ed1e77d3
A
1899
1900 case LogIntrinsic:
1901 insertChecks();
1902 set(VirtualRegister(resultOperand), addToGraph(ArithLog, get(virtualRegisterForArgument(1, registerOffset))));
1903 return true;
81345200
A
1904
1905 default:
1906 RELEASE_ASSERT_NOT_REACHED();
6fe7ccc8 1907 return false;
81345200 1908 }
6fe7ccc8 1909 }
ed1e77d3
A
1910
1911 case PowIntrinsic: {
1912 if (argumentCountIncludingThis < 3) {
1913 // Math.pow() and Math.pow(x) return NaN.
1914 insertChecks();
1915 set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantNaN)));
1916 return true;
1917 }
1918 insertChecks();
1919 VirtualRegister xOperand = virtualRegisterForArgument(1, registerOffset);
1920 VirtualRegister yOperand = virtualRegisterForArgument(2, registerOffset);
1921 set(VirtualRegister(resultOperand), addToGraph(ArithPow, get(xOperand), get(yOperand)));
1922 return true;
1923 }
6fe7ccc8
A
1924
1925 case ArrayPushIntrinsic: {
1926 if (argumentCountIncludingThis != 2)
1927 return false;
1928
81345200 1929 ArrayMode arrayMode = getArrayMode(m_currentInstruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile);
93a37866
A
1930 if (!arrayMode.isJSArray())
1931 return false;
1932 switch (arrayMode.type()) {
1933 case Array::Undecided:
1934 case Array::Int32:
1935 case Array::Double:
1936 case Array::Contiguous:
1937 case Array::ArrayStorage: {
ed1e77d3 1938 insertChecks();
81345200
A
1939 Node* arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
1940 set(VirtualRegister(resultOperand), arrayPush);
93a37866
A
1941
1942 return true;
1943 }
1944
1945 default:
1946 return false;
1947 }
6fe7ccc8
A
1948 }
1949
1950 case ArrayPopIntrinsic: {
1951 if (argumentCountIncludingThis != 1)
1952 return false;
1953
81345200 1954 ArrayMode arrayMode = getArrayMode(m_currentInstruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile);
93a37866
A
1955 if (!arrayMode.isJSArray())
1956 return false;
1957 switch (arrayMode.type()) {
1958 case Array::Int32:
1959 case Array::Double:
1960 case Array::Contiguous:
1961 case Array::ArrayStorage: {
ed1e77d3 1962 insertChecks();
81345200
A
1963 Node* arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)));
1964 set(VirtualRegister(resultOperand), arrayPop);
93a37866
A
1965 return true;
1966 }
1967
1968 default:
1969 return false;
1970 }
6fe7ccc8
A
1971 }
1972
1973 case CharCodeAtIntrinsic: {
1974 if (argumentCountIncludingThis != 2)
1975 return false;
1976
ed1e77d3 1977 insertChecks();
81345200
A
1978 VirtualRegister thisOperand = virtualRegisterForArgument(0, registerOffset);
1979 VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
1980 Node* charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), get(indexOperand));
14957cd0 1981
81345200 1982 set(VirtualRegister(resultOperand), charCode);
6fe7ccc8
A
1983 return true;
1984 }
14957cd0 1985
6fe7ccc8
A
1986 case CharAtIntrinsic: {
1987 if (argumentCountIncludingThis != 2)
1988 return false;
14957cd0 1989
ed1e77d3 1990 insertChecks();
81345200
A
1991 VirtualRegister thisOperand = virtualRegisterForArgument(0, registerOffset);
1992 VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
1993 Node* charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), get(indexOperand));
93a37866 1994
81345200 1995 set(VirtualRegister(resultOperand), charCode);
93a37866
A
1996 return true;
1997 }
ed1e77d3
A
1998 case Clz32Intrinsic: {
1999 insertChecks();
2000 if (argumentCountIncludingThis == 1)
2001 set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_graph.freeze(jsNumber(32)))));
2002 else {
2003 Node* operand = get(virtualRegisterForArgument(1, registerOffset));
2004 set(VirtualRegister(resultOperand), addToGraph(ArithClz32, operand));
2005 }
2006 return true;
2007 }
93a37866
A
2008 case FromCharCodeIntrinsic: {
2009 if (argumentCountIncludingThis != 2)
6fe7ccc8 2010 return false;
14957cd0 2011
ed1e77d3 2012 insertChecks();
81345200
A
2013 VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
2014 Node* charCode = addToGraph(StringFromCharCode, get(indexOperand));
14957cd0 2015
81345200 2016 set(VirtualRegister(resultOperand), charCode);
93a37866 2017
6fe7ccc8
A
2018 return true;
2019 }
14957cd0 2020
6fe7ccc8
A
2021 case RegExpExecIntrinsic: {
2022 if (argumentCountIncludingThis != 2)
2023 return false;
2024
ed1e77d3 2025 insertChecks();
81345200
A
2026 Node* regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
2027 set(VirtualRegister(resultOperand), regExpExec);
6fe7ccc8
A
2028
2029 return true;
2030 }
2031
2032 case RegExpTestIntrinsic: {
2033 if (argumentCountIncludingThis != 2)
2034 return false;
2035
ed1e77d3 2036 insertChecks();
81345200
A
2037 Node* regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
2038 set(VirtualRegister(resultOperand), regExpExec);
6fe7ccc8
A
2039
2040 return true;
2041 }
ed1e77d3
A
2042 case RoundIntrinsic: {
2043 if (argumentCountIncludingThis == 1) {
2044 insertChecks();
2045 set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantNaN)));
2046 return true;
2047 }
2048 if (argumentCountIncludingThis == 2) {
2049 insertChecks();
2050 Node* operand = get(virtualRegisterForArgument(1, registerOffset));
2051 Node* roundNode = addToGraph(ArithRound, OpInfo(0), OpInfo(prediction), operand);
2052 set(VirtualRegister(resultOperand), roundNode);
2053 return true;
2054 }
2055 return false;
2056 }
93a37866
A
2057 case IMulIntrinsic: {
2058 if (argumentCountIncludingThis != 3)
2059 return false;
ed1e77d3 2060 insertChecks();
81345200
A
2061 VirtualRegister leftOperand = virtualRegisterForArgument(1, registerOffset);
2062 VirtualRegister rightOperand = virtualRegisterForArgument(2, registerOffset);
2063 Node* left = get(leftOperand);
2064 Node* right = get(rightOperand);
2065 set(VirtualRegister(resultOperand), addToGraph(ArithIMul, left, right));
2066 return true;
2067 }
2068
2069 case FRoundIntrinsic: {
2070 if (argumentCountIncludingThis != 2)
2071 return false;
ed1e77d3 2072 insertChecks();
81345200
A
2073 VirtualRegister operand = virtualRegisterForArgument(1, registerOffset);
2074 set(VirtualRegister(resultOperand), addToGraph(ArithFRound, get(operand)));
2075 return true;
2076 }
2077
2078 case DFGTrueIntrinsic: {
ed1e77d3
A
2079 insertChecks();
2080 set(VirtualRegister(resultOperand), jsConstant(jsBoolean(true)));
81345200
A
2081 return true;
2082 }
2083
2084 case OSRExitIntrinsic: {
ed1e77d3 2085 insertChecks();
81345200 2086 addToGraph(ForceOSRExit);
ed1e77d3 2087 set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantUndefined)));
81345200
A
2088 return true;
2089 }
2090
2091 case IsFinalTierIntrinsic: {
ed1e77d3 2092 insertChecks();
81345200 2093 set(VirtualRegister(resultOperand),
ed1e77d3 2094 jsConstant(jsBoolean(Options::useFTLJIT() ? isFTL(m_graph.m_plan.mode) : true)));
81345200
A
2095 return true;
2096 }
2097
2098 case SetInt32HeapPredictionIntrinsic: {
ed1e77d3 2099 insertChecks();
81345200
A
2100 for (int i = 1; i < argumentCountIncludingThis; ++i) {
2101 Node* node = get(virtualRegisterForArgument(i, registerOffset));
2102 if (node->hasHeapPrediction())
2103 node->setHeapPrediction(SpecInt32);
2104 }
ed1e77d3
A
2105 set(VirtualRegister(resultOperand), addToGraph(JSConstant, OpInfo(m_constantUndefined)));
2106 return true;
2107 }
2108
2109 case CheckInt32Intrinsic: {
2110 insertChecks();
2111 for (int i = 1; i < argumentCountIncludingThis; ++i) {
2112 Node* node = get(virtualRegisterForArgument(i, registerOffset));
2113 addToGraph(Phantom, Edge(node, Int32Use));
2114 }
2115 set(VirtualRegister(resultOperand), jsConstant(jsBoolean(true)));
81345200
A
2116 return true;
2117 }
2118
2119 case FiatInt52Intrinsic: {
2120 if (argumentCountIncludingThis != 2)
2121 return false;
ed1e77d3 2122 insertChecks();
81345200
A
2123 VirtualRegister operand = virtualRegisterForArgument(1, registerOffset);
2124 if (enableInt52())
2125 set(VirtualRegister(resultOperand), addToGraph(FiatInt52, get(operand)));
2126 else
2127 set(VirtualRegister(resultOperand), get(operand));
93a37866
A
2128 return true;
2129 }
6fe7ccc8
A
2130
2131 default:
2132 return false;
2133 }
2134}
2135
ed1e77d3 2136template<typename ChecksFunctor>
81345200
A
2137bool ByteCodeParser::handleTypedArrayConstructor(
2138 int resultOperand, InternalFunction* function, int registerOffset,
ed1e77d3 2139 int argumentCountIncludingThis, TypedArrayType type, const ChecksFunctor& insertChecks)
81345200
A
2140{
2141 if (!isTypedView(type))
2142 return false;
2143
2144 if (function->classInfo() != constructorClassInfoForType(type))
2145 return false;
2146
2147 if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
2148 return false;
2149
2150 // We only have an intrinsic for the case where you say:
2151 //
2152 // new FooArray(blah);
2153 //
2154 // Of course, 'blah' could be any of the following:
2155 //
2156 // - Integer, indicating that you want to allocate an array of that length.
2157 // This is the thing we're hoping for, and what we can actually do meaningful
2158 // optimizations for.
2159 //
2160 // - Array buffer, indicating that you want to create a view onto that _entire_
2161 // buffer.
2162 //
2163 // - Non-buffer object, indicating that you want to create a copy of that
2164 // object by pretending that it quacks like an array.
2165 //
2166 // - Anything else, indicating that you want to have an exception thrown at
2167 // you.
2168 //
2169 // The intrinsic, NewTypedArray, will behave as if it could do any of these
2170 // things up until we do Fixup. Thereafter, if child1 (i.e. 'blah') is
2171 // predicted Int32, then we lock it in as a normal typed array allocation.
2172 // Otherwise, NewTypedArray turns into a totally opaque function call that
2173 // may clobber the world - by virtue of it accessing properties on what could
2174 // be an object.
2175 //
2176 // Note that although the generic form of NewTypedArray sounds sort of awful,
2177 // it is actually quite likely to be more efficient than a fully generic
2178 // Construct. So, we might want to think about making NewTypedArray variadic,
2179 // or else making Construct not super slow.
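    // Illustrative JS (hypothetical): `new Uint8Array(100)` is the case we win on, since
    // child1 is predicted Int32 and Fixup locks in a plain length-based allocation, while
    // `new Uint8Array(someObject)` keeps the opaque, potentially world-clobbering form.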
2180
2181 if (argumentCountIncludingThis != 2)
2182 return false;
ed1e77d3
A
2183
2184 insertChecks();
81345200
A
2185 set(VirtualRegister(resultOperand),
2186 addToGraph(NewTypedArray, OpInfo(type), get(virtualRegisterForArgument(1, registerOffset))));
2187 return true;
2188}
2189
ed1e77d3 2190template<typename ChecksFunctor>
93a37866 2191bool ByteCodeParser::handleConstantInternalFunction(
81345200 2192 int resultOperand, InternalFunction* function, int registerOffset,
ed1e77d3 2193 int argumentCountIncludingThis, CodeSpecializationKind kind, const ChecksFunctor& insertChecks)
93a37866 2194{
ed1e77d3
A
2195 if (verbose)
2196 dataLog(" Handling constant internal function ", JSValue(function), "\n");
2197
93a37866
A
2198 // If we ever find that we have a lot of internal functions that we specialize for,
2199 // then we should probably have some sort of hashtable dispatch, or maybe even
2200 // dispatch straight through the MethodTable of the InternalFunction. But for now,
2201 // it seems that this case is hit infrequently enough, and the number of functions
2202 // we know about is small enough, that having just a linear cascade of if statements
2203 // is good enough.
2204
81345200
A
2205 if (function->classInfo() == ArrayConstructor::info()) {
2206 if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
2207 return false;
2208
ed1e77d3 2209 insertChecks();
93a37866 2210 if (argumentCountIncludingThis == 2) {
81345200
A
2211 set(VirtualRegister(resultOperand),
2212 addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(virtualRegisterForArgument(1, registerOffset))));
93a37866
A
2213 return true;
2214 }
2215
ed1e77d3 2216 // FIXME: Array constructor should use "this" as newTarget.
93a37866 2217 for (int i = 1; i < argumentCountIncludingThis; ++i)
81345200
A
2218 addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));
2219 set(VirtualRegister(resultOperand),
93a37866
A
2220 addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
2221 return true;
81345200
A
2222 }
2223
2224 if (function->classInfo() == StringConstructor::info()) {
ed1e77d3
A
2225 insertChecks();
2226
93a37866
A
2227 Node* result;
2228
2229 if (argumentCountIncludingThis <= 1)
ed1e77d3 2230 result = jsConstant(m_vm->smallStrings.emptyString());
93a37866 2231 else
ed1e77d3 2232 result = addToGraph(CallStringConstructor, get(virtualRegisterForArgument(1, registerOffset)));
93a37866
A
2233
2234 if (kind == CodeForConstruct)
2235 result = addToGraph(NewStringObject, OpInfo(function->globalObject()->stringObjectStructure()), result);
2236
81345200 2237 set(VirtualRegister(resultOperand), result);
93a37866
A
2238 return true;
2239 }
2240
81345200
A
2241 for (unsigned typeIndex = 0; typeIndex < NUMBER_OF_TYPED_ARRAY_TYPES; ++typeIndex) {
2242 bool result = handleTypedArrayConstructor(
2243 resultOperand, function, registerOffset, argumentCountIncludingThis,
ed1e77d3 2244 indexToTypedArrayType(typeIndex), insertChecks);
81345200
A
2245 if (result)
2246 return true;
2247 }
2248
93a37866
A
2249 return false;
2250}
2251
ed1e77d3 2252Node* ByteCodeParser::handleGetByOffset(SpeculatedType prediction, Node* base, const StructureSet& structureSet, unsigned identifierNumber, PropertyOffset offset, NodeType op)
93a37866 2253{
ed1e77d3
A
2254 if (base->hasConstant()) {
2255 if (JSValue constant = m_graph.tryGetConstantProperty(base->asJSValue(), structureSet, offset)) {
2256 addToGraph(Phantom, base);
2257 return weakJSConstant(constant);
2258 }
2259 }
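    // For example (an illustrative case): when the base is a compile-time object constant
    // and tryGetConstantProperty can prove the property's current value, the entire access
    // folds to a weak JSConstant; the Phantom above merely keeps the base alive.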
2260
93a37866
A
2261 Node* propertyStorage;
2262 if (isInlineOffset(offset))
2263 propertyStorage = base;
2264 else
2265 propertyStorage = addToGraph(GetButterfly, base);
ed1e77d3
A
2266
2267 StorageAccessData* data = m_graph.m_storageAccessData.add();
2268 data->offset = offset;
2269 data->identifierNumber = identifierNumber;
2270
2271 Node* getByOffset = addToGraph(op, OpInfo(data), OpInfo(prediction), propertyStorage, base);
93a37866
A
2272
2273 return getByOffset;
2274}
2275
81345200
A
2276Node* ByteCodeParser::handlePutByOffset(Node* base, unsigned identifier, PropertyOffset offset, Node* value)
2277{
2278 Node* propertyStorage;
2279 if (isInlineOffset(offset))
2280 propertyStorage = base;
2281 else
2282 propertyStorage = addToGraph(GetButterfly, base);
81345200 2283
ed1e77d3
A
2284 StorageAccessData* data = m_graph.m_storageAccessData.add();
2285 data->offset = offset;
2286 data->identifierNumber = identifier;
2287
2288 Node* result = addToGraph(PutByOffset, OpInfo(data), propertyStorage, base, value);
2289
81345200
A
2290 return result;
2291}
2292
ed1e77d3 2293void ByteCodeParser::emitChecks(const ConstantStructureCheckVector& vector)
81345200 2294{
ed1e77d3
A
2295 for (unsigned i = 0; i < vector.size(); ++i)
2296 cellConstantWithStructureCheck(vector[i].constant(), vector[i].structure());
93a37866
A
2297}
2298
2299void ByteCodeParser::handleGetById(
2300 int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
2301 const GetByIdStatus& getByIdStatus)
2302{
ed1e77d3
A
2303 NodeType getById = getByIdStatus.makesCalls() ? GetByIdFlush : GetById;
2304
2305 if (!getByIdStatus.isSimple() || !getByIdStatus.numVariants() || !Options::enableAccessInlining()) {
81345200 2306 set(VirtualRegister(destinationOperand),
ed1e77d3 2307 addToGraph(getById, OpInfo(identifierNumber), OpInfo(prediction), base));
93a37866
A
2308 return;
2309 }
2310
81345200 2311 if (getByIdStatus.numVariants() > 1) {
ed1e77d3
A
2312 if (getByIdStatus.makesCalls() || !isFTL(m_graph.m_plan.mode)
2313 || !Options::enablePolymorphicAccessInlining()) {
81345200 2314 set(VirtualRegister(destinationOperand),
ed1e77d3 2315 addToGraph(getById, OpInfo(identifierNumber), OpInfo(prediction), base));
81345200 2316 return;
93a37866 2317 }
81345200
A
2318
2319 if (m_graph.compilation())
2320 m_graph.compilation()->noticeInlinedGetById();
2321
2322 // 1) Emit prototype structure checks for all chains. This may not be optimal if there
2323 // is some rarely executed case in the chain that requires a lot of checks and those
2324 // checks are not watchpointable.
ed1e77d3
A
2325 for (unsigned variantIndex = getByIdStatus.numVariants(); variantIndex--;)
2326 emitChecks(getByIdStatus[variantIndex].constantChecks());
81345200
A
2327
2328 // 2) Emit a MultiGetByOffset
2329 MultiGetByOffsetData* data = m_graph.m_multiGetByOffsetData.add();
2330 data->variants = getByIdStatus.variants();
2331 data->identifierNumber = identifierNumber;
2332 set(VirtualRegister(destinationOperand),
2333 addToGraph(MultiGetByOffset, OpInfo(data), OpInfo(prediction), base));
2334 return;
2335 }
2336
2337 ASSERT(getByIdStatus.numVariants() == 1);
2338 GetByIdVariant variant = getByIdStatus[0];
2339
2340 if (m_graph.compilation())
2341 m_graph.compilation()->noticeInlinedGetById();
2342
ed1e77d3 2343 Node* originalBase = base;
81345200
A
2344
2345 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(variant.structureSet())), base);
2346
ed1e77d3
A
2347 emitChecks(variant.constantChecks());
2348
2349 if (variant.alternateBase())
2350 base = weakJSConstant(variant.alternateBase());
93a37866
A
2351
2352 // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
2353 // ensure that the base of the original get_by_id is kept alive until we're done with
2354 // all of the speculations. We only insert the Phantom if there had been a CheckStructure
ed1e77d3
A
2355 // on something other than the base following the CheckStructure on base.
2356 if (originalBase != base)
2357 addToGraph(Phantom, originalBase);
93a37866 2358
ed1e77d3
A
2359 Node* loadedValue = handleGetByOffset(
2360 variant.callLinkStatus() ? SpecCellOther : prediction,
2361 base, variant.baseStructure(), identifierNumber, variant.offset(),
2362 variant.callLinkStatus() ? GetGetterSetterByOffset : GetByOffset);
2363
2364 if (!variant.callLinkStatus()) {
2365 set(VirtualRegister(destinationOperand), loadedValue);
93a37866
A
2366 return;
2367 }
2368
ed1e77d3
A
2369 Node* getter = addToGraph(GetGetter, loadedValue);
2370
2371 // Make a call. We don't try to get fancy with using the smallest operand number because
2372 // the stack layout phase should compress the stack anyway.
2373
2374 unsigned numberOfParameters = 0;
2375 numberOfParameters++; // The 'this' argument.
2376 numberOfParameters++; // True return PC.
2377
2378 // Start with a register offset that corresponds to the last in-use register.
2379 int registerOffset = virtualRegisterForLocal(
2380 m_inlineStackTop->m_profiledBlock->m_numCalleeRegisters - 1).offset();
2381 registerOffset -= numberOfParameters;
2382 registerOffset -= JSStack::CallFrameHeaderSize;
2383
2384 // Get the alignment right.
2385 registerOffset = -WTF::roundUpToMultipleOf(
2386 stackAlignmentRegisters(),
2387 -registerOffset);
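    // Illustrative arithmetic (hypothetical numbers): with 10 callee registers in the
    // profiled block, the last in-use local is local(9) at offset -10; subtracting the 2
    // parameters and a 5-register header gives -17, which a 2-register stack alignment
    // rounds away from zero to -18.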
2388
2389 ensureLocals(
2390 m_inlineStackTop->remapOperand(
2391 VirtualRegister(registerOffset)).toLocal());
2392
2393 // Issue SetLocals. This has two effects:
2394 // 1) That's how handleCall() sees the arguments.
2395 // 2) If we inline then this ensures that the arguments are flushed so that if you use
2396 // the dreaded arguments object on the getter, the right things happen. Well, sort of -
2397 // since we only really care about 'this' in this case. But we're not going to take that
2398 // shortcut.
2399 int nextRegister = registerOffset + JSStack::CallFrameHeaderSize;
2400 set(VirtualRegister(nextRegister++), originalBase, ImmediateNakedSet);
2401
2402 handleCall(
2403 destinationOperand, Call, InlineCallFrame::GetterCall, OPCODE_LENGTH(op_get_by_id),
2404 getter, numberOfParameters - 1, registerOffset, *variant.callLinkStatus(), prediction);
93a37866
A
2405}
2406
81345200
A
2407void ByteCodeParser::emitPutById(
2408 Node* base, unsigned identifierNumber, Node* value, const PutByIdStatus& putByIdStatus, bool isDirect)
6fe7ccc8 2409{
81345200
A
2410 if (isDirect)
2411 addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
2412 else
2413 addToGraph(putByIdStatus.makesCalls() ? PutByIdFlush : PutById, OpInfo(identifierNumber), base, value);
6fe7ccc8 2414}
14957cd0 2415
81345200
A
2416void ByteCodeParser::handlePutById(
2417 Node* base, unsigned identifierNumber, Node* value,
2418 const PutByIdStatus& putByIdStatus, bool isDirect)
93a37866 2419{
ed1e77d3 2420 if (!putByIdStatus.isSimple() || !putByIdStatus.numVariants() || !Options::enableAccessInlining()) {
81345200
A
2421 if (!putByIdStatus.isSet())
2422 addToGraph(ForceOSRExit);
2423 emitPutById(base, identifierNumber, value, putByIdStatus, isDirect);
2424 return;
93a37866 2425 }
81345200
A
2426
2427 if (putByIdStatus.numVariants() > 1) {
2428 if (!isFTL(m_graph.m_plan.mode) || putByIdStatus.makesCalls()
2429 || !Options::enablePolymorphicAccessInlining()) {
2430 emitPutById(base, identifierNumber, value, putByIdStatus, isDirect);
2431 return;
2432 }
2433
2434 if (m_graph.compilation())
2435 m_graph.compilation()->noticeInlinedPutById();
2436
2437 if (!isDirect) {
2438 for (unsigned variantIndex = putByIdStatus.numVariants(); variantIndex--;) {
2439 if (putByIdStatus[variantIndex].kind() != PutByIdVariant::Transition)
2440 continue;
ed1e77d3 2441 emitChecks(putByIdStatus[variantIndex].constantChecks());
81345200
A
2442 }
2443 }
2444
2445 MultiPutByOffsetData* data = m_graph.m_multiPutByOffsetData.add();
2446 data->variants = putByIdStatus.variants();
2447 data->identifierNumber = identifierNumber;
2448 addToGraph(MultiPutByOffset, OpInfo(data), base, value);
2449 return;
2450 }
2451
2452 ASSERT(putByIdStatus.numVariants() == 1);
2453 const PutByIdVariant& variant = putByIdStatus[0];
2454
ed1e77d3
A
2455 switch (variant.kind()) {
2456 case PutByIdVariant::Replace: {
81345200
A
2457 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(variant.structure())), base);
2458 handlePutByOffset(base, identifierNumber, variant.offset(), value);
2459 if (m_graph.compilation())
2460 m_graph.compilation()->noticeInlinedPutById();
2461 return;
2462 }
2463
ed1e77d3
A
2464 case PutByIdVariant::Transition: {
2465 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(variant.oldStructure())), base);
2466 emitChecks(variant.constantChecks());
93a37866 2467
ed1e77d3 2468 ASSERT(variant.oldStructureForTransition()->transitionWatchpointSetHasBeenInvalidated());
81345200 2469
ed1e77d3
A
2470 Node* propertyStorage;
2471 Transition* transition = m_graph.m_transitions.add(
2472 variant.oldStructureForTransition(), variant.newStructure());
93a37866 2473
ed1e77d3 2474 if (variant.reallocatesStorage()) {
93a37866 2475
ed1e77d3
A
2476 // If we're growing the property storage then it must be because we're
2477 // storing into the out-of-line storage.
2478 ASSERT(!isInlineOffset(variant.offset()));
93a37866 2479
ed1e77d3
A
2480 if (!variant.oldStructureForTransition()->outOfLineCapacity()) {
2481 propertyStorage = addToGraph(
2482 AllocatePropertyStorage, OpInfo(transition), base);
2483 } else {
2484 propertyStorage = addToGraph(
2485 ReallocatePropertyStorage, OpInfo(transition),
2486 base, addToGraph(GetButterfly, base));
2487 }
81345200 2488 } else {
ed1e77d3
A
2489 if (isInlineOffset(variant.offset()))
2490 propertyStorage = base;
2491 else
2492 propertyStorage = addToGraph(GetButterfly, base);
93a37866 2493 }
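        // Illustrative transitions (hypothetical structures): adding the first out-of-line
        // property to an object with no butterfly takes the AllocatePropertyStorage path;
        // outgrowing an existing butterfly takes ReallocatePropertyStorage; a transition
        // whose new property fits inline, or in existing out-of-line capacity, reuses the
        // storage we already have.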
93a37866 2494
ed1e77d3
A
2495 StorageAccessData* data = m_graph.m_storageAccessData.add();
2496 data->offset = variant.offset();
2497 data->identifierNumber = identifierNumber;
2498
2499 addToGraph(
2500 PutByOffset,
2501 OpInfo(data),
2502 propertyStorage,
2503 base,
2504 value);
93a37866 2505
ed1e77d3
A
2506 // FIXME: PutStructure goes last until we fix either
2507 // https://bugs.webkit.org/show_bug.cgi?id=142921 or
2508 // https://bugs.webkit.org/show_bug.cgi?id=142924.
2509 addToGraph(PutStructure, OpInfo(transition), base);
93a37866 2510
ed1e77d3
A
2511 if (m_graph.compilation())
2512 m_graph.compilation()->noticeInlinedPutById();
2513 return;
2514 }
2515
2516 case PutByIdVariant::Setter: {
2517 Node* originalBase = base;
2518
2519 addToGraph(
2520 CheckStructure, OpInfo(m_graph.addStructureSet(variant.structure())), base);
2521
2522 emitChecks(variant.constantChecks());
2523
2524 if (variant.alternateBase())
2525 base = weakJSConstant(variant.alternateBase());
2526
2527 Node* loadedValue = handleGetByOffset(
2528 SpecCellOther, base, variant.baseStructure(), identifierNumber, variant.offset(),
2529 GetGetterSetterByOffset);
2530
2531 Node* setter = addToGraph(GetSetter, loadedValue);
2532
2533 // Make a call. We don't try to get fancy with using the smallest operand number because
2534 // the stack layout phase should compress the stack anyway.
2535
2536 unsigned numberOfParameters = 0;
2537 numberOfParameters++; // The 'this' argument.
2538 numberOfParameters++; // The new value.
2539 numberOfParameters++; // True return PC.
2540
2541 // Start with a register offset that corresponds to the last in-use register.
2542 int registerOffset = virtualRegisterForLocal(
2543 m_inlineStackTop->m_profiledBlock->m_numCalleeRegisters - 1).offset();
2544 registerOffset -= numberOfParameters;
2545 registerOffset -= JSStack::CallFrameHeaderSize;
2546
2547 // Get the alignment right.
2548 registerOffset = -WTF::roundUpToMultipleOf(
2549 stackAlignmentRegisters(),
2550 -registerOffset);
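        // Same frame-setup arithmetic as the getter case in handleGetById above, except that
        // numberOfParameters also reserves a slot for the incoming value.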
2551
2552 ensureLocals(
2553 m_inlineStackTop->remapOperand(
2554 VirtualRegister(registerOffset)).toLocal());
2555
2556 int nextRegister = registerOffset + JSStack::CallFrameHeaderSize;
2557 set(VirtualRegister(nextRegister++), originalBase, ImmediateNakedSet);
2558 set(VirtualRegister(nextRegister++), value, ImmediateNakedSet);
2559
2560 handleCall(
2561 VirtualRegister().offset(), Call, InlineCallFrame::SetterCall,
2562 OPCODE_LENGTH(op_put_by_id), setter, numberOfParameters - 1, registerOffset,
2563 *variant.callLinkStatus(), SpecOther);
2564 return;
2565 }
2566
2567 default: {
2568 emitPutById(base, identifierNumber, value, putByIdStatus, isDirect);
2569 return;
2570 } }
81345200 2571}
93a37866 2572
81345200
A
2573void ByteCodeParser::prepareToParseBlock()
2574{
ed1e77d3
A
2575 clearCaches();
2576 ASSERT(m_setLocalQueue.isEmpty());
81345200 2577}
93a37866 2578
ed1e77d3 2579void ByteCodeParser::clearCaches()
81345200 2580{
ed1e77d3 2581 m_constants.resize(0);
93a37866
A
2582}
2583
14957cd0
A
2584bool ByteCodeParser::parseBlock(unsigned limit)
2585{
6fe7ccc8 2586 bool shouldContinueParsing = true;
93a37866
A
2587
2588 Interpreter* interpreter = m_vm->interpreter;
6fe7ccc8
A
2589 Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
2590 unsigned blockBegin = m_currentIndex;
2591
2592 // If we are the first basic block, introduce markers for arguments. This allows
2593 // us to track if a use of an argument may use the actual argument passed, as
2594 // opposed to using a value we set explicitly.
81345200 2595 if (m_currentBlock == m_graph.block(0) && !inlineCallFrame()) {
6fe7ccc8
A
2596 m_graph.m_arguments.resize(m_numArguments);
2597 for (unsigned argument = 0; argument < m_numArguments; ++argument) {
93a37866 2598 VariableAccessData* variable = newVariableAccessData(
ed1e77d3 2599 virtualRegisterForArgument(argument));
93a37866 2600 variable->mergeStructureCheckHoistingFailed(
ed1e77d3 2601 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
81345200
A
2602 variable->mergeCheckArrayHoistingFailed(
2603 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
93a37866
A
2604
2605 Node* setArgument = addToGraph(SetArgument, OpInfo(variable));
6fe7ccc8 2606 m_graph.m_arguments[argument] = setArgument;
6fe7ccc8
A
2607 m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
2608 }
14957cd0
A
2609 }
2610
14957cd0 2611 while (true) {
ed1e77d3 2612 processSetLocalQueue();
81345200 2613
14957cd0
A
2614 // Don't extend over jump destinations.
2615 if (m_currentIndex == limit) {
6fe7ccc8
A
2616 // Ordinarily we want to plant a jump. But refuse to do this if the block is
2617 // empty. This is a special case for inlining, which might otherwise create
2618 // some empty blocks in some cases. When parseBlock() returns with an empty
2619 // block, it will get repurposed instead of creating a new one. Note that this
2620 // logic relies on every bytecode resulting in one or more nodes, which would
2621 // be true anyway except for op_loop_hint, which emits a Phantom to force this
2622 // to be true.
2623 if (!m_currentBlock->isEmpty())
2624 addToGraph(Jump, OpInfo(m_currentIndex));
6fe7ccc8 2625 return shouldContinueParsing;
14957cd0 2626 }
6fe7ccc8 2627
14957cd0
A
2628 // Switch on the current bytecode opcode.
2629 Instruction* currentInstruction = instructionsBegin + m_currentIndex;
93a37866 2630 m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
6fe7ccc8 2631 OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
93a37866 2632
81345200
A
2633 if (Options::verboseDFGByteCodeParsing())
2634 dataLog(" parsing ", currentCodeOrigin(), "\n");
2635
2636 if (m_graph.compilation()) {
2637 addToGraph(CountExecution, OpInfo(m_graph.compilation()->executionCounterFor(
93a37866
A
2638 Profiler::OriginStack(*m_vm->m_perBytecodeProfiler, m_codeBlock, currentCodeOrigin()))));
2639 }
2640
6fe7ccc8 2641 switch (opcodeID) {
14957cd0
A
2642
2643 // === Function entry opcodes ===
2644
ed1e77d3
A
2645 case op_enter: {
2646 Node* undefined = addToGraph(JSConstant, OpInfo(m_constantUndefined));
14957cd0 2647 // Initialize all locals to undefined.
6fe7ccc8 2648 for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
ed1e77d3 2649 set(virtualRegisterForLocal(i), undefined, ImmediateNakedSet);
14957cd0 2650 NEXT_OPCODE(op_enter);
ed1e77d3 2651 }
81345200
A
2652
2653 case op_to_this: {
93a37866 2654 Node* op1 = getThis();
81345200
A
2655 if (op1->op() != ToThis) {
2656 Structure* cachedStructure = currentInstruction[2].u.structure.get();
ed1e77d3
A
2657 if (currentInstruction[2].u.toThisStatus != ToThisOK
2658 || !cachedStructure
81345200
A
2659 || cachedStructure->classInfo()->methodTable.toThis != JSObject::info()->methodTable.toThis
2660 || m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
2661 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
81345200
A
2662 || (op1->op() == GetLocal && op1->variableAccessData()->structureCheckHoistingFailed())) {
2663 setThis(addToGraph(ToThis, op1));
2664 } else {
93a37866
A
2665 addToGraph(
2666 CheckStructure,
81345200 2667 OpInfo(m_graph.addStructureSet(cachedStructure)),
93a37866
A
2668 op1);
2669 }
2670 }
81345200 2671 NEXT_OPCODE(op_to_this);
14957cd0
A
2672 }
2673
6fe7ccc8 2674 case op_create_this: {
93a37866 2675 int calleeOperand = currentInstruction[2].u.operand;
81345200 2676 Node* callee = get(VirtualRegister(calleeOperand));
ed1e77d3
A
2677
2678 JSFunction* function = callee->dynamicCastConstant<JSFunction*>();
2679 if (!function) {
2680 JSCell* cachedFunction = currentInstruction[4].u.jsCell.unvalidatedGet();
2681 if (cachedFunction
2682 && cachedFunction != JSCell::seenMultipleCalleeObjects()
2683 && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCell)) {
2684 ASSERT(cachedFunction->inherits(JSFunction::info()));
2685
2686 FrozenValue* frozen = m_graph.freeze(cachedFunction);
2687 addToGraph(CheckCell, OpInfo(frozen), callee);
2688 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(JSConstant, OpInfo(frozen)));
2689
2690 function = static_cast<JSFunction*>(cachedFunction);
2691 }
2692 }
2693
93a37866 2694 bool alreadyEmitted = false;
2695 if (function) {
2696 if (FunctionRareData* rareData = function->rareData()) {
2697 if (Structure* structure = rareData->allocationStructure()) {
2698 m_graph.freeze(rareData);
2699 m_graph.watchpoints().addLazily(rareData->allocationProfileWatchpointSet());
2700 // The callee is still live up to this point.
2701 addToGraph(Phantom, callee);
2702 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewObject, OpInfo(structure)));
2703 alreadyEmitted = true;
2704 }
2705 }
2706 }
2707 if (!alreadyEmitted) {
2708 set(VirtualRegister(currentInstruction[1].u.operand),
93a37866 2709 addToGraph(CreateThis, OpInfo(currentInstruction[3].u.operand), callee));
81345200 2710 }
2711 NEXT_OPCODE(op_create_this);
2712 }
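// An illustrative sketch (assumed example): a constructor that always allocates
// the same shape hits both caches above, e.g.
//
//     function Point(x, y) { this.x = x; this.y = y; }
//     new Point(1, 2); new Point(3, 4);
//
// parses to CheckCell(Point) plus NewObject with the profiled allocation
// structure, skipping the generic CreateThis entirely.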
93a37866 2713
6fe7ccc8 2714 case op_new_object: {
81345200 2715 set(VirtualRegister(currentInstruction[1].u.operand),
2716 addToGraph(NewObject,
2717 OpInfo(currentInstruction[3].u.objectAllocationProfile->structure())));
2718 NEXT_OPCODE(op_new_object);
2719 }
2720
2721 case op_new_array: {
2722 int startOperand = currentInstruction[2].u.operand;
2723 int numOperands = currentInstruction[3].u.operand;
93a37866 2724 ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
2725 for (int operandIdx = startOperand; operandIdx > startOperand - numOperands; --operandIdx)
2726 addVarArgChild(get(VirtualRegister(operandIdx)));
2727 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
2728 NEXT_OPCODE(op_new_array);
2729 }
2730
2731 case op_new_array_with_size: {
2732 int lengthOperand = currentInstruction[2].u.operand;
2733 ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
81345200 2734 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(VirtualRegister(lengthOperand))));
2735 NEXT_OPCODE(op_new_array_with_size);
2736 }
2737
2738 case op_new_array_buffer: {
2739 int startConstant = currentInstruction[2].u.operand;
2740 int numConstants = currentInstruction[3].u.operand;
2741 ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
2742 NewArrayBufferData data;
2743 data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
2744 data.numConstants = numConstants;
2745 data.indexingType = profile->selectIndexingType();
2746
2747 // If this statement has never executed, we'll have the wrong indexing type in the profile.
2748 for (int i = 0; i < numConstants; ++i) {
2749 data.indexingType =
2750 leastUpperBoundOfIndexingTypeAndValue(
2751 data.indexingType,
2752 m_codeBlock->constantBuffer(data.startConstant)[i]);
2753 }
2754
2755 m_graph.m_newArrayBufferData.append(data);
81345200 2756 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
2757 NEXT_OPCODE(op_new_array_buffer);
2758 }
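// Illustrative inputs for the least-upper-bound computation above, assuming
// JSC's usual indexing-type lattice (examples not taken from this file):
//
//     [1, 2, 3]     // all int32 constants: stays Int32
//     [1, 2.5, 3]   // a double constant widens the type to Double
//     [1, "two", 3] // a non-numeric constant widens it to Contiguous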
2759
2760 case op_new_regexp: {
81345200 2761 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
2762 NEXT_OPCODE(op_new_regexp);
2763 }
2764
2765 // === Bitwise operations ===
2766
2767 case op_bitand: {
2768 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2769 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2770 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitAnd, op1, op2));
2771 NEXT_OPCODE(op_bitand);
2772 }
2773
2774 case op_bitor: {
2775 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2776 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2777 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitOr, op1, op2));
2778 NEXT_OPCODE(op_bitor);
2779 }
2780
2781 case op_bitxor: {
2782 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2783 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2784 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitXor, op1, op2));
2785 NEXT_OPCODE(op_bitxor);
2786 }
2787
2788 case op_rshift: {
2789 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2790 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2791 set(VirtualRegister(currentInstruction[1].u.operand),
2792 addToGraph(BitRShift, op1, op2));
2793 NEXT_OPCODE(op_rshift);
2794 }
2795
2796 case op_lshift: {
2797 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2798 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2799 set(VirtualRegister(currentInstruction[1].u.operand),
2800 addToGraph(BitLShift, op1, op2));
2801 NEXT_OPCODE(op_lshift);
2802 }
2803
2804 case op_urshift: {
2805 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2806 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2807 set(VirtualRegister(currentInstruction[1].u.operand),
2808 addToGraph(BitURShift, op1, op2));
2809 NEXT_OPCODE(op_urshift);
2810 }
2811
2812 case op_unsigned: {
2813 set(VirtualRegister(currentInstruction[1].u.operand),
2814 makeSafe(addToGraph(UInt32ToNumber, get(VirtualRegister(currentInstruction[2].u.operand)))));
2815 NEXT_OPCODE(op_unsigned);
2816 }
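// An illustrative pairing (assumed example): op_unsigned consumes the result of
// an unsigned shift, as in
//
//     var u = x >>> 0;
//
// The shift produces a uint32, and UInt32ToNumber is wrapped in makeSafe()
// because values above INT32_MAX cannot stay int32 and must become doubles.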
2817
2818 // === Increment/Decrement opcodes ===
2819
93a37866 2820 case op_inc: {
2821 int srcDst = currentInstruction[1].u.operand;
2822 VirtualRegister srcDstVirtualRegister = VirtualRegister(srcDst);
2823 Node* op = get(srcDstVirtualRegister);
ed1e77d3 2824 set(srcDstVirtualRegister, makeSafe(addToGraph(ArithAdd, op, addToGraph(JSConstant, OpInfo(m_constantOne)))));
93a37866 2825 NEXT_OPCODE(op_inc);
2826 }
2827
93a37866 2828 case op_dec: {
2829 int srcDst = currentInstruction[1].u.operand;
2830 VirtualRegister srcDstVirtualRegister = VirtualRegister(srcDst);
2831 Node* op = get(srcDstVirtualRegister);
ed1e77d3 2832 set(srcDstVirtualRegister, makeSafe(addToGraph(ArithSub, op, addToGraph(JSConstant, OpInfo(m_constantOne)))));
93a37866 2833 NEXT_OPCODE(op_dec);
2834 }
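// Both opcodes above reuse the binary arithmetic nodes against the constant 1.
// To illustrate (assumed examples):
//
//     ++x; // becomes ArithAdd(x, JSConstant(1))
//     --x; // becomes ArithSub(x, JSConstant(1))
//
// with makeSafe() adding overflow checks where profiling has seen them trip.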
2835
2836 // === Arithmetic operations ===
2837
2838 case op_add: {
2839 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2840 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
93a37866 2841 if (op1->hasNumberResult() && op2->hasNumberResult())
81345200 2842 set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithAdd, op1, op2)));
14957cd0 2843 else
81345200 2844 set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ValueAdd, op1, op2)));
2845 NEXT_OPCODE(op_add);
2846 }
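// The node choice above tracks what the inputs can produce. To illustrate
// (assumed examples): if either operand might be a string, '+' may concatenate
// and needs the generic ValueAdd; if both inputs provably produce numbers,
// ArithAdd suffices:
//
//     "n = " + x    // ValueAdd: '+' here may be string concatenation
//     (a|0) + (b|0) // ArithAdd: both BitOr inputs have number results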
2847
2848 case op_sub: {
2849 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2850 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2851 set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithSub, op1, op2)));
2852 NEXT_OPCODE(op_sub);
2853 }
2854
6fe7ccc8 2855 case op_negate: {
2856 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2857 set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithNegate, op1)));
2858 NEXT_OPCODE(op_negate);
2859 }
2860
14957cd0 2861 case op_mul: {
6fe7ccc8 2862 // Multiply requires that the inputs are not truncated, unfortunately.
2863 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2864 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2865 set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithMul, op1, op2)));
2866 NEXT_OPCODE(op_mul);
2867 }
2868
2869 case op_mod: {
2870 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2871 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2872 set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithMod, op1, op2)));
2873 NEXT_OPCODE(op_mod);
2874 }
2875
2876 case op_div: {
2877 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2878 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2879 set(VirtualRegister(currentInstruction[1].u.operand), makeDivSafe(addToGraph(ArithDiv, op1, op2)));
2880 NEXT_OPCODE(op_div);
2881 }
2882
2883 // === Misc operations ===
2884
2885 case op_debug:
2886 addToGraph(Breakpoint);
2887 NEXT_OPCODE(op_debug);
2888
2889 case op_profile_will_call: {
2890 addToGraph(ProfileWillCall);
2891 NEXT_OPCODE(op_profile_will_call);
2892 }
2893
2894 case op_profile_did_call: {
2895 addToGraph(ProfileDidCall);
2896 NEXT_OPCODE(op_profile_did_call);
2897 }
2898
14957cd0 2899 case op_mov: {
2900 Node* op = get(VirtualRegister(currentInstruction[2].u.operand));
2901 set(VirtualRegister(currentInstruction[1].u.operand), op);
2902 NEXT_OPCODE(op_mov);
2903 }
2904
2905 case op_check_tdz: {
2906 Node* op = get(VirtualRegister(currentInstruction[1].u.operand));
2907 addToGraph(CheckNotEmpty, op);
2908 NEXT_OPCODE(op_check_tdz);
81345200 2909 }
14957cd0 2910
6fe7ccc8 2911 case op_check_has_instance:
81345200 2912 addToGraph(CheckHasInstance, get(VirtualRegister(currentInstruction[3].u.operand)));
2913 NEXT_OPCODE(op_check_has_instance);
2914
2915 case op_instanceof: {
2916 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2917 Node* prototype = get(VirtualRegister(currentInstruction[3].u.operand));
2918 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(InstanceOf, value, prototype));
2919 NEXT_OPCODE(op_instanceof);
2920 }
2921
2922 case op_is_undefined: {
2923 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2924 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsUndefined, value));
2925 NEXT_OPCODE(op_is_undefined);
2926 }
2927
2928 case op_is_boolean: {
2929 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2930 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsBoolean, value));
2931 NEXT_OPCODE(op_is_boolean);
2932 }
2933
2934 case op_is_number: {
2935 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2936 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsNumber, value));
2937 NEXT_OPCODE(op_is_number);
2938 }
2939
2940 case op_is_string: {
2941 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2942 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsString, value));
2943 NEXT_OPCODE(op_is_string);
2944 }
2945
2946 case op_is_object: {
2947 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2948 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsObject, value));
2949 NEXT_OPCODE(op_is_object);
2950 }
2951
2952 case op_is_object_or_null: {
2953 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2954 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsObjectOrNull, value));
2955 NEXT_OPCODE(op_is_object_or_null);
2956 }
2957
6fe7ccc8 2958 case op_is_function: {
2959 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2960 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsFunction, value));
2961 NEXT_OPCODE(op_is_function);
2962 }
2963
14957cd0 2964 case op_not: {
2965 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2966 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, value));
2967 NEXT_OPCODE(op_not);
2968 }
2969
2970 case op_to_primitive: {
2971 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2972 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(ToPrimitive, value));
2973 NEXT_OPCODE(op_to_primitive);
2974 }
2975
2976 case op_strcat: {
2977 int startOperand = currentInstruction[2].u.operand;
2978 int numOperands = currentInstruction[3].u.operand;
2979#if CPU(X86)
2980 // X86 doesn't have enough registers to compile MakeRope with three arguments.
2981 // Rather than try to be clever, we just make MakeRope dumber on this processor.
2982 const unsigned maxRopeArguments = 2;
2983#else
2984 const unsigned maxRopeArguments = 3;
2985#endif
81345200 2986 auto toStringNodes = std::make_unique<Node*[]>(numOperands);
93a37866 2987 for (int i = 0; i < numOperands; i++)
81345200 2988 toStringNodes[i] = addToGraph(ToString, get(VirtualRegister(startOperand - i)));
2989
2990 for (int i = 0; i < numOperands; i++)
2991 addToGraph(Phantom, toStringNodes[i]);
2992
2993 Node* operands[AdjacencyList::Size];
2994 unsigned indexInOperands = 0;
2995 for (unsigned i = 0; i < AdjacencyList::Size; ++i)
2996 operands[i] = 0;
2997 for (int operandIdx = 0; operandIdx < numOperands; ++operandIdx) {
2998 if (indexInOperands == maxRopeArguments) {
2999 operands[0] = addToGraph(MakeRope, operands[0], operands[1], operands[2]);
3000 for (unsigned i = 1; i < AdjacencyList::Size; ++i)
3001 operands[i] = 0;
3002 indexInOperands = 1;
3003 }
3004
3005 ASSERT(indexInOperands < AdjacencyList::Size);
3006 ASSERT(indexInOperands < maxRopeArguments);
3007 operands[indexInOperands++] = toStringNodes[operandIdx];
3008 }
81345200 3009 set(VirtualRegister(currentInstruction[1].u.operand),
93a37866 3010 addToGraph(MakeRope, operands[0], operands[1], operands[2]));
3011 NEXT_OPCODE(op_strcat);
3012 }
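// A walk-through of the rope-chaining loop above (derived from the code, not a
// comment in the original): with maxRopeArguments == 3 and five operands
// s0..s4, the fourth operand finds every slot full, folds the first three into
// operands[0], and the final node built is
//
//     MakeRope(MakeRope(s0, s1, s2), s3, s4)
//
// so arbitrarily long op_strcat sequences lower to a left-leaning rope chain.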
3013
3014 case op_less: {
3015 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
3016 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
3017 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareLess, op1, op2));
3018 NEXT_OPCODE(op_less);
3019 }
3020
3021 case op_lesseq: {
3022 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
3023 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
3024 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareLessEq, op1, op2));
3025 NEXT_OPCODE(op_lesseq);
3026 }
3027
6fe7ccc8 3028 case op_greater: {
3029 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
3030 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
3031 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareGreater, op1, op2));
3032 NEXT_OPCODE(op_greater);
3033 }
3034
3035 case op_greatereq: {
3036 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
3037 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
3038 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareGreaterEq, op1, op2));
3039 NEXT_OPCODE(op_greatereq);
3040 }
3041
14957cd0 3042 case op_eq: {
3043 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
3044 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
3045 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareEq, op1, op2));
3046 NEXT_OPCODE(op_eq);
3047 }
3048
3049 case op_eq_null: {
81345200 3050 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
ed1e77d3 3051 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareEqConstant, value, addToGraph(JSConstant, OpInfo(m_constantNull))));
3052 NEXT_OPCODE(op_eq_null);
3053 }
3054
3055 case op_stricteq: {
3056 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
3057 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
3058 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareStrictEq, op1, op2));
3059 NEXT_OPCODE(op_stricteq);
3060 }
3061
3062 case op_neq: {
3063 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
3064 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
3065 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
3066 NEXT_OPCODE(op_neq);
3067 }
3068
3069 case op_neq_null: {
81345200 3070 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
ed1e77d3 3071 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, addToGraph(CompareEqConstant, value, addToGraph(JSConstant, OpInfo(m_constantNull)))));
3072 NEXT_OPCODE(op_neq_null);
3073 }
3074
3075 case op_nstricteq: {
3076 Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
3077 Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
93a37866 3078 Node* invertedResult;
3079 invertedResult = addToGraph(CompareStrictEq, op1, op2);
3080 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, invertedResult));
3081 NEXT_OPCODE(op_nstricteq);
3082 }
3083
3084 // === Property access operations ===
3085
3086 case op_get_by_val: {
ed1e77d3 3087 SpeculatedType prediction = getPredictionWithoutOSRExit();
6fe7ccc8 3088
81345200 3089 Node* base = get(VirtualRegister(currentInstruction[2].u.operand));
ed1e77d3 3090 ArrayMode arrayMode = getArrayMode(currentInstruction[4].u.arrayProfile, Array::Read);
81345200 3091 Node* property = get(VirtualRegister(currentInstruction[3].u.operand));
93a37866 3092 Node* getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
81345200 3093 set(VirtualRegister(currentInstruction[1].u.operand), getByVal);
3094
3095 NEXT_OPCODE(op_get_by_val);
3096 }
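// An illustrative specialization (assumed example): the ArrayMode comes from
// the array profile at this bytecode, so a site like
//
//     var v = a[i];
//
// that has only ever seen int32 arrays parses to a GetByVal specialized for
// Array::Int32, with OSR exit covering executions where 'a' later disagrees.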
3097
81345200 3098 case op_put_by_val_direct:
14957cd0 3099 case op_put_by_val: {
81345200 3100 Node* base = get(VirtualRegister(currentInstruction[1].u.operand));
14957cd0 3101
ed1e77d3 3102 ArrayMode arrayMode = getArrayMode(currentInstruction[4].u.arrayProfile, Array::Write);
93a37866 3103
3104 Node* property = get(VirtualRegister(currentInstruction[2].u.operand));
3105 Node* value = get(VirtualRegister(currentInstruction[3].u.operand));
3106
3107 addVarArgChild(base);
3108 addVarArgChild(property);
3109 addVarArgChild(value);
3110 addVarArgChild(0); // Leave room for property storage.
3111 addVarArgChild(0); // Leave room for length.
3112 addToGraph(Node::VarArg, opcodeID == op_put_by_val_direct ? PutByValDirect : PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));
3113
3114 NEXT_OPCODE(op_put_by_val);
3115 }
6fe7ccc8 3116
3117 case op_get_by_id:
3118 case op_get_by_id_out_of_line:
3119 case op_get_array_length: {
3120 SpeculatedType prediction = getPrediction();
6fe7ccc8 3121
81345200 3122 Node* base = get(VirtualRegister(currentInstruction[2].u.operand));
3123 unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
3124
ed1e77d3 3125 UniquedStringImpl* uid = m_graph.identifiers()[identifierNumber];
6fe7ccc8 3126 GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
3127 m_inlineStackTop->m_profiledBlock, m_dfgCodeBlock,
3128 m_inlineStackTop->m_stubInfos, m_dfgStubInfos,
3129 currentCodeOrigin(), uid);
6fe7ccc8 3130
3131 handleGetById(
3132 currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);
3133
3134 NEXT_OPCODE(op_get_by_id);
3135 }
6fe7ccc8 3136 case op_put_by_id:
93a37866 3137 case op_put_by_id_out_of_line:
6fe7ccc8 3138 case op_put_by_id_transition_direct:
3139 case op_put_by_id_transition_normal:
3140 case op_put_by_id_transition_direct_out_of_line:
3141 case op_put_by_id_transition_normal_out_of_line: {
3142 Node* value = get(VirtualRegister(currentInstruction[3].u.operand));
3143 Node* base = get(VirtualRegister(currentInstruction[1].u.operand));
6fe7ccc8 3144 unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3145 bool direct = currentInstruction[8].u.operand;
3146
6fe7ccc8 3147 PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
3148 m_inlineStackTop->m_profiledBlock, m_dfgCodeBlock,
3149 m_inlineStackTop->m_stubInfos, m_dfgStubInfos,
3150 currentCodeOrigin(), m_graph.identifiers()[identifierNumber]);
6fe7ccc8 3151
81345200 3152 handlePutById(base, identifierNumber, value, putByIdStatus, direct);
3153 NEXT_OPCODE(op_put_by_id);
3154 }
3155
3156 case op_init_global_const_nop: {
3157 NEXT_OPCODE(op_init_global_const_nop);
3158 }
3159
93a37866 3160 case op_init_global_const: {
81345200 3161 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
ed1e77d3 3162 JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
3163 addToGraph(
3164 PutGlobalVar,
3165 OpInfo(globalObject->assertVariableIsInThisObject(currentInstruction[1].u.variablePointer)),
3166 weakJSConstant(globalObject), value);
93a37866 3167 NEXT_OPCODE(op_init_global_const);
3168 }
3169
3170 case op_profile_type: {
3171 Node* valueToProfile = get(VirtualRegister(currentInstruction[1].u.operand));
3172 addToGraph(ProfileType, OpInfo(currentInstruction[2].u.location), valueToProfile);
3173 NEXT_OPCODE(op_profile_type);
3174 }
3175
3176 case op_profile_control_flow: {
3177 BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
3178 addToGraph(ProfileControlFlow, OpInfo(basicBlockLocation));
3179 NEXT_OPCODE(op_profile_control_flow);
3180 }
3181
3182 // === Block terminators ===
3183
3184 case op_jmp: {
81345200 3185 int relativeOffset = currentInstruction[1].u.operand;
ed1e77d3 3186 addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
3187 if (relativeOffset <= 0)
3188 flushForTerminal();
93a37866 3189 LAST_OPCODE(op_jmp);
3190 }
3191
3192 case op_jtrue: {
3193 unsigned relativeOffset = currentInstruction[2].u.operand;
3194 Node* condition = get(VirtualRegister(currentInstruction[1].u.operand));
3195 addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jtrue))), condition);
3196 LAST_OPCODE(op_jtrue);
3197 }
3198
3199 case op_jfalse: {
3200 unsigned relativeOffset = currentInstruction[2].u.operand;
3201 Node* condition = get(VirtualRegister(currentInstruction[1].u.operand));
3202 addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jfalse), m_currentIndex + relativeOffset)), condition);
3203 LAST_OPCODE(op_jfalse);
3204 }
3205
3206 case op_jeq_null: {
3207 unsigned relativeOffset = currentInstruction[2].u.operand;
81345200 3208 Node* value = get(VirtualRegister(currentInstruction[1].u.operand));
ed1e77d3 3209 Node* condition = addToGraph(CompareEqConstant, value, addToGraph(JSConstant, OpInfo(m_constantNull)));
81345200 3210 addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jeq_null))), condition);
3211 LAST_OPCODE(op_jeq_null);
3212 }
3213
3214 case op_jneq_null: {
3215 unsigned relativeOffset = currentInstruction[2].u.operand;
81345200 3216 Node* value = get(VirtualRegister(currentInstruction[1].u.operand));
ed1e77d3 3217 Node* condition = addToGraph(CompareEqConstant, value, addToGraph(JSConstant, OpInfo(m_constantNull)));
81345200 3218 addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jneq_null), m_currentIndex + relativeOffset)), condition);
3219 LAST_OPCODE(op_jneq_null);
3220 }
3221
3222 case op_jless: {
3223 unsigned relativeOffset = currentInstruction[3].u.operand;
3224 Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
3225 Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
93a37866 3226 Node* condition = addToGraph(CompareLess, op1, op2);
81345200 3227 addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jless))), condition);
3228 LAST_OPCODE(op_jless);
3229 }
3230
3231 case op_jlesseq: {
3232 unsigned relativeOffset = currentInstruction[3].u.operand;
3233 Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
3234 Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
93a37866 3235 Node* condition = addToGraph(CompareLessEq, op1, op2);
81345200 3236 addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jlesseq))), condition);
3237 LAST_OPCODE(op_jlesseq);
3238 }
3239
3240 case op_jgreater: {
3241 unsigned relativeOffset = currentInstruction[3].u.operand;
3242 Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
3243 Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
93a37866 3244 Node* condition = addToGraph(CompareGreater, op1, op2);
81345200 3245 addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jgreater))), condition);
3246 LAST_OPCODE(op_jgreater);
3247 }
3248
3249 case op_jgreatereq: {
3250 unsigned relativeOffset = currentInstruction[3].u.operand;
3251 Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
3252 Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
93a37866 3253 Node* condition = addToGraph(CompareGreaterEq, op1, op2);
81345200 3254 addToGraph(Branch, OpInfo(branchData(m_currentIndex + relativeOffset, m_currentIndex + OPCODE_LENGTH(op_jgreatereq))), condition);
3255 LAST_OPCODE(op_jgreatereq);
3256 }
3257
3258 case op_jnless: {
3259 unsigned relativeOffset = currentInstruction[3].u.operand;
3260 Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
3261 Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
93a37866 3262 Node* condition = addToGraph(CompareLess, op1, op2);
81345200 3263 addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jnless), m_currentIndex + relativeOffset)), condition);
3264 LAST_OPCODE(op_jnless);
3265 }
3266
3267 case op_jnlesseq: {
3268 unsigned relativeOffset = currentInstruction[3].u.operand;
3269 Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
3270 Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
93a37866 3271 Node* condition = addToGraph(CompareLessEq, op1, op2);
81345200 3272 addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jnlesseq), m_currentIndex + relativeOffset)), condition);
3273 LAST_OPCODE(op_jnlesseq);
3274 }
3275
6fe7ccc8 3276 case op_jngreater: {
14957cd0 3277 unsigned relativeOffset = currentInstruction[3].u.operand;
3278 Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
3279 Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
93a37866 3280 Node* condition = addToGraph(CompareGreater, op1, op2);
81345200 3281 addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jngreater), m_currentIndex + relativeOffset)), condition);
6fe7ccc8 3282 LAST_OPCODE(op_jngreater);
3283 }
3284
6fe7ccc8 3285 case op_jngreatereq: {
14957cd0 3286 unsigned relativeOffset = currentInstruction[3].u.operand;
3287 Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
3288 Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
93a37866 3289 Node* condition = addToGraph(CompareGreaterEq, op1, op2);
81345200 3290 addToGraph(Branch, OpInfo(branchData(m_currentIndex + OPCODE_LENGTH(op_jngreatereq), m_currentIndex + relativeOffset)), condition);
6fe7ccc8 3291 LAST_OPCODE(op_jngreatereq);
14957cd0 3292 }
3293
3294 case op_switch_imm: {
3295 SwitchData& data = *m_graph.m_switchData.add();
3296 data.kind = SwitchImm;
3297 data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
3298 data.fallThrough.setBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
3299 SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
3300 for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
3301 if (!table.branchOffsets[i])
3302 continue;
3303 unsigned target = m_currentIndex + table.branchOffsets[i];
3304 if (target == data.fallThrough.bytecodeIndex())
3305 continue;
ed1e77d3 3306 data.cases.append(SwitchCase::withBytecodeIndex(m_graph.freeze(jsNumber(static_cast<int32_t>(table.min + i))), target));
81345200 3307 }
81345200 3308 addToGraph(Switch, OpInfo(&data), get(VirtualRegister(currentInstruction[3].u.operand)));
ed1e77d3 3309 flushIfTerminal(data);
3310 LAST_OPCODE(op_switch_imm);
3311 }
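// A note on the case loop above (derived from the code): slots with a zero
// branch offset and targets equal to the fall-through are skipped, so for
//
//     switch (x) { case 1: a(); break; case 2: b(); break; }
//
// data.cases only carries targets that differ from the fallThrough block.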
3312
3313 case op_switch_char: {
3314 SwitchData& data = *m_graph.m_switchData.add();
3315 data.kind = SwitchChar;
3316 data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
3317 data.fallThrough.setBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
3318 SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
3319 for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
3320 if (!table.branchOffsets[i])
3321 continue;
3322 unsigned target = m_currentIndex + table.branchOffsets[i];
3323 if (target == data.fallThrough.bytecodeIndex())
3324 continue;
3325 data.cases.append(
3326 SwitchCase::withBytecodeIndex(LazyJSValue::singleCharacterString(table.min + i), target));
3327 }
81345200 3328 addToGraph(Switch, OpInfo(&data), get(VirtualRegister(currentInstruction[3].u.operand)));
ed1e77d3 3329 flushIfTerminal(data);
3330 LAST_OPCODE(op_switch_char);
3331 }
3332
3333 case op_switch_string: {
3334 SwitchData& data = *m_graph.m_switchData.add();
3335 data.kind = SwitchString;
3336 data.switchTableIndex = currentInstruction[1].u.operand;
3337 data.fallThrough.setBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
3338 StringJumpTable& table = m_codeBlock->stringSwitchJumpTable(data.switchTableIndex);
3339 StringJumpTable::StringOffsetTable::iterator iter;
3340 StringJumpTable::StringOffsetTable::iterator end = table.offsetTable.end();
3341 for (iter = table.offsetTable.begin(); iter != end; ++iter) {
3342 unsigned target = m_currentIndex + iter->value.branchOffset;
3343 if (target == data.fallThrough.bytecodeIndex())
3344 continue;
3345 data.cases.append(
3346 SwitchCase::withBytecodeIndex(LazyJSValue::knownStringImpl(iter->key.get()), target));
3347 }
81345200 3348 addToGraph(Switch, OpInfo(&data), get(VirtualRegister(currentInstruction[3].u.operand)));
ed1e77d3 3349 flushIfTerminal(data);
3350 LAST_OPCODE(op_switch_string);
3351 }
14957cd0 3352
6fe7ccc8 3353 case op_ret:
93a37866 3354 if (inlineCallFrame()) {
3355 flushForReturn();
3356 if (m_inlineStackTop->m_returnValue.isValid())
3357 setDirect(m_inlineStackTop->m_returnValue, get(VirtualRegister(currentInstruction[1].u.operand)), ImmediateSetWithFlush);
3358 m_inlineStackTop->m_didReturn = true;
3359 if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
3360 // If we're returning from the first block, then we're done parsing.
81345200 3361 ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.lastBlock());
3362 shouldContinueParsing = false;
3363 LAST_OPCODE(op_ret);
3364 } else {
3365 // If inlining created blocks, and we're doing a return, then we need some
3366 // special linking.
81345200 3367 ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_block == m_graph.lastBlock());
3368 m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
3369 }
3370 if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
3371 ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
81345200 3372 addToGraph(Jump, OpInfo(0));
3373 m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
3374 m_inlineStackTop->m_didEarlyReturn = true;
3375 }
3376 LAST_OPCODE(op_ret);
3377 }
81345200 3378 addToGraph(Return, get(VirtualRegister(currentInstruction[1].u.operand)));
ed1e77d3 3379 flushForReturn();
14957cd0 3380 LAST_OPCODE(op_ret);
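// An illustrative early return (assumed example): when inlining
//
//     function pick(c, a, b) { if (c) return a; return b; }
//
// the 'return a' is not the last bytecode of the inlined code block, so it
// takes the Jump(OpInfo(0)) path above and gets linked to the continuation
// block later; only a return that falls off the end avoids early-return linking.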
3381
3382 case op_end:
93a37866 3383 ASSERT(!inlineCallFrame());
81345200 3384 addToGraph(Return, get(VirtualRegister(currentInstruction[1].u.operand)));
ed1e77d3 3385 flushForReturn();
3386 LAST_OPCODE(op_end);
3387
3388 case op_throw:
3389 addToGraph(Throw, get(VirtualRegister(currentInstruction[1].u.operand)));
3390 flushForTerminal();
12899fa2 3391 addToGraph(Unreachable);
3392 LAST_OPCODE(op_throw);
3393
93a37866 3394 case op_throw_static_error:
6fe7ccc8 3395 addToGraph(ThrowReferenceError);
81345200 3396 flushForTerminal();
12899fa2 3397 addToGraph(Unreachable);
93a37866 3398 LAST_OPCODE(op_throw_static_error);
3399
3400 case op_call:
81345200 3401 handleCall(currentInstruction, Call, CodeForCall);
3402 // Verify that handleCall(), which could have inlined the callee, didn't trash m_currentInstruction.
3403 ASSERT(m_currentInstruction == currentInstruction);
3404 NEXT_OPCODE(op_call);
3405
3406 case op_construct:
81345200 3407 handleCall(currentInstruction, Construct, CodeForConstruct);
3408 NEXT_OPCODE(op_construct);
3409
93a37866 3410 case op_call_varargs: {
ed1e77d3 3411 handleVarargsCall(currentInstruction, CallVarargs, CodeForCall);
3412 NEXT_OPCODE(op_call_varargs);
3413 }
3414
3415 case op_construct_varargs: {
3416 handleVarargsCall(currentInstruction, ConstructVarargs, CodeForConstruct);
3417 NEXT_OPCODE(op_construct_varargs);
3418 }
3419
3420 case op_jneq_ptr:
3421 // Statically speculate for now. It makes sense to let speculate-only jneq_ptr
3422 // support simmer for a while before making it more general, since it's
3423 // already gnarly enough as it is.
3424 ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
3425 addToGraph(
3426 CheckCell,
3427 OpInfo(m_graph.freeze(static_cast<JSCell*>(actualPointerFor(
3428 m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)))),
81345200 3429 get(VirtualRegister(currentInstruction[1].u.operand)));
3430 addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
3431 LAST_OPCODE(op_jneq_ptr);
3432
81345200 3433 case op_resolve_scope: {
93a37866 3434 int dst = currentInstruction[1].u.operand;
3435 ResolveType resolveType = static_cast<ResolveType>(currentInstruction[4].u.operand);
3436 unsigned depth = currentInstruction[5].u.operand;
3437
3438 // get_from_scope and put_to_scope depend on this watchpoint forcing OSR exit, so they don't add their own watchpoints.
3439 if (needsVarInjectionChecks(resolveType))
3440 addToGraph(VarInjectionWatchpoint);
3441
3442 switch (resolveType) {
3443 case GlobalProperty:
3444 case GlobalVar:
3445 case GlobalPropertyWithVarInjectionChecks:
3446 case GlobalVarWithVarInjectionChecks:
3447 set(VirtualRegister(dst), weakJSConstant(m_inlineStackTop->m_codeBlock->globalObject()));
3448 if (resolveType == GlobalPropertyWithVarInjectionChecks || resolveType == GlobalVarWithVarInjectionChecks)
3449 addToGraph(Phantom, getDirect(m_inlineStackTop->remapOperand(VirtualRegister(currentInstruction[2].u.operand))));
81345200 3450 break;
ed1e77d3 3451 case LocalClosureVar:
3452 case ClosureVar:
3453 case ClosureVarWithVarInjectionChecks: {
3454 Node* localBase = get(VirtualRegister(currentInstruction[2].u.operand));
3455 addToGraph(Phantom, localBase); // OSR exit cannot handle resolve_scope on a DCE'd scope.
3456
3457 // We have various forms of constant folding here. This is necessary to avoid
3458 // spurious recompiles in dead-but-foldable code.
3459 if (SymbolTable* symbolTable = currentInstruction[6].u.symbolTable.get()) {
3460 InferredValue* singleton = symbolTable->singletonScope();
3461 if (JSValue value = singleton->inferredValue()) {
3462 m_graph.watchpoints().addLazily(singleton);
3463 set(VirtualRegister(dst), weakJSConstant(value));
3464 break;
3465 }
3466 }
3467 if (JSScope* scope = localBase->dynamicCastConstant<JSScope*>()) {
3468 for (unsigned n = depth; n--;)
3469 scope = scope->next();
3470 set(VirtualRegister(dst), weakJSConstant(scope));
3471 break;
3472 }
3473 for (unsigned n = depth; n--;)
3474 localBase = addToGraph(SkipScope, localBase);
3475 set(VirtualRegister(dst), localBase);
81345200 3476 break;
93a37866 3477 }
3478 case Dynamic:
3479 RELEASE_ASSERT_NOT_REACHED();
3480 break;
3481 }
3482 NEXT_OPCODE(op_resolve_scope);
3483 }
3484
3485 case op_get_from_scope: {
3486 int dst = currentInstruction[1].u.operand;
3487 int scope = currentInstruction[2].u.operand;
3488 unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
ed1e77d3 3489 UniquedStringImpl* uid = m_graph.identifiers()[identifierNumber];
3490 ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
3491
3492 Structure* structure = 0;
3493 WatchpointSet* watchpoints = 0;
3494 uintptr_t operand;
3495 {
3496 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
3497 if (resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks)
3498 watchpoints = currentInstruction[5].u.watchpointSet;
3499 else
3500 structure = currentInstruction[5].u.structure.get();
3501 operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
3502 }
3503
81345200 3504 UNUSED_PARAM(watchpoints); // We will use this in the future. For now it serves to document that index 5 holds the watchpoint set in GlobalVar mode.
93a37866 3505
3506 JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
3507
3508 switch (resolveType) {
3509 case GlobalProperty:
3510 case GlobalPropertyWithVarInjectionChecks: {
3511 SpeculatedType prediction = getPrediction();
3512 GetByIdStatus status = GetByIdStatus::computeFor(structure, uid);
3513 if (status.state() != GetByIdStatus::Simple
3514 || status.numVariants() != 1
3515 || status[0].structureSet().size() != 1) {
81345200 3516 set(VirtualRegister(dst), addToGraph(GetByIdFlush, OpInfo(identifierNumber), OpInfo(prediction), get(VirtualRegister(scope))));
3517 break;
3518 }
ed1e77d3 3519 Node* base = cellConstantWithStructureCheck(globalObject, status[0].structureSet().onlyStructure());
81345200 3520 addToGraph(Phantom, get(VirtualRegister(scope)));
ed1e77d3 3521 set(VirtualRegister(dst), handleGetByOffset(prediction, base, status[0].structureSet(), identifierNumber, operand));
93a37866 3522 break;
3523 }
3524 case GlobalVar:
3525 case GlobalVarWithVarInjectionChecks: {
3526 addToGraph(Phantom, get(VirtualRegister(scope)));
3527 WatchpointSet* watchpointSet;
3528 ScopeOffset offset;
3529 {
3530 ConcurrentJITLocker locker(globalObject->symbolTable()->m_lock);
3531 SymbolTableEntry entry = globalObject->symbolTable()->get(locker, uid);
3532 watchpointSet = entry.watchpointSet();
3533 offset = entry.scopeOffset();
3534 }
3535 if (watchpointSet && watchpointSet->state() == IsWatched) {
3536 // This has a fun concurrency story. There is the possibility of a race in two
3537 // directions:
3538 //
3539 // We see that the set IsWatched, but in the meantime it gets invalidated: this is
3540 // fine because if we saw that it IsWatched then we add a watchpoint. If it gets
3541 // invalidated, then this compilation is invalidated. Note that in the meantime we
3542 // may load an absurd value from the global object. It's fine to load an absurd
3543 // value if the compilation is invalidated anyway.
3544 //
3545 // We see that the set IsWatched, but the value isn't yet initialized: this isn't
3546 // possible because of the ordering of operations.
3547 //
3548 // Here's how we order operations:
3549 //
3550 // Main thread stores to the global object: always store a value first, and only
3551 // after that do we touch the watchpoint set. There is a fence in the touch, that
3552 // ensures that the store to the global object always happens before the touch on the
3553 // set.
3554 //
3555 // Compilation thread: always first load the state of the watchpoint set, and then
3556 // load the value. The WatchpointSet::state() method does fences for us to ensure
3557 // that the load of the state happens before our load of the value.
3558 //
3559 // Finalizing compilation: this happens on the main thread and synchronously checks
3560 // validity of all watchpoint sets.
3561 //
3562 // We will only perform optimizations if the load of the state yields IsWatched. That
3563 // means that at least one store would have happened to initialize the original value
3564 // of the variable (that is, the value we'd like to constant fold to). There may be
3565 // other stores that happen after that, but those stores will invalidate the
3566 // watchpoint set and also the compilation.
3567
3568 // Note that we need to use the operand, which is a direct pointer to the global,
3569 // rather than looking up the global by doing variableAt(offset). That's because the
3570 // internal data structures of JSSegmentedVariableObject are not thread-safe even
3571 // though accessing the global itself is. The segmentation involves a vector spine
3572 // that resizes with malloc/free, so if new globals unrelated to the one we are
3573 // reading are added, we might access freed memory if we do variableAt().
3574 WriteBarrier<Unknown>* pointer = bitwise_cast<WriteBarrier<Unknown>*>(operand);
3575
3576 ASSERT(globalObject->findVariableIndex(pointer) == offset);
3577
3578 JSValue value = pointer->get();
3579 if (value) {
3580 m_graph.watchpoints().addLazily(watchpointSet);
3581 set(VirtualRegister(dst), weakJSConstant(value));
3582 break;
3583 }
3584 }
3585
3586 SpeculatedType prediction = getPrediction();
3587 set(VirtualRegister(dst), addToGraph(GetGlobalVar, OpInfo(operand), OpInfo(prediction)));
3588 break;
3589 }
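// A condensed sketch of the ordering protocol described above (illustrative,
// not code from the source):
//
//     Main thread:                     Compilation thread:
//     global->var = value;             state = set->state(); // fenced
//     <fence>                          value = pointer->get();
//     touch the watchpoint set         if (state == IsWatched) fold to value
//
// Seeing IsWatched guarantees some store initialized the value first; any
// later store fires the set and invalidates this compilation.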
ed1e77d3 3590 case LocalClosureVar:
3591 case ClosureVar:
3592 case ClosureVarWithVarInjectionChecks: {
3593 Node* scopeNode = get(VirtualRegister(scope));
3594
3595 // Ideally we wouldn't have to do this Phantom. But:
3596 //
3597 // For the constant case: we must do it because otherwise we would have no way of knowing
3598 // that the scope is live at OSR here.
3599 //
3600 // For the non-constant case: GetClosureVar could be DCE'd, but baseline's implementation
3601 // won't be able to handle an Undefined scope.
3602 addToGraph(Phantom, scopeNode);
3603
3604 // Constant folding in the bytecode parser is important for performance. This may not
3605 // have executed yet. If it hasn't, then we won't have a prediction. Lacking a
3606 // prediction, we'd otherwise think that it has to exit. Then when it did execute, we
3607 // would recompile. But if we can fold it here, we avoid the exit.
3608 if (JSValue value = m_graph.tryGetConstantClosureVar(scopeNode, ScopeOffset(operand))) {
3609 set(VirtualRegister(dst), weakJSConstant(value));
3610 break;
93a37866 3611 }
ed1e77d3 3612 SpeculatedType prediction = getPrediction();
81345200 3613 set(VirtualRegister(dst),
ed1e77d3 3614 addToGraph(GetClosureVar, OpInfo(operand), OpInfo(prediction), scopeNode));
3615 break;
3616 }
3617 case Dynamic:
3618 RELEASE_ASSERT_NOT_REACHED();
3619 break;
93a37866 3620 }
81345200 3621 NEXT_OPCODE(op_get_from_scope);
3622 }
3623
3624 case op_put_to_scope: {
3625 unsigned scope = currentInstruction[1].u.operand;
3626 unsigned identifierNumber = currentInstruction[2].u.operand;
3627 if (identifierNumber != UINT_MAX)
3628 identifierNumber = m_inlineStackTop->m_identifierRemap[identifierNumber];
3629 unsigned value = currentInstruction[3].u.operand;
3630 ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
3631 UniquedStringImpl* uid;
3632 if (identifierNumber != UINT_MAX)
3633 uid = m_graph.identifiers()[identifierNumber];
3634 else
3635 uid = nullptr;
3636
3637 Structure* structure = nullptr;
3638 WatchpointSet* watchpoints = nullptr;
3639 uintptr_t operand;
3640 {
3641 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
ed1e77d3 3642 if (resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks || resolveType == LocalClosureVar)
3643 watchpoints = currentInstruction[5].u.watchpointSet;
3644 else
3645 structure = currentInstruction[5].u.structure.get();
3646 operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
93a37866 3647 }
6fe7ccc8 3648
81345200 3649 JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
6fe7ccc8 3650
3651 switch (resolveType) {
3652 case GlobalProperty:
3653 case GlobalPropertyWithVarInjectionChecks: {
3654 PutByIdStatus status;
3655 if (uid)
3656 status = PutByIdStatus::computeFor(globalObject, structure, uid, false);
3657 else
3658 status = PutByIdStatus(PutByIdStatus::TakesSlowPath);
3659 if (status.numVariants() != 1
3660 || status[0].kind() != PutByIdVariant::Replace
3661 || status[0].structure().size() != 1) {
3662 addToGraph(PutById, OpInfo(identifierNumber), get(VirtualRegister(scope)), get(VirtualRegister(value)));
3663 break;
3664 }
3665 ASSERT(status[0].structure().onlyStructure() == structure);
3666 Node* base = cellConstantWithStructureCheck(globalObject, structure);
3667 addToGraph(Phantom, get(VirtualRegister(scope)));
3668 handlePutByOffset(base, identifierNumber, static_cast<PropertyOffset>(operand), get(VirtualRegister(value)));
3669 // Keep scope alive until after put.
3670 addToGraph(Phantom, get(VirtualRegister(scope)));
3671 break;
93a37866 3672 }
3673 case GlobalVar:
3674 case GlobalVarWithVarInjectionChecks: {
3675 if (watchpoints) {
3676 SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
3677 ASSERT_UNUSED(entry, watchpoints == entry.watchpointSet());
3678 }
81345200 3679 Node* valueNode = get(VirtualRegister(value));
3680 addToGraph(PutGlobalVar, OpInfo(operand), weakJSConstant(globalObject), valueNode);
3681 if (watchpoints && watchpoints->state() != IsInvalidated) {
3682 // Must happen after the store. See comment for GetGlobalVar.
3683 addToGraph(NotifyWrite, OpInfo(watchpoints));
3684 }
3685 // Keep scope alive until after put.
3686 addToGraph(Phantom, get(VirtualRegister(scope)));
3687 break;
93a37866 3688 }
ed1e77d3 3689 case LocalClosureVar:
3690 case ClosureVar:
3691 case ClosureVarWithVarInjectionChecks: {
3692 Node* scopeNode = get(VirtualRegister(scope));
3693 Node* valueNode = get(VirtualRegister(value));
3694
3695 addToGraph(PutClosureVar, OpInfo(operand), scopeNode, valueNode);
3696
3697 if (watchpoints && watchpoints->state() != IsInvalidated) {
3698 // Must happen after the store. See comment for GetGlobalVar.
3699 addToGraph(NotifyWrite, OpInfo(watchpoints));
3700 }
3701 break;
3702 }
3703 case Dynamic:
3704 RELEASE_ASSERT_NOT_REACHED();
3705 break;
3706 }
3707 NEXT_OPCODE(op_put_to_scope);
93a37866 3708 }
81345200 3709
3710 case op_loop_hint: {
3711 // Baseline->DFG OSR jumps between loop hints. The DFG assumes that Baseline->DFG
3712 // OSR can only happen at basic block boundaries. Assert that these two statements
3713 // are compatible.
93a37866 3714 RELEASE_ASSERT(m_currentIndex == blockBegin);
3715
3716 // We never do OSR into an inlined code block. That could not happen, since OSR
3717 // looks up the code block that is the replacement for the baseline JIT code
3718 // block. Hence, machine code block = true code block = not inline code block.
3719 if (!m_inlineStackTop->m_caller)
3720 m_currentBlock->isOSRTarget = true;
93a37866 3721
3722 addToGraph(LoopHint);
3723
3724 if (m_vm->watchdog && m_vm->watchdog->isEnabled())
93a37866 3725 addToGraph(CheckWatchdogTimer);
3726
3727 NEXT_OPCODE(op_loop_hint);
3728 }
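// An illustrative tier-up (assumed example): a hot loop such as
//
//     for (var i = 0; i < 1e6; ++i) sum += a[i];
//
// OSR-enters the DFG from baseline at this op_loop_hint mid-execution, which
// is why the hint must begin a basic block, as asserted above.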
3729
3730 case op_create_lexical_environment: {
3731 FrozenValue* symbolTable = m_graph.freezeStrong(m_graph.symbolTableFor(currentNodeOrigin().semantic));
3732 Node* lexicalEnvironment = addToGraph(CreateActivation, OpInfo(symbolTable), get(VirtualRegister(currentInstruction[2].u.operand)));
3733 set(VirtualRegister(currentInstruction[1].u.operand), lexicalEnvironment);
3734 set(VirtualRegister(currentInstruction[2].u.operand), lexicalEnvironment);
3735 NEXT_OPCODE(op_create_lexical_environment);
3736 }
3737
3738 case op_get_scope: {
3739 // Help the later stages a bit by doing some small constant folding here. Note that this
3740 // only helps for the first basic block. It's extremely important not to constant fold
3741 // loads from the scope register later, as that would prevent the DFG from tracking the
3742 // bytecode-level liveness of the scope register.
3743 Node* callee = get(VirtualRegister(JSStack::Callee));
3744 Node* result;
3745 if (JSFunction* function = callee->dynamicCastConstant<JSFunction*>())
3746 result = weakJSConstant(function->scope());
3747 else
3748 result = addToGraph(GetScope, callee);
3749 set(VirtualRegister(currentInstruction[1].u.operand), result);
3750 NEXT_OPCODE(op_get_scope);
3751 }
3752
3753 case op_create_direct_arguments: {
3754 noticeArgumentsUse();
3755 Node* createArguments = addToGraph(CreateDirectArguments);
81345200 3756 set(VirtualRegister(currentInstruction[1].u.operand), createArguments);
ed1e77d3 3757 NEXT_OPCODE(op_create_direct_arguments);
3758 }
3759
3760 case op_create_scoped_arguments: {
3761 noticeArgumentsUse();
3762 Node* createArguments = addToGraph(CreateScopedArguments, get(VirtualRegister(currentInstruction[2].u.operand)));
3763 set(VirtualRegister(currentInstruction[1].u.operand), createArguments);
3764 NEXT_OPCODE(op_create_scoped_arguments);
6fe7ccc8 3765 }
93a37866 3766
3767 case op_create_out_of_band_arguments: {
3768 noticeArgumentsUse();
3769 Node* createArguments = addToGraph(CreateClonedArguments);
3770 set(VirtualRegister(currentInstruction[1].u.operand), createArguments);
3771 NEXT_OPCODE(op_create_out_of_band_arguments);
3772 }
3773
ed1e77d3 3774 case op_get_from_arguments: {
81345200 3775 set(VirtualRegister(currentInstruction[1].u.operand),
93a37866 3776 addToGraph(
3777 GetFromArguments,
3778 OpInfo(currentInstruction[3].u.operand),
3779 OpInfo(getPrediction()),
3780 get(VirtualRegister(currentInstruction[2].u.operand))));
3781 NEXT_OPCODE(op_get_from_arguments);
93a37866 3782 }
6fe7ccc8 3783
3784 case op_put_to_arguments: {
3785 addToGraph(
3786 PutToArguments,
3787 OpInfo(currentInstruction[2].u.operand),
3788 get(VirtualRegister(currentInstruction[1].u.operand)),
3789 get(VirtualRegister(currentInstruction[3].u.operand)));
3790 NEXT_OPCODE(op_put_to_arguments);
3791 }
3792
3793 case op_new_func: {
3794 FunctionExecutable* decl = m_inlineStackTop->m_profiledBlock->functionDecl(currentInstruction[3].u.operand);
3795 FrozenValue* frozen = m_graph.freezeStrong(decl);
3796 set(VirtualRegister(currentInstruction[1].u.operand),
3797 addToGraph(NewFunction, OpInfo(frozen), get(VirtualRegister(currentInstruction[2].u.operand))));
3798 NEXT_OPCODE(op_new_func);
81345200 3799 }
ed1e77d3 3800
6fe7ccc8 3801 case op_new_func_exp: {
3802 FunctionExecutable* expr = m_inlineStackTop->m_profiledBlock->functionExpr(currentInstruction[3].u.operand);
3803 FrozenValue* frozen = m_graph.freezeStrong(expr);
81345200 3804 set(VirtualRegister(currentInstruction[1].u.operand),
ed1e77d3 3805 addToGraph(NewFunction, OpInfo(frozen), get(VirtualRegister(currentInstruction[2].u.operand))));
6fe7ccc8 3806 NEXT_OPCODE(op_new_func_exp);
3807 }
3808
93a37866 3809 case op_typeof: {
3810 set(VirtualRegister(currentInstruction[1].u.operand),
3811 addToGraph(TypeOf, get(VirtualRegister(currentInstruction[2].u.operand))));
3812 NEXT_OPCODE(op_typeof);
3813 }
3814
3815 case op_to_number: {
3816 Node* node = get(VirtualRegister(currentInstruction[2].u.operand));
3817 addToGraph(Phantom, Edge(node, NumberUse));
3818 set(VirtualRegister(currentInstruction[1].u.operand), node);
3819 NEXT_OPCODE(op_to_number);
3820 }
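// Derived from the code above (illustrative example): when profiling predicts
// a number, to_number is a pure check, so
//
//     var n = +x; // unary plus emits op_to_number
//
// parses to a Phantom with a NumberUse edge (an OSR-exiting type check), and
// the original node is passed through unchanged as the result.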
3821
3822 case op_to_string: {
3823 Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
3824 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(ToString, value));
3825 NEXT_OPCODE(op_to_string);
3826 }
3827
3828 case op_in: {
3829 set(VirtualRegister(currentInstruction[1].u.operand),
3830 addToGraph(In, get(VirtualRegister(currentInstruction[2].u.operand)), get(VirtualRegister(currentInstruction[3].u.operand))));
3831 NEXT_OPCODE(op_in);
3832 }
93a37866 3833
3834 case op_get_enumerable_length: {
3835 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(GetEnumerableLength,
3836 get(VirtualRegister(currentInstruction[2].u.operand))));
3837 NEXT_OPCODE(op_get_enumerable_length);
3838 }
3839
3840 case op_has_generic_property: {
3841 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(HasGenericProperty,
3842 get(VirtualRegister(currentInstruction[2].u.operand)),
3843 get(VirtualRegister(currentInstruction[3].u.operand))));
3844 NEXT_OPCODE(op_has_generic_property);
3845 }
3846
3847 case op_has_structure_property: {
3848 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(HasStructureProperty,
3849 get(VirtualRegister(currentInstruction[2].u.operand)),
3850 get(VirtualRegister(currentInstruction[3].u.operand)),
3851 get(VirtualRegister(currentInstruction[4].u.operand))));
3852 NEXT_OPCODE(op_has_structure_property);
3853 }
3854
3855 case op_has_indexed_property: {
3856 Node* base = get(VirtualRegister(currentInstruction[2].u.operand));
3857 ArrayMode arrayMode = getArrayMode(currentInstruction[4].u.arrayProfile, Array::Read);
3858 Node* property = get(VirtualRegister(currentInstruction[3].u.operand));
3859 Node* hasIndexedProperty = addToGraph(HasIndexedProperty, OpInfo(arrayMode.asWord()), base, property);
3860 set(VirtualRegister(currentInstruction[1].u.operand), hasIndexedProperty);
3861 NEXT_OPCODE(op_has_indexed_property);
3862 }
3863
        case op_get_direct_pname: {
            SpeculatedType prediction = getPredictionWithoutOSRExit();

            Node* base = get(VirtualRegister(currentInstruction[2].u.operand));
            Node* property = get(VirtualRegister(currentInstruction[3].u.operand));
            Node* index = get(VirtualRegister(currentInstruction[4].u.operand));
            Node* enumerator = get(VirtualRegister(currentInstruction[5].u.operand));

            addVarArgChild(base);
            addVarArgChild(property);
            addVarArgChild(index);
            addVarArgChild(enumerator);
            set(VirtualRegister(currentInstruction[1].u.operand),
                addToGraph(Node::VarArg, GetDirectPname, OpInfo(0), OpInfo(prediction)));

            NEXT_OPCODE(op_get_direct_pname);
        }

        case op_get_property_enumerator: {
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(GetPropertyEnumerator,
                get(VirtualRegister(currentInstruction[2].u.operand))));
            NEXT_OPCODE(op_get_property_enumerator);
        }

        case op_enumerator_structure_pname: {
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(GetEnumeratorStructurePname,
                get(VirtualRegister(currentInstruction[2].u.operand)),
                get(VirtualRegister(currentInstruction[3].u.operand))));
            NEXT_OPCODE(op_enumerator_structure_pname);
        }

        case op_enumerator_generic_pname: {
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(GetEnumeratorGenericPname,
                get(VirtualRegister(currentInstruction[2].u.operand)),
                get(VirtualRegister(currentInstruction[3].u.operand))));
            NEXT_OPCODE(op_enumerator_generic_pname);
        }

        case op_to_index_string: {
            set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(ToIndexString,
                get(VirtualRegister(currentInstruction[2].u.operand))));
            NEXT_OPCODE(op_to_index_string);
        }

        default:
            // Parse failed! This should not happen because the capabilities checker
            // should have caught it.
            RELEASE_ASSERT_NOT_REACHED();
            return false;
        }
    }
}

void ByteCodeParser::linkBlock(BasicBlock* block, Vector<BasicBlock*>& possibleTargets)
{
    ASSERT(!block->isLinked);
    ASSERT(!block->isEmpty());
    Node* node = block->terminal();
    ASSERT(node->isTerminal());

    switch (node->op()) {
    case Jump:
        node->targetBlock() = blockForBytecodeOffset(possibleTargets, node->targetBytecodeOffsetDuringParsing());
        break;

    case Branch: {
        BranchData* data = node->branchData();
        data->taken.block = blockForBytecodeOffset(possibleTargets, data->takenBytecodeIndex());
        data->notTaken.block = blockForBytecodeOffset(possibleTargets, data->notTakenBytecodeIndex());
        break;
    }

    case Switch: {
        SwitchData* data = node->switchData();
        for (unsigned i = node->switchData()->cases.size(); i--;)
            data->cases[i].target.block = blockForBytecodeOffset(possibleTargets, data->cases[i].target.bytecodeIndex());
        data->fallThrough.block = blockForBytecodeOffset(possibleTargets, data->fallThrough.bytecodeIndex());
        break;
    }

    default:
        break;
    }

    if (verbose)
        dataLog("Marking ", RawPointer(block), " as linked (actually did linking)\n");
    block->didLink();
}

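// Links every block in the list that still needs normal linking; blocks whose
// targets were already fixed up (m_needsNormalLinking is false) are skipped.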
void ByteCodeParser::linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets)
{
    for (size_t i = 0; i < unlinkedBlocks.size(); ++i) {
        if (verbose)
            dataLog("Attempting to link ", RawPointer(unlinkedBlocks[i].m_block), "\n");
        if (unlinkedBlocks[i].m_needsNormalLinking) {
            if (verbose)
                dataLog(" Does need normal linking.\n");
            linkBlock(unlinkedBlocks[i].m_block, possibleTargets);
            unlinkedBlocks[i].m_needsNormalLinking = false;
        }
    }
}

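// Lazily builds the identifier-to-index map for the root code block, so that
// identifiers coming from inlined code blocks can be remapped into it (see the
// inline case in InlineStackEntry below).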
void ByteCodeParser::buildOperandMapsIfNecessary()
{
    if (m_haveBuiltOperandMaps)
        return;

    for (size_t i = 0; i < m_codeBlock->numberOfIdentifiers(); ++i)
        m_identifierMap.add(m_codeBlock->identifier(i).impl(), i);

    m_haveBuiltOperandMaps = true;
}

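// An InlineStackEntry is pushed for each code block being parsed: one for the
// machine (root) code block and one per inlined callee. The inline case
// allocates an InlineCallFrame and remaps the callee's identifiers, constant
// buffers, and switch jump tables into the root code block's tables; the
// machine code block case uses identity remaps.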
ByteCodeParser::InlineStackEntry::InlineStackEntry(
    ByteCodeParser* byteCodeParser,
    CodeBlock* codeBlock,
    CodeBlock* profiledBlock,
    BasicBlock* callsiteBlockHead,
    JSFunction* callee, // Null if this is a closure call.
    VirtualRegister returnValueVR,
    VirtualRegister inlineCallFrameStart,
    int argumentCountIncludingThis,
    InlineCallFrame::Kind kind)
    : m_byteCodeParser(byteCodeParser)
    , m_codeBlock(codeBlock)
    , m_profiledBlock(profiledBlock)
    , m_callsiteBlockHead(callsiteBlockHead)
    , m_returnValue(returnValueVR)
    , m_didReturn(false)
    , m_didEarlyReturn(false)
    , m_caller(byteCodeParser->m_inlineStackTop)
{
    {
        ConcurrentJITLocker locker(m_profiledBlock->m_lock);
        m_lazyOperands.initialize(locker, m_profiledBlock->lazyOperandValueProfiles());
        m_exitProfile.initialize(locker, profiledBlock->exitProfile());

        // We do this while holding the lock because we want to encourage StructureStubInfos
        // to be added to operations, and because the profiled block could be in the middle
        // of LLInt->JIT tier-up, in which case the infos would be getting added right now.
        if (m_profiledBlock->hasBaselineJITProfiling()) {
            m_profiledBlock->getStubInfoMap(locker, m_stubInfos);
            m_profiledBlock->getCallLinkInfoMap(locker, m_callLinkInfos);
        }
    }

    m_argumentPositions.resize(argumentCountIncludingThis);
    for (int i = 0; i < argumentCountIncludingThis; ++i) {
        byteCodeParser->m_graph.m_argumentPositions.append(ArgumentPosition());
        ArgumentPosition* argumentPosition = &byteCodeParser->m_graph.m_argumentPositions.last();
        m_argumentPositions[i] = argumentPosition;
    }

    if (m_caller) {
        // Inline case.
        ASSERT(codeBlock != byteCodeParser->m_codeBlock);
        ASSERT(inlineCallFrameStart.isValid());
        ASSERT(callsiteBlockHead);

        m_inlineCallFrame = byteCodeParser->m_graph.m_plan.inlineCallFrames->add();
        byteCodeParser->m_graph.freeze(codeBlock->ownerExecutable());
        initializeLazyWriteBarrierForInlineCallFrameExecutable(
            byteCodeParser->m_graph.m_plan.writeBarriers,
            m_inlineCallFrame->executable,
            byteCodeParser->m_codeBlock,
            m_inlineCallFrame,
            byteCodeParser->m_codeBlock->ownerExecutable(),
            codeBlock->ownerExecutable());
        m_inlineCallFrame->setStackOffset(inlineCallFrameStart.offset() - JSStack::CallFrameHeaderSize);
        if (callee) {
            m_inlineCallFrame->calleeRecovery = ValueRecovery::constant(callee);
            m_inlineCallFrame->isClosureCall = false;
        } else
            m_inlineCallFrame->isClosureCall = true;
        m_inlineCallFrame->caller = byteCodeParser->currentCodeOrigin();
        m_inlineCallFrame->arguments.resizeToFit(argumentCountIncludingThis); // Set the number of arguments including this, but don't configure the value recoveries yet.
        m_inlineCallFrame->kind = kind;

        byteCodeParser->buildOperandMapsIfNecessary();

        m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
        m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
        m_switchRemap.resize(codeBlock->numberOfSwitchJumpTables());

        for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i) {
            UniquedStringImpl* rep = codeBlock->identifier(i).impl();
            BorrowedIdentifierMap::AddResult result = byteCodeParser->m_identifierMap.add(rep, byteCodeParser->m_graph.identifiers().numberOfIdentifiers());
            if (result.isNewEntry)
                byteCodeParser->m_graph.identifiers().addLazily(rep);
            m_identifierRemap[i] = result.iterator->value;
        }
        for (unsigned i = 0; i < codeBlock->numberOfConstantBuffers(); ++i) {
            // If we inline the same code block multiple times, we don't want to needlessly
            // duplicate its constant buffers.
            HashMap<ConstantBufferKey, unsigned>::iterator iter =
                byteCodeParser->m_constantBufferCache.find(ConstantBufferKey(codeBlock, i));
            if (iter != byteCodeParser->m_constantBufferCache.end()) {
                m_constantBufferRemap[i] = iter->value;
                continue;
            }
            Vector<JSValue>& buffer = codeBlock->constantBufferAsVector(i);
            unsigned newIndex = byteCodeParser->m_codeBlock->addConstantBuffer(buffer);
            m_constantBufferRemap[i] = newIndex;
            byteCodeParser->m_constantBufferCache.add(ConstantBufferKey(codeBlock, i), newIndex);
        }
        for (unsigned i = 0; i < codeBlock->numberOfSwitchJumpTables(); ++i) {
            m_switchRemap[i] = byteCodeParser->m_codeBlock->numberOfSwitchJumpTables();
            byteCodeParser->m_codeBlock->addSwitchJumpTable() = codeBlock->switchJumpTable(i);
        }
        m_callsiteBlockHeadNeedsLinking = true;
    } else {
        // Machine code block case.
        ASSERT(codeBlock == byteCodeParser->m_codeBlock);
        ASSERT(!callee);
        ASSERT(!returnValueVR.isValid());
        ASSERT(!inlineCallFrameStart.isValid());
        ASSERT(!callsiteBlockHead);

        m_inlineCallFrame = 0;

        m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
        m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
        m_switchRemap.resize(codeBlock->numberOfSwitchJumpTables());
        for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i)
            m_identifierRemap[i] = i;
        for (size_t i = 0; i < codeBlock->numberOfConstantBuffers(); ++i)
            m_constantBufferRemap[i] = i;
        for (size_t i = 0; i < codeBlock->numberOfSwitchJumpTables(); ++i)
            m_switchRemap[i] = i;
        m_callsiteBlockHeadNeedsLinking = false;
    }

    byteCodeParser->m_inlineStackTop = this;
}

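// Parses the current code block into basic blocks. Jump targets are computed
// up front so that block boundaries are known precisely; parsing then proceeds
// one block at a time, each block ending at the next jump target or at a
// terminal.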
void ByteCodeParser::parseCodeBlock()
{
    clearCaches();

    CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;

    if (m_graph.compilation()) {
        m_graph.compilation()->addProfiledBytecodes(
            *m_vm->m_perBytecodeProfiler, m_inlineStackTop->m_profiledBlock);
    }

    if (UNLIKELY(Options::dumpSourceAtDFGTime())) {
        Vector<DeferredSourceDump>& deferredSourceDump = m_graph.m_plan.callback->ensureDeferredSourceDump();
        if (inlineCallFrame()) {
            DeferredSourceDump dump(codeBlock->baselineVersion(), m_codeBlock, JITCode::DFGJIT, inlineCallFrame()->caller);
            deferredSourceDump.append(dump);
        } else
            deferredSourceDump.append(DeferredSourceDump(codeBlock->baselineVersion()));
    }

    if (Options::dumpBytecodeAtDFGTime()) {
        dataLog("Parsing ", *codeBlock);
        if (inlineCallFrame()) {
            dataLog(
                " for inlining at ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT),
                " ", inlineCallFrame()->caller);
        }
        dataLog(
            ": needsActivation = ", codeBlock->needsActivation(),
            ", isStrictMode = ", codeBlock->ownerExecutable()->isStrictMode(), "\n");
        codeBlock->baselineVersion()->dumpBytecode();
    }

    Vector<unsigned, 32> jumpTargets;
    computePreciseJumpTargets(codeBlock, jumpTargets);
    if (Options::dumpBytecodeAtDFGTime()) {
        dataLog("Jump targets: ");
        CommaPrinter comma;
        for (unsigned i = 0; i < jumpTargets.size(); ++i)
            dataLog(comma, jumpTargets[i]);
        dataLog("\n");
    }

    for (unsigned jumpTargetIndex = 0; jumpTargetIndex <= jumpTargets.size(); ++jumpTargetIndex) {
        // The maximum bytecode offset to go into the current basic block is either the next jump target, or the end of the instructions.
        unsigned limit = jumpTargetIndex < jumpTargets.size() ? jumpTargets[jumpTargetIndex] : codeBlock->instructions().size();
        ASSERT(m_currentIndex < limit);

        // Loop until we reach the current limit (i.e. next jump target).
        do {
            if (!m_currentBlock) {
                // Check if we can use the last block.
                if (m_graph.numBlocks() && m_graph.lastBlock()->isEmpty()) {
                    // This must be a block belonging to us.
                    ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_block == m_graph.lastBlock());
                    // Either the block is linkable or it isn't. If it's linkable then it's the last
                    // block in the blockLinkingTargets list. If it's not then the last block will
                    // have a lower bytecode index than the one we're about to give to this block.
                    if (m_inlineStackTop->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_blockLinkingTargets.last()->bytecodeBegin != m_currentIndex) {
                        // Make the block linkable.
                        ASSERT(m_inlineStackTop->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_blockLinkingTargets.last()->bytecodeBegin < m_currentIndex);
                        m_inlineStackTop->m_blockLinkingTargets.append(m_graph.lastBlock());
                    }
                    // Change its bytecode begin and continue.
                    m_currentBlock = m_graph.lastBlock();
                    m_currentBlock->bytecodeBegin = m_currentIndex;
                } else {
                    RefPtr<BasicBlock> block = adoptRef(new BasicBlock(m_currentIndex, m_numArguments, m_numLocals, PNaN));
                    m_currentBlock = block.get();
                    // This assertion checks two things:
                    // 1) If the bytecodeBegin is greater than currentIndex, then something has gone
                    //    horribly wrong. So, we're probably generating incorrect code.
                    // 2) If the bytecodeBegin is equal to the currentIndex, then we failed to do
                    //    a peephole coalescing of this block in the if statement above. So, we're
                    //    generating suboptimal code and leaving more work for the CFG simplifier.
                    if (!m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
                        unsigned lastBegin =
                            m_inlineStackTop->m_unlinkedBlocks.last().m_block->bytecodeBegin;
                        ASSERT_UNUSED(
                            lastBegin, lastBegin == UINT_MAX || lastBegin < m_currentIndex);
                    }
                    m_inlineStackTop->m_unlinkedBlocks.append(UnlinkedBlock(block.get()));
                    m_inlineStackTop->m_blockLinkingTargets.append(block.get());
                    // The first block is definitely an OSR target.
                    if (!m_graph.numBlocks())
                        block->isOSRTarget = true;
                    m_graph.appendBlock(block);
                    prepareToParseBlock();
                }
            }

            bool shouldContinueParsing = parseBlock(limit);

            // We should not have gone beyond the limit.
            ASSERT(m_currentIndex <= limit);

            // We should have planted a terminal, or we just gave up because
            // we realized that the jump target information is imprecise, or we
            // are at the end of an inline function, or we realized that we
            // should stop parsing because there was a return in the first
            // basic block.
            ASSERT(m_currentBlock->isEmpty() || m_currentBlock->terminal() || (m_currentIndex == codeBlock->instructions().size() && inlineCallFrame()) || !shouldContinueParsing);

            if (!shouldContinueParsing) {
                if (Options::verboseDFGByteCodeParsing())
                    dataLog("Done parsing ", *codeBlock, "\n");
                return;
            }

            m_currentBlock = 0;
        } while (m_currentIndex < limit);
    }

    // Should have reached the end of the instructions.
    ASSERT(m_currentIndex == codeBlock->instructions().size());

    if (Options::verboseDFGByteCodeParsing())
        dataLog("Done parsing ", *codeBlock, " (fell off end)\n");
}

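// Top-level driver: parses the machine code block (recursing into callees via
// nested InlineStackEntries as calls are inlined), then links branch targets,
// computes reachability, and prunes blocks that turned out to be unreachable.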
bool ByteCodeParser::parse()
{
    // Set during construction.
    ASSERT(!m_currentIndex);

    if (Options::verboseDFGByteCodeParsing())
        dataLog("Parsing ", *m_codeBlock, "\n");

    m_dfgCodeBlock = m_graph.m_plan.profiledDFGCodeBlock.get();
    if (isFTL(m_graph.m_plan.mode) && m_dfgCodeBlock
        && Options::enablePolyvariantDevirtualization()) {
        if (Options::enablePolyvariantCallInlining())
            CallLinkStatus::computeDFGStatuses(m_dfgCodeBlock, m_callContextMap);
        if (Options::enablePolyvariantByIdInlining())
            m_dfgCodeBlock->getStubInfoMap(m_dfgStubInfos);
    }

    InlineStackEntry inlineStackEntry(
        this, m_codeBlock, m_profiledBlock, 0, 0, VirtualRegister(), VirtualRegister(),
        m_codeBlock->numParameters(), InlineCallFrame::Call);

    parseCodeBlock();

    linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
    m_graph.determineReachability();
    m_graph.killUnreachableBlocks();

    for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
        BasicBlock* block = m_graph.block(blockIndex);
        if (!block)
            continue;
        ASSERT(block->variablesAtHead.numberOfLocals() == m_graph.block(0)->variablesAtHead.numberOfLocals());
        ASSERT(block->variablesAtHead.numberOfArguments() == m_graph.block(0)->variablesAtHead.numberOfArguments());
        ASSERT(block->variablesAtTail.numberOfLocals() == m_graph.block(0)->variablesAtHead.numberOfLocals());
        ASSERT(block->variablesAtTail.numberOfArguments() == m_graph.block(0)->variablesAtHead.numberOfArguments());
    }

    m_graph.m_localVars = m_numLocals;
    m_graph.m_parameterSlots = m_parameterSlots;

    return true;
}

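// Free-standing entry point: constructs a ByteCodeParser over the graph and
// runs it.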
bool parse(Graph& graph)
{
    SamplingRegion samplingRegion("DFG Parsing");
    return ByteCodeParser(graph).parse();
}

} } // namespace JSC::DFG

#endif