2 * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBlockWorklist.h"
36 #include "DFGClobberSet.h"
37 #include "DFGJITCode.h"
38 #include "DFGVariableAccessDataDump.h"
39 #include "FullBytecodeLiveness.h"
40 #include "FunctionExecutableDump.h"
42 #include "JSLexicalEnvironment.h"
43 #include "MaxFrameExtentForSlowPathCall.h"
44 #include "OperandsInlines.h"
45 #include "JSCInlines.h"
46 #include "StackAlignment.h"
47 #include <wtf/CommaPrinter.h>
48 #include <wtf/ListDump.h>
50 namespace JSC
{ namespace DFG
{
52 // Creates an array of stringized names.
53 static const char* dfgOpNames
[] = {
54 #define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
55 FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM
)
56 #undef STRINGIZE_DFG_OP_ENUM
// Graph constructor. FRAGMENT: extraction dropped lines here — the opening
// brace of the member-initializer list and the initializers the list below
// clearly depends on (the ones binding `vm` and `plan` to members, since
// m_plan is read immediately below) — restore from upstream before compiling.
59 Graph::Graph(VM
& vm
, Plan
& plan
, LongLivedState
& longLivedState
)
// The code block being compiled, taken from the compilation plan.
62 , m_codeBlock(m_plan
.codeBlock
.get())
// The baseline block that supplied profiling data for this compile.
63 , m_profiledBlock(m_codeBlock
->alternative())
// Node allocator is shared across compilations via LongLivedState.
64 , m_allocator(longLivedState
.m_allocator
)
65 , m_nextMachineLocal(0)
66 , m_fixpointState(BeforeFixpoint
)
67 , m_structureRegistrationState(HaveNotStartedRegistering
)
69 , m_unificationState(LocallyUnified
)
70 , m_refCountState(EverythingIsLive
)
// DFG compilation requires a profiled (baseline) alternative to exist.
72 ASSERT(m_profiledBlock
);
// Debugger-aware codegen is needed if the global object has a debugger
// attached, or if the option forces debugger-style bytecode everywhere.
74 m_hasDebuggerEnabled
= m_profiledBlock
->globalObject()->hasDebugger()
75 || Options::forceDebuggerBytecodeGeneration();
// Destructor body FRAGMENT (the `Graph::~Graph()` header line was dropped by
// extraction): returns every Phi and body node of every block to the shared
// node allocator, then releases all allocator storage in one shot.
80 for (BlockIndex blockIndex
= numBlocks(); blockIndex
--;) {
81 BasicBlock
* block
= this->block(blockIndex
);
// NOTE(review): a 3-line gap follows in the original numbering (82-84) —
// presumably a null-block guard; confirm against upstream.
// Free the Phi nodes first...
85 for (unsigned phiIndex
= block
->phis
.size(); phiIndex
--;)
86 m_allocator
.free(block
->phis
[phiIndex
]);
// ...then the block body nodes.
87 for (unsigned nodeIndex
= block
->size(); nodeIndex
--;)
88 m_allocator
.free(block
->at(nodeIndex
));
// Release any remaining allocator-owned storage.
90 m_allocator
.freeAll();
93 const char *Graph::opName(NodeType op
)
95 return dfgOpNames
[op
];
// Prints `amount` space characters to `out` (used to indent dump output by
// inline depth). FRAGMENT: only the signature survived extraction — the body
// was dropped entirely; restore from upstream.
98 static void printWhiteSpace(PrintStream
& out
, unsigned amount
)
104 bool Graph::dumpCodeOrigin(PrintStream
& out
, const char* prefix
, Node
* previousNode
, Node
* currentNode
, DumpContext
* context
)
109 if (previousNode
->origin
.semantic
.inlineCallFrame
== currentNode
->origin
.semantic
.inlineCallFrame
)
112 Vector
<CodeOrigin
> previousInlineStack
= previousNode
->origin
.semantic
.inlineStack();
113 Vector
<CodeOrigin
> currentInlineStack
= currentNode
->origin
.semantic
.inlineStack();
114 unsigned commonSize
= std::min(previousInlineStack
.size(), currentInlineStack
.size());
115 unsigned indexOfDivergence
= commonSize
;
116 for (unsigned i
= 0; i
< commonSize
; ++i
) {
117 if (previousInlineStack
[i
].inlineCallFrame
!= currentInlineStack
[i
].inlineCallFrame
) {
118 indexOfDivergence
= i
;
123 bool hasPrinted
= false;
126 for (unsigned i
= previousInlineStack
.size(); i
-- > indexOfDivergence
;) {
128 printWhiteSpace(out
, i
* 2);
129 out
.print("<-- ", inContext(*previousInlineStack
[i
].inlineCallFrame
, context
), "\n");
134 for (unsigned i
= indexOfDivergence
; i
< currentInlineStack
.size(); ++i
) {
136 printWhiteSpace(out
, i
* 2);
137 out
.print("--> ", inContext(*currentInlineStack
[i
].inlineCallFrame
, context
), "\n");
144 int Graph::amountOfNodeWhiteSpace(Node
* node
)
146 return (node
->origin
.semantic
.inlineDepth() - 1) * 2;
149 void Graph::printNodeWhiteSpace(PrintStream
& out
, Node
* node
)
151 printWhiteSpace(out
, amountOfNodeWhiteSpace(node
));
154 void Graph::dump(PrintStream
& out
, const char* prefix
, Node
* node
, DumpContext
* context
)
156 NodeType op
= node
->op();
158 unsigned refCount
= node
->refCount();
159 bool mustGenerate
= node
->mustGenerate();
164 printNodeWhiteSpace(out
, node
);
166 // Example/explanation of dataflow dump output
168 // 14: <!2:7> GetByVal(@3, @13)
171 // (1) The nodeIndex of this operation.
172 // (2) The reference count. The number printed is the 'real' count,
173 // not including the 'mustGenerate' ref. If the node is
174 // 'mustGenerate' then the count it prefixed with '!'.
175 // (3) The virtual register slot assigned to this node.
176 // (4) The name of the operation.
177 // (5) The arguments to the operation. The may be of the form:
178 // @# - a NodeIndex referencing a prior node in the graph.
179 // arg# - an argument number.
180 // id# - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
181 // var# - the index of a var on the global object, used by GetGlobalVar/PutGlobalVar operations.
182 out
.printf("% 4d:<%c%u:", (int)node
->index(), mustGenerate
? '!' : ' ', refCount
);
183 if (node
->hasResult() && node
->hasVirtualRegister() && node
->virtualRegister().isValid())
184 out
.print(node
->virtualRegister());
187 out
.print(">\t", opName(op
), "(");
189 if (node
->flags() & NodeHasVarArgs
) {
190 for (unsigned childIdx
= node
->firstChild(); childIdx
< node
->firstChild() + node
->numChildren(); childIdx
++) {
191 if (!m_varArgChildren
[childIdx
])
193 out
.print(comma
, m_varArgChildren
[childIdx
]);
196 if (!!node
->child1() || !!node
->child2() || !!node
->child3())
197 out
.print(comma
, node
->child1());
198 if (!!node
->child2() || !!node
->child3())
199 out
.print(comma
, node
->child2());
200 if (!!node
->child3())
201 out
.print(comma
, node
->child3());
204 if (toCString(NodeFlagsDump(node
->flags())) != "<empty>")
205 out
.print(comma
, NodeFlagsDump(node
->flags()));
206 if (node
->prediction())
207 out
.print(comma
, SpeculationDump(node
->prediction()));
208 if (node
->hasArrayMode())
209 out
.print(comma
, node
->arrayMode());
210 if (node
->hasArithMode())
211 out
.print(comma
, node
->arithMode());
212 if (node
->hasScopeOffset())
213 out
.print(comma
, node
->scopeOffset());
214 if (node
->hasDirectArgumentsOffset())
215 out
.print(comma
, node
->capturedArgumentsOffset());
216 if (node
->hasRegisterPointer())
217 out
.print(comma
, "global", globalObjectFor(node
->origin
.semantic
)->findVariableIndex(node
->variablePointer()), "(", RawPointer(node
->variablePointer()), ")");
218 if (node
->hasIdentifier())
219 out
.print(comma
, "id", node
->identifierNumber(), "{", identifiers()[node
->identifierNumber()], "}");
220 if (node
->hasPromotedLocationDescriptor())
221 out
.print(comma
, node
->promotedLocationDescriptor());
222 if (node
->hasStructureSet())
223 out
.print(comma
, inContext(node
->structureSet(), context
));
224 if (node
->hasStructure())
225 out
.print(comma
, inContext(*node
->structure(), context
));
226 if (node
->hasTransition()) {
227 out
.print(comma
, pointerDumpInContext(node
->transition(), context
));
229 out
.print(", ID:", node
->transition()->next
->id());
231 out
.print(", ID:", RawPointer(node
->transition()->next
));
234 if (node
->hasCellOperand()) {
235 if (!node
->cellOperand()->value() || !node
->cellOperand()->value().isCell())
236 out
.print(comma
, "invalid cell operand: ", node
->cellOperand()->value());
238 out
.print(comma
, pointerDump(node
->cellOperand()->value().asCell()));
239 if (node
->cellOperand()->value().isCell()) {
240 CallVariant
variant(node
->cellOperand()->value().asCell());
241 if (ExecutableBase
* executable
= variant
.executable()) {
242 if (executable
->isHostFunction())
243 out
.print(comma
, "<host function>");
244 else if (FunctionExecutable
* functionExecutable
= jsDynamicCast
<FunctionExecutable
*>(executable
))
245 out
.print(comma
, FunctionExecutableDump(functionExecutable
));
247 out
.print(comma
, "<non-function executable>");
252 if (node
->hasStorageAccessData()) {
253 StorageAccessData
& storageAccessData
= node
->storageAccessData();
254 out
.print(comma
, "id", storageAccessData
.identifierNumber
, "{", identifiers()[storageAccessData
.identifierNumber
], "}");
255 out
.print(", ", static_cast<ptrdiff_t>(storageAccessData
.offset
));
257 if (node
->hasMultiGetByOffsetData()) {
258 MultiGetByOffsetData
& data
= node
->multiGetByOffsetData();
259 out
.print(comma
, "id", data
.identifierNumber
, "{", identifiers()[data
.identifierNumber
], "}");
260 for (unsigned i
= 0; i
< data
.variants
.size(); ++i
)
261 out
.print(comma
, inContext(data
.variants
[i
], context
));
263 if (node
->hasMultiPutByOffsetData()) {
264 MultiPutByOffsetData
& data
= node
->multiPutByOffsetData();
265 out
.print(comma
, "id", data
.identifierNumber
, "{", identifiers()[data
.identifierNumber
], "}");
266 for (unsigned i
= 0; i
< data
.variants
.size(); ++i
)
267 out
.print(comma
, inContext(data
.variants
[i
], context
));
269 ASSERT(node
->hasVariableAccessData(*this) == node
->hasLocal(*this));
270 if (node
->hasVariableAccessData(*this)) {
271 VariableAccessData
* variableAccessData
= node
->tryGetVariableAccessData();
272 if (variableAccessData
) {
273 VirtualRegister operand
= variableAccessData
->local();
274 out
.print(comma
, variableAccessData
->local(), "(", VariableAccessDataDump(*this, variableAccessData
), ")");
275 operand
= variableAccessData
->machineLocal();
276 if (operand
.isValid())
277 out
.print(comma
, "machine:", operand
);
280 if (node
->hasStackAccessData()) {
281 StackAccessData
* data
= node
->stackAccessData();
282 out
.print(comma
, data
->local
);
283 if (data
->machineLocal
.isValid())
284 out
.print(comma
, "machine:", data
->machineLocal
);
285 out
.print(comma
, data
->format
);
287 if (node
->hasUnlinkedLocal())
288 out
.print(comma
, node
->unlinkedLocal());
289 if (node
->hasUnlinkedMachineLocal()) {
290 VirtualRegister operand
= node
->unlinkedMachineLocal();
291 if (operand
.isValid())
292 out
.print(comma
, "machine:", operand
);
294 if (node
->hasConstantBuffer()) {
296 out
.print(node
->startConstant(), ":[");
297 CommaPrinter anotherComma
;
298 for (unsigned i
= 0; i
< node
->numConstants(); ++i
)
299 out
.print(anotherComma
, pointerDumpInContext(freeze(m_codeBlock
->constantBuffer(node
->startConstant())[i
]), context
));
302 if (node
->hasIndexingType())
303 out
.print(comma
, IndexingTypeDump(node
->indexingType()));
304 if (node
->hasTypedArrayType())
305 out
.print(comma
, node
->typedArrayType());
307 out
.print(comma
, "^", node
->phi()->index());
308 if (node
->hasExecutionCounter())
309 out
.print(comma
, RawPointer(node
->executionCounter()));
310 if (node
->hasWatchpointSet())
311 out
.print(comma
, RawPointer(node
->watchpointSet()));
312 if (node
->hasStoragePointer())
313 out
.print(comma
, RawPointer(node
->storagePointer()));
314 if (node
->hasObjectMaterializationData())
315 out
.print(comma
, node
->objectMaterializationData());
316 if (node
->hasCallVarargsData())
317 out
.print(comma
, "firstVarArgOffset = ", node
->callVarargsData()->firstVarArgOffset
);
318 if (node
->hasLoadVarargsData()) {
319 LoadVarargsData
* data
= node
->loadVarargsData();
320 out
.print(comma
, "start = ", data
->start
, ", count = ", data
->count
);
321 if (data
->machineStart
.isValid())
322 out
.print(", machineStart = ", data
->machineStart
);
323 if (data
->machineCount
.isValid())
324 out
.print(", machineCount = ", data
->machineCount
);
325 out
.print(", offset = ", data
->offset
, ", mandatoryMinimum = ", data
->mandatoryMinimum
);
326 out
.print(", limit = ", data
->limit
);
328 if (node
->isConstant())
329 out
.print(comma
, pointerDumpInContext(node
->constant(), context
));
331 out
.print(comma
, "T:", *node
->targetBlock());
332 if (node
->isBranch())
333 out
.print(comma
, "T:", node
->branchData()->taken
, ", F:", node
->branchData()->notTaken
);
334 if (node
->isSwitch()) {
335 SwitchData
* data
= node
->switchData();
336 out
.print(comma
, data
->kind
);
337 for (unsigned i
= 0; i
< data
->cases
.size(); ++i
)
338 out
.print(comma
, inContext(data
->cases
[i
].value
, context
), ":", data
->cases
[i
].target
);
339 out
.print(comma
, "default:", data
->fallThrough
);
343 addReadsAndWrites(*this, node
, reads
, writes
);
344 if (!reads
.isEmpty())
345 out
.print(comma
, "R:", sortedListDump(reads
.direct(), ","));
346 if (!writes
.isEmpty())
347 out
.print(comma
, "W:", sortedListDump(writes
.direct(), ","));
348 if (node
->origin
.isSet()) {
349 out
.print(comma
, "bc#", node
->origin
.semantic
.bytecodeIndex
);
350 if (node
->origin
.semantic
!= node
->origin
.forExit
)
351 out
.print(comma
, "exit: ", node
->origin
.forExit
);
356 if (node
->hasVariableAccessData(*this) && node
->tryGetVariableAccessData())
357 out
.print(" predicting ", SpeculationDump(node
->tryGetVariableAccessData()->prediction()));
358 else if (node
->hasHeapPrediction())
359 out
.print(" predicting ", SpeculationDump(node
->getHeapPrediction()));
364 bool Graph::terminalsAreValid()
366 for (BasicBlock
* block
: blocksInNaturalOrder()) {
367 if (!block
->terminal())
373 void Graph::dumpBlockHeader(PrintStream
& out
, const char* prefix
, BasicBlock
* block
, PhiNodeDumpMode phiNodeDumpMode
, DumpContext
* context
)
375 out
.print(prefix
, "Block ", *block
, " (", inContext(block
->at(0)->origin
.semantic
, context
), "):", block
->isReachable
? "" : " (skipped)", block
->isOSRTarget
? " (OSR target)" : "", "\n");
376 if (block
->executionCount
== block
->executionCount
)
377 out
.print(prefix
, " Execution count: ", block
->executionCount
, "\n");
378 out
.print(prefix
, " Predecessors:");
379 for (size_t i
= 0; i
< block
->predecessors
.size(); ++i
)
380 out
.print(" ", *block
->predecessors
[i
]);
382 out
.print(prefix
, " Successors:");
383 if (block
->terminal()) {
384 for (BasicBlock
* successor
: block
->successors()) {
385 out
.print(" ", *successor
);
386 if (m_prePostNumbering
.isValid())
387 out
.print(" (", m_prePostNumbering
.edgeKind(block
, successor
), ")");
390 out
.print(" <invalid>");
392 if (m_dominators
.isValid() && terminalsAreValid()) {
393 out
.print(prefix
, " Dominated by: ", m_dominators
.dominatorsOf(block
), "\n");
394 out
.print(prefix
, " Dominates: ", m_dominators
.blocksDominatedBy(block
), "\n");
395 out
.print(prefix
, " Dominance Frontier: ", m_dominators
.dominanceFrontierOf(block
), "\n");
396 out
.print(prefix
, " Iterated Dominance Frontier: ", m_dominators
.iteratedDominanceFrontierOf(BlockList(1, block
)), "\n");
398 if (m_prePostNumbering
.isValid())
399 out
.print(prefix
, " Pre/Post Numbering: ", m_prePostNumbering
.preNumber(block
), "/", m_prePostNumbering
.postNumber(block
), "\n");
400 if (m_naturalLoops
.isValid()) {
401 if (const NaturalLoop
* loop
= m_naturalLoops
.headerOf(block
)) {
402 out
.print(prefix
, " Loop header, contains:");
403 Vector
<BlockIndex
> sortedBlockList
;
404 for (unsigned i
= 0; i
< loop
->size(); ++i
)
405 sortedBlockList
.append(loop
->at(i
)->index
);
406 std::sort(sortedBlockList
.begin(), sortedBlockList
.end());
407 for (unsigned i
= 0; i
< sortedBlockList
.size(); ++i
)
408 out
.print(" #", sortedBlockList
[i
]);
412 Vector
<const NaturalLoop
*> containingLoops
=
413 m_naturalLoops
.loopsOf(block
);
414 if (!containingLoops
.isEmpty()) {
415 out
.print(prefix
, " Containing loop headers:");
416 for (unsigned i
= 0; i
< containingLoops
.size(); ++i
)
417 out
.print(" ", *containingLoops
[i
]->header());
421 if (!block
->phis
.isEmpty()) {
422 out
.print(prefix
, " Phi Nodes:");
423 for (size_t i
= 0; i
< block
->phis
.size(); ++i
) {
424 Node
* phiNode
= block
->phis
[i
];
425 if (!phiNode
->shouldGenerate() && phiNodeDumpMode
== DumpLivePhisOnly
)
427 out
.print(" @", phiNode
->index(), "<", phiNode
->local(), ",", phiNode
->refCount(), ">->(");
428 if (phiNode
->child1()) {
429 out
.print("@", phiNode
->child1()->index());
430 if (phiNode
->child2()) {
431 out
.print(", @", phiNode
->child2()->index());
432 if (phiNode
->child3())
433 out
.print(", @", phiNode
->child3()->index());
436 out
.print(")", i
+ 1 < block
->phis
.size() ? "," : "");
442 void Graph::dump(PrintStream
& out
, DumpContext
* context
)
444 DumpContext myContext
;
445 myContext
.graph
= this;
447 context
= &myContext
;
450 out
.print("DFG for ", CodeBlockWithJITType(m_codeBlock
, JITCode::DFGJIT
), ":\n");
451 out
.print(" Fixpoint state: ", m_fixpointState
, "; Form: ", m_form
, "; Unification state: ", m_unificationState
, "; Ref count state: ", m_refCountState
, "\n");
453 out
.print(" Argument formats: ", listDump(m_argumentFormats
), "\n");
455 out
.print(" Arguments: ", listDump(m_arguments
), "\n");
459 for (size_t b
= 0; b
< m_blocks
.size(); ++b
) {
460 BasicBlock
* block
= m_blocks
[b
].get();
463 dumpBlockHeader(out
, "", block
, DumpAllPhis
, context
);
464 out
.print(" States: ", block
->cfaStructureClobberStateAtHead
);
465 if (!block
->cfaHasVisited
)
466 out
.print(", CurrentlyCFAUnreachable");
467 if (!block
->intersectionOfCFAHasVisited
)
468 out
.print(", CFAUnreachable");
473 out
.print(" Vars Before: ");
474 if (block
->cfaHasVisited
)
475 out
.print(inContext(block
->valuesAtHead
, context
));
477 out
.print("<empty>");
479 out
.print(" Intersected Vars Before: ");
480 if (block
->intersectionOfCFAHasVisited
)
481 out
.print(inContext(block
->intersectionOfPastValuesAtHead
, context
));
483 out
.print("<empty>");
485 out
.print(" Var Links: ", block
->variablesAtHead
, "\n");
490 RELEASE_ASSERT(block
->ssa
);
491 out
.print(" Availability: ", block
->ssa
->availabilityAtHead
, "\n");
492 out
.print(" Live: ", nodeListDump(block
->ssa
->liveAtHead
), "\n");
493 out
.print(" Values: ", nodeMapDump(block
->ssa
->valuesAtHead
, context
), "\n");
496 for (size_t i
= 0; i
< block
->size(); ++i
) {
497 dumpCodeOrigin(out
, "", lastNode
, block
->at(i
), context
);
498 dump(out
, "", block
->at(i
), context
);
499 lastNode
= block
->at(i
);
501 out
.print(" States: ", block
->cfaBranchDirection
, ", ", block
->cfaStructureClobberStateAtTail
);
502 if (!block
->cfaDidFinish
)
503 out
.print(", CFAInvalidated");
508 out
.print(" Vars After: ");
509 if (block
->cfaHasVisited
)
510 out
.print(inContext(block
->valuesAtTail
, context
));
512 out
.print("<empty>");
514 out
.print(" Var Links: ", block
->variablesAtTail
, "\n");
519 RELEASE_ASSERT(block
->ssa
);
520 out
.print(" Availability: ", block
->ssa
->availabilityAtTail
, "\n");
521 out
.print(" Live: ", nodeListDump(block
->ssa
->liveAtTail
), "\n");
522 out
.print(" Values: ", nodeMapDump(block
->ssa
->valuesAtTail
, context
), "\n");
528 out
.print("GC Values:\n");
529 for (FrozenValue
* value
: m_frozenValues
) {
530 if (value
->pointsToHeap())
531 out
.print(" ", inContext(*value
, &myContext
), "\n");
534 if (!myContext
.isEmpty()) {
// Converts the graph out of ThreadedCPS form by severing Phi edges.
// FRAGMENT: extraction dropped the early `return;` that must follow the
// LoadStore/SSA check (those forms have nothing to dethread), plus braces
// and the form-transition tail — confirm against upstream.
540 void Graph::dethread()
// Nothing to do unless the graph is currently in threaded form.
542 if (m_form
== LoadStore
|| m_form
== SSA
)
545 if (logCompilationChanges())
546 dataLog("Dethreading DFG graph.\n");
// Scope-timed sampling of how long dethreading takes.
548 SamplingRegion
samplingRegion("DFG Dethreading");
550 for (BlockIndex blockIndex
= m_blocks
.size(); blockIndex
--;) {
551 BasicBlock
* block
= m_blocks
[blockIndex
].get();
// NOTE(review): 2-line gap (552-553) in original numbering here —
// presumably a null-block guard; verify upstream.
554 for (unsigned phiIndex
= block
->phis
.size(); phiIndex
--;) {
555 Node
* phi
= block
->phis
[phiIndex
];
// Severing the Phi's children is what undoes the CPS threading.
556 phi
->children
.reset();
563 void Graph::handleSuccessor(Vector
<BasicBlock
*, 16>& worklist
, BasicBlock
* block
, BasicBlock
* successor
)
565 if (!successor
->isReachable
) {
566 successor
->isReachable
= true;
567 worklist
.append(successor
);
570 successor
->predecessors
.append(block
);
573 void Graph::determineReachability()
575 Vector
<BasicBlock
*, 16> worklist
;
576 worklist
.append(block(0));
577 block(0)->isReachable
= true;
578 while (!worklist
.isEmpty()) {
579 BasicBlock
* block
= worklist
.takeLast();
580 for (unsigned i
= block
->numSuccessors(); i
--;)
581 handleSuccessor(worklist
, block
, block
->successor(i
));
585 void Graph::resetReachability()
587 for (BlockIndex blockIndex
= m_blocks
.size(); blockIndex
--;) {
588 BasicBlock
* block
= m_blocks
[blockIndex
].get();
591 block
->isReachable
= false;
592 block
->predecessors
.clear();
595 determineReachability();
600 class RefCountCalculator
{
602 RefCountCalculator(Graph
& graph
)
609 // First reset the counts to 0 for all nodes.
610 for (BlockIndex blockIndex
= 0; blockIndex
< m_graph
.numBlocks(); ++blockIndex
) {
611 BasicBlock
* block
= m_graph
.block(blockIndex
);
614 for (unsigned indexInBlock
= block
->size(); indexInBlock
--;)
615 block
->at(indexInBlock
)->setRefCount(0);
616 for (unsigned phiIndex
= block
->phis
.size(); phiIndex
--;)
617 block
->phis
[phiIndex
]->setRefCount(0);
620 // Now find the roots:
621 // - Nodes that are must-generate.
622 // - Nodes that are reachable from type checks.
623 // Set their ref counts to 1 and put them on the worklist.
624 for (BlockIndex blockIndex
= 0; blockIndex
< m_graph
.numBlocks(); ++blockIndex
) {
625 BasicBlock
* block
= m_graph
.block(blockIndex
);
628 for (unsigned indexInBlock
= block
->size(); indexInBlock
--;) {
629 Node
* node
= block
->at(indexInBlock
);
630 DFG_NODE_DO_TO_CHILDREN(m_graph
, node
, findTypeCheckRoot
);
631 if (!(node
->flags() & NodeMustGenerate
))
633 if (!node
->postfixRef())
634 m_worklist
.append(node
);
638 while (!m_worklist
.isEmpty()) {
639 while (!m_worklist
.isEmpty()) {
640 Node
* node
= m_worklist
.last();
641 m_worklist
.removeLast();
642 ASSERT(node
->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
643 DFG_NODE_DO_TO_CHILDREN(m_graph
, node
, countEdge
);
646 if (m_graph
.m_form
== SSA
) {
647 // Find Phi->Upsilon edges, which are represented as meta-data in the
649 for (BlockIndex blockIndex
= m_graph
.numBlocks(); blockIndex
--;) {
650 BasicBlock
* block
= m_graph
.block(blockIndex
);
653 for (unsigned nodeIndex
= block
->size(); nodeIndex
--;) {
654 Node
* node
= block
->at(nodeIndex
);
655 if (node
->op() != Upsilon
)
657 if (node
->shouldGenerate())
659 if (node
->phi()->shouldGenerate())
668 void findTypeCheckRoot(Node
*, Edge edge
)
670 // We may have an "unproved" untyped use for code that is unreachable. The CFA
671 // will just not have gotten around to it.
672 if (edge
.isProved() || edge
.willNotHaveCheck())
674 if (!edge
->postfixRef())
675 m_worklist
.append(edge
.node());
678 void countNode(Node
* node
)
680 if (node
->postfixRef())
682 m_worklist
.append(node
);
685 void countEdge(Node
*, Edge edge
)
687 // Don't count edges that are already counted for their type checks.
688 if (!(edge
.isProved() || edge
.willNotHaveCheck()))
690 countNode(edge
.node());
694 Vector
<Node
*, 128> m_worklist
;
697 } // anonymous namespace
699 void Graph::computeRefCounts()
701 RefCountCalculator
calculator(*this);
702 calculator
.calculate();
// Returns all of `block`'s Phi nodes and body nodes to the node allocator.
// FRAGMENT: the tail of this function (presumably the call that removes the
// block itself from the graph) was dropped by extraction — confirm upstream.
705 void Graph::killBlockAndItsContents(BasicBlock
* block
)
// Free Phis first...
707 for (unsigned phiIndex
= block
->phis
.size(); phiIndex
--;)
708 m_allocator
.free(block
->phis
[phiIndex
]);
// ...then the body nodes, in reverse order.
709 for (unsigned nodeIndex
= block
->size(); nodeIndex
--;)
710 m_allocator
.free(block
->at(nodeIndex
));
715 void Graph::killUnreachableBlocks()
717 for (BlockIndex blockIndex
= 0; blockIndex
< numBlocks(); ++blockIndex
) {
718 BasicBlock
* block
= this->block(blockIndex
);
721 if (block
->isReachable
)
724 killBlockAndItsContents(block
);
728 void Graph::invalidateCFG()
730 m_dominators
.invalidate();
731 m_naturalLoops
.invalidate();
732 m_prePostNumbering
.invalidate();
// Replaces uses of the GetLocal for `variableAccessData` with `newGetLocal`
// from `startIndexInBlock` onward, stopping at the first redefinition.
// FRAGMENT: the switch's case labels, `break`s, default branch, and the
// loop tail that honors `shouldContinue` were dropped by extraction —
// restore from upstream before relying on this.
735 void Graph::substituteGetLocal(BasicBlock
& block
, unsigned startIndexInBlock
, VariableAccessData
* variableAccessData
, Node
* newGetLocal
)
737 for (unsigned indexInBlock
= startIndexInBlock
; indexInBlock
< block
.size(); ++indexInBlock
) {
738 Node
* node
= block
[indexInBlock
];
// Set to false once substitution must stop (a redefinition was seen).
739 bool shouldContinue
= true;
740 switch (node
->op()) {
// Case: a SetLocal of the same variable ends the substitution range.
742 if (node
->local() == variableAccessData
->local())
743 shouldContinue
= false;
// Case: a GetLocal — only relevant if it reads the same variable.
748 if (node
->variableAccessData() != variableAccessData
)
// Redirect this use to the replacement node.
750 substitute(block
, indexInBlock
, node
, newGetLocal
);
// Keep variablesAtTail consistent if this node was the tail entry.
751 Node
* oldTailNode
= block
.variablesAtTail
.operand(variableAccessData
->local());
752 if (oldTailNode
== node
)
753 block
.variablesAtTail
.operand(variableAccessData
->local()) = newGetLocal
;
754 shouldContinue
= false;
766 BlockList
Graph::blocksInPreOrder()
769 BlockWorklist worklist
;
770 worklist
.push(block(0));
771 while (BasicBlock
* block
= worklist
.pop()) {
772 result
.append(block
);
773 for (unsigned i
= block
->numSuccessors(); i
--;)
774 worklist
.push(block
->successor(i
));
// Returns the graph's blocks in post-order using an explicit two-phase
// (pre/post visitation) worklist. FRAGMENT: the `result` declaration, the
// switch's case labels (pre-visit vs. post-visit), `break`s, and the final
// return were dropped by extraction — restore from upstream.
779 BlockList
Graph::blocksInPostOrder()
782 PostOrderBlockWorklist worklist
;
783 worklist
.push(block(0));
784 while (BlockWithOrder item
= worklist
.pop()) {
785 switch (item
.order
) {
// Pre-visit: schedule the post-visit, then push successors.
787 worklist
.pushPost(item
.block
)
;
788 for (unsigned i
= item
.block
->numSuccessors(); i
--;)
789 worklist
.push(item
.block
->successor(i
));
// Post-visit: all successors done — emit the block.
792 result
.append(item
.block
);
799 void Graph::clearReplacements()
801 for (BlockIndex blockIndex
= numBlocks(); blockIndex
--;) {
802 BasicBlock
* block
= m_blocks
[blockIndex
].get();
805 for (unsigned phiIndex
= block
->phis
.size(); phiIndex
--;)
806 block
->phis
[phiIndex
]->setReplacement(nullptr);
807 for (unsigned nodeIndex
= block
->size(); nodeIndex
--;)
808 block
->at(nodeIndex
)->setReplacement(nullptr);
812 void Graph::clearEpochs()
814 for (BlockIndex blockIndex
= numBlocks(); blockIndex
--;) {
815 BasicBlock
* block
= m_blocks
[blockIndex
].get();
818 for (unsigned phiIndex
= block
->phis
.size(); phiIndex
--;)
819 block
->phis
[phiIndex
]->setEpoch(Epoch());
820 for (unsigned nodeIndex
= block
->size(); nodeIndex
--;)
821 block
->at(nodeIndex
)->setEpoch(Epoch());
825 void Graph::initializeNodeOwners()
827 for (BlockIndex blockIndex
= numBlocks(); blockIndex
--;) {
828 BasicBlock
* block
= m_blocks
[blockIndex
].get();
831 for (unsigned phiIndex
= block
->phis
.size(); phiIndex
--;)
832 block
->phis
[phiIndex
]->owner
= block
;
833 for (unsigned nodeIndex
= block
->size(); nodeIndex
--;)
834 block
->at(nodeIndex
)->owner
= block
;
838 void Graph::clearFlagsOnAllNodes(NodeFlags flags
)
840 for (BlockIndex blockIndex
= numBlocks(); blockIndex
--;) {
841 BasicBlock
* block
= m_blocks
[blockIndex
].get();
844 for (unsigned phiIndex
= block
->phis
.size(); phiIndex
--;)
845 block
->phis
[phiIndex
]->clearFlags(flags
);
846 for (unsigned nodeIndex
= block
->size(); nodeIndex
--;)
847 block
->at(nodeIndex
)->clearFlags(flags
);
// Returns (computing and caching on first use) the full bytecode liveness
// for `codeBlock`. FRAGMENT: the cache-hit `return` after the find() check
// and the final `return result;` were dropped by extraction — restore from
// upstream.
851 FullBytecodeLiveness
& Graph::livenessFor(CodeBlock
* codeBlock
)
// Look for a previously computed result in the per-graph cache.
853 HashMap
<CodeBlock
*, std::unique_ptr
<FullBytecodeLiveness
>>::iterator iter
= m_bytecodeLiveness
.find(codeBlock
);
854 if (iter
!= m_bytecodeLiveness
.end())
// Cache miss: run the liveness analysis now.
857 std::unique_ptr
<FullBytecodeLiveness
> liveness
= std::make_unique
<FullBytecodeLiveness
>();
858 codeBlock
->livenessAnalysis().computeFullLiveness(*liveness
);
// Keep a reference before ownership moves into the cache.
859 FullBytecodeLiveness
& result
= *liveness
;
860 m_bytecodeLiveness
.add(codeBlock
, WTF::move(liveness
));
864 FullBytecodeLiveness
& Graph::livenessFor(InlineCallFrame
* inlineCallFrame
)
866 return livenessFor(baselineCodeBlockFor(inlineCallFrame
));
// Returns (computing and caching on first use) the bytecode kill sets for
// `codeBlock`. Mirrors livenessFor(CodeBlock*) above. FRAGMENT: the
// cache-hit `return` and the final `return result;` were dropped by
// extraction — restore from upstream.
869 BytecodeKills
& Graph::killsFor(CodeBlock
* codeBlock
)
// Check the per-graph cache first.
871 HashMap
<CodeBlock
*, std::unique_ptr
<BytecodeKills
>>::iterator iter
= m_bytecodeKills
.find(codeBlock
);
872 if (iter
!= m_bytecodeKills
.end())
// Cache miss: compute kill sets from the liveness analysis.
875 std::unique_ptr
<BytecodeKills
> kills
= std::make_unique
<BytecodeKills
>();
876 codeBlock
->livenessAnalysis().computeKills(*kills
);
// Keep a reference before ownership moves into the cache.
877 BytecodeKills
& result
= *kills
;
878 m_bytecodeKills
.add(codeBlock
, WTF::move(kills
));
882 BytecodeKills
& Graph::killsFor(InlineCallFrame
* inlineCallFrame
)
884 return killsFor(baselineCodeBlockFor(inlineCallFrame
));
887 bool Graph::isLiveInBytecode(VirtualRegister operand
, CodeOrigin codeOrigin
)
890 VirtualRegister reg
= VirtualRegister(
891 operand
.offset() - codeOrigin
.stackOffset());
893 if (operand
.offset() < codeOrigin
.stackOffset() + JSStack::CallFrameHeaderSize
) {
894 if (reg
.isArgument()) {
895 RELEASE_ASSERT(reg
.offset() < JSStack::CallFrameHeaderSize
);
897 if (codeOrigin
.inlineCallFrame
->isClosureCall
898 && reg
.offset() == JSStack::Callee
)
901 if (codeOrigin
.inlineCallFrame
->isVarargs()
902 && reg
.offset() == JSStack::ArgumentCount
)
908 return livenessFor(codeOrigin
.inlineCallFrame
).operandIsLive(
909 reg
.offset(), codeOrigin
.bytecodeIndex
);
912 InlineCallFrame
* inlineCallFrame
= codeOrigin
.inlineCallFrame
;
913 if (!inlineCallFrame
)
916 // Arguments are always live. This would be redundant if it wasn't for our
917 // op_call_varargs inlining.
919 && static_cast<size_t>(reg
.toArgument()) < inlineCallFrame
->arguments
.size())
922 codeOrigin
= inlineCallFrame
->caller
;
// Builds a bit vector of the locals live in bytecode at `codeOrigin`.
// FRAGMENT: the declaration of `result`, the closing of the lambda/call,
// and the final `return result;` were dropped by extraction — restore from
// upstream.
928 BitVector
Graph::localsLiveInBytecode(CodeOrigin codeOrigin
)
// Size the vector to the number of locals at the root block's head.
931 result
.ensureSize(block(0)->variablesAtHead
.numberOfLocals());
// Set a bit for every live local reported by the liveness walk.
932 forAllLocalsLiveInBytecode(
934 [&] (VirtualRegister reg
) {
935 ASSERT(reg
.isLocal());
936 result
.quickSet(reg
.toLocal());
941 unsigned Graph::frameRegisterCount()
943 unsigned result
= m_nextMachineLocal
+ std::max(m_parameterSlots
, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters
));
944 return roundLocalRegisterCountForFramePointerOffset(result
);
947 unsigned Graph::stackPointerOffset()
949 return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
// Computes the register count needed to OSR-exit to baseline: the max over
// the profiled block and every inlined frame of that frame's stack offset
// plus its baseline frame size. FRAGMENT: the final `return count;` was
// dropped by extraction — restore from upstream.
952 unsigned Graph::requiredRegisterCountForExit()
// Start with the outermost (profiled) block's baseline frame size.
954 unsigned count
= JIT::frameRegisterCountFor(m_profiledBlock
);
955 for (InlineCallFrameSet::iterator iter
= m_plan
.inlineCallFrames
->begin(); !!iter
; ++iter
) {
956 InlineCallFrame
* inlineCallFrame
= *iter
;
957 CodeBlock
* codeBlock
= baselineCodeBlockForInlineCallFrame(inlineCallFrame
);
// An inlined frame needs its own stack offset plus its baseline frame.
958 unsigned requiredCount
= VirtualRegister(inlineCallFrame
->stackOffset
).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock
);
959 count
= std::max(count
, requiredCount
);
964 unsigned Graph::requiredRegisterCountForExecutionAndExit()
966 return std::max(frameRegisterCount(), requiredRegisterCountForExit());
969 JSValue
Graph::tryGetConstantProperty(
970 JSValue base
, const StructureSet
& structureSet
, PropertyOffset offset
)
972 if (!base
|| !base
.isObject())
975 JSObject
* object
= asObject(base
);
977 for (unsigned i
= structureSet
.size(); i
--;) {
978 Structure
* structure
= structureSet
[i
];
979 WatchpointSet
* set
= structure
->propertyReplacementWatchpointSet(offset
);
980 if (!set
|| !set
->isStillValid())
983 ASSERT(structure
->isValidOffset(offset
));
984 ASSERT(!structure
->isUncacheableDictionary());
986 watchpoints().addLazily(set
);
989 // What follows may require some extra thought. We need this load to load a valid JSValue. If
990 // our profiling makes sense and we're still on track to generate code that won't be
991 // invalidated, then we have nothing to worry about. We do, however, have to worry about
992 // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
995 // One argument in favor of this code is that it should definitely work because the butterfly
996 // is always set before the structure. However, we don't currently have a fence between those
997 // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
998 // So, for this to fail, you'd need an access on a constant object pointer such that the inline
999 // caches told us that the object had a structure that it did not *yet* have, and then later,
1000 // the object transitioned to that structure that the inline caches had alraedy seen. And then
1001 // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
1002 // this is worth revisiting but it isn't worth losing sleep over. Filed:
1003 // https://bugs.webkit.org/show_bug.cgi?id=134641
1005 // For now, we just do the minimal thing: defend against the structure right now being
1006 // incompatible with the getDirect we're trying to do. The easiest way to do that is to
1007 // determine if the structure belongs to the proven set.
1009 if (!structureSet
.contains(object
->structure()))
1012 return object
->getDirect(offset
);
1015 JSValue
Graph::tryGetConstantProperty(JSValue base
, Structure
* structure
, PropertyOffset offset
)
1017 return tryGetConstantProperty(base
, StructureSet(structure
), offset
);
1020 JSValue
Graph::tryGetConstantProperty(
1021 JSValue base
, const StructureAbstractValue
& structure
, PropertyOffset offset
)
1023 if (structure
.isTop() || structure
.isClobbered())
1026 return tryGetConstantProperty(base
, structure
.set(), offset
);
1029 JSValue
Graph::tryGetConstantProperty(const AbstractValue
& base
, PropertyOffset offset
)
1031 return tryGetConstantProperty(base
.m_value
, base
.m_structure
, offset
);
1034 JSValue
Graph::tryGetConstantClosureVar(JSValue base
, ScopeOffset offset
)
1036 // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
1041 JSLexicalEnvironment
* activation
= jsDynamicCast
<JSLexicalEnvironment
*>(base
);
1045 SymbolTable
* symbolTable
= activation
->symbolTable();
1049 ConcurrentJITLocker
locker(symbolTable
->m_lock
);
1051 SymbolTableEntry
* entry
= symbolTable
->entryFor(locker
, offset
);
1055 set
= entry
->watchpointSet();
1059 if (set
->state() != IsWatched
)
1062 ASSERT(entry
->scopeOffset() == offset
);
1063 value
= activation
->variableAt(offset
).get();
1068 watchpoints().addLazily(set
);
1073 JSValue
Graph::tryGetConstantClosureVar(const AbstractValue
& value
, ScopeOffset offset
)
1075 return tryGetConstantClosureVar(value
.m_value
, offset
);
1078 JSValue
Graph::tryGetConstantClosureVar(Node
* node
, ScopeOffset offset
)
1080 if (!node
->hasConstant())
1082 return tryGetConstantClosureVar(node
->asJSValue(), offset
);
1085 JSArrayBufferView
* Graph::tryGetFoldableView(JSValue value
)
1089 JSArrayBufferView
* view
= jsDynamicCast
<JSArrayBufferView
*>(value
);
1092 if (!view
->length())
1094 WTF::loadLoadFence();
1095 watchpoints().addLazily(view
);
1099 JSArrayBufferView
* Graph::tryGetFoldableView(JSValue value
, ArrayMode arrayMode
)
1101 if (arrayMode
.typedArrayType() == NotTypedArray
)
1103 return tryGetFoldableView(value
);
1106 void Graph::registerFrozenValues()
1108 m_codeBlock
->constants().resize(0);
1109 m_codeBlock
->constantsSourceCodeRepresentation().resize(0);
1110 for (FrozenValue
* value
: m_frozenValues
) {
1111 if (!value
->pointsToHeap())
1114 ASSERT(value
->structure());
1115 ASSERT(m_plan
.weakReferences
.contains(value
->structure()));
1117 switch (value
->strength()) {
1119 m_plan
.weakReferences
.addLazily(value
->value().asCell());
1123 unsigned constantIndex
= m_codeBlock
->addConstantLazily();
1124 initializeLazyWriteBarrierForConstant(
1125 m_plan
.writeBarriers
,
1126 m_codeBlock
->constants()[constantIndex
],
1129 m_codeBlock
->ownerExecutable(),
1134 m_codeBlock
->constants().shrinkToFit();
1135 m_codeBlock
->constantsSourceCodeRepresentation().shrinkToFit();
1138 void Graph::visitChildren(SlotVisitor
& visitor
)
1140 for (FrozenValue
* value
: m_frozenValues
) {
1141 visitor
.appendUnbarrieredReadOnlyValue(value
->value());
1142 visitor
.appendUnbarrieredReadOnlyPointer(value
->structure());
1145 for (BlockIndex blockIndex
= numBlocks(); blockIndex
--;) {
1146 BasicBlock
* block
= this->block(blockIndex
);
1150 for (unsigned nodeIndex
= 0; nodeIndex
< block
->size(); ++nodeIndex
) {
1151 Node
* node
= block
->at(nodeIndex
);
1153 switch (node
->op()) {
1154 case CheckStructure
:
1155 for (unsigned i
= node
->structureSet().size(); i
--;)
1156 visitor
.appendUnbarrieredReadOnlyPointer(node
->structureSet()[i
]);
1160 case ArrayifyToStructure
:
1161 case NewStringObject
:
1162 visitor
.appendUnbarrieredReadOnlyPointer(node
->structure());
1166 case AllocatePropertyStorage
:
1167 case ReallocatePropertyStorage
:
1168 visitor
.appendUnbarrieredReadOnlyPointer(
1169 node
->transition()->previous
);
1170 visitor
.appendUnbarrieredReadOnlyPointer(
1171 node
->transition()->next
);
1174 case MultiGetByOffset
:
1175 for (unsigned i
= node
->multiGetByOffsetData().variants
.size(); i
--;) {
1176 GetByIdVariant
& variant
= node
->multiGetByOffsetData().variants
[i
];
1177 const StructureSet
& set
= variant
.structureSet();
1178 for (unsigned j
= set
.size(); j
--;)
1179 visitor
.appendUnbarrieredReadOnlyPointer(set
[j
]);
1181 // Don't need to mark anything in the structure chain because that would
1182 // have been decomposed into CheckStructure's. Don't need to mark the
1183 // callLinkStatus because we wouldn't use MultiGetByOffset if any of the
1184 // variants did that.
1185 ASSERT(!variant
.callLinkStatus());
1189 case MultiPutByOffset
:
1190 for (unsigned i
= node
->multiPutByOffsetData().variants
.size(); i
--;) {
1191 PutByIdVariant
& variant
= node
->multiPutByOffsetData().variants
[i
];
1192 const StructureSet
& set
= variant
.oldStructure();
1193 for (unsigned j
= set
.size(); j
--;)
1194 visitor
.appendUnbarrieredReadOnlyPointer(set
[j
]);
1195 if (variant
.kind() == PutByIdVariant::Transition
)
1196 visitor
.appendUnbarrieredReadOnlyPointer(variant
.newStructure());
1207 FrozenValue
* Graph::freeze(JSValue value
)
1209 if (UNLIKELY(!value
))
1210 return FrozenValue::emptySingleton();
1212 auto result
= m_frozenValueMap
.add(JSValue::encode(value
), nullptr);
1213 if (LIKELY(!result
.isNewEntry
))
1214 return result
.iterator
->value
;
1216 if (value
.isUInt32())
1217 m_uint32ValuesInUse
.append(value
.asUInt32());
1219 FrozenValue frozenValue
= FrozenValue::freeze(value
);
1220 if (Structure
* structure
= frozenValue
.structure())
1221 registerStructure(structure
);
1223 return result
.iterator
->value
= m_frozenValues
.add(frozenValue
);
1226 FrozenValue
* Graph::freezeStrong(JSValue value
)
1228 FrozenValue
* result
= freeze(value
);
1229 result
->strengthenTo(StrongValue
);
1233 void Graph::convertToConstant(Node
* node
, FrozenValue
* value
)
1235 if (value
->structure())
1236 assertIsRegistered(value
->structure());
1237 node
->convertToConstant(value
);
1240 void Graph::convertToConstant(Node
* node
, JSValue value
)
1242 convertToConstant(node
, freeze(value
));
1245 void Graph::convertToStrongConstant(Node
* node
, JSValue value
)
1247 convertToConstant(node
, freezeStrong(value
));
1250 StructureRegistrationResult
Graph::registerStructure(Structure
* structure
)
1252 m_plan
.weakReferences
.addLazily(structure
);
1253 if (m_plan
.watchpoints
.consider(structure
))
1254 return StructureRegisteredAndWatched
;
1255 return StructureRegisteredNormally
;
1258 void Graph::assertIsRegistered(Structure
* structure
)
1260 // It's convenient to be able to call this with a maybe-null structure.
1264 if (m_structureRegistrationState
== HaveNotStartedRegistering
)
1267 DFG_ASSERT(*this, nullptr, m_plan
.weakReferences
.contains(structure
));
1269 if (!structure
->dfgShouldWatch())
1271 if (watchpoints().isWatched(structure
->transitionWatchpointSet()))
1274 DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure
), " is watchable but isn't being watched.").data());
1277 NO_RETURN_DUE_TO_CRASH
static void crash(
1278 Graph
& graph
, const CString
& whileText
, const char* file
, int line
, const char* function
,
1279 const char* assertion
)
1282 dataLog("DFG ASSERTION FAILED: ", assertion
, "\n");
1283 dataLog(file
, "(", line
, ") : ", function
, "\n");
1286 dataLog("Graph at time of failure:\n");
1289 dataLog("DFG ASSERTION FAILED: ", assertion
, "\n");
1290 dataLog(file
, "(", line
, ") : ", function
, "\n");
1291 CRASH_WITH_SECURITY_IMPLICATION();
1294 void Graph::handleAssertionFailure(
1295 std::nullptr_t
, const char* file
, int line
, const char* function
, const char* assertion
)
1297 crash(*this, "", file
, line
, function
, assertion
);
1300 void Graph::handleAssertionFailure(
1301 Node
* node
, const char* file
, int line
, const char* function
, const char* assertion
)
1303 crash(*this, toCString("While handling node ", node
, "\n\n"), file
, line
, function
, assertion
);
1306 void Graph::handleAssertionFailure(
1307 BasicBlock
* block
, const char* file
, int line
, const char* function
, const char* assertion
)
1309 crash(*this, toCString("While handling block ", pointerDump(block
), "\n\n"), file
, line
, function
, assertion
);
1312 ValueProfile
* Graph::valueProfileFor(Node
* node
)
1317 CodeBlock
* profiledBlock
= baselineCodeBlockFor(node
->origin
.semantic
);
1319 if (node
->hasLocal(*this)) {
1320 if (!node
->local().isArgument())
1322 int argument
= node
->local().toArgument();
1323 Node
* argumentNode
= m_arguments
[argument
];
1326 if (node
->variableAccessData() != argumentNode
->variableAccessData())
1328 return profiledBlock
->valueProfileForArgument(argument
);
1331 if (node
->hasHeapPrediction())
1332 return profiledBlock
->valueProfileForBytecodeOffset(node
->origin
.semantic
.bytecodeIndex
);
1337 MethodOfGettingAValueProfile
Graph::methodOfGettingAValueProfileFor(Node
* node
)
1340 return MethodOfGettingAValueProfile();
1342 if (ValueProfile
* valueProfile
= valueProfileFor(node
))
1343 return MethodOfGettingAValueProfile(valueProfile
);
1345 if (node
->op() == GetLocal
) {
1346 CodeBlock
* profiledBlock
= baselineCodeBlockFor(node
->origin
.semantic
);
1348 return MethodOfGettingAValueProfile::fromLazyOperand(
1350 LazyOperandValueProfileKey(
1351 node
->origin
.semantic
.bytecodeIndex
, node
->local()));
1354 return MethodOfGettingAValueProfile();
1357 } } // namespace JSC::DFG
1359 #endif // ENABLE(DFG_JIT)