/*
 * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef DFGGraph_h
#define DFGGraph_h

#if ENABLE(DFG_JIT)

#include "AssemblyHelpers.h"
#include "CodeBlock.h"
#include "DFGArgumentPosition.h"
#include "DFGBasicBlock.h"
#include "DFGDominators.h"
#include "DFGLongLivedState.h"
#include "DFGNaturalLoops.h"
#include "DFGNode.h"
#include "DFGNodeAllocator.h"
#include "DFGPlan.h"
#include "DFGScannable.h"
#include "JSStack.h"
#include "MethodOfGettingAValueProfile.h"
#include <unordered_map>
#include <wtf/BitVector.h>
#include <wtf/HashMap.h>
#include <wtf/Vector.h>
#include <wtf/StdLibExtras.h>

namespace JSC {

class CodeBlock;
class ExecState;

namespace DFG {

struct StorageAccessData {
    PropertyOffset offset;
    unsigned identifierNumber;
};

struct InlineVariableData {
    InlineCallFrame* inlineCallFrame;
    unsigned argumentPositionStart;
    VariableAccessData* calleeVariable;
};

enum AddSpeculationMode {
    DontSpeculateInt32,
    SpeculateInt32AndTruncateConstants,
    SpeculateInt32
};

//
// === Graph ===
//
// The dataflow graph is an ordered vector of nodes.
// The order may be significant for nodes with side-effects (property accesses, value conversions).
// Nodes that are 'dead' remain in the vector with refCount 0.
class Graph : public virtual Scannable {
public:
    Graph(VM&, Plan&, LongLivedState&);
    ~Graph();

    void changeChild(Edge& edge, Node* newNode)
    {
        edge.setNode(newNode);
    }

    void changeEdge(Edge& edge, Edge newEdge)
    {
        edge = newEdge;
    }

    void compareAndSwap(Edge& edge, Node* oldNode, Node* newNode)
    {
        if (edge.node() != oldNode)
            return;
        changeChild(edge, newNode);
    }

    void compareAndSwap(Edge& edge, Edge oldEdge, Edge newEdge)
    {
        if (edge != oldEdge)
            return;
        changeEdge(edge, newEdge);
    }

    void performSubstitution(Node* node)
    {
        if (node->flags() & NodeHasVarArgs) {
            for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++)
                performSubstitutionForEdge(m_varArgChildren[childIdx]);
        } else {
            performSubstitutionForEdge(node->child1());
            performSubstitutionForEdge(node->child2());
            performSubstitutionForEdge(node->child3());
        }
    }

    void performSubstitutionForEdge(Edge& child)
    {
        // Check if this operand is actually unused.
        if (!child)
            return;

        // Check if there is any replacement.
        Node* replacement = child->misc.replacement;
        if (!replacement)
            return;

        child.setNode(replacement);

        // There is definitely a replacement. Assert that the replacement does not
        // have a replacement.
        ASSERT(!child->misc.replacement);
    }

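    // Illustrative sketch of how the substitution helpers above are typically driven.
    // This is a hypothetical caller, not code from any particular phase: 'graph' is a
    // Graph& held by the calling phase, and the middle step stands in for whatever
    // logic decides which nodes get folded away.
    //
    //     graph.clearReplacements();
    //     // ... record foldedNode->misc.replacement = survivingNode for redundant nodes ...
    //     for (BlockIndex blockIndex = 0; blockIndex < graph.numBlocks(); ++blockIndex) {
    //         BasicBlock* block = graph.block(blockIndex);
    //         if (!block)
    //             continue;
    //         for (unsigned indexInBlock = 0; indexInBlock < block->size(); ++indexInBlock)
    //             graph.performSubstitution(block->at(indexInBlock));
    //     }
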
    template<typename... Params>
    Node* addNode(SpeculatedType type, Params... params)
    {
        Node* node = new (m_allocator) Node(params...);
        node->predict(type);
        return node;
    }

    void dethread();

    void convertToConstant(Node* node, unsigned constantNumber)
    {
        if (node->op() == GetLocal)
            dethread();
        else
            ASSERT(!node->hasVariableAccessData(*this));
        node->convertToConstant(constantNumber);
    }

    unsigned constantRegisterForConstant(JSValue value)
    {
        unsigned constantRegister;
        if (!m_codeBlock->findConstant(value, constantRegister)) {
            constantRegister = m_codeBlock->addConstantLazily();
            initializeLazyWriteBarrierForConstant(
                m_plan.writeBarriers,
                m_codeBlock->constants()[constantRegister],
                m_codeBlock,
                constantRegister,
                m_codeBlock->ownerExecutable(),
                value);
        }
        return constantRegister;
    }

    void convertToConstant(Node* node, JSValue value)
    {
        if (value.isObject())
            node->convertToWeakConstant(value.asCell());
        else
            convertToConstant(node, constantRegisterForConstant(value));
    }

    // CodeBlock is optional, but may allow additional information to be dumped (e.g. Identifier names).
    void dump(PrintStream& = WTF::dataFile(), DumpContext* = 0);
    enum PhiNodeDumpMode { DumpLivePhisOnly, DumpAllPhis };
    void dumpBlockHeader(PrintStream&, const char* prefix, BasicBlock*, PhiNodeDumpMode, DumpContext*);
    void dump(PrintStream&, Edge);
    void dump(PrintStream&, const char* prefix, Node*, DumpContext* = 0);
    static int amountOfNodeWhiteSpace(Node*);
    static void printNodeWhiteSpace(PrintStream&, Node*);

    // Dump the code origin of the given node as a diff from the code origin of the
    // preceding node. Returns true if anything was printed.
    bool dumpCodeOrigin(PrintStream&, const char* prefix, Node* previousNode, Node* currentNode, DumpContext*);

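    // Illustrative sketch of the dump helpers above (hypothetical call sites; 'someNode'
    // is a stand-in). With no arguments, dump() writes the whole graph to WTF::dataFile().
    //
    //     dump();                                   // entire graph
    //     dump(WTF::dataFile(), "   ", someNode);   // one node, with a prefix, default DumpContext
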
    SpeculatedType getJSConstantSpeculation(Node* node)
    {
        return speculationFromValue(node->valueOfJSConstant(m_codeBlock));
    }

    AddSpeculationMode addSpeculationMode(Node* add, bool leftShouldSpeculateInt32, bool rightShouldSpeculateInt32, PredictionPass pass)
    {
        ASSERT(add->op() == ValueAdd || add->op() == ArithAdd || add->op() == ArithSub);

        RareCaseProfilingSource source = add->sourceFor(pass);

        Node* left = add->child1().node();
        Node* right = add->child2().node();

        if (left->hasConstant())
            return addImmediateShouldSpeculateInt32(add, rightShouldSpeculateInt32, left, source);
        if (right->hasConstant())
            return addImmediateShouldSpeculateInt32(add, leftShouldSpeculateInt32, right, source);

        return (leftShouldSpeculateInt32 && rightShouldSpeculateInt32 && add->canSpeculateInt32(source)) ? SpeculateInt32 : DontSpeculateInt32;
    }

    AddSpeculationMode valueAddSpeculationMode(Node* add, PredictionPass pass)
    {
        return addSpeculationMode(
            add,
            add->child1()->shouldSpeculateInt32OrBooleanExpectingDefined(),
            add->child2()->shouldSpeculateInt32OrBooleanExpectingDefined(),
            pass);
    }

    AddSpeculationMode arithAddSpeculationMode(Node* add, PredictionPass pass)
    {
        return addSpeculationMode(
            add,
            add->child1()->shouldSpeculateInt32OrBooleanForArithmetic(),
            add->child2()->shouldSpeculateInt32OrBooleanForArithmetic(),
            pass);
    }

    AddSpeculationMode addSpeculationMode(Node* add, PredictionPass pass)
    {
        if (add->op() == ValueAdd)
            return valueAddSpeculationMode(add, pass);

        return arithAddSpeculationMode(add, pass);
    }

    bool addShouldSpeculateInt32(Node* add, PredictionPass pass)
    {
        return addSpeculationMode(add, pass) != DontSpeculateInt32;
    }

    bool addShouldSpeculateMachineInt(Node* add)
    {
        if (!enableInt52())
            return false;

        Node* left = add->child1().node();
        Node* right = add->child2().node();

        // The operand-based check is the same for ValueAdd and for ArithAdd/ArithSub,
        // so there is no need to branch on the opcode here.
        bool speculation = Node::shouldSpeculateMachineInt(left, right);

        return speculation && !hasExitSite(add, Int52Overflow);
    }

    bool mulShouldSpeculateInt32(Node* mul, PredictionPass pass)
    {
        ASSERT(mul->op() == ArithMul);

        Node* left = mul->child1().node();
        Node* right = mul->child2().node();

        return Node::shouldSpeculateInt32OrBooleanForArithmetic(left, right)
            && mul->canSpeculateInt32(mul->sourceFor(pass));
    }

    bool mulShouldSpeculateMachineInt(Node* mul, PredictionPass pass)
    {
        ASSERT(mul->op() == ArithMul);

        if (!enableInt52())
            return false;

        Node* left = mul->child1().node();
        Node* right = mul->child2().node();

        return Node::shouldSpeculateMachineInt(left, right)
            && mul->canSpeculateInt52(pass)
            && !hasExitSite(mul, Int52Overflow);
    }

    bool negateShouldSpeculateInt32(Node* negate, PredictionPass pass)
    {
        ASSERT(negate->op() == ArithNegate);
        return negate->child1()->shouldSpeculateInt32OrBooleanForArithmetic()
            && negate->canSpeculateInt32(pass);
    }

    bool negateShouldSpeculateMachineInt(Node* negate, PredictionPass pass)
    {
        ASSERT(negate->op() == ArithNegate);
        if (!enableInt52())
            return false;
        return negate->child1()->shouldSpeculateMachineInt()
            && !hasExitSite(negate, Int52Overflow)
            && negate->canSpeculateInt52(pass);
    }

    VirtualRegister bytecodeRegisterForArgument(CodeOrigin codeOrigin, int argument)
    {
        return VirtualRegister(
            codeOrigin.inlineCallFrame->stackOffset +
            baselineCodeBlockFor(codeOrigin)->argumentIndexAfterCapture(argument));
    }

    // Helper methods to check nodes for constants.
    bool isConstant(Node* node)
    {
        return node->hasConstant();
    }
    bool isJSConstant(Node* node)
    {
        return node->hasConstant();
    }
    bool isInt32Constant(Node* node)
    {
        return node->isInt32Constant(m_codeBlock);
    }
    bool isDoubleConstant(Node* node)
    {
        return node->isDoubleConstant(m_codeBlock);
    }
    bool isNumberConstant(Node* node)
    {
        return node->isNumberConstant(m_codeBlock);
    }
    bool isMachineIntConstant(Node* node)
    {
        return node->isMachineIntConstant(m_codeBlock);
    }
    bool isBooleanConstant(Node* node)
    {
        return node->isBooleanConstant(m_codeBlock);
    }
    bool isCellConstant(Node* node)
    {
        if (!isJSConstant(node))
            return false;
        JSValue value = valueOfJSConstant(node);
        return value.isCell() && !!value;
    }
    bool isFunctionConstant(Node* node)
    {
        if (!isJSConstant(node))
            return false;
        if (!getJSFunction(valueOfJSConstant(node)))
            return false;
        return true;
    }
    bool isInternalFunctionConstant(Node* node)
    {
        if (!isJSConstant(node))
            return false;
        JSValue value = valueOfJSConstant(node);
        if (!value.isCell() || !value)
            return false;
        JSCell* cell = value.asCell();
        if (!cell->inherits(InternalFunction::info()))
            return false;
        return true;
    }
    // Helper methods get constant values from nodes.
    JSValue valueOfJSConstant(Node* node)
    {
        return node->valueOfJSConstant(m_codeBlock);
    }
    int32_t valueOfInt32Constant(Node* node)
    {
        JSValue value = valueOfJSConstant(node);
        if (!value.isInt32()) {
            dataLog("Value isn't int32: ", value, "\n");
            dump();
            RELEASE_ASSERT_NOT_REACHED();
        }
        return value.asInt32();
    }
    double valueOfNumberConstant(Node* node)
    {
        return valueOfJSConstant(node).asNumber();
    }
    bool valueOfBooleanConstant(Node* node)
    {
        return valueOfJSConstant(node).asBoolean();
    }
    JSFunction* valueOfFunctionConstant(Node* node)
    {
        JSCell* function = getJSFunction(valueOfJSConstant(node));
        ASSERT(function);
        return jsCast<JSFunction*>(function);
    }

    static const char *opName(NodeType);

    StructureSet* addStructureSet(const StructureSet& structureSet)
    {
        ASSERT(structureSet.size());
        m_structureSet.append(structureSet);
        return &m_structureSet.last();
    }

    StructureTransitionData* addStructureTransitionData(const StructureTransitionData& structureTransitionData)
    {
        m_structureTransitionData.append(structureTransitionData);
        return &m_structureTransitionData.last();
    }

    JSGlobalObject* globalObjectFor(CodeOrigin codeOrigin)
    {
        return m_codeBlock->globalObjectFor(codeOrigin);
    }

    JSObject* globalThisObjectFor(CodeOrigin codeOrigin)
    {
        JSGlobalObject* object = globalObjectFor(codeOrigin);
        return jsCast<JSObject*>(object->methodTable()->toThis(object, object->globalExec(), NotStrictMode));
    }

    ScriptExecutable* executableFor(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return m_codeBlock->ownerExecutable();

        return inlineCallFrame->executable.get();
    }

    ScriptExecutable* executableFor(const CodeOrigin& codeOrigin)
    {
        return executableFor(codeOrigin.inlineCallFrame);
    }

    CodeBlock* baselineCodeBlockFor(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return m_profiledBlock;
        return baselineCodeBlockForInlineCallFrame(inlineCallFrame);
    }

    CodeBlock* baselineCodeBlockFor(const CodeOrigin& codeOrigin)
    {
        return baselineCodeBlockForOriginAndBaselineCodeBlock(codeOrigin, m_profiledBlock);
    }

    bool isStrictModeFor(CodeOrigin codeOrigin)
    {
        if (!codeOrigin.inlineCallFrame)
            return m_codeBlock->isStrictMode();
        return jsCast<FunctionExecutable*>(codeOrigin.inlineCallFrame->executable.get())->isStrictMode();
    }

    ECMAMode ecmaModeFor(CodeOrigin codeOrigin)
    {
        return isStrictModeFor(codeOrigin) ? StrictMode : NotStrictMode;
    }

    bool masqueradesAsUndefinedWatchpointIsStillValid(const CodeOrigin& codeOrigin)
    {
        return m_plan.watchpoints.isStillValid(
            globalObjectFor(codeOrigin)->masqueradesAsUndefinedWatchpoint());
    }

    bool hasGlobalExitSite(const CodeOrigin& codeOrigin, ExitKind exitKind)
    {
        return baselineCodeBlockFor(codeOrigin)->hasExitSite(FrequentExitSite(exitKind));
    }

    bool hasExitSite(const CodeOrigin& codeOrigin, ExitKind exitKind)
    {
        return baselineCodeBlockFor(codeOrigin)->hasExitSite(FrequentExitSite(codeOrigin.bytecodeIndex, exitKind));
    }

    bool hasExitSite(Node* node, ExitKind exitKind)
    {
        return hasExitSite(node->origin.semantic, exitKind);
    }

    VirtualRegister argumentsRegisterFor(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return m_profiledBlock->argumentsRegister();

        return VirtualRegister(baselineCodeBlockForInlineCallFrame(
            inlineCallFrame)->argumentsRegister().offset() +
            inlineCallFrame->stackOffset);
    }

    VirtualRegister argumentsRegisterFor(const CodeOrigin& codeOrigin)
    {
        return argumentsRegisterFor(codeOrigin.inlineCallFrame);
    }

    VirtualRegister machineArgumentsRegisterFor(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return m_codeBlock->argumentsRegister();

        return inlineCallFrame->argumentsRegister;
    }

    VirtualRegister machineArgumentsRegisterFor(const CodeOrigin& codeOrigin)
    {
        return machineArgumentsRegisterFor(codeOrigin.inlineCallFrame);
    }

    VirtualRegister uncheckedArgumentsRegisterFor(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return m_profiledBlock->uncheckedArgumentsRegister();

        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
        if (!codeBlock->usesArguments())
            return VirtualRegister();

        return VirtualRegister(codeBlock->argumentsRegister().offset() +
            inlineCallFrame->stackOffset);
    }

    VirtualRegister uncheckedArgumentsRegisterFor(const CodeOrigin& codeOrigin)
    {
        return uncheckedArgumentsRegisterFor(codeOrigin.inlineCallFrame);
    }

    VirtualRegister activationRegister()
    {
        return m_profiledBlock->activationRegister();
    }

    VirtualRegister uncheckedActivationRegister()
    {
        return m_profiledBlock->uncheckedActivationRegister();
    }

    VirtualRegister machineActivationRegister()
    {
        return m_profiledBlock->activationRegister();
    }

    VirtualRegister uncheckedMachineActivationRegister()
    {
        return m_profiledBlock->uncheckedActivationRegister();
    }

    ValueProfile* valueProfileFor(Node* node)
    {
        if (!node)
            return 0;

        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

        if (node->op() == GetArgument)
            return profiledBlock->valueProfileForArgument(node->local().toArgument());

        if (node->hasLocal(*this)) {
            if (m_form == SSA)
                return 0;
            if (!node->local().isArgument())
                return 0;
            int argument = node->local().toArgument();
            if (node->variableAccessData() != m_arguments[argument]->variableAccessData())
                return 0;
            return profiledBlock->valueProfileForArgument(argument);
        }

        if (node->hasHeapPrediction())
            return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);

        return 0;
    }

    MethodOfGettingAValueProfile methodOfGettingAValueProfileFor(Node* node)
    {
        if (!node)
            return MethodOfGettingAValueProfile();

        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

        if (node->op() == GetLocal) {
            return MethodOfGettingAValueProfile::fromLazyOperand(
                profiledBlock,
                LazyOperandValueProfileKey(
                    node->origin.semantic.bytecodeIndex, node->local()));
        }

        return MethodOfGettingAValueProfile(valueProfileFor(node));
    }

    bool usesArguments() const
    {
        return m_codeBlock->usesArguments();
    }

    BlockIndex numBlocks() const { return m_blocks.size(); }
    BasicBlock* block(BlockIndex blockIndex) const { return m_blocks[blockIndex].get(); }
    BasicBlock* lastBlock() const { return block(numBlocks() - 1); }

    void appendBlock(PassRefPtr<BasicBlock> basicBlock)
    {
        basicBlock->index = m_blocks.size();
        m_blocks.append(basicBlock);
    }

    void killBlock(BlockIndex blockIndex)
    {
        m_blocks[blockIndex].clear();
    }

    void killBlock(BasicBlock* basicBlock)
    {
        killBlock(basicBlock->index);
    }

    void killBlockAndItsContents(BasicBlock*);

    void killUnreachableBlocks();

    bool isPredictedNumerical(Node* node)
    {
        return isNumerical(node->child1().useKind()) && isNumerical(node->child2().useKind());
    }

    // Note that a 'true' return does not actually mean that the ByVal access clobbers nothing.
    // It really means that it will not clobber the entire world. It's still up to you to
    // carefully consider things like:
    // - PutByVal definitely changes the array it stores to, and may even change its length.
    // - PutByOffset definitely changes the object it stores to.
    // - and so on.
    bool byValIsPure(Node* node)
    {
        switch (node->arrayMode().type()) {
        case Array::Generic:
            return false;
        case Array::Int32:
        case Array::Double:
        case Array::Contiguous:
        case Array::ArrayStorage:
            return !node->arrayMode().isOutOfBounds();
        case Array::SlowPutArrayStorage:
            return !node->arrayMode().mayStoreToHole();
        case Array::String:
            return node->op() == GetByVal && node->arrayMode().isInBounds();
#if USE(JSVALUE32_64)
        case Array::Arguments:
            if (node->op() == GetByVal)
                return true;
            return false;
#endif // USE(JSVALUE32_64)
        default:
            return true;
        }
    }

    bool clobbersWorld(Node* node)
    {
        if (node->flags() & NodeClobbersWorld)
            return true;
        if (!(node->flags() & NodeMightClobber))
            return false;
        switch (node->op()) {
        case GetByVal:
        case PutByValDirect:
        case PutByVal:
        case PutByValAlias:
            return !byValIsPure(node);
        case ToString:
            switch (node->child1().useKind()) {
            case StringObjectUse:
            case StringOrStringObjectUse:
                return false;
            case CellUse:
            case UntypedUse:
                return true;
            default:
                RELEASE_ASSERT_NOT_REACHED();
                return true;
            }
        default:
            RELEASE_ASSERT_NOT_REACHED();
            return true; // If by some oddity we hit this case in release build it's safer to have CSE assume the worst.
        }
    }

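    // Illustrative sketch of a typical caller of clobbersWorld(): a backwards search for a
    // reusable prior node that gives up once anything might have clobbered the heap. This is
    // not code from any particular phase; 'graph', 'block', 'startIndex', and 'isEquivalent'
    // are hypothetical stand-ins.
    //
    //     for (unsigned i = startIndex; i--;) {
    //         Node* candidate = block->at(i);
    //         if (isEquivalent(candidate))
    //             return candidate;                // safe to reuse the earlier value
    //         if (graph.clobbersWorld(candidate))
    //             break;                           // earlier values may have been invalidated
    //     }
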
    void determineReachability();
    void resetReachability();

    void resetExitStates();

    unsigned varArgNumChildren(Node* node)
    {
        ASSERT(node->flags() & NodeHasVarArgs);
        return node->numChildren();
    }

    unsigned numChildren(Node* node)
    {
        if (node->flags() & NodeHasVarArgs)
            return varArgNumChildren(node);
        return AdjacencyList::Size;
    }

    Edge& varArgChild(Node* node, unsigned index)
    {
        ASSERT(node->flags() & NodeHasVarArgs);
        return m_varArgChildren[node->firstChild() + index];
    }

    Edge& child(Node* node, unsigned index)
    {
        if (node->flags() & NodeHasVarArgs)
            return varArgChild(node, index);
        return node->children.child(index);
    }

    void voteNode(Node* node, unsigned ballot, float weight = 1)
    {
        switch (node->op()) {
        case ValueToInt32:
        case UInt32ToNumber:
            node = node->child1().node();
            break;
        default:
            break;
        }

        if (node->op() == GetLocal)
            node->variableAccessData()->vote(ballot, weight);
    }

    void voteNode(Edge edge, unsigned ballot, float weight = 1)
    {
        voteNode(edge.node(), ballot, weight);
    }

    void voteChildren(Node* node, unsigned ballot, float weight = 1)
    {
        if (node->flags() & NodeHasVarArgs) {
            for (unsigned childIdx = node->firstChild();
                childIdx < node->firstChild() + node->numChildren();
                childIdx++) {
                if (!!m_varArgChildren[childIdx])
                    voteNode(m_varArgChildren[childIdx], ballot, weight);
            }
            return;
        }

        if (!node->child1())
            return;
        voteNode(node->child1(), ballot, weight);
        if (!node->child2())
            return;
        voteNode(node->child2(), ballot, weight);
        if (!node->child3())
            return;
        voteNode(node->child3(), ballot, weight);
    }

    template<typename T> // T = Node* or Edge
    void substitute(BasicBlock& block, unsigned startIndexInBlock, T oldThing, T newThing)
    {
        for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
            Node* node = block[indexInBlock];
            if (node->flags() & NodeHasVarArgs) {
                for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); ++childIdx) {
                    if (!!m_varArgChildren[childIdx])
                        compareAndSwap(m_varArgChildren[childIdx], oldThing, newThing);
                }
                continue;
            }
            if (!node->child1())
                continue;
            compareAndSwap(node->children.child1(), oldThing, newThing);
            if (!node->child2())
                continue;
            compareAndSwap(node->children.child2(), oldThing, newThing);
            if (!node->child3())
                continue;
            compareAndSwap(node->children.child3(), oldThing, newThing);
        }
    }

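    // Illustrative sketch of substitute(): redirect every later use of one node to another
    // within a block, e.g. after proving they produce the same value. 'graph', 'block',
    // 'oldNode', 'newNode', and 'indexOfOldNode' are hypothetical stand-ins.
    //
    //     graph.substitute(*block, indexOfOldNode + 1, oldNode, newNode);
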
    // Use this if you introduce a new GetLocal and you know that you introduced it *before*
    // any GetLocals in the basic block.
    // FIXME: it may be appropriate, in the future, to generalize this to handle GetLocals
    // introduced anywhere in the basic block.
    void substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal);

    void invalidateCFG();

    void clearFlagsOnAllNodes(NodeFlags);

    void clearReplacements();
    void initializeNodeOwners();

    void getBlocksInDepthFirstOrder(Vector<BasicBlock*>& result);

    Profiler::Compilation* compilation() { return m_plan.compilation.get(); }

    DesiredIdentifiers& identifiers() { return m_plan.identifiers; }
    DesiredWatchpoints& watchpoints() { return m_plan.watchpoints; }
    DesiredStructureChains& chains() { return m_plan.chains; }

    FullBytecodeLiveness& livenessFor(CodeBlock*);
    FullBytecodeLiveness& livenessFor(InlineCallFrame*);
    bool isLiveInBytecode(VirtualRegister, CodeOrigin);

    unsigned frameRegisterCount();
    unsigned stackPointerOffset();
    unsigned requiredRegisterCountForExit();
    unsigned requiredRegisterCountForExecutionAndExit();

    JSActivation* tryGetActivation(Node*);
    WriteBarrierBase<Unknown>* tryGetRegisters(Node*);

    JSArrayBufferView* tryGetFoldableView(Node*);
    JSArrayBufferView* tryGetFoldableView(Node*, ArrayMode);
    JSArrayBufferView* tryGetFoldableViewForChild1(Node*);

    virtual void visitChildren(SlotVisitor&) override;

    VM& m_vm;
    Plan& m_plan;
    CodeBlock* m_codeBlock;
    CodeBlock* m_profiledBlock;

    NodeAllocator& m_allocator;

    Operands<AbstractValue> m_mustHandleAbstractValues;

    Vector<RefPtr<BasicBlock>, 8> m_blocks;
    Vector<Edge, 16> m_varArgChildren;
    Vector<StorageAccessData> m_storageAccessData;
    Vector<Node*, 8> m_arguments;
    SegmentedVector<VariableAccessData, 16> m_variableAccessData;
    SegmentedVector<ArgumentPosition, 8> m_argumentPositions;
    SegmentedVector<StructureSet, 16> m_structureSet;
    SegmentedVector<StructureTransitionData, 8> m_structureTransitionData;
    SegmentedVector<NewArrayBufferData, 4> m_newArrayBufferData;
    Bag<BranchData> m_branchData;
    Bag<SwitchData> m_switchData;
    Bag<MultiGetByOffsetData> m_multiGetByOffsetData;
    Bag<MultiPutByOffsetData> m_multiPutByOffsetData;
    Vector<InlineVariableData, 4> m_inlineVariableData;
    HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>> m_bytecodeLiveness;
    bool m_hasArguments;
    HashSet<ExecutableBase*> m_executablesWhoseArgumentsEscaped;
    BitVector m_lazyVars;
    Dominators m_dominators;
    NaturalLoops m_naturalLoops;
    unsigned m_localVars;
    unsigned m_nextMachineLocal;
    unsigned m_parameterSlots;
    int m_machineCaptureStart;
    std::unique_ptr<SlowArgument[]> m_slowArguments;

#if USE(JSVALUE32_64)
    std::unordered_map<int64_t, double*> m_doubleConstantsMap;
    std::unique_ptr<Bag<double>> m_doubleConstants;
#endif

    OptimizationFixpointState m_fixpointState;
    GraphForm m_form;
    UnificationState m_unificationState;
    RefCountState m_refCountState;
private:

    void handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock*, BasicBlock* successor);
    void addForDepthFirstSort(Vector<BasicBlock*>& result, Vector<BasicBlock*, 16>& worklist, HashSet<BasicBlock*>& seen, BasicBlock*);

    AddSpeculationMode addImmediateShouldSpeculateInt32(Node* add, bool variableShouldSpeculateInt32, Node* immediate, RareCaseProfilingSource source)
    {
        ASSERT(immediate->hasConstant());

        JSValue immediateValue = immediate->valueOfJSConstant(m_codeBlock);
        if (!immediateValue.isNumber() && !immediateValue.isBoolean())
            return DontSpeculateInt32;

        if (!variableShouldSpeculateInt32)
            return DontSpeculateInt32;

        if (immediateValue.isInt32() || immediateValue.isBoolean())
            return add->canSpeculateInt32(source) ? SpeculateInt32 : DontSpeculateInt32;

        double doubleImmediate = immediateValue.asDouble();
        const double twoToThe48 = 281474976710656.0;
        if (doubleImmediate < -twoToThe48 || doubleImmediate > twoToThe48)
            return DontSpeculateInt32;

        return bytecodeCanTruncateInteger(add->arithNodeFlags()) ? SpeculateInt32AndTruncateConstants : DontSpeculateInt32;
    }
};

#define DFG_NODE_DO_TO_CHILDREN(graph, node, thingToDo) do {               \
        Node* _node = (node);                                              \
        if (_node->flags() & NodeHasVarArgs) {                             \
            for (unsigned _childIdx = _node->firstChild();                 \
                _childIdx < _node->firstChild() + _node->numChildren();    \
                _childIdx++) {                                             \
                if (!!(graph).m_varArgChildren[_childIdx])                 \
                    thingToDo(_node, (graph).m_varArgChildren[_childIdx]); \
            }                                                              \
        } else {                                                           \
            if (!_node->child1()) {                                        \
                ASSERT(                                                    \
                    !_node->child2()                                       \
                    && !_node->child3());                                  \
                break;                                                     \
            }                                                              \
            thingToDo(_node, _node->child1());                             \
                                                                           \
            if (!_node->child2()) {                                        \
                ASSERT(!_node->child3());                                  \
                break;                                                     \
            }                                                              \
            thingToDo(_node, _node->child2());                             \
                                                                           \
            if (!_node->child3())                                          \
                break;                                                     \
            thingToDo(_node, _node->child3());                             \
        }                                                                  \
    } while (false)

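// Illustrative sketch of DFG_NODE_DO_TO_CHILDREN: it invokes thingToDo(node, edge) on each
// child edge of a node, covering both the fixed children and the var-arg children. The
// helper below is hypothetical and not part of this header; 'graph' is a Graph&.
//
//     static unsigned s_liveEdgeCount;
//     static void countEdge(Node*, Edge edge)
//     {
//         if (edge)
//             s_liveEdgeCount++;
//     }
//     ...
//     DFG_NODE_DO_TO_CHILDREN(graph, node, countEdge);
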
} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)
#endif // DFGGraph_h