/*
 * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef DFGClobberize_h
#define DFGClobberize_h

#if ENABLE(DFG_JIT)

#include "DFGAbstractHeap.h"
#include "DFGEdgeUsesStructure.h"
#include "DFGGraph.h"
#include "DFGHeapLocation.h"
#include "DFGLazyNode.h"
#include "DFGPureValue.h"

namespace JSC { namespace DFG {
template<typename ReadFunctor, typename WriteFunctor, typename DefFunctor>
void clobberize(Graph& graph, Node* node, const ReadFunctor& read, const WriteFunctor& write, const DefFunctor& def)
{
    // Some notes:
    //
    // - The canonical way of clobbering the world is to read(World) and write(Heap). This is
    //   because World subsumes Heap and Stack, and Stack can be read by anyone but only written
    //   to by explicit stack-writing operations. Of course, claiming to also write World is not
    //   wrong; it'll just pessimise some important optimizations. (A minimal sketch of this
    //   pattern follows these notes.)
    //
    // - We cannot hoist, or sink, anything that has effects. This means that the easiest way of
    //   indicating that something cannot be hoisted is to claim that it side-effects some
    //   miscellaneous thing.
    //
    // - We cannot hoist forward-exiting nodes without some additional effort. I believe that what
    //   it comes down to is that forward-exiting nodes generally have their NodeExitsForward
    //   cleared upon hoist, except for forward-exiting nodes that take bogus state as their
    //   input. Those are substantially harder. We disable it for now. In the future we could
    //   enable it by having versions of those nodes that backward-exit instead, but I'm not
    //   convinced that it's worth it.
    //
    // - Some nodes lie, and claim that they do not read JSCell_structureID, JSCell_typeInfoFlags,
    //   etc. These are nodes that use the structure in a way that does not depend on things that
    //   change under structure transitions.
    //
    // - It's implicitly understood that OSR exits read the world. This is why we generally don't
    //   move or eliminate stores. Every node can exit, so the read set does not reflect things
    //   that would be read if we exited. Instead, the read set reflects what the node will have
    //   to read if it does not exit.
    //
    // - Broadly, we don't say that we're reading something if that something is immutable.
    //
    // - We try to make this work even prior to type inference, just so that we can use it for IR
    //   dumps. No promises on whether the answers are sound prior to type inference - though they
    //   probably could be if we did some extra work.
    //
    // - If you do read(Stack) or read(World), then make sure that readTop() in
    //   PreciseLocalClobberize is correct.
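    //
    // As a concrete illustration of the first note, a node that may call back into arbitrary JS
    // would conservatively be modeled like this (a minimal sketch; the heaps each node actually
    // touches are decided case-by-case in the switch below):
    //
    //     read(World);
    //     write(Heap);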
    //
    // While read() and write() are fairly self-explanatory - they track what sorts of things the
    // node may read or write - the def() functor is more tricky. It tells you the heap locations
    // (not just abstract heaps) that are defined by a node. A heap location comprises an abstract
    // heap, some nodes, and a LocationKind. Briefly, a location defined by a node is a location
    // whose value can be deduced from looking at the node itself. The locations returned must obey
    // the following properties:
    //
    // - If someone wants to CSE a load from the heap, then a HeapLocation object should be
    //   sufficient to find a single matching node.
    //
    // - The abstract heap is the only abstract heap that could be clobbered to invalidate any such
    //   CSE attempt. That is, if clobberize() reports that on every path between some node and a
    //   node that defines a HeapLocation that it wanted, there were no writes to any abstract heap
    //   that overlaps the location's heap, then we have a sound match. Effectively, the semantics
    //   of write() and def() are intertwined such that for them to be sound they must agree on
    //   what is CSEable.
    //
    // read(), write(), and def() for heap locations is enough to do GCSE on effectful things. To
    // keep things simple, this code will also def() pure things. def() must be overloaded to also
    // accept PureValue. This way, a client of clobberize() can implement GCSE entirely using the
    // information that clobberize() passes to write() and def(). Other clients of clobberize() can
    // just ignore def() by using a NoOpClobberize functor.
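    //
    // For illustration only (not part of this header), a client that just wants to collect the
    // write set could drive clobberize() like this; WriteSetCollector is a hypothetical functor,
    // while NoOpClobberize is defined at the bottom of this file:
    //
    //     class WriteSetCollector {
    //     public:
    //         void operator()(AbstractHeap heap) const { m_writes.append(heap); }
    //         const Vector<AbstractHeap>& writes() const { return m_writes; }
    //     private:
    //         mutable Vector<AbstractHeap> m_writes;
    //     };
    //
    //     NoOpClobberize read;
    //     WriteSetCollector write;
    //     NoOpClobberize def; // ignore def() entirely, as described above
    //     clobberize(graph, node, read, write, def);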
    if (edgesUseStructure(graph, node))
        read(JSCell_structureID);

    switch (node->op()) {
        def(PureValue(node, node->constant()));

    case ExtractOSREntryLocal:
    case CheckStructureImmediate:

    case StringCharCodeAt:
    case StringFromCharCode:
    case CompareEqConstant:
    case CompareStrictEq:
    case BooleanToNumber:
        def(PureValue(node));

    case HasGenericProperty:
    case HasStructureProperty:
    case GetEnumerableLength:
    case GetPropertyEnumerator: {

    case GetDirectPname: {
        // This reads and writes heap because it can end up calling a generic getByVal
        // if the Structure changed, which could in turn end up calling a getter.

    case GetEnumeratorStructurePname:
    case GetEnumeratorGenericPname: {
        def(PureValue(node));

    case HasIndexedProperty: {
        read(JSObject_butterfly);
        ArrayMode mode = node->arrayMode();
        switch (mode.type()) {
            if (mode.isInBounds()) {
                read(Butterfly_publicLength);
                read(IndexedInt32Properties);
                def(HeapLocation(HasIndexedPropertyLoc, IndexedInt32Properties, node->child1(), node->child2()), LazyNode(node));

        case Array::Double: {
            if (mode.isInBounds()) {
                read(Butterfly_publicLength);
                read(IndexedDoubleProperties);
                def(HeapLocation(HasIndexedPropertyLoc, IndexedDoubleProperties, node->child1(), node->child2()), LazyNode(node));

        case Array::Contiguous: {
            if (mode.isInBounds()) {
                read(Butterfly_publicLength);
                read(IndexedContiguousProperties);
                def(HeapLocation(HasIndexedPropertyLoc, IndexedContiguousProperties, node->child1(), node->child2()), LazyNode(node));

        case Array::ArrayStorage: {
            if (mode.isInBounds()) {
                read(Butterfly_vectorLength);
                read(IndexedArrayStorageProperties);

            RELEASE_ASSERT_NOT_REACHED();

        def(PureValue(node, node->arithMode()));

        def(PureValue(node, static_cast<uintptr_t>(node->arithRoundingMode())));

        def(PureValue(CheckCell, AdjacencyList(AdjacencyList::Fixed, node->child1()), node->cellOperand()));

        def(PureValue(CheckNotEmpty, AdjacencyList(AdjacencyList::Fixed, node->child1())));

    case ConstantStoragePointer:
        def(PureValue(node, node->storagePointer()));

    case CheckTierUpInLoop:
    case CheckTierUpAtReturn:
    case CheckTierUpAndOSREnter:
    case CheckTierUpWithNestedTriggerAndOSREnter:

    case ProfileWillCall:
    case ProfileControlFlow:

    case InvalidationPoint:
        def(HeapLocation(InvalidationPointLoc, Watchpoint_fire), LazyNode(node));

        read(AbstractHeap(Stack, node->local()));

        write(Watchpoint_fire);

    case CreateActivation: {
        SymbolTable* table = node->castOperand<SymbolTable*>();
        if (table->singletonScope()->isStillValid())
            write(Watchpoint_fire);
        read(HeapObjectCount);
        write(HeapObjectCount);

    case CreateDirectArguments:
    case CreateScopedArguments:
    case CreateClonedArguments:
        read(HeapObjectCount);
        write(HeapObjectCount);

    case PhantomDirectArguments:
    case PhantomClonedArguments:
        // DFG backend requires that the locals that this reads are flushed. FTL backend can handle
        // those locals being promoted.
        if (!isFTL(graph.m_plan.mode))
            read(Stack);

        // Even though it's phantom, it still has the property that one can't be replaced with another.
        read(HeapObjectCount);
        write(HeapObjectCount);

        read(HeapObjectCount);
        write(HeapObjectCount);

    case VarInjectionWatchpoint:
        def(HeapLocation(VarInjectionWatchpointLoc, MiscFields), LazyNode(node));

        def(HeapLocation(IsObjectOrNullLoc, MiscFields, node->child1()), LazyNode(node));

        def(HeapLocation(IsFunctionLoc, MiscFields, node->child1()), LazyNode(node));

    case NativeConstruct:
    case CallForwardVarargs:
    case ConstructVarargs:
    case ConstructForwardVarargs:

        read(GetterSetter_getter);
        def(HeapLocation(GetterLoc, GetterSetter_getter, node->child1()), LazyNode(node));

        read(GetterSetter_setter);
        def(HeapLocation(SetterLoc, GetterSetter_setter, node->child1()), LazyNode(node));

        read(AbstractHeap(Stack, JSStack::Callee));
        def(HeapLocation(StackLoc, AbstractHeap(Stack, JSStack::Callee)), LazyNode(node));

    case GetArgumentCount:
        read(AbstractHeap(Stack, JSStack::ArgumentCount));
        def(HeapLocation(StackPayloadLoc, AbstractHeap(Stack, JSStack::ArgumentCount)), LazyNode(node));

        read(AbstractHeap(Stack, node->local()));
        def(HeapLocation(StackLoc, AbstractHeap(Stack, node->local())), LazyNode(node));

        write(AbstractHeap(Stack, node->local()));
        def(HeapLocation(StackLoc, AbstractHeap(Stack, node->local())), LazyNode(node->child1().node()));

        AbstractHeap heap(Stack, node->stackAccessData()->local);
        def(HeapLocation(StackLoc, heap), LazyNode(node));

        AbstractHeap heap(Stack, node->stackAccessData()->local);
        def(HeapLocation(StackLoc, heap), LazyNode(node->child1().node()));

        LoadVarargsData* data = node->loadVarargsData();
        write(AbstractHeap(Stack, data->count.offset()));
        for (unsigned i = data->limit; i--;)
            write(AbstractHeap(Stack, data->start.offset() + static_cast<int>(i)));

    case ForwardVarargs: {
        // We could be way more precise here.
        LoadVarargsData* data = node->loadVarargsData();
        write(AbstractHeap(Stack, data->count.offset()));
        for (unsigned i = data->limit; i--;)
            write(AbstractHeap(Stack, data->start.offset() + static_cast<int>(i)));

    case GetLocalUnlinked:
        read(AbstractHeap(Stack, node->unlinkedLocal()));
        def(HeapLocation(StackLoc, AbstractHeap(Stack, node->unlinkedLocal())), LazyNode(node));

        ArrayMode mode = node->arrayMode();
        switch (mode.type()) {
        case Array::SelectUsingPredictions:
        case Array::Unprofiled:
        case Array::Undecided:
            // Assume the worst since we don't have profiling yet.

        case Array::ForceExit:

            if (mode.isOutOfBounds()) {
            // This appears to read nothing because it's only reading immutable data.
            def(PureValue(node, mode.asWord()));

        case Array::DirectArguments:
            read(DirectArgumentsProperties);
            def(HeapLocation(IndexedPropertyLoc, DirectArgumentsProperties, node->child1(), node->child2()), LazyNode(node));

        case Array::ScopedArguments:
            read(ScopeProperties);
            def(HeapLocation(IndexedPropertyLoc, ScopeProperties, node->child1(), node->child2()), LazyNode(node));

            if (mode.isInBounds()) {
                read(Butterfly_publicLength);
                read(IndexedInt32Properties);
                def(HeapLocation(IndexedPropertyLoc, IndexedInt32Properties, node->child1(), node->child2()), LazyNode(node));

            if (mode.isInBounds()) {
                read(Butterfly_publicLength);
                read(IndexedDoubleProperties);
                def(HeapLocation(IndexedPropertyLoc, IndexedDoubleProperties, node->child1(), node->child2()), LazyNode(node));

        case Array::Contiguous:
            if (mode.isInBounds()) {
                read(Butterfly_publicLength);
                read(IndexedContiguousProperties);
                def(HeapLocation(IndexedPropertyLoc, IndexedContiguousProperties, node->child1(), node->child2()), LazyNode(node));

        case Array::ArrayStorage:
        case Array::SlowPutArrayStorage:
            if (mode.isInBounds()) {
                read(Butterfly_vectorLength);
                read(IndexedArrayStorageProperties);

        case Array::Int8Array:
        case Array::Int16Array:
        case Array::Int32Array:
        case Array::Uint8Array:
        case Array::Uint8ClampedArray:
        case Array::Uint16Array:
        case Array::Uint32Array:
        case Array::Float32Array:
        case Array::Float64Array:
            read(TypedArrayProperties);
            def(HeapLocation(IndexedPropertyLoc, TypedArrayProperties, node->child1(), node->child2()), LazyNode(node));

            RELEASE_ASSERT_NOT_REACHED();

    case GetMyArgumentByVal: {
        // FIXME: It would be trivial to have a def here.
        // https://bugs.webkit.org/show_bug.cgi?id=143077

    case PutByValAlias: {
        ArrayMode mode = node->arrayMode();
        Node* base = graph.varArgChild(node, 0).node();
        Node* index = graph.varArgChild(node, 1).node();
        Node* value = graph.varArgChild(node, 2).node();
        switch (mode.modeForPut().type()) {
        case Array::SelectUsingPredictions:
        case Array::Unprofiled:
        case Array::Undecided:
            // Assume the worst since we don't have profiling yet.

        case Array::ForceExit:

            if (node->arrayMode().isOutOfBounds()) {
            read(Butterfly_publicLength);
            read(Butterfly_vectorLength);
            read(IndexedInt32Properties);
            write(IndexedInt32Properties);
            if (node->arrayMode().mayStoreToHole())
                write(Butterfly_publicLength);
            def(HeapLocation(IndexedPropertyLoc, IndexedInt32Properties, base, index), LazyNode(value));

            if (node->arrayMode().isOutOfBounds()) {
            read(Butterfly_publicLength);
            read(Butterfly_vectorLength);
            read(IndexedDoubleProperties);
            write(IndexedDoubleProperties);
            if (node->arrayMode().mayStoreToHole())
                write(Butterfly_publicLength);
            def(HeapLocation(IndexedPropertyLoc, IndexedDoubleProperties, base, index), LazyNode(value));

        case Array::Contiguous:
            if (node->arrayMode().isOutOfBounds()) {
            read(Butterfly_publicLength);
            read(Butterfly_vectorLength);
            read(IndexedContiguousProperties);
            write(IndexedContiguousProperties);
            if (node->arrayMode().mayStoreToHole())
                write(Butterfly_publicLength);
            def(HeapLocation(IndexedPropertyLoc, IndexedContiguousProperties, base, index), LazyNode(value));

        case Array::ArrayStorage:
        case Array::SlowPutArrayStorage:
            // Give up on life for now.

        case Array::Int8Array:
        case Array::Int16Array:
        case Array::Int32Array:
        case Array::Uint8Array:
        case Array::Uint8ClampedArray:
        case Array::Uint16Array:
        case Array::Uint32Array:
        case Array::Float32Array:
        case Array::Float64Array:
            write(TypedArrayProperties);
            // FIXME: We can't def() anything here because these operations truncate their inputs.
            // https://bugs.webkit.org/show_bug.cgi?id=134737

        case Array::DirectArguments:
        case Array::ScopedArguments:
            DFG_CRASH(graph, node, "impossible array mode for put");

            RELEASE_ASSERT_NOT_REACHED();

        read(JSCell_structureID);

        read(JSCell_indexingType);
        read(JSCell_typeInfoType);
        read(JSCell_structureID);

    case CheckHasInstance:
        read(JSCell_typeInfoFlags);
        def(HeapLocation(CheckHasInstanceLoc, JSCell_typeInfoFlags, node->child1()), LazyNode(node));

        read(JSCell_structureID);
        def(HeapLocation(InstanceOfLoc, JSCell_structureID, node->child1(), node->child2()), LazyNode(node));

        write(JSCell_structureID);
        write(JSCell_typeInfoType);
        write(JSCell_typeInfoFlags);
        write(JSCell_indexingType);

    case AllocatePropertyStorage:
        write(JSObject_butterfly);
        def(HeapLocation(ButterflyLoc, JSObject_butterfly, node->child1()), LazyNode(node));

    case ReallocatePropertyStorage:
        read(JSObject_butterfly);
        write(JSObject_butterfly);
        def(HeapLocation(ButterflyLoc, JSObject_butterfly, node->child1()), LazyNode(node));

        read(JSObject_butterfly);
        def(HeapLocation(ButterflyLoc, JSObject_butterfly, node->child1()), LazyNode(node));

    case ArrayifyToStructure:
        read(JSCell_structureID);
        read(JSCell_indexingType);
        read(JSObject_butterfly);
        write(JSCell_structureID);
        write(JSCell_indexingType);
        write(JSObject_butterfly);
        write(Watchpoint_fire);

    case GetIndexedPropertyStorage:
        if (node->arrayMode().type() == Array::String) {
            def(PureValue(node, node->arrayMode().asWord()));

        def(HeapLocation(IndexedPropertyStorageLoc, MiscFields, node->child1()), LazyNode(node));

    case GetTypedArrayByteOffset:
        def(HeapLocation(TypedArrayByteOffsetLoc, MiscFields, node->child1()), LazyNode(node));

    case GetGetterSetterByOffset: {
        unsigned identifierNumber = node->storageAccessData().identifierNumber;
        AbstractHeap heap(NamedProperties, identifierNumber);
        def(HeapLocation(NamedPropertyLoc, heap, node->child2()), LazyNode(node));

    case MultiGetByOffset: {
        read(JSCell_structureID);
        read(JSObject_butterfly);
        AbstractHeap heap(NamedProperties, node->multiGetByOffsetData().identifierNumber);
        def(HeapLocation(NamedPropertyLoc, heap, node->child1()), LazyNode(node));

    case MultiPutByOffset: {
        read(JSCell_structureID);
        read(JSObject_butterfly);
        AbstractHeap heap(NamedProperties, node->multiPutByOffsetData().identifierNumber);
        if (node->multiPutByOffsetData().writesStructures())
            write(JSCell_structureID);
        if (node->multiPutByOffsetData().reallocatesStorage())
            write(JSObject_butterfly);
        def(HeapLocation(NamedPropertyLoc, heap, node->child1()), LazyNode(node->child2().node()));

        unsigned identifierNumber = node->storageAccessData().identifierNumber;
        AbstractHeap heap(NamedProperties, identifierNumber);
        def(HeapLocation(NamedPropertyLoc, heap, node->child2()), LazyNode(node->child3().node()));

    case GetArrayLength: {
        ArrayMode mode = node->arrayMode();
        switch (mode.type()) {
        case Array::Contiguous:
        case Array::ArrayStorage:
        case Array::SlowPutArrayStorage:
            read(Butterfly_publicLength);
            def(HeapLocation(ArrayLengthLoc, Butterfly_publicLength, node->child1()), LazyNode(node));

            def(PureValue(node, mode.asWord()));

        case Array::DirectArguments:
        case Array::ScopedArguments:
            def(HeapLocation(ArrayLengthLoc, MiscFields, node->child1()), LazyNode(node));

            ASSERT(mode.typedArrayType() != NotTypedArray);
            def(HeapLocation(ArrayLengthLoc, MiscFields, node->child1()), LazyNode(node));

        read(AbstractHeap(ScopeProperties, node->scopeOffset().offset()));
        def(HeapLocation(ClosureVariableLoc, AbstractHeap(ScopeProperties, node->scopeOffset().offset()), node->child1()), LazyNode(node));

        write(AbstractHeap(ScopeProperties, node->scopeOffset().offset()));
        def(HeapLocation(ClosureVariableLoc, AbstractHeap(ScopeProperties, node->scopeOffset().offset()), node->child1()), LazyNode(node->child2().node()));

    case GetFromArguments: {
        AbstractHeap heap(DirectArgumentsProperties, node->capturedArgumentsOffset().offset());
        def(HeapLocation(DirectArgumentsLoc, heap, node->child1()), LazyNode(node));

    case PutToArguments: {
        AbstractHeap heap(DirectArgumentsProperties, node->capturedArgumentsOffset().offset());
        def(HeapLocation(DirectArgumentsLoc, heap, node->child1()), LazyNode(node->child2().node()));

        read(AbstractHeap(Absolute, node->variablePointer()));
        def(HeapLocation(GlobalVariableLoc, AbstractHeap(Absolute, node->variablePointer())), LazyNode(node));

        write(AbstractHeap(Absolute, node->variablePointer()));
        def(HeapLocation(GlobalVariableLoc, AbstractHeap(Absolute, node->variablePointer())), LazyNode(node->child2().node()));

    case NewArrayWithSize:
        read(HeapObjectCount);
        write(HeapObjectCount);

        read(HeapObjectCount);
        write(HeapObjectCount);

        unsigned numElements = node->numChildren();

        def(HeapLocation(ArrayLengthLoc, Butterfly_publicLength, node),
            LazyNode(graph.freeze(jsNumber(numElements))));

        switch (node->indexingType()) {
        case ALL_DOUBLE_INDEXING_TYPES:
            heap = IndexedDoubleProperties;

        case ALL_INT32_INDEXING_TYPES:
            heap = IndexedInt32Properties;

        case ALL_CONTIGUOUS_INDEXING_TYPES:
            heap = IndexedContiguousProperties;

        if (numElements < graph.m_uint32ValuesInUse.size()) {
            for (unsigned operandIdx = 0; operandIdx < numElements; ++operandIdx) {
                Edge use = graph.m_varArgChildren[node->firstChild() + operandIdx];
                def(HeapLocation(IndexedPropertyLoc, heap, node, LazyNode(graph.freeze(jsNumber(operandIdx)))),
                    LazyNode(use.node()));

            for (uint32_t operandIdx : graph.m_uint32ValuesInUse) {
                if (operandIdx >= numElements)
                    continue;
                Edge use = graph.m_varArgChildren[node->firstChild() + operandIdx];
                def(HeapLocation(IndexedPropertyLoc, heap, node, LazyNode(graph.freeze(jsNumber(operandIdx)))),
                    LazyNode(use.node()));

    case NewArrayBuffer: {
        read(HeapObjectCount);
        write(HeapObjectCount);

        unsigned numElements = node->numConstants();
        def(HeapLocation(ArrayLengthLoc, Butterfly_publicLength, node),
            LazyNode(graph.freeze(jsNumber(numElements))));

        NodeType op = JSConstant;
        switch (node->indexingType()) {
        case ALL_DOUBLE_INDEXING_TYPES:
            heap = IndexedDoubleProperties;

        case ALL_INT32_INDEXING_TYPES:
            heap = IndexedInt32Properties;

        case ALL_CONTIGUOUS_INDEXING_TYPES:
            heap = IndexedContiguousProperties;

        JSValue* data = graph.m_codeBlock->constantBuffer(node->startConstant());
        if (numElements < graph.m_uint32ValuesInUse.size()) {
            for (unsigned index = 0; index < numElements; ++index) {
                def(HeapLocation(IndexedPropertyLoc, heap, node, LazyNode(graph.freeze(jsNumber(index)))),
                    LazyNode(graph.freeze(data[index]), op));

            for (uint32_t index : graph.m_uint32ValuesInUse) {
                if (index >= numElements)
                    continue;
                def(HeapLocation(IndexedPropertyLoc, heap, node, LazyNode(graph.freeze(jsNumber(index)))),
                    LazyNode(graph.freeze(data[index]), op));

    case NewStringObject:
    case PhantomNewObject:
    case MaterializeNewObject:
    case PhantomNewFunction:
    case PhantomCreateActivation:
    case MaterializeCreateActivation:
        read(HeapObjectCount);
        write(HeapObjectCount);

        if (node->castOperand<FunctionExecutable*>()->singletonFunction()->isStillValid())
            write(Watchpoint_fire);
        read(HeapObjectCount);
        write(HeapObjectCount);

        if (node->arrayMode().isOutOfBounds()) {
        def(PureValue(node));

    case CompareGreaterEq:
        if (!node->isBinaryUseKind(UntypedUse)) {
            def(PureValue(node));

    case CallStringConstructor:
        switch (node->child1().useKind()) {
        case StringObjectUse:
        case StringOrStringObjectUse:
            // These don't def a pure value, unfortunately. I'll avoid load-eliminating these for
            // now.

            RELEASE_ASSERT_NOT_REACHED();

    case ThrowReferenceError:

        read(HeapObjectCount);
        write(HeapObjectCount);

    case CountExecution:
    case CheckWatchdogTimer:
        read(InternalState);
        write(InternalState);

        RELEASE_ASSERT_NOT_REACHED();

        DFG_CRASH(graph, node, toCString("Unrecognized node type: ", Graph::opName(node->op())).data());
    }
}
class NoOpClobberize {
public:
    NoOpClobberize() { }
    template<typename... T>
    void operator()(T...) const { }
};
class CheckClobberize {
public:
    CheckClobberize()
        : m_result(false)
    {
    }

    template<typename... T>
    void operator()(T...) const { m_result = true; }

    bool result() const { return m_result; }

private:
    mutable bool m_result;
};
bool doesWrites(Graph&, Node*);
class AbstractHeapOverlaps {
public:
    AbstractHeapOverlaps(AbstractHeap heap)
        : m_heap(heap)
        , m_result(false)
    {
    }

    void operator()(AbstractHeap otherHeap) const
    {
        if (m_result)
            return;
        m_result = m_heap.overlaps(otherHeap);
    }

    bool result() const { return m_result; }

private:
    AbstractHeap m_heap;
    mutable bool m_result;
};
bool accessesOverlap(Graph&, Node*, AbstractHeap);
bool writesOverlap(Graph&, Node*, AbstractHeap);

bool clobbersHeap(Graph&, Node*);
// We would have used bind() for these, but because of the overloading that we are doing,
// it's quite a bit clearer to just write this out the traditional way.
template<typename T>
class ReadMethodClobberize {
public:
    ReadMethodClobberize(T& value)
        : m_value(value)
    {
    }

    void operator()(AbstractHeap heap) const
    {
        m_value.read(heap);
    }
private:
    T& m_value;
};
template<typename T>
class WriteMethodClobberize {
public:
    WriteMethodClobberize(T& value)
        : m_value(value)
    {
    }

    void operator()(AbstractHeap heap) const
    {
        m_value.write(heap);
    }
private:
    T& m_value;
};
template<typename T>
class DefMethodClobberize {
public:
    DefMethodClobberize(T& value)
        : m_value(value)
    {
    }

    void operator()(PureValue value) const
    {
        m_value.def(value);
    }

    void operator()(HeapLocation location, LazyNode node) const
    {
        m_value.def(location, node);
    }
private:
    T& m_value;
};
template<typename Adaptor>
void clobberize(Graph& graph, Node* node, Adaptor& adaptor)
{
    ReadMethodClobberize<Adaptor> read(adaptor);
    WriteMethodClobberize<Adaptor> write(adaptor);
    DefMethodClobberize<Adaptor> def(adaptor);
    clobberize(graph, node, read, write, def);
}
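
// For illustration only, a query such as doesWrites() above can be assembled from these helpers.
// This is a sketch of the intended pattern (the actual definitions live in DFGClobberize.cpp and
// may differ in detail):
//
//     bool doesWrites(Graph& graph, Node* node)
//     {
//         NoOpClobberize read;
//         CheckClobberize write;
//         NoOpClobberize def;
//         clobberize(graph, node, read, write, def);
//         return write.result();
//     }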
} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)

#endif // DFGClobberize_h