// Source: apple/javascriptcore.git (JavaScriptCore-7601.1.46.3), dfg/DFGPreciseLocalClobberize.h
// Retrieved via git.saurik.com gitweb blob view.
1 /*
2 * Copyright (C) 2014, 2015 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #ifndef DFGPreciseLocalClobberize_h
27 #define DFGPreciseLocalClobberize_h
28
29 #if ENABLE(DFG_JIT)
30
31 #include "DFGClobberize.h"
32 #include "DFGMayExit.h"
33
34 namespace JSC { namespace DFG {
35
36 template<typename ReadFunctor, typename WriteFunctor, typename DefFunctor>
37 class PreciseLocalClobberizeAdaptor {
38 public:
39 PreciseLocalClobberizeAdaptor(
40 Graph& graph, Node* node,
41 const ReadFunctor& read, const WriteFunctor& write, const DefFunctor& def)
42 : m_graph(graph)
43 , m_node(node)
44 , m_read(read)
45 , m_write(write)
46 , m_def(def)
47 {
48 }
49
50 void read(AbstractHeap heap)
51 {
52 if (heap.kind() == Stack) {
53 if (heap.payload().isTop()) {
54 readTop();
55 return;
56 }
57
58 callIfAppropriate(m_read, VirtualRegister(heap.payload().value32()));
59 return;
60 }
61
62 if (heap.overlaps(Stack)) {
63 readTop();
64 return;
65 }
66 }
67
68 void write(AbstractHeap heap)
69 {
70 // We expect stack writes to already be precisely characterized by DFG::clobberize().
71 if (heap.kind() == Stack) {
72 RELEASE_ASSERT(!heap.payload().isTop());
73 callIfAppropriate(m_write, VirtualRegister(heap.payload().value32()));
74 return;
75 }
76
77 RELEASE_ASSERT(!heap.overlaps(Stack));
78 }
79
80 void def(PureValue)
81 {
82 // PureValue defs never have anything to do with locals, so ignore this.
83 }
84
85 void def(HeapLocation location, LazyNode node)
86 {
87 if (location.kind() != StackLoc)
88 return;
89
90 RELEASE_ASSERT(location.heap().kind() == Stack);
91
92 m_def(VirtualRegister(location.heap().payload().value32()), node);
93 }
94
95 private:
96 template<typename Functor>
97 void callIfAppropriate(const Functor& functor, VirtualRegister operand)
98 {
99 if (operand.isLocal() && static_cast<unsigned>(operand.toLocal()) >= m_graph.block(0)->variablesAtHead.numberOfLocals())
100 return;
101
102 if (operand.isArgument() && !operand.isHeader() && static_cast<unsigned>(operand.toArgument()) >= m_graph.block(0)->variablesAtHead.numberOfArguments())
103 return;
104
105 functor(operand);
106 }
107
108 void readTop()
109 {
110 switch (m_node->op()) {
111 case GetMyArgumentByVal:
112 case ForwardVarargs:
113 case CallForwardVarargs:
114 case ConstructForwardVarargs: {
115 InlineCallFrame* inlineCallFrame = m_node->child1()->origin.semantic.inlineCallFrame;
116 if (!inlineCallFrame) {
117 // Read the outermost arguments and argument count.
118 for (unsigned i = m_graph.m_codeBlock->numParameters(); i-- > 1;)
119 m_read(virtualRegisterForArgument(i));
120 m_read(VirtualRegister(JSStack::ArgumentCount));
121 break;
122 }
123
124 for (unsigned i = inlineCallFrame->arguments.size(); i-- > 1;)
125 m_read(VirtualRegister(inlineCallFrame->stackOffset + virtualRegisterForArgument(i).offset()));
126 if (inlineCallFrame->isVarargs())
127 m_read(VirtualRegister(inlineCallFrame->stackOffset + JSStack::ArgumentCount));
128 break;
129 }
130
131 default: {
132 // All of the outermost arguments, except this, are definitely read.
133 for (unsigned i = m_graph.m_codeBlock->numParameters(); i-- > 1;)
134 m_read(virtualRegisterForArgument(i));
135
136 // The stack header is read.
137 for (unsigned i = 0; i < JSStack::ThisArgument; ++i)
138 m_read(VirtualRegister(i));
139
140 // Read all of the inline arguments and call frame headers that we didn't already capture.
141 for (InlineCallFrame* inlineCallFrame = m_node->origin.semantic.inlineCallFrame; inlineCallFrame; inlineCallFrame = inlineCallFrame->caller.inlineCallFrame) {
142 for (unsigned i = inlineCallFrame->arguments.size(); i-- > 1;)
143 m_read(VirtualRegister(inlineCallFrame->stackOffset + virtualRegisterForArgument(i).offset()));
144 if (inlineCallFrame->isClosureCall)
145 m_read(VirtualRegister(inlineCallFrame->stackOffset + JSStack::Callee));
146 if (inlineCallFrame->isVarargs())
147 m_read(VirtualRegister(inlineCallFrame->stackOffset + JSStack::ArgumentCount));
148 }
149 break;
150 } }
151 }
152
153 Graph& m_graph;
154 Node* m_node;
155 const ReadFunctor& m_read;
156 const WriteFunctor& m_write;
157 const DefFunctor& m_def;
158 };
159
160 template<typename ReadFunctor, typename WriteFunctor, typename DefFunctor>
161 void preciseLocalClobberize(
162 Graph& graph, Node* node,
163 const ReadFunctor& read, const WriteFunctor& write, const DefFunctor& def)
164 {
165 PreciseLocalClobberizeAdaptor<ReadFunctor, WriteFunctor, DefFunctor>
166 adaptor(graph, node, read, write, def);
167 clobberize(graph, node, adaptor);
168 }
169
170 } } // namespace JSC::DFG
171
172 #endif // ENABLE(DFG_JIT)
173
174 #endif // DFGPreciseLocalClobberize_h
175