/*
 * Copyright (C) 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JITInlineCacheGenerator.h"

#if ENABLE(JIT)

#include "CodeBlock.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"

namespace JSC {

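// Used when the generator is constructed without a CodeBlock: we still need somewhere to
// record stub metadata, so this shared, intentionally leaked StructureStubInfo serves as a
// throwaway sink (hence "garbage").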
static StructureStubInfo* garbageStubInfo()
{
    static StructureStubInfo* stubInfo = new StructureStubInfo();
    return stubInfo;
}

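// Every generator allocates its StructureStubInfo up front, attaching it to the CodeBlock
// when one exists and falling back to the shared garbage instance otherwise.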
JITInlineCacheGenerator::JITInlineCacheGenerator(CodeBlock* codeBlock, CodeOrigin codeOrigin)
    : m_codeBlock(codeBlock)
{
    m_stubInfo = m_codeBlock ? m_codeBlock->addStubInfo() : garbageStubInfo();
    m_stubInfo->codeOrigin = codeOrigin;
}

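// Record the register assignment in the stub info so that later repatching knows which
// registers hold the base cell and the (possibly two-part) value.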
JITByIdGenerator::JITByIdGenerator(
    CodeBlock* codeBlock, CodeOrigin codeOrigin, const RegisterSet& usedRegisters,
    JSValueRegs base, JSValueRegs value, SpillRegistersMode spillMode)
    : JITInlineCacheGenerator(codeBlock, codeOrigin)
    , m_base(base)
    , m_value(value)
{
    m_stubInfo->patch.spillMode = spillMode;
    m_stubInfo->patch.usedRegisters = usedRegisters;

    // This is a convenience - in cases where the only registers you're using are base/value,
    // it allows you to pass RegisterSet() as the usedRegisters argument.
    m_stubInfo->patch.usedRegisters.set(base);
    m_stubInfo->patch.usedRegisters.set(value);

    m_stubInfo->patch.baseGPR = static_cast<int8_t>(base.payloadGPR());
    m_stubInfo->patch.valueGPR = static_cast<int8_t>(value.payloadGPR());
#if USE(JSVALUE32_64)
    m_stubInfo->patch.valueTagGPR = static_cast<int8_t>(value.tagGPR());
#endif
}

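// Once both code paths have been linked, record the final code locations as deltas from the
// slow-path call's return address; the repatching code uses these offsets to rewrite the
// structure check, the load/store, and the jump targets in place.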
void JITByIdGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
{
    CodeLocationCall callReturnLocation = slowPath.locationOf(m_call);
    m_stubInfo->callReturnLocation = callReturnLocation;
    m_stubInfo->patch.deltaCheckImmToCall = MacroAssembler::differenceBetweenCodePtr(
        fastPath.locationOf(m_structureImm), callReturnLocation);
    m_stubInfo->patch.deltaCallToJump = MacroAssembler::differenceBetweenCodePtr(
        callReturnLocation, fastPath.locationOf(m_structureCheck));
#if USE(JSVALUE64)
    m_stubInfo->patch.deltaCallToLoadOrStore = MacroAssembler::differenceBetweenCodePtr(
        callReturnLocation, fastPath.locationOf(m_loadOrStore));
#else
    m_stubInfo->patch.deltaCallToTagLoadOrStore = MacroAssembler::differenceBetweenCodePtr(
        callReturnLocation, fastPath.locationOf(m_tagLoadOrStore));
    m_stubInfo->patch.deltaCallToPayloadLoadOrStore = MacroAssembler::differenceBetweenCodePtr(
        callReturnLocation, fastPath.locationOf(m_loadOrStore));
#endif
    m_stubInfo->patch.deltaCallToSlowCase = MacroAssembler::differenceBetweenCodePtr(
        callReturnLocation, slowPath.locationOf(m_slowPathBegin));
    m_stubInfo->patch.deltaCallToDone = MacroAssembler::differenceBetweenCodePtr(
        callReturnLocation, fastPath.locationOf(m_done));
    m_stubInfo->patch.deltaCallToStorageLoad = MacroAssembler::differenceBetweenCodePtr(
        callReturnLocation, fastPath.locationOf(m_propertyStorageLoad));
}

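// Convenience overload for the common case where the fast path and the slow path live in
// the same LinkBuffer.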
void JITByIdGenerator::finalize(LinkBuffer& linkBuffer)
{
    finalize(linkBuffer, linkBuffer);
}

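// Emit the patchable structure check (initially against a bogus structure ID of 0, so the
// fast path always bails to the slow path until it is repatched) followed by a convertible
// load of the object's butterfly into the given register.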
void JITByIdGenerator::generateFastPathChecks(MacroAssembler& jit, GPRReg butterfly)
{
    m_structureCheck = jit.patchableBranch32WithPatch(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(m_base.payloadGPR(), JSCell::structureIDOffset()),
        m_structureImm, MacroAssembler::TrustedImm32(0));

    m_propertyStorageLoad = jit.convertibleLoadPtr(
        MacroAssembler::Address(m_base.payloadGPR(), JSObject::butterflyOffset()), butterfly);
}

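// A get reuses the value's payload register to hold the butterfly; the assertion below
// additionally requires that the base not alias the value's tag half.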
JITGetByIdGenerator::JITGetByIdGenerator(
    CodeBlock* codeBlock, CodeOrigin codeOrigin, const RegisterSet& usedRegisters,
    JSValueRegs base, JSValueRegs value, SpillRegistersMode spillMode)
    : JITByIdGenerator(codeBlock, codeOrigin, usedRegisters, base, value, spillMode)
{
    RELEASE_ASSERT(base.payloadGPR() != value.tagGPR());
}

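// The fast path loads through the butterfly register (which doubles as the result payload
// register) from a placeholder offset of 0; the offset only becomes meaningful once the
// stub is repatched with a real property offset.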
void JITGetByIdGenerator::generateFastPath(MacroAssembler& jit)
{
    generateFastPathChecks(jit, m_value.payloadGPR());

#if USE(JSVALUE64)
    m_loadOrStore = jit.load64WithCompactAddressOffsetPatch(
        MacroAssembler::Address(m_value.payloadGPR(), 0), m_value.payloadGPR()).label();
#else
    m_tagLoadOrStore = jit.load32WithCompactAddressOffsetPatch(
        MacroAssembler::Address(m_value.payloadGPR(), 0), m_value.tagGPR()).label();
    m_loadOrStore = jit.load32WithCompactAddressOffsetPatch(
        MacroAssembler::Address(m_value.payloadGPR(), 0), m_value.payloadGPR()).label();
#endif

    m_done = jit.label();
}

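// A put needs a genuinely free register to hold the butterfly, so the caller supplies an
// explicit scratch; clearing it from usedRegisters presumably signals that the stub may
// clobber it without preserving its contents.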
JITPutByIdGenerator::JITPutByIdGenerator(
    CodeBlock* codeBlock, CodeOrigin codeOrigin, const RegisterSet& usedRegisters,
    JSValueRegs base, JSValueRegs value, GPRReg scratch, SpillRegistersMode spillMode,
    ECMAMode ecmaMode, PutKind putKind)
    : JITByIdGenerator(codeBlock, codeOrigin, usedRegisters, base, value, spillMode)
    , m_scratch(scratch)
    , m_ecmaMode(ecmaMode)
    , m_putKind(putKind)
{
    m_stubInfo->patch.usedRegisters.clear(scratch);
}

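// Mirrors the get fast path, but stores through the dedicated scratch register instead of
// reusing the value registers, and uses full (non-compact) patchable address offsets.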
void JITPutByIdGenerator::generateFastPath(MacroAssembler& jit)
{
    generateFastPathChecks(jit, m_scratch);

#if USE(JSVALUE64)
    m_loadOrStore = jit.store64WithAddressOffsetPatch(
        m_value.payloadGPR(), MacroAssembler::Address(m_scratch, 0)).label();
#else
    m_tagLoadOrStore = jit.store32WithAddressOffsetPatch(
        m_value.tagGPR(), MacroAssembler::Address(m_scratch, 0)).label();
    m_loadOrStore = jit.store32WithAddressOffsetPatch(
        m_value.payloadGPR(), MacroAssembler::Address(m_scratch, 0)).label();
#endif

    m_done = jit.label();
}

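// The slow-path operation to call depends on both the ECMAScript strictness mode and
// whether this is a direct put or an ordinary assignment.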
V_JITOperation_ESsiJJI JITPutByIdGenerator::slowPathFunction()
{
    if (m_ecmaMode == StrictMode) {
        if (m_putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (m_putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

} // namespace JSC

#endif // ENABLE(JIT)