/*
 * Copyright (C) 2008, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE64)
#include "JIT.h"

#include "Arguments.h"
#include "CodeBlock.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include "StackAlignment.h"
#include "ThunkGenerators.h"
#include <wtf/StringPrintStream.h>

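// Baseline (non-optimizing) JIT code generation for the call opcodes on the
// 64-bit value representation (JSVALUE64): op_call, op_call_eval, op_construct,
// and their varargs variants.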
namespace JSC {

void JIT::emitPutCallResult(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitValueProfilingSite();
    emitPutVirtualRegister(dst);
}

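// Sets up the outgoing stack frame for op_call_varargs / op_construct_varargs.
// Fast path: when this code block forwards its own 'arguments' register and the
// arguments object has not been materialized, the arguments are copied directly
// from the caller's frame. Otherwise the frame is built by calling out to
// operationSizeFrameForVarargs and operationLoadVarargs.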
void JIT::compileLoadVarargs(Instruction* instruction)
{
    int thisValue = instruction[3].u.operand;
    int arguments = instruction[4].u.operand;
    int firstFreeRegister = instruction[5].u.operand;
    int firstVarArgOffset = instruction[6].u.operand;

    JumpList slowCase;
    JumpList end;
    bool canOptimize = m_codeBlock->usesArguments()
        && arguments == m_codeBlock->argumentsRegister().offset()
        && !m_codeBlock->symbolTable()->slowArguments();

    if (canOptimize) {
        emitGetVirtualRegister(arguments, regT0);
        slowCase.append(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(JSValue()))));

        emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
        if (firstVarArgOffset) {
            Jump sufficientArguments = branch32(GreaterThan, regT0, TrustedImm32(firstVarArgOffset + 1));
            move(TrustedImm32(1), regT0);
            Jump endVarArgs = jump();
            sufficientArguments.link(this);
            sub32(TrustedImm32(firstVarArgOffset), regT0);
            endVarArgs.link(this);
        }
        slowCase.append(branch32(Above, regT0, TrustedImm32(Arguments::MaxArguments + 1)));
        // regT0: argumentCountIncludingThis
        move(regT0, regT1);
        add64(TrustedImm32(-firstFreeRegister + JSStack::CallFrameHeaderSize), regT1);
        // regT1 now has the required frame size in Register units.
        // Round regT1 to the next multiple of stackAlignmentRegisters().
        add64(TrustedImm32(stackAlignmentRegisters() - 1), regT1);
        and64(TrustedImm32(~(stackAlignmentRegisters() - 1)), regT1);

        neg64(regT1);
        lshift64(TrustedImm32(3), regT1);
        addPtr(callFrameRegister, regT1);
        // regT1: newCallFrame

        slowCase.append(branchPtr(Above, AbsoluteAddress(m_vm->addressOfStackLimit()), regT1));

        // Initialize ArgumentCount.
        store32(regT0, Address(regT1, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));

        // Initialize 'this'.
        emitGetVirtualRegister(thisValue, regT2);
        store64(regT2, Address(regT1, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));

        // Copy arguments.
        signExtend32ToPtr(regT0, regT0);
        end.append(branchSub64(Zero, TrustedImm32(1), regT0));
        // regT0: argumentCount

        Label copyLoop = label();
        load64(BaseIndex(callFrameRegister, regT0, TimesEight, (CallFrame::thisArgumentOffset() + firstVarArgOffset) * static_cast<int>(sizeof(Register))), regT2);
        store64(regT2, BaseIndex(regT1, regT0, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));
        branchSub64(NonZero, TrustedImm32(1), regT0).linkTo(copyLoop, this);

        end.append(jump());
    }

    if (canOptimize)
        slowCase.link(this);

    emitGetVirtualRegister(arguments, regT1);
    callOperation(operationSizeFrameForVarargs, regT1, firstFreeRegister, firstVarArgOffset);
    move(returnValueGPR, stackPointerRegister);
    emitGetVirtualRegister(thisValue, regT1);
    emitGetVirtualRegister(arguments, regT2);
    callOperation(operationLoadVarargs, returnValueGPR, regT1, regT2, firstVarArgOffset);
    move(returnValueGPR, regT1);

    if (canOptimize)
        end.link(this);

    addPtr(TrustedImm32(sizeof(CallerFrameAndPC)), regT1, stackPointerRegister);
}

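// Emits an eval call: the outgoing frame is handed to operationCallEval. If the
// operation handled the eval, its result is profiled and stored; if it returned
// the empty JSValue, the callee was not a genuine eval, and the slow case
// redispatches the call as an ordinary virtual call.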
void JIT::compileCallEval(Instruction* instruction)
{
    addPtr(TrustedImm32(-static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC))), stackPointerRegister, regT1);
    callOperationNoExceptionCheck(operationCallEval, regT1);

    Jump noException = emitExceptionCheck(InvertedExceptionCheck);
    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    exceptionCheck(jump());

    noException.link(this);
    addSlowCase(branch64(Equal, regT0, TrustedImm64(JSValue::encode(JSValue()))));

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

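// Slow case for op_call_eval: operationCallEval declined to handle the call, so
// reload the callee from the outgoing frame and dispatch it through the virtual
// call thunk like any other call.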
void JIT::compileCallEvalSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    load64(Address(stackPointerRegister, sizeof(Register) * JSStack::Callee - sizeof(CallerFrameAndPC)), regT0);
    move(TrustedImmPtr(&CallLinkInfo::dummy()), regT2);
    emitNakedCall(m_vm->getCTIStub(virtualCallThunkGenerator).code());
    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

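// Shared hot-path emitter for op_call, op_call_eval, op_construct and the
// varargs variants: it builds the outgoing call frame, stores the callee, and
// emits the patchable callee check plus the near call that call linking later
// repatches to point at the target function.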
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[2].u.operand;

    /* Caller always:
        - Updates callFrameRegister to callee callFrame.
        - Initializes ArgumentCount; CallerFrame; Callee.

       For a JS call:
        - Caller initializes ScopeChain.
        - Callee initializes ReturnPC; CodeBlock.
        - Callee restores callFrameRegister before return.

       For a non-JS call:
        - Caller initializes ScopeChain; ReturnPC; CodeBlock.
        - Caller restores callFrameRegister after return.
    */
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct), call_and_construct_opcodes_must_be_same_length);
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_varargs), call_and_call_varargs_opcodes_must_be_same_length);
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct_varargs), call_and_construct_varargs_opcodes_must_be_same_length);
    if (opcodeID == op_call_varargs || opcodeID == op_construct_varargs)
        compileLoadVarargs(instruction);
    else {
        int argCount = instruction[3].u.operand;
        int registerOffset = -instruction[4].u.operand;

        if (opcodeID == op_call && shouldEmitProfiling()) {
            emitGetVirtualRegister(registerOffset + CallFrame::argumentOffsetIncludingThis(0), regT0);
            Jump done = emitJumpIfNotJSCell(regT0);
            load32(Address(regT0, JSCell::structureIDOffset()), regT0);
            store32(regT0, instruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile->addressOfLastSeenStructureID());
            done.link(this);
        }

        addPtr(TrustedImm32(registerOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), callFrameRegister, stackPointerRegister);
        store32(TrustedImm32(argCount), Address(stackPointerRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset - sizeof(CallerFrameAndPC)));
    } // SP holds newCallFrame + sizeof(CallerFrameAndPC), with ArgumentCount initialized.

    uint32_t bytecodeOffset = instruction - m_codeBlock->instructions().begin();
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(bytecodeOffset);
    store32(TrustedImm32(locationBits), Address(callFrameRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + TagOffset));
    emitGetVirtualRegister(callee, regT0); // regT0 holds callee.

    store64(regT0, Address(stackPointerRegister, JSStack::Callee * static_cast<int>(sizeof(Register)) - sizeof(CallerFrameAndPC)));

    if (opcodeID == op_call_eval) {
        compileCallEval(instruction);
        return;
    }

    DataLabelPtr addressOfLinkedFunctionCheck;
    Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
    addSlowCase(slowCase);

    ASSERT(m_callCompilationInfo.size() == callLinkInfoIndex);
    CallLinkInfo* info = m_codeBlock->addCallLinkInfo();
    info->callType = CallLinkInfo::callTypeFor(opcodeID);
    info->codeOrigin = CodeOrigin(m_bytecodeOffset);
    info->calleeGPR = regT0;
    m_callCompilationInfo.append(CallCompilationInfo());
    m_callCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callCompilationInfo[callLinkInfoIndex].callLinkInfo = info;

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT2);
    store64(regT2, Address(MacroAssembler::stackPointerRegister, JSStack::ScopeChain * sizeof(Register) - sizeof(CallerFrameAndPC)));

    m_callCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

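// Shared slow-path emitter for the call opcodes: the patchable callee check
// failed (or the call has not been linked yet), so route the call through the
// appropriate link-call thunk, which links the CallLinkInfo on first use.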
void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    if (opcodeID == op_call_eval) {
        compileCallEvalSlowCase(instruction, iter);
        return;
    }

    linkSlowCase(iter);

    ThunkGenerator generator = linkThunkGeneratorFor(
        (opcodeID == op_construct || opcodeID == op_construct_varargs) ? CodeForConstruct : CodeForCall,
        RegisterPreservationNotRequired);

    move(TrustedImmPtr(m_callCompilationInfo[callLinkInfoIndex].callLinkInfo), regT2);
    m_callCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(generator).code());

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

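// Generates a closure call stub: a fast path that accepts any callee cell with
// the expected structure and executable (i.e. any closure of the same function),
// jumps to the known target code, and falls back to the virtual call thunk on a
// mismatch. The finished stub is patched in over the hot path's callee check.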
void JIT::privateCompileClosureCall(CallLinkInfo* callLinkInfo, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
{
    JumpList slowCases;

    slowCases.append(branchTestPtr(NonZero, regT0, tagMaskRegister));
    slowCases.append(branchStructure(NotEqual, Address(regT0, JSCell::structureIDOffset()), expectedStructure));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSFunction::offsetOfExecutable()), TrustedImmPtr(expectedExecutable)));

    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ScopeChain);

    Call call = nearCall();
    Jump done = jump();

    slowCases.link(this);
    move(TrustedImmPtr(callLinkInfo->callReturnLocation.executableAddress()), regT2);
    restoreReturnAddressBeforeReturn(regT2);
    Jump slow = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallThunkGenerator).code()));

    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline closure call stub for %s, return point %p, target %p (%s)",
                toCString(*m_codeBlock).data(),
                callLinkInfo->hotPathOther.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(),
                toCString(pointerDump(calleeCodeBlock)).data())),
        *m_vm, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
        callLinkInfo->codeOrigin));

    RepatchBuffer repatchBuffer(m_codeBlock);

    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallThunkGenerator).code());

    callLinkInfo->stub = stubRoutine.release();
}

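// Per-opcode entry points: each forwards to compileOpCall / compileOpCallSlowCase.
// Every opcode advances the call link info index except op_call_eval, which never
// allocates a CallLinkInfo on its fast path.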
void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_construct_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

} // namespace JSC

#endif // USE(JSVALUE64)
#endif // ENABLE(JIT)