/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlineMethods_h
#define JITInlineMethods_h

#if ENABLE(JIT)

namespace JSC {

/* Deprecated: Please use JITStubCall instead. */
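
// Stub arguments are passed in the JITStackFrame on the machine stack: each
// JSValue-sized argument spans sizeof(JSValue) / sizeof(void*) machine words,
// starting JITSTACKFRAME_ARGS_INDEX words above the stack pointer, which is why
// peek() can fetch them by word offset.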
ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    peek(dst, argumentStackOffset);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}
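
// Helpers that write into the CallFrame header. Where a value is stored as a
// (tag, payload) pair, the typed variants below also write the matching tag word;
// the plain pointer store leaves the tag alone.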
ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(from, payloadFor(entry, callFrameRegister));
}

ALWAYS_INLINE void JIT::emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(JSValue::CellTag), tagFor(entry, callFrameRegister));
#endif
    storePtr(from, payloadFor(entry, callFrameRegister));
}

ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
}

ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(TrustedImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
}
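
// On JSVALUE64 the header loads below may clobber cachedResultRegister (the 'to'
// register can be the cache register), so the last-result cache is invalidated.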
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    killLastResultRegister();
#endif
}
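
// Fast path for fetching the single character of a one-character JSString.
// Bails out to 'failures' unless src is a JSString (vptr check), is not a rope
// (m_fiberCount must be zero), and has length 1; it then loads the StringImpl,
// its character data pointer, and finally the 16-bit character itself.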
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchPtr(NotEqual, Address(src), TrustedImmPtr(m_globalData->jsStringVPtr)));
    failures.append(branchTest32(NonZero, Address(src, OBJECT_OFFSETOF(JSString, m_fiberCount))));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    loadPtr(MacroAssembler::Address(dst, ThunkHelpers::stringImplDataOffset()), dst);
    load16(MacroAssembler::Address(dst, 0), dst);
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    killLastResultRegister();
#endif
}
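
// Emits an unlinked near call and records it in m_calls together with the current
// bytecode offset and the target, so the link phase can bind the call to the
// stub's executable address.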
ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}
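
// Returns true if the current bytecode offset is a jump target. The jump-target
// list is sorted, so the scan resumes from m_jumpTargetsPosition rather than
// restarting each time.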
ALWAYS_INLINE bool JIT::atJumpTarget()
{
    while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeOffset) {
        if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeOffset)
            return true;
        ++m_jumpTargetsPosition;
    }
    return false;
}
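
// On assemblers with a constant pool (ARM traditional, SH4) the pool may be flushed
// between any two instructions. An "uninterrupted sequence" reserves instruction and
// constant space up front so that no pool dump can land inside code that will later
// be repatched.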
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL

ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
{
    JSInterfaceJIT::beginUninterruptedSequence();
#if CPU(ARM_TRADITIONAL)
#ifndef NDEBUG
    // Ensure the label after the sequence can also fit.
    insnSpace += sizeof(ARMWord);
    constSpace += sizeof(uint64_t);
#endif

    ensureSpace(insnSpace, constSpace);

#elif CPU(SH4)
#ifndef NDEBUG
    insnSpace += sizeof(SH4Word);
    constSpace += sizeof(uint64_t);
#endif

    m_assembler.ensureSpace(insnSpace + m_assembler.maxInstructionSize + 2, constSpace + 8);
#endif

#ifndef NDEBUG
    m_uninterruptedInstructionSequenceBegin = label();
    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
#endif
}

ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace, int dst)
{
    UNUSED_PARAM(dst);
    /* There are several cases where the uninterrupted sequence is larger than the
     * maximum offset required for patching that same sequence. E.g.: if the last
     * macroassembler instruction in an uninterrupted sequence is a stub call, it
     * emits store instruction(s) that should not be included when calculating the
     * length of the uninterrupted sequence. So insnSpace and constSpace are upper
     * limits rather than hard limits.
     */
#if CPU(SH4)
    if ((dst > 15) || (dst < -16)) {
        insnSpace += 8;
        constSpace += 2;
    }

    if (((dst >= -16) && (dst < 0)) || ((dst > 7) && (dst <= 15)))
        insnSpace += 8;
#endif
    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) <= insnSpace);
    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin <= constSpace);
    JSInterfaceJIT::endUninterruptedSequence();
}

#endif
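
// The location of the return address is CPU-specific: the link register on ARM and
// MIPS, the PR register (stspr/ldspr) on SH4, and a slot on the machine stack on
// x86/x86-64.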
#if CPU(ARM)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(linkRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, linkRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, linkRegister);
}

#elif CPU(SH4)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    m_assembler.stspr(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    m_assembler.ldspr(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtrLinkReg(address);
}

#elif CPU(MIPS)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(returnAddressRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, returnAddressRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, returnAddressRegister);
}

#else // CPU(X86) || CPU(X86_64)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    pop(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    push(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    push(address);
}

#endif
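
// Stub calls receive a pointer to the JITStackFrame as their first argument. Point
// the first argument register at the stack and stash the current CallFrame into the
// frame's callFrame slot so the stub can recover it.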
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    move(stackPointerRegister, firstArgumentRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
}

ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if CPU(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(TrustedImm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif CPU(ARM)
    move(stackPointerRegister, firstArgumentRegister);
#elif CPU(SH4)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, JSCell::structureOffset()), TrustedImmPtr(structure));
}

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}
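
// Slow-case jumps are recorded against the current bytecode offset; the cold-path
// generator later links them, which keeps each opcode's fast and slow paths in step.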
ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif
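
// Sampling counters are 64 bits wide. On 32-bit little-endian targets the increment
// is split into an add to the low word followed by an add-with-carry into the high
// word at addressOfCounter() + 4.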
#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
{
#if CPU(X86_64) // Or any other 64-bit platform.
    addPtr(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
#elif CPU(X86) // Or any other little-endian 32-bit platform.
    intptr_t hiWord = reinterpret_cast<intptr_t>(counter.addressOfCounter()) + sizeof(int32_t);
    add32(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
    addWithCarry32(TrustedImm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
#else
#error "SAMPLING_COUNTERS not implemented on this platform."
#endif
}
#endif
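
// In the sampling store helpers below, x86-64 first materializes the slot address
// in ecx, presumably because a 64-bit absolute address cannot be encoded directly
// as a store destination on that architecture.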
#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}
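
// Under JSVALUE32_64 a JSValue is a 32-bit tag / 32-bit payload pair, accessed
// word-by-word through tagFor()/payloadFor(). The emitLoad* helpers consult the
// register map (see map()/unmap() below) before touching memory.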
#if USE(JSVALUE32_64)

inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

inline void JIT::emitLoadPayload(unsigned index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    ASSERT(tag != payload);

    if (base == callFrameRegister) {
        ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}
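
// Loads two operands, reading a mapped operand first so that the other load cannot
// clobber its cached tag/payload registers.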
inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2)
{
    if (isMapped(index1)) {
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}
ba379fdc 455inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
9dae56ea 456{
ba379fdc 457 if (m_codeBlock->isConstantRegisterIndex(index)) {
14957cd0 458 WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
ba379fdc
A
459 loadDouble(&inConstantPool, value);
460 } else
461 loadDouble(addressFor(index), value);
9dae56ea
A
462}
463
ba379fdc 464inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
9dae56ea 465{
ba379fdc 466 if (m_codeBlock->isConstantRegisterIndex(index)) {
14957cd0 467 WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
ba379fdc
A
468 char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
469 convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
470 } else
471 convertInt32ToDouble(payloadFor(index), value);
9dae56ea
A
472}
473
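
// The emitStore* helpers write the payload word and, unless the caller already
// knows the slot holds the right tag (indexIsInt32/indexIsCell/indexIsBool), the
// tag word as well.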
inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(unsigned index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(unsigned index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}
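
// A one-entry cache maps a virtual register to the machine registers currently
// holding its tag and payload. The mapping is only valid within a single bytecode
// op and is never established at a jump target (see isLabeled()), since register
// contents cannot be assumed across control-flow merges.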
inline bool JIT::isLabeled(unsigned bytecodeOffset)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeOffset)
            return true;
        if (jumpTarget > bytecodeOffset)
            return false;
    }
    return false;
}

inline void JIT::map(unsigned bytecodeOffset, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeOffset))
        return;

    m_mappedBytecodeOffset = bytecodeOffset;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;
}

inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

inline void JIT::unmap()
{
    m_mappedBytecodeOffset = (unsigned)-1;
    m_mappedVirtualRegisterIndex = (unsigned)-1;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}

inline bool JIT::isMapped(unsigned virtualRegisterIndex)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}

inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        linkSlowCase(iter);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

#else // USE(JSVALUE32_64)
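
// On JSVALUE64 a JSValue fits in one pointer-sized register. The JIT caches the
// result of the last bytecode op in cachedResultRegister and records the virtual
// register it mirrors in m_lastResultBytecodeRegister; killLastResultRegister()
// invalidates that cache.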
ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}

// Loads the value of a virtual register from the register file into a hardware register.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        move(ImmPtr(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src) && !atJumpTarget()) {
        // The value we want is already in cachedResultRegister (eax on x86).
        if (dst != cachedResultRegister)
            move(cachedResultRegister, dst);
        killLastResultRegister();
        return;
    }

    loadPtr(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}
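
// When loading two virtual registers, load the cached one first so the last-result
// cache is still valid when it is consulted.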
ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? static_cast<int>(dst) : std::numeric_limits<int>::max();
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    storePtr(TrustedImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}
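
// In the JSVALUE64 encoding a cell pointer has all tag bits clear, so testing
// against tagMaskRegister distinguishes cells from immediates.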
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(Zero, reg, tagMaskRegister);
#else
    return branchTest32(Zero, reg, TrustedImm32(TagMask));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    orPtr(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(NonZero, reg, tagMaskRegister);
#else
    return branchTest32(NonZero, reg, TrustedImm32(TagMask));
#endif
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

#if USE(JSVALUE64)

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
#endif
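
// In this encoding, int32 immediates carry the TagTypeNumber pattern in their upper
// bits, so on JSVALUE64 an unsigned comparison against tagTypeNumberRegister tests
// for int32; doubles and cells sit strictly below that range.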
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
#else
    return branchTest32(NonZero, reg, TrustedImm32(TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(Below, reg, tagTypeNumberRegister);
#else
    return branchTest32(Zero, reg, TrustedImm32(TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    andPtr(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}
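
// Note: the USE(JSVALUE32_64) guard below can never be true inside this #else
// branch, so the de-tag helpers are compiled out here; they appear to be vestigial
// (presumably left over from the removed JSVALUE32 encoding).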
#if USE(JSVALUE32_64)
ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
{
    subPtr(TrustedImm32(TagTypeNumber), reg);
}

ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
{
    return branchSubPtr(Zero, TrustedImm32(TagTypeNumber), reg);
}
#endif

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    emitFastArithIntToImmNoCheck(src, dest);
#else
    if (src != dest)
        move(src, dest);
    addPtr(TrustedImm32(TagTypeNumber), dest);
#endif
}

// operand is int32_t, must have been zero-extended if register is 64-bit.
ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    if (src != dest)
        move(src, dest);
    orPtr(tagTypeNumberRegister, dest);
#else
    signExtend32ToPtr(src, dest);
    addPtr(dest, dest);
    emitFastArithReTagImmediate(dest, dest);
#endif
}

ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)

#endif