/*
 * Copyright (C) 2008, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlines_h
#define JITInlines_h

#if ENABLE(JIT)

namespace JSC {

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
#else
    store64(from, addressFor(entry, callFrameRegister));
#endif
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    killLastResultRegister();
#endif
}

#if USE(JSVALUE64)
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader64(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load64(Address(from, entry * sizeof(Register)), to);
    killLastResultRegister();
}
#endif

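// Loads the single character of a one-character JSString into dst, branching to
// failures if src is not a one-character string or if its value is an unresolved
// rope. Handles both 8-bit and 16-bit StringImpl representations. Note: clobbers
// regT1, which is used as a scratch for the StringImpl flags.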
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchPtr(NotEqual, Address(src, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    failures.append(branchTest32(Zero, dst));
    loadPtr(MacroAssembler::Address(dst, StringImpl::flagsOffset()), regT1);
    loadPtr(MacroAssembler::Address(dst, StringImpl::dataOffset()), dst);

    JumpList is16Bit;
    JumpList cont8Bit;
    is16Bit.append(branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    load8(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.append(jump());
    is16Bit.link(this);
    load16(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.link(this);
}

ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}

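// Returns true if the current bytecode offset is a jump target, advancing
// m_jumpTargetsPosition past any targets already behind the current offset.
// Jump targets can be reached from elsewhere, so cached-register assumptions
// (see emitGetVirtualRegister) must not be trusted there.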
ALWAYS_INLINE bool JIT::atJumpTarget()
{
    while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeOffset) {
        if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeOffset)
            return true;
        ++m_jumpTargetsPosition;
    }
    return false;
}

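// On assemblers that use a constant pool (traditional ARM and SH4), the pool can
// be flushed into the instruction stream at almost any instruction boundary.
// These helpers reserve enough instruction and constant space up front so that a
// sequence that must not be interrupted by a pool flush is emitted contiguously,
// and in debug builds they assert that the sequence stayed within that reserve.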
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL

ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
{
#if CPU(ARM_TRADITIONAL)
#ifndef NDEBUG
    // Ensure the label after the sequence can also fit.
    insnSpace += sizeof(ARMWord);
    constSpace += sizeof(uint64_t);
#endif

    ensureSpace(insnSpace, constSpace);

#elif CPU(SH4)
#ifndef NDEBUG
    insnSpace += sizeof(SH4Word);
    constSpace += sizeof(uint64_t);
#endif

    m_assembler.ensureSpace(insnSpace + m_assembler.maxInstructionSize + 2, constSpace + 8);
#endif

#ifndef NDEBUG
    m_uninterruptedInstructionSequenceBegin = label();
    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
#endif
}

ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace, int dst)
{
#ifndef NDEBUG
    /* There are several cases where the uninterrupted sequence is larger than
     * the maximum offset required for patching the same sequence. E.g.: if the
     * last macroassembler instruction in an uninterrupted sequence is a stub
     * call, it emits store instruction(s) which should not be included in the
     * calculated length of the uninterrupted sequence. So insnSpace and
     * constSpace are an upper limit rather than a hard limit.
     */

#if CPU(SH4)
    if ((dst > 15) || (dst < -16)) {
        insnSpace += 8;
        constSpace += 2;
    }

    if (((dst >= -16) && (dst < 0)) || ((dst > 7) && (dst <= 15)))
        insnSpace += 8;
#else
    UNUSED_PARAM(dst);
#endif

    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) <= insnSpace);
    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin <= constSpace);
#else
    UNUSED_PARAM(insnSpace);
    UNUSED_PARAM(constSpace);
    UNUSED_PARAM(dst);
#endif
}

#endif // ASSEMBLER_HAS_CONSTANT_POOL

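// Publishes the current call frame to the VM so runtime code can walk the stack.
// The bytecode offset is stored biased by 1 (so 0 means "not set") in the tag
// slot of the ArgumentCount call frame header entry.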
ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    ASSERT(static_cast<int>(m_bytecodeOffset) >= 0);
    if (m_bytecodeOffset) {
#if USE(JSVALUE32_64)
        storePtr(TrustedImmPtr(m_codeBlock->instructions().begin() + m_bytecodeOffset + 1), intTagFor(JSStack::ArgumentCount));
#else
        store32(TrustedImm32(m_bytecodeOffset + 1), intTagFor(JSStack::ArgumentCount));
#endif
    }
    storePtr(callFrameRegister, &m_vm->topCallFrame);
}

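// Makes the first argument register point at the stub arguments on the stack.
// On x86 the trampoline's return address sits on top of the stack and must be
// skipped; architectures with a link register can use the stack pointer as-is.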
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if CPU(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(TrustedImm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif CPU(ARM) || CPU(ARM64)
    move(stackPointerRegister, firstArgumentRegister);
#elif CPU(SH4)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, JSCell::structureOffset()), TrustedImmPtr(structure));
}

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase()
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Jump emptyJump; // Doing it this way to make Windows happy.
    m_slowCases.append(SlowCaseEntry(emptyJump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotObject(RegisterID structureReg)
{
    return branch8(Below, Address(structureReg, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif

#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, int32_t count)
{
    add64(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
}
#endif

#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}

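// Fast-path inline allocation: pops the head of the MarkedAllocator's free list
// into result, taking the slow case if the list is empty, then installs the
// structure and clears the butterfly (property storage) pointer.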
template<typename StructureType>
inline void JIT::emitAllocateJSObject(RegisterID allocator, StructureType structure, RegisterID result, RegisterID scratch)
{
    loadPtr(Address(allocator, MarkedAllocator::offsetOfFreeListHead()), result);
    addSlowCase(branchTestPtr(Zero, result));

    // Remove the object from the free list.
    loadPtr(Address(result), scratch);
    storePtr(scratch, Address(allocator, MarkedAllocator::offsetOfFreeListHead()));

    // Initialize the object's structure.
    storePtr(structure, Address(result, JSCell::structureOffset()));

    // Initialize the object's property storage pointer.
    storePtr(TrustedImmPtr(0), Address(result, JSObject::butterflyOffset()));
}

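// Value profiling records values seen at a bytecode site so that higher tiers
// can later speculate on their types. With a single bucket the value is stored
// directly; otherwise a pseudo-random bucket is chosen by bumping the bucket
// counter register by 1 or 3 and masking it into range.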
#if ENABLE(VALUE_PROFILER)
inline void JIT::emitValueProfilingSite(ValueProfile* valueProfile)
{
    ASSERT(shouldEmitProfiling());
    ASSERT(valueProfile);

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif
    const RegisterID scratch = regT3;

    if (ValueProfile::numberOfBuckets == 1) {
        // We're in a simple configuration: only one bucket, so we can just do a direct store.
#if USE(JSVALUE64)
        store64(value, valueProfile->m_buckets);
#else
        EncodedValueDescriptor* descriptor = bitwise_cast<EncodedValueDescriptor*>(valueProfile->m_buckets);
        store32(value, &descriptor->asBits.payload);
        store32(valueTag, &descriptor->asBits.tag);
#endif
        return;
    }

    if (m_randomGenerator.getUint32() & 1)
        add32(TrustedImm32(1), bucketCounterRegister);
    else
        add32(TrustedImm32(3), bucketCounterRegister);
    and32(TrustedImm32(ValueProfile::bucketIndexMask), bucketCounterRegister);
    move(TrustedImmPtr(valueProfile->m_buckets), scratch);
#if USE(JSVALUE64)
    store64(value, BaseIndex(scratch, bucketCounterRegister, TimesEight));
#elif USE(JSVALUE32_64)
    store32(value, BaseIndex(scratch, bucketCounterRegister, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(valueTag, BaseIndex(scratch, bucketCounterRegister, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
#endif
}

inline void JIT::emitValueProfilingSite(unsigned bytecodeOffset)
{
    if (!shouldEmitProfiling())
        return;
    emitValueProfilingSite(m_codeBlock->valueProfileForBytecodeOffset(bytecodeOffset));
}

inline void JIT::emitValueProfilingSite()
{
    emitValueProfilingSite(m_bytecodeOffset);
}
#endif // ENABLE(VALUE_PROFILER)

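// Records the last-seen structure for an array access site, then replaces the
// structure in the register with its indexing type for the caller to branch on.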
inline void JIT::emitArrayProfilingSite(RegisterID structureAndIndexingType, RegisterID scratch, ArrayProfile* arrayProfile)
{
    UNUSED_PARAM(scratch); // We had found this scratch register useful here before, so I will keep it for now.

    RegisterID structure = structureAndIndexingType;
    RegisterID indexingType = structureAndIndexingType;

    if (shouldEmitProfiling())
        storePtr(structure, arrayProfile->addressOfLastSeenStructure());

    load8(Address(structure, Structure::indexingTypeOffset()), indexingType);
}

inline void JIT::emitArrayProfilingSiteForBytecodeIndex(RegisterID structureAndIndexingType, RegisterID scratch, unsigned bytecodeIndex)
{
#if ENABLE(VALUE_PROFILER)
    emitArrayProfilingSite(structureAndIndexingType, scratch, m_codeBlock->getOrAddArrayProfile(bytecodeIndex));
#else
    UNUSED_PARAM(bytecodeIndex);
    emitArrayProfilingSite(structureAndIndexingType, scratch, 0);
#endif
}

inline void JIT::emitArrayProfileStoreToHoleSpecialCase(ArrayProfile* arrayProfile)
{
#if ENABLE(VALUE_PROFILER)
    store8(TrustedImm32(1), arrayProfile->addressOfMayStoreToHole());
#else
    UNUSED_PARAM(arrayProfile);
#endif
}

inline void JIT::emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile* arrayProfile)
{
#if ENABLE(VALUE_PROFILER)
    store8(TrustedImm32(1), arrayProfile->addressOfOutOfBounds());
#else
    UNUSED_PARAM(arrayProfile);
#endif
}

static inline bool arrayProfileSaw(ArrayModes arrayModes, IndexingType capability)
{
#if ENABLE(VALUE_PROFILER)
    return arrayModesInclude(arrayModes, capability);
#else
    UNUSED_PARAM(arrayModes);
    UNUSED_PARAM(capability);
    return false;
#endif
}

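// Picks the specialized array access mode for this site from the profile,
// preferring the more specific shapes (double, then int32, then array storage)
// and falling back to contiguous.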
inline JITArrayMode JIT::chooseArrayMode(ArrayProfile* profile)
{
#if ENABLE(VALUE_PROFILER)
    profile->computeUpdatedPrediction(m_codeBlock);
    ArrayModes arrayModes = profile->observedArrayModes();
    if (arrayProfileSaw(arrayModes, DoubleShape))
        return JITDouble;
    if (arrayProfileSaw(arrayModes, Int32Shape))
        return JITInt32;
    if (arrayProfileSaw(arrayModes, ArrayStorageShape))
        return JITArrayStorage;
    return JITContiguous;
#else
    UNUSED_PARAM(profile);
    return JITContiguous;
#endif
}

#if USE(JSVALUE32_64)

inline void JIT::emitLoadTag(int index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

inline void JIT::emitLoadPayload(int index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    RELEASE_ASSERT(tag != payload);

    if (base == callFrameRegister) {
        RELEASE_ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // Avoid stomping base.
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

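// Loads two operands, reading whichever one is currently mapped to registers
// first, so the mapping is used before the other load can clobber its registers.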
inline void JIT::emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2)
{
    if (isMapped(index1)) {
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(int index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreAndMapInt32(int index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength)
{
    emitStoreInt32(index, payload, indexIsInt32);
    map(m_bytecodeOffset + opcodeLength, index, tag, payload);
}

inline void JIT::emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(int index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(int index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(int index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(int index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}

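// The 32-bit value representation keeps a one-entry cache of the tag/payload
// registers holding a given virtual register, valid only within a single
// bytecode offset (and never across a jump target; see isLabeled below). map()
// records a mapping after a store, unmap() invalidates it when a register is
// reused, and the getMapped* helpers let loads reuse registers instead of
// reloading from the stack frame.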
inline bool JIT::isLabeled(unsigned bytecodeOffset)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeOffset)
            return true;
        if (jumpTarget > bytecodeOffset)
            return false;
    }
    return false;
}

inline void JIT::map(unsigned bytecodeOffset, int virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeOffset))
        return;

    m_mappedBytecodeOffset = bytecodeOffset;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;

    ASSERT(!canBeOptimizedOrInlined() || m_mappedPayload == regT0);
    ASSERT(!canBeOptimizedOrInlined() || m_mappedTag == regT1);
}

inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

inline void JIT::unmap()
{
    m_mappedBytecodeOffset = (unsigned)-1;
    m_mappedVirtualRegisterIndex = JSStack::ReturnPC;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}

inline bool JIT::isMapped(int virtualRegisterIndex)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

inline bool JIT::getMappedPayload(int virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

inline bool JIT::getMappedTag(int virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

#else // USE(JSVALUE32_64)

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    peek64(dst, argumentStackOffset);
}

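// The 64-bit JIT remembers which virtual register was last written through
// cachedResultRegister, so a subsequent read of the same temporary can reuse
// the register instead of reloading it. killLastResultRegister() invalidates
// that cache.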
ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}

// Loads a virtual register from the stack frame into a hardware register.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        if (!value.isNumber())
            move(TrustedImm64(JSValue::encode(value)), dst);
        else
            move(Imm64(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src) && !atJumpTarget()) {
        // The value we want is already in the cached result register.
        if (dst != cachedResultRegister)
            move(cachedResultRegister, dst);
        killLastResultRegister();
        return;
    }

    load64(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}

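// When loading two operands, read the one held in the cached result register
// first, before the other load kills the cache.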
ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    store64(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? static_cast<int>(dst) : std::numeric_limits<int>::max();
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
    return branchTest64(Zero, reg, tagMaskRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    or64(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}

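// In the 64-bit value representation, boxed int32s are the only values at or
// above tagTypeNumberRegister, so an integer check is a single unsigned compare.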
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
    return branch64(AboveOrEqual, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
    return branch64(Below, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    and64(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
    emitFastArithIntToImmNoCheck(src, dest);
}

ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITInlines_h