/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlineMethods_h
#define JITInlineMethods_h

#if ENABLE(JIT)

namespace JSC {

/* Deprecated: Please use JITStubCall instead. */

// puts an arg onto the stack, as an arg to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID src, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    poke(src, argumentStackOffset);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    poke(Imm32(value), argumentStackOffset);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(void* value, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    poke(ImmPtr(value), argumentStackOffset);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    peek(dst, argumentStackOffset);
}

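// For reference, new code should use JITStubCall rather than the helpers
// above. A minimal usage sketch (hedged; the stub name and temporaries are
// illustrative, see JITStubCall.h for the real interface):
//
//     JITStubCall stubCall(this, cti_op_add);
//     stubCall.addArgument(regT0); // pokes the argument into the JITStackFrame
//     stubCall.addArgument(regT1);
//     stubCall.call(dst);          // calls the stub, stores the result in dst
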
ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(from, Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(ImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    // Fast path for single-character strings: bail out (via 'failures') unless
    // src is a JSString with no rope fibers and a length of exactly one, then
    // load its first (and only) UChar into dst.
    failures.append(branchPtr(NotEqual, Address(src), ImmPtr(m_globalData->jsStringVPtr)));
    failures.append(branchTest32(NonZero, Address(src, OBJECT_OFFSETOF(JSString, m_fiberCount))));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), Imm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    loadPtr(MacroAssembler::Address(dst, ThunkHelpers::stringImplDataOffset()), dst);
    load16(MacroAssembler::Address(dst, 0), dst);
}

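// Illustrative caller pattern (hypothetical): a fast path specialized for
// single-character strings can treat any failure as a slow case:
//
//     JumpList failures;
//     emitLoadCharacterString(regT0, regT0, failures); // regT0 now holds the UChar
//     addSlowCase(failures);
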
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeIndex, function.executableAddress()));
    return nakedCall;
}

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL

ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
{
#if CPU(ARM_TRADITIONAL)
#ifndef NDEBUG
    // Ensure the label after the sequence can also fit
    insnSpace += sizeof(ARMWord);
    constSpace += sizeof(uint64_t);
#endif

    ensureSpace(insnSpace, constSpace);

#endif

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
#ifndef NDEBUG
    m_uninterruptedInstructionSequenceBegin = label();
    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
#endif
#endif
}

ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace)
{
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) == insnSpace);
    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin == constSpace);
#endif
}

#endif

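// Usage sketch (hedged; the size constants are illustrative, not real names):
// the pair brackets code that later repatching assumes is contiguous, so the
// assembler must not flush a constant pool into the middle of it:
//
//     beginUninterruptedSequence(sequenceInstructionSpace, sequenceConstantSpace);
//     ... emit the patchable loads/branches ...
//     endUninterruptedSequence(sequenceInstructionSpace, sequenceConstantSpace);
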
#if CPU(ARM)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(linkRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, linkRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, linkRegister);
}

#elif CPU(MIPS)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(returnAddressRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, returnAddressRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, returnAddressRegister);
}

#else // CPU(X86) || CPU(X86_64)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    pop(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    push(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    push(address);
}

#endif

#if USE(JIT_STUB_ARGUMENT_VA_LIST)
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
}
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline() {}
#else
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    move(stackPointerRegister, firstArgumentRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
}
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if CPU(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(Imm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif CPU(ARM)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}
#endif

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(structure));
}

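// Typical (hypothetical) use in an inline cache: guard on the expected
// Structure and treat a mismatch as a slow case:
//
//     addSlowCase(checkStructure(regT0, expectedStructure));
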
ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeIndex));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeIndex));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeIndex + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    jump.linkTo(m_labels[m_bytecodeIndex + relativeOffset], this);
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(Imm32(1u << (flag - 1)), AbsoluteAddress(&SamplingFlags::s_flags));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(Imm32(~(1u << (flag - 1))), AbsoluteAddress(&SamplingFlags::s_flags));
}
#endif

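// Hypothetical use: bracket a region of generated code with a flag so a
// sampling thread can attribute ticks to it:
//
//     setSamplingFlag(5);   // sets bit 5 of SamplingFlags::s_flags
//     ... generated code ...
//     clearSamplingFlag(5);
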
#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
{
#if CPU(X86_64) // Or any other 64-bit platform.
    addPtr(Imm32(count), AbsoluteAddress(&counter.m_counter));
#elif CPU(X86) // Or any other little-endian 32-bit platform.
    // The counter is 64 bits wide: add to the low word, then fold the carry
    // into the high word.
    intptr_t hiWord = reinterpret_cast<intptr_t>(&counter.m_counter) + sizeof(int32_t);
    add32(Imm32(count), AbsoluteAddress(&counter.m_counter));
    addWithCarry32(Imm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
#else
#error "SAMPLING_COUNTERS not implemented on this platform."
#endif
}
#endif

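// The 32-bit path above is equivalent to this C sketch, updating the two
// halves of the little-endian 64-bit counter separately:
//
//     uint32_t* lo = reinterpret_cast<uint32_t*>(&counter.m_counter);
//     uint32_t* hi = lo + 1;
//     uint64_t sum = static_cast<uint64_t>(*lo) + count;
//     *lo = static_cast<uint32_t>(sum);
//     *hi += static_cast<uint32_t>(sum >> 32); // propagate the carry
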
#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(ImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}

#if USE(JSVALUE32_64)

inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

inline void JIT::emitLoadPayload(unsigned index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    ASSERT(tag != payload);

    if (base == callFrameRegister) {
        ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2)
{
    // If index1 is currently mapped to registers, load it first so the
    // mapping is consumed before the second load can invalidate it;
    // otherwise load index2 first, in case it is the mapped one.
    if (isMapped(index1)) {
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(Imm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool)
{
    if (!indexIsBool)
        store32(Imm32(0), payloadFor(index, callFrameRegister));
    store32(tag, tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}

inline bool JIT::isLabeled(unsigned bytecodeIndex)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeIndex)
            return true;
        if (jumpTarget > bytecodeIndex)
            return false;
    }
    return false;
}

// The "mapping" below caches, for a single virtual register, which hardware
// registers currently hold its tag and payload at the current bytecode index,
// so consecutive opcodes can skip redundant loads.
inline void JIT::map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeIndex))
        return;

    m_mappedBytecodeIndex = bytecodeIndex;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;
}

inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

inline void JIT::unmap()
{
    m_mappedBytecodeIndex = (unsigned)-1;
    m_mappedVirtualRegisterIndex = (unsigned)-1;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}

inline bool JIT::isMapped(unsigned virtualRegisterIndex)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}

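// Usage sketch (hedged; modelled on how opcodes such as op_mov use the
// mapping, with OPCODE_LENGTH assumed):
//
//     emitLoad(src, regT1, regT0);  // tag in regT1, payload in regT0
//     emitStore(dst, regT1, regT0);
//     map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
//
// A following opcode that reads 'dst' can then reuse regT1/regT0 via
// getMappedTag()/getMappedPayload() instead of reloading from memory.
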
inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, Imm32(JSValue::CellTag)));
    }
}

inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        linkSlowCase(iter);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

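// Example (hypothetical): commutative int arithmetic can fold one constant
// operand with this helper:
//
//     unsigned op;
//     int32_t constant;
//     if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
//         emitLoad(op, regT1, regT0);
//         addSlowCase(branchAdd32(Overflow, Imm32(constant), regT0));
//     }
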
/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID tag, RegisterID payload, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    poke(payload, argumentStackOffset);
    poke(tag, argumentStackOffset + 1);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue constant = m_codeBlock->getConstant(src);
        poke(Imm32(constant.payload()), argumentStackOffset);
        poke(Imm32(constant.tag()), argumentStackOffset + 1);
    } else {
        emitLoad(src, scratch1, scratch2);
        poke(scratch2, argumentStackOffset);
        poke(scratch1, argumentStackOffset + 1);
    }
}

#else // USE(JSVALUE32_64)

ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}

// Loads an argument from the SF (stack frame) register array into a hardware register.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        move(ImmPtr(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src)) {
        bool atJumpTarget = false;
        while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeIndex) {
            if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeIndex)
                atJumpTarget = true;
            ++m_jumpTargetsPosition;
        }

        if (!atJumpTarget) {
            // The value we want is already live in the cached result register (eax on x86).
            if (dst != cachedResultRegister)
                move(cachedResultRegister, dst);
            killLastResultRegister();
            return;
        }
    }

    loadPtr(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}

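// The caching contract implied above (a sketch inferred from the code, not
// additional API): after
//
//     emitPutVirtualRegister(dst, regT0);   // regT0 == cachedResultRegister
//     emitGetVirtualRegister(dst, regT0);   // load elided, regT0 reused
//
// the second call avoids touching memory, provided dst is a temporary and
// no jump target falls between the two instructions.
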
ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? dst : std::numeric_limits<int>::max();
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    storePtr(ImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(Zero, reg, tagMaskRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    orPtr(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(NonZero, reg, tagMaskRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

#if USE(JSVALUE64)

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        convertInt32ToDouble(AbsoluteAddress(&inConstantPool), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
#endif

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(Below, reg, tagTypeNumberRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

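// Why the unsigned comparison works on JSVALUE64 (assumed encoding): boxed
// int32s are the only values whose top sixteen bits all match the tag
// pattern held in tagTypeNumberRegister, so they sort at the top of the
// unsigned pointer range and 'reg >= tagTypeNumberRegister' identifies them.
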
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    andPtr(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}

#if !USE(JSVALUE64)
ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
{
    subPtr(Imm32(JSImmediate::TagTypeNumber), reg);
}

ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
{
    return branchSubPtr(Zero, Imm32(JSImmediate::TagTypeNumber), reg);
}
#endif

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    emitFastArithIntToImmNoCheck(src, dest);
#else
    if (src != dest)
        move(src, dest);
    addPtr(Imm32(JSImmediate::TagTypeNumber), dest);
#endif
}

// operand is int32_t, must have been zero-extended if register is 64-bit.
ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    if (src != dest)
        move(src, dest);
    orPtr(tagTypeNumberRegister, dest);
#else
    signExtend32ToPtr(src, dest);
    addPtr(dest, dest);
    emitFastArithReTagImmediate(dest, dest);
#endif
}

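// Worked example for the 32-bit immediate path (assumed JSImmediate
// encoding, TagTypeNumber == 1): 'addPtr(dest, dest)' doubles the operand,
// i.e. shifts it left one bit, and retagging sets the low tag bit, so the
// int32 value 5 encodes as (5 << 1) | 1 == 11.
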
ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    lshift32(Imm32(JSImmediate::ExtendedPayloadShift), reg);
    or32(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), reg);
}

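// A sketch of the resulting encoding (assumed JSImmediate layout): a 0/1
// payload is shifted into the extended payload position and combined with
// the bool tag, i.e. encoded == (bit << ExtendedPayloadShift) | FullTagTypeBool.
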
/* Deprecated: Please use JITStubCall instead. */

// puts an arg from the SF register array onto the stack, as an arg to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        poke(ImmPtr(JSValue::encode(value)), argumentStackOffset);
    } else {
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), scratch);
        poke(scratch, argumentStackOffset);
    }

    killLastResultRegister();
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITInlineMethods_h