]> git.saurik.com Git - apple/javascriptcore.git/blame - jit/JITInlineMethods.h
JavaScriptCore-554.1.tar.gz
[apple/javascriptcore.git] / jit / JITInlineMethods.h
CommitLineData
9dae56ea
A
1/*
2 * Copyright (C) 2008 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#ifndef JITInlineMethods_h
27#define JITInlineMethods_h
28
29#include <wtf/Platform.h>
30
31#if ENABLE(JIT)
32
ba379fdc
A
33namespace JSC {
34
/* Deprecated: Please use JITStubCall instead. */

// puts an arg onto the stack, as an arg to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID src, unsigned argumentNumber)
{
    poke(src, argumentNumber);
}

/* Deprecated: Please use JITStubCall instead. */

// Pokes a 32-bit constant into stub-argument slot 'argumentNumber'.
ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber)
{
    poke(Imm32(value), argumentNumber);
}

/* Deprecated: Please use JITStubCall instead. */

// Pokes a pointer-sized constant into stub-argument slot 'argumentNumber'.
ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(void* value, unsigned argumentNumber)
{
    poke(ImmPtr(value), argumentNumber);
}

/* Deprecated: Please use JITStubCall instead. */

// Reads stub-argument slot 'argumentNumber' back into 'dst'.
ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    peek(dst, argumentNumber);
}

// Returns the JSValue held in the code block's constant pool for the given
// (constant) virtual register index.
ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}
69
// Stores 'from' into the given slot of the current call frame header.
ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(from, Address(callFrameRegister, entry * sizeof(Register)));
}

// Stores the constant pointer 'value' into the given call frame header slot.
ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(ImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
}

// Loads a pointer-sized value from call frame header slot 'entry' of the
// frame pointed to by 'from' into 'to'.
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
#if !USE(JSVALUE32_64)
#endif
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    // NOTE(review): presumably 'to' may alias the cached result register,
    // so the last-result cache is invalidated — confirm.
    killLastResultRegister();
#endif
}

// 32-bit-load variant of emitGetFromCallFrameHeaderPtr.
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}
9dae56ea 95
ba379fdc
A
// Emits a near call and records it (with the current bytecode index and the
// target address) in m_calls so it can be linked when code generation finishes.
ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeIndex, function.executableAddress()));
    return nakedCall;
}
104
#if PLATFORM(X86) || PLATFORM(X86_64)

// On x86 the return address lives on the stack after a call: pop it into 'reg'.
ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    pop(reg);
}

// Push the preserved return address back so a following 'ret' uses it.
ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    push(reg);
}

// As above, but the return address is read from memory.
ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    push(address);
}

#elif PLATFORM_ARM_ARCH(7)

// On ARMv7 the return address is held in the link register.
ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(linkRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, linkRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, linkRegister);
}

#endif
140
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
// Stores the current callFrame into the JITStackFrame slot that stub
// functions read their callFrame argument from.
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
}
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline() {}
#else
// Passes the address of the stack-based argument area in the first argument
// register, and stores the current callFrame into the JITStackFrame.
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    move(stackPointerRegister, firstArgumentRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
}
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if PLATFORM(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(Imm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif PLATFORM_ARM_ARCH(7)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}
#endif
164
// Emits a branch that is taken when the cell in 'reg' does NOT have the
// expected Structure.
ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(structure));
}

// Links the next slow case only if 'vReg' might hold an immediate (i.e. a
// matching slow-case jump was actually emitted on the hot path).
ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}
175
// Registers 'jump' as a branch to the slow path of the current bytecode op.
ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeIndex));
}

// Registers every jump in 'jumpList' as a slow-path branch of the current op.
ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeIndex));
}

// Records a jump to the bytecode op at the given relative offset; it is
// linked later via m_jmpTable.
ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeIndex + relativeOffset));
}

// Links a slow-path jump directly back to an already-emitted hot-path label.
ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    jump.linkTo(m_labels[m_bytecodeIndex + relativeOffset], this);
}
206
#if ENABLE(SAMPLING_FLAGS)
// Emits code to set sampling flag 'flag' (1-based, 1..32) in the global flags word.
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(Imm32(1u << (flag - 1)), AbsoluteAddress(&SamplingFlags::s_flags));
}

// Emits code to clear sampling flag 'flag'.
ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(Imm32(~(1u << (flag - 1))), AbsoluteAddress(&SamplingFlags::s_flags));
}
#endif
222
223#if ENABLE(SAMPLING_COUNTERS)
224ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
225{
226#if PLATFORM(X86_64) // Or any other 64-bit plattform.
227 addPtr(Imm32(count), AbsoluteAddress(&counter.m_counter));
228#elif PLATFORM(X86) // Or any other little-endian 32-bit plattform.
229 intptr_t hiWord = reinterpret_cast<intptr_t>(&counter.m_counter) + sizeof(int32_t);
230 add32(Imm32(count), AbsoluteAddress(&counter.m_counter));
231 addWithCarry32(Imm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
232#else
233#error "SAMPLING_FLAGS not implemented on this platform."
234#endif
235}
236#endif
237
#if ENABLE(OPCODE_SAMPLING)
#if PLATFORM(X86_64)
// Writes the encoded sample for 'instruction' into the sampler's slot.
// NOTE(review): the slot address is materialized in ecx first — presumably
// because it cannot be used as a direct store target on x86-64; confirm.
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86::ecx);
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86::ecx);
}
#else
// Writes the encoded sample for 'instruction' directly into the sampler's slot.
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if PLATFORM(X86_64)
// Records the currently executing CodeBlock in the sampler's slot (via ecx,
// as for sampleInstruction above on x86-64).
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86::ecx);
    storePtr(ImmPtr(codeBlock), X86::ecx);
}
#else
// Records the currently executing CodeBlock directly in the sampler's slot.
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif
267
268#if USE(JSVALUE32_64)
269
270inline JIT::Address JIT::tagFor(unsigned index, RegisterID base)
271{
272 return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
273}
274
275inline JIT::Address JIT::payloadFor(unsigned index, RegisterID base)
276{
277 return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
278}
279
280inline JIT::Address JIT::addressFor(unsigned index, RegisterID base)
281{
282 return Address(base, (index * sizeof(Register)));
283}
284
// Loads the tag word of virtual register 'index' into 'tag', preferring
// (in order) the current register mapping, the constant pool, and finally a
// memory load. 'tag' is unmapped afterwards since its old contents are gone.
inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

// Payload counterpart of emitLoadTag: mapping, then constant pool, then memory.
inline void JIT::emitLoadPayload(unsigned index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}
322
// Materializes a constant JSValue into a tag/payload register pair.
inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

// Loads virtual register 'index' (tag and payload) from the frame at 'base'.
// Load order is chosen so that 'base' is never overwritten before both loads
// have issued.
inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    ASSERT(tag != payload);

    if (base == callFrameRegister) {
        ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

// Loads two virtual registers. The mapped operand (if any) is loaded first
// — presumably because loading the other operand unmaps registers and would
// invalidate the mapping; confirm.
inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2)
{
    if (isMapped(index1)) {
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}
360
// Loads virtual register 'index' as a double into 'value'. Constants are
// read directly out of the code block's constant pool.
inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

// Converts the int32 payload of virtual register 'index' to a double in 'value'.
inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        // Point at the payload word of the boxed constant.
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}
379
// Stores a tag/payload register pair into virtual register 'index'.
inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

// Stores an int32 payload; the tag write is elided when the slot is already
// known to carry the Int32 tag.
inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

// Immediate-payload variant of the above.
inline void JIT::emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

// Stores a cell payload; the Cell tag write is elided when already known.
inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(Imm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

// Stores a boolean tag; the payload is zeroed unless the slot is already
// known to hold a boolean.
inline void JIT::emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool)
{
    if (!indexIsBool)
        store32(Imm32(0), payloadFor(index, callFrameRegister));
    store32(tag, tagFor(index, callFrameRegister));
}

// Stores a double over the full (tag + payload) slot of 'index'.
inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

// Stores a constant JSValue into virtual register 'index'.
inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

// Initializes local 'dst' to undefined.
ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}
429
// Returns true if 'bytecodeIndex' is a jump target. Advances
// m_jumpTargetIndex past every target below 'bytecodeIndex' as a side effect.
// NOTE(review): this relies on jump targets being stored in ascending order
// and on callers querying non-decreasing indices — confirm.
inline bool JIT::isLabeled(unsigned bytecodeIndex)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeIndex)
            return true;
        if (jumpTarget > bytecodeIndex)
            return false;
    }
    return false;
}
441
// Records that virtual register 'virtualRegisterIndex' currently lives in
// the given tag/payload registers. Skipped at jump targets, where the
// mapping cannot be trusted across incoming control flow.
inline void JIT::map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeIndex))
        return;

    m_mappedBytecodeIndex = bytecodeIndex;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;
}

// Invalidates any mapping through 'registerID' (its contents are about to be
// clobbered).
inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

// Invalidates the entire register mapping.
inline void JIT::unmap()
{
    m_mappedBytecodeIndex = (unsigned)-1;
    m_mappedVirtualRegisterIndex = (unsigned)-1;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}

// True if 'virtualRegisterIndex' is mapped at the current bytecode index.
inline bool JIT::isMapped(unsigned virtualRegisterIndex)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

// If the payload of 'virtualRegisterIndex' is mapped to a register, returns
// it through 'payload' and returns true.
inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

// If the tag of 'virtualRegisterIndex' is mapped to a register, returns it
// through 'tag' and returns true.
inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}
9dae56ea 501
ba379fdc
A
// Adds a slow case taken when virtual register 'virtualRegisterIndex' does
// not hold a cell, checking the tag word in memory. Skipped entirely when the
// operand is statically known not to be an immediate.
inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        addSlowCase(branch32(NotEqual, tagFor(virtualRegisterIndex), Imm32(JSValue::CellTag)));
}

// As above, but the tag has already been loaded into 'tag'.
inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        addSlowCase(branch32(NotEqual, tag, Imm32(JSValue::CellTag)));
}

// Links the next slow case only if the matching hot-path check was emitted.
inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        linkSlowCase(iter);
}

// True if 'src' is a constant register holding an int32.
ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}
ba379fdc
A
524
525ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
9dae56ea 526{
ba379fdc
A
527 if (isOperandConstantImmediateInt(op1)) {
528 constant = getConstantOperand(op1).asInt32();
529 op = op2;
530 return true;
531 }
532
533 if (isOperandConstantImmediateInt(op2)) {
534 constant = getConstantOperand(op2).asInt32();
535 op = op1;
536 return true;
537 }
538
539 return false;
9dae56ea 540}
ba379fdc
A
541
542ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
9dae56ea 543{
ba379fdc 544 return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
9dae56ea 545}
ba379fdc
A
546
/* Deprecated: Please use JITStubCall instead. */

// Pokes virtual register 'src' into two consecutive stub-argument slots:
// payload at 'argumentNumber', tag at 'argumentNumber + 1'.
ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2)
{
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue constant = m_codeBlock->getConstant(src);
        poke(Imm32(constant.payload()), argumentNumber);
        poke(Imm32(constant.tag()), argumentNumber + 1);
    } else {
        // emitLoad puts the tag in scratch1 and the payload in scratch2.
        emitLoad(src, scratch1, scratch2);
        poke(scratch2, argumentNumber);
        poke(scratch1, argumentNumber + 1);
    }
}
561
#else // USE(JSVALUE32_64)

// Forgets the register-cached result of the previous bytecode op, forcing
// subsequent reads to reload from the register file.
ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}
9dae56ea 568
ba379fdc
A
// get arg puts an arg from the SF register array into a h/w register
// Constants are materialized directly; a temporary that was the previous
// op's result can be reused from the cached result register, but only when
// the current op is not a jump target (control could arrive from elsewhere).
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        move(ImmPtr(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src)) {
        // Scan forward through the jump targets up to the current index to
        // decide whether this op can be reached by a jump.
        bool atJumpTarget = false;
        while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeIndex) {
            if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeIndex)
                atJumpTarget = true;
            ++m_jumpTargetsPosition;
        }

        if (!atJumpTarget) {
            // The argument we want is already stored in eax
            if (dst != cachedResultRegister)
                move(cachedResultRegister, dst);
            killLastResultRegister();
            return;
        }
    }

    loadPtr(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}

// Loads two operands, fetching the cached one first so the result cache is
// consumed before emitGetVirtualRegister invalidates it.
ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}
9dae56ea 613
ba379fdc
A
// Returns the int32 value of constant operand 'src'.
ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

// True if 'src' is a constant register holding an int32.
ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

// Stores 'from' into virtual register 'dst'. The result cache stays valid
// only when 'from' is the cached result register; otherwise it is killed.
ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? dst : std::numeric_limits<int>::max();
}

// Initializes local 'dst' to undefined.
ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    storePtr(ImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}
634
// Branches when 'reg' holds a JSCell pointer (no immediate tag bits set).
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(Zero, reg, tagMaskRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagMask));
#endif
}

// Branches when BOTH values are cells, using a single combined tag test on
// the OR of the two values.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    orPtr(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

// Branches when 'reg' does NOT hold a JSCell pointer.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(NonZero, reg, tagMaskRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

// Emits the slow-case check only when 'vReg' might hold an immediate.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}
675
#if USE(JSVALUE64)
// Branches when 'reg' holds an immediate number (any tag-type-number bit set).
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateNumber(RegisterID reg)
{
    return branchTestPtr(NonZero, reg, tagTypeNumberRegister);
}
// Branches when 'reg' does not hold an immediate number.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateNumber(RegisterID reg)
{
    return branchTestPtr(Zero, reg, tagTypeNumberRegister);
}
#endif

// Branches when 'reg' holds an immediate integer.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

// Branches when 'reg' does not hold an immediate integer.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(Below, reg, tagTypeNumberRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

// Branches unless BOTH values are immediate integers, using a single test on
// the AND of the two values.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    andPtr(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}
721
#if !USE(JSVALUE64)
// Strips the integer-immediate tag from 'reg' in place.
ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
{
    subPtr(Imm32(JSImmediate::TagTypeNumber), reg);
}

// Strips the tag and branches when the untagged value is zero.
ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
{
    return branchSubPtr(Zero, Imm32(JSImmediate::TagTypeNumber), reg);
}
#endif

// Re-applies the integer-immediate tag, writing the result to 'dest'.
ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    emitFastArithIntToImmNoCheck(src, dest);
#else
    if (src != dest)
        move(src, dest);
    addPtr(Imm32(JSImmediate::TagTypeNumber), dest);
#endif
}

// Converts a tagged immediate integer to a raw int in place (no-op on JSVALUE64).
ALWAYS_INLINE void JIT::emitFastArithImmToInt(RegisterID reg)
{
#if USE(JSVALUE64)
    UNUSED_PARAM(reg);
#else
    rshiftPtr(Imm32(JSImmediate::IntegerPayloadShift), reg);
#endif
}

// operand is int32_t, must have been zero-extended if register is 64-bit.
ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    if (src != dest)
        move(src, dest);
    orPtr(tagTypeNumberRegister, dest);
#else
    signExtend32ToPtr(src, dest);
    addPtr(dest, dest); // dest + dest == dest << 1: make room for the tag bit
    emitFastArithReTagImmediate(dest, dest);
#endif
}

// Converts a 0/1 value in 'reg' into a tagged boolean immediate.
ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    lshift32(Imm32(JSImmediate::ExtendedPayloadShift), reg);
    or32(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), reg);
}
773
/* Deprecated: Please use JITStubCall instead. */

// get arg puts an arg from the SF register array onto the stack, as an arg to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch)
{
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        emitPutJITStubArgConstant(JSValue::encode(value), argumentNumber);
    } else {
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), scratch);
        emitPutJITStubArg(scratch, argumentNumber);
    }

    // 'scratch' may have clobbered the cached result register.
    killLastResultRegister();
}
789
ba379fdc 790#endif // USE(JSVALUE32_64)
9dae56ea 791
ba379fdc 792} // namespace JSC
9dae56ea
A
793
794#endif // ENABLE(JIT)
795
796#endif