#include "Collector.h"
#include "Debugger.h"
#include "ExceptionHelpers.h"
+#include "GetterSetter.h"
#include "GlobalEvalFunction.h"
#include "JIT.h"
#include "JSActivation.h"
#error "JIT_STUB_ARGUMENT_VA_LIST not supported on ARMv7."
#endif
-asm volatile (
-".text" "\n"
-".align 2" "\n"
-".globl " SYMBOL_STRING(ctiTrampoline) "\n"
-HIDE_SYMBOL(ctiTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiTrampoline) "\n"
-SYMBOL_STRING(ctiTrampoline) ":" "\n"
- "sub sp, sp, #0x3c" "\n"
- "str lr, [sp, #0x20]" "\n"
- "str r4, [sp, #0x24]" "\n"
- "str r5, [sp, #0x28]" "\n"
- "str r6, [sp, #0x2c]" "\n"
- "str r1, [sp, #0x30]" "\n"
- "str r2, [sp, #0x34]" "\n"
- "str r3, [sp, #0x38]" "\n"
- "cpy r5, r2" "\n"
- "mov r6, #512" "\n"
- "blx r0" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
-
-asm volatile (
-".text" "\n"
-".align 2" "\n"
-".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiVMThrowTrampoline) "\n"
-SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
- "cpy r0, sp" "\n"
- "bl " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
-
-asm volatile (
-".text" "\n"
-".align 2" "\n"
-".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiOpThrowNotCaught) "\n"
-SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
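+// Layout of the JITStackFrame built by ctiTrampoline below. These offsets are
+// cross-checked against OBJECT_OFFSETOF in the JITThunks constructor.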
+#define THUNK_RETURN_ADDRESS_OFFSET 0x3C
+#define PRESERVED_RETURN_ADDRESS_OFFSET 0x40
+#define PRESERVED_R4_OFFSET 0x44
+#define PRESERVED_R5_OFFSET 0x48
+#define PRESERVED_R6_OFFSET 0x4C
+#define REGISTER_FILE_OFFSET 0x50
+#define CALLFRAME_OFFSET 0x54
+#define EXCEPTION_OFFSET 0x58
+#define ENABLE_PROFILER_REFERENCE_OFFSET 0x60
#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
-asm volatile (
-".globl " SYMBOL_STRING(ctiTrampoline) "\n"
-SYMBOL_STRING(ctiTrampoline) ":" "\n"
- "stmdb sp!, {r1-r3}" "\n"
- "stmdb sp!, {r4-r8, lr}" "\n"
- "sub sp, sp, #68" "\n"
- "mov r4, r2" "\n"
- "mov r5, #512" "\n"
- // r0 contains the code
- "mov lr, pc" "\n"
- "mov pc, r0" "\n"
- "add sp, sp, #68" "\n"
- "ldmia sp!, {r4-r8, lr}" "\n"
- "add sp, sp, #12" "\n"
- "mov pc, lr" "\n"
-);
-
-asm volatile (
-".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
- "mov r0, sp" "\n"
- "bl " SYMBOL_STRING(cti_vm_throw) "\n"
-
-// Both has the same return sequence
-".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "add sp, sp, #68" "\n"
- "ldmia sp!, {r4-r8, lr}" "\n"
- "add sp, sp, #12" "\n"
- "mov pc, lr" "\n"
-);
+#define THUNK_RETURN_ADDRESS_OFFSET 64
+#define PRESERVEDR4_OFFSET 68
#elif COMPILER(MSVC) && CPU(X86)
#error "JIT_STUB_ARGUMENT_VA_LIST not supported on ARMv7."
#endif
-asm volatile (
+#define THUNK_RETURN_ADDRESS_OFFSET 0x1C
+#define PRESERVED_RETURN_ADDRESS_OFFSET 0x20
+#define PRESERVED_R4_OFFSET 0x24
+#define PRESERVED_R5_OFFSET 0x28
+#define PRESERVED_R6_OFFSET 0x2C
+#define REGISTER_FILE_OFFSET 0x30
+#define CALLFRAME_OFFSET 0x34
+#define EXCEPTION_OFFSET 0x38
+#define ENABLE_PROFILER_REFERENCE_OFFSET 0x40
+
+#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
+
+#define THUNK_RETURN_ADDRESS_OFFSET 32
+#define PRESERVEDR4_OFFSET 36
+
+#elif CPU(MIPS)
+
+#if USE(JIT_STUB_ARGUMENT_VA_LIST)
+#error "JIT_STUB_ARGUMENT_VA_LIST not supported on MIPS."
+#endif
+
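+// The MIPS trampoline reserves a 72-byte frame: $gp, $s0-$s2 and $ra are preserved at
+// offsets 28-44, and the registerFile, callFrame, exception, enabledProfilerReference and
+// globalData arguments are spilled at offsets 52-68 (see the JITStackFrame asserts below).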
+asm volatile(
".text" "\n"
".align 2" "\n"
+".set noreorder" "\n"
+".set nomacro" "\n"
+".set nomips16" "\n"
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
-HIDE_SYMBOL(ctiTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiTrampoline) "\n"
+".ent " SYMBOL_STRING(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
- "sub sp, sp, #0x40" "\n"
- "str lr, [sp, #0x20]" "\n"
- "str r4, [sp, #0x24]" "\n"
- "str r5, [sp, #0x28]" "\n"
- "str r6, [sp, #0x2c]" "\n"
- "str r1, [sp, #0x30]" "\n"
- "str r2, [sp, #0x34]" "\n"
- "str r3, [sp, #0x38]" "\n"
- "cpy r5, r2" "\n"
- "mov r6, #512" "\n"
- "blx r0" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x40" "\n"
- "bx lr" "\n"
+ "addiu $29,$29,-72" "\n"
+ "sw $31,44($29)" "\n"
+ "sw $18,40($29)" "\n"
+ "sw $17,36($29)" "\n"
+ "sw $16,32($29)" "\n"
+#if WTF_MIPS_PIC
+ "sw $28,28($29)" "\n"
+#endif
+ "move $16,$6 # set callFrameRegister" "\n"
+ "li $17,512 # set timeoutCheckRegister" "\n"
+ "move $25,$4 # move executableAddress to t9" "\n"
+ "sw $5,52($29) # store registerFile to current stack" "\n"
+ "sw $6,56($29) # store callFrame to current stack" "\n"
+ "sw $7,60($29) # store exception to current stack" "\n"
+ "lw $8,88($29) # load enableProfilerReference from previous stack" "\n"
+ "lw $9,92($29) # load globalData from previous stack" "\n"
+ "sw $8,64($29) # store enableProfilerReference to current stack" "\n"
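+ // Note: ".set noreorder" is in effect, so the store below executes in the jalr delay slot.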
+ "jalr $25" "\n"
+ "sw $9,68($29) # store globalData to current stack" "\n"
+ "lw $16,32($29)" "\n"
+ "lw $17,36($29)" "\n"
+ "lw $18,40($29)" "\n"
+ "lw $31,44($29)" "\n"
+ "jr $31" "\n"
+ "addiu $29,$29,72" "\n"
+".set reorder" "\n"
+".set macro" "\n"
+".end " SYMBOL_STRING(ctiTrampoline) "\n"
);
-asm volatile (
+asm volatile(
".text" "\n"
".align 2" "\n"
+".set noreorder" "\n"
+".set nomacro" "\n"
+".set nomips16" "\n"
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiVMThrowTrampoline) "\n"
+".ent " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
- "cpy r0, sp" "\n"
- "bl " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x40" "\n"
- "bx lr" "\n"
+#if WTF_MIPS_PIC
+ "lw $28,28($29)" "\n"
+".set macro" "\n"
+ "la $25," SYMBOL_STRING(cti_vm_throw) "\n"
+".set nomacro" "\n"
+ "bal " SYMBOL_STRING(cti_vm_throw) "\n"
+ "move $4,$29" "\n"
+#else
+ "jal " SYMBOL_STRING(cti_vm_throw) "\n"
+ "move $4,$29" "\n"
+#endif
+ "lw $16,32($29)" "\n"
+ "lw $17,36($29)" "\n"
+ "lw $18,40($29)" "\n"
+ "lw $31,44($29)" "\n"
+ "jr $31" "\n"
+ "addiu $29,$29,72" "\n"
+".set reorder" "\n"
+".set macro" "\n"
+".end " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
);
-asm volatile (
+asm volatile(
".text" "\n"
".align 2" "\n"
+".set noreorder" "\n"
+".set nomacro" "\n"
+".set nomips16" "\n"
".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiOpThrowNotCaught) "\n"
+".ent " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
-
-#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
-
-asm volatile (
-".text\n"
-".globl " SYMBOL_STRING(ctiTrampoline) "\n"
-HIDE_SYMBOL(ctiTrampoline) "\n"
-SYMBOL_STRING(ctiTrampoline) ":" "\n"
- "stmdb sp!, {r1-r3}" "\n"
- "stmdb sp!, {r4-r8, lr}" "\n"
- "sub sp, sp, #36" "\n"
- "mov r4, r2" "\n"
- "mov r5, #512" "\n"
- "mov lr, pc" "\n"
- "mov pc, r0" "\n"
- "add sp, sp, #36" "\n"
- "ldmia sp!, {r4-r8, lr}" "\n"
- "add sp, sp, #12" "\n"
- "mov pc, lr" "\n"
-);
-
-asm volatile (
-".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
-SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
- "mov r0, sp" "\n"
- "bl " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
-
-// Both has the same return sequence
-".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
-SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "add sp, sp, #36" "\n"
- "ldmia sp!, {r4-r8, lr}" "\n"
- "add sp, sp, #12" "\n"
- "mov pc, lr" "\n"
+ "lw $16,32($29)" "\n"
+ "lw $17,36($29)" "\n"
+ "lw $18,40($29)" "\n"
+ "lw $31,44($29)" "\n"
+ "jr $31" "\n"
+ "addiu $29,$29,72" "\n"
+".set reorder" "\n"
+".set macro" "\n"
+".end " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
);
#elif COMPILER(RVCT) && CPU(ARM_TRADITIONAL)
+#define THUNK_RETURN_ADDRESS_OFFSET 32
+#define PRESERVEDR4_OFFSET 36
+
__asm EncodedJSValue ctiTrampoline(void*, RegisterFile*, CallFrame*, JSValue*, Profiler**, JSGlobalData*)
{
ARM
#endif // USE(JSVALUE32_64)
+#if COMPILER(GCC) && CPU(ARM_THUMB2)
+
+asm volatile(
+".text" "\n"
+".align 2" "\n"
+".globl " SYMBOL_STRING(ctiTrampoline) "\n"
+HIDE_SYMBOL(ctiTrampoline) "\n"
+".thumb" "\n"
+".thumb_func " THUMB_FUNC_PARAM(ctiTrampoline) "\n"
+SYMBOL_STRING(ctiTrampoline) ":" "\n"
+ "sub sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "str lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "str r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "str r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "str r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "str r1, [sp, #" STRINGIZE_VALUE_OF(REGISTER_FILE_OFFSET) "]" "\n"
+ "str r2, [sp, #" STRINGIZE_VALUE_OF(CALLFRAME_OFFSET) "]" "\n"
+ "str r3, [sp, #" STRINGIZE_VALUE_OF(EXCEPTION_OFFSET) "]" "\n"
+ "cpy r5, r2" "\n"
+ "mov r6, #512" "\n"
+ "blx r0" "\n"
+ "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "bx lr" "\n"
+);
+
+asm volatile(
+".text" "\n"
+".align 2" "\n"
+".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
+".thumb" "\n"
+".thumb_func " THUMB_FUNC_PARAM(ctiVMThrowTrampoline) "\n"
+SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
+ "cpy r0, sp" "\n"
+ "bl " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
+ "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "bx lr" "\n"
+);
+
+asm volatile(
+".text" "\n"
+".align 2" "\n"
+".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
+".thumb" "\n"
+".thumb_func " THUMB_FUNC_PARAM(ctiOpThrowNotCaught) "\n"
+SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
+ "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "bx lr" "\n"
+);
+
+#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
+
+asm volatile(
+".globl " SYMBOL_STRING(ctiTrampoline) "\n"
+HIDE_SYMBOL(ctiTrampoline) "\n"
+SYMBOL_STRING(ctiTrampoline) ":" "\n"
+ "stmdb sp!, {r1-r3}" "\n"
+ "stmdb sp!, {r4-r8, lr}" "\n"
+ "sub sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
+ "mov r4, r2" "\n"
+ "mov r5, #512" "\n"
+ // r0 contains the code
+ "mov lr, pc" "\n"
+ "mov pc, r0" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
+ "ldmia sp!, {r4-r8, lr}" "\n"
+ "add sp, sp, #12" "\n"
+ "mov pc, lr" "\n"
+);
+
+asm volatile(
+".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
+SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
+ "mov r0, sp" "\n"
+ "bl " SYMBOL_STRING(cti_vm_throw) "\n"
+
+// Both share the same return sequence
+".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
+SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
+ "ldmia sp!, {r4-r8, lr}" "\n"
+ "add sp, sp, #12" "\n"
+ "mov pc, lr" "\n"
+);
+
+#endif
+
#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER stackFrame.globalData->interpreter->sampler()
#else
JITThunks::JITThunks(JSGlobalData* globalData)
{
- JIT::compileCTIMachineTrampolines(globalData, &m_executablePool, &m_ctiStringLengthTrampoline, &m_ctiVirtualCallLink, &m_ctiVirtualCall, &m_ctiNativeCallThunk);
+ JIT::compileCTIMachineTrampolines(globalData, &m_executablePool, &m_trampolineStructure);
#if CPU(ARM_THUMB2)
// Unfortunately, the ARM compiler does not like the use of offsetof on JITStackFrame (since it contains non-POD types),
// and the OBJECT_OFFSETOF macro does not appear constantish enough for it to be happy with its use in COMPILE_ASSERT
// macros.
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == 0x20);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == 0x24);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR5) == 0x28);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR6) == 0x2c);
-
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, registerFile) == 0x30);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, callFrame) == 0x34);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, exception) == 0x38);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == PRESERVED_RETURN_ADDRESS_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == PRESERVED_R4_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR5) == PRESERVED_R5_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR6) == PRESERVED_R6_OFFSET);
+
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, registerFile) == REGISTER_FILE_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, callFrame) == CALLFRAME_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, exception) == EXCEPTION_OFFSET);
// The fifth argument is the first item already on the stack.
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, enabledProfilerReference) == 0x40);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, enabledProfilerReference) == ENABLE_PROFILER_REFERENCE_OFFSET);
+
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET);
+
+#elif CPU(ARM_TRADITIONAL)
+
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == PRESERVEDR4_OFFSET);
+
+#elif CPU(MIPS)
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedGP) == 28);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS0) == 32);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS1) == 36);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS2) == 40);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == 44);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == 48);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, registerFile) == 52);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, callFrame) == 56);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, exception) == 60);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, enabledProfilerReference) == 64);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, globalData) == 68);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == 0x1C);
#endif
}
+JITThunks::~JITThunks()
+{
+}
+
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
-NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo)
+NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo, bool direct)
{
// The interpreter checks for recursion here; I do not believe this can occur in CTI.
// Uncacheable: give up.
if (!slot.isCacheable()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
Structure* structure = baseCell->structure();
if (structure->isUncacheableDictionary()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
// If baseCell != base, then baseCell must be a proxy for another object.
if (baseCell != slot.base()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
// Structure transition, cache transition info
if (slot.type() == PutPropertySlot::NewProperty) {
if (structure->isDictionary()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
StructureChain* prototypeChain = structure->prototypeChain(callFrame);
stubInfo->initPutByIdTransition(structure->previousID(), structure, prototypeChain);
- JIT::compilePutByIdTransition(callFrame->scopeChain()->globalData, codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress);
+ JIT::compilePutByIdTransition(callFrame->scopeChain()->globalData, codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct);
return;
}
stubInfo->initPutByIdReplace(structure);
- JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
+ JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress, direct);
}
NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo* stubInfo)
if (isJSString(globalData, baseValue) && propertyName == callFrame->propertyNames().length) {
// The tradeoff of compiling a patched inline string length access routine does not seem
// to pay off, so we currently only do this for arrays.
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, globalData->jitStubs.ctiStringLengthTrampoline());
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, globalData->jitStubs->ctiStringLengthTrampoline());
return;
}
if (slot.slotBase() == baseValue) {
// set this up, so derefStructures can do its job.
stubInfo->initGetByIdSelf(structure);
-
- JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
+ if (slot.cachedPropertyType() != PropertySlot::Value)
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
+ else
+ JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
return;
}
ASSERT(!structure->isDictionary());
ASSERT(!slotBaseObject->structure()->isDictionary());
- JIT::compileGetByIdProto(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), offset, returnAddress);
+ JIT::compileGetByIdProto(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress);
return;
}
StructureChain* prototypeChain = structure->prototypeChain(callFrame);
stubInfo->initGetByIdChain(structure, prototypeChain);
- JIT::compileGetByIdChain(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, prototypeChain, count, offset, returnAddress);
+ JIT::compileGetByIdChain(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress);
}
#endif // ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
".thumb" "\n" \
".thumb_func " THUMB_FUNC_PARAM(cti_##op) "\n" \
SYMBOL_STRING(cti_##op) ":" "\n" \
- "str lr, [sp, #0x1c]" "\n" \
+ "str lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
"bl " SYMBOL_STRING(JITStubThunked_##op) "\n" \
- "ldr lr, [sp, #0x1c]" "\n" \
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
"bx lr" "\n" \
); \
rtype JITStubThunked_##op(STUB_ARGS_DECLARATION) \
-#elif CPU(ARM_TRADITIONAL) && COMPILER(GCC)
+#elif CPU(MIPS)
+#if WTF_MIPS_PIC
+#define DEFINE_STUB_FUNCTION(rtype, op) \
+ extern "C" { \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
+ }; \
+ asm volatile( \
+ ".text" "\n" \
+ ".align 2" "\n" \
+ ".set noreorder" "\n" \
+ ".set nomacro" "\n" \
+ ".set nomips16" "\n" \
+ ".globl " SYMBOL_STRING(cti_##op) "\n" \
+ ".ent " SYMBOL_STRING(cti_##op) "\n" \
+ SYMBOL_STRING(cti_##op) ":" "\n" \
+ "lw $28,28($29)" "\n" \
+ "sw $31,48($29)" "\n" \
+ ".set macro" "\n" \
+ "la $25," SYMBOL_STRING(JITStubThunked_##op) "\n" \
+ ".set nomacro" "\n" \
+ "bal " SYMBOL_STRING(JITStubThunked_##op) "\n" \
+ "nop" "\n" \
+ "lw $31,48($29)" "\n" \
+ "jr $31" "\n" \
+ "nop" "\n" \
+ ".set reorder" "\n" \
+ ".set macro" "\n" \
+ ".end " SYMBOL_STRING(cti_##op) "\n" \
+ ); \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
+
+#else // WTF_MIPS_PIC
+#define DEFINE_STUB_FUNCTION(rtype, op) \
+ extern "C" { \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
+ }; \
+ asm volatile( \
+ ".text" "\n" \
+ ".align 2" "\n" \
+ ".set noreorder" "\n" \
+ ".set nomacro" "\n" \
+ ".set nomips16" "\n" \
+ ".globl " SYMBOL_STRING(cti_##op) "\n" \
+ ".ent " SYMBOL_STRING(cti_##op) "\n" \
+ SYMBOL_STRING(cti_##op) ":" "\n" \
+ "sw $31,48($29)" "\n" \
+ "jal " SYMBOL_STRING(JITStubThunked_##op) "\n" \
+ "nop" "\n" \
+ "lw $31,48($29)" "\n" \
+ "jr $31" "\n" \
+ "nop" "\n" \
+ ".set reorder" "\n" \
+ ".set macro" "\n" \
+ ".end " SYMBOL_STRING(cti_##op) "\n" \
+ ); \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
-#if USE(JSVALUE32_64)
-#define THUNK_RETURN_ADDRESS_OFFSET 64
-#else
-#define THUNK_RETURN_ADDRESS_OFFSET 32
#endif
-COMPILE_ASSERT(offsetof(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET, JITStackFrame_thunkReturnAddress_offset_mismatch);
+#elif CPU(ARM_TRADITIONAL) && COMPILER(GCC)
#define DEFINE_STUB_FUNCTION(rtype, op) \
extern "C" { \
RVCT({)
RVCT( ARM)
RVCT( IMPORT JITStubThunked_#op#)
-RVCT( str lr, [sp, #32])
+RVCT( str lr, [sp, ##offset#])
RVCT( bl JITStubThunked_#op#)
-RVCT( ldr lr, [sp, #32])
+RVCT( ldr lr, [sp, ##offset#])
RVCT( bx lr)
RVCT(})
RVCT()
DEFINE_STUB_FUNCTION(int, timeout_check)
{
STUB_INIT_STACK_FRAME(stackFrame);
-
+
JSGlobalData* globalData = stackFrame.globalData;
TimeoutChecker& timeoutChecker = globalData->timeoutChecker;
- if (timeoutChecker.didTimeOut(stackFrame.callFrame)) {
+ if (globalData->terminator.shouldTerminate()) {
+ globalData->exception = createTerminatedExecutionException(globalData);
+ VM_THROW_EXCEPTION_AT_END();
+ } else if (timeoutChecker.didTimeOut(stackFrame.callFrame)) {
globalData->exception = createInterruptedExecutionException(globalData);
VM_THROW_EXCEPTION_AT_END();
}
-
+
return timeoutChecker.ticksUntilNextCheck();
}
CHECK_FOR_EXCEPTION_AT_END();
}
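+// The op_put_by_id_direct* stubs use putDirect, which stores the property on the base object
+// itself (as for a property definition) rather than going through the normal, setter-aware put path.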
+DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_generic)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+
+ PutPropertySlot slot;
+ stackFrame.args[0].jsValue().putDirect(stackFrame.callFrame, stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
+ CHECK_FOR_EXCEPTION_AT_END();
+}
+
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_generic)
{
STUB_INIT_STACK_FRAME(stackFrame);
STUB_INIT_STACK_FRAME(stackFrame);
CallFrame* callFrame = stackFrame.callFrame;
Identifier& ident = stackFrame.args[1].identifier();
-
+
PutPropertySlot slot;
stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);
-
+
CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
if (!stubInfo->seenOnce())
stubInfo->setSeen();
else
- JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo);
+ JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, false);
+
+ CHECK_FOR_EXCEPTION_AT_END();
+}
+DEFINE_STUB_FUNCTION(void, op_put_by_id_direct)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+ CallFrame* callFrame = stackFrame.callFrame;
+ Identifier& ident = stackFrame.args[1].identifier();
+
+ PutPropertySlot slot;
+ stackFrame.args[0].jsValue().putDirect(callFrame, ident, stackFrame.args[2].jsValue(), slot);
+
+ CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
+ StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
+ if (!stubInfo->seenOnce())
+ stubInfo->setSeen();
+ else
+ JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, true);
+
CHECK_FOR_EXCEPTION_AT_END();
}
CHECK_FOR_EXCEPTION_AT_END();
}
+DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_fail)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+
+ CallFrame* callFrame = stackFrame.callFrame;
+ Identifier& ident = stackFrame.args[1].identifier();
+
+ PutPropertySlot slot;
+ stackFrame.args[0].jsValue().putDirect(callFrame, ident, stackFrame.args[2].jsValue(), slot);
+
+ CHECK_FOR_EXCEPTION_AT_END();
+}
+
DEFINE_STUB_FUNCTION(JSObject*, op_put_by_id_transition_realloc)
{
STUB_INIT_STACK_FRAME(stackFrame);
// If we successfully got something, then the base from which it is being accessed must
// be an object. (Assertion to ensure asObject() call below is safe, which comes after
// an isCacheable() check.)
- ASSERT(!slot.isCacheable() || slot.slotBase().isObject());
+ ASSERT(!slot.isCacheableValue() || slot.slotBase().isObject());
// Check that:
// * We're dealing with a JSCell,
JSCell* specific;
JSObject* slotBaseObject;
if (baseValue.isCell()
- && slot.isCacheable()
+ && slot.isCacheableValue()
&& !(structure = asCell(baseValue)->structure())->isUncacheableDictionary()
&& (slotBaseObject = asObject(slot.slotBase()))->getPropertySpecificValue(callFrame, ident, specific)
&& specific
if (stubInfo->accessType == access_get_by_id_self) {
ASSERT(!stubInfo->stubRoutine);
polymorphicStructureList = new PolymorphicAccessStructureList(CodeLocationLabel(), stubInfo->u.getByIdSelf.baseObjectStructure);
- stubInfo->initGetByIdSelfList(polymorphicStructureList, 2);
+ stubInfo->initGetByIdSelfList(polymorphicStructureList, 1);
} else {
polymorphicStructureList = stubInfo->u.getByIdSelfList.structureList;
listIndex = stubInfo->u.getByIdSelfList.listSize;
- stubInfo->u.getByIdSelfList.listSize++;
}
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ stubInfo->u.getByIdSelfList.listSize++;
+ JIT::compileGetByIdSelfList(callFrame->scopeChain()->globalData, codeBlock, stubInfo, polymorphicStructureList, listIndex, asCell(baseValue)->structure(), ident, slot, slot.cachedOffset());
- JIT::compileGetByIdSelfList(callFrame->scopeChain()->globalData, codeBlock, stubInfo, polymorphicStructureList, listIndex, asCell(baseValue)->structure(), slot.cachedOffset());
-
- if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
+ }
} else
ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
return JSValue::encode(result);
case access_get_by_id_proto_list:
prototypeStructureList = stubInfo->u.getByIdProtoList.structureList;
listIndex = stubInfo->u.getByIdProtoList.listSize;
- stubInfo->u.getByIdProtoList.listSize++;
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE)
+ stubInfo->u.getByIdProtoList.listSize++;
break;
default:
ASSERT_NOT_REACHED();
}
- ASSERT(listIndex < POLYMORPHIC_LIST_CACHE_SIZE);
+ ASSERT(listIndex <= POLYMORPHIC_LIST_CACHE_SIZE);
return prototypeStructureList;
}
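+// Stub for cached accessor loads: invokes the JS getter stored in the GetterSetter (args[0])
+// with the base object (args[1]) as 'this', bouncing to the throw trampoline on exception.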
+DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_getter_stub)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+ CallFrame* callFrame = stackFrame.callFrame;
+ GetterSetter* getterSetter = asGetterSetter(stackFrame.args[0].jsObject());
+ if (!getterSetter->getter())
+ return JSValue::encode(jsUndefined());
+ JSObject* getter = asObject(getterSetter->getter());
+ CallData callData;
+ CallType callType = getter->getCallData(callData);
+ JSValue result = call(callFrame, getter, callType, callData, stackFrame.args[1].jsObject(), ArgList());
+ if (callFrame->hadException())
+ returnToThrowTrampoline(&callFrame->globalData(), stackFrame.args[2].returnAddress(), STUB_RETURN_ADDRESS);
+
+ return JSValue::encode(result);
+}
+
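+// Stub for cached custom-getter loads: calls the native GetValueFunc (args[1]) for the
+// property on the slot base (args[0]), bouncing to the throw trampoline on exception.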
+DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_custom_stub)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+ CallFrame* callFrame = stackFrame.callFrame;
+ JSObject* slotBase = stackFrame.args[0].jsObject();
+ PropertySlot::GetValueFunc getter = reinterpret_cast<PropertySlot::GetValueFunc>(stackFrame.args[1].asPointer);
+ const Identifier& ident = stackFrame.args[2].identifier();
+ JSValue result = getter(callFrame, slotBase, ident);
+ if (callFrame->hadException())
+ returnToThrowTrampoline(&callFrame->globalData(), stackFrame.args[3].returnAddress(), STUB_RETURN_ADDRESS);
+
+ return JSValue::encode(result);
+}
+
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
{
STUB_INIT_STACK_FRAME(stackFrame);
int listIndex;
PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(stubInfo, listIndex);
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ JIT::compileGetByIdProtoList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), propertyName, slot, offset);
- JIT::compileGetByIdProtoList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), offset);
-
- if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ }
} else if (size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset)) {
ASSERT(!asCell(baseValue)->structure()->isDictionary());
int listIndex;
PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(stubInfo, listIndex);
+
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ StructureChain* protoChain = structure->prototypeChain(callFrame);
+ JIT::compileGetByIdChainList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, propertyName, slot, offset);
- StructureChain* protoChain = structure->prototypeChain(callFrame);
- JIT::compileGetByIdChainList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, offset);
-
- if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ }
} else
ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
if (argCount > newCodeBlock->m_numParameters) {
size_t numParameters = newCodeBlock->m_numParameters;
Register* r = callFrame->registers() + numParameters;
+ Register* newEnd = r + newCodeBlock->m_numCalleeRegisters;
+ if (!stackFrame.registerFile->grow(newEnd)) {
+ // Rewind to the previous call frame because op_call already optimistically
+ // moved the call frame forward.
+ stackFrame.callFrame = oldCallFrame;
+ throwStackOverflowError(oldCallFrame, stackFrame.globalData, stackFrame.args[1].returnAddress(), STUB_RETURN_ADDRESS);
+ RETURN_POINTER_PAIR(0, 0);
+ }
Register* argv = r - RegisterFile::CallFrameHeaderSize - numParameters - argCount;
for (size_t i = 0; i < numParameters; ++i)
callFrame->setCallerFrame(oldCallFrame);
}
+ ASSERT((void*)callFrame <= stackFrame.registerFile->end());
RETURN_POINTER_PAIR(callee, callFrame);
}
JSValue baseValue = stackFrame.args[0].jsValue();
JSValue subscript = stackFrame.args[1].jsValue();
- JSValue result;
+ if (LIKELY(baseValue.isCell() && subscript.isString())) {
+ Identifier propertyName(callFrame, asString(subscript)->value(callFrame));
+ PropertySlot slot(asCell(baseValue));
+ // JSString::value may have thrown, but we shouldn't find a property with a null identifier,
+ // so we should miss this case and wind up in the CHECK_FOR_EXCEPTION_AT_END, below.
+ if (asCell(baseValue)->fastGetOwnPropertySlot(callFrame, propertyName, slot)) {
+ JSValue result = slot.getValue(callFrame, propertyName);
+ CHECK_FOR_EXCEPTION();
+ return JSValue::encode(result);
+ }
+ }
- if (LIKELY(subscript.isUInt32())) {
+ if (subscript.isUInt32()) {
uint32_t i = subscript.asUInt32();
- if (isJSArray(globalData, baseValue)) {
- JSArray* jsArray = asArray(baseValue);
- if (jsArray->canGetIndex(i))
- result = jsArray->getIndex(i);
- else
- result = jsArray->JSArray::get(callFrame, i);
- } else if (isJSString(globalData, baseValue) && asString(baseValue)->canGetIndex(i)) {
- // All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
+ if (isJSString(globalData, baseValue) && asString(baseValue)->canGetIndex(i)) {
ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_string));
- result = asString(baseValue)->getIndex(callFrame, i);
- } else if (isJSByteArray(globalData, baseValue) && asByteArray(baseValue)->canAccessIndex(i)) {
+ JSValue result = asString(baseValue)->getIndex(callFrame, i);
+ CHECK_FOR_EXCEPTION();
+ return JSValue::encode(result);
+ }
+ if (isJSByteArray(globalData, baseValue) && asByteArray(baseValue)->canAccessIndex(i)) {
// All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_byte_array));
return JSValue::encode(asByteArray(baseValue)->getIndex(callFrame, i));
- } else
- result = baseValue.get(callFrame, i);
- } else {
- Identifier property(callFrame, subscript.toString(callFrame));
- result = baseValue.get(callFrame, property);
+ }
+ JSValue result = baseValue.get(callFrame, i);
+ CHECK_FOR_EXCEPTION();
+ return JSValue::encode(result);
}
-
+
+ Identifier property(callFrame, subscript.toString(callFrame));
+ JSValue result = baseValue.get(callFrame, property);
CHECK_FOR_EXCEPTION_AT_END();
return JSValue::encode(result);
}
PropertySlot slot(globalObject);
if (globalObject->getPropertySlot(callFrame, ident, slot)) {
JSValue result = slot.getValue(callFrame, ident);
- if (slot.isCacheable() && !globalObject->structure()->isUncacheableDictionary() && slot.slotBase() == globalObject) {
+ if (slot.isCacheableValue() && !globalObject->structure()->isUncacheableDictionary() && slot.slotBase() == globalObject) {
GlobalResolveInfo& globalResolveInfo = callFrame->codeBlock()->globalResolveInfo(globalResolveInfoIndex);
if (globalResolveInfo.structure)
globalResolveInfo.structure->deref();
#endif // USE(JSVALUE32_64)
}
-#if USE(JSVALUE32_64)
-
DEFINE_STUB_FUNCTION(int, op_eq_strings)
{
+#if USE(JSVALUE32_64)
STUB_INIT_STACK_FRAME(stackFrame);
JSString* string1 = stackFrame.args[0].jsString();
ASSERT(string1->isString());
ASSERT(string2->isString());
return string1->value(stackFrame.callFrame) == string2->value(stackFrame.callFrame);
-}
-
+#else
+ UNUSED_PARAM(args);
+ ASSERT_NOT_REACHED();
+ return 0;
#endif
+}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_lshift)
{
JSObject* base = stackFrame.args[0].jsObject();
JSString* property = stackFrame.args[1].jsString();
- return base->hasProperty(stackFrame.callFrame, Identifier(stackFrame.callFrame, property->value(stackFrame.callFrame)));
+ int result = base->hasProperty(stackFrame.callFrame, Identifier(stackFrame.callFrame, property->value(stackFrame.callFrame)));
+ CHECK_FOR_EXCEPTION_AT_END();
+ return result;
}
DEFINE_STUB_FUNCTION(JSObject*, op_push_scope)
JSValue src1 = stackFrame.args[0].jsValue();
JSValue src2 = stackFrame.args[1].jsValue();
- return JSValue::encode(jsBoolean(JSValue::strictEqual(stackFrame.callFrame, src1, src2)));
+ bool result = JSValue::strictEqual(stackFrame.callFrame, src1, src2);
+ CHECK_FOR_EXCEPTION_AT_END();
+ return JSValue::encode(jsBoolean(result));
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_to_primitive)
JSValue src1 = stackFrame.args[0].jsValue();
JSValue src2 = stackFrame.args[1].jsValue();
- return JSValue::encode(jsBoolean(!JSValue::strictEqual(stackFrame.callFrame, src1, src2)));
+ bool result = !JSValue::strictEqual(stackFrame.callFrame, src1, src2);
+ CHECK_FOR_EXCEPTION_AT_END();
+ return JSValue::encode(jsBoolean(result));
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_to_jsnumber)
if (scrutinee.isString()) {
UString::Rep* value = asString(scrutinee)->value(callFrame).rep();
- if (value->size() == 1)
- result = codeBlock->characterSwitchJumpTable(tableIndex).ctiForValue(value->data()[0]).executableAddress();
+ if (value->length() == 1)
+ result = codeBlock->characterSwitchJumpTable(tableIndex).ctiForValue(value->characters()[0]).executableAddress();
}
+ CHECK_FOR_EXCEPTION_AT_END();
return result;
}
result = codeBlock->stringSwitchJumpTable(tableIndex).ctiForValue(value).executableAddress();
}
+ CHECK_FOR_EXCEPTION_AT_END();
return result;
}
return JSValue::encode(stackFrame.args[0].jsValue().toObject(callFrame));
}
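+// Lazily compiles and caches one thunk per generator; later requests for the same
+// generator return the cached NativeExecutable.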
+NativeExecutable* JITThunks::specializedThunk(JSGlobalData* globalData, ThunkGenerator generator)
+{
+ std::pair<ThunkMap::iterator, bool> entry = m_thunkMap.add(generator, 0);
+ if (!entry.second)
+ return entry.first->second.get();
+ entry.first->second = generator(globalData, m_executablePool.get());
+ return entry.first->second.get();
+}
+
} // namespace JSC
#endif // ENABLE(JIT)