#include "Collector.h"
#include "Debugger.h"
#include "ExceptionHelpers.h"
+#include "GetterSetter.h"
#include "GlobalEvalFunction.h"
#include "JIT.h"
#include "JSActivation.h"
#include "RegExpPrototype.h"
#include "Register.h"
#include "SamplingTool.h"
+#include <wtf/StdLibExtras.h>
#include <stdarg.h>
#include <stdio.h>
namespace JSC {
-#if PLATFORM(DARWIN) || PLATFORM(WIN_OS)
+#if OS(DARWIN) || OS(WINDOWS)
#define SYMBOL_STRING(name) "_" #name
#else
#define SYMBOL_STRING(name) #name
#endif
+#if OS(IPHONE_OS)
+#define THUMB_FUNC_PARAM(name) SYMBOL_STRING(name)
+#else
+#define THUMB_FUNC_PARAM(name)
+#endif
+
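+// On Linux/x86-64, calls from inline assembly to the extern "C" stub functions must be
+// routed through the PLT when building position-independent code; SYMBOL_STRING_RELOCATION
+// appends "@plt" there and expands to the plain symbol everywhere else.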
+#if OS(LINUX) && CPU(X86_64)
+#define SYMBOL_STRING_RELOCATION(name) #name "@plt"
+#else
+#define SYMBOL_STRING_RELOCATION(name) SYMBOL_STRING(name)
+#endif
+
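+// HIDE_SYMBOL emits the platform's local-visibility directive for the trampoline labels,
+// keeping them out of the dynamic symbol table of the built library.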
+#if OS(DARWIN)
+ // Mach-O platform
+#define HIDE_SYMBOL(name) ".private_extern _" #name
+#elif OS(AIX)
+ // IBM's own file format
+#define HIDE_SYMBOL(name) ".lglobl " #name
+#elif OS(LINUX) \
+ || OS(FREEBSD) \
+ || OS(OPENBSD) \
+ || OS(SOLARIS) \
+ || (OS(HPUX) && CPU(IA64)) \
+ || OS(SYMBIAN) \
+ || OS(NETBSD)
+ // ELF platform
+#define HIDE_SYMBOL(name) ".hidden " #name
+#else
+#define HIDE_SYMBOL(name)
+#endif
+
#if USE(JSVALUE32_64)
-#if COMPILER(GCC) && PLATFORM(X86)
+#if COMPILER(GCC) && CPU(X86)
// These ASSERTs remind you that, if you change the layout of JITStackFrame, you
// need to change the assembly trampolines below to match.
COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x50, JITStackFrame_code_offset_matches_ctiTrampoline);
asm volatile (
+".text\n"
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
+HIDE_SYMBOL(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
"pushl %ebp" "\n"
"movl %esp, %ebp" "\n"
asm volatile (
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
#if !USE(JIT_STUB_ARGUMENT_VA_LIST)
"movl %esp, %ecx" "\n"
#endif
- "call " SYMBOL_STRING(cti_vm_throw) "\n"
+ "call " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
"addl $0x3c, %esp" "\n"
"popl %ebx" "\n"
"popl %edi" "\n"
asm volatile (
".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
"addl $0x3c, %esp" "\n"
"popl %ebx" "\n"
"ret" "\n"
);
-#elif COMPILER(GCC) && PLATFORM(X86_64)
+#elif COMPILER(GCC) && CPU(X86_64)
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
#error "JIT_STUB_ARGUMENT_VA_LIST not supported on x86-64."
asm volatile (
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
+HIDE_SYMBOL(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
"pushq %rbp" "\n"
"movq %rsp, %rbp" "\n"
asm volatile (
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
"movq %rsp, %rdi" "\n"
- "call " SYMBOL_STRING(cti_vm_throw) "\n"
+ "call " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
"addq $0x48, %rsp" "\n"
"popq %rbx" "\n"
"popq %r15" "\n"
asm volatile (
".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
"addq $0x48, %rsp" "\n"
"popq %rbx" "\n"
"ret" "\n"
);
-#elif COMPILER(GCC) && PLATFORM_ARM_ARCH(7)
+#elif COMPILER(GCC) && CPU(ARM_THUMB2)
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
#error "JIT_STUB_ARGUMENT_VA_LIST not supported on ARMv7."
#endif
-asm volatile (
-".text" "\n"
-".align 2" "\n"
-".globl " SYMBOL_STRING(ctiTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " SYMBOL_STRING(ctiTrampoline) "\n"
-SYMBOL_STRING(ctiTrampoline) ":" "\n"
- "sub sp, sp, #0x3c" "\n"
- "str lr, [sp, #0x20]" "\n"
- "str r4, [sp, #0x24]" "\n"
- "str r5, [sp, #0x28]" "\n"
- "str r6, [sp, #0x2c]" "\n"
- "str r1, [sp, #0x30]" "\n"
- "str r2, [sp, #0x34]" "\n"
- "str r3, [sp, #0x38]" "\n"
- "cpy r5, r2" "\n"
- "mov r6, #512" "\n"
- "blx r0" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
+#define THUNK_RETURN_ADDRESS_OFFSET 0x3C
+#define PRESERVED_RETURN_ADDRESS_OFFSET 0x40
+#define PRESERVED_R4_OFFSET 0x44
+#define PRESERVED_R5_OFFSET 0x48
+#define PRESERVED_R6_OFFSET 0x4C
+#define REGISTER_FILE_OFFSET 0x50
+#define CALLFRAME_OFFSET 0x54
+#define EXCEPTION_OFFSET 0x58
+#define ENABLE_PROFILER_REFERENCE_OFFSET 0x60
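+// The offsets above mirror the JITStackFrame layout consumed by the Thumb-2 trampolines
+// and stub wrappers below; the JITThunks constructor asserts that they stay in sync.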
-asm volatile (
-".text" "\n"
-".align 2" "\n"
-".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
- "cpy r0, sp" "\n"
- "bl " SYMBOL_STRING(cti_vm_throw) "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
+#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
-asm volatile (
-".text" "\n"
-".align 2" "\n"
-".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-".thumb" "\n"
-".thumb_func " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
+#define THUNK_RETURN_ADDRESS_OFFSET 64
+#define PRESERVEDR4_OFFSET 68
-#elif COMPILER(MSVC)
+#elif COMPILER(MSVC) && CPU(X86)
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
#error "JIT_STUB_ARGUMENT_VA_LIST configuration not supported on MSVC."
}
}
-#endif // COMPILER(GCC) && PLATFORM(X86)
+#else
+ #error "JIT not supported on this platform."
+#endif
#else // USE(JSVALUE32_64)
-#if COMPILER(GCC) && PLATFORM(X86)
+#if COMPILER(GCC) && CPU(X86)
// These ASSERTs remind you that, if you change the layout of JITStackFrame, you
// need to change the assembly trampolines below to match.
COMPILE_ASSERT(offsetof(struct JITStackFrame, savedEBX) == 0x1c, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
asm volatile (
+".text\n"
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
+HIDE_SYMBOL(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
"pushl %ebp" "\n"
"movl %esp, %ebp" "\n"
asm volatile (
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
#if !USE(JIT_STUB_ARGUMENT_VA_LIST)
"movl %esp, %ecx" "\n"
#endif
- "call " SYMBOL_STRING(cti_vm_throw) "\n"
+ "call " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
"addl $0x1c, %esp" "\n"
"popl %ebx" "\n"
"popl %edi" "\n"
asm volatile (
".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
"addl $0x1c, %esp" "\n"
"popl %ebx" "\n"
"ret" "\n"
);
-#elif COMPILER(GCC) && PLATFORM(X86_64)
+#elif COMPILER(GCC) && CPU(X86_64)
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
#error "JIT_STUB_ARGUMENT_VA_LIST not supported on x86-64."
// These ASSERTs remind you that, if you change the layout of JITStackFrame, you
// need to change the assembly trampolines below to match.
-COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x90, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
-COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x80, JITStackFrame_code_offset_matches_ctiTrampoline);
-COMPILE_ASSERT(offsetof(struct JITStackFrame, savedRBX) == 0x48, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
+COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x58, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
+COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x48, JITStackFrame_code_offset_matches_ctiTrampoline);
+COMPILE_ASSERT(offsetof(struct JITStackFrame, savedRBX) == 0x78, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
asm volatile (
+".text\n"
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
+HIDE_SYMBOL(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
"pushq %rbp" "\n"
"movq %rsp, %rbp" "\n"
"pushq %r14" "\n"
"pushq %r15" "\n"
"pushq %rbx" "\n"
+ // Form the JIT stubs area
+ "pushq %r9" "\n"
+ "pushq %r8" "\n"
+ "pushq %rcx" "\n"
+ "pushq %rdx" "\n"
+ "pushq %rsi" "\n"
+ "pushq %rdi" "\n"
"subq $0x48, %rsp" "\n"
"movq $512, %r12" "\n"
"movq $0xFFFF000000000000, %r14" "\n"
"movq $0xFFFF000000000002, %r15" "\n"
- "movq 0x90(%rsp), %r13" "\n"
- "call *0x80(%rsp)" "\n"
- "addq $0x48, %rsp" "\n"
+ "movq %rdx, %r13" "\n"
+ "call *%rdi" "\n"
+ "addq $0x78, %rsp" "\n"
"popq %rbx" "\n"
"popq %r15" "\n"
"popq %r14" "\n"
asm volatile (
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
"movq %rsp, %rdi" "\n"
- "call " SYMBOL_STRING(cti_vm_throw) "\n"
- "addq $0x48, %rsp" "\n"
+ "call " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
+ "addq $0x78, %rsp" "\n"
"popq %rbx" "\n"
"popq %r15" "\n"
"popq %r14" "\n"
asm volatile (
".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "addq $0x48, %rsp" "\n"
+ "addq $0x78, %rsp" "\n"
"popq %rbx" "\n"
"popq %r15" "\n"
"popq %r14" "\n"
"ret" "\n"
);
-#elif COMPILER(GCC) && PLATFORM_ARM_ARCH(7)
+#elif COMPILER(GCC) && CPU(ARM_THUMB2)
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
#error "JIT_STUB_ARGUMENT_VA_LIST not supported on ARMv7."
#endif
-asm volatile (
+#define THUNK_RETURN_ADDRESS_OFFSET 0x1C
+#define PRESERVED_RETURN_ADDRESS_OFFSET 0x20
+#define PRESERVED_R4_OFFSET 0x24
+#define PRESERVED_R5_OFFSET 0x28
+#define PRESERVED_R6_OFFSET 0x2C
+#define REGISTER_FILE_OFFSET 0x30
+#define CALLFRAME_OFFSET 0x34
+#define EXCEPTION_OFFSET 0x38
+#define ENABLE_PROFILER_REFERENCE_OFFSET 0x40
+
+#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
+
+#define THUNK_RETURN_ADDRESS_OFFSET 32
+#define PRESERVEDR4_OFFSET 36
+
+#elif CPU(MIPS)
+
+#if USE(JIT_STUB_ARGUMENT_VA_LIST)
+#error "JIT_STUB_ARGUMENT_VA_LIST not supported on MIPS."
+#endif
+
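+// MIPS o32 register conventions used below: $29 = sp, $31 = ra, $25 = t9 (the register a
+// PIC callee expects its own address in), $4-$7 = a0-a3 (first four arguments, with
+// arguments five and six spilled to the caller's stack), $16-$18 = s0-s2 (callee-saved),
+// and $28 = gp, which PIC code must save and restore across calls.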
+asm volatile(
".text" "\n"
".align 2" "\n"
+".set noreorder" "\n"
+".set nomacro" "\n"
+".set nomips16" "\n"
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " SYMBOL_STRING(ctiTrampoline) "\n"
+".ent " SYMBOL_STRING(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
- "sub sp, sp, #0x3c" "\n"
- "str lr, [sp, #0x20]" "\n"
- "str r4, [sp, #0x24]" "\n"
- "str r5, [sp, #0x28]" "\n"
- "str r6, [sp, #0x2c]" "\n"
- "str r1, [sp, #0x30]" "\n"
- "str r2, [sp, #0x34]" "\n"
- "str r3, [sp, #0x38]" "\n"
- "cpy r5, r2" "\n"
- "mov r6, #512" "\n"
- "blx r0" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
+ "addiu $29,$29,-72" "\n"
+ "sw $31,44($29)" "\n"
+ "sw $18,40($29)" "\n"
+ "sw $17,36($29)" "\n"
+ "sw $16,32($29)" "\n"
+#if WTF_MIPS_PIC
+ "sw $28,28($29)" "\n"
+#endif
+ "move $16,$6 # set callFrameRegister" "\n"
+ "li $17,512 # set timeoutCheckRegister" "\n"
+ "move $25,$4 # move executableAddress to t9" "\n"
+ "sw $5,52($29) # store registerFile to current stack" "\n"
+ "sw $6,56($29) # store callFrame to curent stack" "\n"
+ "sw $7,60($29) # store exception to current stack" "\n"
+ "lw $8,88($29) # load enableProfilerReference from previous stack" "\n"
+ "lw $9,92($29) # load globalData from previous stack" "\n"
+ "sw $8,64($29) # store enableProfilerReference to current stack" "\n"
+ "jalr $25" "\n"
+ "sw $9,68($29) # store globalData to current stack" "\n"
+ "lw $16,32($29)" "\n"
+ "lw $17,36($29)" "\n"
+ "lw $18,40($29)" "\n"
+ "lw $31,44($29)" "\n"
+ "jr $31" "\n"
+ "addiu $29,$29,72" "\n"
+".set reorder" "\n"
+".set macro" "\n"
+".end " SYMBOL_STRING(ctiTrampoline) "\n"
);
-asm volatile (
+asm volatile(
".text" "\n"
".align 2" "\n"
+".set noreorder" "\n"
+".set nomacro" "\n"
+".set nomips16" "\n"
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+".ent " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
- "cpy r0, sp" "\n"
- "bl " SYMBOL_STRING(cti_vm_throw) "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
+#if WTF_MIPS_PIC
+ "lw $28,28($29)" "\n"
+".set macro" "\n"
+ "la $25," SYMBOL_STRING(cti_vm_throw) "\n"
+".set nomacro" "\n"
+ "bal " SYMBOL_STRING(cti_vm_throw) "\n"
+ "move $4,$29" "\n"
+#else
+ "jal " SYMBOL_STRING(cti_vm_throw) "\n"
+ "move $4,$29" "\n"
+#endif
+ "lw $16,32($29)" "\n"
+ "lw $17,36($29)" "\n"
+ "lw $18,40($29)" "\n"
+ "lw $31,44($29)" "\n"
+ "jr $31" "\n"
+ "addiu $29,$29,72" "\n"
+".set reorder" "\n"
+".set macro" "\n"
+".end " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
);
-asm volatile (
+asm volatile(
".text" "\n"
".align 2" "\n"
+".set noreorder" "\n"
+".set nomacro" "\n"
+".set nomips16" "\n"
".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-".thumb" "\n"
-".thumb_func " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+".ent " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
+ "lw $16,32($29)" "\n"
+ "lw $17,36($29)" "\n"
+ "lw $18,40($29)" "\n"
+ "lw $31,44($29)" "\n"
+ "jr $31" "\n"
+ "addiu $29,$29,72" "\n"
+".set reorder" "\n"
+".set macro" "\n"
+".end " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
);
-#elif COMPILER(MSVC)
+#elif COMPILER(RVCT) && CPU(ARM_TRADITIONAL)
+
+#define THUNK_RETURN_ADDRESS_OFFSET 32
+#define PRESERVEDR4_OFFSET 36
+
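+// RVCT embedded assembler: the ARM directive selects ARM (not Thumb) encoding for these
+// functions, and PRESERVE8 asserts that the stack stays 8-byte aligned across the call.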
+__asm EncodedJSValue ctiTrampoline(void*, RegisterFile*, CallFrame*, JSValue*, Profiler**, JSGlobalData*)
+{
+ ARM
+ stmdb sp!, {r1-r3}
+ stmdb sp!, {r4-r8, lr}
+ sub sp, sp, #36
+ mov r4, r2
+ mov r5, #512
+ mov lr, pc
+ bx r0
+ add sp, sp, #36
+ ldmia sp!, {r4-r8, lr}
+ add sp, sp, #12
+ bx lr
+}
+
+__asm void ctiVMThrowTrampoline()
+{
+ ARM
+ PRESERVE8
+ mov r0, sp
+ bl cti_vm_throw
+ add sp, sp, #36
+ ldmia sp!, {r4-r8, lr}
+ add sp, sp, #12
+ bx lr
+}
+
+__asm void ctiOpThrowNotCaught()
+{
+ ARM
+ add sp, sp, #36
+ ldmia sp!, {r4-r8, lr}
+ add sp, sp, #12
+ bx lr
+}
+
+#elif COMPILER(MSVC) && CPU(X86)
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
#error "JIT_STUB_ARGUMENT_VA_LIST configuration not supported on MSVC."
}
}
-#endif // COMPILER(GCC) && PLATFORM(X86)
+#else
+ #error "JIT not supported on this platform."
+#endif
#endif // USE(JSVALUE32_64)
+#if COMPILER(GCC) && CPU(ARM_THUMB2)
+
+asm volatile(
+".text" "\n"
+".align 2" "\n"
+".globl " SYMBOL_STRING(ctiTrampoline) "\n"
+HIDE_SYMBOL(ctiTrampoline) "\n"
+".thumb" "\n"
+".thumb_func " THUMB_FUNC_PARAM(ctiTrampoline) "\n"
+SYMBOL_STRING(ctiTrampoline) ":" "\n"
+ "sub sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "str lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "str r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "str r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "str r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "str r1, [sp, #" STRINGIZE_VALUE_OF(REGISTER_FILE_OFFSET) "]" "\n"
+ "str r2, [sp, #" STRINGIZE_VALUE_OF(CALLFRAME_OFFSET) "]" "\n"
+ "str r3, [sp, #" STRINGIZE_VALUE_OF(EXCEPTION_OFFSET) "]" "\n"
+ "cpy r5, r2" "\n"
+ "mov r6, #512" "\n"
+ "blx r0" "\n"
+ "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "bx lr" "\n"
+);
+
+asm volatile(
+".text" "\n"
+".align 2" "\n"
+".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
+".thumb" "\n"
+".thumb_func " THUMB_FUNC_PARAM(ctiVMThrowTrampoline) "\n"
+SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
+ "cpy r0, sp" "\n"
+ "bl " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
+ "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "bx lr" "\n"
+);
+
+asm volatile(
+".text" "\n"
+".align 2" "\n"
+".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
+".thumb" "\n"
+".thumb_func " THUMB_FUNC_PARAM(ctiOpThrowNotCaught) "\n"
+SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
+ "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "bx lr" "\n"
+);
+
+#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
+
+asm volatile(
+".globl " SYMBOL_STRING(ctiTrampoline) "\n"
+HIDE_SYMBOL(ctiTrampoline) "\n"
+SYMBOL_STRING(ctiTrampoline) ":" "\n"
+ "stmdb sp!, {r1-r3}" "\n"
+ "stmdb sp!, {r4-r8, lr}" "\n"
+ "sub sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
+ "mov r4, r2" "\n"
+ "mov r5, #512" "\n"
+ // r0 contains the code
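+    // "mov lr, pc" / "mov pc, r0" is the pre-BLX call sequence: reading pc in ARM state
+    // yields the address two instructions ahead, so lr ends up just past "mov pc, r0".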
+ "mov lr, pc" "\n"
+ "mov pc, r0" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
+ "ldmia sp!, {r4-r8, lr}" "\n"
+ "add sp, sp, #12" "\n"
+ "mov pc, lr" "\n"
+);
+
+asm volatile(
+".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
+SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
+ "mov r0, sp" "\n"
+ "bl " SYMBOL_STRING(cti_vm_throw) "\n"
+
+// Both have the same return sequence
+".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
+SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
+ "ldmia sp!, {r4-r8, lr}" "\n"
+ "add sp, sp, #12" "\n"
+ "mov pc, lr" "\n"
+);
+
+#endif
+
#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER stackFrame.globalData->interpreter->sampler()
#else
JITThunks::JITThunks(JSGlobalData* globalData)
{
- JIT::compileCTIMachineTrampolines(globalData, &m_executablePool, &m_ctiStringLengthTrampoline, &m_ctiVirtualCallPreLink, &m_ctiVirtualCallLink, &m_ctiVirtualCall, &m_ctiNativeCallThunk);
+ JIT::compileCTIMachineTrampolines(globalData, &m_executablePool, &m_trampolineStructure);
-#if PLATFORM_ARM_ARCH(7)
+#if CPU(ARM_THUMB2)
    // Unfortunately, the ARM compiler does not like the use of offsetof on JITStackFrame (since it contains non-POD types),
// and the OBJECT_OFFSETOF macro does not appear constantish enough for it to be happy with its use in COMPILE_ASSERT
// macros.
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == 0x20);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == 0x24);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR5) == 0x28);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR6) == 0x2c);
-
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, registerFile) == 0x30);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, callFrame) == 0x34);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, exception) == 0x38);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == PRESERVED_RETURN_ADDRESS_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == PRESERVED_R4_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR5) == PRESERVED_R5_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR6) == PRESERVED_R6_OFFSET);
+
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, registerFile) == REGISTER_FILE_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, callFrame) == CALLFRAME_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, exception) == EXCEPTION_OFFSET);
// The fifth argument is the first item already on the stack.
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, enabledProfilerReference) == 0x3c);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, enabledProfilerReference) == ENABLE_PROFILER_REFERENCE_OFFSET);
+
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET);
+
+#elif CPU(ARM_TRADITIONAL)
+
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == PRESERVEDR4_OFFSET);
+
+
+#elif CPU(MIPS)
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedGP) == 28);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS0) == 32);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS1) == 36);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS2) == 40);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == 44);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == 48);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, registerFile) == 52);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, callFrame) == 56);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, exception) == 60);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, enabledProfilerReference) == 64);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, globalData) == 68);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == 0x1C);
#endif
}
+JITThunks::~JITThunks()
+{
+}
+
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
-NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot)
+NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo, bool direct)
{
// The interpreter checks for recursion here; I do not believe this can occur in CTI.
// Uncacheable: give up.
if (!slot.isCacheable()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
Structure* structure = baseCell->structure();
if (structure->isUncacheableDictionary()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
// If baseCell != base, then baseCell must be a proxy for another object.
if (baseCell != slot.base()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
- StructureStubInfo* stubInfo = &codeBlock->getStubInfo(returnAddress);
-
// Cache hit: Specialize instruction and ref Structures.
// Structure transition, cache transition info
if (slot.type() == PutPropertySlot::NewProperty) {
- StructureChain* prototypeChain = structure->prototypeChain(callFrame);
- if (!prototypeChain->isCacheable() || structure->isDictionary()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ if (structure->isDictionary()) {
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
+
+ // put_by_id_transition checks the prototype chain for setters.
+ normalizePrototypeChain(callFrame, baseCell);
+
+ StructureChain* prototypeChain = structure->prototypeChain(callFrame);
stubInfo->initPutByIdTransition(structure->previousID(), structure, prototypeChain);
- JIT::compilePutByIdTransition(callFrame->scopeChain()->globalData, codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress);
+ JIT::compilePutByIdTransition(callFrame->scopeChain()->globalData, codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct);
return;
}
stubInfo->initPutByIdReplace(structure);
- JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
+ JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress, direct);
}
-NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot)
+NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo* stubInfo)
{
// FIXME: Write a test that proves we need to check for recursion here just
// like the interpreter does, then add a check for recursion.
if (isJSString(globalData, baseValue) && propertyName == callFrame->propertyNames().length) {
        // The tradeoff of compiling a patched inline string length access routine does not seem
// to pay off, so we currently only do this for arrays.
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, globalData->jitStubs.ctiStringLengthTrampoline());
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, globalData->jitStubs->ctiStringLengthTrampoline());
return;
}
return;
}
- // In the interpreter the last structure is trapped here; in CTI we use the
- // *_second method to achieve a similar (but not quite the same) effect.
-
- StructureStubInfo* stubInfo = &codeBlock->getStubInfo(returnAddress);
-
// Cache hit: Specialize instruction and ref Structures.
if (slot.slotBase() == baseValue) {
        // Set this up so derefStructures can do its job.
stubInfo->initGetByIdSelf(structure);
-
- JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
+ if (slot.cachedPropertyType() != PropertySlot::Value)
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
+ else
+ JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
return;
}
stubInfo->initGetByIdProto(structure, slotBaseObject->structure());
- JIT::compileGetByIdProto(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), offset, returnAddress);
+ ASSERT(!structure->isDictionary());
+ ASSERT(!slotBaseObject->structure()->isDictionary());
+ JIT::compileGetByIdProto(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress);
return;
}
size_t offset = slot.cachedOffset();
size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset);
if (!count) {
- stubInfo->opcodeID = op_get_by_id_generic;
+ stubInfo->accessType = access_get_by_id_generic;
return;
}
StructureChain* prototypeChain = structure->prototypeChain(callFrame);
- if (!prototypeChain->isCacheable()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
- return;
- }
stubInfo->initGetByIdChain(structure, prototypeChain);
- JIT::compileGetByIdChain(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, prototypeChain, count, offset, returnAddress);
+ JIT::compileGetByIdChain(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress);
}
#endif // ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
} \
} while (0)
-#if PLATFORM_ARM_ARCH(7)
+#if CPU(ARM_THUMB2)
#define DEFINE_STUB_FUNCTION(rtype, op) \
extern "C" { \
".text" "\n" \
".align 2" "\n" \
".globl " SYMBOL_STRING(cti_##op) "\n" \
+ HIDE_SYMBOL(cti_##op) "\n" \
".thumb" "\n" \
- ".thumb_func " SYMBOL_STRING(cti_##op) "\n" \
+ ".thumb_func " THUMB_FUNC_PARAM(cti_##op) "\n" \
SYMBOL_STRING(cti_##op) ":" "\n" \
- "str lr, [sp, #0x1c]" "\n" \
+ "str lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
"bl " SYMBOL_STRING(JITStubThunked_##op) "\n" \
- "ldr lr, [sp, #0x1c]" "\n" \
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
"bx lr" "\n" \
); \
rtype JITStubThunked_##op(STUB_ARGS_DECLARATION) \
+#elif CPU(MIPS)
+#if WTF_MIPS_PIC
+#define DEFINE_STUB_FUNCTION(rtype, op) \
+ extern "C" { \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
+ }; \
+ asm volatile( \
+ ".text" "\n" \
+ ".align 2" "\n" \
+ ".set noreorder" "\n" \
+ ".set nomacro" "\n" \
+ ".set nomips16" "\n" \
+ ".globl " SYMBOL_STRING(cti_##op) "\n" \
+ ".ent " SYMBOL_STRING(cti_##op) "\n" \
+ SYMBOL_STRING(cti_##op) ":" "\n" \
+ "lw $28,28($29)" "\n" \
+ "sw $31,48($29)" "\n" \
+ ".set macro" "\n" \
+ "la $25," SYMBOL_STRING(JITStubThunked_##op) "\n" \
+ ".set nomacro" "\n" \
+ "bal " SYMBOL_STRING(JITStubThunked_##op) "\n" \
+ "nop" "\n" \
+ "lw $31,48($29)" "\n" \
+ "jr $31" "\n" \
+ "nop" "\n" \
+ ".set reorder" "\n" \
+ ".set macro" "\n" \
+ ".end " SYMBOL_STRING(cti_##op) "\n" \
+ ); \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
+
+#else // WTF_MIPS_PIC
+#define DEFINE_STUB_FUNCTION(rtype, op) \
+ extern "C" { \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
+ }; \
+ asm volatile( \
+ ".text" "\n" \
+ ".align 2" "\n" \
+ ".set noreorder" "\n" \
+ ".set nomacro" "\n" \
+ ".set nomips16" "\n" \
+ ".globl " SYMBOL_STRING(cti_##op) "\n" \
+ ".ent " SYMBOL_STRING(cti_##op) "\n" \
+ SYMBOL_STRING(cti_##op) ":" "\n" \
+ "sw $31,48($29)" "\n" \
+ "jal " SYMBOL_STRING(JITStubThunked_##op) "\n" \
+ "nop" "\n" \
+ "lw $31,48($29)" "\n" \
+ "jr $31" "\n" \
+ "nop" "\n" \
+ ".set reorder" "\n" \
+ ".set macro" "\n" \
+ ".end " SYMBOL_STRING(cti_##op) "\n" \
+ ); \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
+
+#endif
+
+#elif CPU(ARM_TRADITIONAL) && COMPILER(GCC)
+
+#define DEFINE_STUB_FUNCTION(rtype, op) \
+ extern "C" { \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
+ }; \
+ asm volatile ( \
+ ".globl " SYMBOL_STRING(cti_##op) "\n" \
+ SYMBOL_STRING(cti_##op) ":" "\n" \
+ "str lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
+ "bl " SYMBOL_STRING(JITStubThunked_##op) "\n" \
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
+ "mov pc, lr" "\n" \
+ ); \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
+
+#elif CPU(ARM_TRADITIONAL) && COMPILER(RVCT)
+
+#define DEFINE_STUB_FUNCTION(rtype, op) rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
+
+/* The following is a workaround for the RVCT toolchain; preprocessor macros are not expanded before the code is passed to the assembler */
+
+/* The following section is a template to generate code for GeneratedJITStubs_RVCT.h */
+/* The pattern "#xxx#" will be replaced with "xxx" */
+
+/*
+RVCT(extern "C" #rtype# JITStubThunked_#op#(STUB_ARGS_DECLARATION);)
+RVCT(__asm #rtype# cti_#op#(STUB_ARGS_DECLARATION))
+RVCT({)
+RVCT( ARM)
+RVCT( IMPORT JITStubThunked_#op#)
+RVCT( str lr, [sp, ##offset#])
+RVCT( bl JITStubThunked_#op#)
+RVCT( ldr lr, [sp, ##offset#])
+RVCT( bx lr)
+RVCT(})
+RVCT()
+*/
+
+/* Include the generated file */
+#include "GeneratedJITStubs_RVCT.h"
+
#else
#define DEFINE_STUB_FUNCTION(rtype, op) rtype JIT_STUB cti_##op(STUB_ARGS_DECLARATION)
#endif
JSValue v1 = stackFrame.args[0].jsValue();
JSValue v2 = stackFrame.args[1].jsValue();
-
- double left;
- double right = 0.0;
-
- bool rightIsNumber = v2.getNumber(right);
- if (rightIsNumber && v1.getNumber(left))
- return JSValue::encode(jsNumber(stackFrame.globalData, left + right));
-
CallFrame* callFrame = stackFrame.callFrame;
- bool leftIsString = v1.isString();
- if (leftIsString && v2.isString()) {
- RefPtr<UString::Rep> value = concatenate(asString(v1)->value().rep(), asString(v2)->value().rep());
- if (UNLIKELY(!value)) {
- throwOutOfMemoryError(callFrame);
- VM_THROW_EXCEPTION();
- }
-
- return JSValue::encode(jsString(stackFrame.globalData, value.release()));
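+    // Fast paths: string concatenation (string + string or string + primitive) first, then
+    // purely numeric addition; everything else drops through to jsAddSlowCase below.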
+ if (v1.isString()) {
+ JSValue result = v2.isString()
+ ? jsString(callFrame, asString(v1), asString(v2))
+ : jsString(callFrame, asString(v1), v2.toPrimitiveString(callFrame));
+ CHECK_FOR_EXCEPTION_AT_END();
+ return JSValue::encode(result);
}
- if (rightIsNumber & leftIsString) {
- RefPtr<UString::Rep> value = v2.isInt32() ?
- concatenate(asString(v1)->value().rep(), v2.asInt32()) :
- concatenate(asString(v1)->value().rep(), right);
-
- if (UNLIKELY(!value)) {
- throwOutOfMemoryError(callFrame);
- VM_THROW_EXCEPTION();
- }
- return JSValue::encode(jsString(stackFrame.globalData, value.release()));
- }
+ double left = 0.0, right;
+ if (v1.getNumber(left) && v2.getNumber(right))
+ return JSValue::encode(jsNumber(stackFrame.globalData, left + right));
// All other cases are pretty uncommon
JSValue result = jsAddSlowCase(callFrame, v1, v2);
DEFINE_STUB_FUNCTION(int, timeout_check)
{
STUB_INIT_STACK_FRAME(stackFrame);
-
+
JSGlobalData* globalData = stackFrame.globalData;
TimeoutChecker& timeoutChecker = globalData->timeoutChecker;
- if (timeoutChecker.didTimeOut(stackFrame.callFrame)) {
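+    // A pending termination request takes priority over the ordinary script timeout check.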
+ if (globalData->terminator.shouldTerminate()) {
+ globalData->exception = createTerminatedExecutionException(globalData);
+ VM_THROW_EXCEPTION_AT_END();
+ } else if (timeoutChecker.didTimeOut(stackFrame.callFrame)) {
globalData->exception = createInterruptedExecutionException(globalData);
VM_THROW_EXCEPTION_AT_END();
}
-
+
return timeoutChecker.ticksUntilNextCheck();
}
throwStackOverflowError(oldCallFrame, stackFrame.globalData, ReturnAddressPtr(oldCallFrame->returnPC()), STUB_RETURN_ADDRESS);
}
-DEFINE_STUB_FUNCTION(int, op_loop_if_less)
-{
- STUB_INIT_STACK_FRAME(stackFrame);
-
- JSValue src1 = stackFrame.args[0].jsValue();
- JSValue src2 = stackFrame.args[1].jsValue();
- CallFrame* callFrame = stackFrame.callFrame;
-
- bool result = jsLess(callFrame, src1, src2);
- CHECK_FOR_EXCEPTION_AT_END();
- return result;
-}
-
DEFINE_STUB_FUNCTION(int, op_loop_if_lesseq)
{
STUB_INIT_STACK_FRAME(stackFrame);
CHECK_FOR_EXCEPTION_AT_END();
}
+DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_generic)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+
+ PutPropertySlot slot;
+ stackFrame.args[0].jsValue().putDirect(stackFrame.callFrame, stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
+ CHECK_FOR_EXCEPTION_AT_END();
+}
+
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_generic)
{
STUB_INIT_STACK_FRAME(stackFrame);
DEFINE_STUB_FUNCTION(void, op_put_by_id)
{
STUB_INIT_STACK_FRAME(stackFrame);
-
CallFrame* callFrame = stackFrame.callFrame;
Identifier& ident = stackFrame.args[1].identifier();
-
+
PutPropertySlot slot;
stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);
-
- ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_id_second));
-
+
+ CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
+ StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
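+    // Cache only once this call site has been seen before; the first slow-path hit merely
+    // marks the stub as seen.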
+ if (!stubInfo->seenOnce())
+ stubInfo->setSeen();
+ else
+ JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, false);
+
CHECK_FOR_EXCEPTION_AT_END();
}
-DEFINE_STUB_FUNCTION(void, op_put_by_id_second)
+DEFINE_STUB_FUNCTION(void, op_put_by_id_direct)
{
STUB_INIT_STACK_FRAME(stackFrame);
-
+ CallFrame* callFrame = stackFrame.callFrame;
+ Identifier& ident = stackFrame.args[1].identifier();
+
PutPropertySlot slot;
- stackFrame.args[0].jsValue().put(stackFrame.callFrame, stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
- JITThunks::tryCachePutByID(stackFrame.callFrame, stackFrame.callFrame->codeBlock(), STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot);
+ stackFrame.args[0].jsValue().putDirect(callFrame, ident, stackFrame.args[2].jsValue(), slot);
+
+ CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
+ StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
+ if (!stubInfo->seenOnce())
+ stubInfo->setSeen();
+ else
+ JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, true);
+
CHECK_FOR_EXCEPTION_AT_END();
}
CHECK_FOR_EXCEPTION_AT_END();
}
+DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_fail)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+
+ CallFrame* callFrame = stackFrame.callFrame;
+ Identifier& ident = stackFrame.args[1].identifier();
+
+ PutPropertySlot slot;
+ stackFrame.args[0].jsValue().putDirect(callFrame, ident, stackFrame.args[2].jsValue(), slot);
+
+ CHECK_FOR_EXCEPTION_AT_END();
+}
+
DEFINE_STUB_FUNCTION(JSObject*, op_put_by_id_transition_realloc)
{
STUB_INIT_STACK_FRAME(stackFrame);
return base;
}
-DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id)
-{
- STUB_INIT_STACK_FRAME(stackFrame);
-
- CallFrame* callFrame = stackFrame.callFrame;
- Identifier& ident = stackFrame.args[1].identifier();
-
- JSValue baseValue = stackFrame.args[0].jsValue();
- PropertySlot slot(baseValue);
- JSValue result = baseValue.get(callFrame, ident, slot);
-
- ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_second));
-
- CHECK_FOR_EXCEPTION_AT_END();
- return JSValue::encode(result);
-}
-
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_method_check)
{
STUB_INIT_STACK_FRAME(stackFrame);
JSValue baseValue = stackFrame.args[0].jsValue();
PropertySlot slot(baseValue);
JSValue result = baseValue.get(callFrame, ident, slot);
+ CHECK_FOR_EXCEPTION();
- ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_method_check_second));
-
- CHECK_FOR_EXCEPTION_AT_END();
- return JSValue::encode(result);
-}
-
-DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_method_check_second)
-{
- STUB_INIT_STACK_FRAME(stackFrame);
-
- CallFrame* callFrame = stackFrame.callFrame;
- Identifier& ident = stackFrame.args[1].identifier();
+ CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
+ MethodCallLinkInfo& methodCallLinkInfo = codeBlock->getMethodCallLinkInfo(STUB_RETURN_ADDRESS);
- JSValue baseValue = stackFrame.args[0].jsValue();
- PropertySlot slot(baseValue);
- JSValue result = baseValue.get(callFrame, ident, slot);
-
- CHECK_FOR_EXCEPTION();
+ if (!methodCallLinkInfo.seenOnce()) {
+ methodCallLinkInfo.setSeen();
+ return JSValue::encode(result);
+ }
// If we successfully got something, then the base from which it is being accessed must
// be an object. (Assertion to ensure asObject() call below is safe, which comes after
    // an isCacheable() check.)
- ASSERT(!slot.isCacheable() || slot.slotBase().isObject());
+ ASSERT(!slot.isCacheableValue() || slot.slotBase().isObject());
// Check that:
// * We're dealing with a JSCell,
JSCell* specific;
JSObject* slotBaseObject;
if (baseValue.isCell()
- && slot.isCacheable()
+ && slot.isCacheableValue()
&& !(structure = asCell(baseValue)->structure())->isUncacheableDictionary()
&& (slotBaseObject = asObject(slot.slotBase()))->getPropertySpecificValue(callFrame, ident, specific)
&& specific
// The result fetched should always be the callee!
ASSERT(result == JSValue(callee));
- MethodCallLinkInfo& methodCallLinkInfo = callFrame->codeBlock()->getMethodCallLinkInfo(STUB_RETURN_ADDRESS);
// Check to see if the function is on the object's prototype. Patch up the code to optimize.
- if (slot.slotBase() == structure->prototypeForLookup(callFrame))
- JIT::patchMethodCallProto(callFrame->codeBlock(), methodCallLinkInfo, callee, structure, slotBaseObject);
+ if (slot.slotBase() == structure->prototypeForLookup(callFrame)) {
+ JIT::patchMethodCallProto(codeBlock, methodCallLinkInfo, callee, structure, slotBaseObject, STUB_RETURN_ADDRESS);
+ return JSValue::encode(result);
+ }
+
// Check to see if the function is on the object itself.
// Since we generate the method-check to check both the structure and a prototype-structure (since this
// is the common case) we have a problem - we need to patch the prototype structure check to do something
    // useful. We could try to nop it out altogether, but that's a little messy, so let's do something simpler
    // for now: perform a check on a special object on the global object, used only for this
// purpose. The object is in no way exposed, and as such the check will always pass.
- else if (slot.slotBase() == baseValue)
- JIT::patchMethodCallProto(callFrame->codeBlock(), methodCallLinkInfo, callee, structure, callFrame->scopeChain()->globalObject()->methodCallDummy());
-
- // For now let any other case be cached as a normal get_by_id.
+ if (slot.slotBase() == baseValue) {
+ JIT::patchMethodCallProto(codeBlock, methodCallLinkInfo, callee, structure, callFrame->scopeChain()->globalObject->methodCallDummy(), STUB_RETURN_ADDRESS);
+ return JSValue::encode(result);
+ }
}
// Revert the get_by_id op back to being a regular get_by_id - allow it to cache like normal, if it needs to.
- ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id));
-
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id));
return JSValue::encode(result);
}
-DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_second)
+DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id)
{
STUB_INIT_STACK_FRAME(stackFrame);
-
CallFrame* callFrame = stackFrame.callFrame;
Identifier& ident = stackFrame.args[1].identifier();
PropertySlot slot(baseValue);
JSValue result = baseValue.get(callFrame, ident, slot);
- JITThunks::tryCacheGetByID(callFrame, callFrame->codeBlock(), STUB_RETURN_ADDRESS, baseValue, ident, slot);
+ CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
+ StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
+ if (!stubInfo->seenOnce())
+ stubInfo->setSeen();
+ else
+ JITThunks::tryCacheGetByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, baseValue, ident, slot, stubInfo);
CHECK_FOR_EXCEPTION_AT_END();
return JSValue::encode(result);
PolymorphicAccessStructureList* polymorphicStructureList;
int listIndex = 1;
- if (stubInfo->opcodeID == op_get_by_id_self) {
+ if (stubInfo->accessType == access_get_by_id_self) {
ASSERT(!stubInfo->stubRoutine);
polymorphicStructureList = new PolymorphicAccessStructureList(CodeLocationLabel(), stubInfo->u.getByIdSelf.baseObjectStructure);
- stubInfo->initGetByIdSelfList(polymorphicStructureList, 2);
+ stubInfo->initGetByIdSelfList(polymorphicStructureList, 1);
} else {
polymorphicStructureList = stubInfo->u.getByIdSelfList.structureList;
listIndex = stubInfo->u.getByIdSelfList.listSize;
- stubInfo->u.getByIdSelfList.listSize++;
}
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ stubInfo->u.getByIdSelfList.listSize++;
+ JIT::compileGetByIdSelfList(callFrame->scopeChain()->globalData, codeBlock, stubInfo, polymorphicStructureList, listIndex, asCell(baseValue)->structure(), ident, slot, slot.cachedOffset());
- JIT::compileGetByIdSelfList(callFrame->scopeChain()->globalData, codeBlock, stubInfo, polymorphicStructureList, listIndex, asCell(baseValue)->structure(), slot.cachedOffset());
-
- if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
+ }
} else
ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
return JSValue::encode(result);
PolymorphicAccessStructureList* prototypeStructureList = 0;
listIndex = 1;
- switch (stubInfo->opcodeID) {
- case op_get_by_id_proto:
+ switch (stubInfo->accessType) {
+ case access_get_by_id_proto:
prototypeStructureList = new PolymorphicAccessStructureList(stubInfo->stubRoutine, stubInfo->u.getByIdProto.baseObjectStructure, stubInfo->u.getByIdProto.prototypeStructure);
stubInfo->stubRoutine = CodeLocationLabel();
stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
break;
- case op_get_by_id_chain:
+ case access_get_by_id_chain:
prototypeStructureList = new PolymorphicAccessStructureList(stubInfo->stubRoutine, stubInfo->u.getByIdChain.baseObjectStructure, stubInfo->u.getByIdChain.chain);
stubInfo->stubRoutine = CodeLocationLabel();
stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
break;
- case op_get_by_id_proto_list:
+ case access_get_by_id_proto_list:
prototypeStructureList = stubInfo->u.getByIdProtoList.structureList;
listIndex = stubInfo->u.getByIdProtoList.listSize;
- stubInfo->u.getByIdProtoList.listSize++;
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE)
+ stubInfo->u.getByIdProtoList.listSize++;
break;
default:
ASSERT_NOT_REACHED();
}
- ASSERT(listIndex < POLYMORPHIC_LIST_CACHE_SIZE);
+ ASSERT(listIndex <= POLYMORPHIC_LIST_CACHE_SIZE);
return prototypeStructureList;
}
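+// Invokes a cached JS getter: calls the getter function with the supplied base object as
+// 'this', diverting to the throw trampoline if the call raised an exception.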
+DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_getter_stub)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+ CallFrame* callFrame = stackFrame.callFrame;
+ GetterSetter* getterSetter = asGetterSetter(stackFrame.args[0].jsObject());
+ if (!getterSetter->getter())
+ return JSValue::encode(jsUndefined());
+ JSObject* getter = asObject(getterSetter->getter());
+ CallData callData;
+ CallType callType = getter->getCallData(callData);
+ JSValue result = call(callFrame, getter, callType, callData, stackFrame.args[1].jsObject(), ArgList());
+ if (callFrame->hadException())
+ returnToThrowTrampoline(&callFrame->globalData(), stackFrame.args[2].returnAddress(), STUB_RETURN_ADDRESS);
+
+ return JSValue::encode(result);
+}
+
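+// Invokes a cached custom (native) property getter: calls the GetValueFunc with the slot
+// base and identifier, diverting to the throw trampoline on exception.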
+DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_custom_stub)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+ CallFrame* callFrame = stackFrame.callFrame;
+ JSObject* slotBase = stackFrame.args[0].jsObject();
+ PropertySlot::GetValueFunc getter = reinterpret_cast<PropertySlot::GetValueFunc>(stackFrame.args[1].asPointer);
+ const Identifier& ident = stackFrame.args[2].identifier();
+ JSValue result = getter(callFrame, slotBase, ident);
+ if (callFrame->hadException())
+ returnToThrowTrampoline(&callFrame->globalData(), stackFrame.args[3].returnAddress(), STUB_RETURN_ADDRESS);
+
+ return JSValue::encode(result);
+}
+
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
{
STUB_INIT_STACK_FRAME(stackFrame);
CHECK_FOR_EXCEPTION();
- if (!baseValue.isCell() || !slot.isCacheable() || asCell(baseValue)->structure()->isUncacheableDictionary()) {
+ if (!baseValue.isCell() || !slot.isCacheable() || asCell(baseValue)->structure()->isDictionary()) {
ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
return JSValue::encode(result);
}
if (slot.slotBase() == baseValue)
ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
else if (slot.slotBase() == asCell(baseValue)->structure()->prototypeForLookup(callFrame)) {
+ ASSERT(!asCell(baseValue)->structure()->isDictionary());
// Since we're accessing a prototype in a loop, it's a good bet that it
// should not be treated as a dictionary.
if (slotBaseObject->structure()->isDictionary()) {
int listIndex;
PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(stubInfo, listIndex);
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ JIT::compileGetByIdProtoList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), propertyName, slot, offset);
- JIT::compileGetByIdProtoList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), offset);
-
- if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
- } else if (size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset)) {
- StructureChain* protoChain = structure->prototypeChain(callFrame);
- if (!protoChain->isCacheable()) {
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
- return JSValue::encode(result);
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
}
-
+ } else if (size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset)) {
+ ASSERT(!asCell(baseValue)->structure()->isDictionary());
int listIndex;
PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(stubInfo, listIndex);
- JIT::compileGetByIdChainList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, offset);
+
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ StructureChain* protoChain = structure->prototypeChain(callFrame);
+ JIT::compileGetByIdChainList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, propertyName, slot, offset);
- if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ }
} else
ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
// ECMA-262 15.3.5.3:
// Throw an exception either if baseVal is not an object, or if it does not implement 'HasInstance' (i.e. is a function).
- TypeInfo typeInfo(UnspecifiedType, 0);
+ TypeInfo typeInfo(UnspecifiedType);
if (!baseVal.isObject() || !(typeInfo = asObject(baseVal)->structure()->typeInfo()).implementsHasInstance()) {
CallFrame* callFrame = stackFrame.callFrame;
CodeBlock* codeBlock = callFrame->codeBlock();
{
STUB_INIT_STACK_FRAME(stackFrame);
- return stackFrame.args[0].funcDeclNode()->makeFunction(stackFrame.callFrame, stackFrame.callFrame->scopeChain());
+ return stackFrame.args[0].function()->make(stackFrame.callFrame, stackFrame.callFrame->scopeChain());
}
DEFINE_STUB_FUNCTION(void*, op_call_JSFunction)
{
STUB_INIT_STACK_FRAME(stackFrame);
-#ifndef NDEBUG
+#if !ASSERT_DISABLED
CallData callData;
ASSERT(stackFrame.args[0].jsValue().getCallData(callData) == CallTypeJS);
#endif
JSFunction* function = asFunction(stackFrame.args[0].jsValue());
ASSERT(!function->isHostFunction());
- FunctionBodyNode* body = function->body();
+ FunctionExecutable* executable = function->jsExecutable();
ScopeChainNode* callDataScopeChain = function->scope().node();
- body->jitCode(callDataScopeChain);
+ executable->jitCode(stackFrame.callFrame, callDataScopeChain);
- return &(body->generatedBytecode());
+ return function;
}
DEFINE_STUB_FUNCTION(VoidPtrPair, op_call_arityCheck)
STUB_INIT_STACK_FRAME(stackFrame);
CallFrame* callFrame = stackFrame.callFrame;
- CodeBlock* newCodeBlock = stackFrame.args[3].codeBlock();
- ASSERT(newCodeBlock->codeType() != NativeCode);
+ JSFunction* callee = asFunction(stackFrame.args[0].jsValue());
+ ASSERT(!callee->isHostFunction());
+ CodeBlock* newCodeBlock = &callee->jsExecutable()->generatedBytecode();
int argCount = stackFrame.args[2].int32();
ASSERT(argCount != newCodeBlock->m_numParameters);
if (argCount > newCodeBlock->m_numParameters) {
size_t numParameters = newCodeBlock->m_numParameters;
Register* r = callFrame->registers() + numParameters;
+ Register* newEnd = r + newCodeBlock->m_numCalleeRegisters;
+ if (!stackFrame.registerFile->grow(newEnd)) {
+ // Rewind to the previous call frame because op_call already optimistically
+ // moved the call frame forward.
+ stackFrame.callFrame = oldCallFrame;
+ throwStackOverflowError(oldCallFrame, stackFrame.globalData, stackFrame.args[1].returnAddress(), STUB_RETURN_ADDRESS);
+ RETURN_POINTER_PAIR(0, 0);
+ }
Register* argv = r - RegisterFile::CallFrameHeaderSize - numParameters - argCount;
for (size_t i = 0; i < numParameters; ++i)
callFrame->setCallerFrame(oldCallFrame);
}
- RETURN_POINTER_PAIR(newCodeBlock, callFrame);
+ ASSERT((void*)callFrame <= stackFrame.registerFile->end());
+ RETURN_POINTER_PAIR(callee, callFrame);
}
#if ENABLE(JIT_OPTIMIZE_CALL)
-DEFINE_STUB_FUNCTION(void*, vm_dontLazyLinkCall)
-{
- STUB_INIT_STACK_FRAME(stackFrame);
-
- JSGlobalData* globalData = stackFrame.globalData;
- JSFunction* callee = asFunction(stackFrame.args[0].jsValue());
-
- ctiPatchNearCallByReturnAddress(stackFrame.callFrame->callerFrame()->codeBlock(), stackFrame.args[1].returnAddress(), globalData->jitStubs.ctiVirtualCallLink());
-
- return callee->body()->generatedJITCode().addressForCall().executableAddress();
-}
-
DEFINE_STUB_FUNCTION(void*, vm_lazyLinkCall)
{
STUB_INIT_STACK_FRAME(stackFrame);
-
JSFunction* callee = asFunction(stackFrame.args[0].jsValue());
- JITCode& jitCode = callee->body()->generatedJITCode();
+ ExecutableBase* executable = callee->executable();
+ JITCode& jitCode = executable->generatedJITCode();
CodeBlock* codeBlock = 0;
- if (!callee->isHostFunction())
- codeBlock = &callee->body()->bytecode(callee->scope().node());
- else
- codeBlock = &callee->body()->generatedBytecode();
-
+ if (!executable->isHostFunction())
+ codeBlock = &static_cast<FunctionExecutable*>(executable)->bytecode(stackFrame.callFrame, callee->scope().node());
CallLinkInfo* callLinkInfo = &stackFrame.callFrame->callerFrame()->codeBlock()->getCallLinkInfo(stackFrame.args[1].returnAddress());
- JIT::linkCall(callee, stackFrame.callFrame->callerFrame()->codeBlock(), codeBlock, jitCode, callLinkInfo, stackFrame.args[2].int32(), stackFrame.globalData);
+
+ if (!callLinkInfo->seenOnce())
+ callLinkInfo->setSeen();
+ else
+ JIT::linkCall(callee, stackFrame.callFrame->callerFrame()->codeBlock(), codeBlock, jitCode, callLinkInfo, stackFrame.args[2].int32(), stackFrame.globalData);
return jitCode.addressForCall().executableAddress();
}
{
STUB_INIT_STACK_FRAME(stackFrame);
- JSActivation* activation = new (stackFrame.globalData) JSActivation(stackFrame.callFrame, static_cast<FunctionBodyNode*>(stackFrame.callFrame->codeBlock()->ownerNode()));
+ JSActivation* activation = new (stackFrame.globalData) JSActivation(stackFrame.callFrame, static_cast<FunctionExecutable*>(stackFrame.callFrame->codeBlock()->ownerExecutable()));
stackFrame.callFrame->setScopeChain(stackFrame.callFrame->scopeChain()->copy()->push(activation));
return activation;
}
VM_THROW_EXCEPTION();
}
-#ifndef NDEBUG
+#if !ASSERT_DISABLED
ConstructData constructData;
ASSERT(constructor->getConstructData(constructData) == ConstructTypeJS);
#endif
if (stackFrame.args[3].jsValue().isObject())
structure = asObject(stackFrame.args[3].jsValue())->inheritorID();
else
- structure = constructor->scope().node()->globalObject()->emptyObjectStructure();
+ structure = constructor->scope().node()->globalObject->emptyObjectStructure();
return new (stackFrame.globalData) JSObject(structure);
}
JSValue baseValue = stackFrame.args[0].jsValue();
JSValue subscript = stackFrame.args[1].jsValue();
- JSValue result;
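+    // Fast path: when the base is a cell and the subscript is a string, probe the base's own
+    // properties with fastGetOwnPropertySlot before falling back to the generic paths below.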
+ if (LIKELY(baseValue.isCell() && subscript.isString())) {
+ Identifier propertyName(callFrame, asString(subscript)->value(callFrame));
+ PropertySlot slot(asCell(baseValue));
+ // JSString::value may have thrown, but we shouldn't find a property with a null identifier,
+ // so we should miss this case and wind up in the CHECK_FOR_EXCEPTION_AT_END, below.
+ if (asCell(baseValue)->fastGetOwnPropertySlot(callFrame, propertyName, slot)) {
+ JSValue result = slot.getValue(callFrame, propertyName);
+ CHECK_FOR_EXCEPTION();
+ return JSValue::encode(result);
+ }
+ }
- if (LIKELY(subscript.isUInt32())) {
+ if (subscript.isUInt32()) {
uint32_t i = subscript.asUInt32();
- if (isJSArray(globalData, baseValue)) {
- JSArray* jsArray = asArray(baseValue);
- if (jsArray->canGetIndex(i))
- result = jsArray->getIndex(i);
- else
- result = jsArray->JSArray::get(callFrame, i);
- } else if (isJSString(globalData, baseValue) && asString(baseValue)->canGetIndex(i)) {
- // All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
+ if (isJSString(globalData, baseValue) && asString(baseValue)->canGetIndex(i)) {
ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_string));
- result = asString(baseValue)->getIndex(stackFrame.globalData, i);
- } else if (isJSByteArray(globalData, baseValue) && asByteArray(baseValue)->canAccessIndex(i)) {
+ JSValue result = asString(baseValue)->getIndex(callFrame, i);
+ CHECK_FOR_EXCEPTION();
+ return JSValue::encode(result);
+ }
+ if (isJSByteArray(globalData, baseValue) && asByteArray(baseValue)->canAccessIndex(i)) {
// All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_byte_array));
return JSValue::encode(asByteArray(baseValue)->getIndex(callFrame, i));
- } else
- result = baseValue.get(callFrame, i);
- } else {
- Identifier property(callFrame, subscript.toString(callFrame));
- result = baseValue.get(callFrame, property);
+ }
+ JSValue result = baseValue.get(callFrame, i);
+ CHECK_FOR_EXCEPTION();
+ return JSValue::encode(result);
}
-
+
+ Identifier property(callFrame, subscript.toString(callFrame));
+ JSValue result = baseValue.get(callFrame, property);
CHECK_FOR_EXCEPTION_AT_END();
return JSValue::encode(result);
}
if (LIKELY(subscript.isUInt32())) {
uint32_t i = subscript.asUInt32();
if (isJSString(globalData, baseValue) && asString(baseValue)->canGetIndex(i))
- result = asString(baseValue)->getIndex(stackFrame.globalData, i);
+ result = asString(baseValue)->getIndex(callFrame, i);
else {
result = baseValue.get(callFrame, i);
if (!isJSString(globalData, baseValue))
CHECK_FOR_EXCEPTION_AT_END();
}
-DEFINE_STUB_FUNCTION(void, op_put_by_val_array)
-{
- STUB_INIT_STACK_FRAME(stackFrame);
-
- CallFrame* callFrame = stackFrame.callFrame;
- JSValue baseValue = stackFrame.args[0].jsValue();
- int i = stackFrame.args[1].int32();
- JSValue value = stackFrame.args[2].jsValue();
-
- ASSERT(isJSArray(stackFrame.globalData, baseValue));
-
- if (LIKELY(i >= 0))
- asArray(baseValue)->JSArray::put(callFrame, i, value);
- else {
- Identifier property(callFrame, UString::from(i));
- PutPropertySlot slot;
- baseValue.put(callFrame, property, value, slot);
- }
-
- CHECK_FOR_EXCEPTION_AT_END();
-}
-
DEFINE_STUB_FUNCTION(void, op_put_by_val_byte_array)
{
STUB_INIT_STACK_FRAME(stackFrame);
return JSValue::encode(result);
}
-DEFINE_STUB_FUNCTION(int, op_loop_if_true)
-{
- STUB_INIT_STACK_FRAME(stackFrame);
-
- JSValue src1 = stackFrame.args[0].jsValue();
-
- CallFrame* callFrame = stackFrame.callFrame;
-
- bool result = src1.toBoolean(callFrame);
- CHECK_FOR_EXCEPTION_AT_END();
- return result;
-}
-
DEFINE_STUB_FUNCTION(int, op_load_varargs)
{
STUB_INIT_STACK_FRAME(stackFrame);
if (!arguments) {
int providedParams = callFrame->registers()[RegisterFile::ArgumentCount].i() - 1;
argCount = providedParams;
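+ // Clamp the argument count to Arguments::MaxArguments before sizing the register file region.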
+ argCount = min(argCount, static_cast<uint32_t>(Arguments::MaxArguments));
int32_t sizeDelta = argsOffset + argCount + RegisterFile::CallFrameHeaderSize;
Register* newEnd = callFrame->registers() + sizeDelta;
if (!registerFile->grow(newEnd) || ((newEnd - callFrame->registers()) != sizeDelta)) {
stackFrame.globalData->exception = createStackOverflowError(callFrame);
VM_THROW_EXCEPTION();
}
- int32_t expectedParams = callFrame->callee()->body()->parameterCount();
+ int32_t expectedParams = callFrame->callee()->jsExecutable()->parameterCount();
int32_t inplaceArgs = min(providedParams, expectedParams);
Register* inplaceArgsDst = callFrame->registers() + argsOffset;
if (asObject(arguments)->classInfo() == &Arguments::info) {
Arguments* argsObject = asArguments(arguments);
argCount = argsObject->numProvidedArguments(callFrame);
+ argCount = min(argCount, static_cast<uint32_t>(Arguments::MaxArguments));
int32_t sizeDelta = argsOffset + argCount + RegisterFile::CallFrameHeaderSize;
Register* newEnd = callFrame->registers() + sizeDelta;
if (!registerFile->grow(newEnd) || ((newEnd - callFrame->registers()) != sizeDelta)) {
} else if (isJSArray(&callFrame->globalData(), arguments)) {
JSArray* array = asArray(arguments);
argCount = array->length();
+ argCount = min(argCount, static_cast<uint32_t>(Arguments::MaxArguments));
int32_t sizeDelta = argsOffset + argCount + RegisterFile::CallFrameHeaderSize;
Register* newEnd = callFrame->registers() + sizeDelta;
if (!registerFile->grow(newEnd) || ((newEnd - callFrame->registers()) != sizeDelta)) {
} else if (asObject(arguments)->inherits(&JSArray::info)) {
JSObject* argObject = asObject(arguments);
argCount = argObject->get(callFrame, callFrame->propertyNames().length).toUInt32(callFrame);
+ argCount = min(argCount, static_cast<uint32_t>(Arguments::MaxArguments));
int32_t sizeDelta = argsOffset + argCount + RegisterFile::CallFrameHeaderSize;
Register* newEnd = callFrame->registers() + sizeDelta;
if (!registerFile->grow(newEnd) || ((newEnd - callFrame->registers()) != sizeDelta)) {
PropertySlot slot(globalObject);
if (globalObject->getPropertySlot(callFrame, ident, slot)) {
JSValue result = slot.getValue(callFrame, ident);
- if (slot.isCacheable() && !globalObject->structure()->isUncacheableDictionary() && slot.slotBase() == globalObject) {
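+ // Patch the global resolve fast path only for plain cached value slots, not accessor slots.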
+ if (slot.isCacheableValue() && !globalObject->structure()->isUncacheableDictionary() && slot.slotBase() == globalObject) {
GlobalResolveInfo& globalResolveInfo = callFrame->codeBlock()->globalResolveInfo(globalResolveInfoIndex);
if (globalResolveInfo.structure)
globalResolveInfo.structure->deref();
return JSValue::encode(number);
}
-#if USE(JSVALUE32_64)
-
DEFINE_STUB_FUNCTION(int, op_eq)
{
STUB_INIT_STACK_FRAME(stackFrame);
JSValue src1 = stackFrame.args[0].jsValue();
JSValue src2 = stackFrame.args[1].jsValue();
+#if USE(JSVALUE32_64)
start:
if (src2.isUndefined()) {
return src1.isNull() ||
if (src1.isNull())
return src2.isCell() && asCell(src2)->structure()->typeInfo().masqueradesAsUndefined();
- ASSERT(src1.isCell());
-
JSCell* cell1 = asCell(src1);
if (cell1->isString()) {
if (src2.isInt32())
- return static_cast<JSString*>(cell1)->value().toDouble() == src2.asInt32();
+ return static_cast<JSString*>(cell1)->value(stackFrame.callFrame).toDouble() == src2.asInt32();
if (src2.isDouble())
- return static_cast<JSString*>(cell1)->value().toDouble() == src2.asDouble();
+ return static_cast<JSString*>(cell1)->value(stackFrame.callFrame).toDouble() == src2.asDouble();
if (src2.isTrue())
- return static_cast<JSString*>(cell1)->value().toDouble() == 1.0;
+ return static_cast<JSString*>(cell1)->value(stackFrame.callFrame).toDouble() == 1.0;
if (src2.isFalse())
- return static_cast<JSString*>(cell1)->value().toDouble() == 0.0;
+ return static_cast<JSString*>(cell1)->value(stackFrame.callFrame).toDouble() == 0.0;
- ASSERT(src2.isCell());
JSCell* cell2 = asCell(src2);
if (cell2->isString())
- return static_cast<JSString*>(cell1)->value() == static_cast<JSString*>(cell2)->value();
+ return static_cast<JSString*>(cell1)->value(stackFrame.callFrame) == static_cast<JSString*>(cell2)->value(stackFrame.callFrame);
- ASSERT(cell2->isObject());
- src2 = static_cast<JSObject*>(cell2)->toPrimitive(stackFrame.callFrame);
+ src2 = asObject(cell2)->toPrimitive(stackFrame.callFrame);
CHECK_FOR_EXCEPTION();
goto start;
}
- ASSERT(cell1->isObject());
if (src2.isObject())
- return static_cast<JSObject*>(cell1) == asObject(src2);
- src1 = static_cast<JSObject*>(cell1)->toPrimitive(stackFrame.callFrame);
+ return asObject(cell1) == asObject(src2);
+ src1 = asObject(cell1)->toPrimitive(stackFrame.callFrame);
CHECK_FOR_EXCEPTION();
goto start;
+
+#else // USE(JSVALUE32_64)
+ CallFrame* callFrame = stackFrame.callFrame;
+
+ bool result = JSValue::equalSlowCaseInline(callFrame, src1, src2);
+ CHECK_FOR_EXCEPTION_AT_END();
+ return result;
+#endif // USE(JSVALUE32_64)
}
DEFINE_STUB_FUNCTION(int, op_eq_strings)
{
+#if USE(JSVALUE32_64)
STUB_INIT_STACK_FRAME(stackFrame);
JSString* string1 = stackFrame.args[0].jsString();
ASSERT(string1->isString());
ASSERT(string2->isString());
- return string1->value() == string2->value();
-}
-
-#else // USE(JSVALUE32_64)
-
-DEFINE_STUB_FUNCTION(int, op_eq)
-{
- STUB_INIT_STACK_FRAME(stackFrame);
-
- JSValue src1 = stackFrame.args[0].jsValue();
- JSValue src2 = stackFrame.args[1].jsValue();
-
- CallFrame* callFrame = stackFrame.callFrame;
-
- bool result = JSValue::equalSlowCaseInline(callFrame, src1, src2);
- CHECK_FOR_EXCEPTION_AT_END();
- return result;
+ return string1->value(stackFrame.callFrame) == string2->value(stackFrame.callFrame);
+#else
+ UNUSED_PARAM(args);
+ ASSERT_NOT_REACHED();
+ return 0;
+#endif
}
-#endif // USE(JSVALUE32_64)
-
DEFINE_STUB_FUNCTION(EncodedJSValue, op_lshift)
{
STUB_INIT_STACK_FRAME(stackFrame);
DEFINE_STUB_FUNCTION(JSObject*, op_new_func_exp)
{
STUB_INIT_STACK_FRAME(stackFrame);
+ CallFrame* callFrame = stackFrame.callFrame;
- return stackFrame.args[0].funcExprNode()->makeFunction(stackFrame.callFrame, stackFrame.callFrame->scopeChain());
+ FunctionExecutable* function = stackFrame.args[0].function();
+ JSFunction* func = function->make(callFrame, callFrame->scopeChain());
+
+ /*
+ The Identifier in a FunctionExpression can be referenced from inside
+ the FunctionExpression's FunctionBody to allow the function to call
+ itself recursively. However, unlike in a FunctionDeclaration, the
+ Identifier in a FunctionExpression cannot be referenced from and
+ does not affect the scope enclosing the FunctionExpression.
+ */
+ if (!function->name().isNull()) {
+ JSStaticScopeObject* functionScopeObject = new (callFrame) JSStaticScopeObject(callFrame, function->name(), func, ReadOnly | DontDelete);
+ func->scope().push(functionScopeObject);
+ }
+
+ return func;
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_mod)
Register* newCallFrame = callFrame->registers() + registerOffset;
Register* argv = newCallFrame - RegisterFile::CallFrameHeaderSize - argCount;
JSValue thisValue = argv[0].jsValue();
- JSGlobalObject* globalObject = callFrame->scopeChain()->globalObject();
+ JSGlobalObject* globalObject = callFrame->scopeChain()->globalObject;
if (thisValue == globalObject && funcVal == globalObject->evalFunction()) {
JSValue exceptionValue;
if (!handler) {
*stackFrame.exception = exceptionValue;
- STUB_SET_RETURN_ADDRESS(reinterpret_cast<void*>(ctiOpThrowNotCaught));
+ STUB_SET_RETURN_ADDRESS(FunctionPtr(ctiOpThrowNotCaught).value());
return JSValue::encode(jsNull());
}
{
STUB_INIT_STACK_FRAME(stackFrame);
- return JSPropertyNameIterator::create(stackFrame.callFrame, stackFrame.args[0].jsValue());
+ CallFrame* callFrame = stackFrame.callFrame;
+ JSObject* o = stackFrame.args[0].jsObject();
+ Structure* structure = o->structure();
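+ // Reuse the Structure's cached property name iterator if its prototype chain is unchanged.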
+ JSPropertyNameIterator* jsPropertyNameIterator = structure->enumerationCache();
+ if (!jsPropertyNameIterator || jsPropertyNameIterator->cachedPrototypeChain() != structure->prototypeChain(callFrame))
+ jsPropertyNameIterator = JSPropertyNameIterator::create(callFrame, o);
+ return jsPropertyNameIterator;
}
-DEFINE_STUB_FUNCTION(EncodedJSValue, op_next_pname)
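+// Returns whether the base object has a property with the given name (the JSString is converted to an Identifier first).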
+DEFINE_STUB_FUNCTION(int, has_property)
{
STUB_INIT_STACK_FRAME(stackFrame);
- JSPropertyNameIterator* it = stackFrame.args[0].propertyNameIterator();
- JSValue temp = it->next(stackFrame.callFrame);
- if (!temp)
- it->invalidate();
- return JSValue::encode(temp);
+ JSObject* base = stackFrame.args[0].jsObject();
+ JSString* property = stackFrame.args[1].jsString();
+ int result = base->hasProperty(stackFrame.callFrame, Identifier(stackFrame.callFrame, property->value(stackFrame.callFrame)));
+ CHECK_FOR_EXCEPTION_AT_END();
+ return result;
}
DEFINE_STUB_FUNCTION(JSObject*, op_push_scope)
JSValue src1 = stackFrame.args[0].jsValue();
JSValue src2 = stackFrame.args[1].jsValue();
- return JSValue::encode(jsBoolean(JSValue::strictEqual(src1, src2)));
+ bool result = JSValue::strictEqual(stackFrame.callFrame, src1, src2);
+ CHECK_FOR_EXCEPTION_AT_END();
+ return JSValue::encode(jsBoolean(result));
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_to_primitive)
{
STUB_INIT_STACK_FRAME(stackFrame);
- return JSValue::encode(concatenateStrings(stackFrame.callFrame, &stackFrame.callFrame->registers()[stackFrame.args[0].int32()], stackFrame.args[1].int32()));
+ JSValue result = jsString(stackFrame.callFrame, &stackFrame.callFrame->registers()[stackFrame.args[0].int32()], stackFrame.args[1].int32());
+ CHECK_FOR_EXCEPTION_AT_END();
+ return JSValue::encode(result);
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_nstricteq)
JSValue src1 = stackFrame.args[0].jsValue();
JSValue src2 = stackFrame.args[1].jsValue();
- return JSValue::encode(jsBoolean(!JSValue::strictEqual(src1, src2)));
+ bool result = !JSValue::strictEqual(stackFrame.callFrame, src1, src2);
+ CHECK_FOR_EXCEPTION_AT_END();
+ return JSValue::encode(jsBoolean(result));
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_to_jsnumber)
void* result = codeBlock->characterSwitchJumpTable(tableIndex).ctiDefault.executableAddress();
if (scrutinee.isString()) {
- UString::Rep* value = asString(scrutinee)->value().rep();
- if (value->size() == 1)
- result = codeBlock->characterSwitchJumpTable(tableIndex).ctiForValue(value->data()[0]).executableAddress();
+ UString::Rep* value = asString(scrutinee)->value(callFrame).rep();
+ if (value->length() == 1)
+ result = codeBlock->characterSwitchJumpTable(tableIndex).ctiForValue(value->characters()[0]).executableAddress();
}
+ CHECK_FOR_EXCEPTION_AT_END();
return result;
}
void* result = codeBlock->stringSwitchJumpTable(tableIndex).ctiDefault.executableAddress();
if (scrutinee.isString()) {
- UString::Rep* value = asString(scrutinee)->value().rep();
+ UString::Rep* value = asString(scrutinee)->value(callFrame).rep();
result = codeBlock->stringSwitchJumpTable(tableIndex).ctiForValue(value).executableAddress();
}
+ CHECK_FOR_EXCEPTION_AT_END();
return result;
}
unsigned bytecodeOffset = stackFrame.args[2].int32();
unsigned lineNumber = codeBlock->lineNumberForBytecodeOffset(callFrame, bytecodeOffset);
- return Error::create(callFrame, static_cast<ErrorType>(type), message.toString(callFrame), lineNumber, codeBlock->ownerNode()->sourceID(), codeBlock->ownerNode()->sourceURL());
+ return Error::create(callFrame, static_cast<ErrorType>(type), message.toString(callFrame), lineNumber, codeBlock->ownerExecutable()->sourceID(), codeBlock->ownerExecutable()->sourceURL());
}
DEFINE_STUB_FUNCTION(void, op_debug)
return JSValue::encode(exceptionValue);
}
+DEFINE_STUB_FUNCTION(EncodedJSValue, to_object)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+
+ CallFrame* callFrame = stackFrame.callFrame;
+ return JSValue::encode(stackFrame.args[0].jsValue().toObject(callFrame));
+}
+
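+// Lazily builds and caches one NativeExecutable per ThunkGenerator, using the JITThunks executable pool.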
+NativeExecutable* JITThunks::specializedThunk(JSGlobalData* globalData, ThunkGenerator generator)
+{
+ std::pair<ThunkMap::iterator, bool> entry = m_thunkMap.add(generator, 0);
+ if (!entry.second)
+ return entry.first->second.get();
+ entry.first->second = generator(globalData, m_executablePool.get());
+ return entry.first->second.get();
+}
+
} // namespace JSC
#endif // ENABLE(JIT)