2 * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #include "JITOperations.h"
31 #include "Arguments.h"
32 #include "ArrayConstructor.h"
33 #include "DFGCompilationMode.h"
34 #include "DFGDriver.h"
35 #include "DFGOSREntry.h"
36 #include "DFGThunks.h"
37 #include "DFGWorklist.h"
40 #include "ErrorHandlingScope.h"
41 #include "GetterSetter.h"
42 #include "HostCallReturnValue.h"
44 #include "JITToDFGDeferredCompilationCallback.h"
45 #include "JSGlobalObjectFunctions.h"
46 #include "JSNameScope.h"
47 #include "JSPropertyNameIterator.h"
48 #include "JSStackInlines.h"
49 #include "JSWithScope.h"
50 #include "ObjectConstructor.h"
51 #include "JSCInlines.h"
53 #include "RepatchBuffer.h"
54 #include "TestRunnerUtils.h"
55 #include <wtf/InlineASM.h>
// Running count of exception-fuzzing checks performed so far; exposed to
// the test harness via numberOfExceptionFuzzChecks().
static unsigned s_numberOfExceptionFuzzChecks;
unsigned numberOfExceptionFuzzChecks() { return s_numberOfExceptionFuzzChecks; }
65 void * _ReturnAddress(void);
66 #pragma intrinsic(_ReturnAddress)
68 #define OUR_RETURN_ADDRESS _ReturnAddress()
70 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
73 #if ENABLE(OPCODE_SAMPLING)
74 #define CTI_SAMPLER vm->interpreter->sampler()
80 void JIT_OPERATION
operationThrowStackOverflowError(ExecState
* exec
, CodeBlock
* codeBlock
)
82 // We pass in our own code block, because the callframe hasn't been populated.
83 VM
* vm
= codeBlock
->vm();
84 CallFrame
* callerFrame
= exec
->callerFrameSkippingVMEntrySentinel();
88 NativeCallFrameTracer
tracer(vm
, callerFrame
);
89 ErrorHandlingScope
errorScope(*vm
);
90 vm
->throwException(callerFrame
, createStackOverflowError(callerFrame
));
93 int32_t JIT_OPERATION
operationCallArityCheck(ExecState
* exec
)
96 CallFrame
* callerFrame
= exec
->callerFrameSkippingVMEntrySentinel();
97 NativeCallFrameTracer
tracer(vm
, callerFrame
);
99 JSStack
& stack
= vm
->interpreter
->stack();
101 int32_t missingArgCount
= CommonSlowPaths::arityCheckFor(exec
, &stack
, CodeForCall
);
102 if (missingArgCount
< 0)
103 throwStackOverflowError(callerFrame
);
105 return missingArgCount
;
108 int32_t JIT_OPERATION
operationConstructArityCheck(ExecState
* exec
)
110 VM
* vm
= &exec
->vm();
111 CallFrame
* callerFrame
= exec
->callerFrameSkippingVMEntrySentinel();
112 NativeCallFrameTracer
tracer(vm
, callerFrame
);
114 JSStack
& stack
= vm
->interpreter
->stack();
116 int32_t missingArgCount
= CommonSlowPaths::arityCheckFor(exec
, &stack
, CodeForConstruct
);
117 if (missingArgCount
< 0)
118 throwStackOverflowError(callerFrame
);
120 return missingArgCount
;
123 EncodedJSValue JIT_OPERATION
operationGetById(ExecState
* exec
, StructureStubInfo
*, EncodedJSValue base
, StringImpl
* uid
)
125 VM
* vm
= &exec
->vm();
126 NativeCallFrameTracer
tracer(vm
, exec
);
128 JSValue baseValue
= JSValue::decode(base
);
129 PropertySlot
slot(baseValue
);
130 Identifier
ident(vm
, uid
);
131 return JSValue::encode(baseValue
.get(exec
, ident
, slot
));
134 EncodedJSValue JIT_OPERATION
operationGetByIdBuildList(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue base
, StringImpl
* uid
)
136 VM
* vm
= &exec
->vm();
137 NativeCallFrameTracer
tracer(vm
, exec
);
139 Identifier
ident(vm
, uid
);
140 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
142 JSValue baseValue
= JSValue::decode(base
);
143 PropertySlot
slot(baseValue
);
144 JSValue result
= baseValue
.get(exec
, ident
, slot
);
146 if (accessType
== static_cast<AccessType
>(stubInfo
->accessType
))
147 buildGetByIDList(exec
, baseValue
, ident
, slot
, *stubInfo
);
149 return JSValue::encode(result
);
152 EncodedJSValue JIT_OPERATION
operationGetByIdOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue base
, StringImpl
* uid
)
154 VM
* vm
= &exec
->vm();
155 NativeCallFrameTracer
tracer(vm
, exec
);
156 Identifier ident
= uid
->isEmptyUnique() ? Identifier::from(PrivateName(uid
)) : Identifier(vm
, uid
);
157 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
159 JSValue baseValue
= JSValue::decode(base
);
160 PropertySlot
slot(baseValue
);
161 JSValue result
= baseValue
.get(exec
, ident
, slot
);
163 if (accessType
== static_cast<AccessType
>(stubInfo
->accessType
)) {
165 repatchGetByID(exec
, baseValue
, ident
, slot
, *stubInfo
);
167 stubInfo
->seen
= true;
170 return JSValue::encode(result
);
173 EncodedJSValue JIT_OPERATION
operationInOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, JSCell
* base
, StringImpl
* key
)
175 VM
* vm
= &exec
->vm();
176 NativeCallFrameTracer
tracer(vm
, exec
);
178 if (!base
->isObject()) {
179 vm
->throwException(exec
, createInvalidParameterError(exec
, "in", base
));
180 return JSValue::encode(jsUndefined());
183 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
185 Identifier
ident(vm
, key
);
186 PropertySlot
slot(base
);
187 bool result
= asObject(base
)->getPropertySlot(exec
, ident
, slot
);
189 RELEASE_ASSERT(accessType
== stubInfo
->accessType
);
192 repatchIn(exec
, base
, ident
, result
, slot
, *stubInfo
);
194 stubInfo
->seen
= true;
196 return JSValue::encode(jsBoolean(result
));
199 EncodedJSValue JIT_OPERATION
operationIn(ExecState
* exec
, StructureStubInfo
*, JSCell
* base
, StringImpl
* key
)
201 VM
* vm
= &exec
->vm();
202 NativeCallFrameTracer
tracer(vm
, exec
);
204 if (!base
->isObject()) {
205 vm
->throwException(exec
, createInvalidParameterError(exec
, "in", base
));
206 return JSValue::encode(jsUndefined());
209 Identifier
ident(vm
, key
);
210 return JSValue::encode(jsBoolean(asObject(base
)->hasProperty(exec
, ident
)));
213 EncodedJSValue JIT_OPERATION
operationGenericIn(ExecState
* exec
, JSCell
* base
, EncodedJSValue key
)
215 VM
* vm
= &exec
->vm();
216 NativeCallFrameTracer
tracer(vm
, exec
);
218 return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec
, JSValue::decode(key
), base
)));
221 void JIT_OPERATION
operationPutByIdStrict(ExecState
* exec
, StructureStubInfo
*, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
223 VM
* vm
= &exec
->vm();
224 NativeCallFrameTracer
tracer(vm
, exec
);
226 Identifier
ident(vm
, uid
);
227 PutPropertySlot
slot(JSValue::decode(encodedBase
), true, exec
->codeBlock()->putByIdContext());
228 JSValue::decode(encodedBase
).put(exec
, ident
, JSValue::decode(encodedValue
), slot
);
231 void JIT_OPERATION
operationPutByIdNonStrict(ExecState
* exec
, StructureStubInfo
*, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
233 VM
* vm
= &exec
->vm();
234 NativeCallFrameTracer
tracer(vm
, exec
);
236 Identifier
ident(vm
, uid
);
237 PutPropertySlot
slot(JSValue::decode(encodedBase
), false, exec
->codeBlock()->putByIdContext());
238 JSValue::decode(encodedBase
).put(exec
, ident
, JSValue::decode(encodedValue
), slot
);
241 void JIT_OPERATION
operationPutByIdDirectStrict(ExecState
* exec
, StructureStubInfo
*, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
243 VM
* vm
= &exec
->vm();
244 NativeCallFrameTracer
tracer(vm
, exec
);
246 Identifier
ident(vm
, uid
);
247 PutPropertySlot
slot(JSValue::decode(encodedBase
), true, exec
->codeBlock()->putByIdContext());
248 asObject(JSValue::decode(encodedBase
))->putDirect(exec
->vm(), ident
, JSValue::decode(encodedValue
), slot
);
251 void JIT_OPERATION
operationPutByIdDirectNonStrict(ExecState
* exec
, StructureStubInfo
*, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
253 VM
* vm
= &exec
->vm();
254 NativeCallFrameTracer
tracer(vm
, exec
);
256 Identifier
ident(vm
, uid
);
257 PutPropertySlot
slot(JSValue::decode(encodedBase
), false, exec
->codeBlock()->putByIdContext());
258 asObject(JSValue::decode(encodedBase
))->putDirect(exec
->vm(), ident
, JSValue::decode(encodedValue
), slot
);
261 void JIT_OPERATION
operationPutByIdStrictOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
263 VM
* vm
= &exec
->vm();
264 NativeCallFrameTracer
tracer(vm
, exec
);
266 Identifier
ident(vm
, uid
);
267 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
269 JSValue value
= JSValue::decode(encodedValue
);
270 JSValue baseValue
= JSValue::decode(encodedBase
);
271 PutPropertySlot
slot(baseValue
, true, exec
->codeBlock()->putByIdContext());
273 baseValue
.put(exec
, ident
, value
, slot
);
275 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
279 repatchPutByID(exec
, baseValue
, ident
, slot
, *stubInfo
, NotDirect
);
281 stubInfo
->seen
= true;
284 void JIT_OPERATION
operationPutByIdNonStrictOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
286 VM
* vm
= &exec
->vm();
287 NativeCallFrameTracer
tracer(vm
, exec
);
289 Identifier
ident(vm
, uid
);
290 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
292 JSValue value
= JSValue::decode(encodedValue
);
293 JSValue baseValue
= JSValue::decode(encodedBase
);
294 PutPropertySlot
slot(baseValue
, false, exec
->codeBlock()->putByIdContext());
296 baseValue
.put(exec
, ident
, value
, slot
);
298 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
302 repatchPutByID(exec
, baseValue
, ident
, slot
, *stubInfo
, NotDirect
);
304 stubInfo
->seen
= true;
307 void JIT_OPERATION
operationPutByIdDirectStrictOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
309 VM
* vm
= &exec
->vm();
310 NativeCallFrameTracer
tracer(vm
, exec
);
312 Identifier
ident(vm
, uid
);
313 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
315 JSValue value
= JSValue::decode(encodedValue
);
316 JSObject
* baseObject
= asObject(JSValue::decode(encodedBase
));
317 PutPropertySlot
slot(baseObject
, true, exec
->codeBlock()->putByIdContext());
319 baseObject
->putDirect(exec
->vm(), ident
, value
, slot
);
321 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
325 repatchPutByID(exec
, baseObject
, ident
, slot
, *stubInfo
, Direct
);
327 stubInfo
->seen
= true;
330 void JIT_OPERATION
operationPutByIdDirectNonStrictOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
332 VM
* vm
= &exec
->vm();
333 NativeCallFrameTracer
tracer(vm
, exec
);
335 Identifier
ident(vm
, uid
);
336 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
338 JSValue value
= JSValue::decode(encodedValue
);
339 JSObject
* baseObject
= asObject(JSValue::decode(encodedBase
));
340 PutPropertySlot
slot(baseObject
, false, exec
->codeBlock()->putByIdContext());
342 baseObject
->putDirect(exec
->vm(), ident
, value
, slot
);
344 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
348 repatchPutByID(exec
, baseObject
, ident
, slot
, *stubInfo
, Direct
);
350 stubInfo
->seen
= true;
353 void JIT_OPERATION
operationPutByIdStrictBuildList(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
355 VM
* vm
= &exec
->vm();
356 NativeCallFrameTracer
tracer(vm
, exec
);
358 Identifier
ident(vm
, uid
);
359 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
361 JSValue value
= JSValue::decode(encodedValue
);
362 JSValue baseValue
= JSValue::decode(encodedBase
);
363 PutPropertySlot
slot(baseValue
, true, exec
->codeBlock()->putByIdContext());
365 baseValue
.put(exec
, ident
, value
, slot
);
367 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
370 buildPutByIdList(exec
, baseValue
, ident
, slot
, *stubInfo
, NotDirect
);
373 void JIT_OPERATION
operationPutByIdNonStrictBuildList(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
375 VM
* vm
= &exec
->vm();
376 NativeCallFrameTracer
tracer(vm
, exec
);
378 Identifier
ident(vm
, uid
);
379 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
381 JSValue value
= JSValue::decode(encodedValue
);
382 JSValue baseValue
= JSValue::decode(encodedBase
);
383 PutPropertySlot
slot(baseValue
, false, exec
->codeBlock()->putByIdContext());
385 baseValue
.put(exec
, ident
, value
, slot
);
387 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
390 buildPutByIdList(exec
, baseValue
, ident
, slot
, *stubInfo
, NotDirect
);
393 void JIT_OPERATION
operationPutByIdDirectStrictBuildList(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
395 VM
* vm
= &exec
->vm();
396 NativeCallFrameTracer
tracer(vm
, exec
);
398 Identifier
ident(vm
, uid
);
399 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
401 JSValue value
= JSValue::decode(encodedValue
);
402 JSObject
* baseObject
= asObject(JSValue::decode(encodedBase
));
403 PutPropertySlot
slot(baseObject
, true, exec
->codeBlock()->putByIdContext());
405 baseObject
->putDirect(exec
->vm(), ident
, value
, slot
);
407 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
410 buildPutByIdList(exec
, baseObject
, ident
, slot
, *stubInfo
, Direct
);
413 void JIT_OPERATION
operationPutByIdDirectNonStrictBuildList(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, StringImpl
* uid
)
415 VM
* vm
= &exec
->vm();
416 NativeCallFrameTracer
tracer(vm
, exec
);
418 Identifier
ident(vm
, uid
);
419 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
421 JSValue value
= JSValue::decode(encodedValue
);
422 JSObject
* baseObject
= asObject(JSValue::decode(encodedBase
));
423 PutPropertySlot
slot(baseObject
, false, exec
->codeBlock()->putByIdContext());
425 baseObject
->putDirect(exec
->vm(), ident
, value
, slot
);
427 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
430 buildPutByIdList(exec
, baseObject
, ident
, slot
, *stubInfo
, Direct
);
433 void JIT_OPERATION
operationReallocateStorageAndFinishPut(ExecState
* exec
, JSObject
* base
, Structure
* structure
, PropertyOffset offset
, EncodedJSValue value
)
436 NativeCallFrameTracer
tracer(&vm
, exec
);
438 ASSERT(structure
->outOfLineCapacity() > base
->structure(vm
)->outOfLineCapacity());
439 ASSERT(!vm
.heap
.storageAllocator().fastPathShouldSucceed(structure
->outOfLineCapacity() * sizeof(JSValue
)));
440 base
->setStructureAndReallocateStorageIfNecessary(vm
, structure
);
441 base
->putDirect(vm
, offset
, JSValue::decode(value
));
444 static void putByVal(CallFrame
* callFrame
, JSValue baseValue
, JSValue subscript
, JSValue value
)
446 VM
& vm
= callFrame
->vm();
447 if (LIKELY(subscript
.isUInt32())) {
448 uint32_t i
= subscript
.asUInt32();
449 if (baseValue
.isObject()) {
450 JSObject
* object
= asObject(baseValue
);
451 if (object
->canSetIndexQuickly(i
))
452 object
->setIndexQuickly(callFrame
->vm(), i
, value
);
454 object
->methodTable(vm
)->putByIndex(object
, callFrame
, i
, value
, callFrame
->codeBlock()->isStrictMode());
456 baseValue
.putByIndex(callFrame
, i
, value
, callFrame
->codeBlock()->isStrictMode());
457 } else if (isName(subscript
)) {
458 PutPropertySlot
slot(baseValue
, callFrame
->codeBlock()->isStrictMode());
459 baseValue
.put(callFrame
, jsCast
<NameInstance
*>(subscript
.asCell())->privateName(), value
, slot
);
461 Identifier property
= subscript
.toString(callFrame
)->toIdentifier(callFrame
);
462 if (!callFrame
->vm().exception()) { // Don't put to an object if toString threw an exception.
463 PutPropertySlot
slot(baseValue
, callFrame
->codeBlock()->isStrictMode());
464 baseValue
.put(callFrame
, property
, value
, slot
);
469 static void directPutByVal(CallFrame
* callFrame
, JSObject
* baseObject
, JSValue subscript
, JSValue value
)
471 if (LIKELY(subscript
.isUInt32())) {
472 uint32_t i
= subscript
.asUInt32();
473 baseObject
->putDirectIndex(callFrame
, i
, value
);
474 } else if (isName(subscript
)) {
475 PutPropertySlot
slot(baseObject
, callFrame
->codeBlock()->isStrictMode());
476 baseObject
->putDirect(callFrame
->vm(), jsCast
<NameInstance
*>(subscript
.asCell())->privateName(), value
, slot
);
478 Identifier
property(callFrame
, subscript
.toString(callFrame
)->value(callFrame
));
479 if (!callFrame
->vm().exception()) { // Don't put to an object if toString threw an exception.
480 PutPropertySlot
slot(baseObject
, callFrame
->codeBlock()->isStrictMode());
481 baseObject
->putDirect(callFrame
->vm(), property
, value
, slot
);
485 void JIT_OPERATION
operationPutByVal(ExecState
* exec
, EncodedJSValue encodedBaseValue
, EncodedJSValue encodedSubscript
, EncodedJSValue encodedValue
)
488 NativeCallFrameTracer
tracer(&vm
, exec
);
490 JSValue baseValue
= JSValue::decode(encodedBaseValue
);
491 JSValue subscript
= JSValue::decode(encodedSubscript
);
492 JSValue value
= JSValue::decode(encodedValue
);
494 if (baseValue
.isObject() && subscript
.isInt32()) {
495 // See if it's worth optimizing at all.
496 JSObject
* object
= asObject(baseValue
);
497 bool didOptimize
= false;
499 unsigned bytecodeOffset
= exec
->locationAsBytecodeOffset();
500 ASSERT(bytecodeOffset
);
501 ByValInfo
& byValInfo
= exec
->codeBlock()->getByValInfo(bytecodeOffset
- 1);
502 ASSERT(!byValInfo
.stubRoutine
);
504 if (hasOptimizableIndexing(object
->structure(vm
))) {
505 // Attempt to optimize.
506 JITArrayMode arrayMode
= jitArrayModeForStructure(object
->structure(vm
));
507 if (arrayMode
!= byValInfo
.arrayMode
) {
508 JIT::compilePutByVal(&vm
, exec
->codeBlock(), &byValInfo
, ReturnAddressPtr(OUR_RETURN_ADDRESS
), arrayMode
);
514 // If we take slow path more than 10 times without patching then make sure we
515 // never make that mistake again. Or, if we failed to patch and we have some object
516 // that intercepts indexed get, then don't even wait until 10 times. For cases
517 // where we see non-index-intercepting objects, this gives 10 iterations worth of
518 // opportunity for us to observe that the get_by_val may be polymorphic.
519 if (++byValInfo
.slowPathCount
>= 10
520 || object
->structure(vm
)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
521 // Don't ever try to optimize.
522 RepatchBuffer
repatchBuffer(exec
->codeBlock());
523 repatchBuffer
.relinkCallerToFunction(ReturnAddressPtr(OUR_RETURN_ADDRESS
), FunctionPtr(operationPutByValGeneric
));
528 putByVal(exec
, baseValue
, subscript
, value
);
531 void JIT_OPERATION
operationDirectPutByVal(ExecState
* callFrame
, EncodedJSValue encodedBaseValue
, EncodedJSValue encodedSubscript
, EncodedJSValue encodedValue
)
533 VM
& vm
= callFrame
->vm();
534 NativeCallFrameTracer
tracer(&vm
, callFrame
);
536 JSValue baseValue
= JSValue::decode(encodedBaseValue
);
537 JSValue subscript
= JSValue::decode(encodedSubscript
);
538 JSValue value
= JSValue::decode(encodedValue
);
539 RELEASE_ASSERT(baseValue
.isObject());
540 JSObject
* object
= asObject(baseValue
);
541 if (subscript
.isInt32()) {
542 // See if it's worth optimizing at all.
543 bool didOptimize
= false;
545 unsigned bytecodeOffset
= callFrame
->locationAsBytecodeOffset();
546 ASSERT(bytecodeOffset
);
547 ByValInfo
& byValInfo
= callFrame
->codeBlock()->getByValInfo(bytecodeOffset
- 1);
548 ASSERT(!byValInfo
.stubRoutine
);
550 if (hasOptimizableIndexing(object
->structure(vm
))) {
551 // Attempt to optimize.
552 JITArrayMode arrayMode
= jitArrayModeForStructure(object
->structure(vm
));
553 if (arrayMode
!= byValInfo
.arrayMode
) {
554 JIT::compileDirectPutByVal(&vm
, callFrame
->codeBlock(), &byValInfo
, ReturnAddressPtr(OUR_RETURN_ADDRESS
), arrayMode
);
560 // If we take slow path more than 10 times without patching then make sure we
561 // never make that mistake again. Or, if we failed to patch and we have some object
562 // that intercepts indexed get, then don't even wait until 10 times. For cases
563 // where we see non-index-intercepting objects, this gives 10 iterations worth of
564 // opportunity for us to observe that the get_by_val may be polymorphic.
565 if (++byValInfo
.slowPathCount
>= 10
566 || object
->structure(vm
)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
567 // Don't ever try to optimize.
568 RepatchBuffer
repatchBuffer(callFrame
->codeBlock());
569 repatchBuffer
.relinkCallerToFunction(ReturnAddressPtr(OUR_RETURN_ADDRESS
), FunctionPtr(operationDirectPutByValGeneric
));
573 directPutByVal(callFrame
, object
, subscript
, value
);
576 void JIT_OPERATION
operationPutByValGeneric(ExecState
* exec
, EncodedJSValue encodedBaseValue
, EncodedJSValue encodedSubscript
, EncodedJSValue encodedValue
)
579 NativeCallFrameTracer
tracer(&vm
, exec
);
581 JSValue baseValue
= JSValue::decode(encodedBaseValue
);
582 JSValue subscript
= JSValue::decode(encodedSubscript
);
583 JSValue value
= JSValue::decode(encodedValue
);
585 putByVal(exec
, baseValue
, subscript
, value
);
589 void JIT_OPERATION
operationDirectPutByValGeneric(ExecState
* exec
, EncodedJSValue encodedBaseValue
, EncodedJSValue encodedSubscript
, EncodedJSValue encodedValue
)
592 NativeCallFrameTracer
tracer(&vm
, exec
);
594 JSValue baseValue
= JSValue::decode(encodedBaseValue
);
595 JSValue subscript
= JSValue::decode(encodedSubscript
);
596 JSValue value
= JSValue::decode(encodedValue
);
597 RELEASE_ASSERT(baseValue
.isObject());
598 directPutByVal(exec
, asObject(baseValue
), subscript
, value
);
601 EncodedJSValue JIT_OPERATION
operationCallEval(ExecState
* exec
, ExecState
* execCallee
)
603 ASSERT(exec
->codeBlock()->codeType() != FunctionCode
604 || !exec
->codeBlock()->needsActivation()
605 || exec
->hasActivation());
607 execCallee
->setScope(exec
->scope());
608 execCallee
->setCodeBlock(0);
610 if (!isHostFunction(execCallee
->calleeAsValue(), globalFuncEval
))
611 return JSValue::encode(JSValue());
613 VM
* vm
= &execCallee
->vm();
614 JSValue result
= eval(execCallee
);
616 return EncodedJSValue();
618 return JSValue::encode(result
);
621 static void* handleHostCall(ExecState
* execCallee
, JSValue callee
, CodeSpecializationKind kind
)
623 ExecState
* exec
= execCallee
->callerFrame();
624 VM
* vm
= &exec
->vm();
626 execCallee
->setScope(exec
->scope());
627 execCallee
->setCodeBlock(0);
629 if (kind
== CodeForCall
) {
631 CallType callType
= getCallData(callee
, callData
);
633 ASSERT(callType
!= CallTypeJS
);
635 if (callType
== CallTypeHost
) {
636 NativeCallFrameTracer
tracer(vm
, execCallee
);
637 execCallee
->setCallee(asObject(callee
));
638 vm
->hostCallReturnValue
= JSValue::decode(callData
.native
.function(execCallee
));
640 return vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress();
642 return reinterpret_cast<void*>(getHostCallReturnValue
);
645 ASSERT(callType
== CallTypeNone
);
646 exec
->vm().throwException(exec
, createNotAFunctionError(exec
, callee
));
647 return vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress();
650 ASSERT(kind
== CodeForConstruct
);
652 ConstructData constructData
;
653 ConstructType constructType
= getConstructData(callee
, constructData
);
655 ASSERT(constructType
!= ConstructTypeJS
);
657 if (constructType
== ConstructTypeHost
) {
658 NativeCallFrameTracer
tracer(vm
, execCallee
);
659 execCallee
->setCallee(asObject(callee
));
660 vm
->hostCallReturnValue
= JSValue::decode(constructData
.native
.function(execCallee
));
662 return vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress();
664 return reinterpret_cast<void*>(getHostCallReturnValue
);
667 ASSERT(constructType
== ConstructTypeNone
);
668 exec
->vm().throwException(exec
, createNotAConstructorError(exec
, callee
));
669 return vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress();
672 inline char* linkFor(
673 ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
, CodeSpecializationKind kind
,
674 RegisterPreservationMode registers
)
676 ExecState
* exec
= execCallee
->callerFrame();
677 VM
* vm
= &exec
->vm();
678 NativeCallFrameTracer
tracer(vm
, exec
);
680 JSValue calleeAsValue
= execCallee
->calleeAsValue();
681 JSCell
* calleeAsFunctionCell
= getJSFunction(calleeAsValue
);
682 if (!calleeAsFunctionCell
)
683 return reinterpret_cast<char*>(handleHostCall(execCallee
, calleeAsValue
, kind
));
685 JSFunction
* callee
= jsCast
<JSFunction
*>(calleeAsFunctionCell
);
686 JSScope
* scope
= callee
->scopeUnchecked();
687 execCallee
->setScope(scope
);
688 ExecutableBase
* executable
= callee
->executable();
690 MacroAssemblerCodePtr codePtr
;
691 CodeBlock
* codeBlock
= 0;
692 if (executable
->isHostFunction())
693 codePtr
= executable
->entrypointFor(*vm
, kind
, MustCheckArity
, registers
);
695 FunctionExecutable
* functionExecutable
= static_cast<FunctionExecutable
*>(executable
);
696 JSObject
* error
= functionExecutable
->prepareForExecution(execCallee
, callee
, &scope
, kind
);
697 execCallee
->setScope(scope
);
699 throwStackOverflowError(exec
);
700 return reinterpret_cast<char*>(vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress());
702 codeBlock
= functionExecutable
->codeBlockFor(kind
);
703 ArityCheckMode arity
;
704 if (execCallee
->argumentCountIncludingThis() < static_cast<size_t>(codeBlock
->numParameters()) || callLinkInfo
->callType
== CallLinkInfo::CallVarargs
|| callLinkInfo
->callType
== CallLinkInfo::ConstructVarargs
)
705 arity
= MustCheckArity
;
707 arity
= ArityCheckNotRequired
;
708 codePtr
= functionExecutable
->entrypointFor(*vm
, kind
, arity
, registers
);
710 if (!callLinkInfo
->seenOnce())
711 callLinkInfo
->setSeen();
713 linkFor(execCallee
, *callLinkInfo
, codeBlock
, callee
, codePtr
, kind
, registers
);
714 return reinterpret_cast<char*>(codePtr
.executableAddress());
717 char* JIT_OPERATION
operationLinkCall(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
719 return linkFor(execCallee
, callLinkInfo
, CodeForCall
, RegisterPreservationNotRequired
);
722 char* JIT_OPERATION
operationLinkConstruct(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
724 return linkFor(execCallee
, callLinkInfo
, CodeForConstruct
, RegisterPreservationNotRequired
);
727 char* JIT_OPERATION
operationLinkCallThatPreservesRegs(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
729 return linkFor(execCallee
, callLinkInfo
, CodeForCall
, MustPreserveRegisters
);
732 char* JIT_OPERATION
operationLinkConstructThatPreservesRegs(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
734 return linkFor(execCallee
, callLinkInfo
, CodeForConstruct
, MustPreserveRegisters
);
737 inline char* virtualForWithFunction(
738 ExecState
* execCallee
, CodeSpecializationKind kind
, RegisterPreservationMode registers
,
739 JSCell
*& calleeAsFunctionCell
)
741 ExecState
* exec
= execCallee
->callerFrame();
742 VM
* vm
= &exec
->vm();
743 NativeCallFrameTracer
tracer(vm
, exec
);
745 JSValue calleeAsValue
= execCallee
->calleeAsValue();
746 calleeAsFunctionCell
= getJSFunction(calleeAsValue
);
747 if (UNLIKELY(!calleeAsFunctionCell
))
748 return reinterpret_cast<char*>(handleHostCall(execCallee
, calleeAsValue
, kind
));
750 JSFunction
* function
= jsCast
<JSFunction
*>(calleeAsFunctionCell
);
751 JSScope
* scope
= function
->scopeUnchecked();
752 execCallee
->setScope(scope
);
753 ExecutableBase
* executable
= function
->executable();
754 if (UNLIKELY(!executable
->hasJITCodeFor(kind
))) {
755 FunctionExecutable
* functionExecutable
= static_cast<FunctionExecutable
*>(executable
);
756 JSObject
* error
= functionExecutable
->prepareForExecution(execCallee
, function
, &scope
, kind
);
757 execCallee
->setScope(scope
);
759 exec
->vm().throwException(exec
, error
);
760 return reinterpret_cast<char*>(vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress());
763 return reinterpret_cast<char*>(executable
->entrypointFor(
764 *vm
, kind
, MustCheckArity
, registers
).executableAddress());
767 inline char* virtualFor(
768 ExecState
* execCallee
, CodeSpecializationKind kind
, RegisterPreservationMode registers
)
770 JSCell
* calleeAsFunctionCellIgnored
;
771 return virtualForWithFunction(execCallee
, kind
, registers
, calleeAsFunctionCellIgnored
);
774 static bool attemptToOptimizeClosureCall(
775 ExecState
* execCallee
, RegisterPreservationMode registers
, JSCell
* calleeAsFunctionCell
,
776 CallLinkInfo
& callLinkInfo
)
778 if (!calleeAsFunctionCell
)
781 VM
& vm
= execCallee
->vm();
782 JSFunction
* callee
= jsCast
<JSFunction
*>(calleeAsFunctionCell
);
783 JSFunction
* oldCallee
= callLinkInfo
.callee
.get();
786 || oldCallee
->structure(vm
) != callee
->structure(vm
)
787 || oldCallee
->executable() != callee
->executable())
790 ASSERT(callee
->executable()->hasJITCodeForCall());
791 MacroAssemblerCodePtr codePtr
=
792 callee
->executable()->generatedJITCodeForCall()->addressForCall(
793 *execCallee
->callerFrame()->codeBlock()->vm(), callee
->executable(),
794 ArityCheckNotRequired
, registers
);
796 CodeBlock
* codeBlock
;
797 if (callee
->executable()->isHostFunction())
800 codeBlock
= jsCast
<FunctionExecutable
*>(callee
->executable())->codeBlockForCall();
801 if (execCallee
->argumentCountIncludingThis() < static_cast<size_t>(codeBlock
->numParameters()) || callLinkInfo
.callType
== CallLinkInfo::CallVarargs
|| callLinkInfo
.callType
== CallLinkInfo::ConstructVarargs
)
806 execCallee
, callLinkInfo
, codeBlock
,
807 callee
->structure(), callee
->executable(), codePtr
, registers
);
812 char* JIT_OPERATION
operationLinkClosureCall(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
814 JSCell
* calleeAsFunctionCell
;
815 char* result
= virtualForWithFunction(execCallee
, CodeForCall
, RegisterPreservationNotRequired
, calleeAsFunctionCell
);
817 if (!attemptToOptimizeClosureCall(execCallee
, RegisterPreservationNotRequired
, calleeAsFunctionCell
, *callLinkInfo
))
818 linkSlowFor(execCallee
, *callLinkInfo
, CodeForCall
, RegisterPreservationNotRequired
);
823 char* JIT_OPERATION
operationVirtualCall(ExecState
* execCallee
, CallLinkInfo
*)
825 return virtualFor(execCallee
, CodeForCall
, RegisterPreservationNotRequired
);
828 char* JIT_OPERATION
operationVirtualConstruct(ExecState
* execCallee
, CallLinkInfo
*)
830 return virtualFor(execCallee
, CodeForConstruct
, RegisterPreservationNotRequired
);
833 char* JIT_OPERATION
operationLinkClosureCallThatPreservesRegs(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
835 JSCell
* calleeAsFunctionCell
;
836 char* result
= virtualForWithFunction(execCallee
, CodeForCall
, MustPreserveRegisters
, calleeAsFunctionCell
);
838 if (!attemptToOptimizeClosureCall(execCallee
, MustPreserveRegisters
, calleeAsFunctionCell
, *callLinkInfo
))
839 linkSlowFor(execCallee
, *callLinkInfo
, CodeForCall
, MustPreserveRegisters
);
844 char* JIT_OPERATION
operationVirtualCallThatPreservesRegs(ExecState
* execCallee
, CallLinkInfo
*)
846 return virtualFor(execCallee
, CodeForCall
, MustPreserveRegisters
);
849 char* JIT_OPERATION
operationVirtualConstructThatPreservesRegs(ExecState
* execCallee
, CallLinkInfo
*)
851 return virtualFor(execCallee
, CodeForConstruct
, MustPreserveRegisters
);
854 size_t JIT_OPERATION
operationCompareLess(ExecState
* exec
, EncodedJSValue encodedOp1
, EncodedJSValue encodedOp2
)
856 VM
* vm
= &exec
->vm();
857 NativeCallFrameTracer
tracer(vm
, exec
);
859 return jsLess
<true>(exec
, JSValue::decode(encodedOp1
), JSValue::decode(encodedOp2
));
862 size_t JIT_OPERATION
operationCompareLessEq(ExecState
* exec
, EncodedJSValue encodedOp1
, EncodedJSValue encodedOp2
)
864 VM
* vm
= &exec
->vm();
865 NativeCallFrameTracer
tracer(vm
, exec
);
867 return jsLessEq
<true>(exec
, JSValue::decode(encodedOp1
), JSValue::decode(encodedOp2
));
870 size_t JIT_OPERATION
operationCompareGreater(ExecState
* exec
, EncodedJSValue encodedOp1
, EncodedJSValue encodedOp2
)
872 VM
* vm
= &exec
->vm();
873 NativeCallFrameTracer
tracer(vm
, exec
);
875 return jsLess
<false>(exec
, JSValue::decode(encodedOp2
), JSValue::decode(encodedOp1
));
878 size_t JIT_OPERATION
operationCompareGreaterEq(ExecState
* exec
, EncodedJSValue encodedOp1
, EncodedJSValue encodedOp2
)
880 VM
* vm
= &exec
->vm();
881 NativeCallFrameTracer
tracer(vm
, exec
);
883 return jsLessEq
<false>(exec
, JSValue::decode(encodedOp2
), JSValue::decode(encodedOp1
));
886 size_t JIT_OPERATION
operationConvertJSValueToBoolean(ExecState
* exec
, EncodedJSValue encodedOp
)
888 VM
* vm
= &exec
->vm();
889 NativeCallFrameTracer
tracer(vm
, exec
);
891 return JSValue::decode(encodedOp
).toBoolean(exec
);
894 size_t JIT_OPERATION
operationCompareEq(ExecState
* exec
, EncodedJSValue encodedOp1
, EncodedJSValue encodedOp2
)
896 VM
* vm
= &exec
->vm();
897 NativeCallFrameTracer
tracer(vm
, exec
);
899 return JSValue::equalSlowCaseInline(exec
, JSValue::decode(encodedOp1
), JSValue::decode(encodedOp2
));
903 EncodedJSValue JIT_OPERATION
operationCompareStringEq(ExecState
* exec
, JSCell
* left
, JSCell
* right
)
905 size_t JIT_OPERATION
operationCompareStringEq(ExecState
* exec
, JSCell
* left
, JSCell
* right
)
908 VM
* vm
= &exec
->vm();
909 NativeCallFrameTracer
tracer(vm
, exec
);
911 bool result
= WTF::equal(*asString(left
)->value(exec
).impl(), *asString(right
)->value(exec
).impl());
913 return JSValue::encode(jsBoolean(result
));
919 size_t JIT_OPERATION
operationHasProperty(ExecState
* exec
, JSObject
* base
, JSString
* property
)
921 int result
= base
->hasProperty(exec
, Identifier(exec
, property
->value(exec
)));
926 EncodedJSValue JIT_OPERATION
operationNewArrayWithProfile(ExecState
* exec
, ArrayAllocationProfile
* profile
, const JSValue
* values
, int size
)
928 VM
* vm
= &exec
->vm();
929 NativeCallFrameTracer
tracer(vm
, exec
);
930 return JSValue::encode(constructArrayNegativeIndexed(exec
, profile
, values
, size
));
933 EncodedJSValue JIT_OPERATION
operationNewArrayBufferWithProfile(ExecState
* exec
, ArrayAllocationProfile
* profile
, const JSValue
* values
, int size
)
935 VM
* vm
= &exec
->vm();
936 NativeCallFrameTracer
tracer(vm
, exec
);
937 return JSValue::encode(constructArray(exec
, profile
, values
, size
));
940 EncodedJSValue JIT_OPERATION
operationNewArrayWithSizeAndProfile(ExecState
* exec
, ArrayAllocationProfile
* profile
, EncodedJSValue size
)
942 VM
* vm
= &exec
->vm();
943 NativeCallFrameTracer
tracer(vm
, exec
);
944 JSValue sizeValue
= JSValue::decode(size
);
945 return JSValue::encode(constructArrayWithSizeQuirk(exec
, profile
, exec
->lexicalGlobalObject(), sizeValue
));
948 EncodedJSValue JIT_OPERATION
operationNewFunction(ExecState
* exec
, JSCell
* functionExecutable
)
950 ASSERT(functionExecutable
->inherits(FunctionExecutable::info()));
952 NativeCallFrameTracer
tracer(&vm
, exec
);
953 return JSValue::encode(JSFunction::create(vm
, static_cast<FunctionExecutable
*>(functionExecutable
), exec
->scope()));
956 JSCell
* JIT_OPERATION
operationNewObject(ExecState
* exec
, Structure
* structure
)
958 VM
* vm
= &exec
->vm();
959 NativeCallFrameTracer
tracer(vm
, exec
);
961 return constructEmptyObject(exec
, structure
);
964 EncodedJSValue JIT_OPERATION
operationNewRegexp(ExecState
* exec
, void* regexpPtr
)
967 NativeCallFrameTracer
tracer(&vm
, exec
);
968 RegExp
* regexp
= static_cast<RegExp
*>(regexpPtr
);
969 if (!regexp
->isValid()) {
970 vm
.throwException(exec
, createSyntaxError(exec
, "Invalid flags supplied to RegExp constructor."));
971 return JSValue::encode(jsUndefined());
974 return JSValue::encode(RegExpObject::create(vm
, exec
->lexicalGlobalObject()->regExpStructure(), regexp
));
977 void JIT_OPERATION
operationHandleWatchdogTimer(ExecState
* exec
)
980 NativeCallFrameTracer
tracer(&vm
, exec
);
982 if (UNLIKELY(vm
.watchdog
&& vm
.watchdog
->didFire(exec
)))
983 vm
.throwException(exec
, createTerminatedExecutionException(&vm
));
986 void JIT_OPERATION
operationThrowStaticError(ExecState
* exec
, EncodedJSValue encodedValue
, int32_t referenceErrorFlag
)
989 NativeCallFrameTracer
tracer(&vm
, exec
);
991 String message
= errorDescriptionForValue(exec
, JSValue::decode(encodedValue
))->value(exec
);
992 if (referenceErrorFlag
)
993 vm
.throwException(exec
, createReferenceError(exec
, message
));
995 vm
.throwException(exec
, createTypeError(exec
, message
));
998 void JIT_OPERATION
operationDebug(ExecState
* exec
, int32_t debugHookID
)
1000 VM
& vm
= exec
->vm();
1001 NativeCallFrameTracer
tracer(&vm
, exec
);
1003 vm
.interpreter
->debug(exec
, static_cast<DebugHookID
>(debugHookID
));
1007 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock
* codeBlock
)
1009 codeBlock
->updateAllPredictions();
1010 codeBlock
->optimizeAfterWarmUp();
1013 SlowPathReturnType JIT_OPERATION
operationOptimize(ExecState
* exec
, int32_t bytecodeIndex
)
1015 VM
& vm
= exec
->vm();
1016 NativeCallFrameTracer
tracer(&vm
, exec
);
1018 // Defer GC for a while so that it doesn't run between when we enter into this
1019 // slow path and when we figure out the state of our code block. This prevents
1020 // a number of awkward reentrancy scenarios, including:
1022 // - The optimized version of our code block being jettisoned by GC right after
1023 // we concluded that we wanted to use it, but have not planted it into the JS
1026 // - An optimized version of our code block being installed just as we decided
1027 // that it wasn't ready yet.
1029 // Note that jettisoning won't happen if we already initiated OSR, because in
1030 // that case we would have already planted the optimized code block into the JS
1032 DeferGCForAWhile
deferGC(vm
.heap
);
1034 CodeBlock
* codeBlock
= exec
->codeBlock();
1036 if (bytecodeIndex
) {
1037 // If we're attempting to OSR from a loop, assume that this should be
1038 // separately optimized.
1039 codeBlock
->m_shouldAlwaysBeInlined
= false;
1042 if (Options::verboseOSR()) {
1044 *codeBlock
, ": Entered optimize with bytecodeIndex = ", bytecodeIndex
,
1045 ", executeCounter = ", codeBlock
->jitExecuteCounter(),
1046 ", optimizationDelayCounter = ", codeBlock
->reoptimizationRetryCounter(),
1047 ", exitCounter = ");
1048 if (codeBlock
->hasOptimizedReplacement())
1049 dataLog(codeBlock
->replacement()->osrExitCounter());
1055 if (!codeBlock
->checkIfOptimizationThresholdReached()) {
1056 codeBlock
->updateAllPredictions();
1057 if (Options::verboseOSR())
1058 dataLog("Choosing not to optimize ", *codeBlock
, " yet, because the threshold hasn't been reached.\n");
1059 return encodeResult(0, 0);
1062 if (vm
.enabledProfiler()) {
1063 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock
);
1064 return encodeResult(0, 0);
1067 Debugger
* debugger
= codeBlock
->globalObject()->debugger();
1068 if (debugger
&& (debugger
->isStepping() || codeBlock
->baselineAlternative()->hasDebuggerRequests())) {
1069 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock
);
1070 return encodeResult(0, 0);
1073 if (codeBlock
->m_shouldAlwaysBeInlined
) {
1074 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock
);
1075 if (Options::verboseOSR())
1076 dataLog("Choosing not to optimize ", *codeBlock
, " yet, because m_shouldAlwaysBeInlined == true.\n");
1077 return encodeResult(0, 0);
1080 // We cannot be in the process of asynchronous compilation and also have an optimized
1082 DFG::Worklist
* worklist
= DFG::existingGlobalDFGWorklistOrNull();
1085 || !(worklist
->compilationState(DFG::CompilationKey(codeBlock
, DFG::DFGMode
)) != DFG::Worklist::NotKnown
1086 && codeBlock
->hasOptimizedReplacement()));
1088 DFG::Worklist::State worklistState
;
1090 // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1091 // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1092 // what the result was so that we don't plow ahead and attempt OSR or immediate
1093 // reoptimization. This will have already also set the appropriate JIT execution
1094 // count threshold depending on what happened, so if the compilation was anything
1095 // but successful we just want to return early. See the case for worklistState ==
1096 // DFG::Worklist::Compiled, below.
1098 // Note that we could have alternatively just called Worklist::compilationState()
1099 // here, and if it returned Compiled, we could have then called
1100 // completeAndScheduleOSR() below. But that would have meant that it could take
1101 // longer for code blocks to be completed: they would only complete when *their*
1102 // execution count trigger fired; but that could take a while since the firing is
1103 // racy. It could also mean that code blocks that never run again after being
1104 // compiled would sit on the worklist until next GC. That's fine, but it's
1105 // probably a waste of memory. Our goal here is to complete code blocks as soon as
1106 // possible in order to minimize the chances of us executing baseline code after
1107 // optimized code is already available.
1108 worklistState
= worklist
->completeAllReadyPlansForVM(
1109 vm
, DFG::CompilationKey(codeBlock
, DFG::DFGMode
));
1111 worklistState
= DFG::Worklist::NotKnown
;
1113 if (worklistState
== DFG::Worklist::Compiling
) {
1114 // We cannot be in the process of asynchronous compilation and also have an optimized
1116 RELEASE_ASSERT(!codeBlock
->hasOptimizedReplacement());
1117 codeBlock
->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred
);
1118 return encodeResult(0, 0);
1121 if (worklistState
== DFG::Worklist::Compiled
) {
1122 // If we don't have an optimized replacement but we did just get compiled, then
1123 // the compilation failed or was invalidated, in which case the execution count
1124 // thresholds have already been set appropriately by
1125 // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1126 // nothing left to do.
1127 if (!codeBlock
->hasOptimizedReplacement()) {
1128 codeBlock
->updateAllPredictions();
1129 if (Options::verboseOSR())
1130 dataLog("Code block ", *codeBlock
, " was compiled but it doesn't have an optimized replacement.\n");
1131 return encodeResult(0, 0);
1133 } else if (codeBlock
->hasOptimizedReplacement()) {
1134 if (Options::verboseOSR())
1135 dataLog("Considering OSR ", *codeBlock
, " -> ", *codeBlock
->replacement(), ".\n");
1136 // If we have an optimized replacement, then it must be the case that we entered
1137 // cti_optimize from a loop. That's because if there's an optimized replacement,
1138 // then all calls to this function will be relinked to the replacement and so
1139 // the prologue OSR will never fire.
1141 // This is an interesting threshold check. Consider that a function OSR exits
1142 // in the middle of a loop, while having a relatively low exit count. The exit
1143 // will reset the execution counter to some target threshold, meaning that this
1144 // code won't be reached until that loop heats up for >=1000 executions. But then
1145 // we do a second check here, to see if we should either reoptimize, or just
1146 // attempt OSR entry. Hence it might even be correct for
1147 // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1148 // additional checking anyway, to reduce the amount of recompilation thrashing.
1149 if (codeBlock
->replacement()->shouldReoptimizeFromLoopNow()) {
1150 if (Options::verboseOSR()) {
1152 "Triggering reoptimization of ", *codeBlock
,
1153 "(", *codeBlock
->replacement(), ") (in loop).\n");
1155 codeBlock
->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger
, CountReoptimization
);
1156 return encodeResult(0, 0);
1159 if (!codeBlock
->shouldOptimizeNow()) {
1160 if (Options::verboseOSR()) {
1162 "Delaying optimization for ", *codeBlock
,
1163 " because of insufficient profiling.\n");
1165 return encodeResult(0, 0);
1168 if (Options::verboseOSR())
1169 dataLog("Triggering optimized compilation of ", *codeBlock
, "\n");
1171 unsigned numVarsWithValues
;
1173 numVarsWithValues
= codeBlock
->m_numVars
;
1175 numVarsWithValues
= 0;
1176 Operands
<JSValue
> mustHandleValues(codeBlock
->numParameters(), numVarsWithValues
);
1177 for (size_t i
= 0; i
< mustHandleValues
.size(); ++i
) {
1178 int operand
= mustHandleValues
.operandForIndex(i
);
1179 if (operandIsArgument(operand
)
1180 && !VirtualRegister(operand
).toArgument()
1181 && codeBlock
->codeType() == FunctionCode
1182 && codeBlock
->specializationKind() == CodeForConstruct
) {
1183 // Ugh. If we're in a constructor, the 'this' argument may hold garbage. It will
1184 // also never be used. It doesn't matter what we put into the value for this,
1185 // but it has to be an actual value that can be grokked by subsequent DFG passes,
1186 // so we sanitize it here by turning it into Undefined.
1187 mustHandleValues
[i
] = jsUndefined();
1189 mustHandleValues
[i
] = exec
->uncheckedR(operand
).jsValue();
1192 RefPtr
<CodeBlock
> replacementCodeBlock
= codeBlock
->newReplacement();
1193 CompilationResult result
= DFG::compile(
1194 vm
, replacementCodeBlock
.get(), 0, DFG::DFGMode
, bytecodeIndex
,
1195 mustHandleValues
, JITToDFGDeferredCompilationCallback::create());
1197 if (result
!= CompilationSuccessful
) {
1198 ASSERT(result
== CompilationDeferred
|| replacementCodeBlock
->hasOneRef());
1199 return encodeResult(0, 0);
1203 CodeBlock
* optimizedCodeBlock
= codeBlock
->replacement();
1204 ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock
->jitType()));
1206 if (void* dataBuffer
= DFG::prepareOSREntry(exec
, optimizedCodeBlock
, bytecodeIndex
)) {
1207 if (Options::verboseOSR()) {
1209 "Performing OSR ", *codeBlock
, " -> ", *optimizedCodeBlock
, ".\n");
1212 codeBlock
->optimizeSoon();
1213 return encodeResult(vm
.getCTIStub(DFG::osrEntryThunkGenerator
).code().executableAddress(), dataBuffer
);
1216 if (Options::verboseOSR()) {
1218 "Optimizing ", *codeBlock
, " -> ", *codeBlock
->replacement(),
1219 " succeeded, OSR failed, after a delay of ",
1220 codeBlock
->optimizationDelayCounter(), ".\n");
1223 // Count the OSR failure as a speculation failure. If this happens a lot, then
1225 optimizedCodeBlock
->countOSRExit();
1227 // We are a lot more conservative about triggering reoptimization after OSR failure than
1228 // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1229 // already, then we really would like to reoptimize immediately. But this case covers
1230 // something else: there weren't many (or any) speculation failures before, but we just
1231 // failed to enter the speculative code because some variable had the wrong value or
1232 // because the OSR code decided for any spurious reason that it did not want to OSR
1233 // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
1234 // reoptimization trigger.
1235 if (optimizedCodeBlock
->shouldReoptimizeNow()) {
1236 if (Options::verboseOSR()) {
1238 "Triggering reoptimization of ", *codeBlock
, " -> ",
1239 *codeBlock
->replacement(), " (after OSR fail).\n");
1241 optimizedCodeBlock
->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail
, CountReoptimization
);
1242 return encodeResult(0, 0);
1245 // OSR failed this time, but it might succeed next time! Let the code run a bit
1246 // longer and then try again.
1247 codeBlock
->optimizeAfterWarmUp();
1249 return encodeResult(0, 0);
1253 void JIT_OPERATION
operationPutByIndex(ExecState
* exec
, EncodedJSValue encodedArrayValue
, int32_t index
, EncodedJSValue encodedValue
)
1255 VM
& vm
= exec
->vm();
1256 NativeCallFrameTracer
tracer(&vm
, exec
);
1258 JSValue arrayValue
= JSValue::decode(encodedArrayValue
);
1259 ASSERT(isJSArray(arrayValue
));
1260 asArray(arrayValue
)->putDirectIndex(exec
, index
, JSValue::decode(encodedValue
));
1264 void JIT_OPERATION
operationPutGetterSetter(ExecState
* exec
, EncodedJSValue encodedObjectValue
, Identifier
* identifier
, EncodedJSValue encodedGetterValue
, EncodedJSValue encodedSetterValue
)
1266 VM
& vm
= exec
->vm();
1267 NativeCallFrameTracer
tracer(&vm
, exec
);
1269 ASSERT(JSValue::decode(encodedObjectValue
).isObject());
1270 JSObject
* baseObj
= asObject(JSValue::decode(encodedObjectValue
));
1272 GetterSetter
* accessor
= GetterSetter::create(vm
);
1274 JSValue getter
= JSValue::decode(encodedGetterValue
);
1275 JSValue setter
= JSValue::decode(encodedSetterValue
);
1276 ASSERT(getter
.isObject() || getter
.isUndefined());
1277 ASSERT(setter
.isObject() || setter
.isUndefined());
1278 ASSERT(getter
.isObject() || setter
.isObject());
1280 if (!getter
.isUndefined())
1281 accessor
->setGetter(vm
, asObject(getter
));
1282 if (!setter
.isUndefined())
1283 accessor
->setSetter(vm
, asObject(setter
));
1284 baseObj
->putDirectAccessor(exec
, *identifier
, accessor
, Accessor
);
1287 void JIT_OPERATION
operationPutGetterSetter(ExecState
* exec
, JSCell
* object
, Identifier
* identifier
, JSCell
* getter
, JSCell
* setter
)
1289 VM
& vm
= exec
->vm();
1290 NativeCallFrameTracer
tracer(&vm
, exec
);
1292 ASSERT(object
&& object
->isObject());
1293 JSObject
* baseObj
= object
->getObject();
1295 GetterSetter
* accessor
= GetterSetter::create(vm
);
1297 ASSERT(!getter
|| getter
->isObject());
1298 ASSERT(!setter
|| setter
->isObject());
1299 ASSERT(getter
|| setter
);
1302 accessor
->setGetter(vm
, getter
->getObject());
1304 accessor
->setSetter(vm
, setter
->getObject());
1305 baseObj
->putDirectAccessor(exec
, *identifier
, accessor
, Accessor
);
1309 void JIT_OPERATION
operationPushNameScope(ExecState
* exec
, Identifier
* identifier
, EncodedJSValue encodedValue
, int32_t attibutes
)
1311 VM
& vm
= exec
->vm();
1312 NativeCallFrameTracer
tracer(&vm
, exec
);
1314 JSNameScope
* scope
= JSNameScope::create(exec
, *identifier
, JSValue::decode(encodedValue
), attibutes
);
1316 exec
->setScope(scope
);
1319 void JIT_OPERATION
operationPushWithScope(ExecState
* exec
, EncodedJSValue encodedValue
)
1321 VM
& vm
= exec
->vm();
1322 NativeCallFrameTracer
tracer(&vm
, exec
);
1324 JSObject
* o
= JSValue::decode(encodedValue
).toObject(exec
);
1328 exec
->setScope(JSWithScope::create(exec
, o
));
1331 void JIT_OPERATION
operationPopScope(ExecState
* exec
)
1333 VM
& vm
= exec
->vm();
1334 NativeCallFrameTracer
tracer(&vm
, exec
);
1336 exec
->setScope(exec
->scope()->next());
1339 void JIT_OPERATION
operationProfileDidCall(ExecState
* exec
, EncodedJSValue encodedValue
)
1341 VM
& vm
= exec
->vm();
1342 NativeCallFrameTracer
tracer(&vm
, exec
);
1344 if (LegacyProfiler
* profiler
= vm
.enabledProfiler())
1345 profiler
->didExecute(exec
, JSValue::decode(encodedValue
));
1348 void JIT_OPERATION
operationProfileWillCall(ExecState
* exec
, EncodedJSValue encodedValue
)
1350 VM
& vm
= exec
->vm();
1351 NativeCallFrameTracer
tracer(&vm
, exec
);
1353 if (LegacyProfiler
* profiler
= vm
.enabledProfiler())
1354 profiler
->willExecute(exec
, JSValue::decode(encodedValue
));
1357 EncodedJSValue JIT_OPERATION
operationCheckHasInstance(ExecState
* exec
, EncodedJSValue encodedValue
, EncodedJSValue encodedBaseVal
)
1359 VM
& vm
= exec
->vm();
1360 NativeCallFrameTracer
tracer(&vm
, exec
);
1362 JSValue value
= JSValue::decode(encodedValue
);
1363 JSValue baseVal
= JSValue::decode(encodedBaseVal
);
1365 if (baseVal
.isObject()) {
1366 JSObject
* baseObject
= asObject(baseVal
);
1367 ASSERT(!baseObject
->structure(vm
)->typeInfo().implementsDefaultHasInstance());
1368 if (baseObject
->structure(vm
)->typeInfo().implementsHasInstance()) {
1369 bool result
= baseObject
->methodTable(vm
)->customHasInstance(baseObject
, exec
, value
);
1370 return JSValue::encode(jsBoolean(result
));
1374 vm
.throwException(exec
, createInvalidParameterError(exec
, "instanceof", baseVal
));
1375 return JSValue::encode(JSValue());
1378 JSCell
* JIT_OPERATION
operationCreateActivation(ExecState
* exec
, int32_t offset
)
1380 VM
& vm
= exec
->vm();
1381 NativeCallFrameTracer
tracer(&vm
, exec
);
1382 JSActivation
* activation
= JSActivation::create(vm
, exec
, exec
->registers() + offset
, exec
->codeBlock());
1383 exec
->setScope(activation
);
1387 JSCell
* JIT_OPERATION
operationCreateArguments(ExecState
* exec
)
1389 VM
& vm
= exec
->vm();
1390 NativeCallFrameTracer
tracer(&vm
, exec
);
1391 // NB: This needs to be exceedingly careful with top call frame tracking, since it
1392 // may be called from OSR exit, while the state of the call stack is bizarre.
1393 Arguments
* result
= Arguments::create(vm
, exec
);
1394 ASSERT(!vm
.exception());
1398 JSCell
* JIT_OPERATION
operationCreateArgumentsDuringOSRExit(ExecState
* exec
)
1400 DeferGCForAWhile(exec
->vm().heap
);
1401 return operationCreateArguments(exec
);
1404 EncodedJSValue JIT_OPERATION
operationGetArgumentsLength(ExecState
* exec
, int32_t argumentsRegister
)
1406 VM
& vm
= exec
->vm();
1407 NativeCallFrameTracer
tracer(&vm
, exec
);
1408 // Here we can assume that the argumernts were created. Because otherwise the JIT code would
1409 // have not made this call.
1410 Identifier
ident(&vm
, "length");
1411 JSValue baseValue
= exec
->uncheckedR(argumentsRegister
).jsValue();
1412 PropertySlot
slot(baseValue
);
1413 return JSValue::encode(baseValue
.get(exec
, ident
, slot
));
1418 static JSValue
getByVal(ExecState
* exec
, JSValue baseValue
, JSValue subscript
, ReturnAddressPtr returnAddress
)
1420 if (LIKELY(baseValue
.isCell() && subscript
.isString())) {
1421 VM
& vm
= exec
->vm();
1422 Structure
& structure
= *baseValue
.asCell()->structure(vm
);
1423 if (JSCell::canUseFastGetOwnProperty(structure
)) {
1424 if (JSValue result
= baseValue
.asCell()->fastGetOwnProperty(vm
, structure
, asString(subscript
)->value(exec
)))
1429 if (subscript
.isUInt32()) {
1430 uint32_t i
= subscript
.asUInt32();
1431 if (isJSString(baseValue
) && asString(baseValue
)->canGetIndex(i
)) {
1432 ctiPatchCallByReturnAddress(exec
->codeBlock(), returnAddress
, FunctionPtr(operationGetByValString
));
1433 return asString(baseValue
)->getIndex(exec
, i
);
1435 return baseValue
.get(exec
, i
);
1438 if (isName(subscript
))
1439 return baseValue
.get(exec
, jsCast
<NameInstance
*>(subscript
.asCell())->privateName());
1441 Identifier property
= subscript
.toString(exec
)->toIdentifier(exec
);
1442 return baseValue
.get(exec
, property
);
1447 EncodedJSValue JIT_OPERATION
operationGetByValGeneric(ExecState
* exec
, EncodedJSValue encodedBase
, EncodedJSValue encodedSubscript
)
1449 VM
& vm
= exec
->vm();
1450 NativeCallFrameTracer
tracer(&vm
, exec
);
1451 JSValue baseValue
= JSValue::decode(encodedBase
);
1452 JSValue subscript
= JSValue::decode(encodedSubscript
);
1454 JSValue result
= getByVal(exec
, baseValue
, subscript
, ReturnAddressPtr(OUR_RETURN_ADDRESS
));
1455 return JSValue::encode(result
);
1458 EncodedJSValue JIT_OPERATION
operationGetByValDefault(ExecState
* exec
, EncodedJSValue encodedBase
, EncodedJSValue encodedSubscript
)
1460 VM
& vm
= exec
->vm();
1461 NativeCallFrameTracer
tracer(&vm
, exec
);
1462 JSValue baseValue
= JSValue::decode(encodedBase
);
1463 JSValue subscript
= JSValue::decode(encodedSubscript
);
1465 if (baseValue
.isObject() && subscript
.isInt32()) {
1466 // See if it's worth optimizing this at all.
1467 JSObject
* object
= asObject(baseValue
);
1468 bool didOptimize
= false;
1470 unsigned bytecodeOffset
= exec
->locationAsBytecodeOffset();
1471 ASSERT(bytecodeOffset
);
1472 ByValInfo
& byValInfo
= exec
->codeBlock()->getByValInfo(bytecodeOffset
- 1);
1473 ASSERT(!byValInfo
.stubRoutine
);
1475 if (hasOptimizableIndexing(object
->structure(vm
))) {
1476 // Attempt to optimize.
1477 JITArrayMode arrayMode
= jitArrayModeForStructure(object
->structure(vm
));
1478 if (arrayMode
!= byValInfo
.arrayMode
) {
1479 JIT::compileGetByVal(&vm
, exec
->codeBlock(), &byValInfo
, ReturnAddressPtr(OUR_RETURN_ADDRESS
), arrayMode
);
1485 // If we take slow path more than 10 times without patching then make sure we
1486 // never make that mistake again. Or, if we failed to patch and we have some object
1487 // that intercepts indexed get, then don't even wait until 10 times. For cases
1488 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1489 // opportunity for us to observe that the get_by_val may be polymorphic.
1490 if (++byValInfo
.slowPathCount
>= 10
1491 || object
->structure(vm
)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1492 // Don't ever try to optimize.
1493 RepatchBuffer
repatchBuffer(exec
->codeBlock());
1494 repatchBuffer
.relinkCallerToFunction(ReturnAddressPtr(OUR_RETURN_ADDRESS
), FunctionPtr(operationGetByValGeneric
));
1499 JSValue result
= getByVal(exec
, baseValue
, subscript
, ReturnAddressPtr(OUR_RETURN_ADDRESS
));
1500 return JSValue::encode(result
);
1503 EncodedJSValue JIT_OPERATION
operationGetByValString(ExecState
* exec
, EncodedJSValue encodedBase
, EncodedJSValue encodedSubscript
)
1505 VM
& vm
= exec
->vm();
1506 NativeCallFrameTracer
tracer(&vm
, exec
);
1507 JSValue baseValue
= JSValue::decode(encodedBase
);
1508 JSValue subscript
= JSValue::decode(encodedSubscript
);
1511 if (LIKELY(subscript
.isUInt32())) {
1512 uint32_t i
= subscript
.asUInt32();
1513 if (isJSString(baseValue
) && asString(baseValue
)->canGetIndex(i
))
1514 result
= asString(baseValue
)->getIndex(exec
, i
);
1516 result
= baseValue
.get(exec
, i
);
1517 if (!isJSString(baseValue
))
1518 ctiPatchCallByReturnAddress(exec
->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS
), FunctionPtr(operationGetByValDefault
));
1520 } else if (isName(subscript
))
1521 result
= baseValue
.get(exec
, jsCast
<NameInstance
*>(subscript
.asCell())->privateName());
1523 Identifier property
= subscript
.toString(exec
)->toIdentifier(exec
);
1524 result
= baseValue
.get(exec
, property
);
1527 return JSValue::encode(result
);
1530 void JIT_OPERATION
operationTearOffActivation(ExecState
* exec
, JSCell
* activationCell
)
1532 VM
& vm
= exec
->vm();
1533 NativeCallFrameTracer
tracer(&vm
, exec
);
1535 ASSERT(exec
->codeBlock()->needsActivation());
1536 jsCast
<JSActivation
*>(activationCell
)->tearOff(vm
);
1539 void JIT_OPERATION
operationTearOffArguments(ExecState
* exec
, JSCell
* argumentsCell
, JSCell
* activationCell
)
1541 ASSERT(exec
->codeBlock()->usesArguments());
1542 if (activationCell
) {
1543 jsCast
<Arguments
*>(argumentsCell
)->didTearOffActivation(exec
, jsCast
<JSActivation
*>(activationCell
));
1546 jsCast
<Arguments
*>(argumentsCell
)->tearOff(exec
);
1549 EncodedJSValue JIT_OPERATION
operationDeleteById(ExecState
* exec
, EncodedJSValue encodedBase
, const Identifier
* identifier
)
1551 VM
& vm
= exec
->vm();
1552 NativeCallFrameTracer
tracer(&vm
, exec
);
1554 JSObject
* baseObj
= JSValue::decode(encodedBase
).toObject(exec
);
1555 bool couldDelete
= baseObj
->methodTable(vm
)->deleteProperty(baseObj
, exec
, *identifier
);
1556 JSValue result
= jsBoolean(couldDelete
);
1557 if (!couldDelete
&& exec
->codeBlock()->isStrictMode())
1558 vm
.throwException(exec
, createTypeError(exec
, "Unable to delete property."));
1559 return JSValue::encode(result
);
1562 JSCell
* JIT_OPERATION
operationGetPNames(ExecState
* exec
, JSObject
* obj
)
1564 VM
& vm
= exec
->vm();
1565 NativeCallFrameTracer
tracer(&vm
, exec
);
1567 Structure
* structure
= obj
->structure(vm
);
1568 JSPropertyNameIterator
* jsPropertyNameIterator
= structure
->enumerationCache();
1569 if (!jsPropertyNameIterator
|| jsPropertyNameIterator
->cachedPrototypeChain() != structure
->prototypeChain(exec
))
1570 jsPropertyNameIterator
= JSPropertyNameIterator::create(exec
, obj
);
1571 return jsPropertyNameIterator
;
1574 EncodedJSValue JIT_OPERATION
operationInstanceOf(ExecState
* exec
, EncodedJSValue encodedValue
, EncodedJSValue encodedProto
)
1576 VM
& vm
= exec
->vm();
1577 NativeCallFrameTracer
tracer(&vm
, exec
);
1578 JSValue value
= JSValue::decode(encodedValue
);
1579 JSValue proto
= JSValue::decode(encodedProto
);
1581 ASSERT(!value
.isObject() || !proto
.isObject());
1583 bool result
= JSObject::defaultHasInstance(exec
, value
, proto
);
1584 return JSValue::encode(jsBoolean(result
));
1587 CallFrame
* JIT_OPERATION
operationSizeFrameForVarargs(ExecState
* exec
, EncodedJSValue encodedArguments
, int32_t firstFreeRegister
, int32_t firstVarArgOffset
)
1589 VM
& vm
= exec
->vm();
1590 NativeCallFrameTracer
tracer(&vm
, exec
);
1591 JSStack
* stack
= &exec
->interpreter()->stack();
1592 JSValue arguments
= JSValue::decode(encodedArguments
);
1593 CallFrame
* newCallFrame
= sizeFrameForVarargs(exec
, stack
, arguments
, firstFreeRegister
, firstVarArgOffset
);
1594 return newCallFrame
;
1597 CallFrame
* JIT_OPERATION
operationLoadVarargs(ExecState
* exec
, CallFrame
* newCallFrame
, EncodedJSValue encodedThis
, EncodedJSValue encodedArguments
, int32_t firstVarArgOffset
)
1599 VM
& vm
= exec
->vm();
1600 NativeCallFrameTracer
tracer(&vm
, exec
);
1601 JSValue thisValue
= JSValue::decode(encodedThis
);
1602 JSValue arguments
= JSValue::decode(encodedArguments
);
1603 loadVarargs(exec
, newCallFrame
, thisValue
, arguments
, firstVarArgOffset
);
1604 return newCallFrame
;
1607 EncodedJSValue JIT_OPERATION
operationToObject(ExecState
* exec
, EncodedJSValue value
)
1609 VM
& vm
= exec
->vm();
1610 NativeCallFrameTracer
tracer(&vm
, exec
);
1611 return JSValue::encode(JSValue::decode(value
).toObject(exec
));
1614 char* JIT_OPERATION
operationSwitchCharWithUnknownKeyType(ExecState
* exec
, EncodedJSValue encodedKey
, size_t tableIndex
)
1616 VM
& vm
= exec
->vm();
1617 NativeCallFrameTracer
tracer(&vm
, exec
);
1618 JSValue key
= JSValue::decode(encodedKey
);
1619 CodeBlock
* codeBlock
= exec
->codeBlock();
1621 SimpleJumpTable
& jumpTable
= codeBlock
->switchJumpTable(tableIndex
);
1622 void* result
= jumpTable
.ctiDefault
.executableAddress();
1624 if (key
.isString()) {
1625 StringImpl
* value
= asString(key
)->value(exec
).impl();
1626 if (value
->length() == 1)
1627 result
= jumpTable
.ctiForValue((*value
)[0]).executableAddress();
1630 return reinterpret_cast<char*>(result
);
1633 char* JIT_OPERATION
operationSwitchImmWithUnknownKeyType(ExecState
* exec
, EncodedJSValue encodedKey
, size_t tableIndex
)
1635 VM
& vm
= exec
->vm();
1636 NativeCallFrameTracer
tracer(&vm
, exec
);
1637 JSValue key
= JSValue::decode(encodedKey
);
1638 CodeBlock
* codeBlock
= exec
->codeBlock();
1640 SimpleJumpTable
& jumpTable
= codeBlock
->switchJumpTable(tableIndex
);
1643 result
= jumpTable
.ctiForValue(key
.asInt32()).executableAddress();
1644 else if (key
.isDouble() && key
.asDouble() == static_cast<int32_t>(key
.asDouble()))
1645 result
= jumpTable
.ctiForValue(static_cast<int32_t>(key
.asDouble())).executableAddress();
1647 result
= jumpTable
.ctiDefault
.executableAddress();
1648 return reinterpret_cast<char*>(result
);
// Slow path for a string switch whose key type was not statically known.
// A string key is looked up in the code block's string jump table; a
// non-string key takes the default target. Returns the machine-code
// address to jump to.
// NOTE(review): the `void* result;` declaration and the `} else` line were
// lost in extraction and have been reconstructed — confirm against upstream.
char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    void* result;
    StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);

    if (key.isString()) {
        StringImpl* value = asString(key)->value(exec).impl();
        result = jumpTable.ctiForValue(value).executableAddress();
    } else
        result = jumpTable.ctiDefault.executableAddress();

    return reinterpret_cast<char*>(result);
}
1670 EncodedJSValue JIT_OPERATION
operationResolveScope(ExecState
* exec
, int32_t identifierIndex
)
1672 VM
& vm
= exec
->vm();
1673 NativeCallFrameTracer
tracer(&vm
, exec
);
1674 const Identifier
& ident
= exec
->codeBlock()->identifier(identifierIndex
);
1675 return JSValue::encode(JSScope::resolve(exec
, exec
->scope(), ident
));
// Slow path for op_get_from_scope: reads `ident` from the scope object
// stored in the instruction's scope operand. If the property is missing,
// throws a reference error in ThrowIfNotFound mode and returns undefined.
// On a cacheable hit against a global property, records the (structure,
// offset) pair into the instruction stream so later executions can go fast.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    CodeBlock* codeBlock = exec->codeBlock();
    Instruction* pc = bytecodePC;

    // Operand layout: pc[2] = scope register, pc[3] = identifier index,
    // pc[4] = resolve mode/type, pc[5]/pc[6] = cache (structure, offset).
    const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
    ResolveModeAndType modeAndType(pc[4].u.operand);

    PropertySlot slot(scope);
    if (!scope->getPropertySlot(exec, ident, slot)) {
        if (modeAndType.mode() == ThrowIfNotFound)
            vm.throwException(exec, createUndefinedVariableError(exec, ident));
        return JSValue::encode(jsUndefined());
    }

    // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
    if (slot.isCacheableValue() && slot.slotBase() == scope && scope->structure(vm)->propertyAccessesAreCacheable()) {
        if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
            // Lock the CodeBlock so concurrent compiler threads see a
            // consistent (structure, offset) pair.
            ConcurrentJITLocker locker(codeBlock->m_lock);
            pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure(vm));
            pc[6].u.operand = slot.cachedOffset();
        }
    }

    return JSValue::encode(slot.getValue(exec, ident));
}
// Slow path for op_put_to_scope: stores `value` under `ident` on the scope
// object from the instruction's scope operand. In ThrowIfNotFound mode,
// assigning to a variable that does not exist throws a reference error
// instead of creating it. On a cacheable global-property put, records the
// (structure, offset) pair into the instruction stream.
// NOTE(review): the two early `return;` statements were lost in extraction
// and have been reconstructed (the throw path and the post-put exception
// check are otherwise fall-through) — confirm against the upstream file.
void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    Instruction* pc = bytecodePC;

    // Operand layout: pc[1] = scope register, pc[2] = identifier index,
    // pc[3] = value register, pc[4] = resolve mode/type, pc[5]/pc[6] = cache.
    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
    JSValue value = exec->r(pc[3].u.operand).jsValue();
    ResolveModeAndType modeAndType = ResolveModeAndType(pc[4].u.operand);

    if (modeAndType.mode() == ThrowIfNotFound && !scope->hasProperty(exec, ident)) {
        exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode());
    scope->methodTable()->put(scope, exec, ident, value, slot);

    // Don't cache anything if the put itself raised an exception.
    if (exec->vm().exception())
        return;

    // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
    if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
        if (slot.isCacheablePut() && slot.base() == scope && scope->structure()->propertyAccessesAreCacheable()) {
            ConcurrentJITLocker locker(codeBlock->m_lock);
            pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure());
            pc[6].u.operand = slot.cachedOffset();
        }
    }
}
1741 void JIT_OPERATION
operationThrow(ExecState
* exec
, EncodedJSValue encodedExceptionValue
)
1743 VM
* vm
= &exec
->vm();
1744 NativeCallFrameTracer
tracer(vm
, exec
);
1746 JSValue exceptionValue
= JSValue::decode(encodedExceptionValue
);
1747 vm
->throwException(exec
, exceptionValue
);
1749 // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForThrow
1750 genericUnwind(vm
, exec
, exceptionValue
);
1753 void JIT_OPERATION
operationFlushWriteBarrierBuffer(ExecState
* exec
, JSCell
* cell
)
1755 VM
* vm
= &exec
->vm();
1756 NativeCallFrameTracer
tracer(vm
, exec
);
1757 vm
->heap
.flushWriteBarrierBuffer(cell
);
1760 void JIT_OPERATION
operationOSRWriteBarrier(ExecState
* exec
, JSCell
* cell
)
1762 VM
* vm
= &exec
->vm();
1763 NativeCallFrameTracer
tracer(vm
, exec
);
1764 vm
->heap
.writeBarrier(cell
);
1767 // NB: We don't include the value as part of the barrier because the write barrier elision
1768 // phase in the DFG only tracks whether the object being stored to has been barriered. It
1769 // would be much more complicated to try to model the value being stored as well.
1770 void JIT_OPERATION
operationUnconditionalWriteBarrier(ExecState
* exec
, JSCell
* cell
)
1772 VM
* vm
= &exec
->vm();
1773 NativeCallFrameTracer
tracer(vm
, exec
);
1774 vm
->heap
.writeBarrier(cell
);
1777 void JIT_OPERATION
operationInitGlobalConst(ExecState
* exec
, Instruction
* pc
)
1779 VM
* vm
= &exec
->vm();
1780 NativeCallFrameTracer
tracer(vm
, exec
);
1782 JSValue value
= exec
->r(pc
[2].u
.operand
).jsValue();
1783 pc
[1].u
.registerPointer
->set(*vm
, exec
->codeBlock()->globalObject(), value
);
1786 void JIT_OPERATION
lookupExceptionHandler(VM
* vm
, ExecState
* exec
)
1788 NativeCallFrameTracer
tracer(vm
, exec
, NativeCallFrameTracer::VMEntrySentinelOK
);
1790 JSValue exceptionValue
= vm
->exception();
1791 ASSERT(exceptionValue
);
1793 genericUnwind(vm
, exec
, exceptionValue
);
1794 ASSERT(vm
->targetMachinePCForThrow
);
1797 void JIT_OPERATION
operationVMHandleException(ExecState
* exec
)
1799 VM
* vm
= &exec
->vm();
1800 NativeCallFrameTracer
tracer(vm
, exec
);
1802 ASSERT(!exec
->isVMEntrySentinel());
1803 genericUnwind(vm
, exec
, vm
->exception());
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
// Test-only hook: counts exception-check sites as they execute and throws a
// synthetic "Exception Fuzz" error once the configured target check fires.
// NOTE(review): the opening `#if COMPILER(CLANG)` was lost in extraction
// (its matching #endif is below) and has been reconstructed.
void JIT_OPERATION operationExceptionFuzz()
{
    ASSERT(Options::enableExceptionFuzz());

    // This probably "just works" for GCC also, but I haven't tried.
#if COMPILER(CLANG)
    // Walk up one frame to recover the JS call frame of our caller.
    ExecState* exec = static_cast<ExecState*>(__builtin_frame_address(1));
    // Suppress GC while we poke at a frame obtained this way.
    DeferGCForAWhile deferGC(exec->vm().heap);

    s_numberOfExceptionFuzzChecks++;

    unsigned fireTarget = Options::fireExceptionFuzzAt();
    if (fireTarget == s_numberOfExceptionFuzzChecks) {
        printf("JSC EXCEPTION FUZZ: Throwing fuzz exception with call frame %p and return address %p.\n", exec, __builtin_return_address(0));
        exec->vm().throwException(
            exec, createError(exec->lexicalGlobalObject(), ASCIILiteral("Exception Fuzz")));
    }
#endif // COMPILER(CLANG)
}
// Note: getHostCallReturnValueWithExecState() needs to be placed before the
// definition of getHostCallReturnValue() below because the Windows build
// requires it.
// Returns (encoded) the host-call return value stashed on the VM for the
// given call frame; a null exec yields the empty JSValue.
// NOTE(review): the null-check guard was lost in extraction (two consecutive
// returns remained) and has been reconstructed — confirm against upstream.
extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
{
    if (!exec)
        return JSValue::encode(JSValue());
    return JSValue::encode(exec->vm().hostCallReturnValue);
}
// Per-architecture assembly thunks for getHostCallReturnValue. Each thunk
// forwards the caller's frame pointer (which is the ExecState in the JIT's
// frame layout) as the first argument to getHostCallReturnValueWithExecState
// and transfers control there.
// NOTE(review): several instruction lines (the frame-pointer moves and the
// asm(...) delimiters) were lost in extraction and have been reconstructed
// from the surviving directives and each target's ABI — confirm against the
// upstream file before relying on the exact instruction sequences.
#if COMPILER(GCC) && CPU(X86_64)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // SysV AMD64: first integer argument is passed in %rdi.
    "mov %rbp, %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(X86)
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // Make room, pass the caller's %ebp on the stack, then clean up.
    "leal -4(%esp), %esp\n"
    "push %ebp\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "ret\n"
);

#elif COMPILER(GCC) && CPU(ARM_THUMB2)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // Thumb-2 frame pointer lives in r7; argument 0 goes in r0.
    "mov r0, r7" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // ARM frame pointer lives in r11; argument 0 goes in r0.
    "mov r0, r11" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // AArch64 frame pointer is x29; argument 0 goes in x0.
    "mov x0, x29" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(MIPS)

#if WTF_MIPS_PIC
// In PIC builds the callee expects its own address in $t9 ($25).
#define LOAD_FUNCTION_TO_T9(function) \
        ".set noreorder" "\n" \
        ".cpload $25" "\n" \
        ".set reorder" "\n" \
        "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    // MIPS frame pointer is $fp; argument 0 goes in $a0.
    "move $a0, $fp" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    // SH4 frame pointer is r14; argument 0 goes in r4. The branch target is
    // loaded PC-relative through a scratch register for braf.
    "mov r14, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);
#elif COMPILER(MSVC) && CPU(X86)
extern "C" {
    // Naked MSVC thunk: stores the caller's ebp (the ExecState) into the
    // first argument's stack slot and tail-jumps to the C++ implementation.
    // NOTE(review): the extern "C" wrapper, braces, and namespace/endif tail
    // were reconstructed from the surviving fragments — confirm upstream.
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm mov [esp + 4], ebp;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif

} // namespace JSC

#endif // ENABLE(JIT)