2 * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 #include "JITOperations.h"
31 #include "ArrayConstructor.h"
32 #include "DFGCompilationMode.h"
33 #include "DFGDriver.h"
34 #include "DFGOSREntry.h"
35 #include "DFGThunks.h"
36 #include "DFGWorklist.h"
38 #include "DirectArguments.h"
40 #include "ErrorHandlingScope.h"
41 #include "ExceptionFuzz.h"
42 #include "GetterSetter.h"
43 #include "HostCallReturnValue.h"
45 #include "JITToDFGDeferredCompilationCallback.h"
46 #include "JSCInlines.h"
47 #include "JSCatchScope.h"
48 #include "JSFunctionNameScope.h"
49 #include "JSGlobalObjectFunctions.h"
50 #include "JSLexicalEnvironment.h"
51 #include "JSNameScope.h"
52 #include "JSPropertyNameEnumerator.h"
53 #include "JSStackInlines.h"
54 #include "JSWithScope.h"
55 #include "LegacyProfiler.h"
56 #include "ObjectConstructor.h"
57 #include "PropertyName.h"
59 #include "RepatchBuffer.h"
60 #include "ScopedArguments.h"
61 #include "TestRunnerUtils.h"
62 #include "TypeProfilerLog.h"
63 #include <wtf/InlineASM.h>
67 template<typename ScopeType
>
68 void pushNameScope(ExecState
* exec
, int32_t dst
, SymbolTable
* symbolTable
, EncodedJSValue encodedValue
)
71 NativeCallFrameTracer
tracer(&vm
, exec
);
73 ASSERT(!JITCode::isOptimizingJIT(exec
->codeBlock()->jitType()));
75 // FIXME: This won't work if this operation is called from the DFG or FTL.
76 // This should be changed to pass in the new scope.
77 JSScope
* currentScope
= exec
->uncheckedR(dst
).Register::scope();
78 JSNameScope
* scope
= ScopeType::create(vm
, exec
->lexicalGlobalObject(), currentScope
, symbolTable
, JSValue::decode(encodedValue
));
80 // FIXME: This won't work if this operation is called from the DFG or FTL.
81 // This should be changed to return the new scope.
82 exec
->uncheckedR(dst
) = scope
;
88 void * _ReturnAddress(void);
89 #pragma intrinsic(_ReturnAddress)
91 #define OUR_RETURN_ADDRESS _ReturnAddress()
93 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
96 #if ENABLE(OPCODE_SAMPLING)
97 #define CTI_SAMPLER vm->interpreter->sampler()
103 void JIT_OPERATION
operationThrowStackOverflowError(ExecState
* exec
, CodeBlock
* codeBlock
)
105 // We pass in our own code block, because the callframe hasn't been populated.
106 VM
* vm
= codeBlock
->vm();
108 VMEntryFrame
* vmEntryFrame
= vm
->topVMEntryFrame
;
109 CallFrame
* callerFrame
= exec
->callerFrame(vmEntryFrame
);
113 NativeCallFrameTracerWithRestore
tracer(vm
, vmEntryFrame
, callerFrame
);
114 ErrorHandlingScope
errorScope(*vm
);
115 vm
->throwException(callerFrame
, createStackOverflowError(callerFrame
));
118 int32_t JIT_OPERATION
operationCallArityCheck(ExecState
* exec
)
120 VM
* vm
= &exec
->vm();
121 VMEntryFrame
* vmEntryFrame
= vm
->topVMEntryFrame
;
122 CallFrame
* callerFrame
= exec
->callerFrame(vmEntryFrame
);
124 JSStack
& stack
= vm
->interpreter
->stack();
126 int32_t missingArgCount
= CommonSlowPaths::arityCheckFor(exec
, &stack
, CodeForCall
);
127 if (missingArgCount
< 0) {
128 NativeCallFrameTracerWithRestore
tracer(vm
, vmEntryFrame
, callerFrame
);
129 throwStackOverflowError(callerFrame
);
132 return missingArgCount
;
135 int32_t JIT_OPERATION
operationConstructArityCheck(ExecState
* exec
)
137 VM
* vm
= &exec
->vm();
138 VMEntryFrame
* vmEntryFrame
= vm
->topVMEntryFrame
;
139 CallFrame
* callerFrame
= exec
->callerFrame(vmEntryFrame
);
141 JSStack
& stack
= vm
->interpreter
->stack();
143 int32_t missingArgCount
= CommonSlowPaths::arityCheckFor(exec
, &stack
, CodeForConstruct
);
144 if (missingArgCount
< 0) {
145 NativeCallFrameTracerWithRestore
tracer(vm
, vmEntryFrame
, callerFrame
);
146 throwStackOverflowError(callerFrame
);
149 return missingArgCount
;
152 EncodedJSValue JIT_OPERATION
operationGetById(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue base
, UniquedStringImpl
* uid
)
154 VM
* vm
= &exec
->vm();
155 NativeCallFrameTracer
tracer(vm
, exec
);
157 stubInfo
->tookSlowPath
= true;
159 JSValue baseValue
= JSValue::decode(base
);
160 PropertySlot
slot(baseValue
);
161 Identifier ident
= Identifier::fromUid(vm
, uid
);
162 return JSValue::encode(baseValue
.get(exec
, ident
, slot
));
165 EncodedJSValue JIT_OPERATION
operationGetByIdGeneric(ExecState
* exec
, EncodedJSValue base
, UniquedStringImpl
* uid
)
167 VM
* vm
= &exec
->vm();
168 NativeCallFrameTracer
tracer(vm
, exec
);
170 JSValue baseValue
= JSValue::decode(base
);
171 PropertySlot
slot(baseValue
);
172 Identifier ident
= Identifier::fromUid(vm
, uid
);
173 return JSValue::encode(baseValue
.get(exec
, ident
, slot
));
176 EncodedJSValue JIT_OPERATION
operationGetByIdBuildList(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue base
, UniquedStringImpl
* uid
)
178 VM
* vm
= &exec
->vm();
179 NativeCallFrameTracer
tracer(vm
, exec
);
181 Identifier ident
= Identifier::fromUid(vm
, uid
);
182 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
184 JSValue baseValue
= JSValue::decode(base
);
185 PropertySlot
slot(baseValue
);
186 bool hasResult
= baseValue
.getPropertySlot(exec
, ident
, slot
);
188 if (accessType
== static_cast<AccessType
>(stubInfo
->accessType
))
189 buildGetByIDList(exec
, baseValue
, ident
, slot
, *stubInfo
);
191 return JSValue::encode(hasResult
? slot
.getValue(exec
, ident
) : jsUndefined());
194 EncodedJSValue JIT_OPERATION
operationGetByIdOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue base
, UniquedStringImpl
* uid
)
196 VM
* vm
= &exec
->vm();
197 NativeCallFrameTracer
tracer(vm
, exec
);
198 Identifier ident
= Identifier::fromUid(vm
, uid
);
200 JSValue baseValue
= JSValue::decode(base
);
201 PropertySlot
slot(baseValue
);
203 bool hasResult
= baseValue
.getPropertySlot(exec
, ident
, slot
);
205 repatchGetByID(exec
, baseValue
, ident
, slot
, *stubInfo
);
207 stubInfo
->seen
= true;
209 return JSValue::encode(hasResult
? slot
.getValue(exec
, ident
) : jsUndefined());
213 EncodedJSValue JIT_OPERATION
operationInOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, JSCell
* base
, UniquedStringImpl
* key
)
215 VM
* vm
= &exec
->vm();
216 NativeCallFrameTracer
tracer(vm
, exec
);
218 if (!base
->isObject()) {
219 vm
->throwException(exec
, createInvalidInParameterError(exec
, base
));
220 return JSValue::encode(jsUndefined());
223 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
225 Identifier ident
= Identifier::fromUid(vm
, key
);
226 PropertySlot
slot(base
);
227 bool result
= asObject(base
)->getPropertySlot(exec
, ident
, slot
);
229 RELEASE_ASSERT(accessType
== stubInfo
->accessType
);
232 repatchIn(exec
, base
, ident
, result
, slot
, *stubInfo
);
234 stubInfo
->seen
= true;
236 return JSValue::encode(jsBoolean(result
));
239 EncodedJSValue JIT_OPERATION
operationIn(ExecState
* exec
, StructureStubInfo
* stubInfo
, JSCell
* base
, UniquedStringImpl
* key
)
241 VM
* vm
= &exec
->vm();
242 NativeCallFrameTracer
tracer(vm
, exec
);
244 stubInfo
->tookSlowPath
= true;
246 if (!base
->isObject()) {
247 vm
->throwException(exec
, createInvalidInParameterError(exec
, base
));
248 return JSValue::encode(jsUndefined());
251 Identifier ident
= Identifier::fromUid(vm
, key
);
252 return JSValue::encode(jsBoolean(asObject(base
)->hasProperty(exec
, ident
)));
255 EncodedJSValue JIT_OPERATION
operationGenericIn(ExecState
* exec
, JSCell
* base
, EncodedJSValue key
)
257 VM
* vm
= &exec
->vm();
258 NativeCallFrameTracer
tracer(vm
, exec
);
260 return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec
, JSValue::decode(key
), base
)));
263 void JIT_OPERATION
operationPutByIdStrict(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
265 VM
* vm
= &exec
->vm();
266 NativeCallFrameTracer
tracer(vm
, exec
);
268 stubInfo
->tookSlowPath
= true;
270 Identifier ident
= Identifier::fromUid(vm
, uid
);
271 PutPropertySlot
slot(JSValue::decode(encodedBase
), true, exec
->codeBlock()->putByIdContext());
272 JSValue::decode(encodedBase
).put(exec
, ident
, JSValue::decode(encodedValue
), slot
);
275 void JIT_OPERATION
operationPutByIdNonStrict(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
277 VM
* vm
= &exec
->vm();
278 NativeCallFrameTracer
tracer(vm
, exec
);
280 stubInfo
->tookSlowPath
= true;
282 Identifier ident
= Identifier::fromUid(vm
, uid
);
283 PutPropertySlot
slot(JSValue::decode(encodedBase
), false, exec
->codeBlock()->putByIdContext());
284 JSValue::decode(encodedBase
).put(exec
, ident
, JSValue::decode(encodedValue
), slot
);
287 void JIT_OPERATION
operationPutByIdDirectStrict(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
289 VM
* vm
= &exec
->vm();
290 NativeCallFrameTracer
tracer(vm
, exec
);
292 stubInfo
->tookSlowPath
= true;
294 Identifier ident
= Identifier::fromUid(vm
, uid
);
295 PutPropertySlot
slot(JSValue::decode(encodedBase
), true, exec
->codeBlock()->putByIdContext());
296 asObject(JSValue::decode(encodedBase
))->putDirect(exec
->vm(), ident
, JSValue::decode(encodedValue
), slot
);
299 void JIT_OPERATION
operationPutByIdDirectNonStrict(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
301 VM
* vm
= &exec
->vm();
302 NativeCallFrameTracer
tracer(vm
, exec
);
304 stubInfo
->tookSlowPath
= true;
306 Identifier ident
= Identifier::fromUid(vm
, uid
);
307 PutPropertySlot
slot(JSValue::decode(encodedBase
), false, exec
->codeBlock()->putByIdContext());
308 asObject(JSValue::decode(encodedBase
))->putDirect(exec
->vm(), ident
, JSValue::decode(encodedValue
), slot
);
311 void JIT_OPERATION
operationPutByIdStrictOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
313 VM
* vm
= &exec
->vm();
314 NativeCallFrameTracer
tracer(vm
, exec
);
316 Identifier ident
= Identifier::fromUid(vm
, uid
);
317 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
319 JSValue value
= JSValue::decode(encodedValue
);
320 JSValue baseValue
= JSValue::decode(encodedBase
);
321 PutPropertySlot
slot(baseValue
, true, exec
->codeBlock()->putByIdContext());
323 Structure
* structure
= baseValue
.isCell() ? baseValue
.asCell()->structure(*vm
) : nullptr;
324 baseValue
.put(exec
, ident
, value
, slot
);
326 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
330 repatchPutByID(exec
, baseValue
, structure
, ident
, slot
, *stubInfo
, NotDirect
);
332 stubInfo
->seen
= true;
335 void JIT_OPERATION
operationPutByIdNonStrictOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
337 VM
* vm
= &exec
->vm();
338 NativeCallFrameTracer
tracer(vm
, exec
);
340 Identifier ident
= Identifier::fromUid(vm
, uid
);
341 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
343 JSValue value
= JSValue::decode(encodedValue
);
344 JSValue baseValue
= JSValue::decode(encodedBase
);
345 PutPropertySlot
slot(baseValue
, false, exec
->codeBlock()->putByIdContext());
347 Structure
* structure
= baseValue
.isCell() ? baseValue
.asCell()->structure(*vm
) : nullptr;
348 baseValue
.put(exec
, ident
, value
, slot
);
350 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
354 repatchPutByID(exec
, baseValue
, structure
, ident
, slot
, *stubInfo
, NotDirect
);
356 stubInfo
->seen
= true;
359 void JIT_OPERATION
operationPutByIdDirectStrictOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
361 VM
* vm
= &exec
->vm();
362 NativeCallFrameTracer
tracer(vm
, exec
);
364 Identifier ident
= Identifier::fromUid(vm
, uid
);
365 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
367 JSValue value
= JSValue::decode(encodedValue
);
368 JSObject
* baseObject
= asObject(JSValue::decode(encodedBase
));
369 PutPropertySlot
slot(baseObject
, true, exec
->codeBlock()->putByIdContext());
371 Structure
* structure
= baseObject
->structure(*vm
);
372 baseObject
->putDirect(exec
->vm(), ident
, value
, slot
);
374 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
378 repatchPutByID(exec
, baseObject
, structure
, ident
, slot
, *stubInfo
, Direct
);
380 stubInfo
->seen
= true;
383 void JIT_OPERATION
operationPutByIdDirectNonStrictOptimize(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
385 VM
* vm
= &exec
->vm();
386 NativeCallFrameTracer
tracer(vm
, exec
);
388 Identifier ident
= Identifier::fromUid(vm
, uid
);
389 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
391 JSValue value
= JSValue::decode(encodedValue
);
392 JSObject
* baseObject
= asObject(JSValue::decode(encodedBase
));
393 PutPropertySlot
slot(baseObject
, false, exec
->codeBlock()->putByIdContext());
395 Structure
* structure
= baseObject
->structure(*vm
);
396 baseObject
->putDirect(exec
->vm(), ident
, value
, slot
);
398 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
402 repatchPutByID(exec
, baseObject
, structure
, ident
, slot
, *stubInfo
, Direct
);
404 stubInfo
->seen
= true;
407 void JIT_OPERATION
operationPutByIdStrictBuildList(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
409 VM
* vm
= &exec
->vm();
410 NativeCallFrameTracer
tracer(vm
, exec
);
412 Identifier ident
= Identifier::fromUid(vm
, uid
);
413 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
415 JSValue value
= JSValue::decode(encodedValue
);
416 JSValue baseValue
= JSValue::decode(encodedBase
);
417 PutPropertySlot
slot(baseValue
, true, exec
->codeBlock()->putByIdContext());
419 Structure
* structure
= baseValue
.isCell() ? baseValue
.asCell()->structure(*vm
) : nullptr;
420 baseValue
.put(exec
, ident
, value
, slot
);
422 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
425 buildPutByIdList(exec
, baseValue
, structure
, ident
, slot
, *stubInfo
, NotDirect
);
428 void JIT_OPERATION
operationPutByIdNonStrictBuildList(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
430 VM
* vm
= &exec
->vm();
431 NativeCallFrameTracer
tracer(vm
, exec
);
433 Identifier ident
= Identifier::fromUid(vm
, uid
);
434 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
436 JSValue value
= JSValue::decode(encodedValue
);
437 JSValue baseValue
= JSValue::decode(encodedBase
);
438 PutPropertySlot
slot(baseValue
, false, exec
->codeBlock()->putByIdContext());
440 Structure
* structure
= baseValue
.isCell() ? baseValue
.asCell()->structure(*vm
) : nullptr;
441 baseValue
.put(exec
, ident
, value
, slot
);
443 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
446 buildPutByIdList(exec
, baseValue
, structure
, ident
, slot
, *stubInfo
, NotDirect
);
449 void JIT_OPERATION
operationPutByIdDirectStrictBuildList(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
451 VM
* vm
= &exec
->vm();
452 NativeCallFrameTracer
tracer(vm
, exec
);
454 Identifier ident
= Identifier::fromUid(vm
, uid
);
455 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
457 JSValue value
= JSValue::decode(encodedValue
);
458 JSObject
* baseObject
= asObject(JSValue::decode(encodedBase
));
459 PutPropertySlot
slot(baseObject
, true, exec
->codeBlock()->putByIdContext());
461 Structure
* structure
= baseObject
->structure(*vm
);
462 baseObject
->putDirect(*vm
, ident
, value
, slot
);
464 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
467 buildPutByIdList(exec
, baseObject
, structure
, ident
, slot
, *stubInfo
, Direct
);
470 void JIT_OPERATION
operationPutByIdDirectNonStrictBuildList(ExecState
* exec
, StructureStubInfo
* stubInfo
, EncodedJSValue encodedValue
, EncodedJSValue encodedBase
, UniquedStringImpl
* uid
)
472 VM
* vm
= &exec
->vm();
473 NativeCallFrameTracer
tracer(vm
, exec
);
475 Identifier ident
= Identifier::fromUid(vm
, uid
);
476 AccessType accessType
= static_cast<AccessType
>(stubInfo
->accessType
);
478 JSValue value
= JSValue::decode(encodedValue
);
479 JSObject
* baseObject
= asObject(JSValue::decode(encodedBase
));
480 PutPropertySlot
slot(baseObject
, false, exec
->codeBlock()->putByIdContext());
482 Structure
* structure
= baseObject
->structure(*vm
);
483 baseObject
->putDirect(*vm
, ident
, value
, slot
);
485 if (accessType
!= static_cast<AccessType
>(stubInfo
->accessType
))
488 buildPutByIdList(exec
, baseObject
, structure
, ident
, slot
, *stubInfo
, Direct
);
491 void JIT_OPERATION
operationReallocateStorageAndFinishPut(ExecState
* exec
, JSObject
* base
, Structure
* structure
, PropertyOffset offset
, EncodedJSValue value
)
494 NativeCallFrameTracer
tracer(&vm
, exec
);
496 ASSERT(structure
->outOfLineCapacity() > base
->structure(vm
)->outOfLineCapacity());
497 ASSERT(!vm
.heap
.storageAllocator().fastPathShouldSucceed(structure
->outOfLineCapacity() * sizeof(JSValue
)));
498 base
->setStructureAndReallocateStorageIfNecessary(vm
, structure
);
499 base
->putDirect(vm
, offset
, JSValue::decode(value
));
502 static void putByVal(CallFrame
* callFrame
, JSValue baseValue
, JSValue subscript
, JSValue value
, ArrayProfile
* arrayProfile
)
504 VM
& vm
= callFrame
->vm();
505 if (LIKELY(subscript
.isUInt32())) {
506 uint32_t i
= subscript
.asUInt32();
507 if (baseValue
.isObject()) {
508 JSObject
* object
= asObject(baseValue
);
509 if (object
->canSetIndexQuickly(i
))
510 object
->setIndexQuickly(callFrame
->vm(), i
, value
);
512 arrayProfile
->setOutOfBounds();
513 object
->methodTable(vm
)->putByIndex(object
, callFrame
, i
, value
, callFrame
->codeBlock()->isStrictMode());
516 baseValue
.putByIndex(callFrame
, i
, value
, callFrame
->codeBlock()->isStrictMode());
518 auto property
= subscript
.toPropertyKey(callFrame
);
519 if (!callFrame
->vm().exception()) { // Don't put to an object if toString threw an exception.
520 PutPropertySlot
slot(baseValue
, callFrame
->codeBlock()->isStrictMode());
521 baseValue
.put(callFrame
, property
, value
, slot
);
526 static void directPutByVal(CallFrame
* callFrame
, JSObject
* baseObject
, JSValue subscript
, JSValue value
, ArrayProfile
* arrayProfile
)
528 bool isStrictMode
= callFrame
->codeBlock()->isStrictMode();
529 if (LIKELY(subscript
.isUInt32())) {
530 // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
531 uint32_t index
= subscript
.asUInt32();
532 ASSERT(isIndex(index
));
533 if (baseObject
->canSetIndexQuicklyForPutDirect(index
)) {
534 baseObject
->setIndexQuickly(callFrame
->vm(), index
, value
);
538 arrayProfile
->setOutOfBounds();
539 baseObject
->putDirectIndex(callFrame
, index
, value
, 0, isStrictMode
? PutDirectIndexShouldThrow
: PutDirectIndexShouldNotThrow
);
543 if (subscript
.isDouble()) {
544 double subscriptAsDouble
= subscript
.asDouble();
545 uint32_t subscriptAsUInt32
= static_cast<uint32_t>(subscriptAsDouble
);
546 if (subscriptAsDouble
== subscriptAsUInt32
&& isIndex(subscriptAsUInt32
)) {
547 baseObject
->putDirectIndex(callFrame
, subscriptAsUInt32
, value
, 0, isStrictMode
? PutDirectIndexShouldThrow
: PutDirectIndexShouldNotThrow
);
552 // Don't put to an object if toString threw an exception.
553 auto property
= subscript
.toPropertyKey(callFrame
);
554 if (callFrame
->vm().exception())
557 if (Optional
<uint32_t> index
= parseIndex(property
))
558 baseObject
->putDirectIndex(callFrame
, index
.value(), value
, 0, isStrictMode
? PutDirectIndexShouldThrow
: PutDirectIndexShouldNotThrow
);
560 PutPropertySlot
slot(baseObject
, isStrictMode
);
561 baseObject
->putDirect(callFrame
->vm(), property
, value
, slot
);
564 void JIT_OPERATION
operationPutByVal(ExecState
* exec
, EncodedJSValue encodedBaseValue
, EncodedJSValue encodedSubscript
, EncodedJSValue encodedValue
, ArrayProfile
* arrayProfile
)
567 NativeCallFrameTracer
tracer(&vm
, exec
);
569 JSValue baseValue
= JSValue::decode(encodedBaseValue
);
570 JSValue subscript
= JSValue::decode(encodedSubscript
);
571 JSValue value
= JSValue::decode(encodedValue
);
573 if (baseValue
.isObject() && subscript
.isInt32()) {
574 // See if it's worth optimizing at all.
575 JSObject
* object
= asObject(baseValue
);
576 bool didOptimize
= false;
578 unsigned bytecodeOffset
= exec
->locationAsBytecodeOffset();
579 ASSERT(bytecodeOffset
);
580 ByValInfo
& byValInfo
= exec
->codeBlock()->getByValInfo(bytecodeOffset
- 1);
581 ASSERT(!byValInfo
.stubRoutine
);
583 Structure
* structure
= object
->structure(vm
);
584 if (hasOptimizableIndexing(structure
)) {
585 // Attempt to optimize.
586 JITArrayMode arrayMode
= jitArrayModeForStructure(structure
);
587 if (jitArrayModePermitsPut(arrayMode
) && arrayMode
!= byValInfo
.arrayMode
) {
588 CodeBlock
* codeBlock
= exec
->codeBlock();
589 ConcurrentJITLocker
locker(codeBlock
->m_lock
);
590 arrayProfile
->computeUpdatedPrediction(locker
, codeBlock
, structure
);
592 JIT::compilePutByVal(&vm
, exec
->codeBlock(), &byValInfo
, ReturnAddressPtr(OUR_RETURN_ADDRESS
), arrayMode
);
598 // If we take slow path more than 10 times without patching then make sure we
599 // never make that mistake again. Or, if we failed to patch and we have some object
600 // that intercepts indexed get, then don't even wait until 10 times. For cases
601 // where we see non-index-intercepting objects, this gives 10 iterations worth of
602 // opportunity for us to observe that the get_by_val may be polymorphic.
603 if (++byValInfo
.slowPathCount
>= 10
604 || object
->structure(vm
)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
605 // Don't ever try to optimize.
606 ctiPatchCallByReturnAddress(exec
->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS
), FunctionPtr(operationPutByValGeneric
));
611 putByVal(exec
, baseValue
, subscript
, value
, arrayProfile
);
614 void JIT_OPERATION
operationDirectPutByVal(ExecState
* callFrame
, EncodedJSValue encodedBaseValue
, EncodedJSValue encodedSubscript
, EncodedJSValue encodedValue
, ArrayProfile
* arrayProfile
)
616 VM
& vm
= callFrame
->vm();
617 NativeCallFrameTracer
tracer(&vm
, callFrame
);
619 JSValue baseValue
= JSValue::decode(encodedBaseValue
);
620 JSValue subscript
= JSValue::decode(encodedSubscript
);
621 JSValue value
= JSValue::decode(encodedValue
);
622 RELEASE_ASSERT(baseValue
.isObject());
623 JSObject
* object
= asObject(baseValue
);
624 if (subscript
.isInt32()) {
625 // See if it's worth optimizing at all.
626 bool didOptimize
= false;
628 unsigned bytecodeOffset
= callFrame
->locationAsBytecodeOffset();
629 ASSERT(bytecodeOffset
);
630 ByValInfo
& byValInfo
= callFrame
->codeBlock()->getByValInfo(bytecodeOffset
- 1);
631 ASSERT(!byValInfo
.stubRoutine
);
633 Structure
* structure
= object
->structure(vm
);
634 if (hasOptimizableIndexing(structure
)) {
635 // Attempt to optimize.
636 JITArrayMode arrayMode
= jitArrayModeForStructure(structure
);
637 if (jitArrayModePermitsPut(arrayMode
) && arrayMode
!= byValInfo
.arrayMode
) {
638 CodeBlock
* codeBlock
= callFrame
->codeBlock();
639 ConcurrentJITLocker
locker(codeBlock
->m_lock
);
640 arrayProfile
->computeUpdatedPrediction(locker
, codeBlock
, structure
);
642 JIT::compileDirectPutByVal(&vm
, callFrame
->codeBlock(), &byValInfo
, ReturnAddressPtr(OUR_RETURN_ADDRESS
), arrayMode
);
648 // If we take slow path more than 10 times without patching then make sure we
649 // never make that mistake again. Or, if we failed to patch and we have some object
650 // that intercepts indexed get, then don't even wait until 10 times. For cases
651 // where we see non-index-intercepting objects, this gives 10 iterations worth of
652 // opportunity for us to observe that the get_by_val may be polymorphic.
653 if (++byValInfo
.slowPathCount
>= 10
654 || object
->structure(vm
)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
655 // Don't ever try to optimize.
656 ctiPatchCallByReturnAddress(callFrame
->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS
), FunctionPtr(operationDirectPutByValGeneric
));
660 directPutByVal(callFrame
, object
, subscript
, value
, arrayProfile
);
663 void JIT_OPERATION
operationPutByValGeneric(ExecState
* exec
, EncodedJSValue encodedBaseValue
, EncodedJSValue encodedSubscript
, EncodedJSValue encodedValue
, ArrayProfile
* arrayProfile
)
666 NativeCallFrameTracer
tracer(&vm
, exec
);
668 JSValue baseValue
= JSValue::decode(encodedBaseValue
);
669 JSValue subscript
= JSValue::decode(encodedSubscript
);
670 JSValue value
= JSValue::decode(encodedValue
);
672 putByVal(exec
, baseValue
, subscript
, value
, arrayProfile
);
676 void JIT_OPERATION
operationDirectPutByValGeneric(ExecState
* exec
, EncodedJSValue encodedBaseValue
, EncodedJSValue encodedSubscript
, EncodedJSValue encodedValue
, ArrayProfile
* arrayProfile
)
679 NativeCallFrameTracer
tracer(&vm
, exec
);
681 JSValue baseValue
= JSValue::decode(encodedBaseValue
);
682 JSValue subscript
= JSValue::decode(encodedSubscript
);
683 JSValue value
= JSValue::decode(encodedValue
);
684 RELEASE_ASSERT(baseValue
.isObject());
685 directPutByVal(exec
, asObject(baseValue
), subscript
, value
, arrayProfile
);
688 EncodedJSValue JIT_OPERATION
operationCallEval(ExecState
* exec
, ExecState
* execCallee
)
691 ASSERT_UNUSED(exec
, exec
->codeBlock()->codeType() != FunctionCode
692 || !exec
->codeBlock()->needsActivation()
693 || exec
->hasActivation());
695 execCallee
->setCodeBlock(0);
697 if (!isHostFunction(execCallee
->calleeAsValue(), globalFuncEval
))
698 return JSValue::encode(JSValue());
700 VM
* vm
= &execCallee
->vm();
701 JSValue result
= eval(execCallee
);
703 return EncodedJSValue();
705 return JSValue::encode(result
);
708 static void* handleHostCall(ExecState
* execCallee
, JSValue callee
, CodeSpecializationKind kind
)
710 ExecState
* exec
= execCallee
->callerFrame();
711 VM
* vm
= &exec
->vm();
713 execCallee
->setCodeBlock(0);
715 if (kind
== CodeForCall
) {
717 CallType callType
= getCallData(callee
, callData
);
719 ASSERT(callType
!= CallTypeJS
);
721 if (callType
== CallTypeHost
) {
722 NativeCallFrameTracer
tracer(vm
, execCallee
);
723 execCallee
->setCallee(asObject(callee
));
724 vm
->hostCallReturnValue
= JSValue::decode(callData
.native
.function(execCallee
));
726 return vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress();
728 return reinterpret_cast<void*>(getHostCallReturnValue
);
731 ASSERT(callType
== CallTypeNone
);
732 exec
->vm().throwException(exec
, createNotAFunctionError(exec
, callee
));
733 return vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress();
736 ASSERT(kind
== CodeForConstruct
);
738 ConstructData constructData
;
739 ConstructType constructType
= getConstructData(callee
, constructData
);
741 ASSERT(constructType
!= ConstructTypeJS
);
743 if (constructType
== ConstructTypeHost
) {
744 NativeCallFrameTracer
tracer(vm
, execCallee
);
745 execCallee
->setCallee(asObject(callee
));
746 vm
->hostCallReturnValue
= JSValue::decode(constructData
.native
.function(execCallee
));
748 return vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress();
750 return reinterpret_cast<void*>(getHostCallReturnValue
);
753 ASSERT(constructType
== ConstructTypeNone
);
754 exec
->vm().throwException(exec
, createNotAConstructorError(exec
, callee
));
755 return vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress();
758 inline char* linkFor(
759 ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
, CodeSpecializationKind kind
,
760 RegisterPreservationMode registers
)
762 ExecState
* exec
= execCallee
->callerFrame();
763 VM
* vm
= &exec
->vm();
764 NativeCallFrameTracer
tracer(vm
, exec
);
766 JSValue calleeAsValue
= execCallee
->calleeAsValue();
767 JSCell
* calleeAsFunctionCell
= getJSFunction(calleeAsValue
);
768 if (!calleeAsFunctionCell
) {
769 // FIXME: We should cache these kinds of calls. They can be common and currently they are
771 // https://bugs.webkit.org/show_bug.cgi?id=144458
772 return reinterpret_cast<char*>(handleHostCall(execCallee
, calleeAsValue
, kind
));
775 JSFunction
* callee
= jsCast
<JSFunction
*>(calleeAsFunctionCell
);
776 JSScope
* scope
= callee
->scopeUnchecked();
777 ExecutableBase
* executable
= callee
->executable();
779 MacroAssemblerCodePtr codePtr
;
780 CodeBlock
* codeBlock
= 0;
781 if (executable
->isHostFunction())
782 codePtr
= executable
->entrypointFor(*vm
, kind
, MustCheckArity
, registers
);
784 FunctionExecutable
* functionExecutable
= static_cast<FunctionExecutable
*>(executable
);
786 if (!isCall(kind
) && functionExecutable
->isBuiltinFunction()) {
787 exec
->vm().throwException(exec
, createNotAConstructorError(exec
, callee
));
788 return reinterpret_cast<char*>(vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress());
791 JSObject
* error
= functionExecutable
->prepareForExecution(execCallee
, callee
, scope
, kind
);
793 exec
->vm().throwException(exec
, error
);
794 return reinterpret_cast<char*>(vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress());
796 codeBlock
= functionExecutable
->codeBlockFor(kind
);
797 ArityCheckMode arity
;
798 if (execCallee
->argumentCountIncludingThis() < static_cast<size_t>(codeBlock
->numParameters()) || callLinkInfo
->callType() == CallLinkInfo::CallVarargs
|| callLinkInfo
->callType() == CallLinkInfo::ConstructVarargs
)
799 arity
= MustCheckArity
;
801 arity
= ArityCheckNotRequired
;
802 codePtr
= functionExecutable
->entrypointFor(*vm
, kind
, arity
, registers
);
804 if (!callLinkInfo
->seenOnce())
805 callLinkInfo
->setSeen();
807 linkFor(execCallee
, *callLinkInfo
, codeBlock
, callee
, codePtr
, kind
, registers
);
809 return reinterpret_cast<char*>(codePtr
.executableAddress());
812 char* JIT_OPERATION
operationLinkCall(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
814 return linkFor(execCallee
, callLinkInfo
, CodeForCall
, RegisterPreservationNotRequired
);
817 char* JIT_OPERATION
operationLinkConstruct(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
819 return linkFor(execCallee
, callLinkInfo
, CodeForConstruct
, RegisterPreservationNotRequired
);
822 char* JIT_OPERATION
operationLinkCallThatPreservesRegs(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
824 return linkFor(execCallee
, callLinkInfo
, CodeForCall
, MustPreserveRegisters
);
827 char* JIT_OPERATION
operationLinkConstructThatPreservesRegs(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
829 return linkFor(execCallee
, callLinkInfo
, CodeForConstruct
, MustPreserveRegisters
);
// virtualForWithFunction: slow path for a virtual (non-linked) call. Reads
// the callee out of the callee frame, falls back to handleHostCall() for
// callees that are not JSFunction cells, prepares (compiles) the callee if it
// has no JIT code for this specialization yet — throwing via the
// throw-exception CTI stub if a builtin is used as a constructor or if
// prepareForExecution reports an error — and finally returns the machine-code
// entrypoint. The observed callee cell is reported through the
// `calleeAsFunctionCell` out-parameter.
// NOTE(review): this chunk looks like a lossy extraction — braces and at
// least one guard line (an `if (error)` after prepareForExecution,
// presumably) appear to have been dropped; verify control flow against
// upstream WebKit before relying on it.
832 inline char* virtualForWithFunction(
833 ExecState
* execCallee
, CodeSpecializationKind kind
, RegisterPreservationMode registers
,
834 JSCell
*& calleeAsFunctionCell
)
836 ExecState
* exec
= execCallee
->callerFrame();
837 VM
* vm
= &exec
->vm();
838 NativeCallFrameTracer
tracer(vm
, exec
);
// Non-JSFunction callee (host function or non-callable): host-call path.
840 JSValue calleeAsValue
= execCallee
->calleeAsValue();
841 calleeAsFunctionCell
= getJSFunction(calleeAsValue
);
842 if (UNLIKELY(!calleeAsFunctionCell
))
843 return reinterpret_cast<char*>(handleHostCall(execCallee
, calleeAsValue
, kind
));
845 JSFunction
* function
= jsCast
<JSFunction
*>(calleeAsFunctionCell
);
846 JSScope
* scope
= function
->scopeUnchecked();
847 ExecutableBase
* executable
= function
->executable();
// No JIT code for this specialization yet: prepare it now.
848 if (UNLIKELY(!executable
->hasJITCodeFor(kind
))) {
849 FunctionExecutable
* functionExecutable
= static_cast<FunctionExecutable
*>(executable
);
// Builtin functions may not be used as constructors.
851 if (!isCall(kind
) && functionExecutable
->isBuiltinFunction()) {
852 exec
->vm().throwException(exec
, createNotAConstructorError(exec
, function
));
853 return reinterpret_cast<char*>(vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress());
856 JSObject
* error
= functionExecutable
->prepareForExecution(execCallee
, function
, scope
, kind
);
858 exec
->vm().throwException(exec
, error
);
859 return reinterpret_cast<char*>(vm
->getCTIStub(throwExceptionFromCallSlowPathGenerator
).code().executableAddress());
// Virtual calls always enter through the arity-checking entrypoint.
862 return reinterpret_cast<char*>(executable
->entrypointFor(
863 *vm
, kind
, MustCheckArity
, registers
).executableAddress());
866 inline char* virtualFor(
867 ExecState
* execCallee
, CodeSpecializationKind kind
, RegisterPreservationMode registers
)
869 JSCell
* calleeAsFunctionCellIgnored
;
870 return virtualForWithFunction(execCallee
, kind
, registers
, calleeAsFunctionCellIgnored
);
// operationLinkPolymorphicCall: virtual-call slow path that additionally
// records the observed callee in the CallLinkInfo (via linkPolymorphicCall)
// so the call site can be upgraded to a polymorphic call stub.
// NOTE(review): the trailing `return result;` (and the braces) appear to
// have been dropped by extraction — the function must return the entrypoint
// computed by virtualForWithFunction(); confirm against upstream WebKit.
873 char* JIT_OPERATION
operationLinkPolymorphicCall(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
875 JSCell
* calleeAsFunctionCell
;
// Resolve the entrypoint and capture the callee cell for linking below.
876 char* result
= virtualForWithFunction(execCallee
, CodeForCall
, RegisterPreservationNotRequired
, calleeAsFunctionCell
);
// Record the observed callee variant at this call site.
878 linkPolymorphicCall(execCallee
, *callLinkInfo
, CallVariant(calleeAsFunctionCell
), RegisterPreservationNotRequired
);
883 char* JIT_OPERATION
operationVirtualCall(ExecState
* execCallee
, CallLinkInfo
*)
885 return virtualFor(execCallee
, CodeForCall
, RegisterPreservationNotRequired
);
888 char* JIT_OPERATION
operationVirtualConstruct(ExecState
* execCallee
, CallLinkInfo
*)
890 return virtualFor(execCallee
, CodeForConstruct
, RegisterPreservationNotRequired
);
// operationLinkPolymorphicCallThatPreservesRegs: same as
// operationLinkPolymorphicCall, but for call sites that require caller
// registers to be preserved.
// NOTE(review): the trailing `return result;` (and the braces) appear to
// have been dropped by extraction; confirm against upstream WebKit.
893 char* JIT_OPERATION
operationLinkPolymorphicCallThatPreservesRegs(ExecState
* execCallee
, CallLinkInfo
* callLinkInfo
)
895 JSCell
* calleeAsFunctionCell
;
// Resolve the entrypoint and capture the callee cell for linking below.
896 char* result
= virtualForWithFunction(execCallee
, CodeForCall
, MustPreserveRegisters
, calleeAsFunctionCell
);
// Record the observed callee variant at this call site.
898 linkPolymorphicCall(execCallee
, *callLinkInfo
, CallVariant(calleeAsFunctionCell
), MustPreserveRegisters
);
903 char* JIT_OPERATION
operationVirtualCallThatPreservesRegs(ExecState
* execCallee
, CallLinkInfo
*)
905 return virtualFor(execCallee
, CodeForCall
, MustPreserveRegisters
);
908 char* JIT_OPERATION
operationVirtualConstructThatPreservesRegs(ExecState
* execCallee
, CallLinkInfo
*)
910 return virtualFor(execCallee
, CodeForConstruct
, MustPreserveRegisters
);
913 size_t JIT_OPERATION
operationCompareLess(ExecState
* exec
, EncodedJSValue encodedOp1
, EncodedJSValue encodedOp2
)
915 VM
* vm
= &exec
->vm();
916 NativeCallFrameTracer
tracer(vm
, exec
);
918 return jsLess
<true>(exec
, JSValue::decode(encodedOp1
), JSValue::decode(encodedOp2
));
921 size_t JIT_OPERATION
operationCompareLessEq(ExecState
* exec
, EncodedJSValue encodedOp1
, EncodedJSValue encodedOp2
)
923 VM
* vm
= &exec
->vm();
924 NativeCallFrameTracer
tracer(vm
, exec
);
926 return jsLessEq
<true>(exec
, JSValue::decode(encodedOp1
), JSValue::decode(encodedOp2
));
929 size_t JIT_OPERATION
operationCompareGreater(ExecState
* exec
, EncodedJSValue encodedOp1
, EncodedJSValue encodedOp2
)
931 VM
* vm
= &exec
->vm();
932 NativeCallFrameTracer
tracer(vm
, exec
);
934 return jsLess
<false>(exec
, JSValue::decode(encodedOp2
), JSValue::decode(encodedOp1
));
937 size_t JIT_OPERATION
operationCompareGreaterEq(ExecState
* exec
, EncodedJSValue encodedOp1
, EncodedJSValue encodedOp2
)
939 VM
* vm
= &exec
->vm();
940 NativeCallFrameTracer
tracer(vm
, exec
);
942 return jsLessEq
<false>(exec
, JSValue::decode(encodedOp2
), JSValue::decode(encodedOp1
));
945 size_t JIT_OPERATION
operationConvertJSValueToBoolean(ExecState
* exec
, EncodedJSValue encodedOp
)
947 VM
* vm
= &exec
->vm();
948 NativeCallFrameTracer
tracer(vm
, exec
);
950 return JSValue::decode(encodedOp
).toBoolean(exec
);
953 size_t JIT_OPERATION
operationCompareEq(ExecState
* exec
, EncodedJSValue encodedOp1
, EncodedJSValue encodedOp2
)
955 VM
* vm
= &exec
->vm();
956 NativeCallFrameTracer
tracer(vm
, exec
);
958 return JSValue::equalSlowCaseInline(exec
, JSValue::decode(encodedOp1
), JSValue::decode(encodedOp2
));
// operationCompareStringEq: JIT slow path comparing two JSString cells for
// content equality by resolving both strings (value(exec) may resolve ropes)
// and comparing the underlying StringImpls.
// NOTE(review): two signatures appear back-to-back; in upstream WebKit they
// are alternatives guarded by #if USE(JSVALUE64) (EncodedJSValue return) and
// #else (size_t return). The preprocessor lines — and the 32-bit
// `return result;` variant — appear to have been dropped by extraction;
// verify before editing.
962 EncodedJSValue JIT_OPERATION
operationCompareStringEq(ExecState
* exec
, JSCell
* left
, JSCell
* right
)
964 size_t JIT_OPERATION
operationCompareStringEq(ExecState
* exec
, JSCell
* left
, JSCell
* right
)
967 VM
* vm
= &exec
->vm();
968 NativeCallFrameTracer
tracer(vm
, exec
);
970 bool result
= WTF::equal(*asString(left
)->value(exec
).impl(), *asString(right
)->value(exec
).impl());
972 return JSValue::encode(jsBoolean(result
));
// operationHasProperty: JIT slow path for a property-existence check —
// converts the JSString key to an Identifier and asks the object whether the
// property exists (hasProperty consults the prototype chain).
// NOTE(review): the return statement (presumably `return result;`) and the
// closing brace appear to have been dropped by extraction; verify against
// upstream WebKit.
978 size_t JIT_OPERATION
operationHasProperty(ExecState
* exec
, JSObject
* base
, JSString
* property
)
980 int result
= base
->hasProperty(exec
, property
->toIdentifier(exec
));
985 EncodedJSValue JIT_OPERATION
operationNewArrayWithProfile(ExecState
* exec
, ArrayAllocationProfile
* profile
, const JSValue
* values
, int size
)
987 VM
* vm
= &exec
->vm();
988 NativeCallFrameTracer
tracer(vm
, exec
);
989 return JSValue::encode(constructArrayNegativeIndexed(exec
, profile
, values
, size
));
992 EncodedJSValue JIT_OPERATION
operationNewArrayBufferWithProfile(ExecState
* exec
, ArrayAllocationProfile
* profile
, const JSValue
* values
, int size
)
994 VM
* vm
= &exec
->vm();
995 NativeCallFrameTracer
tracer(vm
, exec
);
996 return JSValue::encode(constructArray(exec
, profile
, values
, size
));
999 EncodedJSValue JIT_OPERATION
operationNewArrayWithSizeAndProfile(ExecState
* exec
, ArrayAllocationProfile
* profile
, EncodedJSValue size
)
1001 VM
* vm
= &exec
->vm();
1002 NativeCallFrameTracer
tracer(vm
, exec
);
1003 JSValue sizeValue
= JSValue::decode(size
);
1004 return JSValue::encode(constructArrayWithSizeQuirk(exec
, profile
, exec
->lexicalGlobalObject(), sizeValue
));
1007 EncodedJSValue JIT_OPERATION
operationNewFunction(ExecState
* exec
, JSScope
* scope
, JSCell
* functionExecutable
)
1009 ASSERT(functionExecutable
->inherits(FunctionExecutable::info()));
1010 VM
& vm
= exec
->vm();
1011 NativeCallFrameTracer
tracer(&vm
, exec
);
1012 return JSValue::encode(JSFunction::create(vm
, static_cast<FunctionExecutable
*>(functionExecutable
), scope
));
1015 EncodedJSValue JIT_OPERATION
operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState
* exec
, JSScope
* scope
, JSCell
* functionExecutable
)
1017 ASSERT(functionExecutable
->inherits(FunctionExecutable::info()));
1018 VM
& vm
= exec
->vm();
1019 NativeCallFrameTracer
tracer(&vm
, exec
);
1020 return JSValue::encode(JSFunction::createWithInvalidatedReallocationWatchpoint(vm
, static_cast<FunctionExecutable
*>(functionExecutable
), scope
));
1023 JSCell
* JIT_OPERATION
operationNewObject(ExecState
* exec
, Structure
* structure
)
1025 VM
* vm
= &exec
->vm();
1026 NativeCallFrameTracer
tracer(vm
, exec
);
1028 return constructEmptyObject(exec
, structure
);
1031 EncodedJSValue JIT_OPERATION
operationNewRegexp(ExecState
* exec
, void* regexpPtr
)
1033 VM
& vm
= exec
->vm();
1034 NativeCallFrameTracer
tracer(&vm
, exec
);
1035 RegExp
* regexp
= static_cast<RegExp
*>(regexpPtr
);
1036 if (!regexp
->isValid()) {
1037 vm
.throwException(exec
, createSyntaxError(exec
, ASCIILiteral("Invalid flags supplied to RegExp constructor.")));
1038 return JSValue::encode(jsUndefined());
1041 return JSValue::encode(RegExpObject::create(vm
, exec
->lexicalGlobalObject()->regExpStructure(), regexp
));
1044 void JIT_OPERATION
operationHandleWatchdogTimer(ExecState
* exec
)
1046 VM
& vm
= exec
->vm();
1047 NativeCallFrameTracer
tracer(&vm
, exec
);
1049 if (UNLIKELY(vm
.watchdog
&& vm
.watchdog
->didFire(exec
)))
1050 vm
.throwException(exec
, createTerminatedExecutionException(&vm
));
// operationThrowStaticError: throws an error detected statically at compile
// time. The encoded value carries the error-message string;
// referenceErrorFlag selects a ReferenceError, otherwise a TypeError.
// NOTE(review): the `else` between the two throwException calls (upstream
// has an if/else here) appears to have been dropped by extraction — as the
// text reads, both throws would run; verify against upstream WebKit.
1053 void JIT_OPERATION
operationThrowStaticError(ExecState
* exec
, EncodedJSValue encodedValue
, int32_t referenceErrorFlag
)
1055 VM
& vm
= exec
->vm();
1056 NativeCallFrameTracer
tracer(&vm
, exec
);
// The message is always passed as a string; enforce that in release builds.
1057 JSValue errorMessageValue
= JSValue::decode(encodedValue
);
1058 RELEASE_ASSERT(errorMessageValue
.isString());
1059 String errorMessage
= asString(errorMessageValue
)->value(exec
);
1060 if (referenceErrorFlag
)
1061 vm
.throwException(exec
, createReferenceError(exec
, errorMessage
));
1063 vm
.throwException(exec
, createTypeError(exec
, errorMessage
));
1066 void JIT_OPERATION
operationDebug(ExecState
* exec
, int32_t debugHookID
)
1068 VM
& vm
= exec
->vm();
1069 NativeCallFrameTracer
tracer(&vm
, exec
);
1071 vm
.interpreter
->debug(exec
, static_cast<DebugHookID
>(debugHookID
));
1075 static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock
* codeBlock
)
1077 codeBlock
->updateAllPredictions();
1078 codeBlock
->optimizeAfterWarmUp();
// operationOptimize: the Baseline->DFG tier-up decision point. Called from
// baseline JIT code when an execution-count trigger fires (bytecodeIndex != 0
// means a loop/OSR trigger). Decides whether to delay, start an asynchronous
// DFG compile, reoptimize an existing replacement, or perform OSR entry into
// already-compiled optimized code. Returns an encoded pair: (entry thunk
// address or 0, OSR data buffer or 0); (0, 0) means "keep running baseline".
// NOTE(review): this chunk looks like a lossy extraction — braces, `else`
// branches, `dataLog(` openers, and what are likely #if ENABLE(DFG_JIT)-style
// guards appear to be missing; verify control flow against upstream WebKit
// before relying on it.
1081 SlowPathReturnType JIT_OPERATION
operationOptimize(ExecState
* exec
, int32_t bytecodeIndex
)
1083 VM
& vm
= exec
->vm();
1084 NativeCallFrameTracer
tracer(&vm
, exec
);
1086 // Defer GC for a while so that it doesn't run between when we enter into this
1087 // slow path and when we figure out the state of our code block. This prevents
1088 // a number of awkward reentrancy scenarios, including:
1090 // - The optimized version of our code block being jettisoned by GC right after
1091 // we concluded that we wanted to use it, but have not planted it into the JS
1094 // - An optimized version of our code block being installed just as we decided
1095 // that it wasn't ready yet.
1097 // Note that jettisoning won't happen if we already initiated OSR, because in
1098 // that case we would have already planted the optimized code block into the JS
1100 DeferGCForAWhile
deferGC(vm
.heap
);
// Tier-up only ever starts from baseline code.
1102 CodeBlock
* codeBlock
= exec
->codeBlock();
1103 if (codeBlock
->jitType() != JITCode::BaselineJIT
) {
1104 dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock
, "\n");
1105 RELEASE_ASSERT_NOT_REACHED();
1108 if (bytecodeIndex
) {
1109 // If we're attempting to OSR from a loop, assume that this should be
1110 // separately optimized.
1111 codeBlock
->m_shouldAlwaysBeInlined
= false;
1114 if (Options::verboseOSR()) {
1116 *codeBlock
, ": Entered optimize with bytecodeIndex = ", bytecodeIndex
,
1117 ", executeCounter = ", codeBlock
->jitExecuteCounter(),
1118 ", optimizationDelayCounter = ", codeBlock
->reoptimizationRetryCounter(),
1119 ", exitCounter = ");
1120 if (codeBlock
->hasOptimizedReplacement())
1121 dataLog(codeBlock
->replacement()->osrExitCounter());
// Early-outs: threshold not reached, profiler active, debugger active, or
// this block should always be inlined — in each case keep running baseline.
1127 if (!codeBlock
->checkIfOptimizationThresholdReached()) {
1128 codeBlock
->updateAllPredictions();
1129 if (Options::verboseOSR())
1130 dataLog("Choosing not to optimize ", *codeBlock
, " yet, because the threshold hasn't been reached.\n");
1131 return encodeResult(0, 0);
1134 if (vm
.enabledProfiler()) {
1135 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock
);
1136 return encodeResult(0, 0);
1139 Debugger
* debugger
= codeBlock
->globalObject()->debugger();
1140 if (debugger
&& (debugger
->isStepping() || codeBlock
->baselineAlternative()->hasDebuggerRequests())) {
1141 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock
);
1142 return encodeResult(0, 0);
1145 if (codeBlock
->m_shouldAlwaysBeInlined
) {
1146 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock
);
1147 if (Options::verboseOSR())
1148 dataLog("Choosing not to optimize ", *codeBlock
, " yet, because m_shouldAlwaysBeInlined == true.\n");
1149 return encodeResult(0, 0);
1152 // We cannot be in the process of asynchronous compilation and also have an optimized
1154 DFG::Worklist
* worklist
= DFG::existingGlobalDFGWorklistOrNull();
1157 || !(worklist
->compilationState(DFG::CompilationKey(codeBlock
, DFG::DFGMode
)) != DFG::Worklist::NotKnown
1158 && codeBlock
->hasOptimizedReplacement()));
1160 DFG::Worklist::State worklistState
;
1162 // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1163 // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1164 // what the result was so that we don't plow ahead and attempt OSR or immediate
1165 // reoptimization. This will have already also set the appropriate JIT execution
1166 // count threshold depending on what happened, so if the compilation was anything
1167 // but successful we just want to return early. See the case for worklistState ==
1168 // DFG::Worklist::Compiled, below.
1170 // Note that we could have alternatively just called Worklist::compilationState()
1171 // here, and if it returned Compiled, we could have then called
1172 // completeAndScheduleOSR() below. But that would have meant that it could take
1173 // longer for code blocks to be completed: they would only complete when *their*
1174 // execution count trigger fired; but that could take a while since the firing is
1175 // racy. It could also mean that code blocks that never run again after being
1176 // compiled would sit on the worklist until next GC. That's fine, but it's
1177 // probably a waste of memory. Our goal here is to complete code blocks as soon as
1178 // possible in order to minimize the chances of us executing baseline code after
1179 // optimized code is already available.
1180 worklistState
= worklist
->completeAllReadyPlansForVM(
1181 vm
, DFG::CompilationKey(codeBlock
, DFG::DFGMode
));
1183 worklistState
= DFG::Worklist::NotKnown
;
1185 if (worklistState
== DFG::Worklist::Compiling
) {
1186 // We cannot be in the process of asynchronous compilation and also have an optimized
1188 RELEASE_ASSERT(!codeBlock
->hasOptimizedReplacement());
1189 codeBlock
->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred
);
1190 return encodeResult(0, 0);
1193 if (worklistState
== DFG::Worklist::Compiled
) {
1194 // If we don't have an optimized replacement but we did just get compiled, then
1195 // the compilation failed or was invalidated, in which case the execution count
1196 // thresholds have already been set appropriately by
1197 // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1198 // nothing left to do.
1199 if (!codeBlock
->hasOptimizedReplacement()) {
1200 codeBlock
->updateAllPredictions();
1201 if (Options::verboseOSR())
1202 dataLog("Code block ", *codeBlock
, " was compiled but it doesn't have an optimized replacement.\n");
1203 return encodeResult(0, 0);
1205 } else if (codeBlock
->hasOptimizedReplacement()) {
1206 if (Options::verboseOSR())
1207 dataLog("Considering OSR ", *codeBlock
, " -> ", *codeBlock
->replacement(), ".\n");
1208 // If we have an optimized replacement, then it must be the case that we entered
1209 // cti_optimize from a loop. That's because if there's an optimized replacement,
1210 // then all calls to this function will be relinked to the replacement and so
1211 // the prologue OSR will never fire.
1213 // This is an interesting threshold check. Consider that a function OSR exits
1214 // in the middle of a loop, while having a relatively low exit count. The exit
1215 // will reset the execution counter to some target threshold, meaning that this
1216 // code won't be reached until that loop heats up for >=1000 executions. But then
1217 // we do a second check here, to see if we should either reoptimize, or just
1218 // attempt OSR entry. Hence it might even be correct for
1219 // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1220 // additional checking anyway, to reduce the amount of recompilation thrashing.
1221 if (codeBlock
->replacement()->shouldReoptimizeFromLoopNow()) {
1222 if (Options::verboseOSR()) {
1224 "Triggering reoptimization of ", *codeBlock
,
1225 "(", *codeBlock
->replacement(), ") (in loop).\n");
1227 codeBlock
->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger
, CountReoptimization
);
1228 return encodeResult(0, 0);
1231 if (!codeBlock
->shouldOptimizeNow()) {
1232 if (Options::verboseOSR()) {
1234 "Delaying optimization for ", *codeBlock
,
1235 " because of insufficient profiling.\n");
1237 return encodeResult(0, 0);
1240 if (Options::verboseOSR())
1241 dataLog("Triggering optimized compilation of ", *codeBlock
, "\n");
// Capture the live values the DFG must handle for OSR entry at this point.
1243 unsigned numVarsWithValues
;
1245 numVarsWithValues
= codeBlock
->m_numVars
;
1247 numVarsWithValues
= 0;
1248 Operands
<JSValue
> mustHandleValues(codeBlock
->numParameters(), numVarsWithValues
);
1249 for (size_t i
= 0; i
< mustHandleValues
.size(); ++i
) {
1250 int operand
= mustHandleValues
.operandForIndex(i
);
1251 mustHandleValues
[i
] = exec
->uncheckedR(operand
).jsValue();
// Kick off the DFG compile (asynchronously via the deferred callback).
1254 RefPtr
<CodeBlock
> replacementCodeBlock
= codeBlock
->newReplacement();
1255 CompilationResult result
= DFG::compile(
1256 vm
, replacementCodeBlock
.get(), 0, DFG::DFGMode
, bytecodeIndex
,
1257 mustHandleValues
, JITToDFGDeferredCompilationCallback::create());
1259 if (result
!= CompilationSuccessful
) {
1260 ASSERT(result
== CompilationDeferred
|| replacementCodeBlock
->hasOneRef());
1261 return encodeResult(0, 0);
// Compilation succeeded synchronously: attempt OSR entry right away.
1265 CodeBlock
* optimizedCodeBlock
= codeBlock
->replacement();
1266 ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock
->jitType()));
1268 if (void* dataBuffer
= DFG::prepareOSREntry(exec
, optimizedCodeBlock
, bytecodeIndex
)) {
1269 if (Options::verboseOSR()) {
1271 "Performing OSR ", *codeBlock
, " -> ", *optimizedCodeBlock
, ".\n");
1274 codeBlock
->optimizeSoon();
1275 return encodeResult(vm
.getCTIStub(DFG::osrEntryThunkGenerator
).code().executableAddress(), dataBuffer
);
1278 if (Options::verboseOSR()) {
1280 "Optimizing ", *codeBlock
, " -> ", *codeBlock
->replacement(),
1281 " succeeded, OSR failed, after a delay of ",
1282 codeBlock
->optimizationDelayCounter(), ".\n");
1285 // Count the OSR failure as a speculation failure. If this happens a lot, then
1287 optimizedCodeBlock
->countOSRExit();
1289 // We are a lot more conservative about triggering reoptimization after OSR failure than
1290 // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1291 // already, then we really would like to reoptimize immediately. But this case covers
1292 // something else: there weren't many (or any) speculation failures before, but we just
1293 // failed to enter the speculative code because some variable had the wrong value or
1294 // because the OSR code decided for any spurious reason that it did not want to OSR
1295 // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
1296 // reoptimization trigger.
1297 if (optimizedCodeBlock
->shouldReoptimizeNow()) {
1298 if (Options::verboseOSR()) {
1300 "Triggering reoptimization of ", *codeBlock
, " -> ",
1301 *codeBlock
->replacement(), " (after OSR fail).\n");
1303 optimizedCodeBlock
->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail
, CountReoptimization
);
1304 return encodeResult(0, 0);
1307 // OSR failed this time, but it might succeed next time! Let the code run a bit
1308 // longer and then try again.
1309 codeBlock
->optimizeAfterWarmUp();
1311 return encodeResult(0, 0);
1315 void JIT_OPERATION
operationPutByIndex(ExecState
* exec
, EncodedJSValue encodedArrayValue
, int32_t index
, EncodedJSValue encodedValue
)
1317 VM
& vm
= exec
->vm();
1318 NativeCallFrameTracer
tracer(&vm
, exec
);
1320 JSValue arrayValue
= JSValue::decode(encodedArrayValue
);
1321 ASSERT(isJSArray(arrayValue
));
1322 asArray(arrayValue
)->putDirectIndex(exec
, index
, JSValue::decode(encodedValue
));
1326 void JIT_OPERATION
operationPutGetterById(ExecState
* exec
, EncodedJSValue encodedObjectValue
, Identifier
* identifier
, EncodedJSValue encodedGetterValue
)
1328 VM
& vm
= exec
->vm();
1329 NativeCallFrameTracer
tracer(&vm
, exec
);
1331 ASSERT(JSValue::decode(encodedObjectValue
).isObject());
1332 JSObject
* baseObj
= asObject(JSValue::decode(encodedObjectValue
));
1334 JSValue getter
= JSValue::decode(encodedGetterValue
);
1335 ASSERT(getter
.isObject());
1336 baseObj
->putGetter(exec
, *identifier
, asObject(getter
));
1339 void JIT_OPERATION
operationPutSetterById(ExecState
* exec
, EncodedJSValue encodedObjectValue
, Identifier
* identifier
, EncodedJSValue encodedSetterValue
)
1341 VM
& vm
= exec
->vm();
1342 NativeCallFrameTracer
tracer(&vm
, exec
);
1344 ASSERT(JSValue::decode(encodedObjectValue
).isObject());
1345 JSObject
* baseObj
= asObject(JSValue::decode(encodedObjectValue
));
1347 JSValue setter
= JSValue::decode(encodedSetterValue
);
1348 ASSERT(setter
.isObject());
1349 baseObj
->putSetter(exec
, *identifier
, asObject(setter
));
1352 void JIT_OPERATION
operationPutGetterSetter(ExecState
* exec
, EncodedJSValue encodedObjectValue
, Identifier
* identifier
, EncodedJSValue encodedGetterValue
, EncodedJSValue encodedSetterValue
)
1354 VM
& vm
= exec
->vm();
1355 NativeCallFrameTracer
tracer(&vm
, exec
);
1357 ASSERT(JSValue::decode(encodedObjectValue
).isObject());
1358 JSObject
* baseObj
= asObject(JSValue::decode(encodedObjectValue
));
1360 GetterSetter
* accessor
= GetterSetter::create(vm
, exec
->lexicalGlobalObject());
1362 JSValue getter
= JSValue::decode(encodedGetterValue
);
1363 JSValue setter
= JSValue::decode(encodedSetterValue
);
1364 ASSERT(getter
.isObject() || getter
.isUndefined());
1365 ASSERT(setter
.isObject() || setter
.isUndefined());
1366 ASSERT(getter
.isObject() || setter
.isObject());
1368 if (!getter
.isUndefined())
1369 accessor
->setGetter(vm
, exec
->lexicalGlobalObject(), asObject(getter
));
1370 if (!setter
.isUndefined())
1371 accessor
->setSetter(vm
, exec
->lexicalGlobalObject(), asObject(setter
));
1372 baseObj
->putDirectAccessor(exec
, *identifier
, accessor
, Accessor
);
1375 void JIT_OPERATION
operationPutGetterById(ExecState
* exec
, JSCell
* object
, Identifier
* identifier
, JSCell
* getter
)
1377 VM
& vm
= exec
->vm();
1378 NativeCallFrameTracer
tracer(&vm
, exec
);
1380 ASSERT(object
&& object
->isObject());
1381 JSObject
* baseObj
= object
->getObject();
1383 ASSERT(getter
->isObject());
1384 baseObj
->putGetter(exec
, *identifier
, getter
);
1387 void JIT_OPERATION
operationPutSetterById(ExecState
* exec
, JSCell
* object
, Identifier
* identifier
, JSCell
* setter
)
1389 VM
& vm
= exec
->vm();
1390 NativeCallFrameTracer
tracer(&vm
, exec
);
1392 ASSERT(object
&& object
->isObject());
1393 JSObject
* baseObj
= object
->getObject();
1395 ASSERT(setter
->isObject());
1396 baseObj
->putSetter(exec
, *identifier
, setter
);
// operationPutGetterSetter (JSCell* variant): installs a combined
// getter/setter accessor for `identifier` on the base object. The asserts
// show that either getter or setter may be null, but not both.
// NOTE(review): the guards before the two accessor stores (likely
// `if (getter)` / `if (setter)` on the dropped lines 1413/1415) appear to
// have been lost in extraction — as the text reads, a null getter/setter
// would be dereferenced; verify against upstream WebKit.
1399 void JIT_OPERATION
operationPutGetterSetter(ExecState
* exec
, JSCell
* object
, Identifier
* identifier
, JSCell
* getter
, JSCell
* setter
)
1401 VM
& vm
= exec
->vm();
1402 NativeCallFrameTracer
tracer(&vm
, exec
);
1404 ASSERT(object
&& object
->isObject());
1405 JSObject
* baseObj
= object
->getObject();
1407 GetterSetter
* accessor
= GetterSetter::create(vm
, exec
->lexicalGlobalObject());
1409 ASSERT(!getter
|| getter
->isObject());
1410 ASSERT(!setter
|| setter
->isObject());
1411 ASSERT(getter
|| setter
);
1414 accessor
->setGetter(vm
, exec
->lexicalGlobalObject(), getter
->getObject());
1416 accessor
->setSetter(vm
, exec
->lexicalGlobalObject(), setter
->getObject());
1417 baseObj
->putDirectAccessor(exec
, *identifier
, accessor
, Accessor
);
1421 void JIT_OPERATION
operationPushCatchScope(ExecState
* exec
, int32_t dst
, SymbolTable
* symbolTable
, EncodedJSValue encodedValue
)
1423 pushNameScope
<JSCatchScope
>(exec
, dst
, symbolTable
, encodedValue
);
1426 void JIT_OPERATION
operationPushFunctionNameScope(ExecState
* exec
, int32_t dst
, SymbolTable
* symbolTable
, EncodedJSValue encodedValue
)
1428 pushNameScope
<JSFunctionNameScope
>(exec
, dst
, symbolTable
, encodedValue
);
// operationPushWithScope: implements `with (expr)` — coerces the operand to
// an object and pushes a JSWithScope wrapping it onto the scope chain held
// in register `dst`.
// NOTE(review): lines 1437-1439 are missing from this extraction — upstream
// presumably checks for an exception from toObject() and bails before using
// `o`; verify against upstream WebKit.
1431 void JIT_OPERATION
operationPushWithScope(ExecState
* exec
, int32_t dst
, EncodedJSValue encodedValue
)
1433 VM
& vm
= exec
->vm();
1434 NativeCallFrameTracer
tracer(&vm
, exec
);
// ToObject may throw (e.g. for null/undefined).
1436 JSObject
* o
= JSValue::decode(encodedValue
).toObject(exec
);
1440 // FIXME: This won't work if this operation is called from the DFG or FTL.
1441 // This should be changed to pass in the old scope and return the new scope.
1442 JSScope
* currentScope
= exec
->uncheckedR(dst
).Register::scope();
1443 exec
->uncheckedR(dst
) = JSWithScope::create(exec
, o
, currentScope
);
1446 void JIT_OPERATION
operationPopScope(ExecState
* exec
, int32_t scopeReg
)
1448 VM
& vm
= exec
->vm();
1449 NativeCallFrameTracer
tracer(&vm
, exec
);
1451 JSScope
* scope
= exec
->uncheckedR(scopeReg
).Register::scope();
1452 exec
->uncheckedR(scopeReg
) = scope
->next();
1455 void JIT_OPERATION
operationProfileDidCall(ExecState
* exec
, EncodedJSValue encodedValue
)
1457 VM
& vm
= exec
->vm();
1458 NativeCallFrameTracer
tracer(&vm
, exec
);
1460 if (LegacyProfiler
* profiler
= vm
.enabledProfiler())
1461 profiler
->didExecute(exec
, JSValue::decode(encodedValue
));
1464 void JIT_OPERATION
operationProfileWillCall(ExecState
* exec
, EncodedJSValue encodedValue
)
1466 VM
& vm
= exec
->vm();
1467 NativeCallFrameTracer
tracer(&vm
, exec
);
1469 if (LegacyProfiler
* profiler
= vm
.enabledProfiler())
1470 profiler
->willExecute(exec
, JSValue::decode(encodedValue
));
1473 EncodedJSValue JIT_OPERATION
operationCheckHasInstance(ExecState
* exec
, EncodedJSValue encodedValue
, EncodedJSValue encodedBaseVal
)
1475 VM
& vm
= exec
->vm();
1476 NativeCallFrameTracer
tracer(&vm
, exec
);
1478 JSValue value
= JSValue::decode(encodedValue
);
1479 JSValue baseVal
= JSValue::decode(encodedBaseVal
);
1481 if (baseVal
.isObject()) {
1482 JSObject
* baseObject
= asObject(baseVal
);
1483 ASSERT(!baseObject
->structure(vm
)->typeInfo().implementsDefaultHasInstance());
1484 if (baseObject
->structure(vm
)->typeInfo().implementsHasInstance()) {
1485 bool result
= baseObject
->methodTable(vm
)->customHasInstance(baseObject
, exec
, value
);
1486 return JSValue::encode(jsBoolean(result
));
1490 vm
.throwException(exec
, createInvalidInstanceofParameterError(exec
, baseVal
));
1491 return JSValue::encode(JSValue());
1494 JSCell
* JIT_OPERATION
operationCreateActivation(ExecState
* exec
, JSScope
* currentScope
)
1496 VM
& vm
= exec
->vm();
1497 NativeCallFrameTracer
tracer(&vm
, exec
);
1498 JSLexicalEnvironment
* lexicalEnvironment
= JSLexicalEnvironment::create(vm
, exec
, currentScope
, exec
->codeBlock());
1499 return lexicalEnvironment
;
// canAccessArgumentIndexQuickly: returns whether an indexed access on an
// arguments object (direct or scoped) can take the DFG's fast path for the
// given index. Used by getByVal to decide whether to record an out-of-bounds
// observation in the array profile.
// NOTE(review): the extraction appears to have dropped the `return true;`
// lines after each canAccessArgumentIndexQuicklyInDFG check, the `break;`
// statements, the `default:` case, and the final `return false;` — verify
// against upstream WebKit.
1504 static bool canAccessArgumentIndexQuickly(JSObject
& object
, uint32_t index
)
1506 switch (object
.structure()->typeInfo().type()) {
1507 case DirectArgumentsType
: {
1508 DirectArguments
* directArguments
= jsCast
<DirectArguments
*>(&object
);
1509 if (directArguments
->canAccessArgumentIndexQuicklyInDFG(index
))
1513 case ScopedArgumentsType
: {
1514 ScopedArguments
* scopedArguments
= jsCast
<ScopedArguments
*>(&object
);
1515 if (scopedArguments
->canAccessArgumentIndexQuicklyInDFG(index
))
// getByVal: shared implementation for the get_by_val slow paths. Tries, in
// order: the fast own-property lookup for string subscripts on cells; the
// indexed fast paths for uint32 subscripts (string character access — which
// also repatches the call site to operationGetByValString — and quick object
// indexing); and finally the generic get, converting the subscript to a
// property key. Records out-of-bounds observations in the array profile so
// later compiles can specialize.
// NOTE(review): this extraction appears to have dropped some lines — e.g.
// the `return result;` inside the fastGetOwnProperty branch and several
// closing braces; verify control flow against upstream WebKit.
1525 static JSValue
getByVal(ExecState
* exec
, JSValue baseValue
, JSValue subscript
, ArrayProfile
* arrayProfile
, ReturnAddressPtr returnAddress
)
// Fast path: string subscript on a cell with fast own-property storage.
1527 if (LIKELY(baseValue
.isCell() && subscript
.isString())) {
1528 VM
& vm
= exec
->vm();
1529 Structure
& structure
= *baseValue
.asCell()->structure(vm
);
1530 if (JSCell::canUseFastGetOwnProperty(structure
)) {
1531 if (RefPtr
<AtomicStringImpl
> existingAtomicString
= asString(subscript
)->toExistingAtomicString(exec
)) {
1532 if (JSValue result
= baseValue
.asCell()->fastGetOwnProperty(vm
, structure
, existingAtomicString
.get()))
// Indexed fast paths for uint32 subscripts.
1538 if (subscript
.isUInt32()) {
1539 uint32_t i
= subscript
.asUInt32();
1540 if (isJSString(baseValue
)) {
1541 if (asString(baseValue
)->canGetIndex(i
)) {
// Re-point the call site at the string-specialized operation.
1542 ctiPatchCallByReturnAddress(exec
->codeBlock(), returnAddress
, FunctionPtr(operationGetByValString
));
1543 return asString(baseValue
)->getIndex(exec
, i
);
1545 arrayProfile
->setOutOfBounds();
1546 } else if (baseValue
.isObject()) {
1547 JSObject
* object
= asObject(baseValue
);
1548 if (object
->canGetIndexQuickly(i
))
1549 return object
->getIndexQuickly(i
);
1551 if (!canAccessArgumentIndexQuickly(*object
, i
))
1552 arrayProfile
->setOutOfBounds();
1555 return baseValue
.get(exec
, i
);
// Generic path: coerce the subscript to a property key and do a full get.
1558 baseValue
.requireObjectCoercible(exec
);
1559 if (exec
->hadException())
1560 return jsUndefined();
1561 auto property
= subscript
.toPropertyKey(exec
);
1562 if (exec
->hadException())
1563 return jsUndefined();
1564 return baseValue
.get(exec
, property
);
1569 EncodedJSValue JIT_OPERATION
operationGetByValGeneric(ExecState
* exec
, EncodedJSValue encodedBase
, EncodedJSValue encodedSubscript
, ArrayProfile
* arrayProfile
)
1571 VM
& vm
= exec
->vm();
1572 NativeCallFrameTracer
tracer(&vm
, exec
);
1573 JSValue baseValue
= JSValue::decode(encodedBase
);
1574 JSValue subscript
= JSValue::decode(encodedSubscript
);
1576 JSValue result
= getByVal(exec
, baseValue
, subscript
, arrayProfile
, ReturnAddressPtr(OUR_RETURN_ADDRESS
));
1577 return JSValue::encode(result
);
// operationGetByValDefault: first-tier get_by_val slow path. For int32
// subscripts on objects it tries to specialize the call site: if the
// object's structure has optimizable indexing it compiles a by-val stub for
// the observed array mode (updating the array profile when the mode differs
// from what was compiled); after 10 unpatched slow-path hits — or
// immediately for objects that intercept indexed gets — it repatches the
// site to operationGetByValGeneric so specialization is never attempted
// again. Finally it performs the actual get via the shared getByVal helper.
// NOTE(review): the extraction appears to have dropped lines — e.g.
// `didOptimize = true;` after compileGetByVal and the enclosing
// `if (!didOptimize) {` around the give-up logic, plus braces; verify
// control flow against upstream WebKit.
1580 EncodedJSValue JIT_OPERATION
operationGetByValDefault(ExecState
* exec
, EncodedJSValue encodedBase
, EncodedJSValue encodedSubscript
, ArrayProfile
* arrayProfile
)
1582 VM
& vm
= exec
->vm();
1583 NativeCallFrameTracer
tracer(&vm
, exec
);
1584 JSValue baseValue
= JSValue::decode(encodedBase
);
1585 JSValue subscript
= JSValue::decode(encodedSubscript
);
1587 if (baseValue
.isObject() && subscript
.isInt32()) {
1588 // See if it's worth optimizing this at all.
1589 JSObject
* object
= asObject(baseValue
);
1590 bool didOptimize
= false;
// Locate this call site's ByValInfo via the current bytecode offset.
1592 unsigned bytecodeOffset
= exec
->locationAsBytecodeOffset();
1593 ASSERT(bytecodeOffset
);
1594 ByValInfo
& byValInfo
= exec
->codeBlock()->getByValInfo(bytecodeOffset
- 1);
1595 ASSERT(!byValInfo
.stubRoutine
);
1597 if (hasOptimizableIndexing(object
->structure(vm
))) {
1598 // Attempt to optimize.
1599 Structure
* structure
= object
->structure(vm
);
1600 JITArrayMode arrayMode
= jitArrayModeForStructure(structure
);
1601 if (arrayMode
!= byValInfo
.arrayMode
) {
1602 // If we reached this case, we got an interesting array mode we did not expect when we compiled.
1603 // Let's update the profile to do better next time.
1604 CodeBlock
* codeBlock
= exec
->codeBlock();
1605 ConcurrentJITLocker
locker(codeBlock
->m_lock
);
1606 arrayProfile
->computeUpdatedPrediction(locker
, codeBlock
, structure
);
1608 JIT::compileGetByVal(&vm
, exec
->codeBlock(), &byValInfo
, ReturnAddressPtr(OUR_RETURN_ADDRESS
), arrayMode
);
1614 // If we take slow path more than 10 times without patching then make sure we
1615 // never make that mistake again. Or, if we failed to patch and we have some object
1616 // that intercepts indexed get, then don't even wait until 10 times. For cases
1617 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1618 // opportunity for us to observe that the get_by_val may be polymorphic.
1619 if (++byValInfo
.slowPathCount
>= 10
1620 || object
->structure(vm
)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1621 // Don't ever try to optimize.
1622 ctiPatchCallByReturnAddress(exec
->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS
), FunctionPtr(operationGetByValGeneric
));
// Perform the actual lookup regardless of patching decisions.
1627 JSValue result
= getByVal(exec
, baseValue
, subscript
, arrayProfile
, ReturnAddressPtr(OUR_RETURN_ADDRESS
));
1628 return JSValue::encode(result
);
1631 EncodedJSValue JIT_OPERATION
operationHasIndexedPropertyDefault(ExecState
* exec
, EncodedJSValue encodedBase
, EncodedJSValue encodedSubscript
, ArrayProfile
* arrayProfile
)
1633 VM
& vm
= exec
->vm();
1634 NativeCallFrameTracer
tracer(&vm
, exec
);
1635 JSValue baseValue
= JSValue::decode(encodedBase
);
1636 JSValue subscript
= JSValue::decode(encodedSubscript
);
1638 ASSERT(baseValue
.isObject());
1639 ASSERT(subscript
.isUInt32());
1641 JSObject
* object
= asObject(baseValue
);
1642 bool didOptimize
= false;
1644 unsigned bytecodeOffset
= exec
->locationAsBytecodeOffset();
1645 ASSERT(bytecodeOffset
);
1646 ByValInfo
& byValInfo
= exec
->codeBlock()->getByValInfo(bytecodeOffset
- 1);
1647 ASSERT(!byValInfo
.stubRoutine
);
1649 if (hasOptimizableIndexing(object
->structure(vm
))) {
1650 // Attempt to optimize.
1651 JITArrayMode arrayMode
= jitArrayModeForStructure(object
->structure(vm
));
1652 if (arrayMode
!= byValInfo
.arrayMode
) {
1653 JIT::compileHasIndexedProperty(&vm
, exec
->codeBlock(), &byValInfo
, ReturnAddressPtr(OUR_RETURN_ADDRESS
), arrayMode
);
1659 // If we take slow path more than 10 times without patching then make sure we
1660 // never make that mistake again. Or, if we failed to patch and we have some object
1661 // that intercepts indexed get, then don't even wait until 10 times. For cases
1662 // where we see non-index-intercepting objects, this gives 10 iterations worth of
1663 // opportunity for us to observe that the get_by_val may be polymorphic.
1664 if (++byValInfo
.slowPathCount
>= 10
1665 || object
->structure(vm
)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
1666 // Don't ever try to optimize.
1667 ctiPatchCallByReturnAddress(exec
->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS
), FunctionPtr(operationHasIndexedPropertyGeneric
));
1671 uint32_t index
= subscript
.asUInt32();
1672 if (object
->canGetIndexQuickly(index
))
1673 return JSValue::encode(JSValue(JSValue::JSTrue
));
1675 if (!canAccessArgumentIndexQuickly(*object
, index
))
1676 arrayProfile
->setOutOfBounds();
1677 return JSValue::encode(jsBoolean(object
->hasProperty(exec
, index
)));
1680 EncodedJSValue JIT_OPERATION
operationHasIndexedPropertyGeneric(ExecState
* exec
, EncodedJSValue encodedBase
, EncodedJSValue encodedSubscript
, ArrayProfile
* arrayProfile
)
1682 VM
& vm
= exec
->vm();
1683 NativeCallFrameTracer
tracer(&vm
, exec
);
1684 JSValue baseValue
= JSValue::decode(encodedBase
);
1685 JSValue subscript
= JSValue::decode(encodedSubscript
);
1687 ASSERT(baseValue
.isObject());
1688 ASSERT(subscript
.isUInt32());
1690 JSObject
* object
= asObject(baseValue
);
1691 uint32_t index
= subscript
.asUInt32();
1692 if (object
->canGetIndexQuickly(index
))
1693 return JSValue::encode(JSValue(JSValue::JSTrue
));
1695 if (!canAccessArgumentIndexQuickly(*object
, index
))
1696 arrayProfile
->setOutOfBounds();
1697 return JSValue::encode(jsBoolean(object
->hasProperty(exec
, subscript
.asUInt32())));
1700 EncodedJSValue JIT_OPERATION
operationGetByValString(ExecState
* exec
, EncodedJSValue encodedBase
, EncodedJSValue encodedSubscript
)
1702 VM
& vm
= exec
->vm();
1703 NativeCallFrameTracer
tracer(&vm
, exec
);
1704 JSValue baseValue
= JSValue::decode(encodedBase
);
1705 JSValue subscript
= JSValue::decode(encodedSubscript
);
1708 if (LIKELY(subscript
.isUInt32())) {
1709 uint32_t i
= subscript
.asUInt32();
1710 if (isJSString(baseValue
) && asString(baseValue
)->canGetIndex(i
))
1711 result
= asString(baseValue
)->getIndex(exec
, i
);
1713 result
= baseValue
.get(exec
, i
);
1714 if (!isJSString(baseValue
))
1715 ctiPatchCallByReturnAddress(exec
->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS
), FunctionPtr(operationGetByValDefault
));
1718 baseValue
.requireObjectCoercible(exec
);
1719 if (exec
->hadException())
1720 return JSValue::encode(jsUndefined());
1721 auto property
= subscript
.toPropertyKey(exec
);
1722 if (exec
->hadException())
1723 return JSValue::encode(jsUndefined());
1724 result
= baseValue
.get(exec
, property
);
1727 return JSValue::encode(result
);
1730 EncodedJSValue JIT_OPERATION
operationDeleteById(ExecState
* exec
, EncodedJSValue encodedBase
, const Identifier
* identifier
)
1732 VM
& vm
= exec
->vm();
1733 NativeCallFrameTracer
tracer(&vm
, exec
);
1735 JSObject
* baseObj
= JSValue::decode(encodedBase
).toObject(exec
);
1736 bool couldDelete
= baseObj
->methodTable(vm
)->deleteProperty(baseObj
, exec
, *identifier
);
1737 JSValue result
= jsBoolean(couldDelete
);
1738 if (!couldDelete
&& exec
->codeBlock()->isStrictMode())
1739 vm
.throwException(exec
, createTypeError(exec
, ASCIILiteral("Unable to delete property.")));
1740 return JSValue::encode(result
);
1743 EncodedJSValue JIT_OPERATION
operationInstanceOf(ExecState
* exec
, EncodedJSValue encodedValue
, EncodedJSValue encodedProto
)
1745 VM
& vm
= exec
->vm();
1746 NativeCallFrameTracer
tracer(&vm
, exec
);
1747 JSValue value
= JSValue::decode(encodedValue
);
1748 JSValue proto
= JSValue::decode(encodedProto
);
1750 ASSERT(!value
.isObject() || !proto
.isObject());
1752 bool result
= JSObject::defaultHasInstance(exec
, value
, proto
);
1753 return JSValue::encode(jsBoolean(result
));
1756 int32_t JIT_OPERATION
operationSizeFrameForVarargs(ExecState
* exec
, EncodedJSValue encodedArguments
, int32_t numUsedStackSlots
, int32_t firstVarArgOffset
)
1758 VM
& vm
= exec
->vm();
1759 NativeCallFrameTracer
tracer(&vm
, exec
);
1760 JSStack
* stack
= &exec
->interpreter()->stack();
1761 JSValue arguments
= JSValue::decode(encodedArguments
);
1762 return sizeFrameForVarargs(exec
, stack
, arguments
, numUsedStackSlots
, firstVarArgOffset
);
1765 CallFrame
* JIT_OPERATION
operationSetupVarargsFrame(ExecState
* exec
, CallFrame
* newCallFrame
, EncodedJSValue encodedArguments
, int32_t firstVarArgOffset
, int32_t length
)
1767 VM
& vm
= exec
->vm();
1768 NativeCallFrameTracer
tracer(&vm
, exec
);
1769 JSValue arguments
= JSValue::decode(encodedArguments
);
1770 setupVarargsFrame(exec
, newCallFrame
, arguments
, firstVarArgOffset
, length
);
1771 return newCallFrame
;
1774 EncodedJSValue JIT_OPERATION
operationToObject(ExecState
* exec
, EncodedJSValue value
)
1776 VM
& vm
= exec
->vm();
1777 NativeCallFrameTracer
tracer(&vm
, exec
);
1778 return JSValue::encode(JSValue::decode(value
).toObject(exec
));
1781 char* JIT_OPERATION
operationSwitchCharWithUnknownKeyType(ExecState
* exec
, EncodedJSValue encodedKey
, size_t tableIndex
)
1783 VM
& vm
= exec
->vm();
1784 NativeCallFrameTracer
tracer(&vm
, exec
);
1785 JSValue key
= JSValue::decode(encodedKey
);
1786 CodeBlock
* codeBlock
= exec
->codeBlock();
1788 SimpleJumpTable
& jumpTable
= codeBlock
->switchJumpTable(tableIndex
);
1789 void* result
= jumpTable
.ctiDefault
.executableAddress();
1791 if (key
.isString()) {
1792 StringImpl
* value
= asString(key
)->value(exec
).impl();
1793 if (value
->length() == 1)
1794 result
= jumpTable
.ctiForValue((*value
)[0]).executableAddress();
1797 return reinterpret_cast<char*>(result
);
1800 char* JIT_OPERATION
operationSwitchImmWithUnknownKeyType(ExecState
* exec
, EncodedJSValue encodedKey
, size_t tableIndex
)
1802 VM
& vm
= exec
->vm();
1803 NativeCallFrameTracer
tracer(&vm
, exec
);
1804 JSValue key
= JSValue::decode(encodedKey
);
1805 CodeBlock
* codeBlock
= exec
->codeBlock();
1807 SimpleJumpTable
& jumpTable
= codeBlock
->switchJumpTable(tableIndex
);
1810 result
= jumpTable
.ctiForValue(key
.asInt32()).executableAddress();
1811 else if (key
.isDouble() && key
.asDouble() == static_cast<int32_t>(key
.asDouble()))
1812 result
= jumpTable
.ctiForValue(static_cast<int32_t>(key
.asDouble())).executableAddress();
1814 result
= jumpTable
.ctiDefault
.executableAddress();
1815 return reinterpret_cast<char*>(result
);
1818 char* JIT_OPERATION
operationSwitchStringWithUnknownKeyType(ExecState
* exec
, EncodedJSValue encodedKey
, size_t tableIndex
)
1820 VM
& vm
= exec
->vm();
1821 NativeCallFrameTracer
tracer(&vm
, exec
);
1822 JSValue key
= JSValue::decode(encodedKey
);
1823 CodeBlock
* codeBlock
= exec
->codeBlock();
1826 StringJumpTable
& jumpTable
= codeBlock
->stringSwitchJumpTable(tableIndex
);
1828 if (key
.isString()) {
1829 StringImpl
* value
= asString(key
)->value(exec
).impl();
1830 result
= jumpTable
.ctiForValue(value
).executableAddress();
1832 result
= jumpTable
.ctiDefault
.executableAddress();
1834 return reinterpret_cast<char*>(result
);
1837 EncodedJSValue JIT_OPERATION
operationResolveScope(ExecState
* exec
, int32_t scopeReg
, int32_t identifierIndex
)
1839 VM
& vm
= exec
->vm();
1840 NativeCallFrameTracer
tracer(&vm
, exec
);
1841 const Identifier
& ident
= exec
->codeBlock()->identifier(identifierIndex
);
1842 JSScope
* scope
= exec
->uncheckedR(scopeReg
).Register::scope();
1843 return JSValue::encode(JSScope::resolve(exec
, scope
, ident
));
1846 EncodedJSValue JIT_OPERATION
operationGetFromScope(ExecState
* exec
, Instruction
* bytecodePC
)
1848 VM
& vm
= exec
->vm();
1849 NativeCallFrameTracer
tracer(&vm
, exec
);
1850 CodeBlock
* codeBlock
= exec
->codeBlock();
1851 Instruction
* pc
= bytecodePC
;
1853 const Identifier
& ident
= codeBlock
->identifier(pc
[3].u
.operand
);
1854 JSObject
* scope
= jsCast
<JSObject
*>(exec
->uncheckedR(pc
[2].u
.operand
).jsValue());
1855 ResolveModeAndType
modeAndType(pc
[4].u
.operand
);
1857 PropertySlot
slot(scope
);
1858 if (!scope
->getPropertySlot(exec
, ident
, slot
)) {
1859 if (modeAndType
.mode() == ThrowIfNotFound
)
1860 vm
.throwException(exec
, createUndefinedVariableError(exec
, ident
));
1861 return JSValue::encode(jsUndefined());
1864 // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
1865 if (slot
.isCacheableValue() && slot
.slotBase() == scope
&& scope
->structure(vm
)->propertyAccessesAreCacheable()) {
1866 if (modeAndType
.type() == GlobalProperty
|| modeAndType
.type() == GlobalPropertyWithVarInjectionChecks
) {
1867 Structure
* structure
= scope
->structure(vm
);
1869 ConcurrentJITLocker
locker(codeBlock
->m_lock
);
1870 pc
[5].u
.structure
.set(exec
->vm(), codeBlock
->ownerExecutable(), structure
);
1871 pc
[6].u
.operand
= slot
.cachedOffset();
1873 structure
->startWatchingPropertyForReplacements(vm
, slot
.cachedOffset());
1877 return JSValue::encode(slot
.getValue(exec
, ident
));
1880 void JIT_OPERATION
operationPutToScope(ExecState
* exec
, Instruction
* bytecodePC
)
1882 VM
& vm
= exec
->vm();
1883 NativeCallFrameTracer
tracer(&vm
, exec
);
1884 Instruction
* pc
= bytecodePC
;
1886 CodeBlock
* codeBlock
= exec
->codeBlock();
1887 const Identifier
& ident
= codeBlock
->identifier(pc
[2].u
.operand
);
1888 JSObject
* scope
= jsCast
<JSObject
*>(exec
->uncheckedR(pc
[1].u
.operand
).jsValue());
1889 JSValue value
= exec
->r(pc
[3].u
.operand
).jsValue();
1890 ResolveModeAndType modeAndType
= ResolveModeAndType(pc
[4].u
.operand
);
1891 if (modeAndType
.type() == LocalClosureVar
) {
1892 JSLexicalEnvironment
* environment
= jsCast
<JSLexicalEnvironment
*>(scope
);
1893 environment
->variableAt(ScopeOffset(pc
[6].u
.operand
)).set(vm
, environment
, value
);
1894 if (WatchpointSet
* set
= pc
[5].u
.watchpointSet
)
1895 set
->touch("Executed op_put_scope<LocalClosureVar>");
1898 if (modeAndType
.mode() == ThrowIfNotFound
&& !scope
->hasProperty(exec
, ident
)) {
1899 exec
->vm().throwException(exec
, createUndefinedVariableError(exec
, ident
));
1903 PutPropertySlot
slot(scope
, codeBlock
->isStrictMode());
1904 scope
->methodTable()->put(scope
, exec
, ident
, value
, slot
);
1906 if (exec
->vm().exception())
1909 CommonSlowPaths::tryCachePutToScopeGlobal(exec
, codeBlock
, pc
, scope
, modeAndType
, slot
);
1912 void JIT_OPERATION
operationThrow(ExecState
* exec
, EncodedJSValue encodedExceptionValue
)
1914 VM
* vm
= &exec
->vm();
1915 NativeCallFrameTracer
tracer(vm
, exec
);
1917 JSValue exceptionValue
= JSValue::decode(encodedExceptionValue
);
1918 vm
->throwException(exec
, exceptionValue
);
1920 // Results stored out-of-band in vm.targetMachinePCForThrow, vm.callFrameForThrow & vm.vmEntryFrameForThrow
1921 genericUnwind(vm
, exec
);
1924 void JIT_OPERATION
operationFlushWriteBarrierBuffer(ExecState
* exec
, JSCell
* cell
)
1926 VM
* vm
= &exec
->vm();
1927 NativeCallFrameTracer
tracer(vm
, exec
);
1928 vm
->heap
.flushWriteBarrierBuffer(cell
);
1931 void JIT_OPERATION
operationOSRWriteBarrier(ExecState
* exec
, JSCell
* cell
)
1933 VM
* vm
= &exec
->vm();
1934 NativeCallFrameTracer
tracer(vm
, exec
);
1935 vm
->heap
.writeBarrier(cell
);
1938 // NB: We don't include the value as part of the barrier because the write barrier elision
1939 // phase in the DFG only tracks whether the object being stored to has been barriered. It
1940 // would be much more complicated to try to model the value being stored as well.
1941 void JIT_OPERATION
operationUnconditionalWriteBarrier(ExecState
* exec
, JSCell
* cell
)
1943 VM
* vm
= &exec
->vm();
1944 NativeCallFrameTracer
tracer(vm
, exec
);
1945 vm
->heap
.writeBarrier(cell
);
1948 void JIT_OPERATION
operationInitGlobalConst(ExecState
* exec
, Instruction
* pc
)
1950 VM
* vm
= &exec
->vm();
1951 NativeCallFrameTracer
tracer(vm
, exec
);
1953 JSValue value
= exec
->r(pc
[2].u
.operand
).jsValue();
1954 pc
[1].u
.variablePointer
->set(*vm
, exec
->codeBlock()->globalObject(), value
);
1957 void JIT_OPERATION
lookupExceptionHandler(VM
* vm
, ExecState
* exec
)
1959 NativeCallFrameTracer
tracer(vm
, exec
);
1960 genericUnwind(vm
, exec
);
1961 ASSERT(vm
->targetMachinePCForThrow
);
1964 void JIT_OPERATION
lookupExceptionHandlerFromCallerFrame(VM
* vm
, ExecState
* exec
)
1966 VMEntryFrame
* vmEntryFrame
= vm
->topVMEntryFrame
;
1967 CallFrame
* callerFrame
= exec
->callerFrame(vmEntryFrame
);
1968 ASSERT(callerFrame
);
1970 NativeCallFrameTracerWithRestore
tracer(vm
, vmEntryFrame
, callerFrame
);
1971 genericUnwind(vm
, callerFrame
);
1972 ASSERT(vm
->targetMachinePCForThrow
);
1975 void JIT_OPERATION
operationVMHandleException(ExecState
* exec
)
1977 VM
* vm
= &exec
->vm();
1978 NativeCallFrameTracer
tracer(vm
, exec
);
1979 genericUnwind(vm
, exec
);
1982 // This function "should" just take the ExecState*, but doing so would make it more difficult
1983 // to call from exception check sites. So, unlike all of our other functions, we allow
1984 // ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
1985 // particularly safe here since this is never called on the critical path - it's only for
1987 void JIT_OPERATION
operationExceptionFuzz()
1989 // This probably "just works" for GCC also, but I haven't tried.
1991 ExecState
* exec
= static_cast<ExecState
*>(__builtin_frame_address(1));
1992 void* returnPC
= __builtin_return_address(0);
1993 doExceptionFuzzing(exec
, "JITOperations", returnPC
);
1994 #endif // COMPILER(CLANG)
1997 EncodedJSValue JIT_OPERATION
operationHasGenericProperty(ExecState
* exec
, EncodedJSValue encodedBaseValue
, JSCell
* propertyName
)
1999 VM
& vm
= exec
->vm();
2000 NativeCallFrameTracer
tracer(&vm
, exec
);
2001 JSValue baseValue
= JSValue::decode(encodedBaseValue
);
2002 if (baseValue
.isUndefinedOrNull())
2003 return JSValue::encode(jsBoolean(false));
2005 JSObject
* base
= baseValue
.toObject(exec
);
2006 return JSValue::encode(jsBoolean(base
->hasProperty(exec
, asString(propertyName
)->toIdentifier(exec
))));
2009 EncodedJSValue JIT_OPERATION
operationHasIndexedProperty(ExecState
* exec
, JSCell
* baseCell
, int32_t subscript
)
2011 VM
& vm
= exec
->vm();
2012 NativeCallFrameTracer
tracer(&vm
, exec
);
2013 JSObject
* object
= baseCell
->toObject(exec
, exec
->lexicalGlobalObject());
2014 return JSValue::encode(jsBoolean(object
->hasProperty(exec
, subscript
)));
2017 JSCell
* JIT_OPERATION
operationGetPropertyEnumerator(ExecState
* exec
, JSCell
* cell
)
2019 VM
& vm
= exec
->vm();
2020 NativeCallFrameTracer
tracer(&vm
, exec
);
2022 JSObject
* base
= cell
->toObject(exec
, exec
->lexicalGlobalObject());
2024 return propertyNameEnumerator(exec
, base
);
2027 EncodedJSValue JIT_OPERATION
operationNextEnumeratorPname(ExecState
* exec
, JSCell
* enumeratorCell
, int32_t index
)
2029 VM
& vm
= exec
->vm();
2030 NativeCallFrameTracer
tracer(&vm
, exec
);
2031 JSPropertyNameEnumerator
* enumerator
= jsCast
<JSPropertyNameEnumerator
*>(enumeratorCell
);
2032 JSString
* propertyName
= enumerator
->propertyNameAtIndex(index
);
2033 return JSValue::encode(propertyName
? propertyName
: jsNull());
2036 JSCell
* JIT_OPERATION
operationToIndexString(ExecState
* exec
, int32_t index
)
2038 VM
& vm
= exec
->vm();
2039 NativeCallFrameTracer
tracer(&vm
, exec
);
2040 return jsString(exec
, Identifier::from(exec
, index
).string());
2043 void JIT_OPERATION
operationProcessTypeProfilerLog(ExecState
* exec
)
2045 exec
->vm().typeProfilerLog()->processLogEntries(ASCIILiteral("Log Full, called from inside baseline JIT"));
2050 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
2051 // definition of getHostCallReturnValue() below because the Windows build
2053 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION
getHostCallReturnValueWithExecState(ExecState
* exec
)
2056 return JSValue::encode(JSValue());
2057 return JSValue::encode(exec
->vm().hostCallReturnValue
);
// getHostCallReturnValue(): per-(compiler, CPU) assembly trampolines that
// forward to getHostCallReturnValueWithExecState() above.
// NOTE(review): this region is damaged by the extraction — the `asm (`
// openers, the instructions that move the frame pointer into the first
// argument register, the closing `);` of each asm statement, the MSVC
// `extern "C" {` / function braces, and at least one `#elif` (the block
// between ARM_TRADITIONAL and MIPS looks like a dropped ARM64 variant) are
// missing. Restore from upstream JITOperations.cpp before compiling; the
// visible tokens are preserved verbatim below.
#if COMPILER(GCC) && CPU(X86_64)
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
#elif COMPILER(GCC) && CPU(X86)
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"leal -4(%esp), %esp\n"
"call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
"leal 8(%esp), %esp\n"
#elif COMPILER(GCC) && CPU(ARM_THUMB2)
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
// NOTE(review): the `#elif` guarding the following variant was dropped —
// presumably CPU(ARM64); confirm against upstream.
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
#elif COMPILER(GCC) && CPU(MIPS)
// NOTE(review): two consecutive definitions of LOAD_FUNCTION_TO_T9 — the
// `#if`/`#else`/`#endif` (PIC vs non-PIC) lines between them were dropped.
#define LOAD_FUNCTION_TO_T9(function) \
".set noreorder" "\n" \
".cpload $25" "\n" \
".set reorder" "\n" \
"la $t9, " LOCAL_REFERENCE(function) "\n"
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
"move $a0, $fp" "\n"
"b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
#elif COMPILER(GCC) && CPU(SH4)
#define SH4_SCRATCH_REGISTER "r11"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
"mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
"braf " SH4_SCRATCH_REGISTER "\n"
"2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
#elif COMPILER(MSVC) && CPU(X86)
// NOTE(review): the enclosing braces of this naked function (and its
// `extern "C" {` wrapper, if present upstream) were dropped.
__declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
__asm mov [esp + 4], ebp;
__asm jmp getHostCallReturnValueWithExecState
#endif // ENABLE(JIT)