]> git.saurik.com Git - apple/javascriptcore.git/blob - jit/JITOperations.cpp
JavaScriptCore-7600.1.4.9.tar.gz
[apple/javascriptcore.git] / jit / JITOperations.cpp
1 /*
2 * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #include "config.h"
27 #include "JITOperations.h"
28
29 #if ENABLE(JIT)
30
31 #include "Arguments.h"
32 #include "ArrayConstructor.h"
33 #include "DFGCompilationMode.h"
34 #include "DFGDriver.h"
35 #include "DFGOSREntry.h"
36 #include "DFGThunks.h"
37 #include "DFGWorklist.h"
38 #include "Debugger.h"
39 #include "Error.h"
40 #include "ErrorHandlingScope.h"
41 #include "GetterSetter.h"
42 #include "HostCallReturnValue.h"
43 #include "JIT.h"
44 #include "JITToDFGDeferredCompilationCallback.h"
45 #include "JSGlobalObjectFunctions.h"
46 #include "JSNameScope.h"
47 #include "JSPropertyNameIterator.h"
48 #include "JSStackInlines.h"
49 #include "JSWithScope.h"
50 #include "ObjectConstructor.h"
51 #include "JSCInlines.h"
52 #include "Repatch.h"
53 #include "RepatchBuffer.h"
54 #include "TestRunnerUtils.h"
55 #include <wtf/InlineASM.h>
56
57 namespace JSC {
58
// Count of exception-fuzz checks executed so far; read through the accessor
// below.
static unsigned s_numberOfExceptionFuzzChecks;
unsigned numberOfExceptionFuzzChecks() { return s_numberOfExceptionFuzzChecks; }
61
62 extern "C" {
63
64 #if COMPILER(MSVC)
65 void * _ReturnAddress(void);
66 #pragma intrinsic(_ReturnAddress)
67
68 #define OUR_RETURN_ADDRESS _ReturnAddress()
69 #else
70 #define OUR_RETURN_ADDRESS __builtin_return_address(0)
71 #endif
72
73 #if ENABLE(OPCODE_SAMPLING)
74 #define CTI_SAMPLER vm->interpreter->sampler()
75 #else
76 #define CTI_SAMPLER 0
77 #endif
78
79
80 void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
81 {
82 // We pass in our own code block, because the callframe hasn't been populated.
83 VM* vm = codeBlock->vm();
84 CallFrame* callerFrame = exec->callerFrameSkippingVMEntrySentinel();
85 if (!callerFrame)
86 callerFrame = exec;
87
88 NativeCallFrameTracer tracer(vm, callerFrame);
89 ErrorHandlingScope errorScope(*vm);
90 vm->throwException(callerFrame, createStackOverflowError(callerFrame));
91 }
92
93 int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
94 {
95 VM* vm = &exec->vm();
96 CallFrame* callerFrame = exec->callerFrameSkippingVMEntrySentinel();
97 NativeCallFrameTracer tracer(vm, callerFrame);
98
99 JSStack& stack = vm->interpreter->stack();
100
101 int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForCall);
102 if (missingArgCount < 0)
103 throwStackOverflowError(callerFrame);
104
105 return missingArgCount;
106 }
107
108 int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
109 {
110 VM* vm = &exec->vm();
111 CallFrame* callerFrame = exec->callerFrameSkippingVMEntrySentinel();
112 NativeCallFrameTracer tracer(vm, callerFrame);
113
114 JSStack& stack = vm->interpreter->stack();
115
116 int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, &stack, CodeForConstruct);
117 if (missingArgCount < 0)
118 throwStackOverflowError(callerFrame);
119
120 return missingArgCount;
121 }
122
123 EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo*, EncodedJSValue base, StringImpl* uid)
124 {
125 VM* vm = &exec->vm();
126 NativeCallFrameTracer tracer(vm, exec);
127
128 JSValue baseValue = JSValue::decode(base);
129 PropertySlot slot(baseValue);
130 Identifier ident(vm, uid);
131 return JSValue::encode(baseValue.get(exec, ident, slot));
132 }
133
134 EncodedJSValue JIT_OPERATION operationGetByIdBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, StringImpl* uid)
135 {
136 VM* vm = &exec->vm();
137 NativeCallFrameTracer tracer(vm, exec);
138
139 Identifier ident(vm, uid);
140 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
141
142 JSValue baseValue = JSValue::decode(base);
143 PropertySlot slot(baseValue);
144 JSValue result = baseValue.get(exec, ident, slot);
145
146 if (accessType == static_cast<AccessType>(stubInfo->accessType))
147 buildGetByIDList(exec, baseValue, ident, slot, *stubInfo);
148
149 return JSValue::encode(result);
150 }
151
152 EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, StringImpl* uid)
153 {
154 VM* vm = &exec->vm();
155 NativeCallFrameTracer tracer(vm, exec);
156 Identifier ident = uid->isEmptyUnique() ? Identifier::from(PrivateName(uid)) : Identifier(vm, uid);
157 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
158
159 JSValue baseValue = JSValue::decode(base);
160 PropertySlot slot(baseValue);
161 JSValue result = baseValue.get(exec, ident, slot);
162
163 if (accessType == static_cast<AccessType>(stubInfo->accessType)) {
164 if (stubInfo->seen)
165 repatchGetByID(exec, baseValue, ident, slot, *stubInfo);
166 else
167 stubInfo->seen = true;
168 }
169
170 return JSValue::encode(result);
171 }
172
173 EncodedJSValue JIT_OPERATION operationInOptimize(ExecState* exec, StructureStubInfo* stubInfo, JSCell* base, StringImpl* key)
174 {
175 VM* vm = &exec->vm();
176 NativeCallFrameTracer tracer(vm, exec);
177
178 if (!base->isObject()) {
179 vm->throwException(exec, createInvalidParameterError(exec, "in", base));
180 return JSValue::encode(jsUndefined());
181 }
182
183 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
184
185 Identifier ident(vm, key);
186 PropertySlot slot(base);
187 bool result = asObject(base)->getPropertySlot(exec, ident, slot);
188
189 RELEASE_ASSERT(accessType == stubInfo->accessType);
190
191 if (stubInfo->seen)
192 repatchIn(exec, base, ident, result, slot, *stubInfo);
193 else
194 stubInfo->seen = true;
195
196 return JSValue::encode(jsBoolean(result));
197 }
198
199 EncodedJSValue JIT_OPERATION operationIn(ExecState* exec, StructureStubInfo*, JSCell* base, StringImpl* key)
200 {
201 VM* vm = &exec->vm();
202 NativeCallFrameTracer tracer(vm, exec);
203
204 if (!base->isObject()) {
205 vm->throwException(exec, createInvalidParameterError(exec, "in", base));
206 return JSValue::encode(jsUndefined());
207 }
208
209 Identifier ident(vm, key);
210 return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
211 }
212
213 EncodedJSValue JIT_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
214 {
215 VM* vm = &exec->vm();
216 NativeCallFrameTracer tracer(vm, exec);
217
218 return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
219 }
220
221 void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo*, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
222 {
223 VM* vm = &exec->vm();
224 NativeCallFrameTracer tracer(vm, exec);
225
226 Identifier ident(vm, uid);
227 PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
228 JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
229 }
230
231 void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo*, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
232 {
233 VM* vm = &exec->vm();
234 NativeCallFrameTracer tracer(vm, exec);
235
236 Identifier ident(vm, uid);
237 PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
238 JSValue::decode(encodedBase).put(exec, ident, JSValue::decode(encodedValue), slot);
239 }
240
241 void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo*, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
242 {
243 VM* vm = &exec->vm();
244 NativeCallFrameTracer tracer(vm, exec);
245
246 Identifier ident(vm, uid);
247 PutPropertySlot slot(JSValue::decode(encodedBase), true, exec->codeBlock()->putByIdContext());
248 asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
249 }
250
251 void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo*, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
252 {
253 VM* vm = &exec->vm();
254 NativeCallFrameTracer tracer(vm, exec);
255
256 Identifier ident(vm, uid);
257 PutPropertySlot slot(JSValue::decode(encodedBase), false, exec->codeBlock()->putByIdContext());
258 asObject(JSValue::decode(encodedBase))->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
259 }
260
261 void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
262 {
263 VM* vm = &exec->vm();
264 NativeCallFrameTracer tracer(vm, exec);
265
266 Identifier ident(vm, uid);
267 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
268
269 JSValue value = JSValue::decode(encodedValue);
270 JSValue baseValue = JSValue::decode(encodedBase);
271 PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
272
273 baseValue.put(exec, ident, value, slot);
274
275 if (accessType != static_cast<AccessType>(stubInfo->accessType))
276 return;
277
278 if (stubInfo->seen)
279 repatchPutByID(exec, baseValue, ident, slot, *stubInfo, NotDirect);
280 else
281 stubInfo->seen = true;
282 }
283
284 void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
285 {
286 VM* vm = &exec->vm();
287 NativeCallFrameTracer tracer(vm, exec);
288
289 Identifier ident(vm, uid);
290 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
291
292 JSValue value = JSValue::decode(encodedValue);
293 JSValue baseValue = JSValue::decode(encodedBase);
294 PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
295
296 baseValue.put(exec, ident, value, slot);
297
298 if (accessType != static_cast<AccessType>(stubInfo->accessType))
299 return;
300
301 if (stubInfo->seen)
302 repatchPutByID(exec, baseValue, ident, slot, *stubInfo, NotDirect);
303 else
304 stubInfo->seen = true;
305 }
306
307 void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
308 {
309 VM* vm = &exec->vm();
310 NativeCallFrameTracer tracer(vm, exec);
311
312 Identifier ident(vm, uid);
313 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
314
315 JSValue value = JSValue::decode(encodedValue);
316 JSObject* baseObject = asObject(JSValue::decode(encodedBase));
317 PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
318
319 baseObject->putDirect(exec->vm(), ident, value, slot);
320
321 if (accessType != static_cast<AccessType>(stubInfo->accessType))
322 return;
323
324 if (stubInfo->seen)
325 repatchPutByID(exec, baseObject, ident, slot, *stubInfo, Direct);
326 else
327 stubInfo->seen = true;
328 }
329
330 void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
331 {
332 VM* vm = &exec->vm();
333 NativeCallFrameTracer tracer(vm, exec);
334
335 Identifier ident(vm, uid);
336 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
337
338 JSValue value = JSValue::decode(encodedValue);
339 JSObject* baseObject = asObject(JSValue::decode(encodedBase));
340 PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
341
342 baseObject->putDirect(exec->vm(), ident, value, slot);
343
344 if (accessType != static_cast<AccessType>(stubInfo->accessType))
345 return;
346
347 if (stubInfo->seen)
348 repatchPutByID(exec, baseObject, ident, slot, *stubInfo, Direct);
349 else
350 stubInfo->seen = true;
351 }
352
353 void JIT_OPERATION operationPutByIdStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
354 {
355 VM* vm = &exec->vm();
356 NativeCallFrameTracer tracer(vm, exec);
357
358 Identifier ident(vm, uid);
359 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
360
361 JSValue value = JSValue::decode(encodedValue);
362 JSValue baseValue = JSValue::decode(encodedBase);
363 PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
364
365 baseValue.put(exec, ident, value, slot);
366
367 if (accessType != static_cast<AccessType>(stubInfo->accessType))
368 return;
369
370 buildPutByIdList(exec, baseValue, ident, slot, *stubInfo, NotDirect);
371 }
372
373 void JIT_OPERATION operationPutByIdNonStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
374 {
375 VM* vm = &exec->vm();
376 NativeCallFrameTracer tracer(vm, exec);
377
378 Identifier ident(vm, uid);
379 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
380
381 JSValue value = JSValue::decode(encodedValue);
382 JSValue baseValue = JSValue::decode(encodedBase);
383 PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
384
385 baseValue.put(exec, ident, value, slot);
386
387 if (accessType != static_cast<AccessType>(stubInfo->accessType))
388 return;
389
390 buildPutByIdList(exec, baseValue, ident, slot, *stubInfo, NotDirect);
391 }
392
393 void JIT_OPERATION operationPutByIdDirectStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
394 {
395 VM* vm = &exec->vm();
396 NativeCallFrameTracer tracer(vm, exec);
397
398 Identifier ident(vm, uid);
399 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
400
401 JSValue value = JSValue::decode(encodedValue);
402 JSObject* baseObject = asObject(JSValue::decode(encodedBase));
403 PutPropertySlot slot(baseObject, true, exec->codeBlock()->putByIdContext());
404
405 baseObject->putDirect(exec->vm(), ident, value, slot);
406
407 if (accessType != static_cast<AccessType>(stubInfo->accessType))
408 return;
409
410 buildPutByIdList(exec, baseObject, ident, slot, *stubInfo, Direct);
411 }
412
413 void JIT_OPERATION operationPutByIdDirectNonStrictBuildList(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, StringImpl* uid)
414 {
415 VM* vm = &exec->vm();
416 NativeCallFrameTracer tracer(vm, exec);
417
418 Identifier ident(vm, uid);
419 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
420
421 JSValue value = JSValue::decode(encodedValue);
422 JSObject* baseObject = asObject(JSValue::decode(encodedBase));
423 PutPropertySlot slot(baseObject, false, exec->codeBlock()->putByIdContext());
424
425 baseObject ->putDirect(exec->vm(), ident, value, slot);
426
427 if (accessType != static_cast<AccessType>(stubInfo->accessType))
428 return;
429
430 buildPutByIdList(exec, baseObject, ident, slot, *stubInfo, Direct);
431 }
432
void JIT_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
{
    // Slow path for a transition put that must grow the object's out-of-line
    // property storage: reallocate, switch to the new structure, then store
    // the value at the already-computed offset.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // The JIT only calls this when the new structure needs more out-of-line
    // capacity than the current one provides...
    ASSERT(structure->outOfLineCapacity() > base->structure(vm)->outOfLineCapacity());
    // ...and the inline fast-path allocation could not have succeeded.
    ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
    base->setStructureAndReallocateStorageIfNecessary(vm, structure);
    base->putDirect(vm, offset, JSValue::decode(value));
}
443
444 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value)
445 {
446 VM& vm = callFrame->vm();
447 if (LIKELY(subscript.isUInt32())) {
448 uint32_t i = subscript.asUInt32();
449 if (baseValue.isObject()) {
450 JSObject* object = asObject(baseValue);
451 if (object->canSetIndexQuickly(i))
452 object->setIndexQuickly(callFrame->vm(), i, value);
453 else
454 object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
455 } else
456 baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
457 } else if (isName(subscript)) {
458 PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
459 baseValue.put(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName(), value, slot);
460 } else {
461 Identifier property = subscript.toString(callFrame)->toIdentifier(callFrame);
462 if (!callFrame->vm().exception()) { // Don't put to an object if toString threw an exception.
463 PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
464 baseValue.put(callFrame, property, value, slot);
465 }
466 }
467 }
468
469 static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value)
470 {
471 if (LIKELY(subscript.isUInt32())) {
472 uint32_t i = subscript.asUInt32();
473 baseObject->putDirectIndex(callFrame, i, value);
474 } else if (isName(subscript)) {
475 PutPropertySlot slot(baseObject, callFrame->codeBlock()->isStrictMode());
476 baseObject->putDirect(callFrame->vm(), jsCast<NameInstance*>(subscript.asCell())->privateName(), value, slot);
477 } else {
478 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
479 if (!callFrame->vm().exception()) { // Don't put to an object if toString threw an exception.
480 PutPropertySlot slot(baseObject, callFrame->codeBlock()->isStrictMode());
481 baseObject->putDirect(callFrame->vm(), property, value, slot);
482 }
483 }
484 }
void JIT_OPERATION operationPutByVal(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue)
{
    // Patching slow path for put_by_val. Before doing the actual put, decides
    // whether to repatch the call site with a specialized array-store stub, or
    // give up and relink it to the fully generic entry point.
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);

    if (baseValue.isObject() && subscript.isInt32()) {
        // See if it's worth optimizing at all.
        JSObject* object = asObject(baseValue);
        bool didOptimize = false;

        unsigned bytecodeOffset = exec->locationAsBytecodeOffset();
        ASSERT(bytecodeOffset);
        // NOTE(review): presumably bytecodeOffset - 1 maps the recorded
        // location back to the put_by_val instruction itself — confirm
        // against getByValInfo's keying.
        ByValInfo& byValInfo = exec->codeBlock()->getByValInfo(bytecodeOffset - 1);
        ASSERT(!byValInfo.stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize: compile a stub specialized for this
            // object's array mode, unless we already tried that exact mode.
            JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
            if (arrayMode != byValInfo.arrayMode) {
                JIT::compilePutByVal(&vm, exec->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
                didOptimize = true;
            }
        }

        if (!didOptimize) {
            // If we take slow path more than 10 times without patching then make sure we
            // never make that mistake again. Or, if we failed to patch and we have some object
            // that intercepts indexed get, then don't even wait until 10 times. For cases
            // where we see non-index-intercepting objects, this gives 10 iterations worth of
            // opportunity for us to observe that the get_by_val may be polymorphic.
            if (++byValInfo.slowPathCount >= 10
                || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
                // Don't ever try to optimize.
                RepatchBuffer repatchBuffer(exec->codeBlock());
                repatchBuffer.relinkCallerToFunction(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric));
            }
        }
    }

    // Regardless of patching decisions, perform the actual put.
    putByVal(exec, baseValue, subscript, value);
}
530
void JIT_OPERATION operationDirectPutByVal(ExecState* callFrame, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue)
{
    // Patching slow path for put_by_val_direct (own-property stores). Mirrors
    // operationPutByVal, except the base is guaranteed to be an object and the
    // final store goes through directPutByVal.
    VM& vm = callFrame->vm();
    NativeCallFrameTracer tracer(&vm, callFrame);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    if (subscript.isInt32()) {
        // See if it's worth optimizing at all.
        bool didOptimize = false;

        unsigned bytecodeOffset = callFrame->locationAsBytecodeOffset();
        ASSERT(bytecodeOffset);
        // NOTE(review): presumably bytecodeOffset - 1 maps the recorded
        // location back to the put_by_val instruction itself — confirm
        // against getByValInfo's keying.
        ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
        ASSERT(!byValInfo.stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize: compile a stub specialized for this
            // object's array mode, unless we already tried that exact mode.
            JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
            if (arrayMode != byValInfo.arrayMode) {
                JIT::compileDirectPutByVal(&vm, callFrame->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
                didOptimize = true;
            }
        }

        if (!didOptimize) {
            // If we take slow path more than 10 times without patching then make sure we
            // never make that mistake again. Or, if we failed to patch and we have some object
            // that intercepts indexed get, then don't even wait until 10 times. For cases
            // where we see non-index-intercepting objects, this gives 10 iterations worth of
            // opportunity for us to observe that the get_by_val may be polymorphic.
            if (++byValInfo.slowPathCount >= 10
                || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
                // Don't ever try to optimize.
                RepatchBuffer repatchBuffer(callFrame->codeBlock());
                repatchBuffer.relinkCallerToFunction(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric));
            }
        }
    }
    directPutByVal(callFrame, object, subscript, value);
}
575
576 void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue)
577 {
578 VM& vm = exec->vm();
579 NativeCallFrameTracer tracer(&vm, exec);
580
581 JSValue baseValue = JSValue::decode(encodedBaseValue);
582 JSValue subscript = JSValue::decode(encodedSubscript);
583 JSValue value = JSValue::decode(encodedValue);
584
585 putByVal(exec, baseValue, subscript, value);
586 }
587
588
589 void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue)
590 {
591 VM& vm = exec->vm();
592 NativeCallFrameTracer tracer(&vm, exec);
593
594 JSValue baseValue = JSValue::decode(encodedBaseValue);
595 JSValue subscript = JSValue::decode(encodedSubscript);
596 JSValue value = JSValue::decode(encodedValue);
597 RELEASE_ASSERT(baseValue.isObject());
598 directPutByVal(exec, asObject(baseValue), subscript, value);
599 }
600
EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
{
    // Slow path for op_call_eval. Returns the encoded eval result, or the
    // empty JSValue when the callee is not the built-in global eval (the JIT
    // then falls back to a normal call), or an empty value on exception.
    ASSERT(exec->codeBlock()->codeType() != FunctionCode
        || !exec->codeBlock()->needsActivation()
        || exec->hasActivation());

    // The callee frame inherits the caller's scope and has no code block of
    // its own.
    execCallee->setScope(exec->scope());
    execCallee->setCodeBlock(0);

    // Only a call that resolves to the built-in global eval gets the eval
    // treatment.
    if (!isHostFunction(execCallee->calleeAsValue(), globalFuncEval))
        return JSValue::encode(JSValue());

    VM* vm = &execCallee->vm();
    JSValue result = eval(execCallee);
    if (vm->exception())
        return EncodedJSValue();

    return JSValue::encode(result);
}
620
static void* handleHostCall(ExecState* execCallee, JSValue callee, CodeSpecializationKind kind)
{
    // Handles a call or construct whose callee is not a JS function: invokes
    // the host (native) implementation if one exists, otherwise throws a
    // not-a-function / not-a-constructor error. Returns the machine-code
    // address the JIT should jump to next — either the host-call return
    // trampoline or the exception-throwing thunk.
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();

    execCallee->setScope(exec->scope());
    execCallee->setCodeBlock(0);

    if (kind == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(callee, callData);

        // JS-function callees are handled by the linking paths; only host or
        // non-callable values reach here.
        ASSERT(callType != CallTypeJS);

        if (callType == CallTypeHost) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (vm->exception())
                return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();

            // The trampoline picks the result up from vm->hostCallReturnValue.
            return reinterpret_cast<void*>(getHostCallReturnValue);
        }

        ASSERT(callType == CallTypeNone);
        exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
        return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
    }

    ASSERT(kind == CodeForConstruct);

    // Construct mirrors the call case above, with construct data and a
    // not-a-constructor error.
    ConstructData constructData;
    ConstructType constructType = getConstructData(callee, constructData);

    ASSERT(constructType != ConstructTypeJS);

    if (constructType == ConstructTypeHost) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (vm->exception())
            return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();

        return reinterpret_cast<void*>(getHostCallReturnValue);
    }

    ASSERT(constructType == ConstructTypeNone);
    exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
    return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
}
671
672 inline char* linkFor(
673 ExecState* execCallee, CallLinkInfo* callLinkInfo, CodeSpecializationKind kind,
674 RegisterPreservationMode registers)
675 {
676 ExecState* exec = execCallee->callerFrame();
677 VM* vm = &exec->vm();
678 NativeCallFrameTracer tracer(vm, exec);
679
680 JSValue calleeAsValue = execCallee->calleeAsValue();
681 JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
682 if (!calleeAsFunctionCell)
683 return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
684
685 JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
686 JSScope* scope = callee->scopeUnchecked();
687 execCallee->setScope(scope);
688 ExecutableBase* executable = callee->executable();
689
690 MacroAssemblerCodePtr codePtr;
691 CodeBlock* codeBlock = 0;
692 if (executable->isHostFunction())
693 codePtr = executable->entrypointFor(*vm, kind, MustCheckArity, registers);
694 else {
695 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
696 JSObject* error = functionExecutable->prepareForExecution(execCallee, callee, &scope, kind);
697 execCallee->setScope(scope);
698 if (error) {
699 throwStackOverflowError(exec);
700 return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
701 }
702 codeBlock = functionExecutable->codeBlockFor(kind);
703 ArityCheckMode arity;
704 if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->callType == CallLinkInfo::CallVarargs || callLinkInfo->callType == CallLinkInfo::ConstructVarargs)
705 arity = MustCheckArity;
706 else
707 arity = ArityCheckNotRequired;
708 codePtr = functionExecutable->entrypointFor(*vm, kind, arity, registers);
709 }
710 if (!callLinkInfo->seenOnce())
711 callLinkInfo->setSeen();
712 else
713 linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr, kind, registers);
714 return reinterpret_cast<char*>(codePtr.executableAddress());
715 }
716
char* JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    // Link an unlinked JS call site (no register preservation required).
    return linkFor(execCallee, callLinkInfo, CodeForCall, RegisterPreservationNotRequired);
}
721
char* JIT_OPERATION operationLinkConstruct(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    // Link an unlinked JS construct site (no register preservation required).
    return linkFor(execCallee, callLinkInfo, CodeForConstruct, RegisterPreservationNotRequired);
}
726
char* JIT_OPERATION operationLinkCallThatPreservesRegs(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    // Link an unlinked JS call site from code that requires registers to be
    // preserved across the call.
    return linkFor(execCallee, callLinkInfo, CodeForCall, MustPreserveRegisters);
}
731
char* JIT_OPERATION operationLinkConstructThatPreservesRegs(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    // Link an unlinked JS construct site from code that requires registers to
    // be preserved across the call.
    return linkFor(execCallee, callLinkInfo, CodeForConstruct, MustPreserveRegisters);
}
736
inline char* virtualForWithFunction(
    ExecState* execCallee, CodeSpecializationKind kind, RegisterPreservationMode registers,
    JSCell*& calleeAsFunctionCell)
{
    // Virtual-call slow path: resolves the callee on every call (no linking).
    // On return, calleeAsFunctionCell holds the callee's JSFunction cell, or
    // null for a host/non-function callee. Returns the entry point to call.
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->calleeAsValue();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell))
        return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    execCallee->setScope(scope);
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        // Prepare (compile) the callee; prepareForExecution may update the
        // scope, so it is re-installed afterwards.
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
        JSObject* error = functionExecutable->prepareForExecution(execCallee, function, &scope, kind);
        execCallee->setScope(scope);
        if (error) {
            exec->vm().throwException(exec, error);
            return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
        }
    }
    // Virtual calls always go through the arity-checking entry point.
    return reinterpret_cast<char*>(executable->entrypointFor(
        *vm, kind, MustCheckArity, registers).executableAddress());
}
766
767 inline char* virtualFor(
768 ExecState* execCallee, CodeSpecializationKind kind, RegisterPreservationMode registers)
769 {
770 JSCell* calleeAsFunctionCellIgnored;
771 return virtualForWithFunction(execCallee, kind, registers, calleeAsFunctionCellIgnored);
772 }
773
static bool attemptToOptimizeClosureCall(
    ExecState* execCallee, RegisterPreservationMode registers, JSCell* calleeAsFunctionCell,
    CallLinkInfo& callLinkInfo)
{
    // Tries to upgrade a virtual call site to a closure-call inline cache.
    // Only done when the new callee shares both structure and executable with
    // the callee recorded in the link info. Returns true if the site was
    // relinked as a closure call.
    if (!calleeAsFunctionCell)
        return false;

    VM& vm = execCallee->vm();
    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSFunction* oldCallee = callLinkInfo.callee.get();

    if (!oldCallee
        || oldCallee->structure(vm) != callee->structure(vm)
        || oldCallee->executable() != callee->executable())
        return false;

    ASSERT(callee->executable()->hasJITCodeForCall());
    MacroAssemblerCodePtr codePtr =
        callee->executable()->generatedJITCodeForCall()->addressForCall(
            *execCallee->callerFrame()->codeBlock()->vm(), callee->executable(),
            ArityCheckNotRequired, registers);

    CodeBlock* codeBlock;
    if (callee->executable()->isHostFunction())
        codeBlock = 0;
    else {
        codeBlock = jsCast<FunctionExecutable*>(callee->executable())->codeBlockForCall();
        // The non-arity-checking entry point above is only safe when the
        // argument count is known to satisfy the callee; varargs sites and
        // under-application must keep going through the virtual path.
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType == CallLinkInfo::CallVarargs || callLinkInfo.callType == CallLinkInfo::ConstructVarargs)
            return false;
    }

    linkClosureCall(
        execCallee, callLinkInfo, codeBlock,
        callee->structure(), callee->executable(), codePtr, registers);

    return true;
}
811
812 char* JIT_OPERATION operationLinkClosureCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
813 {
814 JSCell* calleeAsFunctionCell;
815 char* result = virtualForWithFunction(execCallee, CodeForCall, RegisterPreservationNotRequired, calleeAsFunctionCell);
816
817 if (!attemptToOptimizeClosureCall(execCallee, RegisterPreservationNotRequired, calleeAsFunctionCell, *callLinkInfo))
818 linkSlowFor(execCallee, *callLinkInfo, CodeForCall, RegisterPreservationNotRequired);
819
820 return result;
821 }
822
// JIT slow path for a fully virtual (unlinked) call.
char* JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo*)
{
    return virtualFor(execCallee, CodeForCall, RegisterPreservationNotRequired);
}
827
// JIT slow path for a fully virtual (unlinked) construct.
char* JIT_OPERATION operationVirtualConstruct(ExecState* execCallee, CallLinkInfo*)
{
    return virtualFor(execCallee, CodeForConstruct, RegisterPreservationNotRequired);
}
832
833 char* JIT_OPERATION operationLinkClosureCallThatPreservesRegs(ExecState* execCallee, CallLinkInfo* callLinkInfo)
834 {
835 JSCell* calleeAsFunctionCell;
836 char* result = virtualForWithFunction(execCallee, CodeForCall, MustPreserveRegisters, calleeAsFunctionCell);
837
838 if (!attemptToOptimizeClosureCall(execCallee, MustPreserveRegisters, calleeAsFunctionCell, *callLinkInfo))
839 linkSlowFor(execCallee, *callLinkInfo, CodeForCall, MustPreserveRegisters);
840
841 return result;
842 }
843
// Register-preserving variant of operationVirtualCall.
char* JIT_OPERATION operationVirtualCallThatPreservesRegs(ExecState* execCallee, CallLinkInfo*)
{
    return virtualFor(execCallee, CodeForCall, MustPreserveRegisters);
}
848
// Register-preserving variant of operationVirtualConstruct.
char* JIT_OPERATION operationVirtualConstructThatPreservesRegs(ExecState* execCallee, CallLinkInfo*)
{
    return virtualFor(execCallee, CodeForConstruct, MustPreserveRegisters);
}
853
854 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
855 {
856 VM* vm = &exec->vm();
857 NativeCallFrameTracer tracer(vm, exec);
858
859 return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
860 }
861
862 size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
863 {
864 VM* vm = &exec->vm();
865 NativeCallFrameTracer tracer(vm, exec);
866
867 return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
868 }
869
870 size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
871 {
872 VM* vm = &exec->vm();
873 NativeCallFrameTracer tracer(vm, exec);
874
875 return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
876 }
877
878 size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
879 {
880 VM* vm = &exec->vm();
881 NativeCallFrameTracer tracer(vm, exec);
882
883 return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
884 }
885
886 size_t JIT_OPERATION operationConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
887 {
888 VM* vm = &exec->vm();
889 NativeCallFrameTracer tracer(vm, exec);
890
891 return JSValue::decode(encodedOp).toBoolean(exec);
892 }
893
894 size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
895 {
896 VM* vm = &exec->vm();
897 NativeCallFrameTracer tracer(vm, exec);
898
899 return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
900 }
901
// Slow path for equality when both operands are known to be strings: compares
// the underlying characters. On 64-bit this returns an encoded boolean JSValue;
// on 32-bit it returns the raw bool, matching how the JIT consumes the result.
#if USE(JSVALUE64)
EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // value(exec) may need to resolve a rope, which can allocate.
    bool result = WTF::equal(*asString(left)->value(exec).impl(), *asString(right)->value(exec).impl());
#if USE(JSVALUE64)
    return JSValue::encode(jsBoolean(result));
#else
    return result;
#endif
}
918
919 size_t JIT_OPERATION operationHasProperty(ExecState* exec, JSObject* base, JSString* property)
920 {
921 int result = base->hasProperty(exec, Identifier(exec, property->value(exec)));
922 return result;
923 }
924
925
926 EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
927 {
928 VM* vm = &exec->vm();
929 NativeCallFrameTracer tracer(vm, exec);
930 return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
931 }
932
933 EncodedJSValue JIT_OPERATION operationNewArrayBufferWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
934 {
935 VM* vm = &exec->vm();
936 NativeCallFrameTracer tracer(vm, exec);
937 return JSValue::encode(constructArray(exec, profile, values, size));
938 }
939
940 EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
941 {
942 VM* vm = &exec->vm();
943 NativeCallFrameTracer tracer(vm, exec);
944 JSValue sizeValue = JSValue::decode(size);
945 return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
946 }
947
// Slow path for op_new_func: allocates a JSFunction for the given executable,
// closing over the current call frame's scope chain.
EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSCell* functionExecutable)
{
    ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return JSValue::encode(JSFunction::create(vm, static_cast<FunctionExecutable*>(functionExecutable), exec->scope()));
}
955
956 JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
957 {
958 VM* vm = &exec->vm();
959 NativeCallFrameTracer tracer(vm, exec);
960
961 return constructEmptyObject(exec, structure);
962 }
963
// Slow path for op_new_regexp: creates a RegExpObject wrapping the precompiled
// RegExp. Throws a SyntaxError (and returns undefined) if the RegExp is
// invalid.
EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    RegExp* regexp = static_cast<RegExp*>(regexpPtr);
    if (!regexp->isValid()) {
        vm.throwException(exec, createSyntaxError(exec, "Invalid flags supplied to RegExp constructor."));
        return JSValue::encode(jsUndefined());
    }

    return JSValue::encode(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp));
}
976
// Called from JIT code when the watchdog-timer check trips. If the watchdog
// actually fired, throws a TerminatedExecutionException to unwind the script.
void JIT_OPERATION operationHandleWatchdogTimer(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (UNLIKELY(vm.watchdog && vm.watchdog->didFire(exec)))
        vm.throwException(exec, createTerminatedExecutionException(&vm));
}
985
986 void JIT_OPERATION operationThrowStaticError(ExecState* exec, EncodedJSValue encodedValue, int32_t referenceErrorFlag)
987 {
988 VM& vm = exec->vm();
989 NativeCallFrameTracer tracer(&vm, exec);
990
991 String message = errorDescriptionForValue(exec, JSValue::decode(encodedValue))->value(exec);
992 if (referenceErrorFlag)
993 vm.throwException(exec, createReferenceError(exec, message));
994 else
995 vm.throwException(exec, createTypeError(exec, message));
996 }
997
// Slow path for op_debug: forwards the debugger hook to the interpreter.
void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookID)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    vm.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID));
}
1005
1006 #if ENABLE(DFG_JIT)
// Refreshes the code block's value predictions and resets its optimization
// counter so a DFG compile will be reconsidered after another warm-up period.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1012
// Baseline JIT slow path that decides whether to start an optimized (DFG)
// compile of the current code block and/or perform on-stack replacement (OSR)
// into already-compiled optimized code. Returns a (targetPC, dataBuffer) pair
// via encodeResult; (0, 0) means "keep running baseline code". A non-zero
// bytecodeIndex means we were called from a loop trigger rather than the
// prologue.
SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, int32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Defer GC for a while so that it doesn't run between when we enter into this
    // slow path and when we figure out the state of our code block. This prevents
    // a number of awkward reentrancy scenarios, including:
    //
    // - The optimized version of our code block being jettisoned by GC right after
    //   we concluded that we wanted to use it, but have not planted it into the JS
    //   stack yet.
    //
    // - An optimized version of our code block being installed just as we decided
    //   that it wasn't ready yet.
    //
    // Note that jettisoning won't happen if we already initiated OSR, because in
    // that case we would have already planted the optimized code block into the JS
    // stack.
    DeferGCForAWhile deferGC(vm.heap);

    CodeBlock* codeBlock = exec->codeBlock();

    if (bytecodeIndex) {
        // If we're attempting to OSR from a loop, assume that this should be
        // separately optimized.
        codeBlock->m_shouldAlwaysBeInlined = false;
    }

    if (Options::verboseOSR()) {
        dataLog(
            *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
            ", executeCounter = ", codeBlock->jitExecuteCounter(),
            ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
            ", exitCounter = ");
        if (codeBlock->hasOptimizedReplacement())
            dataLog(codeBlock->replacement()->osrExitCounter());
        else
            dataLog("N/A");
        dataLog("\n");
    }

    if (!codeBlock->checkIfOptimizationThresholdReached()) {
        codeBlock->updateAllPredictions();
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
        return encodeResult(0, 0);
    }

    // The legacy profiler wants baseline code to keep running; reset counters
    // and try again later.
    if (vm.enabledProfiler()) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    // Likewise while the debugger is stepping or has pending requests.
    Debugger* debugger = codeBlock->globalObject()->debugger();
    if (debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests())) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        return encodeResult(0, 0);
    }

    if (codeBlock->m_shouldAlwaysBeInlined) {
        updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
        if (Options::verboseOSR())
            dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
        return encodeResult(0, 0);
    }

    // We cannot be in the process of asynchronous compilation and also have an optimized
    // replacement.
    DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
    ASSERT(
        !worklist
        || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
        && codeBlock->hasOptimizedReplacement()));

    DFG::Worklist::State worklistState;
    if (worklist) {
        // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
        // (i.e. compiled) code blocks. But if it completes ours, we also need to know
        // what the result was so that we don't plow ahead and attempt OSR or immediate
        // reoptimization. This will have already also set the appropriate JIT execution
        // count threshold depending on what happened, so if the compilation was anything
        // but successful we just want to return early. See the case for worklistState ==
        // DFG::Worklist::Compiled, below.

        // Note that we could have alternatively just called Worklist::compilationState()
        // here, and if it returned Compiled, we could have then called
        // completeAndScheduleOSR() below. But that would have meant that it could take
        // longer for code blocks to be completed: they would only complete when *their*
        // execution count trigger fired; but that could take a while since the firing is
        // racy. It could also mean that code blocks that never run again after being
        // compiled would sit on the worklist until next GC. That's fine, but it's
        // probably a waste of memory. Our goal here is to complete code blocks as soon as
        // possible in order to minimize the chances of us executing baseline code after
        // optimized code is already available.
        worklistState = worklist->completeAllReadyPlansForVM(
            vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
    } else
        worklistState = DFG::Worklist::NotKnown;

    if (worklistState == DFG::Worklist::Compiling) {
        // We cannot be in the process of asynchronous compilation and also have an optimized
        // replacement.
        RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
        codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
        return encodeResult(0, 0);
    }

    if (worklistState == DFG::Worklist::Compiled) {
        // If we don't have an optimized replacement but we did just get compiled, then
        // the compilation failed or was invalidated, in which case the execution count
        // thresholds have already been set appropriately by
        // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
        // nothing left to do.
        if (!codeBlock->hasOptimizedReplacement()) {
            codeBlock->updateAllPredictions();
            if (Options::verboseOSR())
                dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
            return encodeResult(0, 0);
        }
    } else if (codeBlock->hasOptimizedReplacement()) {
        if (Options::verboseOSR())
            dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
        // If we have an optimized replacement, then it must be the case that we entered
        // cti_optimize from a loop. That's because if there's an optimized replacement,
        // then all calls to this function will be relinked to the replacement and so
        // the prologue OSR will never fire.

        // This is an interesting threshold check. Consider that a function OSR exits
        // in the middle of a loop, while having a relatively low exit count. The exit
        // will reset the execution counter to some target threshold, meaning that this
        // code won't be reached until that loop heats up for >=1000 executions. But then
        // we do a second check here, to see if we should either reoptimize, or just
        // attempt OSR entry. Hence it might even be correct for
        // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
        // additional checking anyway, to reduce the amount of recompilation thrashing.
        if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Triggering reoptimization of ", *codeBlock,
                    "(", *codeBlock->replacement(), ") (in loop).\n");
            }
            codeBlock->replacement()->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
            return encodeResult(0, 0);
        }
    } else {
        // No replacement yet: decide whether to kick off a DFG compile now.
        if (!codeBlock->shouldOptimizeNow()) {
            if (Options::verboseOSR()) {
                dataLog(
                    "Delaying optimization for ", *codeBlock,
                    " because of insufficient profiling.\n");
            }
            return encodeResult(0, 0);
        }

        if (Options::verboseOSR())
            dataLog("Triggering optimized compilation of ", *codeBlock, "\n");

        // Snapshot the live values the DFG must be able to handle at the OSR
        // entry point (arguments always; locals only for loop entry).
        unsigned numVarsWithValues;
        if (bytecodeIndex)
            numVarsWithValues = codeBlock->m_numVars;
        else
            numVarsWithValues = 0;
        Operands<JSValue> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
        for (size_t i = 0; i < mustHandleValues.size(); ++i) {
            int operand = mustHandleValues.operandForIndex(i);
            if (operandIsArgument(operand)
                && !VirtualRegister(operand).toArgument()
                && codeBlock->codeType() == FunctionCode
                && codeBlock->specializationKind() == CodeForConstruct) {
                // Ugh. If we're in a constructor, the 'this' argument may hold garbage. It will
                // also never be used. It doesn't matter what we put into the value for this,
                // but it has to be an actual value that can be grokked by subsequent DFG passes,
                // so we sanitize it here by turning it into Undefined.
                mustHandleValues[i] = jsUndefined();
            } else
                mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
        }

        RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
        CompilationResult result = DFG::compile(
            vm, replacementCodeBlock.get(), 0, DFG::DFGMode, bytecodeIndex,
            mustHandleValues, JITToDFGDeferredCompilationCallback::create());

        if (result != CompilationSuccessful) {
            ASSERT(result == CompilationDeferred || replacementCodeBlock->hasOneRef());
            return encodeResult(0, 0);
        }
    }

    CodeBlock* optimizedCodeBlock = codeBlock->replacement();
    ASSERT(JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));

    // Try to enter the optimized code right now, at this bytecode index.
    if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
        if (Options::verboseOSR()) {
            dataLog(
                "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ".\n");
        }

        codeBlock->optimizeSoon();
        return encodeResult(vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress(), dataBuffer);
    }

    if (Options::verboseOSR()) {
        dataLog(
            "Optimizing ", *codeBlock, " -> ", *codeBlock->replacement(),
            " succeeded, OSR failed, after a delay of ",
            codeBlock->optimizationDelayCounter(), ".\n");
    }

    // Count the OSR failure as a speculation failure. If this happens a lot, then
    // reoptimize.
    optimizedCodeBlock->countOSRExit();

    // We are a lot more conservative about triggering reoptimization after OSR failure than
    // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
    // already, then we really would like to reoptimize immediately. But this case covers
    // something else: there weren't many (or any) speculation failures before, but we just
    // failed to enter the speculative code because some variable had the wrong value or
    // because the OSR code decided for any spurious reason that it did not want to OSR
    // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
    // reoptimization trigger.
    if (optimizedCodeBlock->shouldReoptimizeNow()) {
        if (Options::verboseOSR()) {
            dataLog(
                "Triggering reoptimization of ", *codeBlock, " -> ",
                *codeBlock->replacement(), " (after OSR fail).\n");
        }
        optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
        return encodeResult(0, 0);
    }

    // OSR failed this time, but it might succeed next time! Let the code run a bit
    // longer and then try again.
    codeBlock->optimizeAfterWarmUp();

    return encodeResult(0, 0);
}
1251 #endif
1252
1253 void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1254 {
1255 VM& vm = exec->vm();
1256 NativeCallFrameTracer tracer(&vm, exec);
1257
1258 JSValue arrayValue = JSValue::decode(encodedArrayValue);
1259 ASSERT(isJSArray(arrayValue));
1260 asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1261 }
1262
#if USE(JSVALUE64)
// Slow path for op_put_getter_setter (64-bit payload): installs an Accessor
// property on the base object. At least one of getter/setter is an object;
// a missing half arrives as undefined.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, EncodedJSValue encodedObjectValue, Identifier* identifier, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(JSValue::decode(encodedObjectValue).isObject());
    JSObject* baseObj = asObject(JSValue::decode(encodedObjectValue));

    GetterSetter* accessor = GetterSetter::create(vm);

    JSValue getter = JSValue::decode(encodedGetterValue);
    JSValue setter = JSValue::decode(encodedSetterValue);
    ASSERT(getter.isObject() || getter.isUndefined());
    ASSERT(setter.isObject() || setter.isUndefined());
    ASSERT(getter.isObject() || setter.isObject());

    if (!getter.isUndefined())
        accessor->setGetter(vm, asObject(getter));
    if (!setter.isUndefined())
        accessor->setSetter(vm, asObject(setter));
    baseObj->putDirectAccessor(exec, *identifier, accessor, Accessor);
}
#else
// 32-bit variant: getter/setter arrive as raw cells; a missing half is null.
void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, Identifier* identifier, JSCell* getter, JSCell* setter)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(object && object->isObject());
    JSObject* baseObj = object->getObject();

    GetterSetter* accessor = GetterSetter::create(vm);

    ASSERT(!getter || getter->isObject());
    ASSERT(!setter || setter->isObject());
    ASSERT(getter || setter);

    if (getter)
        accessor->setGetter(vm, getter->getObject());
    if (setter)
        accessor->setSetter(vm, setter->getObject());
    baseObj->putDirectAccessor(exec, *identifier, accessor, Accessor);
}
#endif
1308
1309 void JIT_OPERATION operationPushNameScope(ExecState* exec, Identifier* identifier, EncodedJSValue encodedValue, int32_t attibutes)
1310 {
1311 VM& vm = exec->vm();
1312 NativeCallFrameTracer tracer(&vm, exec);
1313
1314 JSNameScope* scope = JSNameScope::create(exec, *identifier, JSValue::decode(encodedValue), attibutes);
1315
1316 exec->setScope(scope);
1317 }
1318
// Slow path for op_push_with_scope: converts the operand to an object and
// pushes a JSWithScope for it. If ToObject throws, the scope chain is left
// untouched and the exception propagates.
void JIT_OPERATION operationPushWithScope(ExecState* exec, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* o = JSValue::decode(encodedValue).toObject(exec);
    if (vm.exception())
        return;

    exec->setScope(JSWithScope::create(exec, o));
}
1330
// Slow path for op_pop_scope: pops the innermost scope off the scope chain.
void JIT_OPERATION operationPopScope(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    exec->setScope(exec->scope()->next());
}
1338
// Notifies the legacy profiler (if enabled) that the given callee finished
// executing.
void JIT_OPERATION operationProfileDidCall(ExecState* exec, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (LegacyProfiler* profiler = vm.enabledProfiler())
        profiler->didExecute(exec, JSValue::decode(encodedValue));
}
1347
// Notifies the legacy profiler (if enabled) that the given callee is about to
// execute.
void JIT_OPERATION operationProfileWillCall(ExecState* exec, EncodedJSValue encodedValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    if (LegacyProfiler* profiler = vm.enabledProfiler())
        profiler->willExecute(exec, JSValue::decode(encodedValue));
}
1356
// Slow path for op_check_has_instance: invoked when the instanceof RHS does
// not implement default hasInstance (see ASSERT below). Defers to a custom
// hasInstance implementation when present; otherwise throws an "invalid
// parameter" TypeError and returns the empty value.
EncodedJSValue JIT_OPERATION operationCheckHasInstance(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedBaseVal)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseVal = JSValue::decode(encodedBaseVal);

    if (baseVal.isObject()) {
        JSObject* baseObject = asObject(baseVal);
        ASSERT(!baseObject->structure(vm)->typeInfo().implementsDefaultHasInstance());
        if (baseObject->structure(vm)->typeInfo().implementsHasInstance()) {
            bool result = baseObject->methodTable(vm)->customHasInstance(baseObject, exec, value);
            return JSValue::encode(jsBoolean(result));
        }
    }

    vm.throwException(exec, createInvalidParameterError(exec, "instanceof", baseVal));
    return JSValue::encode(JSValue());
}
1377
// Slow path for op_create_activation: allocates a JSActivation over this
// frame's registers (starting at |offset|) and pushes it onto the scope chain.
JSCell* JIT_OPERATION operationCreateActivation(ExecState* exec, int32_t offset)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSActivation* activation = JSActivation::create(vm, exec, exec->registers() + offset, exec->codeBlock());
    exec->setScope(activation);
    return activation;
}
1386
// Slow path for op_create_arguments: allocates the Arguments object for the
// current frame.
JSCell* JIT_OPERATION operationCreateArguments(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    // NB: This needs to be exceedingly careful with top call frame tracking, since it
    // may be called from OSR exit, while the state of the call stack is bizarre.
    Arguments* result = Arguments::create(vm, exec);
    ASSERT(!vm.exception());
    return result;
}
1397
1398 JSCell* JIT_OPERATION operationCreateArgumentsDuringOSRExit(ExecState* exec)
1399 {
1400 DeferGCForAWhile(exec->vm().heap);
1401 return operationCreateArguments(exec);
1402 }
1403
// Slow path for reading arguments.length once the Arguments object has been
// materialized: performs a generic "length" property load on it.
EncodedJSValue JIT_OPERATION operationGetArgumentsLength(ExecState* exec, int32_t argumentsRegister)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    // Here we can assume that the arguments were created. Because otherwise the JIT code would
    // have not made this call.
    Identifier ident(&vm, "length");
    JSValue baseValue = exec->uncheckedR(argumentsRegister).jsValue();
    PropertySlot slot(baseValue);
    return JSValue::encode(baseValue.get(exec, ident, slot));
}
1415
1416 }
1417
// Generic get-by-val helper shared by the operationGetByVal* slow paths.
// Tries, in order: the fast own-property lookup for string subscripts,
// indexed access (repatching the call site to the string-specialized thunk
// when the base is a string), private-name lookup, and finally a generic get
// by identifier.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ReturnAddressPtr returnAddress)
{
    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        VM& vm = exec->vm();
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, asString(subscript)->value(exec)))
                return result;
        }
    }

    if (subscript.isUInt32()) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i)) {
            // Specialize this call site for string bases from now on.
            ctiPatchCallByReturnAddress(exec->codeBlock(), returnAddress, FunctionPtr(operationGetByValString));
            return asString(baseValue)->getIndex(exec, i);
        }
        return baseValue.get(exec, i);
    }

    if (isName(subscript))
        return baseValue.get(exec, jsCast<NameInstance*>(subscript.asCell())->privateName());

    // Fully generic path: coerce the subscript to an identifier.
    Identifier property = subscript.toString(exec)->toIdentifier(exec);
    return baseValue.get(exec, property);
}
1444
1445 extern "C" {
1446
// Fully generic get-by-val slow path; call sites are relinked to this once
// they are deemed too polymorphic to patch further.
EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result = getByVal(exec, baseValue, subscript, ReturnAddressPtr(OUR_RETURN_ADDRESS));
    return JSValue::encode(result);
}
1457
// Patchable get-by-val slow path. Besides performing the access, this profiles
// the call site: for an object base with an int32 subscript it tries to
// compile a specialized array-access stub; after repeated failures (or for
// bases that intercept indexed access) it gives up and relinks the site to
// operationGetByValGeneric.
EncodedJSValue JIT_OPERATION operationGetByValDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    if (baseValue.isObject() && subscript.isInt32()) {
        // See if it's worth optimizing this at all.
        JSObject* object = asObject(baseValue);
        bool didOptimize = false;

        unsigned bytecodeOffset = exec->locationAsBytecodeOffset();
        ASSERT(bytecodeOffset);
        ByValInfo& byValInfo = exec->codeBlock()->getByValInfo(bytecodeOffset - 1);
        ASSERT(!byValInfo.stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
            if (arrayMode != byValInfo.arrayMode) {
                JIT::compileGetByVal(&vm, exec->codeBlock(), &byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
                didOptimize = true;
            }
        }

        if (!didOptimize) {
            // If we take slow path more than 10 times without patching then make sure we
            // never make that mistake again. Or, if we failed to patch and we have some object
            // that intercepts indexed get, then don't even wait until 10 times. For cases
            // where we see non-index-intercepting objects, this gives 10 iterations worth of
            // opportunity for us to observe that the get_by_val may be polymorphic.
            if (++byValInfo.slowPathCount >= 10
                || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
                // Don't ever try to optimize.
                RepatchBuffer repatchBuffer(exec->codeBlock());
                repatchBuffer.relinkCallerToFunction(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationGetByValGeneric));
            }
        }
    }

    JSValue result = getByVal(exec, baseValue, subscript, ReturnAddressPtr(OUR_RETURN_ADDRESS));
    return JSValue::encode(result);
}
1502
// Get-by-val slow path specialized for string bases with integer subscripts.
// If the base turns out not to be a string after all, the call site is
// repatched back to the default slow path.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            result = asString(baseValue)->getIndex(exec, i);
        else {
            result = baseValue.get(exec, i);
            // Our specialization no longer applies; fall back to the default
            // patchable slow path for future calls.
            if (!isJSString(baseValue))
                ctiPatchCallByReturnAddress(exec->codeBlock(), ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationGetByValDefault));
        }
    } else if (isName(subscript))
        result = baseValue.get(exec, jsCast<NameInstance*>(subscript.asCell())->privateName());
    else {
        Identifier property = subscript.toString(exec)->toIdentifier(exec);
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
1529
// Slow path for op_tear_off_activation: detaches the given JSActivation from
// the (about-to-be-destroyed) stack frame.
void JIT_OPERATION operationTearOffActivation(ExecState* exec, JSCell* activationCell)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    ASSERT(exec->codeBlock()->needsActivation());
    jsCast<JSActivation*>(activationCell)->tearOff(vm);
}
1538
1539 void JIT_OPERATION operationTearOffArguments(ExecState* exec, JSCell* argumentsCell, JSCell* activationCell)
1540 {
1541 ASSERT(exec->codeBlock()->usesArguments());
1542 if (activationCell) {
1543 jsCast<Arguments*>(argumentsCell)->didTearOffActivation(exec, jsCast<JSActivation*>(activationCell));
1544 return;
1545 }
1546 jsCast<Arguments*>(argumentsCell)->tearOff(exec);
1547 }
1548
// Slow path for op_del_by_id: deletes |identifier| from the base (after
// ToObject). In strict mode a failed delete also throws a TypeError; the
// boolean result is returned either way.
EncodedJSValue JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, const Identifier* identifier)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
    bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, *identifier);
    JSValue result = jsBoolean(couldDelete);
    if (!couldDelete && exec->codeBlock()->isStrictMode())
        vm.throwException(exec, createTypeError(exec, "Unable to delete property."));
    return JSValue::encode(result);
}
1561
// Slow path for op_get_pnames: returns a property-name iterator for |obj|,
// reusing the structure's cached iterator when its cached prototype chain
// still matches.
JSCell* JIT_OPERATION operationGetPNames(ExecState* exec, JSObject* obj)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    Structure* structure = obj->structure(vm);
    JSPropertyNameIterator* jsPropertyNameIterator = structure->enumerationCache();
    if (!jsPropertyNameIterator || jsPropertyNameIterator->cachedPrototypeChain() != structure->prototypeChain(exec))
        jsPropertyNameIterator = JSPropertyNameIterator::create(exec, obj);
    return jsPropertyNameIterator;
}
1573
// Slow path for op_instanceof performing the default hasInstance check. The
// ASSERT documents that the inline fast path already handled the case where
// both operands are objects, so at least one is a non-object here.
EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue value = JSValue::decode(encodedValue);
    JSValue proto = JSValue::decode(encodedProto);

    ASSERT(!value.isObject() || !proto.isObject());

    bool result = JSObject::defaultHasInstance(exec, value, proto);
    return JSValue::encode(jsBoolean(result));
}
1586
1587 CallFrame* JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t firstFreeRegister, int32_t firstVarArgOffset)
1588 {
1589 VM& vm = exec->vm();
1590 NativeCallFrameTracer tracer(&vm, exec);
1591 JSStack* stack = &exec->interpreter()->stack();
1592 JSValue arguments = JSValue::decode(encodedArguments);
1593 CallFrame* newCallFrame = sizeFrameForVarargs(exec, stack, arguments, firstFreeRegister, firstVarArgOffset);
1594 return newCallFrame;
1595 }
1596
1597 CallFrame* JIT_OPERATION operationLoadVarargs(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedThis, EncodedJSValue encodedArguments, int32_t firstVarArgOffset)
1598 {
1599 VM& vm = exec->vm();
1600 NativeCallFrameTracer tracer(&vm, exec);
1601 JSValue thisValue = JSValue::decode(encodedThis);
1602 JSValue arguments = JSValue::decode(encodedArguments);
1603 loadVarargs(exec, newCallFrame, thisValue, arguments, firstVarArgOffset);
1604 return newCallFrame;
1605 }
1606
1607 EncodedJSValue JIT_OPERATION operationToObject(ExecState* exec, EncodedJSValue value)
1608 {
1609 VM& vm = exec->vm();
1610 NativeCallFrameTracer tracer(&vm, exec);
1611 return JSValue::encode(JSValue::decode(value).toObject(exec));
1612 }
1613
1614 char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1615 {
1616 VM& vm = exec->vm();
1617 NativeCallFrameTracer tracer(&vm, exec);
1618 JSValue key = JSValue::decode(encodedKey);
1619 CodeBlock* codeBlock = exec->codeBlock();
1620
1621 SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1622 void* result = jumpTable.ctiDefault.executableAddress();
1623
1624 if (key.isString()) {
1625 StringImpl* value = asString(key)->value(exec).impl();
1626 if (value->length() == 1)
1627 result = jumpTable.ctiForValue((*value)[0]).executableAddress();
1628 }
1629
1630 return reinterpret_cast<char*>(result);
1631 }
1632
1633 char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1634 {
1635 VM& vm = exec->vm();
1636 NativeCallFrameTracer tracer(&vm, exec);
1637 JSValue key = JSValue::decode(encodedKey);
1638 CodeBlock* codeBlock = exec->codeBlock();
1639
1640 SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
1641 void* result;
1642 if (key.isInt32())
1643 result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
1644 else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
1645 result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
1646 else
1647 result = jumpTable.ctiDefault.executableAddress();
1648 return reinterpret_cast<char*>(result);
1649 }
1650
1651 char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
1652 {
1653 VM& vm = exec->vm();
1654 NativeCallFrameTracer tracer(&vm, exec);
1655 JSValue key = JSValue::decode(encodedKey);
1656 CodeBlock* codeBlock = exec->codeBlock();
1657
1658 void* result;
1659 StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);
1660
1661 if (key.isString()) {
1662 StringImpl* value = asString(key)->value(exec).impl();
1663 result = jumpTable.ctiForValue(value).executableAddress();
1664 } else
1665 result = jumpTable.ctiDefault.executableAddress();
1666
1667 return reinterpret_cast<char*>(result);
1668 }
1669
1670 EncodedJSValue JIT_OPERATION operationResolveScope(ExecState* exec, int32_t identifierIndex)
1671 {
1672 VM& vm = exec->vm();
1673 NativeCallFrameTracer tracer(&vm, exec);
1674 const Identifier& ident = exec->codeBlock()->identifier(identifierIndex);
1675 return JSValue::encode(JSScope::resolve(exec, exec->scope(), ident));
1676 }
1677
1678 EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, Instruction* bytecodePC)
1679 {
1680 VM& vm = exec->vm();
1681 NativeCallFrameTracer tracer(&vm, exec);
1682 CodeBlock* codeBlock = exec->codeBlock();
1683 Instruction* pc = bytecodePC;
1684
1685 const Identifier& ident = codeBlock->identifier(pc[3].u.operand);
1686 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[2].u.operand).jsValue());
1687 ResolveModeAndType modeAndType(pc[4].u.operand);
1688
1689 PropertySlot slot(scope);
1690 if (!scope->getPropertySlot(exec, ident, slot)) {
1691 if (modeAndType.mode() == ThrowIfNotFound)
1692 vm.throwException(exec, createUndefinedVariableError(exec, ident));
1693 return JSValue::encode(jsUndefined());
1694 }
1695
1696 // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
1697 if (slot.isCacheableValue() && slot.slotBase() == scope && scope->structure(vm)->propertyAccessesAreCacheable()) {
1698 if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
1699 ConcurrentJITLocker locker(codeBlock->m_lock);
1700 pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure(vm));
1701 pc[6].u.operand = slot.cachedOffset();
1702 }
1703 }
1704
1705 return JSValue::encode(slot.getValue(exec, ident));
1706 }
1707
1708 void JIT_OPERATION operationPutToScope(ExecState* exec, Instruction* bytecodePC)
1709 {
1710 VM& vm = exec->vm();
1711 NativeCallFrameTracer tracer(&vm, exec);
1712 Instruction* pc = bytecodePC;
1713
1714 CodeBlock* codeBlock = exec->codeBlock();
1715 const Identifier& ident = codeBlock->identifier(pc[2].u.operand);
1716 JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(pc[1].u.operand).jsValue());
1717 JSValue value = exec->r(pc[3].u.operand).jsValue();
1718 ResolveModeAndType modeAndType = ResolveModeAndType(pc[4].u.operand);
1719
1720 if (modeAndType.mode() == ThrowIfNotFound && !scope->hasProperty(exec, ident)) {
1721 exec->vm().throwException(exec, createUndefinedVariableError(exec, ident));
1722 return;
1723 }
1724
1725 PutPropertySlot slot(scope, codeBlock->isStrictMode());
1726 scope->methodTable()->put(scope, exec, ident, value, slot);
1727
1728 if (exec->vm().exception())
1729 return;
1730
1731 // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
1732 if (modeAndType.type() == GlobalProperty || modeAndType.type() == GlobalPropertyWithVarInjectionChecks) {
1733 if (slot.isCacheablePut() && slot.base() == scope && scope->structure()->propertyAccessesAreCacheable()) {
1734 ConcurrentJITLocker locker(codeBlock->m_lock);
1735 pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure());
1736 pc[6].u.operand = slot.cachedOffset();
1737 }
1738 }
1739 }
1740
1741 void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
1742 {
1743 VM* vm = &exec->vm();
1744 NativeCallFrameTracer tracer(vm, exec);
1745
1746 JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
1747 vm->throwException(exec, exceptionValue);
1748
1749 // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForThrow
1750 genericUnwind(vm, exec, exceptionValue);
1751 }
1752
1753 void JIT_OPERATION operationFlushWriteBarrierBuffer(ExecState* exec, JSCell* cell)
1754 {
1755 VM* vm = &exec->vm();
1756 NativeCallFrameTracer tracer(vm, exec);
1757 vm->heap.flushWriteBarrierBuffer(cell);
1758 }
1759
1760 void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
1761 {
1762 VM* vm = &exec->vm();
1763 NativeCallFrameTracer tracer(vm, exec);
1764 vm->heap.writeBarrier(cell);
1765 }
1766
1767 // NB: We don't include the value as part of the barrier because the write barrier elision
1768 // phase in the DFG only tracks whether the object being stored to has been barriered. It
1769 // would be much more complicated to try to model the value being stored as well.
1770 void JIT_OPERATION operationUnconditionalWriteBarrier(ExecState* exec, JSCell* cell)
1771 {
1772 VM* vm = &exec->vm();
1773 NativeCallFrameTracer tracer(vm, exec);
1774 vm->heap.writeBarrier(cell);
1775 }
1776
1777 void JIT_OPERATION operationInitGlobalConst(ExecState* exec, Instruction* pc)
1778 {
1779 VM* vm = &exec->vm();
1780 NativeCallFrameTracer tracer(vm, exec);
1781
1782 JSValue value = exec->r(pc[2].u.operand).jsValue();
1783 pc[1].u.registerPointer->set(*vm, exec->codeBlock()->globalObject(), value);
1784 }
1785
// Finds the handler for the VM's pending exception and records the unwind
// targets out-of-band on the VM (via genericUnwind). Takes the VM* explicitly
// and tolerates a VM-entry sentinel frame (see the tracer flag below), unlike
// the operations above that derive the VM from a real ExecState.
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec, NativeCallFrameTracer::VMEntrySentinelOK);

    // An exception must already be pending; we only locate its handler here.
    JSValue exceptionValue = vm->exception();
    ASSERT(exceptionValue);

    genericUnwind(vm, exec, exceptionValue);
    // genericUnwind() reports where to resume via vm->targetMachinePCForThrow
    // (and vm->callFrameForThrow); it must have found somewhere to go.
    ASSERT(vm->targetMachinePCForThrow);
}
1796
1797 void JIT_OPERATION operationVMHandleException(ExecState* exec)
1798 {
1799 VM* vm = &exec->vm();
1800 NativeCallFrameTracer tracer(vm, exec);
1801
1802 ASSERT(!exec->isVMEntrySentinel());
1803 genericUnwind(vm, exec, vm->exception());
1804 }
1805
// This function "should" just take the ExecState*, but doing so would make it more difficult
// to call from exception check sites. So, unlike all of our other functions, we allow
// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
// particularly safe here since this is never called on the critical path - it's only for
// testing.
void JIT_OPERATION operationExceptionFuzz()
{
    ASSERT(Options::enableExceptionFuzz());

    // This probably "just works" for GCC also, but I haven't tried.
#if COMPILER(CLANG)
    // Recover the caller's frame as the ExecState — assumes the caller keeps
    // a standard frame layout with the call frame in the frame pointer slot;
    // TODO(review): confirm this holds for every call site.
    ExecState* exec = static_cast<ExecState*>(__builtin_frame_address(1));
    // Keep the GC out of the way while we poke at a frame obtained this way.
    DeferGCForAWhile deferGC(exec->vm().heap);

    // Count every fuzz check site we pass through; the fire target below is
    // compared against this running total.
    s_numberOfExceptionFuzzChecks++;

    unsigned fireTarget = Options::fireExceptionFuzzAt();
    if (fireTarget == s_numberOfExceptionFuzzChecks) {
        // Throw a synthetic exception exactly at the configured check number
        // to exercise the JIT's exception-check paths.
        printf("JSC EXCEPTION FUZZ: Throwing fuzz exception with call frame %p and return address %p.\n", exec, __builtin_return_address(0));
        exec->vm().throwException(
            exec, createError(exec->lexicalGlobalObject(), ASCIILiteral("Exception Fuzz")));
    }
#endif // COMPILER(CLANG)
}
1830
1831 } // extern "C"
1832
1833 // Note: getHostCallReturnValueWithExecState() needs to be placed before the
1834 // definition of getHostCallReturnValue() below because the Windows build
1835 // requires it.
1836 extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState* exec)
1837 {
1838 if (!exec)
1839 return JSValue::encode(JSValue());
1840 return JSValue::encode(exec->vm().hostCallReturnValue);
1841 }
1842
// Per-architecture thunks for getHostCallReturnValue: each one moves the
// current frame pointer (which holds the ExecState in the JIT's ABI —
// NOTE(review): per the register choices below; confirm against the JIT's
// frame-register assignments) into the first-argument register and tail-calls
// getHostCallReturnValueWithExecState().
#if COMPILER(GCC) && CPU(X86_64)
// x86-64: frame pointer is %rbp; first argument register is %rdi.
asm (
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov %rbp, %rdi\n"
    "jmp " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(X86)
// x86-32: arguments go on the stack, so this cannot be a simple tail-jump —
// push %ebp as the argument, make a real call, then clean up and return.
asm (
".text" "\n" \
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "push %ebp\n"
    "leal -4(%esp), %esp\n"
    "push %ebp\n"
    "call " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
    "leal 8(%esp), %esp\n"
    "pop %ebp\n"
    "ret\n"
);

#elif COMPILER(GCC) && CPU(ARM_THUMB2)
// ARM Thumb-2: frame pointer is r7; first argument register is r0.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
".thumb" "\n"
".thumb_func " THUMB_FUNC_PARAM(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r7" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
// ARM (traditional): frame pointer is r11; first argument register is r0.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
INLINE_ARM_FUNCTION(getHostCallReturnValue)
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r0, r11" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif CPU(ARM64)
// ARM64: frame pointer is x29; first argument register is x0.
asm (
".text" "\n"
".align 2" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov x0, x29" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(MIPS)

// MIPS PIC calling convention requires the callee's address in $t9 before the
// branch so the callee can compute its GOT pointer.
#if WTF_MIPS_PIC
#define LOAD_FUNCTION_TO_T9(function) \
    ".set noreorder" "\n" \
    ".cpload $25" "\n" \
    ".set reorder" "\n" \
    "la $t9, " LOCAL_REFERENCE(function) "\n"
#else
#define LOAD_FUNCTION_TO_T9(function) "" "\n"
#endif

// MIPS: frame pointer is $fp; first argument register is $a0.
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    LOAD_FUNCTION_TO_T9(getHostCallReturnValueWithExecState)
    "move $a0, $fp" "\n"
    "b " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "\n"
);

#elif COMPILER(GCC) && CPU(SH4)

#define SH4_SCRATCH_REGISTER "r11"

// SH4: frame pointer is r14; first argument register is r4. The target
// address is loaded PC-relative into a scratch register and branched to with
// braf (the .long holds the displacement to the callee).
asm (
".text" "\n"
".globl " SYMBOL_STRING(getHostCallReturnValue) "\n"
HIDE_SYMBOL(getHostCallReturnValue) "\n"
SYMBOL_STRING(getHostCallReturnValue) ":" "\n"
    "mov r14, r4" "\n"
    "mov.l 2f, " SH4_SCRATCH_REGISTER "\n"
    "braf " SH4_SCRATCH_REGISTER "\n"
    "nop" "\n"
    "1: .balign 4" "\n"
    "2: .long " LOCAL_REFERENCE(getHostCallReturnValueWithExecState) "-1b\n"
);

#elif COMPILER(MSVC) && CPU(X86)
// MSVC x86: a naked function that overwrites its own argument slot with ebp,
// then tail-jumps to the C++ helper.
extern "C" {
    __declspec(naked) EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue()
    {
        __asm mov [esp + 4], ebp;
        __asm jmp getHostCallReturnValueWithExecState
    }
}
#endif
1950
1951 } // namespace JSC
1952
1953 #endif // ENABLE(JIT)